12 #include <fmt/format.h> 25 if (permutationVector.
GetSize() > 0)
51 weightShape[2] * weightShape[3] });
53 weightShape[0] * weightShape[1],
60 weightInfo.
SetShape({ 1, weightShape[0] * weightShape[1], weightShape[2], weightShape[3] });
65 template <
typename DataType>
70 unsigned int multiplier;
73 unsigned int inputChannels;
77 height = weightShape[0];
78 width = weightShape[1];
79 inputChannels = weightShape[2];
80 multiplier = weightShape[3];
84 height = weightShape[2];
85 width = weightShape[3];
86 inputChannels = weightShape[1];
87 multiplier = weightShape[0];
91 std::vector<DataType> weightAclOrder(height*width*inputChannels*multiplier);
92 unsigned int destinationWeightsChannel;
93 unsigned int totalChannels = inputChannels * multiplier;
94 unsigned int channelSize = height * width;
95 unsigned int inputChannel = 0;
97 for (
unsigned int originWeightsChannel = 0; originWeightsChannel < totalChannels; originWeightsChannel++)
99 inputChannel = originWeightsChannel % inputChannels;
100 destinationWeightsChannel = (originWeightsChannel - inputChannel) / inputChannels + multiplier * inputChannel;
102 for (
unsigned int i = 0; i < channelSize; i++)
104 weightAclOrder[i + destinationWeightsChannel * channelSize] =
105 weight[i + originWeightsChannel * channelSize];
134 return weightPermutedInfo;
144 unsigned int depthMultiplier = 1;
156 permutationVector = { 0, 2, 3, 1 };
166 return std::make_tuple(weightsPermuted, depthMultiplier);
173 unsigned int aclDepthMultiplier = 1;
179 weightsPermuted = weightInfo;
194 return std::make_tuple(weightsPermuted, aclDepthMultiplier);
208 "quantization is applied.");
212 auto weightsShape = weightsInfo.
GetShape();
214 unsigned int depthMultiplier = weightsShape[3] / inputInfo.
GetShape()[channelIndex];
215 weightsInfo.
SetShape({ weightsShape[1],
224 return std::make_tuple(weightsPermuted, depthMultiplier);
248 permutationVector = { 3, 2, 0, 1 };
258 weightPermuted = ReorderWeightChannelsForAcl<float>(weightPermuted, dataLayout, permuteBuffer);
262 ReorderWeightChannelsForAcl<half_float::half>(weightPermuted, dataLayout, permuteBuffer);
266 weightPermuted = ReorderWeightChannelsForAcl<uint8_t>(weightPermuted, dataLayout, permuteBuffer);
272 weightPermuted = ReorderWeightChannelsForAcl<int8_t>(weightPermuted, dataLayout, permuteBuffer);
284 return weightPermuted;
289 int32_t reversedMask = 0;
290 for (
unsigned int i = 0; i < armnn::numeric_cast<unsigned int>(numDim); ++i)
293 int32_t bit = (mask & 1 << i) != 0;
295 reversedMask += (bit << std::max(numDim-(armnn::numeric_cast<int>(i)+1), 0));
armnn::ConstTensor PermuteTensor(const ConstTensorHandle *tensor, const PermutationVector &permutationVector, void *permuteBuffer)
constexpr const char * GetDataLayoutName(DataLayout dataLayout)
const TensorShape & GetShape() const
TensorInfo ConvertWeightTensorInfoFromArmnnToAcl(const TensorInfo &weightInfo, DataLayout dataLayout)
#define ARMNN_NO_DEPRECATE_WARN_BEGIN
bool HasPerAxisQuantization() const
armnn::ConstTensor ConvertWeightTensorFromArmnnToAcl(const ConstTensorHandle *weightTensor, DataLayout dataLayout, void *permuteBuffer)
const TensorShape & GetShape() const
unsigned int GetNumBytes() const
Copyright (c) 2021 ARM Limited and Contributors.
ConstTensor ReorderWeightChannelsForAcl(const ConstTensor &weightHandle, DataLayout dataLayout, void *permuteBuffer)
void SetShape(const TensorShape &newShape)
const TensorInfo & GetTensorInfo() const
std::tuple< ConstTensor, unsigned int > Convert1HWOTensorToAcl(const ConstTensorHandle *weightTensor, const TensorInfo &inputInfo, const DataLayout dataLayout, void *permuteBuffer)
Weights for depthwise have a datalayout of [1,H,W,O] = [1,H,W,I*M]. This function converts a ConstCpuTe...
#define ARMNN_NO_DEPRECATE_WARN_END
#define ARMNN_ASSERT_MSG(COND, MSG)
Provides access to the appropriate indexes for Channels, Height and Width based on DataLayout...
DataType GetDataType() const
A tensor defined by a TensorInfo (shape and data type) and an immutable backing store.
#define ARMNN_FALLTHROUGH
std::tuple< TensorInfo, unsigned int > Convert1HWOTensorInfoToAcl(const TensorInfo &weightInfo, const TensorInfo &inputInfo, const DataLayout dataLayout)
Weights for depthwise have a datalayout of [1,H,W,O] = [1,H,W,I*M]. This function converts a TensorInfo...
const TensorInfo & GetInfo() const
int32_t ConvertMaskToACLFormat(int32_t mask, int32_t numDim)
std::tuple< ConstTensor, unsigned int > Convert1HWOtoMIHW(const ConstTensorHandle *weightTensor, const TensorInfo &inputInfo, const DataLayout &dataLayout, void *permuteBuffer)
Converts a (weights) tensor from [1, H, W, I*M] = [1, H, W, O] to [M, I, H, W].
DataType GetDataType() const
void ReshapeWeightsForAcl(TensorInfo &weightInfo, DataLayout dataLayout)
unsigned int GetChannelsIndex() const
armnn::TensorShape Permuted(const armnn::TensorShape &srcShape, const armnn::PermutationVector &mappings)
const T * GetConstTensor() const
constexpr unsigned int GetDataTypeSize(DataType dataType)