24.08
|
Go to the documentation of this file.
9 #include <CpuExecutor.h>
10 #include <nnapi/OperandTypes.h>
11 #include <nnapi/Result.h>
12 #include <nnapi/Types.h>
26 template <
typename OperandType>
31 :
std::runtime_error(
"Operand type is unsupported")
46 const std::vector<android::nn::RunTimePoolInfo>& memPools);
58 template <
typename TensorType>
60 const std::string& requestName,
61 const std::string& tensorName,
62 const TensorType& tensor);
65 const std::string& dumpDir,
70 const std::string& dumpDir);
73 const std::string& dumpDir,
74 std::vector<uint8_t>& dataCacheData,
75 bool dataCachingActive =
true);
78 const std::string& existingDotFileName,
79 const std::string& dumpDir,
82 void RenameFile(
const std::string& existingName,
83 const std::string& extension,
84 const std::string& dumpDir,
103 shape.dimensions = std::vector<uint32_t>{};
107 std::vector<uint32_t> dimensions;
109 dimensions.resize(numDims);
110 for (
unsigned int outputIdx = 0u; outputIdx < numDims; ++outputIdx)
112 dimensions[outputIdx] = tensorShape[outputIdx];
114 shape.dimensions = dimensions;
117 shape.isSufficient =
true;
122 void CommitPools(std::vector<::android::nn::RunTimePoolInfo>& memPools);
void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled, const std::string &dumpDir, armnn::NetworkId networkId, const armnn::IProfiler *profiler)
::android::nn::Operand Operand
bool IsDynamicTensor(const armnn::TensorInfo &tensorInfo)
Checks if a tensor info represents a dynamic tensor.
void CommitPools(std::vector<::android::nn::RunTimePoolInfo > &memPools)
bool isQuantizedOperand(const OperandType &operandType)
std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork &optimizedNetwork, const std::string &dumpDir)
const armnn::PermutationVector g_DontPermute
::android::nn::OperandType OperandType
void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo &tensorInfo, const void *input, void *output, const armnn::PermutationVector &mappings)
Swizzles tensor data in input according to the dimension mappings.
OutputShape ComputeShape(const armnn::TensorInfo &info)
std::string GetFileTimestamp()
unsigned int GetNumDimensions() const
Function that returns the tensor rank.
void * GetMemoryFromPool(DataLocation location, const std::vector< android::nn::RunTimePoolInfo > &memPools)
Returns a pointer to a specific location in a pool.
::android::nn::Model Model
Helper classes.
std::string SerializeNetwork(const armnn::INetwork &network, const std::string &dumpDir, std::vector< uint8_t > &dataCacheData, bool dataCachingActive)
std::string GetModelSummary(const Model &model)
void RenameFile(const std::string &existingName, const std::string &extension, const std::string &dumpDir, const armnn::NetworkId networkId)
void RenameExportedFiles(const std::string &existingSerializedFileName, const std::string &existingDotFileName, const std::string &dumpDir, const armnn::NetworkId networkId)
void * GetMemoryFromPointer(const Request::Argument &requestArg)
armnn::TensorInfo GetTensorInfoForOperand(const Operand &operand)
void DumpTensor(const std::string &dumpDir, const std::string &requestName, const std::string &tensorName, const TensorType &tensor)
bool AreDynamicTensorsSupported()
Checks for ArmNN support of dynamic tensors.
std::string GetOperandSummary(const Operand &operand)
Dimensionality GetDimensionality() const
Function that returns the tensor dimensionality.
UnsupportedOperand(const OperandType type)
Main network class which provides the interface for building up a neural network.