bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         armnn::Optional<std::string&> reasonIfUnsupported)
{
    // Only build the diagnostic string if the caller supplied somewhere to put it.
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << func << " is not implemented [" << file << ":" << line << "]";
        reasonIfUnsupported.value() = message.str();
    }

    return false;
}
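The reason string is built only when the caller asks for one, and an unimplemented check always resolves to false. Below is a minimal, self-contained sketch of that calling convention in plain C++ (not Arm NN code): OptionalReason is a simplified stand-in for armnn::Optional<std::string&>, and UnimplementedCheckSketch is a hypothetical name used only for illustration.

#include <iostream>
#include <string>

// Simplified stand-in for armnn::Optional<std::string&>: holds a pointer that may be null.
class OptionalReason
{
public:
    OptionalReason() : m_Value(nullptr) {}
    OptionalReason(std::string& value) : m_Value(&value) {}
    explicit operator bool() const { return m_Value != nullptr; }
    std::string& value() { return *m_Value; }
private:
    std::string* m_Value;
};

// Mirrors the pattern above: fill the reason only on demand, then report "unsupported".
bool UnimplementedCheckSketch(const char* func, OptionalReason reasonIfUnsupported)
{
    if (reasonIfUnsupported)
    {
        reasonIfUnsupported.value() = std::string(func) + " is not implemented";
    }
    return false;
}

int main()
{
    std::string reason;
    // A caller that wants diagnostics passes a reason slot; one that does not passes an empty optional.
    bool supported = UnimplementedCheckSketch("IsMergeSupported", OptionalReason(reason));
    std::cout << std::boolalpha << supported << " - " << reason << "\n"; // false - IsMergeSupported is not implemented
    UnimplementedCheckSketch("IsMergeSupported", OptionalReason());      // no string is built here
    return 0;
}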
bool LayerSupportBase::IsLayerSupported(const LayerType& type,
                                        const std::vector<TensorInfo>& infos,
                                        const BaseDescriptor& descriptor,
                                        const Optional<LstmInputParamsInfo>&,           // lstmParamsInfo
                                        const Optional<QuantizedLstmInputParamsInfo>&,  // quantizedLstmParamsInfo
                                        Optional<std::string&> reasonIfUnsupported) const
{
    // Only the LayerType::StandIn handling survives in this partial listing; dispatch on 'type' for the other layer kinds is elided.
    auto desc = *(PolymorphicDowncast<const StandInDescriptor*>(&descriptor));

    if (infos.size() != (desc.m_NumInputs + desc.m_NumOutputs))
    {
        throw InvalidArgumentException("Number of StandIn layer TensorInfos does not equal "
                                       "the combined number of input and output slots assigned "
                                       "to the StandIn descriptor");
    }

    // The first m_NumInputs entries of infos describe the inputs; the remainder describe the outputs.
    std::vector<const TensorInfo*> inputInfos;
    for (uint32_t i = 0; i < desc.m_NumInputs; i++)
    {
        inputInfos.push_back(&infos[i]);
    }
    std::vector<const TensorInfo*> outputInfos;
    for (uint32_t i = desc.m_NumInputs; i < infos.size(); i++)
    {
        outputInfos.push_back(&infos[i]);
    }

    return IsStandInSupported(inputInfos, outputInfos, desc, reasonIfUnsupported);
}
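To make the index arithmetic concrete, here is a self-contained sketch of the same partition for a StandIn layer declared with two inputs and one output. It is plain C++ with stub types (TensorInfoStub, StandInDescriptorStub), not Arm NN code; only the fields the partition logic actually reads are mirrored.

#include <cassert>
#include <cstdint>
#include <vector>

struct TensorInfoStub { uint32_t id; };

struct StandInDescriptorStub
{
    uint32_t m_NumInputs;
    uint32_t m_NumOutputs;
};

int main()
{
    // A StandIn layer with 2 inputs and 1 output expects exactly 3 TensorInfos:
    // infos[0] and infos[1] describe the inputs, infos[2] describes the output.
    StandInDescriptorStub desc{2, 1};
    std::vector<TensorInfoStub> infos{{0}, {1}, {2}};
    assert(infos.size() == desc.m_NumInputs + desc.m_NumOutputs);

    std::vector<const TensorInfoStub*> inputInfos;
    for (uint32_t i = 0; i < desc.m_NumInputs; i++)
    {
        inputInfos.push_back(&infos[i]);
    }
    std::vector<const TensorInfoStub*> outputInfos;
    for (uint32_t i = desc.m_NumInputs; i < infos.size(); i++)
    {
        outputInfos.push_back(&infos[i]);
    }

    assert(inputInfos.size() == 2 && outputInfos.size() == 1);
    assert(outputInfos[0]->id == 2);
    return 0;
}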
// In the original file, five of the Is<Layer>Supported checks share this same one-line body,
// delegating to DefaultLayerSupport so that the reported reason names the unimplemented query
// and where it lives:
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>&,  // inputs
                                          const std::vector<const TensorInfo*>&,  // outputs
                                          const StandInDescriptor&,               // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << "StandIn layer is not executable via backends";
        reasonIfUnsupported.value() = message.str();
    }

    return false;
}
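The related declarations listed at the end of this page note that IsLayerSupported is the default implementation of the ILayerSupport interface and that backends should implement it as a switch statement. The sketch below illustrates that extension pattern in plain, self-contained C++ (not Arm NN code): the names LayerTypeSketch, LayerSupportSketchBase and MockBackendLayerSupport are hypothetical. The base class reports "unsupported" by default, and the backend override claims only the layers it can run, while StandIn layers always stay unsupported.

#include <iostream>
#include <string>

enum class LayerTypeSketch { Addition, Merge, StandIn };

class LayerSupportSketchBase
{
public:
    virtual ~LayerSupportSketchBase() = default;
    // Pessimistic default: nothing is supported, and a reason is reported if requested.
    virtual bool IsLayerSupported(LayerTypeSketch /*type*/, std::string* reason) const
    {
        if (reason) { *reason = "not implemented in the base class"; }
        return false;
    }
};

class MockBackendLayerSupport : public LayerSupportSketchBase
{
public:
    bool IsLayerSupported(LayerTypeSketch type, std::string* reason) const override
    {
        switch (type)
        {
            case LayerTypeSketch::Addition:
                return true; // the only layer this mock backend claims to execute
            case LayerTypeSketch::StandIn:
                if (reason) { *reason = "StandIn layer is not executable via backends"; }
                return false;
            default:
                return LayerSupportSketchBase::IsLayerSupported(type, reason);
        }
    }
};

int main()
{
    MockBackendLayerSupport support;
    std::string reason;
    std::cout << support.IsLayerSupported(LayerTypeSketch::Addition, nullptr) << "\n"; // 1
    std::cout << support.IsLayerSupported(LayerTypeSketch::StandIn, &reason)  << "\n"; // 0
    std::cout << reason << "\n";
    return 0;
}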
Declarations referenced by this listing (Arm NN, Copyright (c) 2021 ARM Limited and Contributors):

bool IsMemImportSupported(const TensorInfo& input, const TensorInfo& output, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsMergeSupported(const TensorInfo& input0, const TensorInfo& input1, const TensorInfo& output, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsShapeSupported(const TensorInfo& input, const TensorInfo& output, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsLayerSupported(const LayerType& type, const std::vector<TensorInfo>& infos, const BaseDescriptor& descriptor, const Optional<LstmInputParamsInfo>& lstmParamsInfo = EmptyOptional(), const Optional<QuantizedLstmInputParamsInfo>& quantizedLstmParamsInfo = EmptyOptional(), Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const override
    Default implementation of the ILayerSupport interface; backends should implement this as a switch statement.
bool IsMemCopySupported(const TensorInfo& input, const TensorInfo& output, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsQuantizedLstmSupported(const TensorInfo& input, const TensorInfo& previousCellStateIn, const TensorInfo& previousOutputIn, const TensorInfo& cellStateOut, const TensorInfo& output, const QuantizedLstmInputParamsInfo& paramsInfo, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsDetectionPostProcessSupported(const TensorInfo& boxEncodings, const TensorInfo& scores, const TensorInfo& anchors, const TensorInfo& detectionBoxes, const TensorInfo& detectionClasses, const TensorInfo& detectionScores, const TensorInfo& numDetections, const DetectionPostProcessDescriptor& descriptor, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const
bool IsStandInSupported(const std::vector<const TensorInfo*>& inputs, const std::vector<const TensorInfo*>& outputs, const StandInDescriptor& descriptor, Optional<std::string&> reasonIfUnsupported = EmptyOptional()) const

LayerType
    When adding a new layer, also adapt the LastLayer enum value in the enum class LayerType.
BaseDescriptor
    Base class for all descriptors.
StandInDescriptor
    A StandInDescriptor for the StandIn layer.