25 for (
auto&& layer : layers)
27 for (
auto&& it = layer->BeginInputSlots(); it != layer->EndInputSlots(); ++it)
29 result.push_back(&(*it));
41 for (
auto&& layer : layers)
43 for (
auto&& it = layer->BeginOutputSlots(); it != layer->EndOutputSlots(); ++it)
45 result.push_back(&(*it));
51 bool checkDataTypeInputandOutput(
const Layer& layer)
53 auto inputInfo = layer.GetInputSlot(0).GetConnection()->GetTensorInfo();
54 auto outputInfo = layer.GetOutputSlot(0).GetTensorInfo();
55 bool sameDataType = (inputInfo.GetDataType() == outputInfo.GetDataType());
62 bool sameScale = (inputInfo.GetQuantizationScale() == outputInfo.GetQuantizationScale());
63 bool sameOffset = (inputInfo.GetQuantizationOffset() == outputInfo.GetQuantizationOffset());
65 return (sameScale && sameOffset);
82 std::vector<Layer*> untouchedVector;
83 for (
const auto& pair : untouched)
85 Layer* layer = pair.second;
93 template<
typename LayerType>
102 replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
106 {baseLayer, activationLayer});
109 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
110 return replacementLayer;
113 template<
typename LayerType>
123 replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
127 {baseLayer, activationLayer});
130 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
131 return replacementLayer;
134 template<
typename LayerType>
147 replacementLayer->m_Weight = std::move(baseLayer->m_Weight);
148 replacementLayer->m_Bias = std::move(baseLayer->m_Bias);
150 return replacementLayer;
157 template<
typename LayerType>
163 std::vector<Layer*> layers;
166 std::vector<uint32_t> axes;
167 unsigned int recalulatedAxis = 0;
169 for (
unsigned int i = 0; i != desc.
m_vAxis.size(); ++i)
172 TensorInfo layerInfo = baseLayer->GetInputSlot(0).GetConnectedOutputSlot()->GetTensorInfo();
174 axes.emplace_back(desc.
m_vAxis[i]);
182 std::vector<uint32_t> singleAxis(1, desc.
m_vAxis[i] - recalulatedAxis);
186 newReduceDescriptor.
m_vAxis.assign(singleAxis.begin(), singleAxis.end());
189 std::string layerName =
"reduce_layer_" + std::to_string(i);
196 layers[i - 1]->GetOutputSlot(0).Connect(replacementLayer->
GetInputSlot(0));
207 layers.emplace_back(replacementLayer);
211 ARMNN_ASSERT(baseLayer->GetOutputSlot(0).GetTensorInfo() == layers.back()->GetOutputSlot().GetTensorInfo());
219 template<
typename LayerType>
222 std::vector<Layer*>& layers)
224 std::list<Layer*> replacementLayers(layers.begin(), layers.end());
229 std::move(replacementLayers));
231 optimizationViews.
AddSubstitution({substitutionSubgraph, replacementSubgraph});
std::vector< Layer * > ChainReduceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, ReduceDescriptor &desc)
void ReplaceLayers(OptimizationViews &optimizationViews, LayerType *baseLayer, std::vector< Layer *> &layers)
constexpr bool IsQuantizedType()
LayerT * AddLayer(Args &&... args)
Adds a new layer of type LayerType, constructed with the arguments passed, to the graph.
void ReportUntouchedLayers(OptimizationViews &optimizationViews, std::map< LayerGuid, Layer *> untouched)
bool m_KeepDims
If true, then the output shape does not change (the reduced dimensions are kept).
std::vector< OutputSlot * > OutputSlots
void AddSubstitution(SubstitutionPair &&substitution)
This layer represents an activation operation with the specified activation function.
Copyright (c) 2021 ARM Limited and Contributors.
LayerType * FuseLayerWithParameters(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
The SubgraphView class represents a subgraph of a Graph.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
LayerType * FuseLayerWithoutParameters(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
SubgraphView::InputSlots CreateInputsFrom(const std::vector< Layer *> &layers)
A ReduceDescriptor for the REDUCE operators.
#define ARMNN_ASSERT(COND)
An ActivationDescriptor for the ActivationLayer.
void AddUntouchedSubgraph(SubgraphView &&subgraph)
std::vector< InputSlot * > InputSlots
std::vector< uint32_t > m_vAxis
The indices of the dimensions to reduce.
SubgraphView::OutputSlots CreateOutputsFrom(const std::vector< Layer *> &layers)
void SetTensorInfo(const TensorInfo &tensorInfo) override
LayerType * FuseLayerWithWeightsAndBiases(OptimizationViews &optimizationViews, LayerType *baseLayer, ActivationLayer *activationLayer, ActivationDescriptor &activationDesc, std::string name)
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
const TensorInfo ComputeReductionTensorShape(const armnn::TensorInfo &input, const std::vector< uint32_t > &vAxis, const bool keepDims)
Function to compute the output tensor shape based on the axes and on whether keepDims is set.
LayerType
When adding a new layer, also adapt the LastLayer enum value in the enum class LayerType below.