auto& maxLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&base);
auto& minLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&child);
if (maxLayer.GetDataType() != minLayer.GetDataType())
if (!GetValue(maxLayer, maxValue))
if (!GetValue(minLayer, minValue))
OutputSlot& parentOut = *maxLayer.GetInputSlot(0).GetConnectedOutputSlot();
// Build a descriptive name and splice the fused BoundedReLu activation in front of the Max layer's first input.
ActivationDescriptor boundedReluDescriptor(ActivationFunction::BoundedReLu, minValue, maxValue);
const std::string name = std::string("replaced-") + maxLayer.GetName() + std::string("-") + minLayer.GetName()
                         + std::string("-with-BoundedRelu");
auto& boundedReluLayer = *graph.InsertNewLayer<ActivationLayer>(maxLayer.GetInputSlot(0),
                                                                boundedReluDescriptor,
                                                                name.c_str());
minLayer.GetOutputSlot().MoveAllConnections(boundedReluLayer.GetOutputSlot());
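The net effect of the rewrite is easiest to see as plain arithmetic. A minimal sketch, assuming only that BoundedReLu computes min(a, max(b, input)) as documented further down; the helper name is illustrative and not part of the original file:

#include <algorithm>

// Before the pass:  y = std::min(minValue, std::max(maxValue, x))   // Max layer, then Min layer
// After the pass:   y = BoundedReLu(x) with m_A = minValue (upper clamp), m_B = maxValue (lower clamp)
float boundedReluReference(float x, float upperBound, float lowerBound)
{
    return std::min(upperBound, std::max(lowerBound, x));   // same as min(a, max(b, input))
}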
static float_t GetConstTensorValue(Layer& layer)
auto& constLayer = *PolymorphicDowncast<ConstantLayer*>(&layer);
switch (constLayer.GetDataType())
// One return per DataType case: read the scalar from the constant tensor and widen it to float_t.
return *constLayer.m_LayerOutput->GetConstTensor<float>();
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<BFloat16>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<half_float::half>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<uint8_t>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int8_t>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int16_t>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int32_t>());
return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int64_t>());
static bool GetValue(Layer& layer, float_t& value)
value = GetConstTensorValue(input0);
value = GetConstTensorValue(input1);
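For context, impls of this shape in ArmNN are normally wrapped in OptimizeForExclusiveConnection and applied through the optimizer. A rough sketch under that assumption; the alias name, template argument order and call sites mirror the library's other fusion optimizations and should be treated as assumptions rather than the file's own code:

// Sketch: expose the impl as a graph optimization and run it over a Graph.
using MaxMinIntoBoundedRelu = OptimizeForExclusiveConnection<ElementwiseBinaryLayer,   // base:  the Max layer
                                                             ElementwiseBinaryLayer,   // child: the Min layer
                                                             MaxMinIntoBoundedReluImpl>;

Optimizer::Pass(graph, MakeOptimizations(MaxMinIntoBoundedRelu()));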
This layer represents an activation operation with the specified activation function.
This layer represents an ElementwiseBinary operation.
LayerT * InsertNewLayer(InputSlot &insertBefore, Args &&... args)
Inserts a new layer between the output slot currently connected to insertBefore and insertBefore itself.
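A minimal usage sketch of InsertNewLayer as it is used above; graph, targetLayer and desc are hypothetical stand-ins, and the trailing arguments are forwarded to the new layer's constructor:

// Splice an ActivationLayer between targetLayer and whatever currently feeds its first input.
ActivationLayer* act = graph.InsertNewLayer<ActivationLayer>(targetLayer.GetInputSlot(0),
                                                             desc,   // ActivationDescriptor
                                                             "spliced-activation");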
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
void MoveAllConnections(OutputSlot &destination)
Moves all connections to another OutputSlot.
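The direction of the move matters: the connections owned by the slot the method is called on are re-pointed at the destination slot. A small hedged sketch with hypothetical layer names:

// Every consumer that was reading from oldLayer's output now reads from newLayer's output.
oldLayer.GetOutputSlot().MoveAllConnections(newLayer.GetOutputSlot());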
Layer & GetOwningLayer() const
const TensorInfo & GetTensorInfo() const override
unsigned int GetNumElements() const
~MaxMinIntoBoundedReluImpl()=default
MaxMinIntoBoundedReluImpl()=default
static void Run(Graph &graph, InputSlot &connection)
Run for every exclusive connection between any Max & Min layers. The Max, Min and their associated constant values are removed and replaced with a BoundedRelu activation layer.
@ BoundedReLu
min(a, max(b, input)) ReLu1 & ReLu6.
An ActivationDescriptor for the ActivationLayer.
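To tie the two descriptions together, a hedged example of descriptors that realise the ReLu6 and ReLu1 clamps mentioned above, assuming ArmNN's ActivationDescriptor(activation, a, b) constructor where a maps to m_A and b to m_B:

// ReLu6: min(6, max(0, x)), i.e. clamp to [0, 6].
ActivationDescriptor relu6(ActivationFunction::BoundedReLu, /*a=*/6.0f, /*b=*/0.0f);
// ReLu1: min(1, max(-1, x)), i.e. clamp to [-1, 1].
ActivationDescriptor relu1(ActivationFunction::BoundedReLu, /*a=*/1.0f, /*b=*/-1.0f);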