ArmNN
 25.11
Loading...
Searching...
No Matches
MaxMinIntoBoundedRelu.hpp
Go to the documentation of this file.
//
// Copyright © 2024 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
6#pragma once
7
8#include "Optimization.hpp"
9
11{
12
14{
15public:
16 /// Run for every exclusive connection between any Max & Min layers
17 /// The Max, Min and its associated constant inputs will be removed, and replaced with a BoundedRelu Activation
18 static void Run(Graph& graph, InputSlot& connection)
19 {
20 Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
21 Layer& child = connection.GetOwningLayer();
22
23 auto& maxLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&base);
24 if (maxLayer.GetParameters().m_Operation != BinaryOperation::Maximum)
25 {
26 return;
27 }
28 auto& minLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&child);
29 if (minLayer.GetParameters().m_Operation != BinaryOperation::Minimum)
30 {
31 return;
32 }
33
34 if (maxLayer.GetDataType() != minLayer.GetDataType())
35 {
36 return;
37 }
38
39 // get max and min values
40 float_t maxValue;
41 if (!GetValue(maxLayer, maxValue))
42 {
43 return;
44 }
45 float_t minValue;
46 if (!GetValue(minLayer, minValue))
47 {
48 return;
49 }
50
51 // Save original parent output slot of the max layer
52 OutputSlot& parentOut = *maxLayer.GetInputSlot(0).GetConnectedOutputSlot();
53
54 // Insert activation layer between max layer and its parent layer
55 ActivationDescriptor boundedReluDescriptor(ActivationFunction::BoundedReLu, minValue, maxValue);
56 const std::string name = std::string("replaced-") + maxLayer.GetName() + std::string("-") + minLayer.GetName()
57 + std::string("-with-BoundedRelu");
58 auto& boundedReluLayer = *graph.InsertNewLayer<ActivationLayer>(maxLayer.GetInputSlot(0),
59 boundedReluDescriptor,
60 name.c_str());
61
62 // Reconnects with original parent.
63 boundedReluLayer.GetOutputSlot().MoveAllConnections(parentOut);
64
65 // Moves connections in min layer output to parent layer.
66 // Min layer will be removed as it's left unconnected.
67 // Max layer will be removed if left unconnected.
68 minLayer.GetOutputSlot().MoveAllConnections(boundedReluLayer.GetOutputSlot());
69 }
70
71protected:
74
75private:
76 static float_t GetConstTensorValue(Layer& layer)
77 {
78 auto& constLayer = *PolymorphicDowncast<ConstantLayer*>(&layer);
79 switch (constLayer.GetDataType())
80 {
82 return *constLayer.m_LayerOutput->GetConstTensor<float>();
84 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<BFloat16>());
86 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<half_float::half>());
89 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<uint8_t>());
92 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int8_t>());
94 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int16_t>());
96 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int32_t>());
98 return static_cast<float_t>(*constLayer.m_LayerOutput->GetConstTensor<int64_t>());
99 default:
100 throw InvalidArgumentException("No supported Data Type");
101 }
102 }
103
104 static bool GetValue(Layer& layer, float_t& value)
105 {
108 if (input0.GetType() == LayerType::Constant)
109 {
110 if (input0.GetOutputSlot(0).GetTensorInfo().GetNumElements() != 1)
111 {
112 return false;
113 }
114 value = GetConstTensorValue(input0);
115 }
116 else if (input1.GetType() == LayerType::Constant)
117 {
118 if (input1.GetOutputSlot(0).GetTensorInfo().GetNumElements() != 1)
119 {
120 return false;
121 }
122 value = GetConstTensorValue(input1);
123 }
124 else
125 {
126 return false;
127 }
128 return true;
129 };
130};
131
135
136} // namespace armnn::optimizations
This layer represents an activation operation with the specified activation function.
This layer represents an elementwise binary operation.
LayerT * InsertNewLayer(InputSlot &insertBefore, Args &&... args)
Inserts a new layer between the output slot currently connected to insertBefore and insertBefore itse...
Definition Graph.hpp:481
Layer & GetOwningLayer() const
Definition Layer.hpp:53
const OutputSlot * GetConnectedOutputSlot() const
Definition Layer.hpp:56
const InputSlot & GetInputSlot(unsigned int index) const override
Get a const input slot handle by slot index.
Definition Layer.hpp:337
const OutputSlot & GetOutputSlot(unsigned int index=0) const override
Get the const output slot handle by slot index.
Definition Layer.hpp:339
LayerType GetType() const override
Returns the armnn::LayerType of this layer.
Definition Layer.hpp:286
Layer & GetOwningLayer() const
Definition Layer.hpp:132
const TensorInfo & GetTensorInfo() const override
Definition Layer.cpp:100
unsigned int GetNumElements() const
Definition Tensor.hpp:198
static void Run(Graph &graph, InputSlot &connection)
Run for every exclusive connection between any Max & Min layers The Max, Min and its associated const...
OptimizeForExclusiveConnection< ElementwiseBinaryLayer, ElementwiseBinaryLayer, MaxMinIntoBoundedReluImpl > MaxMinIntoBoundedRelu
@ BoundedReLu
min(a, max(b, input)) ReLu1 & ReLu6.
Definition Types.hpp:92
DestType PolymorphicDowncast(SourceType *value)
Polymorphic downcast for build in pointers only.
An ActivationDescriptor for the ActivationLayer.