ArmNN
 24.08
MaxMinIntoBoundedReluImpl Class Reference

#include <MaxMinIntoBoundedRelu.hpp>

Static Public Member Functions

static void Run (Graph &graph, InputSlot &connection)
 Run for every exclusive connection between any Max & Min layers. The Max, Min and their associated constant inputs will be removed and replaced with a BoundedRelu Activation. More...
 

Protected Member Functions

 MaxMinIntoBoundedReluImpl ()=default
 
 ~MaxMinIntoBoundedReluImpl ()=default
 

Detailed Description

Definition at line 13 of file MaxMinIntoBoundedRelu.hpp.
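The class only supplies the optimization body; by itself it does not decide where Run is invoked. In ArmNN, Impl classes of this kind are normally wrapped in an OptimizeForExclusiveConnection alias, which restricts the pass to exclusive connections between the two named layer types. A minimal sketch of that wiring, with namespaces abbreviated (the alias name is illustrative; check MaxMinIntoBoundedRelu.hpp for the exact definition):

// Sketch only: wrap the Impl so Run() is called solely for exclusive
// ElementwiseBinary -> ElementwiseBinary connections.
using MaxMinIntoBoundedRelu =
    OptimizeForExclusiveConnection<ElementwiseBinaryLayer,   // base layer, expected to be Maximum
                                   ElementwiseBinaryLayer,   // child layer, expected to be Minimum
                                   MaxMinIntoBoundedReluImpl>;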

Constructor & Destructor Documentation

◆ MaxMinIntoBoundedReluImpl()

MaxMinIntoBoundedReluImpl ( )
protected default

◆ ~MaxMinIntoBoundedReluImpl()

~MaxMinIntoBoundedReluImpl ( )
protected default

Member Function Documentation

◆ Run()

static void Run ( Graph & graph,
InputSlot & connection 
)
inline static

Run for every exclusive connection between any Max & Min layers. The Max, Min and their associated constant inputs will be removed and replaced with a BoundedRelu Activation.
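The rewrite is valid because clamping from below with one constant and then from above with another is exactly what BoundedReLu computes; the enum is documented as min(a, max(b, input)). Note the slightly confusing variable names in the listing below: minValue is read from the Min layer and becomes the upper cap a, while maxValue is read from the Max layer and becomes the lower floor b. A standalone scalar sketch of the equivalence (plain C++, not ArmNN code):

#include <algorithm>
#include <cassert>

// Maximum layer followed by a Minimum layer, modelled on scalars.
float maxThenMin(float x, float lower, float upper)
{
    return std::min(std::max(x, lower), upper);
}

// BoundedReLu as documented: min(a, max(b, input)).
float boundedRelu(float x, float a, float b)
{
    return std::min(a, std::max(b, x));
}

int main()
{
    // a = upper cap (Min layer constant), b = lower floor (Max layer constant).
    for (float x : {-2.0f, 0.5f, 10.0f})
    {
        assert(maxThenMin(x, 0.0f, 6.0f) == boundedRelu(x, 6.0f, 0.0f));
    }
    return 0;
}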

Definition at line 18 of file MaxMinIntoBoundedRelu.hpp.

{
    Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
    Layer& child = connection.GetOwningLayer();

    // Only fuse an elementwise Maximum followed by an elementwise Minimum.
    auto& maxLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&base);
    if (maxLayer.GetParameters().m_Operation != BinaryOperation::Maximum)
    {
        return;
    }
    auto& minLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&child);
    if (minLayer.GetParameters().m_Operation != BinaryOperation::Minimum)
    {
        return;
    }

    // Both layers must operate on the same data type.
    if (maxLayer.GetDataType() != minLayer.GetDataType())
    {
        return;
    }

    // Get the scalar constants feeding the Max and Min layers.
    float_t maxValue;
    if (!GetValue(maxLayer, maxValue))
    {
        return;
    }
    float_t minValue;
    if (!GetValue(minLayer, minValue))
    {
        return;
    }

    // Save original parent output slot of the max layer.
    OutputSlot& parentOut = *maxLayer.GetInputSlot(0).GetConnectedOutputSlot();

    // Insert activation layer between max layer and its parent layer.
    ActivationDescriptor boundedReluDescriptor(ActivationFunction::BoundedReLu, minValue, maxValue);
    const std::string name = std::string("replaced-") + maxLayer.GetName() + std::string("-") + minLayer.GetName()
                             + std::string("-with-BoundedRelu");
    auto& boundedReluLayer = *graph.InsertNewLayer<ActivationLayer>(maxLayer.GetInputSlot(0),
                                                                    boundedReluDescriptor,
                                                                    name.c_str());

    // Reconnects with original parent.
    boundedReluLayer.GetOutputSlot().MoveAllConnections(parentOut);

    // Moves connections in min layer output to parent layer.
    // Min layer will be removed as it's left unconnected.
    // Max layer will be removed if left unconnected.
    minLayer.GetOutputSlot().MoveAllConnections(boundedReluLayer.GetOutputSlot());
}

References armnn::BoundedReLu, InputSlot::GetConnectedOutputSlot(), Layer::GetOutputSlot(), InputSlot::GetOwningLayer(), OutputSlot::GetOwningLayer(), Graph::InsertNewLayer(), armnn::Maximum, armnn::Minimum, and OutputSlot::MoveAllConnections().
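For context, optimizations of this shape are applied to a Graph through ArmNN's internal optimizer (the src/armnn headers, not the public API). A hedged sketch of how such a pass is typically driven, assuming the OptimizeForExclusiveConnection alias sketched above and that Optimizer::Pass and MakeOptimizations are available from Optimizer.hpp:

// Sketch only: run the fused Max/Min optimization over a graph.
void RunMaxMinFusion(armnn::Graph& graph)
{
    using namespace armnn;
    using namespace armnn::optimizations;
    Optimizer::Pass(graph, MakeOptimizations(MaxMinIntoBoundedRelu()));
}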


The documentation for this class was generated from the following file:
MaxMinIntoBoundedRelu.hpp