// Run for every exclusive connection between any Max and Min layers. The Max and Min layers,
// and their associated constant inputs, will be removed and replaced with a BoundedRelu activation.
19 {
20 Layer& base = connection.GetConnectedOutputSlot()->GetOwningLayer();
21 Layer& child = connection.GetOwningLayer();
22
23 auto& maxLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&base);
24 if (maxLayer.GetParameters().m_Operation != BinaryOperation::Maximum)
25 {
26 return;
27 }
28 auto& minLayer = *PolymorphicDowncast<ElementwiseBinaryLayer*>(&child);
29 if (minLayer.GetParameters().m_Operation != BinaryOperation::Minimum)
30 {
31 return;
32 }
33
34 if (maxLayer.GetDataType() != minLayer.GetDataType())
35 {
36 return;
37 }
38
39
40 float_t maxValue;
41 if (!GetValue(maxLayer, maxValue))
42 {
43 return;
44 }
45 float_t minValue;
46 if (!GetValue(minLayer, minValue))
47 {
48 return;
49 }
50
51
52 OutputSlot& parentOut = *maxLayer.GetInputSlot(0).GetConnectedOutputSlot();
53
54
55 ActivationDescriptor boundedReluDescriptor(ActivationFunction::BoundedReLu, minValue, maxValue);
56 const std::string name = std::string("replaced-") + maxLayer.GetName() + std::string("-") + minLayer.GetName()
57 + std::string("-with-BoundedRelu");
58 auto& boundedReluLayer = *graph.InsertNewLayer<ActivationLayer>(maxLayer.GetInputSlot(0),
59 boundedReluDescriptor,
60 name.c_str());
61
62
63 boundedReluLayer.GetOutputSlot().MoveAllConnections(parentOut);
64
65
66
67
68 minLayer.GetOutputSlot().MoveAllConnections(boundedReluLayer.GetOutputSlot());
69 }