#ifndef ARM_COMPUTE_TEST_ACTIVATION_LAYER_H
#define ARM_COMPUTE_TEST_ACTIVATION_LAYER_H

#include "arm_compute/core/Error.h"
#include "arm_compute/core/Types.h"

#include <algorithm>
#include <cmath>

namespace arm_compute
{
namespace test
{
namespace validation
{
namespace reference
{
// Scalar reference implementation of a single activation; a and b are the
// additional parameters carried by ActivationLayerInfo.
template <typename T>
inline T activate_float(T x, T a, T b, ActivationLayerInfo::ActivationFunction activation)
{
    T ret;

    switch(activation)
    {
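        // Absolute value: f(x) = |x|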
        case ActivationLayerInfo::ActivationFunction::ABS:
            ret = std::abs(x);
            break;
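        // Linear: f(x) = a * x + b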
        case ActivationLayerInfo::ActivationFunction::LINEAR:
            ret = a * x + b;
            break;
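        // Logistic (sigmoid): f(x) = 1 / (1 + e^-x)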
        case ActivationLayerInfo::ActivationFunction::LOGISTIC:
            ret = static_cast<T>(1) / (static_cast<T>(1) + std::exp(-x));
            break;
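        // Rectified linear unit: f(x) = max(0, x)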
        case ActivationLayerInfo::ActivationFunction::RELU:
            ret = std::max<T>(static_cast<T>(0), x);
            break;
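        // Upper-bounded ReLU: f(x) = min(a, max(0, x))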
        case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
            ret = std::min<T>(a, std::max(static_cast<T>(0), x));
            break;
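        // Lower- and upper-bounded ReLU: f(x) = min(a, max(b, x))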
        case ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU:
            ret = std::min<T>(a, std::max<T>(b, x));
            break;
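        // Leaky ReLU: f(x) = x if x > 0, a * x otherwise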
        case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
            ret = (x > 0) ? x : a * x;
            break;
        case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
            ret = std::log(static_cast<T>(1) + std::exp(static_cast<double>(x)));
            break;
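        // Exponential linear unit: f(x) = x if x > 0, a * (e^x - 1) otherwise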
        case ActivationLayerInfo::ActivationFunction::ELU:
            ret = (x > 0) ? x : a * (std::exp(x) - static_cast<T>(1));
            break;
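        // Square root: f(x) = sqrt(x)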
        case ActivationLayerInfo::ActivationFunction::SQRT:
            ret = std::sqrt(x);
            break;
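        // Square: f(x) = x^2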
        case ActivationLayerInfo::ActivationFunction::SQUARE:
            ret = x * x;
            break;
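        // Scaled hyperbolic tangent: f(x) = a * tanh(b * x)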
        case ActivationLayerInfo::ActivationFunction::TANH:
            ret = a * std::tanh(b * x);
            break;
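        // Identity: f(x) = x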
        case ActivationLayerInfo::ActivationFunction::IDENTITY:
            ret = x;
            break;
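        // Hard swish: f(x) = x * clamp(x + 3, 0, 6) / 6, with 1/6 approximated as 0.166666667f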
        case ActivationLayerInfo::ActivationFunction::HARD_SWISH:
            ret = x * ((std::min(std::max(static_cast<T>(x + 3), static_cast<T>(0.0f)), static_cast<T>(6.0f))) * 0.166666667f);
            break;
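        // Swish: f(x) = x * sigmoid(a * x) = x / (1 + e^(-a * x))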
        case ActivationLayerInfo::ActivationFunction::SWISH:
            ret = static_cast<T>(x) / (static_cast<T>(1) + std::exp(-a * x));
            break;
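        // Gaussian error linear unit: f(x) = x * 0.5 * (1 + erf(x / sqrt(2)))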
        case ActivationLayerInfo::ActivationFunction::GELU:
            ret = x * 0.5f * (1 + std::erf(x / std::sqrt(2.0f)));
            break;
        default:
            ARM_COMPUTE_ERROR("Unsupported activation function");
            break;
    }

    return ret;
}

template <typename T>