29 #include "utils/Utils.h"
// Example walking an ResNetV2_50 inference graph with the Arm Compute Library
// graph frontend. Derives from the utils Example interface (do_setup/do_run).
// NOTE(review): this chunk is a lossy extraction — original file line numbers
// are fused into the text and lines are missing (numbering jumps 29 -> 36).
// Code is kept byte-identical; only comments were added.
36 class GraphResNetV2_50Example :
public Example
// Constructor: default-constructs the command-line parser, binds the common
// example options to it, and creates the graph with id 0 and the name
// "ResNetV2_50". Member declarations (cmd_parser, common_opts, common_params,
// graph) are not visible in this chunk — presumably declared further down in
// the class; TODO confirm against the full file.
39 GraphResNetV2_50Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0,
"ResNetV2_50")
// Parses/validates the command line, builds the ResNetV2_50 graph and
// finalizes it for the selected target. Returns bool (the return statement is
// not visible in this chunk — presumably `return true` / `return false` after
// help is printed; TODO confirm). Several interior lines (input layer, first
// convolution/pooling setup, tensor_shape initializer) are missing from this
// extraction (embedded numbering jumps 74 -> 82).
42 bool do_setup(
int argc,
char **argv)
override
// Parse and validate the example's command-line options.
45 cmd_parser.parse(argc, argv);
46 cmd_parser.validate();
// If --help was requested, print usage (and presumably return early —
// the body of this branch is not fully visible here).
52 if (common_params.help)
54 cmd_parser.print_help(argv[0]);
// Echo the effective parameters for the user.
59 std::cout << common_params << std::endl;
// Resolve the on-disk location of the pretrained weights: the model
// subdirectory is appended only when a data path was provided.
62 std::string data_path = common_params.data_path;
63 std::string model_path =
"/cnn_data/resnet_v2_50_model/";
64 if (!data_path.empty())
66 data_path += model_path;
// TensorFlow-style preprocessor for the input images.
// (Note: "TFPreproccessor" spelling matches the library's class name.)
70 std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();
73 const auto operation_layout = common_params.data_layout;
// NOTE(review): the initializer of tensor_shape is missing from this chunk.
74 const TensorShape tensor_shape =
// Stream the network into the graph: target/fast-math hints first, then the
// stem convolution (7x7, stride 2, pad 3 — only the PadStrideInfo survives
// here) followed by the four residual stages.
82 graph << common_params.target << common_params.fast_math_hint
87 PadStrideInfo(2, 2, 3, 3))
// ResNetV2-50 stage configuration: (base_depth, num_units, stride) =
// (64,3,2), (128,4,2), (256,6,2), (512,3,1).
93 add_residual_block(data_path,
"block1", weights_layout, 64, 3, 2);
94 add_residual_block(data_path,
"block2", weights_layout, 128, 4, 2);
95 add_residual_block(data_path,
"block3", weights_layout, 256, 6, 2);
96 add_residual_block(data_path,
"block4", weights_layout, 512, 3, 1);
// Final post-norm activation (the preceding BatchNorm layer is not visible
// in this chunk).
103 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Finalize the graph with the runtime configuration taken from the common
// options (tuner enablement, MLGO heuristics file).
115 config.
use_tuner = common_params.enable_tuner;
118 config.
mlgo_file = common_params.mlgo_file;
122 graph.finalize(common_params.target, config);
// Executes the finalized graph. The body is not visible in this chunk —
// presumably a single graph.run() call as in the other graph examples;
// TODO confirm against the full file.
127 void do_run()
override
// Appends one ResNetV2 stage ("block1" .. "block4") of `num_units`
// pre-activation bottleneck units to the graph.
//
// Visible parameters: data_path (weights directory), name (stage name used in
// both the weights-file prefix and the layer names), base_depth (bottleneck
// width; output depth is base_depth * 4), num_units. NOTE(review): a `stride`
// parameter and a `weights_layout` parameter are referenced by callers and by
// the body but their declarations are missing from this extraction (numbering
// jumps 143 -> 146); likewise `depth_in`, and the `shortcut`/`residual`
// SubStream declarations, are used below but not declared in the visible text.
139 void add_residual_block(
const std::string &data_path,
140 const std::string &
name,
142 unsigned int base_depth,
143 unsigned int num_units,
// Build each bottleneck unit in turn.
146 for (
unsigned int i = 0; i < num_units; ++i)
// Weights-file prefix for this unit, e.g. "block1_unit_1_bottleneck_v2_".
149 std::stringstream unit_path_ss;
150 unit_path_ss <<
name <<
"_unit_" << (i + 1) <<
"_bottleneck_v2_";
// Layer-name prefix for this unit, e.g. "block1/unit1/bottleneck_v2/".
151 std::stringstream unit_name_ss;
152 unit_name_ss <<
name <<
"/unit" << (i + 1) <<
"/bottleneck_v2/";
154 std::string unit_path = unit_path_ss.str();
155 std::string unit_name = unit_name_ss.str();
// Shape of the current graph tail, used (presumably) to derive depth_in;
// the depth_in assignment itself is not visible in this chunk.
157 const TensorShape last_shape = graph.
graph().
node(graph.
tail_node())->output(0)->desc().shape;
// Bottleneck expansion: output depth is 4x the bottleneck width.
160 unsigned int depth_out = base_depth * 4;
// Only the last unit of the stage applies the stage stride; earlier units
// keep stride 1.
163 unsigned int middle_stride = (i == (num_units - 1)) ? stride : 1;
// Pre-activation: BatchNorm (epsilon ~1e-5) followed by ReLU, applied
// before both the shortcut and the residual branch (ResNetV2 ordering).
// The BatchNormalizationLayer call itself is partially missing here.
171 0.000009999999747378752f)
172 .
set_name(unit_name +
"preact/BatchNorm")
173 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
174 .
set_name(unit_name +
"preact/Relu");
// Shortcut branch: identity when depth matches (with a stride-matching
// 1x1 max-pool if the unit is strided), otherwise a 1x1 projection
// convolution. The surrounding SubStream plumbing is not visible here.
178 if (depth_in == depth_out)
180 if (middle_stride != 1)
183 PadStrideInfo(middle_stride, middle_stride, 0, 0),
true))
184 .
set_name(unit_name +
"shortcut/MaxPool");
189 shortcut.forward_tail(preact.tail_node());
194 PadStrideInfo(1, 1, 0, 0))
195 .
set_name(unit_name +
"shortcut/convolution");
// Residual branch, conv1: 1x1 bottleneck-reducing convolution (no bias —
// the bias accessor is an explicit nullptr), then BatchNorm + ReLU.
203 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
204 PadStrideInfo(1, 1, 0, 0))
205 .
set_name(unit_name +
"conv1/convolution")
211 0.000009999999747378752f)
212 .
set_name(unit_name +
"conv1/BatchNorm")
213 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// conv2: 3x3 convolution carrying the unit's stride (pad 1), no bias,
// then BatchNorm + ReLU.
217 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
218 PadStrideInfo(middle_stride, middle_stride, 1, 1))
219 .
set_name(unit_name +
"conv2/convolution")
225 0.000009999999747378752f)
226 .
set_name(unit_name +
"conv2/BatchNorm")
227 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// conv3: 1x1 depth-expanding convolution (no BatchNorm/ReLU after it —
// ResNetV2 units end before normalization of the next unit).
232 PadStrideInfo(1, 1, 0, 0))
233 .
set_name(unit_name +
"conv3/convolution");
// Merge the two branches with an element-wise addition.
235 graph <<
EltwiseLayer(std::move(shortcut), std::move(residual), EltwiseOperation::Add)
// Entry point: delegates argument handling and the setup/run lifecycle to the
// Compute Library example runner, instantiated with this example class.
// (The function's closing brace lies past the end of this chunk.)
255 int main(
int argc,
char **argv)
257 return arm_compute::utils::run_example<GraphResNetV2_50Example>(argc, argv);