29 #include "utils/Utils.h"
38 class InceptionResNetV1Example final :
public Example
// NOTE(review): this chunk is a garbled extract — original source line numbers
// are fused into the text and many lines are elided. Comments describe only
// what the visible fragments show; do not treat them as a full contract.
// Constructor: hooks common example options into the command-line parser,
// registers two extra options (model input width/height, default-initialized
// from nullptr-looking fragments — presumably with defaults on elided lines),
// and names the graph "InceptionResNetV1".
41 InceptionResNetV1Example()
43 common_opts(cmd_parser),
45 model_input_width(
nullptr),
46 model_input_height(
nullptr),
47 graph(0,
"InceptionResNetV1")
// Help text for the two input-size options.
53 model_input_width->
set_help(
"Input image width.");
54 model_input_height->
set_help(
"Input image height.");
// Non-copyable: the example owns a graph and parser state.
56 InceptionResNetV1Example(
const InceptionResNetV1Example &) =
delete;
57 InceptionResNetV1Example &operator=(
const InceptionResNetV1Example &) =
delete;
// Defaulted destructor; `override` implies a virtual dtor in Example.
58 ~InceptionResNetV1Example()
override =
default;
// Parses CLI options, builds the InceptionResNetV1 stem (Conv2d_1a..Conv2d_4b),
// appends the repeated Inception-ResNet blocks and reduction modules, then
// finalizes the graph. Returns true on success (return path elided here).
59 bool do_setup(
int argc,
char **argv)
override
62 cmd_parser.parse(argc, argv);
63 cmd_parser.validate();
// Early-exit path when --help was requested.
69 if (common_params.help)
71 cmd_parser.print_help(argv[0]);
75 const unsigned int image_width = model_input_width->value();
76 const unsigned int image_height = model_input_height->value();
// Default data layout handling for NEON targets (body elided in this extract).
79 if (!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
// Guard (condition elided): quantized QASYMM8 input is rejected for this graph.
86 "QASYMM8 not supported for this graph");
89 std::cout << common_params << std::endl;
90 std::cout <<
"Image width: " << image_width << std::endl;
91 std::cout <<
"Image height: " << image_height << std::endl;
// Model weights are expected under <data_path>/cnn_data/inception_resnet_v1_model/.
94 std::string data_path = common_params.data_path;
95 std::string model_path =
"/cnn_data/inception_resnet_v1_model/";
96 if (!data_path.empty())
98 data_path += model_path;
// TF-style preprocessing: scales pixels into [0, 1].
102 std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);
105 const auto operation_layout = common_params.data_layout;
// Input tensor: W x H x 3 channels x batch, permuted from NCHW to the run layout.
107 TensorShape(image_width, image_height, 3U, common_params.batches),
DataLayout::NCHW, operation_layout);
// Stem: Conv2d_1a_3x3 (stride 2) -> BN -> ReLU.
114 graph << common_params.target << common_params.fast_math_hint
119 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
120 .
set_name(
"Conv2d_1a_3x3/convolution")
126 .
set_name(
"Conv2d_1a_3x3/BatchNorm")
127 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Conv2d_2a_3x3 (valid padding) -> BN -> ReLU.
132 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
133 .
set_name(
"Conv2d_2a_3x3/convolution")
139 .
set_name(
"Conv2d_2a_3x3/BatchNorm")
140 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Conv2d_2b_3x3 (same padding 1,1) -> BN -> ReLU.
145 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
146 .
set_name(
"Conv2d_2b_3x3/convolution")
152 .
set_name(
"Conv2d_2b_3x3/BatchNorm")
153 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Conv2d_3b_1x1 -> BN -> ReLU.
162 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
163 .
set_name(
"Conv2d_3b_1x1/convolution")
169 .
set_name(
"Conv2d_3b_1x1/BatchNorm")
170 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Conv2d_4a_3x3 -> BN -> ReLU.
175 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
176 .
set_name(
"Conv2d_4a_3x3/convolution")
182 .
set_name(
"Conv2d_4a_3x3/BatchNorm")
183 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// NOTE(review): this stride-2 convolution is named "Conv2d_4a_3x3/convolution"
// but its BatchNorm below is "Conv2d_4b_3x3/BatchNorm" — likely a copy-paste
// naming slip (should presumably be Conv2d_4b_3x3/convolution). Verify against
// the upstream model naming before changing the string.
188 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
189 .
set_name(
"Conv2d_4a_3x3/convolution")
195 .
set_name(
"Conv2d_4b_3x3/BatchNorm")
196 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Body: 5x block35, reduction A, 10x block17, reduction B,
// 5x block8 (scale 0.2, with activation), 1x block8 (scale 1, no activation).
200 block35_repeat(data_path, weights_layout, 5);
202 reduction_a(data_path, weights_layout);
204 block17_repeat(data_path, weights_layout, 10);
206 reduction_b(data_path, weights_layout);
208 block8_repeat(data_path, weights_layout, 5, 0.2f,
true);
210 block8_repeat(data_path, weights_layout, 1, 1.f,
false);
// Output sink: dummy accessor (example discards the result tensor).
218 <<
OutputLayer(std::make_unique<DummyAccessor>(0));
// Graph configuration and finalization for the selected target.
223 config.
use_tuner = common_params.enable_tuner;
226 config.
mlgo_file = common_params.mlgo_file;
228 graph.finalize(common_params.target, config);
// Runs the finalized graph; the body is entirely elided in this extract
// (presumably graph.run() per the usual Example pattern — TODO confirm).
233 void do_run()
override
// Appends `num_blocks` Inception-ResNet-A (block35) units. Each unit has three
// branches (1x1; 1x1->3x3; 1x1->3x3->3x3) concatenated, projected by a 1x1
// conv with linear scaling 0.17, then residual-added to the input with ReLU.
247 void block35_repeat(
const std::string &data_path,
DataLayout weights_layout,
unsigned int num_blocks)
249 for (
unsigned int i = 0; i < num_blocks; ++i)
// Per-unit weight-file prefix (Repeat_block35_<i+1>_) and layer-name prefix
// (Repeat/block35_<i+1>/).
251 std::stringstream unit_path_ss;
252 unit_path_ss <<
"Repeat_block35_" << (i + 1) <<
"_";
253 std::stringstream unit_name_ss;
254 unit_name_ss <<
"Repeat/block35_" << (i + 1) <<
"/";
256 std::string unit_path = unit_path_ss.str();
257 std::string unit_name = unit_name_ss.str();
// Branch 0: 1x1 conv -> BN -> ReLU.
268 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
269 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
273 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
277 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
278 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
279 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch 1: 1x1 conv -> BN -> ReLU -> 3x3 conv (pad 1) -> BN -> ReLU.
286 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
287 PadStrideInfo(1, 1, 0, 0))
288 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
290 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
292 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
296 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
297 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
298 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
302 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
303 PadStrideInfo(1, 1, 1, 1))
304 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/convolution")
306 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
308 unit_path +
"Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
312 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/BatchNorm")
313 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
314 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/Relu");
// Branch 2: 1x1 conv -> BN -> ReLU -> 3x3 conv -> BN -> ReLU -> 3x3 conv -> BN -> ReLU.
321 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
322 PadStrideInfo(1, 1, 0, 0))
323 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/convolution")
325 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
327 unit_path +
"Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
331 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/BatchNorm")
332 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
333 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/Relu")
337 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
338 PadStrideInfo(1, 1, 1, 1))
339 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/convolution")
341 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
343 unit_path +
"Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
347 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/BatchNorm")
348 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
349 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/Relu")
353 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
354 PadStrideInfo(1, 1, 1, 1))
355 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/convolution")
357 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
359 unit_path +
"Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
363 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/BatchNorm")
364 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
365 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/Relu");
// Merge: concat three branches, 1x1 projection conv, linear scale 0.17.
368 i_l <<
ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).
set_name(unit_name +
"concat")
373 PadStrideInfo(1, 1, 0, 0))
374 .
set_name(unit_name +
"Conv2d_1x1/convolution")
375 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f))
// Residual add with the untouched input branch, then ReLU.
378 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add")
379 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Appends `num_blocks` Inception-ResNet-B (block17) units. Each unit has two
// branches (1x1; 1x1->1x7->7x1) merged (concat elided in this extract),
// a 1x1 projection with linear scaling 0.10, then residual-add + ReLU.
384 void block17_repeat(
const std::string &data_path,
DataLayout weights_layout,
unsigned int num_blocks)
386 for (
unsigned int i = 0; i < num_blocks; ++i)
// Per-unit weight-file prefix (Repeat_1_block17_<i+1>_) and layer-name prefix.
388 std::stringstream unit_path_ss;
389 unit_path_ss <<
"Repeat_1_block17_" << (i + 1) <<
"_";
390 std::stringstream unit_name_ss;
391 unit_name_ss <<
"Repeat_1/block17_" << (i + 1) <<
"/";
393 std::string unit_path = unit_path_ss.str();
394 std::string unit_name = unit_name_ss.str();
// Branch 0: 1x1 conv -> BN -> ReLU.
405 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
406 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
410 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
414 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
415 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
416 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch 1: 1x1 -> BN -> ReLU -> 1x7 (pad 3,0) -> BN -> ReLU -> 7x1 (pad 0,3) -> BN -> ReLU.
423 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
424 PadStrideInfo(1, 1, 0, 0))
425 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
427 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
429 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
433 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
434 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
435 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
439 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
440 PadStrideInfo(1, 1, 3, 0))
441 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/convolution")
443 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
445 unit_path +
"Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
449 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/BatchNorm")
450 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
451 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/Relu")
455 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
456 PadStrideInfo(1, 1, 0, 3))
457 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/convolution")
459 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
461 unit_path +
"Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
465 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/BatchNorm")
466 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
467 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/Relu");
// Merge: 1x1 projection conv, linear scale 0.10 (concat lines elided here).
475 PadStrideInfo(1, 1, 0, 0))
476 .
set_name(unit_name +
"Conv2d_1x1/convolution")
477 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f))
// Residual add with the input branch, then ReLU.
480 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add")
481 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Appends `num_blocks` Inception-ResNet-C (block8) units. Two branches
// (1x1; 1x1->1x3->3x1), 1x1 projection, linear scale `scale`, residual add;
// the trailing ReLU is conditional (flag name elided — visible at line 605's
// guard). Naming differs for the repeated units vs. the single final Block8.
486 void block8_repeat(
const std::string &data_path,
488 unsigned int num_blocks,
492 for (
unsigned int i = 0; i < num_blocks; ++i)
494 std::stringstream unit_path_ss;
495 std::stringstream unit_name_ss;
// Repeated units: Repeat_2_block8_<i+1>_ / Repeat_2/block8_<i+1>/ prefixes;
// the else-branch (elided condition) uses the plain Block8_ / Block8/ names.
498 unit_path_ss <<
"Repeat_2_block8_" << (i + 1) <<
"_";
499 unit_name_ss <<
"Repeat_2/block8_" << (i + 1) <<
"/";
503 unit_path_ss <<
"Block8_";
504 unit_name_ss <<
"Block8/";
507 std::string unit_path = unit_path_ss.str();
508 std::string unit_name = unit_name_ss.str();
// Branch 0: 1x1 conv -> BN -> ReLU.
519 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
520 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
524 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
528 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
529 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
530 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch 1: 1x1 -> BN -> ReLU -> 1x3 (pad 1,0) -> BN -> ReLU -> 3x1 (pad 0,1) -> BN -> ReLU.
537 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
538 PadStrideInfo(1, 1, 0, 0))
539 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
541 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
543 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
547 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
548 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
549 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
553 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
554 PadStrideInfo(1, 1, 1, 0))
555 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/convolution")
557 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
559 unit_path +
"Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
563 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/BatchNorm")
564 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
565 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/Relu")
569 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
570 PadStrideInfo(1, 1, 0, 1))
571 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/convolution")
573 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
575 unit_path +
"Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
579 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/BatchNorm")
580 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
581 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/Relu");
// Merge: 1x1 projection conv (concat lines elided).
589 PadStrideInfo(1, 1, 0, 0))
590 .
set_name(unit_name +
"Conv2d_1x1/convolution");
// Linear activation applies the caller-supplied residual `scale`.
595 i_l <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR,
scale, 0.f))
// Residual add; note no unconditional ReLU here, unlike block35/block17.
600 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add");
// Conditional trailing ReLU (guard condition elided in this extract).
605 graph <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Reduction-A (Mixed_6a): three branches — stride-2 3x3 conv; 1x1 -> 3x3 ->
// stride-2 3x3 conv chain; stride-2 3x3 max-pool — concatenated into one output.
611 void reduction_a(
const std::string &data_path,
DataLayout weights_layout)
// Branch 0: Conv2d_1a_3x3 stride 2 -> BN -> ReLU.
618 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
619 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
622 get_weights_accessor(data_path,
"Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
626 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
627 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
628 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");
// Branch 1: 1x1 -> BN -> ReLU -> 3x3 (pad 1) -> BN -> ReLU -> stride-2 3x3 -> BN -> ReLU.
635 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
636 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
639 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
643 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
644 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
645 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
649 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
650 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
653 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
657 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
658 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
659 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
663 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
664 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
667 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
671 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
672 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
673 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");
// Branch 2: max-pool (pooling layer itself elided in this extract).
679 .
set_name(
"Mixed_6a/Branch_2/MaxPool_1a_3x3");
// Concatenate the three branches back onto the main graph stream.
682 graph <<
ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).
set_name(
"Mixed_6a/concat");
// Reduction-B (Mixed_7a): four branches — two 1x1 -> stride-2 3x3 chains,
// one 1x1 -> 3x3 -> stride-2 3x3 chain, and a stride-2 max-pool —
// concatenated into one output.
685 void reduction_b(
const std::string &data_path,
DataLayout weights_layout)
// Branch 0: 1x1 -> BN -> ReLU -> stride-2 3x3 -> BN -> ReLU.
692 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
693 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
696 get_weights_accessor(data_path,
"Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
700 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
701 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
702 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
706 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
707 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
710 get_weights_accessor(data_path,
"Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
714 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
715 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
716 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");
// Branch 1: 1x1 -> BN -> ReLU -> stride-2 3x3 -> BN -> ReLU.
723 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
724 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
727 get_weights_accessor(data_path,
"Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
731 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
732 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
733 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
737 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
738 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
741 get_weights_accessor(data_path,
"Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
745 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
746 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
747 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");
// Branch 2: 1x1 -> BN -> ReLU -> 3x3 (pad 1) -> BN -> ReLU -> stride-2 3x3 -> BN -> ReLU.
754 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
755 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
758 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
762 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
763 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
764 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
768 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
769 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
772 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
776 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
777 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
778 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
782 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
783 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
786 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
790 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
791 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
792 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");
// Branch 3: max-pool (pooling layer itself elided in this extract).
798 .
set_name(
"Mixed_7a/Branch_3/MaxPool_1a_3x3");
// Concatenate all four branches onto the main graph stream.
802 <<
ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).
set_name(
"Mixed_7a/concat");
// Entry point: delegates to the ACL example runner, which handles setup,
// execution and teardown of InceptionResNetV1Example.
818 int main(
int argc,
char **argv)
820 return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);