29 #include "utils/Utils.h"
// Graph example class for Inception-ResNet-v2 built with the arm_compute
// graph API. NOTE(review): this chunk is heavily truncated — the class body,
// member declarations and closing brace are not visible here.
36 class InceptionResNetV2Example final :
public Example
// Constructor: default-initializes the command-line parser, the shared
// example options/params, and the graph object (id 0, name "InceptionResNetV2").
39 InceptionResNetV2Example() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0,
"InceptionResNetV2")
// Parses the command line, validates parameters, builds the full
// Inception-ResNet-v2 stem plus repeated blocks, and finalizes the graph.
// NOTE(review): many statements (layer constructors, braces, declarations
// such as weights_layout) are missing from this truncated chunk — do not
// assume the visible fragments are complete expressions.
42 bool do_setup(
int argc,
char **argv)
override
// Parse and validate command-line arguments into common_params.
45 cmd_parser.parse(argc, argv);
46 cmd_parser.validate();
// Print help and exit early when --help was requested.
52 if (common_params.help)
54 cmd_parser.print_help(argv[0]);
// Special-cases the data layout when the user did not set one and the
// target is NEON (the chosen default is not visible in this chunk).
59 if (!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
// Part of a validation check: QASYMM8 data type is rejected for this graph.
66 "QASYMM8 not supported for this graph");
69 std::cout << common_params << std::endl;
// Resolve the on-disk location of the pretrained weights (.npy files).
72 std::string data_path = common_params.data_path;
73 std::string model_path =
"/cnn_data/inception_resnet_v2_model/";
74 if (!data_path.empty())
76 data_path += model_path;
// Input preprocessor: TensorFlow-style scaling into [0, 1].
80 std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>(0.f, 1.f);
83 const auto operation_layout = common_params.data_layout;
84 const TensorShape tensor_shape =
// Stem: Conv2d_1a_3x3 (stride 2) -> BatchNorm -> ReLU, followed by the
// other stem convolutions below. Layer-constructor heads are missing here.
92 graph << common_params.target << common_params.fast_math_hint
97 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
98 .
set_name(
"Conv2d_1a_3x3/convolution")
103 0.0010000000474974513f)
104 .
set_name(
"Conv2d_1a_3x3/BatchNorm")
105 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Stem: Conv2d_2a_3x3 -> BatchNorm -> ReLU.
110 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
111 .
set_name(
"Conv2d_2a_3x3/convolution")
116 0.0010000000474974513f)
117 .
set_name(
"Conv2d_2a_3x3/BatchNorm")
118 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Stem: Conv2d_2b_3x3 (pad 1) -> BatchNorm -> ReLU.
123 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
124 .
set_name(
"Conv2d_2b_3x3/convolution")
129 0.0010000000474974513f)
130 .
set_name(
"Conv2d_2b_3x3/BatchNorm")
131 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Stem: Conv2d_3b_1x1 -> BatchNorm -> ReLU.
140 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
141 .
set_name(
"Conv2d_3b_1x1/convolution")
146 0.0010000000474974513f)
147 .
set_name(
"Conv2d_3b_1x1/BatchNorm")
148 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Stem: Conv2d_4a_3x3 -> BatchNorm -> ReLU -> MaxPool_5a_3x3.
153 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
154 .
set_name(
"Conv2d_4a_3x3/convolution")
159 0.0010000000474974513f)
160 .
set_name(
"Conv2d_4a_3x3/BatchNorm")
161 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
165 .
set_name(
"MaxPool_5a_3x3/MaxPool")
// Network body: mixed blocks interleaved with the repeated residual blocks
// (10x block35, 20x block17, then 9+1 block8; last block8 uses scale 1.0
// and, per the final bool, no trailing activation — TODO confirm, the
// parameter name is not visible in this chunk).
167 block_mixed_5b(data_path, weights_layout);
168 block35_repeat(data_path, weights_layout, 10);
169 block_mixed_6a(data_path, weights_layout);
170 block17_repeat(data_path, weights_layout, 20);
171 block_mixed_7a(data_path, weights_layout);
172 block8_repeat(data_path, weights_layout, 9, 0.2f,
true);
173 block8_repeat(data_path, weights_layout, 1, 1.f,
false);
// Head: Conv2d_7b_1x1 -> BatchNorm -> ReLU (classifier layers not visible).
178 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
179 .
set_name(
"Conv2d_7b_1x1/convolution")
184 0.0010000000474974513f)
185 .
set_name(
"Conv2d_7b_1x1/BatchNorm")
186 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Finalize the graph with runtime configuration (tuner / MLGO file from CLI).
199 config.
use_tuner = common_params.enable_tuner;
202 config.
mlgo_file = common_params.mlgo_file;
204 graph.finalize(common_params.target, config);
// Executes the finalized graph; the body is not visible in this chunk
// (presumably graph.run() — TODO confirm against the full file).
209 void do_run()
override
// Builds the Mixed_5b inception block: four parallel branches (1x1 conv;
// 1x1 -> 5x5 conv; 1x1 -> 3x3 -> 3x3 conv; avg-pool -> 1x1 conv), each
// conv followed by BatchNorm + ReLU, concatenated at the end.
// NOTE(review): sub-stream declarations (i_a..i_d) and layer-constructor
// heads are missing from this truncated chunk.
221 void block_mixed_5b(
const std::string &data_path,
DataLayout weights_layout)
// Branch_0: Conv2d_1x1 -> BatchNorm -> ReLU.
228 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
229 .
set_name(
"Mixed_5b/Branch_0/Conv2d_1x1/convolution")
235 0.0010000000474974513f)
236 .
set_name(
"Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
237 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
238 .
set_name(
"Mixed_5b/Branch_0/Conv2d_1x1/Relu");
// Branch_1: Conv2d_0a_1x1 then Conv2d_0b_5x5 (pad 2), each + BN + ReLU.
245 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
246 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
249 get_weights_accessor(data_path,
"Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
252 0.0010000000474974513f)
253 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
254 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
255 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
259 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 2, 2))
260 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
263 get_weights_accessor(data_path,
"Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
266 0.0010000000474974513f)
267 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
268 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
269 .
set_name(
"Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");
// Branch_2: Conv2d_0a_1x1 -> Conv2d_0b_3x3 -> Conv2d_0c_3x3, each + BN + ReLU.
276 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
277 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
280 get_weights_accessor(data_path,
"Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
283 0.0010000000474974513f)
284 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
285 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
286 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
290 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
291 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
294 get_weights_accessor(data_path,
"Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
297 0.0010000000474974513f)
298 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
299 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
300 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
304 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
305 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
308 get_weights_accessor(data_path,
"Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
311 0.0010000000474974513f)
312 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
313 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
314 .
set_name(
"Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");
// Branch_3: AvgPool_0a_3x3 then Conv2d_0b_1x1 + BN + ReLU.
320 .
set_name(
"Mixed_5b/Branch_3/AvgPool_0a_3x3")
324 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
325 .
set_name(
"Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
328 get_weights_accessor(data_path,
"Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
331 0.0010000000474974513f)
332 .
set_name(
"Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
333 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
334 .
set_name(
"Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");
// Concatenate all four branches.
// NOTE(review): the concat node is named "Mixed_5a/concat" inside a
// Mixed_5b block — looks like an upstream naming quirk; verify before
// "fixing", since tools may match on this exact node name.
338 <<
ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).
set_name(
"Mixed_5a/concat");
// Builds the Mixed_6a reduction block: three branches (stride-2 3x3 conv;
// 1x1 -> 3x3 -> stride-2 3x3 conv; stride-2 max-pool), concatenated.
// NOTE(review): sub-stream declarations (i_a..i_c) and layer-constructor
// heads are missing from this truncated chunk.
341 void block_mixed_6a(
const std::string &data_path,
DataLayout weights_layout)
// Branch_0: Conv2d_1a_3x3 (stride 2) -> BatchNorm -> ReLU.
348 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
349 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
352 get_weights_accessor(data_path,
"Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
355 0.0010000000474974513f)
356 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
357 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
358 .
set_name(
"Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");
// Branch_1: Conv2d_0a_1x1 -> Conv2d_0b_3x3 (pad 1) -> Conv2d_1a_3x3 (stride 2).
365 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
366 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
369 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
372 0.0010000000474974513f)
373 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
374 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
375 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
379 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
380 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
383 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
386 0.0010000000474974513f)
387 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
388 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
389 .
set_name(
"Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
393 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
394 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
397 get_weights_accessor(data_path,
"Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
400 0.0010000000474974513f)
401 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
402 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
403 .
set_name(
"Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");
// Branch_2: MaxPool_1a_3x3 (pooling-layer constructor not visible).
409 .
set_name(
"Mixed_6a/Branch_2/MaxPool_1a_3x3");
// Concatenate the three branches into the main graph stream.
412 graph <<
ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).
set_name(
"Mixed_6a/concat");
// Builds the Mixed_7a reduction block: four branches (1x1 -> stride-2 3x3;
// 1x1 -> stride-2 3x3; 1x1 -> 3x3 -> stride-2 3x3; stride-2 max-pool),
// concatenated. NOTE(review): sub-stream declarations (i_a..i_d) and
// layer-constructor heads are missing from this truncated chunk.
415 void block_mixed_7a(
const std::string &data_path,
DataLayout weights_layout)
// Branch_0: Conv2d_0a_1x1 -> Conv2d_1a_3x3 (stride 2), each + BN + ReLU.
422 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
423 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
426 get_weights_accessor(data_path,
"Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
429 0.0010000000474974513f)
430 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
431 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
432 .
set_name(
"Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
436 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
437 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
440 get_weights_accessor(data_path,
"Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
443 0.0010000000474974513f)
444 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
445 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
446 .
set_name(
"Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");
// Branch_1: Conv2d_0a_1x1 -> Conv2d_1a_3x3 (stride 2), each + BN + ReLU.
453 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
454 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
457 get_weights_accessor(data_path,
"Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
460 0.0010000000474974513f)
461 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
462 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
463 .
set_name(
"Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
467 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
468 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
471 get_weights_accessor(data_path,
"Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
474 0.0010000000474974513f)
475 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
476 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
477 .
set_name(
"Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");
// Branch_2: Conv2d_0a_1x1 -> Conv2d_0b_3x3 (pad 1) -> Conv2d_1a_3x3 (stride 2).
484 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
485 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
488 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
491 0.0010000000474974513f)
492 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
493 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
494 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
498 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 1, 1))
499 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
502 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
505 0.0010000000474974513f)
506 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
507 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
508 .
set_name(
"Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
512 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(2, 2, 0, 0))
513 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
516 get_weights_accessor(data_path,
"Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
519 0.0010000000474974513f)
520 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
521 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
522 .
set_name(
"Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");
// Branch_3: MaxPool_1a_3x3 (pooling-layer constructor not visible).
528 .
set_name(
"Mixed_7a/Branch_3/MaxPool_1a_3x3");
// Concatenate all four branches.
532 <<
ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).
set_name(
"Mixed_7a/concat");
// Appends num_blocks Inception-ResNet "block35" residual units. Each unit:
// three branches (1x1; 1x1->3x3; 1x1->3x3->3x3) concatenated, projected by
// a 1x1 conv, scaled by 0.17 (LINEAR activation), added to the residual
// input, then ReLU. NOTE(review): sub-stream declarations (i_l, i_r,
// i_la..i_lc) and layer heads are missing from this truncated chunk.
535 void block35_repeat(
const std::string &data_path,
DataLayout weights_layout,
unsigned int num_blocks)
537 for (
unsigned int i = 0; i < num_blocks; ++i)
// Build per-unit weight-file prefix and graph-node name prefix
// (1-based unit index).
539 std::stringstream unit_path_ss;
540 unit_path_ss <<
"Repeat_block35_" << (i + 1) <<
"_";
541 std::stringstream unit_name_ss;
542 unit_name_ss <<
"Repeat/block35_" << (i + 1) <<
"/";
544 std::string unit_path = unit_path_ss.str();
545 std::string unit_name = unit_name_ss.str();
// Branch_0: Conv2d_1x1 -> BatchNorm -> ReLU.
556 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
557 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
561 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
564 0.0010000000474974513f)
565 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
566 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
567 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch_1: Conv2d_0a_1x1 -> Conv2d_0b_3x3 (pad 1), each + BN + ReLU.
574 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
575 PadStrideInfo(1, 1, 0, 0))
576 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
578 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
580 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
583 0.0010000000474974513f)
584 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
585 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
586 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
590 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
591 PadStrideInfo(1, 1, 1, 1))
592 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/convolution")
594 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
596 unit_path +
"Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
599 0.0010000000474974513f)
600 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/BatchNorm")
601 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
602 .
set_name(unit_name +
"Branch_1/Conv2d_0b_3x3/Relu");
// Branch_2: Conv2d_0a_1x1 -> Conv2d_0b_3x3 -> Conv2d_0c_3x3, each + BN + ReLU.
609 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
610 PadStrideInfo(1, 1, 0, 0))
611 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/convolution")
613 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
615 unit_path +
"Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
618 0.0010000000474974513f)
619 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/BatchNorm")
620 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
621 .
set_name(unit_name +
"Branch_2/Conv2d_0a_1x1/Relu")
625 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
626 PadStrideInfo(1, 1, 1, 1))
627 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/convolution")
629 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
631 unit_path +
"Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
634 0.0010000000474974513f)
635 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/BatchNorm")
636 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
637 .
set_name(unit_name +
"Branch_2/Conv2d_0b_3x3/Relu")
641 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
642 PadStrideInfo(1, 1, 1, 1))
643 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/convolution")
645 get_weights_accessor(data_path, unit_path +
"Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
647 unit_path +
"Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
650 0.0010000000474974513f)
651 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/BatchNorm")
652 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
653 .
set_name(unit_name +
"Branch_2/Conv2d_0c_3x3/Relu");
// Concatenate branches, project with 1x1 conv, scale residual by 0.17
// via a LINEAR activation (a=0.17, b=0).
656 i_l <<
ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).
set_name(unit_name +
"concat")
661 PadStrideInfo(1, 1, 0, 0))
662 .
set_name(unit_name +
"Conv2d_1x1/convolution")
663 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f))
// Residual add of the scaled branch output, then ReLU.
666 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add")
667 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Appends num_blocks Inception-ResNet "block17" residual units. Each unit:
// two branches (1x1; 1x1 -> 1x7 -> 7x1 factorized conv) concatenated,
// projected by a 1x1 conv, scaled by 0.10 (LINEAR activation), added to
// the residual input, then ReLU. NOTE(review): sub-stream declarations
// (i_l, i_r) and layer heads are missing from this truncated chunk.
672 void block17_repeat(
const std::string &data_path,
DataLayout weights_layout,
unsigned int num_blocks)
674 for (
unsigned int i = 0; i < num_blocks; ++i)
// Per-unit weight-file prefix and graph-node name prefix (1-based).
676 std::stringstream unit_path_ss;
677 unit_path_ss <<
"Repeat_1_block17_" << (i + 1) <<
"_";
678 std::stringstream unit_name_ss;
679 unit_name_ss <<
"Repeat_1/block17_" << (i + 1) <<
"/";
681 std::string unit_path = unit_path_ss.str();
682 std::string unit_name = unit_name_ss.str();
// Branch_0: Conv2d_1x1 -> BatchNorm -> ReLU.
693 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
694 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
698 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
701 0.0010000000474974513f)
702 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
703 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
704 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch_1: Conv2d_0a_1x1 -> Conv2d_0b_1x7 (pad 3,0) -> Conv2d_0c_7x1
// (pad 0,3), each + BN + ReLU.
711 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
712 PadStrideInfo(1, 1, 0, 0))
713 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
715 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
717 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
720 0.0010000000474974513f)
721 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
722 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
723 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
727 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
728 PadStrideInfo(1, 1, 3, 0))
729 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/convolution")
731 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
733 unit_path +
"Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
736 0.0010000000474974513f)
737 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/BatchNorm")
738 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
739 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x7/Relu")
743 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
744 PadStrideInfo(1, 1, 0, 3))
745 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/convolution")
747 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
749 unit_path +
"Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
752 0.0010000000474974513f)
753 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/BatchNorm")
754 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
755 .
set_name(unit_name +
"Branch_1/Conv2d_0c_7x1/Relu");
// Project with 1x1 conv and scale by 0.10 via LINEAR activation (a=0.1, b=0).
763 PadStrideInfo(1, 1, 0, 0))
764 .
set_name(unit_name +
"Conv2d_1x1/convolution")
765 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f))
// Residual add of the scaled branch output, then ReLU.
768 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add")
769 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Appends num_blocks Inception-ResNet "block8" residual units. Each unit:
// two branches (1x1; 1x1 -> 1x3 -> 3x1 factorized conv) concatenated,
// projected by a 1x1 conv, scaled by `scale` (LINEAR activation) and
// residual-added; a trailing ReLU is applied conditionally (the controlling
// bool parameter's declaration is cut from this chunk — TODO confirm).
// Naming differs per call site: "Repeat_2/block8_N/" for the repeated
// units vs plain "Block8/" — presumably when the branch below selects the
// non-repeated naming; the condition itself is not visible here.
774 void block8_repeat(
const std::string &data_path,
776 unsigned int num_blocks,
780 for (
unsigned int i = 0; i < num_blocks; ++i)
782 std::stringstream unit_path_ss;
783 std::stringstream unit_name_ss;
// Repeated-unit naming (1-based index).
786 unit_path_ss <<
"Repeat_2_block8_" << (i + 1) <<
"_";
787 unit_name_ss <<
"Repeat_2/block8_" << (i + 1) <<
"/";
// Alternate naming for the single final block8 unit.
791 unit_path_ss <<
"Block8_";
792 unit_name_ss <<
"Block8/";
795 std::string unit_path = unit_path_ss.str();
796 std::string unit_name = unit_name_ss.str();
// Branch_0: Conv2d_1x1 -> BatchNorm -> ReLU.
807 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr), PadStrideInfo(1, 1, 0, 0))
808 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/convolution")
812 unit_path +
"Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
815 0.0010000000474974513f)
816 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/BatchNorm")
817 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
818 .
set_name(unit_name +
"Branch_0/Conv2d_1x1/Relu");
// Branch_1: Conv2d_0a_1x1 -> Conv2d_0b_1x3 (pad 1,0) -> Conv2d_0c_3x1
// (pad 0,1), each + BN + ReLU.
825 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
826 PadStrideInfo(1, 1, 0, 0))
827 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/convolution")
829 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
831 unit_path +
"Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
834 0.0010000000474974513f)
835 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/BatchNorm")
836 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
837 .
set_name(unit_name +
"Branch_1/Conv2d_0a_1x1/Relu")
841 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
842 PadStrideInfo(1, 1, 1, 0))
843 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/convolution")
845 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
847 unit_path +
"Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
850 0.0010000000474974513f)
851 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/BatchNorm")
852 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
853 .
set_name(unit_name +
"Branch_1/Conv2d_0b_1x3/Relu")
857 std::unique_ptr<arm_compute::graph::ITensorAccessor>(
nullptr),
858 PadStrideInfo(1, 1, 0, 1))
859 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/convolution")
861 get_weights_accessor(data_path, unit_path +
"Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
863 unit_path +
"Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
866 0.0010000000474974513f)
867 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/BatchNorm")
868 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
869 .
set_name(unit_name +
"Branch_1/Conv2d_0c_3x1/Relu");
// Project with 1x1 conv.
877 PadStrideInfo(1, 1, 0, 0))
878 .
set_name(unit_name +
"Conv2d_1x1/convolution");
// Apply the residual scaling factor via a LINEAR activation (a=scale, b=0);
// presumably skipped when scale == 1 — the guarding condition is not
// visible in this chunk.
883 i_l <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR,
scale, 0.f))
// Residual add of branch output and shortcut.
888 graph <<
EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).
set_name(unit_name +
"add");
// Conditional trailing ReLU (guard not visible in this chunk).
893 graph <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Entry point: delegates to the arm_compute example runner, which drives
// the example's do_setup / do_run lifecycle and returns its status code.
914 int main(
int argc,
char **argv)
916 return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);