Arm Compute Library 21.02
graph_inception_v3.cpp — example implementing the InceptionV3 network with the graph API.
1 /*
2  * Copyright (c) 2017-2021 Arm Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
#include "arm_compute/graph.h"

#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"
29 
30 using namespace arm_compute::utils;
31 using namespace arm_compute::graph::frontend;
32 using namespace arm_compute::graph_utils;
33 
34 /** Example demonstrating how to implement InceptionV3's network using the Compute Library's graph API */
35 class InceptionV3Example : public Example
36 {
37 public:
38  InceptionV3Example()
39  : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionV3")
40  {
41  }
42  bool do_setup(int argc, char **argv) override
43  {
44  // Parse arguments
45  cmd_parser.parse(argc, argv);
46  cmd_parser.validate();
47 
48  // Consume common parameters
49  common_params = consume_common_graph_parameters(common_opts);
50 
51  // Return when help menu is requested
52  if(common_params.help)
53  {
54  cmd_parser.print_help(argv[0]);
55  return false;
56  }
57 
58  // Print parameter values
59  std::cout << common_params << std::endl;
60 
61  // Get trainable parameters data path
62  std::string data_path = common_params.data_path;
63 
64  // Create a preprocessor object
65  std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<TFPreproccessor>();
66 
67  // Create input descriptor
68  const auto operation_layout = common_params.data_layout;
69  const TensorShape tensor_shape = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, operation_layout);
70  TensorDescriptor input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
71 
72  // Set weights trained layout
73  const DataLayout weights_layout = DataLayout::NCHW;
74 
75  graph << common_params.target
76  << common_params.fast_math_hint
77  << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
78  << ConvolutionLayer(3U, 3U, 32U,
79  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_weights.npy", weights_layout),
80  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(2, 2, 0, 0))
81  .set_name("Conv2d_1a_3x3/convolution")
83  "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
84  get_weights_accessor(data_path,
85  "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
86  nullptr, get_weights_accessor(data_path,
87  "/cnn_data/inceptionv3_model/Conv2d_1a_3x3_BatchNorm_beta.npy"),
88  0.001f)
89  .set_name("Conv2d_1a_3x3/BatchNorm/batchnorm")
90  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
91  << ConvolutionLayer(3U, 3U, 32U,
92  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_weights.npy", weights_layout),
93  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
94  .set_name("Conv2d_2a_3x3/convolution")
96  "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
97  get_weights_accessor(data_path,
98  "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
99  nullptr, get_weights_accessor(data_path,
100  "/cnn_data/inceptionv3_model/Conv2d_2a_3x3_BatchNorm_beta.npy"),
101  0.001f)
102  .set_name("Conv2d_2a_3x3/BatchNorm/batchnorm")
103  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
104 
105  << ConvolutionLayer(3U, 3U, 64U,
106  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_weights.npy", weights_layout),
107  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 1, 1))
108  .set_name("Conv2d_2b_3x3/convolution")
110  "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
111  get_weights_accessor(data_path,
112  "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
113  nullptr, get_weights_accessor(data_path,
114  "/cnn_data/inceptionv3_model/Conv2d_2b_3x3_BatchNorm_beta.npy"),
115  0.001f)
116  .set_name("Conv2d_2b_3x3/BatchNorm/batchnorm")
117  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
118 
119  << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_3a_3x3/MaxPool")
120 
121  << ConvolutionLayer(1U, 1U, 80U,
122  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_weights.npy", weights_layout),
123  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
124  .set_name("Conv2d_3b_1x1/convolution")
126  "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
127  get_weights_accessor(data_path,
128  "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
129  nullptr, get_weights_accessor(data_path,
130  "/cnn_data/inceptionv3_model/Conv2d_3b_1x1_BatchNorm_beta.npy"),
131  0.001f)
132  .set_name("Conv2d_3b_1x1/BatchNorm/batchnorm")
133  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
134 
135  << ConvolutionLayer(3U, 3U, 192U,
136  get_weights_accessor(data_path, "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_weights.npy", weights_layout),
137  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr), PadStrideInfo(1, 1, 0, 0))
138  .set_name("Conv2d_4a_3x3/convolution")
140  "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
141  get_weights_accessor(data_path,
142  "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
143  nullptr, get_weights_accessor(data_path,
144  "/cnn_data/inceptionv3_model/Conv2d_4a_3x3_BatchNorm_beta.npy"),
145  0.001f)
146  .set_name("Conv2d_4a_3x3/BatchNorm/batchnorm")
147  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
148 
149  << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name("MaxPool_5a_3x3/MaxPool");
150 
151  graph << get_inception_node_A(data_path, "Mixed_5b", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
152  32U)
153  .set_name("Mixed_5b/concat");
154  graph << get_inception_node_A(data_path, "Mixed_5c", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
155  64U, true)
156  .set_name("Mixed_5c/concat");
157  graph << get_inception_node_A(data_path, "Mixed_5d", weights_layout, 64U, std::make_tuple(48U, 64U), std::make_tuple(64U, 96U, 96U),
158  64U)
159  .set_name("Mixed_5d/concat");
160 
161  graph << get_inception_node_B(data_path, "Mixed_6a", weights_layout, 384U, std::make_tuple(64U, 96U, 96U)).set_name("Mixed_6a/concat");
162 
163  graph << get_inception_node_C(data_path, "Mixed_6b", weights_layout, 192U, std::make_tuple(128U, 128U, 192U),
164  std::make_tuple(128U, 128U, 128U, 128U, 192U), 192U)
165  .set_name("Mixed_6b/concat");
166  graph << get_inception_node_C(data_path, "Mixed_6c", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
167  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
168  .set_name("Mixed_6c/concat");
169  graph << get_inception_node_C(data_path, "Mixed_6d", weights_layout, 192U, std::make_tuple(160U, 160U, 192U),
170  std::make_tuple(160U, 160U, 160U, 160U, 192U), 192U)
171  .set_name("Mixed_6d/concat");
172  graph << get_inception_node_C(data_path, "Mixed_6e", weights_layout, 192U, std::make_tuple(192U, 192U, 192U),
173  std::make_tuple(192U, 192U, 192U, 192U, 192U), 192U)
174  .set_name("Mixed_6e/concat");
175 
176  graph << get_inception_node_D(data_path, "Mixed_7a", weights_layout, std::make_tuple(192U, 320U),
177  std::make_tuple(192U, 192U, 192U, 192U))
178  .set_name("Mixed_7a/concat");
179 
180  graph << get_inception_node_E(data_path, "Mixed_7b", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
181  std::make_tuple(448U, 384U, 384U, 384U), 192U)
182  .set_name("Mixed_7b/concat");
183  graph << get_inception_node_E(data_path, "Mixed_7c", weights_layout, 320U, std::make_tuple(384U, 384U, 384U),
184  std::make_tuple(448U, 384U, 384U, 384U), 192U, true)
185  .set_name("Mixed_7c/concat");
186 
187  graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 8, operation_layout, PadStrideInfo(1, 1, 0, 0, DimensionRoundingType::CEIL))).set_name("Logits/AvgPool_1a_8x8/AvgPool")
188  << ConvolutionLayer(1U, 1U, 1001U, get_weights_accessor(data_path,
189  "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_weights.npy", weights_layout),
190  get_weights_accessor(data_path,
191  "/cnn_data/inceptionv3_model/Logits_Conv2d_1c_1x1_biases.npy"),
192  PadStrideInfo(1, 1, 0, 0))
193  .set_name("Logits/Conv2d_1c_1x1/convolution")
194  << ReshapeLayer(TensorShape(1001U)).set_name("Predictions/Reshape")
195  << SoftmaxLayer().set_name("Predictions/Softmax")
196  << OutputLayer(get_output_accessor(common_params, 5));
197 
198  // Finalize graph
199  GraphConfig config;
200  config.num_threads = common_params.threads;
201  config.use_tuner = common_params.enable_tuner;
202  config.tuner_mode = common_params.tuner_mode;
203  config.tuner_file = common_params.tuner_file;
204  config.mlgo_file = common_params.mlgo_file;
205  config.convert_to_uint8 = (common_params.data_type == DataType::QASYMM8);
206 
207  graph.finalize(common_params.target, config);
208 
209  return true;
210  }
211 
    /** Run the finalized graph (a single inference pass over the stream). */
    void do_run() override
    {
        // Run graph
        graph.run();
    }
216 
private:
    CommandLineParser  cmd_parser;    // Command line parser
    CommonGraphOptions common_opts;   // Options shared by all graph examples (target, data type, ...)
    CommonGraphParams  common_params; // Parsed values of the common options
    Stream             graph;         // Graph stream the network is built into

private:
224  ConcatLayer get_inception_node_A(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
225  unsigned int a_filt,
226  std::tuple<unsigned int, unsigned int> b_filters,
227  std::tuple<unsigned int, unsigned int, unsigned int> c_filters,
228  unsigned int d_filt,
229  bool is_name_different = false)
230  {
231  std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
232 
233  // This is due to a naming issue in the tf model
234  std::string conv_id0 = "_0a_";
235  std::string conv_id1 = "2d_0b_";
236  if(is_name_different)
237  {
238  conv_id0 = "_0b_";
239  conv_id1 = "_1_0c_";
240  }
241 
242  SubStream i_a(graph);
243  i_a << ConvolutionLayer(
244  1U, 1U, a_filt,
245  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
246  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
247  PadStrideInfo(1, 1, 0, 0))
248  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
250  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
251  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
252  nullptr,
253  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
254  0.001f)
255  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
256  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
257 
258  SubStream i_b(graph);
259  i_b << ConvolutionLayer(
260  1U, 1U, std::get<0>(b_filters),
261  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_weights.npy", weights_layout),
262  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
263  PadStrideInfo(1, 1, 0, 0))
264  .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/convolution")
266  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_mean.npy"),
267  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_moving_variance.npy"),
268  nullptr,
269  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id0 + "1x1_BatchNorm_beta.npy"),
270  0.001f)
271  .set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/BatchNorm/batchnorm")
272  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id0 + "1x1/Relu")
273  << ConvolutionLayer(
274  5U, 5U, std::get<1>(b_filters),
275  get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_weights.npy", weights_layout),
276  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
277  PadStrideInfo(1, 1, 2, 2))
278  .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/convolution")
280  get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_mean.npy"),
281  get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_moving_variance.npy"),
282  nullptr,
283  get_weights_accessor(data_path, total_path + "Branch_1_Conv" + conv_id1 + "5x5_BatchNorm_beta.npy"),
284  0.001f)
285  .set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/BatchNorm/batchnorm")
286  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id1 + "5x5/Relu");
287 
288  SubStream i_c(graph);
289  i_c << ConvolutionLayer(
290  1U, 1U, std::get<0>(c_filters),
291  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
292  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
293  PadStrideInfo(1, 1, 0, 0))
294  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
296  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
297  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
298  nullptr,
299  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
300  0.001f)
301  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
302  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
303  << ConvolutionLayer(
304  3U, 3U, std::get<1>(c_filters),
305  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
306  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
307  PadStrideInfo(1, 1, 1, 1))
308  .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
310  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
311  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
312  nullptr,
313  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
314  0.001f)
315  .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
316  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu")
317  << ConvolutionLayer(
318  3U, 3U, std::get<2>(c_filters),
319  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
320  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
321  PadStrideInfo(1, 1, 1, 1))
322  .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/convolution")
324  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
325  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
326  nullptr,
327  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
328  0.001f)
329  .set_name(param_path + "/Branch_2/Conv2d_0c_3x3/BatchNorm/batcnorm")
330  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_3x3/Relu");
331 
332  SubStream i_d(graph);
333  i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
334  true))
335  .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
336  << ConvolutionLayer(
337  1U, 1U, d_filt,
338  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
339  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
340  PadStrideInfo(1, 1, 0, 0))
341  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
343  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
344  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
345  nullptr,
346  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
347  0.001f)
348  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
349  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
350 
351  return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
352  }
353 
354  ConcatLayer get_inception_node_B(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
355  unsigned int a_filt,
356  std::tuple<unsigned int, unsigned int, unsigned int> b_filters)
357  {
358  std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
359  SubStream i_a(graph);
360  i_a << ConvolutionLayer(
361  3U, 3U, a_filt,
362  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_weights.npy", weights_layout),
363  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
364  PadStrideInfo(2, 2, 0, 0))
365  .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/convolution")
367  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
368  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
369  nullptr,
370  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_1x1_BatchNorm_beta.npy"),
371  0.001f)
372  .set_name(param_path + "/Branch_0/Conv2d_1a_1x1/BatchNorm/batchnorm")
373  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_1x1/Relu");
374 
375  SubStream i_b(graph);
376  i_b << ConvolutionLayer(
377  1U, 1U, std::get<0>(b_filters),
378  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
379  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
380  PadStrideInfo(1, 1, 0, 0))
381  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
383  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
384  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
385  nullptr,
386  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
387  0.001f)
388  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
389  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
390  << ConvolutionLayer(
391  3U, 3U, std::get<1>(b_filters),
392  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
393  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
394  PadStrideInfo(1, 1, 1, 1))
395  .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/convolution")
397  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
398  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
399  nullptr,
400  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
401  0.001f)
402  .set_name(param_path + "/Branch_1/Conv2d_0b_3x3/BatchNorm/batchnorm")
403  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_3x3/Relu")
404  << ConvolutionLayer(
405  3U, 3U, std::get<2>(b_filters),
406  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_weights.npy", weights_layout),
407  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
408  PadStrideInfo(2, 2, 0, 0))
409  .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/convolution")
411  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_mean.npy"),
412  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_moving_variance.npy"),
413  nullptr,
414  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_1x1_BatchNorm_beta.npy"),
415  0.001f)
416  .set_name(param_path + "/Branch_1/Conv2d_1a_1x1/BatchNorm/batchnorm")
417  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_1x1/Relu");
418 
419  SubStream i_c(graph);
420  i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
421 
422  return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
423  }
424 
425  ConcatLayer get_inception_node_C(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
426  unsigned int a_filt,
427  std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
428  std::tuple<unsigned int, unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
429  unsigned int d_filt)
430  {
431  std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
432  SubStream i_a(graph);
433  i_a << ConvolutionLayer(
434  1U, 1U, a_filt,
435  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
436  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
437  PadStrideInfo(1, 1, 0, 0))
438  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
440  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
441  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
442  nullptr,
443  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
444  0.001f)
445  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
446  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
447 
448  SubStream i_b(graph);
449  i_b << ConvolutionLayer(
450  1U, 1U, std::get<0>(b_filters),
451  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
452  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
453  PadStrideInfo(1, 1, 0, 0))
454  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
456  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
457  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
458  nullptr,
459  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
460  0.001f)
461  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
462  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
463  << ConvolutionLayer(
464  7U, 1U, std::get<1>(b_filters),
465  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
466  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
467  PadStrideInfo(1, 1, 3, 0))
468  .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
470  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
471  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
472  nullptr,
473  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
474  0.001f)
475  .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
476  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
477  << ConvolutionLayer(
478  1U, 7U, std::get<2>(b_filters),
479  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
480  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
481  PadStrideInfo(1, 1, 0, 3))
482  .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
484  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
485  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
486  nullptr,
487  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
488  0.001f)
489  .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
490  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0c_7x1/Relu");
491 
492  SubStream i_c(graph);
493  i_c << ConvolutionLayer(
494  1U, 1U, std::get<0>(c_filters),
495  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
496  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
497  PadStrideInfo(1, 1, 0, 0))
498  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
500  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
501  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
502  nullptr,
503  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
504  0.001f)
505  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
506  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
507  << ConvolutionLayer(
508  1U, 7U, std::get<1>(c_filters),
509  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_weights.npy", weights_layout),
510  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
511  PadStrideInfo(1, 1, 0, 3))
512  .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/convolution")
514  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_mean.npy"),
515  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_moving_variance.npy"),
516  nullptr,
517  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_7x1_BatchNorm_beta.npy"),
518  0.001f)
519  .set_name(param_path + "/Branch_2/Conv2d_0b_7x1/BatchNorm/batchnorm")
520  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_7x1/Relu")
521  << ConvolutionLayer(
522  7U, 1U, std::get<2>(c_filters),
523  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_weights.npy", weights_layout),
524  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
525  PadStrideInfo(1, 1, 3, 0))
526  .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/convolution")
528  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_mean.npy"),
529  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_moving_variance.npy"),
530  nullptr,
531  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x7_BatchNorm_beta.npy"),
532  0.001f)
533  .set_name(param_path + "/Branch_2/Conv2d_0c_1x7/BatchNorm/batchnorm")
534  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x7/Relu")
535  << ConvolutionLayer(
536  1U, 7U, std::get<3>(c_filters),
537  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_weights.npy", weights_layout),
538  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
539  PadStrideInfo(1, 1, 0, 3))
540  .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/convolution")
542  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_mean.npy"),
543  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_moving_variance.npy"),
544  nullptr,
545  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_7x1_BatchNorm_beta.npy"),
546  0.001f)
547  .set_name(param_path + "/Branch_2/Conv2d_0d_7x1/BatchNorm/batchnorm")
548  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_7x1/Relu")
549  << ConvolutionLayer(
550  7U, 1U, std::get<4>(c_filters),
551  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_weights.npy", weights_layout),
552  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
553  PadStrideInfo(1, 1, 3, 0))
554  .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/convolution")
556  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_mean.npy"),
557  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_moving_variance.npy"),
558  nullptr,
559  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0e_1x7_BatchNorm_beta.npy"),
560  0.001f)
561  .set_name(param_path + "/Branch_2/Conv2d_0e_1x7/BatchNorm/batchnorm")
562  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0e_1x7/Relu");
563 
564  SubStream i_d(graph);
565  i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
566  true))
567  .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
568  << ConvolutionLayer(
569  1U, 1U, d_filt,
570  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
571  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
572  PadStrideInfo(1, 1, 0, 0))
573  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
575  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
576  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
577  nullptr,
578  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
579  0.001f)
580  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
581  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
582 
583  return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
584  }
585 
586  ConcatLayer get_inception_node_D(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
587  std::tuple<unsigned int, unsigned int> a_filters,
588  std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> b_filters)
589  {
590  std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
591  SubStream i_a(graph);
592  i_a << ConvolutionLayer(
593  1U, 1U, std::get<0>(a_filters),
594  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
595  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
596  PadStrideInfo(1, 1, 0, 0))
597  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
599  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
600  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
601  nullptr,
602  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
603  0.001f)
604  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
605  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu")
606  << ConvolutionLayer(
607  3U, 3U, std::get<1>(a_filters),
608  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
609  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
610  PadStrideInfo(2, 2, 0, 0))
611  .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/convolution")
613  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
614  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
615  nullptr,
616  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
617  0.001f)
618  .set_name(param_path + "/Branch_0/Conv2d_1a_3x3/BatchNorm/batchnorm")
619  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_1a_3x3/Relu");
620 
621  SubStream i_b(graph);
622  i_b << ConvolutionLayer(
623  1U, 1U, std::get<0>(b_filters),
624  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
625  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
626  PadStrideInfo(1, 1, 0, 0))
627  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
629  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
630  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
631  nullptr,
632  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
633  0.001f)
634  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
635  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu")
636  << ConvolutionLayer(
637  7U, 1U, std::get<1>(b_filters),
638  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
639  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
640  PadStrideInfo(1, 1, 3, 0))
641  .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/convolution")
643  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
644  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
645  nullptr,
646  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
647  0.001f)
648  .set_name(param_path + "/Branch_1/Conv2d_0b_1x7/BatchNorm/batchnorm")
649  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x7/Relu")
650  << ConvolutionLayer(
651  1U, 7U, std::get<2>(b_filters),
652  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
653  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
654  PadStrideInfo(1, 1, 0, 3))
655  .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/convolution")
657  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
658  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
659  nullptr,
660  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
661  0.001f)
662  .set_name(param_path + "/Branch_1/Conv2d_0c_7x1/BatchNorm/batchnorm")
663  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0c_7x1/Relu")
664  << ConvolutionLayer(
665  3U, 3U, std::get<3>(b_filters),
666  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
667  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
668  PadStrideInfo(2, 2, 0, 0))
669  .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/convolution")
671  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
672  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
673  nullptr,
674  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
675  0.001f)
676  .set_name(param_path + "/Branch_1/Conv2d_1a_3x3/BatchNorm/batchnorm")
677  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_1a_3x3/Relu");
678 
679  SubStream i_c(graph);
680  i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL))).set_name(param_path + "/Branch_2/MaxPool_1a_3x3/MaxPool");
681 
682  return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c));
683  }
684 
685  ConcatLayer get_inception_node_E(const std::string &data_path, std::string &&param_path, DataLayout weights_layout,
686  unsigned int a_filt,
687  std::tuple<unsigned int, unsigned int, unsigned int> b_filters,
688  std::tuple<unsigned int, unsigned int, unsigned int, unsigned int> c_filters,
689  unsigned int d_filt,
690  bool is_name_different = false)
691  {
692  // This is due to a naming issue in the tf model
693  std::string conv_id = "_0b_";
694  if(is_name_different)
695  {
696  conv_id = "_0c_";
697  }
698 
699  std::string total_path = "/cnn_data/inceptionv3_model/" + param_path + "_";
700  SubStream i_a(graph);
701  i_a << ConvolutionLayer(
702  1U, 1U, a_filt,
703  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
704  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
705  PadStrideInfo(1, 1, 0, 0))
706  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/convolution")
708  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
709  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
710  nullptr,
711  get_weights_accessor(data_path, total_path + "Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
712  0.001f)
713  .set_name(param_path + "/Branch_0/Conv2d_0a_1x1/BatchNorm/batchnorm")
714  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_0/Conv2d_0a_1x1/Relu");
715 
716  SubStream i_b(graph);
717  i_b << ConvolutionLayer(
718  1U, 1U, std::get<0>(b_filters),
719  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
720  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
721  PadStrideInfo(1, 1, 0, 0))
722  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/convolution")
724  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
725  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
726  nullptr,
727  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
728  0.001f)
729  .set_name(param_path + "/Branch_1/Conv2d_0a_1x1/BatchNorm/batchnorm")
730  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0a_1x1/Relu");
731 
732  SubStream i_b1(i_b);
733  i_b1 << ConvolutionLayer(
734  3U, 1U, std::get<1>(b_filters),
735  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
736  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
737  PadStrideInfo(1, 1, 1, 0))
738  .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/convolution")
740  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
741  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
742  nullptr,
743  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
744  0.001f)
745  .set_name(param_path + "/Branch_1/Conv2d_0b_1x3/BatchNorm/batchnorm")
746  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d_0b_1x3/Relu");
747 
748  SubStream i_b2(i_b);
749  i_b2 << ConvolutionLayer(
750  1U, 3U, std::get<2>(b_filters),
751  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_weights.npy", weights_layout),
752  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
753  PadStrideInfo(1, 1, 0, 1))
754  .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/convolution")
756  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_mean.npy"),
757  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_moving_variance.npy"),
758  nullptr,
759  get_weights_accessor(data_path, total_path + "Branch_1_Conv2d" + conv_id + "3x1_BatchNorm_beta.npy"),
760  0.001f)
761  .set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/BatchNorm/batchnorm")
762  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_1/Conv2d" + conv_id + "3x1/Relu");
763 
764  // Merge b1 and b2
765  i_b << ConcatLayer(std::move(i_b1), std::move(i_b2)).set_name(param_path + "/Branch_1/concat");
766 
767  SubStream i_c(graph);
768  i_c << ConvolutionLayer(
769  1U, 1U, std::get<0>(c_filters),
770  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
771  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
772  PadStrideInfo(1, 1, 0, 0))
773  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/convolution")
775  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
776  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
777  nullptr,
778  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
779  0.001f)
780  .set_name(param_path + "/Branch_2/Conv2d_0a_1x1/BatchNorm/batchnorm")
781  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0a_1x1/Relu")
782  << ConvolutionLayer(
783  3U, 3U, std::get<1>(c_filters),
784  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
785  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
786  PadStrideInfo(1, 1, 1, 1))
787  .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/convolution")
789  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
790  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
791  nullptr,
792  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
793  0.001f)
794  .set_name(param_path + "/Branch_2/Conv2d_0b_3x3/BatchNorm/batchnorm")
795  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0b_3x3/Relu");
796 
797  SubStream i_c1(i_c);
798  i_c1 << ConvolutionLayer(
799  3U, 1U, std::get<2>(c_filters),
800  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_weights.npy", weights_layout),
801  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
802  PadStrideInfo(1, 1, 1, 0))
803  .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/convolution")
805  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_mean.npy"),
806  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_moving_variance.npy"),
807  nullptr,
808  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0c_1x3_BatchNorm_beta.npy"),
809  0.001f)
810  .set_name(param_path + "/Branch_2/Conv2d_0c_1x3/BatchNorm/batchnorm")
811  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0c_1x3/Relu");
812 
813  SubStream i_c2(i_c);
814  i_c2 << ConvolutionLayer(
815  1U, 3U, std::get<3>(c_filters),
816  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_weights.npy", weights_layout),
817  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
818  PadStrideInfo(1, 1, 0, 1))
819  .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/convolution")
821  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_mean.npy"),
822  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_moving_variance.npy"),
823  nullptr,
824  get_weights_accessor(data_path, total_path + "Branch_2_Conv2d_0d_3x1_BatchNorm_beta.npy"),
825  0.001f)
826  .set_name(param_path + "/Branch_2/Conv2d_0d_3x1/BatchNorm/batchnorm")
827  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_2/Conv2d_0d_3x1/Relu");
828 
829  // Merge i_c1 and i_c2
830  i_c << ConcatLayer(std::move(i_c1), std::move(i_c2)).set_name(param_path + "/Branch_2/concat");
831 
832  SubStream i_d(graph);
833  i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL),
834  true))
835  .set_name(param_path + "/Branch_3/AvgPool_0a_3x3/AvgPool")
836  << ConvolutionLayer(
837  1U, 1U, d_filt,
838  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
839  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
840  PadStrideInfo(1, 1, 0, 0))
841  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/convolution")
843  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
844  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
845  nullptr,
846  get_weights_accessor(data_path, total_path + "Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
847  0.001f)
848  .set_name(param_path + "/Branch_3/Conv2d_0b_1x1/BatchNorm/batchnorm")
849  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(param_path + "/Branch_3/Conv2d_0b_1x1/Relu");
850 
851  return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
852  }
853 };
854 
855 /** Main program for Inception V3
856  *
857  * Model is based on:
858  * https://arxiv.org/abs/1512.00567
859  * "Rethinking the Inception Architecture for Computer Vision"
860  * Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens, Zbigniew Wojna
861  *
862  * Provenance: download.tensorflow.org/models/inception_v3_2016_08_28.tar.gz
863  *
864  * @note To list all the possible arguments execute the binary appended with the --help option
865  *
866  * @param[in] argc Number of arguments
867  * @param[in] argv Arguments
868  */
869 int main(int argc, char **argv)
870 {
871  return arm_compute::utils::run_example<InceptionV3Example>(argc, argv);
872 }
Graph configuration structure Device target types.
Definition: Types.h:80
CLTunerMode tuner_mode
Tuner mode to be used by the CL tuner.
Definition: Types.h:87
std::unique_ptr< graph::ITensorAccessor > get_input_accessor(const arm_compute::utils::CommonGraphParams &graph_parameters, std::unique_ptr< IPreprocessor > preprocessor=nullptr, bool bgr=true)
Generates appropriate input accessor according to the specified graph parameters. ...
Definition: GraphUtils.h:497
bool convert_to_uint8
Convert graph to a synthetic uint8 graph.
Definition: Types.h:86
void consume_common_graph_parameters(CommonGraphValidateOptions &options, CommonParams &common_params)
Consumes the consume_common_graph_parameters graph options and creates a structure containing any inf...
Includes all the Graph headers at once.
Common command line options used to configure the graph examples.
int main(int argc, char **argv)
Main program for Inception V3.
Class to parse command line arguments.
std::string mlgo_file
Filename to load MLGO heuristics from.
Definition: Types.h:90
std::string tuner_file
File to load/store tuning values from.
Definition: Types.h:89
quantized, asymmetric fixed-point 8-bit number unsigned
Abstract Example class.
Definition: Utils.h:78
Num samples, channels, height, width.
TensorShape permute_shape(TensorShape tensor_shape, DataLayout in_data_layout, DataLayout out_data_layout)
Permutes a given tensor shape given the input and output data layout.
Definition: GraphUtils.h:664
TensorDescriptor & set_layout(DataLayout data_layout)
Sets tensor descriptor data layout.
Structure holding all the common graph parameters.
std::unique_ptr< graph::ITensorAccessor > get_output_accessor(const arm_compute::utils::CommonGraphParams &graph_parameters, size_t top_n=5, bool is_validation=false, std::ostream &output_stream=std::cout)
Generates appropriate output accessor according to the specified graph parameters.
Definition: GraphUtils.h:543
bool use_tuner
Use a tuner in tunable backends.
Definition: Types.h:85
std::unique_ptr< graph::ITensorAccessor > get_weights_accessor(const std::string &path, const std::string &data_file, DataLayout file_layout=DataLayout::NCHW)
Generates appropriate weights accessor according to the specified path.
Definition: GraphUtils.h:475
int num_threads
Number of threads to use (thread capable backends), if 0 the backend will auto-initialize, if -1 the backend will stay as it is.
Definition: Types.h:88
Stream frontend class to construct simple graphs in a stream fashion.
Definition: Stream.h:45
DataLayout
[DataLayout enum definition]
Definition: Types.h:120
ILayer & set_name(std::string name)
Sets the name of the layer.
Definition: ILayer.h:55