29 #include "utils/Utils.h"
// GoogLeNet (Inception v1) example implemented on top of the Compute Library
// graph frontend. Derives from the examples' Example interface and holds the
// command-line parser, the shared example options/parameters and the graph.
// NOTE(review): this chunk is a partial extraction — original source line
// numbers are embedded in the text and many lines (member declarations,
// braces) are elided; code is preserved byte-for-byte, comments only added.
36 class GraphGooglenetExample :
public Example
// Constructor: wires the option group to the parser and creates graph id 0
// with the human-readable name "GoogleNet".
39 GraphGooglenetExample() : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0,
"GoogleNet")
// Parses command-line options, sets up input preprocessing, builds the full
// GoogLeNet topology (stem convolutions + nine inception blocks) and
// finalizes the graph for the selected backend target.
// NOTE(review): interior lines are elided in this extraction (the embedded
// original line numbers jump); code lines are kept byte-for-byte.
42 bool do_setup(
int argc,
char **argv)
override
// Parse and validate the common example command-line options.
45 cmd_parser.parse(argc, argv);
46 cmd_parser.validate();
// Print usage and exit early when --help was requested.
52 if (common_params.help)
54 cmd_parser.print_help(argv[0]);
// Tail of an elided validation check: quantized QASYMM8 input is rejected
// for this graph.
"QASYMM8 not supported for this graph");
// Echo the effective parameters for reproducibility.
63 std::cout << common_params << std::endl;
// Root directory holding the .npy weight/bias files.
66 std::string data_path = common_params.data_path;
// Per-channel mean values (RGB order) handed to the Caffe-style
// preprocessor for the input images.
69 const std::array<float, 3> mean_rgb{{122.68f, 116.67f, 104.01f}};
70 std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<CaffePreproccessor>(mean_rgb);
// Input description: layout comes from the common parameters; the shape
// initializer itself is elided in this extraction.
73 const auto operation_layout = common_params.data_layout;
74 const TensorShape tensor_shape =
// Network stem (intermediate layers elided): a stride-2 convolution with
// 3-pixel padding (presumably conv1 7x7 — kernel-size args not visible),
// then conv2 reduce/3x3, each followed by a ReLU activation.
82 graph << common_params.target << common_params.fast_math_hint
88 PadStrideInfo(2, 2, 3, 3))
90 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
102 PadStrideInfo(1, 1, 0, 0))
104 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
108 get_weights_accessor(data_path,
"/cnn_data/googlenet_model/conv2/conv2_3x3_w.npy", weights_layout),
110 PadStrideInfo(1, 1, 1, 1))
112 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
// Inception blocks 3a/3b: each helper call builds the four parallel
// branches (1x1, 3x3-reduce/3x3, 5x5-reduce/5x5, pool-proj) with the
// given filter counts and concatenates them.
119 graph << get_inception_node(data_path,
"inception_3a", weights_layout, 64, std::make_tuple(96U, 128U),
120 std::make_tuple(16U, 32U), 32U)
122 graph << get_inception_node(data_path,
"inception_3b", weights_layout, 128, std::make_tuple(128U, 192U),
123 std::make_tuple(32U, 96U), 64U)
// Inception blocks 4a-4e (a pooling layer between stages is elided here).
128 graph << get_inception_node(data_path,
"inception_4a", weights_layout, 192, std::make_tuple(96U, 208U),
129 std::make_tuple(16U, 48U), 64U)
131 graph << get_inception_node(data_path,
"inception_4b", weights_layout, 160, std::make_tuple(112U, 224U),
132 std::make_tuple(24U, 64U), 64U)
134 graph << get_inception_node(data_path,
"inception_4c", weights_layout, 128, std::make_tuple(128U, 256U),
135 std::make_tuple(24U, 64U), 64U)
137 graph << get_inception_node(data_path,
"inception_4d", weights_layout, 112, std::make_tuple(144U, 288U),
138 std::make_tuple(32U, 64U), 64U)
140 graph << get_inception_node(data_path,
"inception_4e", weights_layout, 256, std::make_tuple(160U, 320U),
141 std::make_tuple(32U, 128U), 128U)
// Inception blocks 5a/5b (classifier head after 5b is elided).
146 graph << get_inception_node(data_path,
"inception_5a", weights_layout, 256, std::make_tuple(160U, 320U),
147 std::make_tuple(32U, 128U), 128U)
149 graph << get_inception_node(data_path,
"inception_5b", weights_layout, 384, std::make_tuple(192U, 384U),
150 std::make_tuple(48U, 128U), 128U)
// Graph configuration: honour the tuner and MLGO heuristics settings from
// the command line, then finalize the graph for the chosen target.
166 config.
use_tuner = common_params.enable_tuner;
169 config.
mlgo_file = common_params.mlgo_file;
171 graph.finalize(common_params.target, config);
// Executes the finalized graph.
// NOTE(review): the method body is not visible in this extraction (the
// embedded numbering jumps from 175 to 187) — presumably it just calls
// graph.run(); confirm against the full source file.
175 void do_run()
override
// Builds one GoogLeNet inception block as four parallel sub-streams that are
// concatenated at the end:
//   a) 1x1 convolution,
//   b) 1x1 "3x3_reduce" followed by a 3x3 convolution,
//   c) 1x1 "5x5_reduce" followed by a 5x5 convolution,
//   d) pooling followed by a 1x1 "pool_proj" convolution,
// each convolution followed by a ReLU activation.
//
// @param data_path  Root directory of the .npy weight/bias files.
// @param param_path Name of this block (e.g. "inception_3a"); also used as
//                   the on-disk sub-directory and layer-name prefix.
// @param b_filters  (reduce, main) filter counts for the 3x3 branch.
// @param c_filters  (reduce, main) filter counts for the 5x5 branch.
// NOTE(review): further parameters (weights_layout, a_filt, d_filt) and the
// SubStream declarations are elided in this extraction; code lines below are
// kept byte-for-byte, comments only added.
187 ConcatLayer get_inception_node(
const std::string &data_path,
188 std::string &&param_path,
191 std::tuple<unsigned int, unsigned int> b_filters,
192 std::tuple<unsigned int, unsigned int> c_filters,
// Common file-name prefix: <data_path>/cnn_data/googlenet_model/<name>/<name>_
195 std::string total_path =
"/cnn_data/googlenet_model/" + param_path +
"/" + param_path +
"_";
// Branch a (leading 1x1 convolution elided): ReLU named "<name>/relu_1x1".
201 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
202 .
set_name(param_path +
"/relu_1x1");
// Branch b: 1x1 reduce then 3x3 convolution (weight accessors and the 3x3
// layer itself are partially elided), each with a named ReLU.
205 i_b << ConvolutionLayer(1U, 1U, std::get<0>(b_filters),
208 PadStrideInfo(1, 1, 0, 0))
209 .set_name(param_path +
"/3x3_reduce")
210 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
211 .
set_name(param_path +
"/relu_3x3_reduce")
216 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
217 .
set_name(param_path +
"/relu_3x3");
// Branch c: 1x1 reduce then 5x5 convolution, mirroring branch b.
220 i_c << ConvolutionLayer(1U, 1U, std::get<0>(c_filters),
223 PadStrideInfo(1, 1, 0, 0))
224 .set_name(param_path +
"/5x5_reduce")
225 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
226 .
set_name(param_path +
"/relu_5x5_reduce")
231 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
232 .
set_name(param_path +
"/relu_5x5");
// Branch d: pooling (elided) followed by a 1x1 projection with d_filt
// output feature maps, loading "<prefix>pool_proj_w.npy".
239 1U, 1U, d_filt,
get_weights_accessor(data_path, total_path +
"pool_proj_w.npy", weights_layout),
241 .
set_name(param_path +
"/pool_proj")
242 <<
ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
243 .
set_name(param_path +
"/relu_pool_proj");
// Depth-concatenate the four branches into the block's single output.
245 return ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d));
263 int main(
int argc,
char **argv)
265 return arm_compute::utils::run_example<GraphGooglenetExample>(argc, argv);