/*
 * Copyright (c) 2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

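// Epsilon used by every BatchNormalizationLayer below; 0.0010000000474974513f is the float32 representation of 0.001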
const float batch_norm_epsilon = 0.0010000000474974513f;

/** Example demonstrating how to implement Inception ResNet V1 network using the Compute Library's graph API */
class InceptionResNetV1Example final : public Example
{
public:
    InceptionResNetV1Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), model_input_width(nullptr), model_input_height(nullptr), graph(0, "InceptionResNetV1")
    {
        model_input_width  = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512);
        model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);

        // Set help messages for the input size options
        model_input_width->set_help("Input image width.");
        model_input_height->set_help("Input image height.");
    }
    InceptionResNetV1Example(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example(InceptionResNetV1Example &&) = default; // NOLINT
    InceptionResNetV1Example &operator=(InceptionResNetV1Example &&) = default; // NOLINT
    ~InceptionResNetV1Example() override = default;
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Get input image width and height
        const unsigned int image_width  = model_input_width->value();
        const unsigned int image_height = model_input_height->value();

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;
        std::cout << "Image width: " << image_width << std::endl;
        std::cout << "Image height: " << image_height << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v1_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
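        // TFPreproccessor rescales input pixel values from [0, 255] into the requested range, here [0, 1]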
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor
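        // The shape is specified in NCHW order and permuted to the data layout selected at run time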
        const TensorShape tensor_shape     = permute_shape(TensorShape(image_width, image_height, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // Conv2d_4b_3x3
              << ConvolutionLayer(3U, 3U, 256U,
                                  get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_4b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4b_3x3/Relu");

        // 5 x Inception-resnet-A
        block35_repeat(data_path, weights_layout, 5);
        // Reduction-A
        reduction_a(data_path, weights_layout);
        // 10 x Inception-resnet-B
        block17_repeat(data_path, weights_layout, 10);
        // Reduction-B
        reduction_b(data_path, weights_layout);
        // 5 x Inception-resnet-C
        block8_repeat(data_path, weights_layout, 5, 0.2f, true);

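        // A final Inception-resnet-C (block8) unit without scaling or a trailing activation; its weights live under the "Block8_" prefix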
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Logits tail
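        // A PoolingLayerInfo constructed with only a pooling type performs global pooling over the whole spatial extent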
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  128U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << OutputLayer(arm_compute::support::cpp14::make_unique<DummyAccessor>(0));
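        // DummyAccessor simply consumes the output tensor; no reference output is validated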

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    SimpleOption<unsigned int> *model_input_width{ nullptr };
    SimpleOption<unsigned int> *model_input_height{ nullptr };
    Stream graph;

private:
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams; the right substream carries the unmodified block input for the residual add
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 256U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
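                // A LINEAR activation computes a*x + b, so (0.17f, 0.f) multiplies this residual branch by 0.17; the other block types use the same trick for their scale factors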
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams; the right substream carries the unmodified block input for the residual add
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 896U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams; the right substream carries the unmodified block input for the residual add
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1792U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }

    void reduction_a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

    void reduction_b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }
};

/** Main program for Inception ResNet V1
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
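 * Example invocation (the binary name is an assumption about the examples build output;
 * --image-width and --image-height are the options registered in this file):
 *     ./graph_inception_resnet_v1 --image-width=512 --image-height=512
 *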
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);
}