/*
 * Copyright (c) 2018-2020 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

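// Batch normalization epsilon shared by every BatchNormalizationLayer below
// (the float32 representation of 1e-3)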
const float batch_norm_epsilon = 0.0010000000474974513f;

/** Example demonstrating how to implement Inception ResNet V1 network using the Compute Library's graph API */
class InceptionResNetV1Example final : public Example
{
public:
    InceptionResNetV1Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), model_input_width(nullptr), model_input_height(nullptr), graph(0, "InceptionResNetV1")
    {
        model_input_width  = cmd_parser.add_option<SimpleOption<unsigned int>>("image-width", 512);
        model_input_height = cmd_parser.add_option<SimpleOption<unsigned int>>("image-height", 512);

        // Set the help messages for the input size options
        model_input_width->set_help("Input image width.");
        model_input_height->set_help("Input image height.");
    }
    InceptionResNetV1Example(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example &operator=(const InceptionResNetV1Example &) = delete;
    InceptionResNetV1Example(InceptionResNetV1Example &&) = default; // NOLINT
    InceptionResNetV1Example &operator=(InceptionResNetV1Example &&) = default; // NOLINT
    ~InceptionResNetV1Example() override = default;
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }
        // Get input image width and height
        const unsigned int image_width  = model_input_width->value();
        const unsigned int image_height = model_input_height->value();

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;
        std::cout << "Image width: " << image_width << std::endl;
        std::cout << "Image height: " << image_height << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v1_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
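        // TFPreproccessor (spelling as in GraphUtils.h) maps the input values into the range given here, [0.f, 1.f]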
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor
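        // permute_shape reorders the NCHW-described shape to the operation layout when they differ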
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(image_width, image_height, 3U, 1U), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // Conv2d_4b_3x3
              << ConvolutionLayer(3U, 3U, 256U,
                                  get_weights_accessor(data_path, "Conv2d_4b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_4b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4b_3x3_BatchNorm_beta.npy"),
                                         batch_norm_epsilon)
              .set_name("Conv2d_4b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4b_3x3/Relu");

        // 5 x Inception-ResNet-A
        block35_repeat(data_path, weights_layout, 5);
        // Reduction-A
        reduction_a(data_path, weights_layout);
        // 10 x Inception-ResNet-B
        block17_repeat(data_path, weights_layout, 10);
        // Reduction-B
        reduction_b(data_path, weights_layout);
        // 5 x Inception-ResNet-C
        block8_repeat(data_path, weights_layout, 5, 0.2f, true);

        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Logits tail
        graph << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  128U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
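              // The network output is discarded here; DummyAccessor performs no work on the tensor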
              << OutputLayer(arm_compute::support::cpp14::make_unique<DummyAccessor>(0));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    SimpleOption<unsigned int> *model_input_width{ nullptr };
    SimpleOption<unsigned int> *model_input_height{ nullptr };
    Stream graph;

private:
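    /** Appends @p num_blocks Inception-ResNet-A (block35) residual units to the graph */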
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 256U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

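    /** Appends @p num_blocks Inception-ResNet-B (block17) residual units to the graph */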
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 896U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

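    /** Appends @p num_blocks Inception-ResNet-C (block8) residual units, scaling each residual
     *  branch by @p scale and applying the final ReLU only when @p has_activation is true */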
    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            batch_norm_epsilon)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1792U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }

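    /** Appends the Reduction-A (Mixed_6a) grid-reduction module to the graph */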
    void reduction_a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 192U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

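    /** Appends the Reduction-B (Mixed_7a) grid-reduction module to the graph */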
    void reduction_b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       batch_norm_epsilon)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }
};

/** Main program for Inception ResNet V1
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
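 *
 * Illustrative invocation (the binary name depends on the build; --target and --threads
 * come from CommonGraphOptions, the image size options from this example):
 *   graph_inception_resnet_v1 --target=NEON --threads=4 --image-width=160 --image-height=160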
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV1Example>(argc, argv);
}