/*
 * Copyright (c) 2018-2020 Arm Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement InceptionResNetV2's network using the Compute Library's graph API */
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);

        // Create input descriptor
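        // The network takes a 299x299 RGB image; the canonical NCHW shape below is
        // permuted to whichever data layout (NCHW or NHWC) was selected on the command line.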
        const auto        operation_layout = common_params.data_layout;
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, operation_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

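        // Body of the network: Mixed_5b, then 10 Inception-ResNet-A units (block35) on the
        // 35x35 grid, the Mixed_6a reduction, 20 Inception-ResNet-B units (block17) on the
        // 17x17 grid, the Mixed_7a reduction, and finally 10 Inception-ResNet-C units
        // (block8) on the 8x8 grid, the last of which is unscaled and has no ReLU.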
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                                  get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, operation_layout)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
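        // Finalizing validates the constructed graph for the chosen target, configures the
        // backend workloads and allocates the required memory; the graph can then be run.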
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
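    /** Mixed_5b: the first inception module; four parallel branches are concatenated into a
     *  320-channel output on the 35x35 grid */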
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, common_params.data_layout, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
            << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
    }

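    /** Mixed_6a: reduction module that moves from the 35x35 grid to the 17x17 grid; the
     *  strided convolution branches and the max-pooled input concatenate to 1088 channels */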
    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

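    /** Mixed_7a: reduction module that moves from the 17x17 grid to the 8x8 grid; its
     *  concatenated output has 2080 channels (384 + 288 + 320 plus the 1088 pooled input) */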
    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, common_params.data_layout, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }

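    /** Inception-ResNet-A (block35) units: each unit adds a scaled residual (a LINEAR
     *  activation with a = 0.17) computed by three small branches back onto its input */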
    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (residual) and right (identity) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

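            // i_l now carries the residual branch scaled by 0.17 (the LINEAR activation
            // computes a*x + b with a = 0.17f, b = 0.f) while i_r still carries the
            // unmodified block input; their eltwise sum forms the residual connection.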
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

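    /** Inception-ResNet-B (block17) units on the 17x17 grid: a 1x1 branch and a factorised
     *  1x7 / 7x1 branch form the residual, scaled by 0.10 before the addition */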
    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (residual) and right (identity) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(7U, 1U, 160U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

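    /** Inception-ResNet-C (block8) units on the 8x8 grid: the residual is multiplied by
     *  @p scale when it differs from 1, and the final ReLU is skipped when
     *  @p has_activation is false (as in the last unit of the network) */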
    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left (residual) and right (identity) substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all the possible arguments, run the binary with the --help option
 *
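 * Example invocation (a sketch: the options below come from CommonGraphOptions and the
 * paths are placeholders):
 *   graph_inception_resnet_v2 --target=NEON --threads=4 --data=/path/to/cnn_data \
 *                             --image=/path/to/input.ppm --labels=/path/to/labels.txt
 *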
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}