/*
 * Copyright (c) 2018-2019 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement InceptionResNetV2's network using the Compute Library's graph API */
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);
        cmd_parser.validate();

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);
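        // The TFPreproccessor arguments are the target value range: input pixel
        // values are rescaled to [0, 1], the range the converted weights assume.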

        // Create input descriptor
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);
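        // permute_shape() converts the NCHW-specified 299x299x3 input shape into the
        // data layout requested on the command line.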

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;
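        // The .npy weight files are stored in NCHW; they are loaded with this layout
        // and converted by the framework if the execution layout above differs.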

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
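              // Each Conv/BN/ReLU triple mirrors a TF-slim conv2d: the convolution is
              // bias-free (nullptr accessor), batch normalization uses an epsilon of
              // ~1e-3, and gamma is fixed to 1 via get_random_accessor(1.f, 1.f)
              // because the checkpoint stores no scale term.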
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

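        // Network body: the Inception-A block (Mixed_5b), 10 block35 residual units,
        // reduction Mixed_6a, 20 block17 units, reduction Mixed_7a, and finally 10
        // block8 units, the last of which is unscaled and has no ReLU.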
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

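        // Network head: a 1x1 convolution to 1536 channels, global average pooling
        // (a PoolingLayerInfo constructed with only the pooling type pools over the
        // whole map), a 1001-way fully connected classifier (ImageNet plus background
        // class) and a softmax; the output accessor prints the top-5 predictions.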
        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                                  get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
              << FullyConnectedLayer(
                     1001U,
                     get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                     get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_mode  = common_params.tuner_mode;
        config.tuner_file  = common_params.tuner_file;
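        // Note: the tuner settings only take effect when the target is CL (OpenCL).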

        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
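        // Each SubStream forks from the current tail of the stream; the parallel
        // branches below are re-joined by the ConcatLayer at the end of this function.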
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
            << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
    }

    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
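        // First reduction block: stride-2 convolutions and a stride-2 max pool halve
        // the feature grid (35x35 -> 17x17 for a 299x299 input).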
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
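        // Second reduction block: shrinks the feature grid again (17x17 -> 8x8).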
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }

    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
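        // Inception-ResNet-A unit: three parallel branches are concatenated, projected
        // back to 320 channels by a biased 1x1 convolution, scaled by 0.17, and added
        // to the unmodified input carried by the i_r substream.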
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");
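            // The LINEAR activation above computes a * x + b with a = 0.17 and b = 0,
            // i.e. the residual scaling recommended in the Inception-ResNet paper.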

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
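                 // 1x7 followed by 7x1: a factorized 7x7 convolution. ConvolutionLayer
                 // takes (width, height, ofm), so (7U, 1U, ...) is the 1x7 kernel.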
                 << ConvolutionLayer(7U, 1U, 160U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
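            // (implemented as a LINEAR activation, f(x) = scale * x + 0; skipped for
            // the final Block8, which is invoked with scale == 1)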
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
 *
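 * Example invocation (illustrative; the binary name and asset paths are assumptions):
 *     ./graph_inception_resnet_v2 --data=/path/to/assets --image=/path/to/image.ppm --labels=/path/to/labels.txt --target=NEON --threads=4
 *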
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}