/*
 * Copyright (c) 2018 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#include "arm_compute/graph.h"
#include "support/ToolchainSupport.h"
#include "utils/CommonGraphOptions.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

using namespace arm_compute::utils;
using namespace arm_compute::graph::frontend;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement Inception ResNet V2's network using the Compute Library's graph API */
class InceptionResNetV2Example final : public Example
{
public:
    InceptionResNetV2Example()
        : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "InceptionResNetV2")
    {
    }
    bool do_setup(int argc, char **argv) override
    {
        // Parse arguments
        cmd_parser.parse(argc, argv);

        // Consume common parameters
        common_params = consume_common_graph_parameters(common_opts);

        // Return when help menu is requested
        if(common_params.help)
        {
            cmd_parser.print_help(argv[0]);
            return false;
        }

        // Set default layout if needed
        if(!common_opts.data_layout->is_set() && common_params.target == Target::NEON)
        {
            common_params.data_layout = DataLayout::NCHW;
        }

        // Checks
        ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");

        // Print parameter values
        std::cout << common_params << std::endl;

        // Create model path
        std::string data_path  = common_params.data_path;
        std::string model_path = "/cnn_data/inception_resnet_v2_model/";
        if(!data_path.empty())
        {
            data_path += model_path;
        }

        // Create a preprocessor object
        std::unique_ptr<IPreprocessor> preprocessor = arm_compute::support::cpp14::make_unique<TFPreproccessor>(0.f, 1.f);
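        // Note: TFPreproccessor (spelling as in the library's class name) is constructed with
        // (0.f, 1.f) so that the 8-bit input is rescaled into the [0, 1] range this
        // TensorFlow-trained checkpoint expects.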

        // Create input descriptor
        const TensorShape tensor_shape     = permute_shape(TensorShape(299U, 299U, 3U, 1U), DataLayout::NCHW, common_params.data_layout);
        TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(common_params.data_layout);

        // Set weights trained layout
        const DataLayout weights_layout = DataLayout::NCHW;
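        // The .npy weight files were exported with NCHW-ordered tensors; the graph API permutes
        // them to the execution layout where necessary.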

        graph << common_params.target
              << common_params.fast_math_hint
              << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor), false))
              // Conv2d_1a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_1a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(2, 2, 0, 0))
              .set_name("Conv2d_1a_3x3/convolution")
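              // The convolutions carry no bias (nullptr accessor); the beta of the batch
              // normalisation that follows takes its place. The epsilon 0.0010000000474974513f is
              // TensorFlow's default 0.001 rounded to float32, and get_random_accessor(1.f, 1.f)
              // yields a constant 1, standing in for the gamma term the checkpoint does not ship.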
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_1a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_1a_3x3/Relu")
              // Conv2d_2a_3x3
              << ConvolutionLayer(3U, 3U, 32U,
                                  get_weights_accessor(data_path, "Conv2d_2a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_2a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2a_3x3/Relu")
              // Conv2d_2b_3x3
              << ConvolutionLayer(3U, 3U, 64U,
                                  get_weights_accessor(data_path, "Conv2d_2b_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 1, 1))
              .set_name("Conv2d_2b_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_2b_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_2b_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_2b_3x3/Relu")
              // MaxPool_3a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("MaxPool_3a_3x3/MaxPool")
              // Conv2d_3b_1x1
              << ConvolutionLayer(1U, 1U, 80U,
                                  get_weights_accessor(data_path, "Conv2d_3b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_3b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_3b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_3b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_3b_1x1/Relu")
              // Conv2d_4a_3x3
              << ConvolutionLayer(3U, 3U, 192U,
                                  get_weights_accessor(data_path, "Conv2d_4a_3x3_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_4a_3x3/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_4a_3x3_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_4a_3x3/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_4a_3x3/Relu")
              // MaxPool_5a_3x3
              << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("MaxPool_5a_3x3/MaxPool");

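        // Stack the Inception-ResNet units: Mixed_5b, 10x block35 (type A), the Mixed_6a
        // reduction, 20x block17 (type B), the Mixed_7a reduction, then 9 scaled block8 units
        // (type C) followed by one final unscaled block8 without activation.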
        block_mixed_5b(data_path, weights_layout);
        block35_repeat(data_path, weights_layout, 10);
        block_mixed_6a(data_path, weights_layout);
        block17_repeat(data_path, weights_layout, 20);
        block_mixed_7a(data_path, weights_layout);
        block8_repeat(data_path, weights_layout, 9, 0.2f, true);
        block8_repeat(data_path, weights_layout, 1, 1.f, false);

        // Conv2d_7b_1x1
        graph << ConvolutionLayer(1U, 1U, 1536U,
                                  get_weights_accessor(data_path, "Conv2d_7b_1x1_weights.npy", weights_layout),
                                  std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                  PadStrideInfo(1, 1, 0, 0))
              .set_name("Conv2d_7b_1x1/convolution")
              << BatchNormalizationLayer(get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_mean.npy"),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_moving_variance.npy"),
                                         get_random_accessor(1.f, 1.f),
                                         get_weights_accessor(data_path, "Conv2d_7b_1x1_BatchNorm_beta.npy"),
                                         0.0010000000474974513f)
              .set_name("Conv2d_7b_1x1/BatchNorm")
              << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Conv2d_7b_1x1/Relu")
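              // With no pooling size or stride given, PoolingLayerInfo defaults to global
              // average pooling, here over the final 8x8 feature map.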
              << PoolingLayer(PoolingLayerInfo(PoolingType::AVG)).set_name("Logits/AvgPool_1a_8x8")
              << FlattenLayer().set_name("Logits/Flatten")
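              // 1001 outputs: the 1000 ImageNet classes plus the extra background class used by
              // the TensorFlow-Slim checkpoints.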
              << FullyConnectedLayer(
                  1001U,
                  get_weights_accessor(data_path, "Logits_Logits_weights.npy", weights_layout),
                  get_weights_accessor(data_path, "Logits_Logits_biases.npy"))
              .set_name("Logits/Logits")
              << SoftmaxLayer().set_name("Logits/Predictions")
              << OutputLayer(get_output_accessor(common_params, 5));

        // Finalize graph
        GraphConfig config;
        config.num_threads = common_params.threads;
        config.use_tuner   = common_params.enable_tuner;
        config.tuner_file  = common_params.tuner_file;

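        // finalize() validates the graph, configures the workloads for the selected target and
        // allocates the backing memory before the graph can be run.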
        graph.finalize(common_params.target, config);

        return true;
    }

    void do_run() override
    {
        graph.run();
    }

private:
    CommandLineParser  cmd_parser;
    CommonGraphOptions common_opts;
    CommonGraphParams  common_params;
    Stream             graph;

private:
    void block_mixed_5b(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_0/Conv2d_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_0/Conv2d_1x1/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 48U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(5U, 5U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 2, 2))
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_1_Conv2d_0b_5x5_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_1/Conv2d_0b_5x5/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 96U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_2/Conv2d_0c_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::AVG, 3, PadStrideInfo(1, 1, 1, 1, DimensionRoundingType::CEIL), true)).set_name("Mixed_5b/Branch_3/AvgPool_0a_3x3")
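            // For the average pool, the trailing 'true' (exclude_padding) keeps the zero padding
            // out of the average, matching TensorFlow's pooling behaviour.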
            << ConvolutionLayer(1U, 1U, 64U,
                                get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_5b_Branch_3_Conv2d_0b_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_5b/Branch_3/Conv2d_0b_1x1/Relu");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_5b/concat");
    }

    void block_mixed_6a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 256U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_6a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_6a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0), true)).set_name("Mixed_6a/Branch_2/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c)).set_name("Mixed_6a/concat");
    }

    void block_mixed_7a(const std::string &data_path, DataLayout weights_layout)
    {
        // Branch 0
        SubStream i_a(graph);
        i_a << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 384U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_0_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_0/Conv2d_1a_3x3/Relu");

        // Branch 1
        SubStream i_b(graph);
        i_b << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_1_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_1/Conv2d_1a_3x3/Relu");

        // Branch 2
        SubStream i_c(graph);
        i_c << ConvolutionLayer(1U, 1U, 256U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0a_1x1/Relu")
            << ConvolutionLayer(3U, 3U, 288U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(1, 1, 1, 1))
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_0b_3x3/Relu")
            << ConvolutionLayer(3U, 3U, 320U,
                                get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_weights.npy", weights_layout),
                                std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                PadStrideInfo(2, 2, 0, 0))
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/convolution")
            << BatchNormalizationLayer(get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_mean.npy"),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_moving_variance.npy"),
                                       get_random_accessor(1.f, 1.f),
                                       get_weights_accessor(data_path, "Mixed_7a_Branch_2_Conv2d_1a_3x3_BatchNorm_beta.npy"),
                                       0.0010000000474974513f)
            .set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/BatchNorm")
            << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("Mixed_7a/Branch_2/Conv2d_1a_3x3/Relu");

        // Branch 3
        SubStream i_d(graph);
        i_d << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, PadStrideInfo(2, 2, 0, 0, DimensionRoundingType::CEIL), true)).set_name("Mixed_7a/Branch_3/MaxPool_1a_3x3");

        // Concatenate
        graph << ConcatLayer(std::move(i_a), std::move(i_b), std::move(i_c), std::move(i_d)).set_name("Mixed_7a/concat");
    }

    void block35_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_block35_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat/block35_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);
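            // i_r is deliberately left untouched: it carries the block input forward unchanged
            // and acts as the identity shortcut of the residual connection added below.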

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_3x3/Relu");

            // Branch 2
            SubStream i_lc(i_l);
            i_lc << ConvolutionLayer(1U, 1U, 32U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 3U, 48U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0b_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0b_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0b_3x3/Relu")
                 << ConvolutionLayer(3U, 3U, 64U,
                                     get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 1))
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_2_Conv2d_0c_3x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_2/Conv2d_0c_3x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_2/Conv2d_0c_3x3/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb), std::move(i_lc)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 320U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
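                // The LINEAR activation computes a*x + b, so (0.17f, 0.f) implements the
                // residual scaling by 0.17 that the paper uses to stabilise training.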
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.17f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block17_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            unit_path_ss << "Repeat_1_block17_" << (i + 1) << "_";
            std::stringstream unit_name_ss;
            unit_name_ss << "Repeat_1/block17_" << (i + 1) << "/";

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 128U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
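                 // The 7x7 convolution is factorised into a 1x7 followed by a 7x1 convolution to
                 // reduce compute; note the graph API takes width first, so (7U, 1U) is the 1x7.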
                 << ConvolutionLayer(7U, 1U, 160U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 3, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x7_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x7/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x7/Relu")
                 << ConvolutionLayer(1U, 7U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 3))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_7x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_7x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_7x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 1088U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution")
                << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, 0.10f, 0.f)).set_name(unit_name + "mul");

            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add")
                  << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
        }
    }

    void block8_repeat(const std::string &data_path, DataLayout weights_layout, unsigned int num_blocks, float scale, bool has_activation)
    {
        for(unsigned int i = 0; i < num_blocks; ++i)
        {
            std::stringstream unit_path_ss;
            std::stringstream unit_name_ss;
            if(num_blocks != 1)
            {
                unit_path_ss << "Repeat_2_block8_" << (i + 1) << "_";
                unit_name_ss << "Repeat_2/block8_" << (i + 1) << "/";
            }
            else
            {
                unit_path_ss << "Block8_";
                unit_name_ss << "Block8/";
            }

            std::string unit_path = unit_path_ss.str();
            std::string unit_name = unit_name_ss.str();

            // Create left and right substreams
            SubStream i_l(graph);
            SubStream i_r(graph);

            // Branch 0
            SubStream i_la(i_l);
            i_la << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_0_Conv2d_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_0/Conv2d_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_0/Conv2d_1x1/Relu");

            // Branch 1
            SubStream i_lb(i_l);
            i_lb << ConvolutionLayer(1U, 1U, 192U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0a_1x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0a_1x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0a_1x1/Relu")
                 << ConvolutionLayer(3U, 1U, 224U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 1, 0))
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0b_1x3_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0b_1x3/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0b_1x3/Relu")
                 << ConvolutionLayer(1U, 3U, 256U,
                                     get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_weights.npy", weights_layout),
                                     std::unique_ptr<arm_compute::graph::ITensorAccessor>(nullptr),
                                     PadStrideInfo(1, 1, 0, 1))
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/convolution")
                 << BatchNormalizationLayer(get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_mean.npy"),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_moving_variance.npy"),
                                            get_random_accessor(1.f, 1.f),
                                            get_weights_accessor(data_path, unit_path + "Branch_1_Conv2d_0c_3x1_BatchNorm_beta.npy"),
                                            0.0010000000474974513f)
                 .set_name(unit_name + "Branch_1/Conv2d_0c_3x1/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Branch_1/Conv2d_0c_3x1/Relu");

            // Concatenate
            i_l << ConcatLayer(std::move(i_la), std::move(i_lb)).set_name(unit_name + "concat")
                << ConvolutionLayer(1U, 1U, 2080U,
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_weights.npy", weights_layout),
                                    get_weights_accessor(data_path, unit_path + "Conv2d_1x1_biases.npy", weights_layout),
                                    PadStrideInfo(1, 1, 0, 0))
                .set_name(unit_name + "Conv2d_1x1/convolution");

            // Scale result
            if(scale != 1.f)
            {
                i_l << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LINEAR, scale, 0.f)).set_name(unit_name + "mul");
            }

            // Residual add
            graph << EltwiseLayer(std::move(i_l), std::move(i_r), EltwiseOperation::Add).set_name(unit_name + "add");

            // Apply activation if needed
            if(has_activation)
            {
                graph << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name(unit_name + "Relu");
            }
        }
    }
};

/** Main program for Inception ResNet V2
 *
 * Model is based on:
 *      https://arxiv.org/abs/1602.07261
 *      "Inception-v4, Inception-ResNet and the Impact of Residual Connections on Learning"
 *      Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
 *
 * Provenance: download.tensorflow.org/models/inception_resnet_v2_2016_08_30.tar.gz
 *
 * @note To list all the possible arguments execute the binary appended with the --help option
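 *
 * @note Example invocation (all paths below are placeholders):
 *       graph_inception_resnet_v2 --target=NEON --data=/path/to/cnn_data --image=/path/to/input.ppm --labels=/path/to/labels.txt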
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments
 */
int main(int argc, char **argv)
{
    return arm_compute::utils::run_example<InceptionResNetV2Example>(argc, argv);
}