/*
 * Copyright (c) 2017 ARM Limited.
 *
 * SPDX-License-Identifier: MIT
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
#ifndef ARM_COMPUTE_CL /* Needed by Utils.cpp to handle OpenCL exceptions properly */
#error "This example needs to be built with -DARM_COMPUTE_CL"
#endif /* ARM_COMPUTE_CL */

#include "arm_compute/graph/Graph.h"
#include "arm_compute/graph/Nodes.h"
#include "arm_compute/runtime/CL/CLScheduler.h"
#include "support/ToolchainSupport.h"
#include "utils/GraphUtils.h"
#include "utils/Utils.h"

#include <cstdlib>

using namespace arm_compute::graph;
using namespace arm_compute::graph_utils;

/** Example demonstrating how to implement the VGG16 network using the Compute Library's graph API
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] image, [optional] labels )
 */
void main_graph_vgg16(int argc, const char **argv)
{
    std::string data_path; /* Path to the trainable data */
    std::string image;     /* Image data */
    std::string label;     /* Label data */

    constexpr float mean_r = 123.68f;  /* Mean value to subtract from red channel */
    constexpr float mean_g = 116.779f; /* Mean value to subtract from green channel */
    constexpr float mean_b = 103.939f; /* Mean value to subtract from blue channel */

    // Parse arguments
    if(argc < 2)
    {
        // Print help
        std::cout << "Usage: " << argv[0] << " [path_to_data] [image] [labels]\n\n";
        std::cout << "No data folder provided: using random values\n\n";
    }
    else if(argc == 2)
    {
        data_path = argv[1];
        std::cout << "Usage: " << argv[0] << " " << argv[1] << " [image] [labels]\n\n";
        std::cout << "No image provided: using random values\n\n";
    }
    else if(argc == 3)
    {
        data_path = argv[1];
        image     = argv[2];
        std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [labels]\n\n";
        std::cout << "No text file with labels provided: skipping output accessor\n\n";
    }
    else
    {
        data_path = argv[1];
        image     = argv[2];
        label     = argv[3];
    }
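    // Typical invocation (paths are illustrative, not shipped with the library):
    //   graph_vgg16 /path/to/assets /path/to/image.ppm /path/to/labels.txt
    // With fewer arguments the example falls back to random data, as the messages above indicate.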

    // Check if OpenCL is available and initialize the scheduler
    TargetHint hint = TargetHint::NEON;
    if(arm_compute::opencl_is_available())
    {
        arm_compute::CLScheduler::get().default_init();
        hint = TargetHint::OPENCL;
    }
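    // default_init() sets up a default OpenCL context and command queue; if OpenCL is not
    // available the hint stays at TargetHint::NEON and the whole graph runs on the NEON backend.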

    Graph graph;

    graph << hint
          << Tensor(TensorInfo(TensorShape(224U, 224U, 3U, 1U), 1, DataType::F32),
                    get_input_accessor(image, mean_r, mean_g, mean_b))
          << ConvolutionMethodHint::DIRECT
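          // The input accessor feeds the (optional) image into the 224x224x3 input tensor and
          // subtracts the per-channel means defined above; the DIRECT hint asks the graph to
          // prefer the direct convolution method where it is supported.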
          // Layer 1
          << ConvolutionLayer(
              3U, 3U, 64U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_1_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 2
          << ConvolutionLayer(
              3U, 3U, 64U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv1_2_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
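          // Standard VGG16 shapes: the 3x3 convolutions use pad 1 / stride 1 and keep 224x224,
          // while each 2x2 max pool with stride 2 halves the spatial size, giving 112x112x64 here.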
          // Layer 3
          << ConvolutionLayer(
              3U, 3U, 128U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_1_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 4
          << ConvolutionLayer(
              3U, 3U, 128U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv2_2_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
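          // 56x56x128 after this pooling layer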
          // Layer 5
          << ConvolutionLayer(
              3U, 3U, 256U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_1_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 6
          << ConvolutionLayer(
              3U, 3U, 256U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_2_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 7
          << ConvolutionLayer(
              3U, 3U, 256U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv3_3_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
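          // 28x28x256 after this pooling layer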
          // Layer 8
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_1_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 9
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_2_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 10
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv4_3_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
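          // 14x14x512 after this pooling layer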
          // Layer 11
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_1_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 12
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_2_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 13
          << ConvolutionLayer(
              3U, 3U, 512U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/conv5_3_b.npy"),
              PadStrideInfo(1, 1, 1, 1))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 2, PadStrideInfo(2, 2, 0, 0)))
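          // 7x7x512 after this pooling layer (7*7*512 = 25088 inputs feeding the first fully connected layer)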
          // Layer 14
          << FullyConnectedLayer(
              4096U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc6_b.npy"))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 15
          << FullyConnectedLayer(
              4096U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc7_b.npy"))
          << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU))
          // Layer 16
          << FullyConnectedLayer(
              1000U,
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_w.npy"),
              get_weights_accessor(data_path, "/cnn_data/vgg16_model/fc8_b.npy"))
          // Softmax
          << SoftmaxLayer()
          << Tensor(get_output_accessor(label, 5));
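    // If a labels file was provided, the output accessor is expected to report the top-5
    // predictions over the 1000 ImageNet classes; otherwise the softmax output is simply consumed.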

    // Run graph
    graph.run();
}

/** Main program for VGG16
 *
 * @param[in] argc Number of arguments
 * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] image, [optional] labels )
 */
int main(int argc, const char **argv)
{
    return arm_compute::utils::run_example(argc, argv, main_graph_vgg16);
}