//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "JsonPrinterTestImpl.hpp"

#include <Profiling.hpp>

#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>

#include <boost/algorithm/string.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/test/unit_test.hpp>

#include <sstream>
#include <stack>
#include <string>

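// Returns true if 'opening' and 'closing' form a matching brace or bracket pair.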
inline bool AreMatchingPair(const char opening, const char closing)
{
    return (opening == '{' && closing == '}') || (opening == '[' && closing == ']');
}

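// Checks that every '{' or '[' in the expression is closed by the matching '}' or ']' in the correct order.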
bool AreParenthesesMatching(const std::string& exp)
{
    std::stack<char> expStack;
    for (size_t i = 0; i < exp.length(); ++i)
    {
        if (exp[i] == '{' || exp[i] == '[')
        {
            expStack.push(exp[i]);
        }
        else if (exp[i] == '}' || exp[i] == ']')
        {
            if (expStack.empty() || !AreMatchingPair(expStack.top(), exp[i]))
            {
                return false;
            }
            else
            {
                expStack.pop();
            }
        }
    }
    return expStack.empty();
}

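// Collects every numeric value found inside "[...]" arrays of the JSON string and returns them as doubles.
// The test fails if a collected value cannot be parsed.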
std::vector<double> ExtractMeasurements(const std::string& exp)
{
    std::vector<double> numbers;
    bool inArray = false;
    std::string numberString;
    for (size_t i = 0; i < exp.size(); ++i)
    {
        if (exp[i] == '[')
        {
            inArray = true;
        }
        else if (exp[i] == ']' && inArray)
        {
            try
            {
                boost::trim_if(numberString, boost::is_any_of("\t,\n"));
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const& e)
            {
                BOOST_FAIL("Could not convert measurements to double: " + numberString);
            }

            numberString.clear();
            inArray = false;
        }
        else if (exp[i] == ',' && inArray)
        {
            try
            {
                boost::trim_if(numberString, boost::is_any_of("\t,\n"));
                numbers.push_back(std::stod(numberString));
            }
            catch (std::invalid_argument const& e)
            {
                BOOST_FAIL("Could not convert measurements to double: " + numberString);
            }
            numberString.clear();
        }
        else if (exp[i] != '[' && inArray && exp[i] != ',' && exp[i] != ' ')
        {
            numberString += exp[i];
        }
    }
    return numbers;
}

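// Splits the JSON string into its "{...}" sections, innermost sections first, so each can be checked individually.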
std::vector<std::string> ExtractSections(const std::string& exp)
{
    std::vector<std::string> sections;

    std::stack<size_t> s;
    for (size_t i = 0; i < exp.size(); i++)
    {
        if (exp.at(i) == '{')
        {
            s.push(i);
        }
        else if (exp.at(i) == '}')
        {
            size_t from = s.top();
            s.pop();
            sections.push_back(exp.substr(from, i - from + 1));
        }
    }

    return sections;
}

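// Builds a minimal input -> softmax -> output network, runs three inferences on the given backend
// with profiling enabled, and returns the profiler's JSON output.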
std::string GetSoftmaxProfilerJson(const std::vector<armnn::BackendId>& backends)
{
    using namespace armnn;

    BOOST_CHECK(!backends.empty());

    ProfilerManager& profilerManager = armnn::ProfilerManager::GetInstance();

    // Create runtime in which test will run
    IRuntime::CreationOptions options;
    options.m_EnableGpuProfiling = backends.front() == armnn::Compute::GpuAcc;
    IRuntimePtr runtime(IRuntime::Create(options));

    // build up the structure of the network
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");
    IConnectableLayer* softmax = net->AddSoftmaxLayer(SoftmaxDescriptor(), "softmax");
    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    // set the tensors in the network
    TensorInfo inputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
    inputTensorInfo.SetQuantizationOffset(100);
    inputTensorInfo.SetQuantizationScale(10000.0f);
    input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);

    TensorInfo outputTensorInfo(TensorShape({1, 5}), DataType::QuantisedAsymm8);
    outputTensorInfo.SetQuantizationOffset(0);
    outputTensorInfo.SetQuantizationScale(1.0f / 256.0f);
    softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    // optimize the network
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
    if (!optNet)
    {
        BOOST_FAIL("Error occurred during Optimization, Optimize() returned nullptr.");
    }
    // load it into the runtime
    NetworkId netId;
    auto error = runtime->LoadNetwork(netId, std::move(optNet));
    BOOST_TEST(error == Status::Success);

    // create structures for input & output
    std::vector<uint8_t> inputData
    {
        1, 10, 3, 200, 5
        // one of the inputs is sufficiently larger than the others to saturate softmax
    };
    std::vector<uint8_t> outputData(5);

    armnn::InputTensors inputTensors
    {
        {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
    };
    armnn::OutputTensors outputTensors
    {
        {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
    };

    runtime->GetProfiler(netId)->EnableProfiling(true);

    // do the inferences
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
    runtime->EnqueueWorkload(netId, inputTensors, outputTensors);

    // retrieve the Profiler.Print() output
    std::stringstream ss;
    profilerManager.GetProfiler()->Print(ss);

    return ss.str();
}

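// Checks the structure of the profiler JSON: measurements are present, every section carries
// "raw" and "unit" tags, and, once the volatile timing digits are stripped, the output matches
// the expected layout in 'testData'.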
inline void ValidateProfilerJson(std::string& result, const std::string& testData)
{
    // ensure measurements are present
    std::vector<double> measurementsVector = ExtractMeasurements(result);
    BOOST_CHECK(!measurementsVector.empty());

    // check sections contain raw and unit tags
    // first ensure parentheses are balanced
    if (AreParenthesesMatching(result))
    {
        // remove parent sections that will not have raw or unit tags
        std::vector<std::string> sectionVector = ExtractSections(result);
        for (size_t i = 0; i < sectionVector.size(); ++i)
        {
            if (boost::contains(sectionVector[i], "\"ArmNN\":")
                || boost::contains(sectionVector[i], "\"inference_measurements\":"))
            {
                sectionVector.erase(sectionVector.begin() + static_cast<int>(i));
            }
        }
        BOOST_CHECK(!sectionVector.empty());

        BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](std::string i) { return boost::contains(i, "\"raw\":"); }));

        BOOST_CHECK(std::all_of(sectionVector.begin(), sectionVector.end(),
                                [](std::string i) { return boost::contains(i, "\"unit\":"); }));
    }

    // remove the time measurements as they vary from test to test
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '.'; }), result.end());
    result.erase(std::remove_if(result.begin(), result.end(), &isdigit), result.end());
    result.erase(std::remove_if(result.begin(), result.end(),
                                [](char c) { return c == '\t'; }), result.end());

    BOOST_CHECK(boost::contains(result, "ArmNN"));
    BOOST_CHECK(boost::contains(result, "inference_measurements"));
    BOOST_CHECK(boost::contains(result, "layer_measurements"));
    BOOST_CHECK_EQUAL(result, testData);

    // ensure no spare parentheses are present in the print output
    BOOST_CHECK(AreParenthesesMatching(result));
}

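// Test entry point: generates the profiler JSON for the softmax network and compares it against the
// expected layout for the Ref, Cl (GpuAcc) or Neon (CpuAcc) backend. The changeLine strings splice the
// backend-specific sections into the expected output.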
void RunSoftmaxProfilerJsonPrinterTest(const std::vector<armnn::BackendId>& backends)
{
    // setup the test fixture and obtain JSON Printer result
    std::string result = GetSoftmaxProfilerJson(backends);

    std::string backend = "Ref";
    std::string changeLine31 = "\n},\n\"CopyMemGeneric_Execute\": {";
    std::string changeLine39 = "us\"";
    std::string changeLine40;
    std::string changeLine45;

    const armnn::BackendId& firstBackend = backends.at(0);
    if (firstBackend == armnn::Compute::GpuAcc)
    {
        backend = "Cl";
        changeLine31 = ",\n\"OpenClKernelTimer/: softmax_layer_max_shift_exp_sum_quantized_serial GWS[,,]\": {";
        changeLine39 = R"(us"
},
"OpenClKernelTimer/: softmax_layer_norm_quantized GWS[,,]": {
"raw": [
,
,

],
"unit": "us")";

        changeLine40 = R"(
},
"CopyMemGeneric_Execute": {
"raw": [
,
,

],
"unit": "us")";
        changeLine45 = "}\n";
    }
    else if (firstBackend == armnn::Compute::CpuAcc)
    {
        backend = "Neon";
        changeLine31 = ",\n\"NeonKernelTimer/: NEFillBorderKernel\": {";
        changeLine39 = R"(us"
},
"NeonKernelTimer/: NELogitsDMaxKernel": {
"raw": [
,
,

],
"unit": "us"
},
"NeonKernelTimer/: NELogitsDSoftmaxKernel": {
"raw": [
,
,

],
"unit": "us")";
        changeLine40 = R"(
},
"CopyMemGeneric_Execute": {
"raw": [
,
,

],
"unit": "us")";
        changeLine45 = "}\n";
    }

    std::string testData = R"({
"ArmNN": {
"inference_measurements": {
"raw": [
,
,

],
"unit": "us",
"layer_measurements": {
"raw": [
,
,

],
"unit": "us",
"CopyMemGeneric_Execute": {
"raw": [
,
,

],
"unit": "us"
},
")" + backend + R"(SoftmaxUintWorkload_Execute": {
"raw": [
,
,

],
"unit": "us")" + changeLine31 + R"(
"raw": [
,
,

],
"unit": ")" + changeLine39 + R"(
})" + changeLine40 + R"(
}
}
}
}
)" + changeLine45 + R"()";

    // validate the JSON Printer result
    ValidateProfilerJson(result, testData);
}