//
// Copyright © 2020 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include "NMS.hpp"

#include <stb/stb_image.h>

#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/Logging.hpp>
#include <armnn/utility/IgnoreUnused.hpp>

#include <cxxopts/cxxopts.hpp>
#include <ghc/filesystem.hpp>

#include <chrono>
#include <cmath>
#include <fstream>
#include <iostream>
#include <stdlib.h>

using namespace armnnTfLiteParser;
using namespace armnn;

static const int OPEN_FILE_ERROR = -2;
static const int OPTIMIZE_NETWORK_ERROR = -3;
static const int LOAD_NETWORK_ERROR = -4;
static const int LOAD_IMAGE_ERROR = -5;
static const int GENERAL_ERROR = -100;

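// CHECK_OK wraps a call that returns one of the error codes above: a non-zero result
// is propagated to the caller, and any armnn::Exception is logged and mapped to
// GENERAL_ERROR.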
#define CHECK_OK(v)                                     \
    do {                                                \
        try {                                           \
            auto r_local = v;                           \
            if (r_local != 0) { return r_local;}        \
        }                                               \
        catch (const armnn::Exception& e)               \
        {                                               \
            ARMNN_LOG(error) << "Oops: " << e.what();   \
            return GENERAL_ERROR;                       \
        }                                               \
    } while(0)


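// Pairs each input binding point with the caller-owned buffer holding its data,
// producing the InputTensors structure expected by IRuntime::EnqueueWorkload.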
template<typename TContainer>
inline armnn::InputTensors MakeInputTensors(const std::vector<armnn::BindingPointInfo>& inputBindings,
                                            const std::vector<std::reference_wrapper<TContainer>>& inputDataContainers)
{
    armnn::InputTensors inputTensors;

    const size_t numInputs = inputBindings.size();
    if (numInputs != inputDataContainers.size())
    {
        throw armnn::Exception("Mismatching vectors");
    }

    for (size_t i = 0; i < numInputs; i++)
    {
        const armnn::BindingPointInfo& inputBinding = inputBindings[i];
        const TContainer& inputData = inputDataContainers[i].get();

        armnn::ConstTensor inputTensor(inputBinding.second, inputData.data());
        inputTensors.push_back(std::make_pair(inputBinding.first, inputTensor));
    }

    return inputTensors;
}

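// Same idea for the outputs: each binding point is paired with the buffer the runtime
// will write into. The const_cast below is needed because armnn::Tensor wants a mutable
// pointer while the container is accessed through a const reference here.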
template<typename TContainer>
inline armnn::OutputTensors MakeOutputTensors(
    const std::vector<armnn::BindingPointInfo>& outputBindings,
    const std::vector<std::reference_wrapper<TContainer>>& outputDataContainers)
{
    armnn::OutputTensors outputTensors;

    const size_t numOutputs = outputBindings.size();
    if (numOutputs != outputDataContainers.size())
    {
        throw armnn::Exception("Mismatching vectors");
    }

    outputTensors.reserve(numOutputs);

    for (size_t i = 0; i < numOutputs; i++)
    {
        const armnn::BindingPointInfo& outputBinding = outputBindings[i];
        const TContainer& outputData = outputDataContainers[i].get();

        armnn::Tensor outputTensor(outputBinding.second, const_cast<float*>(outputData.data()));
        outputTensors.push_back(std::make_pair(outputBinding.first, outputTensor));
    }

    return outputTensors;
}

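// Parses a TfLite flatbuffer, optimizes it for the preferred backends and loads it into
// the runtime. When enableImport is true the optimizer and runtime are configured to
// import/export tensor memory, so user-supplied buffers can be used directly where the
// backend supports it.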
int LoadModel(const char* filename,
              ITfLiteParser& parser,
              IRuntime& runtime,
              NetworkId& networkId,
              const std::vector<BackendId>& backendPreferences,
              bool enableImport = false)
{
    std::ifstream stream(filename, std::ios::in | std::ios::binary);
    if (!stream.is_open())
    {
        ARMNN_LOG(error) << "Could not open model: " << filename;
        return OPEN_FILE_ERROR;
    }

    std::vector<uint8_t> contents((std::istreambuf_iterator<char>(stream)), std::istreambuf_iterator<char>());
    stream.close();

    auto model = parser.CreateNetworkFromBinary(contents);
    contents.clear();
    ARMNN_LOG(debug) << "Model loaded ok: " << filename;

    // Optimize the model
    OptimizerOptions options;
    options.m_ImportEnabled = enableImport;
    auto optimizedModel = Optimize(*model, backendPreferences, runtime.GetDeviceSpec(), options);
    if (!optimizedModel)
    {
        ARMNN_LOG(fatal) << "Could not optimize the model: " << filename;
        return OPTIMIZE_NETWORK_ERROR;
    }

    // Load model into runtime
    {
        std::string errorMessage;
        INetworkProperties modelProps(enableImport, enableImport);
        Status status = runtime.LoadNetwork(networkId, std::move(optimizedModel), errorMessage, modelProps);
        if (status != Status::Success)
        {
            ARMNN_LOG(fatal) << "Could not load " << filename << " model into runtime: " << errorMessage;
            return LOAD_NETWORK_ERROR;
        }
    }

    return 0;
}

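// Loads a 1920x1080 RGB image with stb_image and converts it to a flat vector of floats
// in the [0, 1] range, which is what this sample feeds to the backbone. Returns an empty
// vector on failure.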
std::vector<float> LoadImage(const char* filename)
{
    struct Memory
    {
        ~Memory() {stbi_image_free(m_Data);}
        bool IsLoaded() const { return m_Data != nullptr;}

        unsigned char* m_Data;
    };

    std::vector<float> image;

    int width;
    int height;
    int channels;

    Memory mem = {stbi_load(filename, &width, &height, &channels, 3)};
    if (!mem.IsLoaded())
    {
        ARMNN_LOG(error) << "Could not load input image file: " << filename;
        return image;
    }

    if (width != 1920 || height != 1080 || channels != 3)
    {
        ARMNN_LOG(error) << "Input image has wrong dimensions: " << width << "x" << height << "x" << channels
                         << ". Expected 1920x1080x3.";
        return image;
    }

    image.resize(1920*1080*3);

    // Expand to float. Does this need de-gamma?
    for (unsigned int idx = 0; idx < 1920*1080*3; idx++)
    {
        image[idx] = static_cast<float>(mem.m_Data[idx]) / 255.0f;
    }

    return image;
}


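// Returns true only if the given path exists and points to a regular file; otherwise
// an error is printed to stderr.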
bool ValidateFilePath(std::string& file)
{
    if (!ghc::filesystem::exists(file))
    {
        std::cerr << "Given file path " << file << " does not exist" << std::endl;
        return false;
    }
    if (!ghc::filesystem::is_regular_file(file))
    {
        std::cerr << "Given file path " << file << " is not a regular file" << std::endl;
        return false;
    }
    return true;
}

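// Compares the four raw inference outputs against reference text files (one float per
// whitespace-separated token) using an absolute tolerance of 0.001, then checks each NMS
// detection against the fifth reference file, which stores six values per detection.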
void CheckAccuracy(std::vector<float>* toDetector0, std::vector<float>* toDetector1,
                   std::vector<float>* toDetector2, std::vector<float>* detectorOutput,
                   const std::vector<yolov3::Detection>& nmsOut, const std::vector<std::string>& filePaths)
{
    std::ifstream pathStream;
    std::vector<float> expected;
    std::vector<std::vector<float>*> outputs;
    float compare = 0;
    unsigned int count = 0;

    // Push back output vectors from inference for use in loop
    outputs.push_back(toDetector0);
    outputs.push_back(toDetector1);
    outputs.push_back(toDetector2);
    outputs.push_back(detectorOutput);

    for (unsigned int i = 0; i < outputs.size(); ++i)
    {
        // Read the expected output file into 'expected'. Close and clear so the stream can be reused.
        pathStream.open(filePaths[i]);
        if (!pathStream.is_open())
        {
            ARMNN_LOG(error) << "Expected output file cannot be opened: " << filePaths[i];
            continue;
        }

        expected.assign(std::istream_iterator<float>(pathStream), {});
        pathStream.close();
        pathStream.clear();

        // Ensure each vector is the same length
        if (expected.size() != outputs[i]->size())
        {
            ARMNN_LOG(error) << "Expected output size does not match actual output size: " << filePaths[i];
        }
        else
        {
            count = 0;

            // Compare abs(difference) with a tolerance to check for value-by-value equality
            for (unsigned int j = 0; j < outputs[i]->size(); ++j)
            {
                compare = std::abs(expected[j] - outputs[i]->at(j));
                if (compare > 0.001f)
                {
                    count++;
                }
            }
            if (count > 0)
            {
                ARMNN_LOG(error) << count << " output(s) do not match expected values in: " << filePaths[i];
            }
        }
    }

    pathStream.open(filePaths[4]);
    if (!pathStream.is_open())
    {
        ARMNN_LOG(error) << "Expected output file cannot be opened: " << filePaths[4];
    }
    else
    {
        expected.assign(std::istream_iterator<float>(pathStream), {});
        pathStream.close();
        pathStream.clear();
        unsigned int y = 0;
        unsigned int numOfMember = 6;
        std::vector<float> intermediate;

        for (auto& detection: nmsOut)
        {
            for (unsigned int x = y * numOfMember; x < ((y * numOfMember) + numOfMember); ++x)
            {
                intermediate.push_back(expected[x]);
            }
            if (!yolov3::compare_detection(detection, intermediate))
            {
                ARMNN_LOG(error) << "Expected NMS output does not match: Detection " << y + 1;
            }
            intermediate.clear();
            y++;
        }
    }
}

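// Parses and validates the command line arguments via cxxopts. All file path arguments
// are checked up front and the preferred-backend strings are converted to BackendIds,
// so later code can assume the members below are valid.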
struct ParseArgs
{
    ParseArgs(int ac, char *av[]) : options{"TfLiteYoloV3Big-Armnn",
                                            "Executes YoloV3Big using ArmNN. YoloV3Big consists "
                                            "of 3 parts: A backbone TfLite model, a detector TfLite "
                                            "model, and Non Maximum Suppression. All parts are "
                                            "executed successively."}
    {
        options.add_options()
            ("b,backbone-path",
             "File path where the TfLite model for the yoloV3big backbone "
             "can be found e.g. mydir/yoloV3big_backbone.tflite",
             cxxopts::value<std::string>())

            ("c,comparison-files",
             "Defines the expected outputs for the yoloV3big model. Five file paths are "
             "expected, matched in this order: InputToDetector1, InputToDetector2, "
             "InputToDetector3, the detector output and finally the NMS output, e.g. "
             "'mydir/file1.txt,mydir/file2.txt,mydir/file3.txt,mydir/file4.txt,mydir/file5.txt'. "
             "NOTE: Files are passed as comma separated list without whitespaces.",
             cxxopts::value<std::vector<std::string>>())

            ("d,detector-path",
             "File path where the TfLite model for the yoloV3big "
             "detector can be found e.g.'mydir/yoloV3big_detector.tflite'",
             cxxopts::value<std::string>())

            ("h,help", "Produce help message")

            ("i,image-path",
             "File path to a 1920x1080 jpg image that should be "
             "processed e.g. 'mydir/example_img_1080_1920.jpg'",
             cxxopts::value<std::string>())

            ("B,preferred-backends-backbone",
             "Defines the preferred backends to run the backbone model "
             "of yoloV3big e.g. 'GpuAcc,CpuRef' -> GpuAcc will be tried "
             "first before falling back to CpuRef. NOTE: Backends are passed "
             "as comma separated list without whitespaces.",
             cxxopts::value<std::vector<std::string>>()->default_value("GpuAcc,CpuRef"))

            ("D,preferred-backends-detector",
             "Defines the preferred backends to run the detector model "
             "of yoloV3big e.g. 'CpuAcc,CpuRef' -> CpuAcc will be tried "
             "first before falling back to CpuRef. NOTE: Backends are passed "
             "as comma separated list without whitespaces.",
             cxxopts::value<std::vector<std::string>>()->default_value("CpuAcc,CpuRef"));

        auto result = options.parse(ac, av);

        if (result.count("help"))
        {
            std::cout << options.help() << "\n";
            exit(EXIT_SUCCESS);
        }

        backboneDir = GetPathArgument(result, "backbone-path");
        comparisonFiles = GetPathArgument(result["comparison-files"].as<std::vector<std::string>>());
        detectorDir = GetPathArgument(result, "detector-path");
        imageDir = GetPathArgument(result, "image-path");

        prefBackendsBackbone = GetBackendIDs(result["preferred-backends-backbone"].as<std::vector<std::string>>());
        LogBackendsInfo(prefBackendsBackbone, "Backbone");
        prefBackendsDetector = GetBackendIDs(result["preferred-backends-detector"].as<std::vector<std::string>>());
        LogBackendsInfo(prefBackendsDetector, "Detector");
    }

    /// Takes a vector of backend strings and returns a vector of BackendIds
    std::vector<BackendId> GetBackendIDs(const std::vector<std::string>& backendStrings)
    {
        std::vector<BackendId> backendIDs;
        for (const auto& b : backendStrings)
        {
            backendIDs.push_back(BackendId(b));
        }
        return backendIDs;
    }

    /// Verifies that the program argument with the name argName contains a valid file path.
    /// Returns the file path string if the given argument is associated with a valid file path.
    /// Otherwise throws an exception.
    std::string GetPathArgument(cxxopts::ParseResult& result, std::string&& argName)
    {
        if (result.count(argName))
        {
            std::string fileDir = result[argName].as<std::string>();
            if (!ValidateFilePath(fileDir))
            {
                throw cxxopts::option_syntax_exception("Argument given to " + argName + " is not a valid file path");
            }
            return fileDir;
        }
        else
        {
            throw cxxopts::missing_argument_exception(argName);
        }
    }

    /// Validates the comparison file paths and returns them as a vector of strings
    std::vector<std::string> GetPathArgument(const std::vector<std::string>& pathStrings)
    {
        if (pathStrings.size() < 5)
        {
            throw cxxopts::option_syntax_exception("The comparison-files option requires 5 file paths.");
        }

        std::vector<std::string> filePaths;
        for (auto& path : pathStrings)
        {
            filePaths.push_back(path);
            if (!ValidateFilePath(filePaths.back()))
            {
                throw cxxopts::option_syntax_exception("Argument given to comparison-files is not a valid file path");
            }
        }
        return filePaths;
    }

    /// Log info about assigned backends
    void LogBackendsInfo(std::vector<BackendId>& backends, std::string&& modelName)
    {
        std::string info;
        info = "Preferred backends for " + modelName + " set to [ ";
        for (auto const &backend : backends)
        {
            info = info + std::string(backend) + " ";
        }
        ARMNN_LOG(info) << info << "]";
    }

    // Member variables
    std::string backboneDir;
    std::vector<std::string> comparisonFiles;
    std::string detectorDir;
    std::string imageDir;

    std::vector<BackendId> prefBackendsBackbone;
    std::vector<BackendId> prefBackendsDetector;

    cxxopts::Options options;
};

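// Runs the full YoloV3Big pipeline twice: image -> backbone -> detector -> NMS.
// The first iteration is a warm-up; detections and NMS timings are reported for the
// second one, per-layer profiles are written to backbone.json and detector.json, and
// the hand-written NMS timings go to nms.json.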
int main(int argc, char* argv[])
{
    // Configure logging
    SetAllLoggingSinks(true, true, true);
    SetLogFilter(LogSeverity::Trace);

    // Check and get given program arguments
    ParseArgs progArgs = ParseArgs(argc, argv);

    // Create runtime
    IRuntime::CreationOptions runtimeOptions; // default
    auto runtime = IRuntime::Create(runtimeOptions);
    if (!runtime)
    {
        ARMNN_LOG(fatal) << "Could not create runtime.";
        return -1;
    }

    // Create TfLite parser
    ITfLiteParser::TfLiteParserOptions parserOptions;
    auto parser = ITfLiteParser::Create(parserOptions);

    // Load backbone model
    ARMNN_LOG(info) << "Loading backbone...";
    NetworkId backboneId;
    CHECK_OK(LoadModel(progArgs.backboneDir.c_str(), *parser, *runtime, backboneId, progArgs.prefBackendsBackbone));
    auto inputId = parser->GetNetworkInputBindingInfo(0, "inputs");
    auto bbOut0Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_1");
    auto bbOut1Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_2");
    auto bbOut2Id = parser->GetNetworkOutputBindingInfo(0, "input_to_detector_3");
    auto backboneProfile = runtime->GetProfiler(backboneId);
    backboneProfile->EnableProfiling(true);

    // Load detector model
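    // The detector is loaded with enableImport set so that, where the backends support
    // it, the intermediate buffers shared with the backbone can be imported directly
    // instead of being copied into the network's own tensors.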
    ARMNN_LOG(info) << "Loading detector...";
    NetworkId detectorId;
    CHECK_OK(LoadModel(
        progArgs.detectorDir.c_str(), *parser, *runtime, detectorId, progArgs.prefBackendsDetector, true));
    auto detectIn0Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_1");
    auto detectIn1Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_2");
    auto detectIn2Id = parser->GetNetworkInputBindingInfo(0, "input_to_detector_3");
    auto outputBoxesId = parser->GetNetworkOutputBindingInfo(0, "output_boxes");
    auto detectorProfile = runtime->GetProfiler(detectorId);

    // Load input from file
    ARMNN_LOG(info) << "Loading test image...";
    auto image = LoadImage(progArgs.imageDir.c_str());
    if (image.empty())
    {
        return LOAD_IMAGE_ERROR;
    }

    // Allocate the intermediate tensors
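    // These buffers are shared between the two networks: the backbone writes its three
    // feature maps into intermediateMem0-2, the detector reads them back as inputs, and
    // intermediateMem3 receives the detector's box output consumed by NMS.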
    std::vector<float> intermediateMem0(bbOut0Id.second.GetNumElements());
    std::vector<float> intermediateMem1(bbOut1Id.second.GetNumElements());
    std::vector<float> intermediateMem2(bbOut2Id.second.GetNumElements());
    std::vector<float> intermediateMem3(outputBoxesId.second.GetNumElements());

    // Setup inputs and outputs
    using BindingInfos = std::vector<armnn::BindingPointInfo>;
    using FloatTensors = std::vector<std::reference_wrapper<std::vector<float>>>;

    InputTensors bbInputTensors = MakeInputTensors(BindingInfos{ inputId },
                                                   FloatTensors{ image });
    OutputTensors bbOutputTensors = MakeOutputTensors(BindingInfos{ bbOut0Id, bbOut1Id, bbOut2Id },
                                                      FloatTensors{ intermediateMem0,
                                                                    intermediateMem1,
                                                                    intermediateMem2 });
    InputTensors detectInputTensors = MakeInputTensors(BindingInfos{ detectIn0Id,
                                                                     detectIn1Id,
                                                                     detectIn2Id },
                                                       FloatTensors{ intermediateMem0,
                                                                     intermediateMem1,
                                                                     intermediateMem2 });
    OutputTensors detectOutputTensors = MakeOutputTensors(BindingInfos{ outputBoxesId },
                                                          FloatTensors{ intermediateMem3 });

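    // Two iterations: the first is a warm-up whose results are discarded, the second is
    // the measured run whose detections are printed and whose NMS time is recorded.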
    static const int numIterations = 2;
    using DurationUS = std::chrono::duration<double, std::micro>;
    std::vector<DurationUS> nmsDurations(0);
    std::vector<yolov3::Detection> filtered_boxes;
    nmsDurations.reserve(numIterations);
    for (int i = 0; i < numIterations; i++)
    {
        // Execute backbone
        ARMNN_LOG(info) << "Running backbone...";
        runtime->EnqueueWorkload(backboneId, bbInputTensors, bbOutputTensors);

        // Execute detector
        ARMNN_LOG(info) << "Running detector...";
        runtime->EnqueueWorkload(detectorId, detectInputTensors, detectOutputTensors);

        // Execute NMS
        ARMNN_LOG(info) << "Running nms...";
        using clock = std::chrono::steady_clock;
        auto nmsStartTime = clock::now();
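        // NMS settings for this model: 127800 candidate boxes, each scored against 80
        // classes (the COCO label set); boxes below the confidence threshold are dropped
        // and overlapping boxes above the IoU threshold are suppressed.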
        yolov3::NMSConfig config;
        config.num_boxes = 127800;
        config.num_classes = 80;
        config.confidence_threshold = 0.9f;
        config.iou_threshold = 0.5f;
        filtered_boxes = yolov3::nms(config, intermediateMem3);
        auto nmsEndTime = clock::now();

        // After the warm-up run, report the detections and record the NMS duration
        if (i > 0)
        {
            print_detection(std::cout, filtered_boxes);

            const auto nmsDuration = DurationUS(nmsEndTime - nmsStartTime);
            nmsDurations.push_back(nmsDuration);
        }
        backboneProfile->EnableProfiling(true);
        detectorProfile->EnableProfiling(true);
    }
    // Log timings to file
    std::ofstream backboneProfileStream("backbone.json");
    backboneProfile->Print(backboneProfileStream);
    backboneProfileStream.close();

    std::ofstream detectorProfileStream("detector.json");
    detectorProfile->Print(detectorProfileStream);
    detectorProfileStream.close();

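    // The NMS step runs outside Arm NN, so its timings are not captured by the profiler
    // and are written out by hand in the same spirit as the profiler's JSON files.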
    // Manually construct the json output
    std::ofstream nmsProfileStream("nms.json");
    nmsProfileStream << "{" << "\n";
    nmsProfileStream << R"(    "NmsTimings": {)" << "\n";
    nmsProfileStream << R"(        "raw": [)" << "\n";
    bool isFirst = true;
    for (auto duration : nmsDurations)
    {
        if (!isFirst)
        {
            nmsProfileStream << ",\n";
        }

        nmsProfileStream << "            " << duration.count();
        isFirst = false;
    }
    nmsProfileStream << "\n";
    nmsProfileStream << "        ]," << "\n";
    nmsProfileStream << R"(        "units": "us")" << "\n";
    nmsProfileStream << "    }" << "\n";
    nmsProfileStream << "}" << "\n";
    nmsProfileStream.close();

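    // Compare the captured intermediate outputs and the NMS detections against the
    // reference files supplied via --comparison-files.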
    CheckAccuracy(&intermediateMem0, &intermediateMem1,
                  &intermediateMem2, &intermediateMem3,
                  filtered_boxes, progArgs.comparisonFiles);

    ARMNN_LOG(info) << "Run completed";
    return 0;
}