blob: 8ca69d8292d08ed196f452de2337f96f5f28bcb7 [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
Aron Virginas-Tar5cc8e562018-10-23 15:14:46 +01005#include <armnn/ArmNN.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +01006#include <armnn/TypesUtils.hpp>
7
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +00008#if defined(ARMNN_SERIALIZER)
Derek Lamberti0028d1b2019-02-20 13:57:42 +00009#include "armnnDeserializer/IDeserializer.hpp"
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +000010#endif
telsoa014fcda012018-03-09 14:13:49 +000011#if defined(ARMNN_CAFFE_PARSER)
12#include "armnnCaffeParser/ICaffeParser.hpp"
13#endif
surmeh01bceff2f2018-03-29 16:29:27 +010014#if defined(ARMNN_TF_PARSER)
15#include "armnnTfParser/ITfParser.hpp"
16#endif
telsoa01c577f2c2018-08-31 09:22:23 +010017#if defined(ARMNN_TF_LITE_PARSER)
18#include "armnnTfLiteParser/ITfLiteParser.hpp"
19#endif
20#if defined(ARMNN_ONNX_PARSER)
21#include "armnnOnnxParser/IOnnxParser.hpp"
22#endif
23#include "CsvReader.hpp"
telsoa014fcda012018-03-09 14:13:49 +000024#include "../InferenceTest.hpp"
25
telsoa01c577f2c2018-08-31 09:22:23 +010026#include <Logging.hpp>
27#include <Profiling.hpp>
28
29#include <boost/algorithm/string/trim.hpp>
telsoa014fcda012018-03-09 14:13:49 +000030#include <boost/algorithm/string/split.hpp>
31#include <boost/algorithm/string/classification.hpp>
telsoa01c577f2c2018-08-31 09:22:23 +010032#include <boost/program_options.hpp>
Ferran Balaguerc602f292019-02-08 17:09:55 +000033#include <boost/variant.hpp>
telsoa014fcda012018-03-09 14:13:49 +000034
35#include <iostream>
36#include <fstream>
telsoa01c577f2c2018-08-31 09:22:23 +010037#include <functional>
38#include <future>
39#include <algorithm>
40#include <iterator>
telsoa014fcda012018-03-09 14:13:49 +000041
42namespace
43{
44
telsoa01c577f2c2018-08-31 09:22:23 +010045// Configure boost::program_options for command-line parsing and validation.
46namespace po = boost::program_options;
47
telsoa014fcda012018-03-09 14:13:49 +000048template<typename T, typename TParseElementFunc>
Ferran Balaguerc602f292019-02-08 17:09:55 +000049std::vector<T> ParseArrayImpl(std::istream& stream, TParseElementFunc parseElementFunc, const char * chars = "\t ,:")
telsoa014fcda012018-03-09 14:13:49 +000050{
51 std::vector<T> result;
telsoa01c577f2c2018-08-31 09:22:23 +010052 // Processes line-by-line.
telsoa014fcda012018-03-09 14:13:49 +000053 std::string line;
54 while (std::getline(stream, line))
55 {
56 std::vector<std::string> tokens;
surmeh013537c2c2018-05-18 16:31:43 +010057 try
58 {
59 // Coverity fix: boost::split() may throw an exception of type boost::bad_function_call.
Ferran Balaguerc602f292019-02-08 17:09:55 +000060 boost::split(tokens, line, boost::algorithm::is_any_of(chars), boost::token_compress_on);
surmeh013537c2c2018-05-18 16:31:43 +010061 }
62 catch (const std::exception& e)
63 {
64 BOOST_LOG_TRIVIAL(error) << "An error occurred when splitting tokens: " << e.what();
65 continue;
66 }
telsoa014fcda012018-03-09 14:13:49 +000067 for (const std::string& token : tokens)
68 {
69 if (!token.empty()) // See https://stackoverflow.com/questions/10437406/
70 {
71 try
72 {
73 result.push_back(parseElementFunc(token));
74 }
75 catch (const std::exception&)
76 {
77 BOOST_LOG_TRIVIAL(error) << "'" << token << "' is not a valid number. It has been ignored.";
78 }
79 }
80 }
81 }
82
83 return result;
84}
85
telsoa01c577f2c2018-08-31 09:22:23 +010086bool CheckOption(const po::variables_map& vm,
87 const char* option)
88{
89 // Check that the given option is valid.
90 if (option == nullptr)
91 {
92 return false;
93 }
94
95 // Check whether 'option' is provided.
96 return vm.find(option) != vm.end();
97}
98
99void CheckOptionDependency(const po::variables_map& vm,
100 const char* option,
101 const char* required)
102{
103 // Check that the given options are valid.
104 if (option == nullptr || required == nullptr)
105 {
106 throw po::error("Invalid option to check dependency for");
107 }
108
109 // Check that if 'option' is provided, 'required' is also provided.
110 if (CheckOption(vm, option) && !vm[option].defaulted())
111 {
112 if (CheckOption(vm, required) == 0 || vm[required].defaulted())
113 {
114 throw po::error(std::string("Option '") + option + "' requires option '" + required + "'.");
115 }
116 }
117}
118
119void CheckOptionDependencies(const po::variables_map& vm)
120{
121 CheckOptionDependency(vm, "model-path", "model-format");
122 CheckOptionDependency(vm, "model-path", "input-name");
123 CheckOptionDependency(vm, "model-path", "input-tensor-data");
124 CheckOptionDependency(vm, "model-path", "output-name");
125 CheckOptionDependency(vm, "input-tensor-shape", "model-path");
telsoa014fcda012018-03-09 14:13:49 +0000126}
127
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000128template<armnn::DataType NonQuantizedType>
129auto ParseDataArray(std::istream & stream);
130
131template<armnn::DataType QuantizedType>
132auto ParseDataArray(std::istream& stream,
133 const float& quantizationScale,
134 const int32_t& quantizationOffset);
telsoa014fcda012018-03-09 14:13:49 +0000135
136template<>
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000137auto ParseDataArray<armnn::DataType::Float32>(std::istream & stream)
telsoa014fcda012018-03-09 14:13:49 +0000138{
139 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
140}
141
142template<>
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000143auto ParseDataArray<armnn::DataType::Signed32>(std::istream & stream)
144{
145 return ParseArrayImpl<int>(stream, [](const std::string & s) { return std::stoi(s); });
146}
147
148template<>
149auto ParseDataArray<armnn::DataType::QuantisedAsymm8>(std::istream& stream,
150 const float& quantizationScale,
151 const int32_t& quantizationOffset)
152{
153 return ParseArrayImpl<uint8_t>(stream,
154 [&quantizationScale, &quantizationOffset](const std::string & s)
155 {
156 return boost::numeric_cast<uint8_t>(
157 armnn::Quantize<u_int8_t>(std::stof(s),
158 quantizationScale,
159 quantizationOffset));
160 });
161}
162
telsoa014fcda012018-03-09 14:13:49 +0000163std::vector<unsigned int> ParseArray(std::istream& stream)
164{
165 return ParseArrayImpl<unsigned int>(stream,
166 [](const std::string& s) { return boost::numeric_cast<unsigned int>(std::stoi(s)); });
167}
168
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000169std::vector<std::string> ParseStringList(const std::string & inputString, const char * delimiter)
Ferran Balaguerc602f292019-02-08 17:09:55 +0000170{
171 std::stringstream stream(inputString);
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000172 return ParseArrayImpl<std::string>(stream, [](const std::string& s) { return boost::trim_copy(s); }, delimiter);
telsoa014fcda012018-03-09 14:13:49 +0000173}
174
David Beckf0b48452018-10-19 15:20:56 +0100175void RemoveDuplicateDevices(std::vector<armnn::BackendId>& computeDevices)
telsoa014fcda012018-03-09 14:13:49 +0000176{
telsoa01c577f2c2018-08-31 09:22:23 +0100177 // Mark the duplicate devices as 'Undefined'.
178 for (auto i = computeDevices.begin(); i != computeDevices.end(); ++i)
179 {
180 for (auto j = std::next(i); j != computeDevices.end(); ++j)
181 {
182 if (*j == *i)
183 {
184 *j = armnn::Compute::Undefined;
185 }
186 }
187 }
188
189 // Remove 'Undefined' devices.
190 computeDevices.erase(std::remove(computeDevices.begin(), computeDevices.end(), armnn::Compute::Undefined),
191 computeDevices.end());
192}
193
Derek Lambertida470482019-05-14 16:55:25 +0100194struct TensorPrinter : public boost::static_visitor<>
195{
196 TensorPrinter(const std::string& binding, const armnn::TensorInfo& info)
197 : m_OutputBinding(binding)
198 , m_Scale(info.GetQuantizationScale())
199 , m_Offset(info.GetQuantizationOffset())
200 {}
201
202 void operator()(const std::vector<float>& values)
203 {
204 ForEachValue(values, [](float value){
205 printf("%f ", value);
206 });
207 }
208
209 void operator()(const std::vector<uint8_t>& values)
210 {
211 auto& scale = m_Scale;
212 auto& offset = m_Offset;
213 ForEachValue(values, [&scale, &offset](uint8_t value)
214 {
215 printf("%f ", armnn::Dequantize(value, scale, offset));
216 });
217 }
218
219 void operator()(const std::vector<int>& values)
220 {
221 ForEachValue(values, [](int value)
222 {
223 printf("%d ", value);
224 });
225 }
226
227private:
228 template<typename Container, typename Delegate>
229 void ForEachValue(const Container& c, Delegate delegate)
230 {
231 std::cout << m_OutputBinding << ": ";
232 for (const auto& value : c)
233 {
234 delegate(value);
235 }
236 printf("\n");
237 }
238
239 std::string m_OutputBinding;
240 float m_Scale=0.0f;
241 int m_Offset=0;
242};
243
244
telsoa01c577f2c2018-08-31 09:22:23 +0100245} // namespace
246
// Loads the model at 'modelPath' with parser TParser, feeds it input data
// read from 'inputTensorDataFilePaths', runs one inference and prints every
// output tensor to stdout.
//
// Preconditions (validated by RunTest before calling): inputNames,
// inputTensorDataFilePaths and inputTypes have matching sizes, as do
// outputNames and outputTypes.
//
// Returns EXIT_SUCCESS, or EXIT_FAILURE on an unsupported tensor type, an
// armnn::Exception, or when the measured inference time exceeds a non-zero
// 'thresholdTime' (milliseconds).
template<typename TParser, typename TDataType>
int MainImpl(const char* modelPath,
             bool isModelBinary,
             const std::vector<armnn::BackendId>& computeDevices,
             const std::vector<string>& inputNames,
             const std::vector<std::unique_ptr<armnn::TensorShape>>& inputTensorShapes,
             const std::vector<string>& inputTensorDataFilePaths,
             const std::vector<string>& inputTypes,
             const std::vector<string>& outputTypes,
             const std::vector<string>& outputNames,
             bool enableProfiling,
             bool enableFp16TurboMode,
             const double& thresholdTime,
             const size_t subgraphId,
             const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
{
    // One TContainer per tensor; the variant alternative encodes the type.
    using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;

    std::vector<TContainer> inputDataContainers;

    try
    {
        // Creates an InferenceModel, which will parse the model and load it into an IRuntime.
        typename InferenceModel<TParser, TDataType>::Params params;
        params.m_ModelPath = modelPath;
        params.m_IsModelBinary = isModelBinary;
        params.m_ComputeDevices = computeDevices;

        for(const std::string& inputName: inputNames)
        {
            params.m_InputBindings.push_back(inputName);
        }

        for(unsigned int i = 0; i < inputTensorShapes.size(); ++i)
        {
            params.m_InputShapes.push_back(*inputTensorShapes[i]);
        }

        for(const std::string& outputName: outputNames)
        {
            params.m_OutputBindings.push_back(outputName);
        }

        params.m_SubgraphId = subgraphId;
        params.m_EnableFp16TurboMode = enableFp16TurboMode;
        InferenceModel<TParser, TDataType> model(params, enableProfiling, runtime);

        // Read each input file and parse it according to its declared type.
        for(unsigned int i = 0; i < inputTensorDataFilePaths.size(); ++i)
        {
            std::ifstream inputTensorFile(inputTensorDataFilePaths[i]);

            if (inputTypes[i].compare("float") == 0)
            {
                inputDataContainers.push_back(
                    ParseDataArray<armnn::DataType::Float32>(inputTensorFile));
            }
            else if (inputTypes[i].compare("int") == 0)
            {
                inputDataContainers.push_back(
                    ParseDataArray<armnn::DataType::Signed32>(inputTensorFile));
            }
            else if (inputTypes[i].compare("qasymm8") == 0)
            {
                // NOTE(review): GetInputBindingInfo() takes no index here, so
                // the same quantization scale/offset appears to be applied to
                // every qasymm8 input — confirm behaviour for multi-input
                // networks.
                auto inputBinding = model.GetInputBindingInfo();
                inputDataContainers.push_back(
                    ParseDataArray<armnn::DataType::QuantisedAsymm8>(inputTensorFile,
                                                                     inputBinding.second.GetQuantizationScale(),
                                                                     inputBinding.second.GetQuantizationOffset()));
            }
            else
            {
                BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << inputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }

            inputTensorFile.close();
        }

        // Pre-size one output container per binding, typed to match
        // outputTypes so model.Run can write straight into it.
        const size_t numOutputs = params.m_OutputBindings.size();
        std::vector<TContainer> outputDataContainers;

        for (unsigned int i = 0; i < numOutputs; ++i)
        {
            if (outputTypes[i].compare("float") == 0)
            {
                outputDataContainers.push_back(std::vector<float>(model.GetOutputSize(i)));
            }
            else if (outputTypes[i].compare("int") == 0)
            {
                outputDataContainers.push_back(std::vector<int>(model.GetOutputSize(i)));
            }
            else if (outputTypes[i].compare("qasymm8") == 0)
            {
                outputDataContainers.push_back(std::vector<uint8_t>(model.GetOutputSize(i)));
            }
            else
            {
                BOOST_LOG_TRIVIAL(fatal) << "Unsupported tensor data type \"" << outputTypes[i] << "\". ";
                return EXIT_FAILURE;
            }
        }

        // model.Run returns the inference time elapsed in EnqueueWorkload (in milliseconds)
        auto inference_duration = model.Run(inputDataContainers, outputDataContainers);

        // Print output tensors
        const auto& infosOut = model.GetOutputBindingInfos();
        for (size_t i = 0; i < numOutputs; i++)
        {
            const armnn::TensorInfo& infoOut = infosOut[i].second;
            TensorPrinter printer(params.m_OutputBindings[i], infoOut);
            boost::apply_visitor(printer, outputDataContainers[i]);
        }

        BOOST_LOG_TRIVIAL(info) << "\nInference time: " << std::setprecision(2)
                                << std::fixed << inference_duration.count() << " ms";

        // If thresholdTime == 0.0 (default), then it hasn't been supplied at command line
        if (thresholdTime != 0.0)
        {
            BOOST_LOG_TRIVIAL(info) << "Threshold time: " << std::setprecision(2)
                                    << std::fixed << thresholdTime << " ms";
            auto thresholdMinusInference = thresholdTime - inference_duration.count();
            BOOST_LOG_TRIVIAL(info) << "Threshold time - Inference time: " << std::setprecision(2)
                                    << std::fixed << thresholdMinusInference << " ms" << "\n";

            // A negative margin means the inference was slower than allowed:
            // treat it as a test failure.
            if (thresholdMinusInference < 0)
            {
                BOOST_LOG_TRIVIAL(fatal) << "Elapsed inference time is greater than provided threshold time.\n";
                return EXIT_FAILURE;
            }
        }

    }
    catch (armnn::Exception const& e)
    {
        BOOST_LOG_TRIVIAL(fatal) << "Armnn Error: " << e.what();
        return EXIT_FAILURE;
    }

    return EXIT_SUCCESS;
}
390
telsoa01c577f2c2018-08-31 09:22:23 +0100391// This will run a test
Ferran Balaguerc602f292019-02-08 17:09:55 +0000392int RunTest(const std::string& format,
393 const std::string& inputTensorShapesStr,
David Beckf0b48452018-10-19 15:20:56 +0100394 const vector<armnn::BackendId>& computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000395 const std::string& path,
396 const std::string& inputNames,
397 const std::string& inputTensorDataFilePaths,
398 const std::string& inputTypes,
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000399 const std::string& outputTypes,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000400 const std::string& outputNames,
telsoa01c577f2c2018-08-31 09:22:23 +0100401 bool enableProfiling,
Ruomei Yan2fcce082019-04-02 16:47:34 +0100402 bool enableFp16TurboMode,
James Conroy7b4886f2019-04-11 10:23:58 +0100403 const double& thresholdTime,
telsoa01c577f2c2018-08-31 09:22:23 +0100404 const size_t subgraphId,
405 const std::shared_ptr<armnn::IRuntime>& runtime = nullptr)
telsoa014fcda012018-03-09 14:13:49 +0000406{
Ferran Balaguerc602f292019-02-08 17:09:55 +0000407 std::string modelFormat = boost::trim_copy(format);
408 std::string modelPath = boost::trim_copy(path);
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000409 std::vector<std::string> inputNamesVector = ParseStringList(inputNames, ",");
410 std::vector<std::string> inputTensorShapesVector = ParseStringList(inputTensorShapesStr, ";");
411 std::vector<std::string> inputTensorDataFilePathsVector = ParseStringList(
412 inputTensorDataFilePaths, ",");
413 std::vector<std::string> outputNamesVector = ParseStringList(outputNames, ",");
414 std::vector<std::string> inputTypesVector = ParseStringList(inputTypes, ",");
415 std::vector<std::string> outputTypesVector = ParseStringList(outputTypes, ",");
Ferran Balaguerc602f292019-02-08 17:09:55 +0000416
telsoa014fcda012018-03-09 14:13:49 +0000417 // Parse model binary flag from the model-format string we got from the command-line
418 bool isModelBinary;
419 if (modelFormat.find("bin") != std::string::npos)
420 {
421 isModelBinary = true;
422 }
423 else if (modelFormat.find("txt") != std::string::npos || modelFormat.find("text") != std::string::npos)
424 {
425 isModelBinary = false;
426 }
427 else
428 {
429 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Please include 'binary' or 'text'";
telsoa01c577f2c2018-08-31 09:22:23 +0100430 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000431 }
432
Ferran Balaguerc602f292019-02-08 17:09:55 +0000433 if ((inputTensorShapesVector.size() != 0) && (inputTensorShapesVector.size() != inputNamesVector.size()))
telsoa014fcda012018-03-09 14:13:49 +0000434 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000435 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-shape must have the same amount of elements.";
436 return EXIT_FAILURE;
437 }
surmeh013537c2c2018-05-18 16:31:43 +0100438
Ferran Balaguerc602f292019-02-08 17:09:55 +0000439 if ((inputTensorDataFilePathsVector.size() != 0) &&
440 (inputTensorDataFilePathsVector.size() != inputNamesVector.size()))
441 {
442 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-tensor-data must have the same amount of elements.";
443 return EXIT_FAILURE;
444 }
445
446 if (inputTypesVector.size() == 0)
447 {
448 //Defaults the value of all inputs to "float"
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000449 inputTypesVector.assign(inputNamesVector.size(), "float");
450 }
451 if (outputTypesVector.size() == 0)
452 {
453 //Defaults the value of all outputs to "float"
454 outputTypesVector.assign(outputNamesVector.size(), "float");
Ferran Balaguerc602f292019-02-08 17:09:55 +0000455 }
456 else if ((inputTypesVector.size() != 0) && (inputTypesVector.size() != inputNamesVector.size()))
457 {
458 BOOST_LOG_TRIVIAL(fatal) << "input-name and input-type must have the same amount of elements.";
459 return EXIT_FAILURE;
460 }
461
462 // Parse input tensor shape from the string we got from the command-line.
463 std::vector<std::unique_ptr<armnn::TensorShape>> inputTensorShapes;
464
465 if (!inputTensorShapesVector.empty())
466 {
467 inputTensorShapes.reserve(inputTensorShapesVector.size());
468
469 for(const std::string& shape : inputTensorShapesVector)
surmeh013537c2c2018-05-18 16:31:43 +0100470 {
Ferran Balaguerc602f292019-02-08 17:09:55 +0000471 std::stringstream ss(shape);
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000472 std::vector<unsigned int> dims = ParseArray(ss);
Ferran Balaguerc602f292019-02-08 17:09:55 +0000473
474 try
475 {
476 // Coverity fix: An exception of type armnn::InvalidArgumentException is thrown and never caught.
477 inputTensorShapes.push_back(std::make_unique<armnn::TensorShape>(dims.size(), dims.data()));
478 }
479 catch (const armnn::InvalidArgumentException& e)
480 {
481 BOOST_LOG_TRIVIAL(fatal) << "Cannot create tensor shape: " << e.what();
482 return EXIT_FAILURE;
483 }
surmeh013537c2c2018-05-18 16:31:43 +0100484 }
telsoa014fcda012018-03-09 14:13:49 +0000485 }
486
James Conroy7b4886f2019-04-11 10:23:58 +0100487 // Check that threshold time is not less than zero
488 if (thresholdTime < 0)
489 {
490 BOOST_LOG_TRIVIAL(fatal) << "Threshold time supplied as a commoand line argument is less than zero.";
491 return EXIT_FAILURE;
492 }
493
telsoa014fcda012018-03-09 14:13:49 +0000494 // Forward to implementation based on the parser type
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +0000495 if (modelFormat.find("armnn") != std::string::npos)
496 {
497#if defined(ARMNN_SERIALIZER)
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000498 return MainImpl<armnnDeserializer::IDeserializer, float>(
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +0000499 modelPath.c_str(), isModelBinary, computeDevice,
500 inputNamesVector, inputTensorShapes,
James Conroy7b4886f2019-04-11 10:23:58 +0100501 inputTensorDataFilePathsVector, inputTypesVector,
502 outputTypesVector, outputNamesVector, enableProfiling,
503 enableFp16TurboMode, thresholdTime, subgraphId, runtime);
Aron Virginas-Tar64e4ccb2019-02-12 11:27:53 +0000504#else
505 BOOST_LOG_TRIVIAL(fatal) << "Not built with serialization support.";
506 return EXIT_FAILURE;
507#endif
508 }
509 else if (modelFormat.find("caffe") != std::string::npos)
telsoa014fcda012018-03-09 14:13:49 +0000510 {
511#if defined(ARMNN_CAFFE_PARSER)
512 return MainImpl<armnnCaffeParser::ICaffeParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000513 inputNamesVector, inputTensorShapes,
514 inputTensorDataFilePathsVector, inputTypesVector,
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000515 outputTypesVector, outputNamesVector, enableProfiling,
James Conroy7b4886f2019-04-11 10:23:58 +0100516 enableFp16TurboMode, thresholdTime, subgraphId, runtime);
telsoa014fcda012018-03-09 14:13:49 +0000517#else
518 BOOST_LOG_TRIVIAL(fatal) << "Not built with Caffe parser support.";
telsoa01c577f2c2018-08-31 09:22:23 +0100519 return EXIT_FAILURE;
520#endif
521 }
522 else if (modelFormat.find("onnx") != std::string::npos)
523{
524#if defined(ARMNN_ONNX_PARSER)
525 return MainImpl<armnnOnnxParser::IOnnxParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000526 inputNamesVector, inputTensorShapes,
527 inputTensorDataFilePathsVector, inputTypesVector,
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000528 outputTypesVector, outputNamesVector, enableProfiling,
James Conroy7b4886f2019-04-11 10:23:58 +0100529 enableFp16TurboMode, thresholdTime, subgraphId, runtime);
telsoa01c577f2c2018-08-31 09:22:23 +0100530#else
531 BOOST_LOG_TRIVIAL(fatal) << "Not built with Onnx parser support.";
532 return EXIT_FAILURE;
telsoa014fcda012018-03-09 14:13:49 +0000533#endif
534 }
535 else if (modelFormat.find("tensorflow") != std::string::npos)
536 {
surmeh01bceff2f2018-03-29 16:29:27 +0100537#if defined(ARMNN_TF_PARSER)
538 return MainImpl<armnnTfParser::ITfParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000539 inputNamesVector, inputTensorShapes,
540 inputTensorDataFilePathsVector, inputTypesVector,
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000541 outputTypesVector, outputNamesVector, enableProfiling,
James Conroy7b4886f2019-04-11 10:23:58 +0100542 enableFp16TurboMode, thresholdTime, subgraphId, runtime);
surmeh01bceff2f2018-03-29 16:29:27 +0100543#else
telsoa014fcda012018-03-09 14:13:49 +0000544 BOOST_LOG_TRIVIAL(fatal) << "Not built with Tensorflow parser support.";
telsoa01c577f2c2018-08-31 09:22:23 +0100545 return EXIT_FAILURE;
546#endif
547 }
548 else if(modelFormat.find("tflite") != std::string::npos)
549 {
550#if defined(ARMNN_TF_LITE_PARSER)
551 if (! isModelBinary)
552 {
553 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat << "'. Only 'binary' format supported \
554 for tflite files";
555 return EXIT_FAILURE;
556 }
557 return MainImpl<armnnTfLiteParser::ITfLiteParser, float>(modelPath.c_str(), isModelBinary, computeDevice,
Ferran Balaguerc602f292019-02-08 17:09:55 +0000558 inputNamesVector, inputTensorShapes,
559 inputTensorDataFilePathsVector, inputTypesVector,
Éanna Ó Catháinb3d481a2019-02-26 11:26:24 +0000560 outputTypesVector, outputNamesVector, enableProfiling,
James Conroy7b4886f2019-04-11 10:23:58 +0100561 enableFp16TurboMode, thresholdTime, subgraphId,
562 runtime);
telsoa01c577f2c2018-08-31 09:22:23 +0100563#else
564 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
565 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
566 return EXIT_FAILURE;
surmeh01bceff2f2018-03-29 16:29:27 +0100567#endif
telsoa014fcda012018-03-09 14:13:49 +0000568 }
569 else
570 {
571 BOOST_LOG_TRIVIAL(fatal) << "Unknown model format: '" << modelFormat <<
telsoa01c577f2c2018-08-31 09:22:23 +0100572 "'. Please include 'caffe', 'tensorflow', 'tflite' or 'onnx'";
573 return EXIT_FAILURE;
574 }
575}
576
// Executes one test case described by a CSV row. The row's cells are treated
// as argv-style tokens, parsed with boost::program_options, validated, and
// forwarded to RunTest. Returns EXIT_SUCCESS or EXIT_FAILURE.
//
// Profiling, FP16 turbo mode and the threshold time come from the top-level
// command line and apply to every CSV test case.
int RunCsvTest(const armnnUtils::CsvRow &csvRow, const std::shared_ptr<armnn::IRuntime>& runtime,
               const bool enableProfiling, const bool enableFp16TurboMode, const double& thresholdTime)
{
    // Destinations for the per-test-case option values.
    std::string modelFormat;
    std::string modelPath;
    std::string inputNames;
    std::string inputTensorShapes;
    std::string inputTensorDataFilePaths;
    std::string outputNames;
    std::string inputTypes;
    std::string outputTypes;

    size_t subgraphId = 0;

    const std::string backendsMessage = std::string("The preferred order of devices to run layers on by default. ")
                                      + std::string("Possible choices: ")
                                      + armnn::BackendRegistryInstance().GetBackendIdsAsString();

    // Describe the options accepted inside a CSV test-case row. This mirrors
    // (a subset of) the options accepted on the top-level command line.
    po::options_description desc("Options");
    try
    {
        desc.add_options()
        ("model-format,f", po::value(&modelFormat),
         "armnn-binary, caffe-binary, caffe-text, tflite-binary, onnx-binary, onnx-text, tensorflow-binary or "
         "tensorflow-text.")
        ("model-path,m", po::value(&modelPath), "Path to model file, e.g. .armnn, .caffemodel, .prototxt, "
         ".tflite, .onnx")
        ("compute,c", po::value<std::vector<armnn::BackendId>>()->multitoken(),
         backendsMessage.c_str())
        ("input-name,i", po::value(&inputNames), "Identifier of the input tensors in the network separated by comma.")
        ("subgraph-number,n", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be "
         "executed. Defaults to 0.")
        ("input-tensor-shape,s", po::value(&inputTensorShapes),
         "The shape of the input tensors in the network as a flat array of integers separated by comma. "
         "Several shapes can be passed separating them by semicolon. "
         "This parameter is optional, depending on the network.")
        ("input-tensor-data,d", po::value(&inputTensorDataFilePaths),
         "Path to files containing the input data as a flat array separated by whitespace. "
         "Several paths can be passed separating them by comma.")
        ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
         "If unset, defaults to \"float\" for all defined inputs. "
         "Accepted values (float, int or qasymm8).")
        ("output-type,z",po::value(&outputTypes), "The type of the output tensors in the network separated by comma. "
         "If unset, defaults to \"float\" for all defined outputs. "
         "Accepted values (float, int or qasymm8).")
        ("output-name,o", po::value(&outputNames),
         "Identifier of the output tensors in the network separated by comma.");
    }
    catch (const std::exception& e)
    {
        // Coverity points out that default_value(...) can throw a bad_lexical_cast,
        // and that desc.add_options() can throw boost::io::too_few_args.
        // They really won't in any of these cases.
        BOOST_ASSERT_MSG(false, "Caught unexpected exception");
        BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
        return EXIT_FAILURE;
    }

    // Present the CSV row's cells to program_options as an argv array.
    std::vector<const char*> clOptions;
    clOptions.reserve(csvRow.values.size());
    for (const std::string& value : csvRow.values)
    {
        clOptions.push_back(value.c_str());
    }

    po::variables_map vm;
    try
    {
        po::store(po::parse_command_line(static_cast<int>(clOptions.size()), clOptions.data(), desc), vm);

        po::notify(vm);

        CheckOptionDependencies(vm);
    }
    catch (const po::error& e)
    {
        // On any parse/validation error, print the problem plus usage text.
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return EXIT_FAILURE;
    }

    // Get the preferred order of compute devices.
    std::vector<armnn::BackendId> computeDevices = vm["compute"].as<std::vector<armnn::BackendId>>();

    // Remove duplicates from the list of compute devices.
    RemoveDuplicateDevices(computeDevices);

    // Check that the specified compute devices are valid.
    std::string invalidBackends;
    if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
    {
        BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
                                 << invalidBackends;
        return EXIT_FAILURE;
    }

    return RunTest(modelFormat, inputTensorShapes, computeDevices, modelPath, inputNames,
                   inputTensorDataFilePaths, inputTypes, outputTypes, outputNames,
                   enableProfiling, enableFp16TurboMode, thresholdTime, subgraphId);
}
677
James Conroy7b4886f2019-04-11 10:23:58 +0100678// MAIN
// Entry point for the ExecuteNetwork test harness.
// Parses the command line, then either runs a batch of test cases from a CSV file
// (optionally concurrently) or runs a single inference test via RunTest().
// Returns EXIT_SUCCESS / EXIT_FAILURE.
int main(int argc, const char* argv[])
{
    // Configures logging for both the ARMNN library and this test program.
    // NDEBUG builds log at Info level; debug builds log at Debug level.
#ifdef NDEBUG
    armnn::LogSeverity level = armnn::LogSeverity::Info;
#else
    armnn::LogSeverity level = armnn::LogSeverity::Debug;
#endif
    armnn::ConfigureLogging(true, true, level);
    armnnUtils::ConfigureLogging(boost::log::core::get().get(), true, true, level);

    // Path to an optional CSV file of test cases; when set, most other options are ignored.
    std::string testCasesFile;

    // Single-test parameters, populated by boost::program_options below.
    // Multi-valued options (names, shapes, paths, types) are comma/semicolon-separated strings
    // that are split downstream.
    std::string modelFormat;
    std::string modelPath;
    std::string inputNames;
    std::string inputTensorShapes;
    std::string inputTensorDataFilePaths;
    std::string outputNames;
    std::string inputTypes;
    std::string outputTypes;

    // Maximum allowed inference time in milliseconds; 0.0 means no threshold is applied.
    double thresholdTime = 0.0;

    size_t subgraphId = 0;

    // Build the --compute help text dynamically from the registered backends.
    const std::string backendsMessage = "Which device to run layers on by default. Possible choices: "
                                      + armnn::BackendRegistryInstance().GetBackendIdsAsString();

    po::options_description desc("Options");
    try
    {
        desc.add_options()
            ("help", "Display usage information")
            ("test-cases,t", po::value(&testCasesFile), "Path to a CSV file containing test cases to run. "
             "If set, further parameters -- with the exception of compute device and concurrency -- will be ignored, "
             "as they are expected to be defined in the file for each test in particular.")
            ("concurrent,n", po::bool_switch()->default_value(false),
             "Whether or not the test cases should be executed in parallel")
            ("model-format,f", po::value(&modelFormat)->required(),
             "armnn-binary, caffe-binary, caffe-text, onnx-binary, onnx-text, tflite-binary, tensorflow-binary or "
             "tensorflow-text.")
            ("model-path,m", po::value(&modelPath)->required(), "Path to model file, e.g. .armnn, .caffemodel, "
             ".prototxt, .tflite, .onnx")
            ("compute,c", po::value<std::vector<std::string>>()->multitoken(),
             backendsMessage.c_str())
            ("input-name,i", po::value(&inputNames),
             "Identifier of the input tensors in the network separated by comma.")
            ("subgraph-number,x", po::value<size_t>(&subgraphId)->default_value(0), "Id of the subgraph to be executed."
              "Defaults to 0")
            ("input-tensor-shape,s", po::value(&inputTensorShapes),
             "The shape of the input tensors in the network as a flat array of integers separated by comma. "
             "Several shapes can be passed separating them by semicolon. "
             "This parameter is optional, depending on the network.")
            ("input-tensor-data,d", po::value(&inputTensorDataFilePaths),
             "Path to files containing the input data as a flat array separated by whitespace. "
             "Several paths can be passed separating them by comma. ")
            ("input-type,y",po::value(&inputTypes), "The type of the input tensors in the network separated by comma. "
             "If unset, defaults to \"float\" for all defined inputs. "
             "Accepted values (float, int or qasymm8)")
            ("output-type,z",po::value(&outputTypes),
             "The type of the output tensors in the network separated by comma. "
             "If unset, defaults to \"float\" for all defined outputs. "
             "Accepted values (float, int or qasymm8).")
            ("output-name,o", po::value(&outputNames),
             "Identifier of the output tensors in the network separated by comma.")
            ("event-based-profiling,e", po::bool_switch()->default_value(false),
             "Enables built in profiler. If unset, defaults to off.")
            ("fp16-turbo-mode,h", po::bool_switch()->default_value(false), "If this option is enabled, FP32 layers, "
             "weights and biases will be converted to FP16 where the backend supports it")
            ("threshold-time,r", po::value<double>(&thresholdTime)->default_value(0.0),
             "Threshold time is the maximum allowed time for inference measured in milliseconds. If the actual "
             "inference time is greater than the threshold time, the test will fail. By default, no threshold "
             "time is used.");
    }
    catch (const std::exception& e)
    {
        // Coverity points out that default_value(...) can throw a bad_lexical_cast,
        // and that desc.add_options() can throw boost::io::too_few_args.
        // They really won't in any of these cases.
        BOOST_ASSERT_MSG(false, "Caught unexpected exception");
        BOOST_LOG_TRIVIAL(fatal) << "Fatal internal error: " << e.what();
        return EXIT_FAILURE;
    }

    // Parses the command-line.
    po::variables_map vm;
    try
    {
        po::store(po::parse_command_line(argc, argv, desc), vm);

        // Print usage and exit successfully when --help is given or no arguments at all.
        // This is checked before po::notify() so that required options do not trigger errors.
        if (CheckOption(vm, "help") || argc <= 1)
        {
            std::cout << "Executes a neural network model using the provided input tensor. " << std::endl;
            std::cout << "Prints the resulting output tensor." << std::endl;
            std::cout << std::endl;
            std::cout << desc << std::endl;
            return EXIT_SUCCESS;
        }

        // notify() runs required-option validation and writes values into the bound variables.
        po::notify(vm);
    }
    catch (const po::error& e)
    {
        std::cerr << e.what() << std::endl << std::endl;
        std::cerr << desc << std::endl;
        return EXIT_FAILURE;
    }

    // Get the value of the switch arguments.
    bool concurrent = vm["concurrent"].as<bool>();
    bool enableProfiling = vm["event-based-profiling"].as<bool>();
    bool enableFp16TurboMode = vm["fp16-turbo-mode"].as<bool>();

    // Check whether we have to load test cases from a file.
    if (CheckOption(vm, "test-cases"))
    {
        // Check that the file exists.
        if (!boost::filesystem::exists(testCasesFile))
        {
            BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" does not exist";
            return EXIT_FAILURE;
        }

        // Parse CSV file and extract test cases
        armnnUtils::CsvReader reader;
        std::vector<armnnUtils::CsvRow> testCases = reader.ParseFile(testCasesFile);

        // Check that there is at least one test case to run
        if (testCases.empty())
        {
            BOOST_LOG_TRIVIAL(fatal) << "Given file \"" << testCasesFile << "\" has no test cases";
            return EXIT_FAILURE;
        }

        // Create a single runtime shared by all test cases (and all threads in concurrent mode).
        armnn::IRuntime::CreationOptions options;
        options.m_EnableGpuProfiling = enableProfiling;

        std::shared_ptr<armnn::IRuntime> runtime(armnn::IRuntime::Create(options));

        const std::string executableName("ExecuteNetwork");

        // Check whether we need to run the test cases concurrently
        if (concurrent)
        {
            std::vector<std::future<int>> results;
            results.reserve(testCases.size());

            // Run each test case in its own thread
            for (auto& testCase : testCases)
            {
                // Prepend the executable name so each test case looks like a full argv for RunCsvTest.
                testCase.values.insert(testCase.values.begin(), executableName);
                results.push_back(std::async(std::launch::async, RunCsvTest, std::cref(testCase), std::cref(runtime),
                                             enableProfiling, enableFp16TurboMode, thresholdTime));
            }

            // Check results. Note: returning early on failure is safe because the remaining
            // std::async futures block in their destructors until their tasks complete.
            for (auto& result : results)
            {
                if (result.get() != EXIT_SUCCESS)
                {
                    return EXIT_FAILURE;
                }
            }
        }
        else
        {
            // Run tests sequentially, stopping at the first failure.
            for (auto& testCase : testCases)
            {
                testCase.values.insert(testCase.values.begin(), executableName);
                if (RunCsvTest(testCase, runtime, enableProfiling, enableFp16TurboMode, thresholdTime) != EXIT_SUCCESS)
                {
                    return EXIT_FAILURE;
                }
            }
        }

        return EXIT_SUCCESS;
    }
    else // Run single test
    {
        // Get the preferred order of compute devices. If none are specified, default to using CpuRef
        const std::string computeOption("compute");
        std::vector<std::string> computeDevicesAsStrings = CheckOption(vm, computeOption.c_str()) ?
            vm[computeOption].as<std::vector<std::string>>() :
            std::vector<std::string>({ "CpuRef" });
        std::vector<armnn::BackendId> computeDevices(computeDevicesAsStrings.begin(), computeDevicesAsStrings.end());

        // Remove duplicates from the list of compute devices.
        RemoveDuplicateDevices(computeDevices);

        // Check that the specified compute devices are valid.
        std::string invalidBackends;
        if (!CheckRequestedBackendsAreValid(computeDevices, armnn::Optional<std::string&>(invalidBackends)))
        {
            BOOST_LOG_TRIVIAL(fatal) << "The list of preferred devices contains invalid backend IDs: "
                                     << invalidBackends;
            return EXIT_FAILURE;
        }

        // Validate inter-option dependencies (e.g. options that require each other)
        // for the single-test path; CheckOptionDependencies reports problems as po::error.
        try
        {
            CheckOptionDependencies(vm);
        }
        catch (const po::error& e)
        {
            std::cerr << e.what() << std::endl << std::endl;
            std::cerr << desc << std::endl;
            return EXIT_FAILURE;
        }

        // Delegate the actual model load + inference to RunTest and propagate its exit code.
        return RunTest(modelFormat, inputTensorShapes, computeDevices, modelPath, inputNames,
                       inputTensorDataFilePaths, inputTypes, outputTypes, outputNames,
                       enableProfiling, enableFp16TurboMode, thresholdTime, subgraphId);
    }
}