blob: fd888e2137dd3295173a853f05851e972ca5bdc8 [file] [log] [blame]
Aron Virginas-Tar7cf0eaa2019-01-24 17:05:36 +00001//
telsoa014fcda012018-03-09 14:13:49 +00002// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
5#include "InferenceTest.hpp"
6
telsoa014fcda012018-03-09 14:13:49 +00007#include <boost/algorithm/string.hpp>
8#include <boost/numeric/conversion/cast.hpp>
telsoa014fcda012018-03-09 14:13:49 +00009#include <boost/filesystem/path.hpp>
10#include <boost/assert.hpp>
11#include <boost/format.hpp>
12#include <boost/program_options.hpp>
13#include <boost/filesystem/operations.hpp>
14
15#include <fstream>
16#include <iostream>
17#include <iomanip>
18#include <array>
19#include <chrono>
20
21using namespace std;
22using namespace std::chrono;
23using namespace armnn::test;
24
25namespace armnn
26{
27namespace test
28{
29
// Variant able to hold any one of the tensor-data container types a model may produce/consume.
using TContainer = boost::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
telsoa01c577f2c2018-08-31 09:22:23 +010031
telsoa014fcda012018-03-09 14:13:49 +000032template <typename TTestCaseDatabase, typename TModel>
33ClassifierTestCase<TTestCaseDatabase, TModel>::ClassifierTestCase(
34 int& numInferencesRef,
35 int& numCorrectInferencesRef,
36 const std::vector<unsigned int>& validationPredictions,
37 std::vector<unsigned int>* validationPredictionsOut,
38 TModel& model,
39 unsigned int testCaseId,
40 unsigned int label,
41 std::vector<typename TModel::DataType> modelInput)
Ferran Balaguerc602f292019-02-08 17:09:55 +000042 : InferenceModelTestCase<TModel>(
43 model, testCaseId, std::vector<TContainer>{ modelInput }, { model.GetOutputSize() })
telsoa014fcda012018-03-09 14:13:49 +000044 , m_Label(label)
telsoa01c577f2c2018-08-31 09:22:23 +010045 , m_QuantizationParams(model.GetQuantizationParams())
telsoa014fcda012018-03-09 14:13:49 +000046 , m_NumInferencesRef(numInferencesRef)
47 , m_NumCorrectInferencesRef(numCorrectInferencesRef)
48 , m_ValidationPredictions(validationPredictions)
49 , m_ValidationPredictionsOut(validationPredictionsOut)
50{
51}
52
Derek Lambertiac737602019-05-16 16:33:00 +010053struct ClassifierResultProcessor : public boost::static_visitor<>
54{
55 using ResultMap = std::map<float,int>;
56
57 ClassifierResultProcessor(float scale, int offset)
58 : m_Scale(scale)
59 , m_Offset(offset)
60 {}
61
62 void operator()(const std::vector<float>& values)
63 {
64 SortPredictions(values, [](float value)
65 {
66 return value;
67 });
68 }
69
70 void operator()(const std::vector<uint8_t>& values)
71 {
72 auto& scale = m_Scale;
73 auto& offset = m_Offset;
74 SortPredictions(values, [&scale, &offset](uint8_t value)
75 {
76 return armnn::Dequantize(value, scale, offset);
77 });
78 }
79
80 void operator()(const std::vector<int>& values)
81 {
82 BOOST_ASSERT_MSG(false, "Non-float predictions output not supported.");
83 }
84
85 ResultMap& GetResultMap() { return m_ResultMap; }
86
87private:
88 template<typename Container, typename Delegate>
89 void SortPredictions(const Container& c, Delegate delegate)
90 {
91 int index = 0;
92 for (const auto& value : c)
93 {
94 int classification = index++;
95 // Take the first class with each probability
96 // This avoids strange results when looping over batched results produced
97 // with identical test data.
98 ResultMap::iterator lb = m_ResultMap.lower_bound(value);
99
100 if (lb == m_ResultMap.end() || !m_ResultMap.key_comp()(value, lb->first))
101 {
102 // If the key is not already in the map, insert it.
103 m_ResultMap.insert(lb, ResultMap::value_type(delegate(value), classification));
104 }
105 }
106 }
107
108 ResultMap m_ResultMap;
109
110 float m_Scale=0.0f;
111 int m_Offset=0;
112};
113
/// Checks the inference output against the expected label (and optional validation
/// file), logs the top-5 predictions, and updates the shared accuracy counters.
///
/// @param params Common test options; m_IterationCount == 0 means only the default
///               test cases are being run, each of which must classify correctly.
/// @return TestCaseResult::Failed on a wrong prediction, TestCaseResult::Ok otherwise.
template <typename TTestCaseDatabase, typename TModel>
TestCaseResult ClassifierTestCase<TTestCaseDatabase, TModel>::ProcessResult(const InferenceTestOptions& params)
{
    // First (and only) output tensor of the inference just run.
    auto& output = this->GetOutputs()[0];
    const auto testCaseId = this->GetTestCaseId();

    // Builds a probability -> class map from the raw output (dequantizing uint8 outputs).
    ClassifierResultProcessor resultProcessor(m_QuantizationParams.first, m_QuantizationParams.second);
    boost::apply_visitor(resultProcessor, output);

    ARMNN_LOG(info) << "= Prediction values for test #" << testCaseId;
    // The map is ordered ascending by probability, so reverse iteration yields the top entries.
    auto it = resultProcessor.GetResultMap().rbegin();
    for (int i=0; i<5 && it != resultProcessor.GetResultMap().rend(); ++i)
    {
        ARMNN_LOG(info) << "Top(" << (i+1) << ") prediction is " << it->second <<
            " with value: " << (it->first);
        ++it;
    }

    // The predicted class is the index of the maximum element, whatever the element type.
    unsigned int prediction = 0;
    boost::apply_visitor([&](auto&& value)
                         {
                             prediction = boost::numeric_cast<unsigned int>(
                                 std::distance(value.begin(), std::max_element(value.begin(), value.end())));
                         },
                         output);

    // If we're just running the defaultTestCaseIds, each one must be classified correctly.
    if (params.m_IterationCount == 0 && prediction != m_Label)
    {
        ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
            " is incorrect (should be " << m_Label << ")";
        return TestCaseResult::Failed;
    }

    // If a validation file was provided as input, it checks that the prediction matches.
    // NOTE(review): testCaseId is assumed to be a valid index into m_ValidationPredictions
    // when the latter is non-empty - confirm against how the validation file is produced.
    if (!m_ValidationPredictions.empty() && prediction != m_ValidationPredictions[testCaseId])
    {
        ARMNN_LOG(error) << "Prediction for test case " << testCaseId << " (" << prediction << ")" <<
            " doesn't match the prediction in the validation file (" << m_ValidationPredictions[testCaseId] << ")";
        return TestCaseResult::Failed;
    }

    // If a validation file was requested as output, it stores the predictions.
    if (m_ValidationPredictionsOut)
    {
        m_ValidationPredictionsOut->push_back(prediction);
    }

    // Updates accuracy stats.
    m_NumInferencesRef++;
    if (prediction == m_Label)
    {
        m_NumCorrectInferencesRef++;
    }

    return TestCaseResult::Ok;
}
171
/// Constructs a provider that lazily builds the model and database from the
/// supplied factory callables once command-line options have been processed.
///
/// @param constructDatabase Callable creating the test-data database (invoked in ProcessCommandLineOptions).
/// @param constructModel    Callable creating the inference model (invoked in ProcessCommandLineOptions).
template <typename TDatabase, typename InferenceModel>
template <typename TConstructDatabaseCallable, typename TConstructModelCallable>
ClassifierTestCaseProvider<TDatabase, InferenceModel>::ClassifierTestCaseProvider(
    TConstructDatabaseCallable constructDatabase, TConstructModelCallable constructModel)
    // NOTE(review): members are listed model-before-database here; actual
    // initialization follows declaration order in the class - confirm the
    // declaration order matches to avoid a -Wreorder warning.
    : m_ConstructModel(constructModel)
    , m_ConstructDatabase(constructDatabase)
    , m_NumInferences(0)
    , m_NumCorrectInferences(0)
{
}
182
183template <typename TDatabase, typename InferenceModel>
184void ClassifierTestCaseProvider<TDatabase, InferenceModel>::AddCommandLineOptions(
185 boost::program_options::options_description& options)
186{
187 namespace po = boost::program_options;
188
189 options.add_options()
190 ("validation-file-in", po::value<std::string>(&m_ValidationFileIn)->default_value(""),
191 "Reads expected predictions from the given file and confirms they match the actual predictions.")
192 ("validation-file-out", po::value<std::string>(&m_ValidationFileOut)->default_value(""),
193 "Predictions are saved to the given file for later use via --validation-file-in.")
194 ("data-dir,d", po::value<std::string>(&m_DataDir)->required(),
195 "Path to directory containing test data");
196
197 InferenceModel::AddCommandLineOptions(options, m_ModelCommandLineOptions);
198}
199
200template <typename TDatabase, typename InferenceModel>
Matthew Bentham3e68b972019-04-09 13:10:46 +0100201bool ClassifierTestCaseProvider<TDatabase, InferenceModel>::ProcessCommandLineOptions(
202 const InferenceTestOptions& commonOptions)
telsoa014fcda012018-03-09 14:13:49 +0000203{
204 if (!ValidateDirectory(m_DataDir))
205 {
206 return false;
207 }
208
209 ReadPredictions();
210
Matthew Bentham3e68b972019-04-09 13:10:46 +0100211 m_Model = m_ConstructModel(commonOptions, m_ModelCommandLineOptions);
telsoa014fcda012018-03-09 14:13:49 +0000212 if (!m_Model)
213 {
214 return false;
215 }
216
telsoa01c577f2c2018-08-31 09:22:23 +0100217 m_Database = std::make_unique<TDatabase>(m_ConstructDatabase(m_DataDir.c_str(), *m_Model));
telsoa014fcda012018-03-09 14:13:49 +0000218 if (!m_Database)
219 {
220 return false;
221 }
222
223 return true;
224}
225
226template <typename TDatabase, typename InferenceModel>
227std::unique_ptr<IInferenceTestCase>
228ClassifierTestCaseProvider<TDatabase, InferenceModel>::GetTestCase(unsigned int testCaseId)
229{
230 std::unique_ptr<typename TDatabase::TTestCaseData> testCaseData = m_Database->GetTestCaseData(testCaseId);
231 if (testCaseData == nullptr)
232 {
233 return nullptr;
234 }
235
236 return std::make_unique<ClassifierTestCase<TDatabase, InferenceModel>>(
237 m_NumInferences,
238 m_NumCorrectInferences,
239 m_ValidationPredictions,
240 m_ValidationFileOut.empty() ? nullptr : &m_ValidationPredictionsOut,
241 *m_Model,
242 testCaseId,
243 testCaseData->m_Label,
244 std::move(testCaseData->m_InputImage));
245}
246
247template <typename TDatabase, typename InferenceModel>
248bool ClassifierTestCaseProvider<TDatabase, InferenceModel>::OnInferenceTestFinished()
249{
250 const double accuracy = boost::numeric_cast<double>(m_NumCorrectInferences) /
251 boost::numeric_cast<double>(m_NumInferences);
Derek Lamberti08446972019-11-26 16:38:31 +0000252 ARMNN_LOG(info) << std::fixed << std::setprecision(3) << "Overall accuracy: " << accuracy;
telsoa014fcda012018-03-09 14:13:49 +0000253
telsoa01c577f2c2018-08-31 09:22:23 +0100254 // If a validation file was requested as output, the predictions are saved to it.
telsoa014fcda012018-03-09 14:13:49 +0000255 if (!m_ValidationFileOut.empty())
256 {
257 std::ofstream validationFileOut(m_ValidationFileOut.c_str(), std::ios_base::trunc | std::ios_base::out);
258 if (validationFileOut.good())
259 {
260 for (const unsigned int prediction : m_ValidationPredictionsOut)
261 {
262 validationFileOut << prediction << std::endl;
263 }
264 }
265 else
266 {
Derek Lamberti08446972019-11-26 16:38:31 +0000267 ARMNN_LOG(error) << "Failed to open output validation file: " << m_ValidationFileOut;
telsoa014fcda012018-03-09 14:13:49 +0000268 return false;
269 }
270 }
271
272 return true;
273}
274
275template <typename TDatabase, typename InferenceModel>
276void ClassifierTestCaseProvider<TDatabase, InferenceModel>::ReadPredictions()
277{
telsoa01c577f2c2018-08-31 09:22:23 +0100278 // Reads the expected predictions from the input validation file (if provided).
telsoa014fcda012018-03-09 14:13:49 +0000279 if (!m_ValidationFileIn.empty())
280 {
281 std::ifstream validationFileIn(m_ValidationFileIn.c_str(), std::ios_base::in);
282 if (validationFileIn.good())
283 {
284 while (!validationFileIn.eof())
285 {
286 unsigned int i;
287 validationFileIn >> i;
288 m_ValidationPredictions.emplace_back(i);
289 }
290 }
291 else
292 {
293 throw armnn::Exception(boost::str(boost::format("Failed to open input validation file: %1%")
294 % m_ValidationFileIn));
295 }
296 }
297}
298
299template<typename TConstructTestCaseProvider>
300int InferenceTestMain(int argc,
301 char* argv[],
302 const std::vector<unsigned int>& defaultTestCaseIds,
303 TConstructTestCaseProvider constructTestCaseProvider)
304{
telsoa01c577f2c2018-08-31 09:22:23 +0100305 // Configures logging for both the ARMNN library and this test program.
telsoa014fcda012018-03-09 14:13:49 +0000306#ifdef NDEBUG
307 armnn::LogSeverity level = armnn::LogSeverity::Info;
308#else
309 armnn::LogSeverity level = armnn::LogSeverity::Debug;
310#endif
311 armnn::ConfigureLogging(true, true, level);
telsoa014fcda012018-03-09 14:13:49 +0000312
313 try
314 {
315 std::unique_ptr<IInferenceTestCaseProvider> testCaseProvider = constructTestCaseProvider();
316 if (!testCaseProvider)
317 {
318 return 1;
319 }
320
321 InferenceTestOptions inferenceTestOptions;
322 if (!ParseCommandLine(argc, argv, *testCaseProvider, inferenceTestOptions))
323 {
324 return 1;
325 }
326
327 const bool success = InferenceTest(inferenceTestOptions, defaultTestCaseIds, *testCaseProvider);
328 return success ? 0 : 1;
329 }
330 catch (armnn::Exception const& e)
331 {
Derek Lamberti08446972019-11-26 16:38:31 +0000332 ARMNN_LOG(fatal) << "Armnn Error: " << e.what();
telsoa014fcda012018-03-09 14:13:49 +0000333 return 1;
334 }
335}
336
//
// This function allows us to create a classifier inference test based on:
//  - a model file name
//  - which can be a binary or a text file for protobuf formats
//  - an input tensor name
//  - an output tensor name
//  - a set of test case ids
//  - a callback method which creates an object that can return images
//    called 'Database' in these tests
//  - and an input tensor shape
//
template<typename TDatabase,
         typename TParser,
         typename TConstructDatabaseCallable>
int ClassifierInferenceTestMain(int argc,
                                char* argv[],
                                const char* modelFilename,
                                bool isModelBinary,
                                const char* inputBindingName,
                                const char* outputBindingName,
                                const std::vector<unsigned int>& defaultTestCaseIds,
                                TConstructDatabaseCallable constructDatabase,
                                const armnn::TensorShape* inputTensorShape)

{
    BOOST_ASSERT(modelFilename);
    BOOST_ASSERT(inputBindingName);
    BOOST_ASSERT(outputBindingName);

    // Delegates to the generic entry point, supplying a provider factory.
    // The outer lambda captures by value ([=]) because it is invoked after this
    // function's arguments would otherwise have gone out of scope.
    return InferenceTestMain(argc, argv, defaultTestCaseIds,
        [=]
        ()
        {
            using InferenceModel = InferenceModel<TParser, typename TDatabase::DataType>;
            using TestCaseProvider = ClassifierTestCaseProvider<TDatabase, InferenceModel>;

            // The model-construction callback builds an InferenceModel from the
            // parsed command-line options, or returns null on a bad model dir.
            return make_unique<TestCaseProvider>(constructDatabase,
                [&]
                (const InferenceTestOptions &commonOptions,
                 typename InferenceModel::CommandLineOptions modelOptions)
                {
                    if (!ValidateDirectory(modelOptions.m_ModelDir))
                    {
                        return std::unique_ptr<InferenceModel>();
                    }

                    typename InferenceModel::Params modelParams;
                    modelParams.m_ModelPath = modelOptions.m_ModelDir + modelFilename;
                    modelParams.m_InputBindings  = { inputBindingName };
                    modelParams.m_OutputBindings = { outputBindingName };

                    // The input shape is optional; when absent the parser infers it.
                    if (inputTensorShape)
                    {
                        modelParams.m_InputShapes.push_back(*inputTensorShape);
                    }

                    modelParams.m_IsModelBinary = isModelBinary;
                    modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
                    modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
                    modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;

                    return std::make_unique<InferenceModel>(modelParams,
                                                            commonOptions.m_EnableProfiling,
                                                            commonOptions.m_DynamicBackendsPath);
                });
        });
}
404
405} // namespace test
406} // namespace armnn