//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
6#include "NetworkExecutionUtils.hpp"
7
8#include <Filesystem.hpp>
9#include <InferenceTest.hpp>
10#include <ResolveType.hpp>
11
12#if defined(ARMNN_SERIALIZER)
13#include "armnnDeserializer/IDeserializer.hpp"
14#endif
15#if defined(ARMNN_CAFFE_PARSER)
16#include "armnnCaffeParser/ICaffeParser.hpp"
17#endif
18#if defined(ARMNN_TF_PARSER)
19#include "armnnTfParser/ITfParser.hpp"
20#endif
21#if defined(ARMNN_TF_LITE_PARSER)
22#include "armnnTfLiteParser/ITfLiteParser.hpp"
23#endif
24#if defined(ARMNN_ONNX_PARSER)
25#include "armnnOnnxParser/IOnnxParser.hpp"
26#endif
27
// ParseDataArray reads whitespace-separated values from a stream into a typed
// std::vector (implemented via the specializations below, which delegate to
// ParseArrayImpl). Two overload families exist:
//  - non-quantized types: parse values as-is;
//  - quantized types: parse float text and quantize each value with the given
//    scale/offset.
template<armnn::DataType NonQuantizedType>
auto ParseDataArray(std::istream& stream);

template<armnn::DataType QuantizedType>
auto ParseDataArray(std::istream& stream,
                    const float& quantizationScale,
                    const int32_t& quantizationOffset);
35
36template<>
37auto ParseDataArray<armnn::DataType::Float32>(std::istream& stream)
38{
39 return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
40}
41
42template<>
43auto ParseDataArray<armnn::DataType::Signed32>(std::istream& stream)
44{
45 return ParseArrayImpl<int>(stream, [](const std::string& s) { return std::stoi(s); });
46}
47
48template<>
49auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
50{
51 return ParseArrayImpl<uint8_t>(stream,
52 [](const std::string& s) { return armnn::numeric_cast<uint8_t>(std::stoi(s)); });
53}
54
55template<>
56auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
57 const float& quantizationScale,
58 const int32_t& quantizationOffset)
59{
60 return ParseArrayImpl<uint8_t>(stream,
61 [&quantizationScale, &quantizationOffset](const std::string& s)
62 {
63 return armnn::numeric_cast<uint8_t>(
64 armnn::Quantize<uint8_t>(std::stof(s),
65 quantizationScale,
66 quantizationOffset));
67 });
68}
69
70template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
71std::vector<T> GenerateDummyTensorData(unsigned int numElements)
72{
73 return std::vector<T>(numElements, static_cast<T>(0));
74}
75
76
77std::vector<unsigned int> ParseArray(std::istream& stream)
78{
79 return ParseArrayImpl<unsigned int>(
80 stream,
81 [](const std::string& s) { return armnn::numeric_cast<unsigned int>(std::stoi(s)); });
82}
83
84std::vector<std::string> ParseStringList(const std::string& inputString, const char* delimiter)
85{
86 std::stringstream stream(inputString);
87 return ParseArrayImpl<std::string>(stream, [](const std::string& s) {
88 return armnn::stringUtils::StringTrimCopy(s); }, delimiter);
89}
90
91
// Construct a printer for one output tensor: remembers the binding name, the
// tensor's quantization parameters (for optional dequantization on print), the
// optional dump-file path, and whether uint8 outputs should be dequantized.
TensorPrinter::TensorPrinter(const std::string& binding,
                             const armnn::TensorInfo& info,
                             const std::string& outputTensorFile,
                             bool dequantizeOutput)
    : m_OutputBinding(binding)
    , m_Scale(info.GetQuantizationScale())
    , m_Offset(info.GetQuantizationOffset())
    , m_OutputTensorFile(outputTensorFile)
    , m_DequantizeOutput(dequantizeOutput) {}
101
102void TensorPrinter::operator()(const std::vector<float>& values)
103{
104 ForEachValue(values, [](float value)
105 {
106 printf("%f ", value);
107 });
108 WriteToFile(values);
109}
110
111void TensorPrinter::operator()(const std::vector<uint8_t>& values)
112{
113 if(m_DequantizeOutput)
114 {
115 auto& scale = m_Scale;
116 auto& offset = m_Offset;
117 std::vector<float> dequantizedValues;
118 ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
119 {
120 auto dequantizedValue = armnn::Dequantize(value, scale, offset);
121 printf("%f ", dequantizedValue);
122 dequantizedValues.push_back(dequantizedValue);
123 });
124 WriteToFile(dequantizedValues);
125 }
126 else
127 {
128 const std::vector<int> intValues(values.begin(), values.end());
129 operator()(intValues);
130 }
131}
132
133void TensorPrinter::operator()(const std::vector<int>& values)
134{
135 ForEachValue(values, [](int value)
136 {
137 printf("%d ", value);
138 });
139 WriteToFile(values);
140}
141
142template<typename Container, typename Delegate>
143void TensorPrinter::ForEachValue(const Container& c, Delegate delegate)
144{
145 std::cout << m_OutputBinding << ": ";
146 for (const auto& value : c)
147 {
148 delegate(value);
149 }
150 printf("\n");
151}
152
153template<typename T>
154void TensorPrinter::WriteToFile(const std::vector<T>& values)
155{
156 if (!m_OutputTensorFile.empty())
157 {
158 std::ofstream outputTensorFile;
159 outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
160 if (outputTensorFile.is_open())
161 {
162 outputTensorFile << m_OutputBinding << ": ";
163 std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
164 }
165 else
166 {
167 ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
168 }
169 outputTensorFile.close();
170 }
171}
172
// A TContainer holds the data of a single tensor as one of the supported
// element types (float, int, or raw uint8).
using TContainer = mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>>;
// (scale, offset) pair used when quantizing float input data to QAsymmU8.
using QuantizationParams = std::pair<float, int32_t>;
175
176void PopulateTensorWithData(TContainer& tensorData,
177 unsigned int numElements,
178 const std::string& dataTypeStr,
179 const armnn::Optional<QuantizationParams>& qParams,
180 const armnn::Optional<std::string>& dataFile)
181{
182 const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
183 const bool quantizeData = qParams.has_value();
184
185 std::ifstream inputTensorFile;
186 if (readFromFile)
187 {
188 inputTensorFile = std::ifstream(dataFile.value());
189 }
190
191 if (dataTypeStr.compare("float") == 0)
192 {
193 if (quantizeData)
194 {
195 const float qScale = qParams.value().first;
196 const int qOffset = qParams.value().second;
197
198 tensorData = readFromFile ?
199 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
200 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
201 }
202 else
203 {
204 tensorData = readFromFile ?
205 ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
206 GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
207 }
208 }
209 else if (dataTypeStr.compare("int") == 0)
210 {
211 tensorData = readFromFile ?
212 ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
213 GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
214 }
215 else if (dataTypeStr.compare("qasymm8") == 0)
216 {
217 tensorData = readFromFile ?
218 ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
219 GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
220 }
221 else
222 {
223 std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
224 ARMNN_LOG(fatal) << errorMessage;
225
226 inputTensorFile.close();
227 throw armnn::Exception(errorMessage);
228 }
229
230 inputTensorFile.close();
231}
232
233bool ValidatePath(const std::string& file, const bool expectFile)
234{
235 if (!fs::exists(file))
236 {
237 std::cerr << "Given file path '" << file << "' does not exist" << std::endl;
238 return false;
239 }
240 if (!fs::is_regular_file(file) && expectFile)
241 {
242 std::cerr << "Given file path '" << file << "' is not a regular file" << std::endl;
243 return false;
244 }
245 return true;
246}
247
248bool ValidatePaths(const std::vector<std::string>& fileVec, const bool expectFile)
249{
250 bool allPathsValid = true;
251 for (auto const& file : fileVec)
252 {
253 if(!ValidatePath(file, expectFile))
254 {
255 allPathsValid = false;
256 }
257 }
258 return allPathsValid;
259}
260
261
262