//
// Copyright © 2020 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "NetworkExecutionUtils.hpp"

#include <armnnUtils/Filesystem.hpp>
#include <InferenceTest.hpp>
#include <ResolveType.hpp>

#if defined(ARMNN_SERIALIZER)
#include "armnnDeserializer/IDeserializer.hpp"
#endif
#if defined(ARMNN_TF_LITE_PARSER)
#include "armnnTfLiteParser/ITfLiteParser.hpp"
#endif
#if defined(ARMNN_ONNX_PARSER)
#include "armnnOnnxParser/IOnnxParser.hpp"
#endif

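// Parses values from the given stream into a typed vector. The non-quantized overload
// converts each parsed token directly; the quantized overload parses each token as a
// float and quantizes it with the supplied scale and offset.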
template<armnn::DataType NonQuantizedType>
auto ParseDataArray(std::istream& stream);

template<armnn::DataType QuantizedType>
auto ParseDataArray(std::istream& stream,
                    const float& quantizationScale,
                    const int32_t& quantizationOffset);

template<>
auto ParseDataArray<armnn::DataType::Float32>(std::istream& stream)
{
    return ParseArrayImpl<float>(stream, [](const std::string& s) { return std::stof(s); });
}

template<>
auto ParseDataArray<armnn::DataType::Signed32>(std::istream& stream)
{
    return ParseArrayImpl<int>(stream, [](const std::string& s) { return std::stoi(s); });
}

template<>
auto ParseDataArray<armnn::DataType::QAsymmS8>(std::istream& stream)
{
    return ParseArrayImpl<int8_t>(stream,
                                  [](const std::string& s) { return armnn::numeric_cast<int8_t>(std::stoi(s)); });
}

template<>
auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream)
{
    return ParseArrayImpl<uint8_t>(stream,
                                   [](const std::string& s) { return armnn::numeric_cast<uint8_t>(std::stoi(s)); });
}

template<>
auto ParseDataArray<armnn::DataType::QSymmS8>(std::istream& stream)
{
    return ParseArrayImpl<int8_t>(stream,
                                  [](const std::string& s) { return armnn::numeric_cast<int8_t>(std::stoi(s)); });
}

template<>
auto ParseDataArray<armnn::DataType::QAsymmS8>(std::istream& stream,
                                               const float& quantizationScale,
                                               const int32_t& quantizationOffset)
{
    return ParseArrayImpl<int8_t>(stream,
                                  [&quantizationScale, &quantizationOffset](const std::string& s)
                                  {
                                      return armnn::numeric_cast<int8_t>(
                                          armnn::Quantize<int8_t>(std::stof(s),
                                                                  quantizationScale,
                                                                  quantizationOffset));
                                  });
}

template<>
auto ParseDataArray<armnn::DataType::QAsymmU8>(std::istream& stream,
                                               const float& quantizationScale,
                                               const int32_t& quantizationOffset)
{
    return ParseArrayImpl<uint8_t>(stream,
                                   [&quantizationScale, &quantizationOffset](const std::string& s)
                                   {
                                       return armnn::numeric_cast<uint8_t>(
                                           armnn::Quantize<uint8_t>(std::stof(s),
                                                                    quantizationScale,
                                                                    quantizationOffset));
                                   });
}

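// Returns numElements zero-initialised values of the requested Arm NN data type,
// used when no input data file has been supplied.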
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
std::vector<T> GenerateDummyTensorData(unsigned int numElements)
{
    return std::vector<T>(numElements, static_cast<T>(0));
}

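// Parses a stream of unsigned integer values.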
std::vector<unsigned int> ParseArray(std::istream& stream)
{
    return ParseArrayImpl<unsigned int>(
        stream,
        [](const std::string& s) { return armnn::numeric_cast<unsigned int>(std::stoi(s)); });
}

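// Splits inputString on the given delimiter and trims each resulting token.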
std::vector<std::string> ParseStringList(const std::string& inputString, const char* delimiter)
{
    std::stringstream stream(inputString);
    return ParseArrayImpl<std::string>(stream, [](const std::string& s) {
        return armnn::stringUtils::StringTrimCopy(s); }, delimiter);
}

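// TensorPrinter prints the values of a tensor to stdout and, when an output file is
// specified, also writes them to that file. uint8 outputs can optionally be dequantized
// using the scale and offset taken from the binding's TensorInfo.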
TensorPrinter::TensorPrinter(const std::string& binding,
                             const armnn::TensorInfo& info,
                             const std::string& outputTensorFile,
                             bool dequantizeOutput)
    : m_OutputBinding(binding)
    , m_Scale(info.GetQuantizationScale())
    , m_Offset(info.GetQuantizationOffset())
    , m_OutputTensorFile(outputTensorFile)
    , m_DequantizeOutput(dequantizeOutput) {}

void TensorPrinter::operator()(const std::vector<float>& values)
{
    ForEachValue(values, [](float value)
    {
        printf("%f ", value);
    });
    WriteToFile(values);
}

void TensorPrinter::operator()(const std::vector<uint8_t>& values)
{
    if (m_DequantizeOutput)
    {
        auto& scale = m_Scale;
        auto& offset = m_Offset;
        std::vector<float> dequantizedValues;
        ForEachValue(values, [&scale, &offset, &dequantizedValues](uint8_t value)
        {
            auto dequantizedValue = armnn::Dequantize(value, scale, offset);
            printf("%f ", dequantizedValue);
            dequantizedValues.push_back(dequantizedValue);
        });
        WriteToFile(dequantizedValues);
    }
    else
    {
        const std::vector<int> intValues(values.begin(), values.end());
        operator()(intValues);
    }
}

void TensorPrinter::operator()(const std::vector<int8_t>& values)
{
    ForEachValue(values, [](int8_t value)
    {
        printf("%d ", value);
    });
    WriteToFile(values);
}

void TensorPrinter::operator()(const std::vector<int>& values)
{
    ForEachValue(values, [](int value)
    {
        printf("%d ", value);
    });
    WriteToFile(values);
}

template<typename Container, typename Delegate>
void TensorPrinter::ForEachValue(const Container& c, Delegate delegate)
{
    std::cout << m_OutputBinding << ": ";
    for (const auto& value : c)
    {
        delegate(value);
    }
    printf("\n");
}

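// Writes the values to m_OutputTensorFile (truncating any existing content), or logs a
// message if the file cannot be opened. Does nothing when no output file was specified.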
template<typename T>
void TensorPrinter::WriteToFile(const std::vector<T>& values)
{
    if (!m_OutputTensorFile.empty())
    {
        std::ofstream outputTensorFile;
        outputTensorFile.open(m_OutputTensorFile, std::ofstream::out | std::ofstream::trunc);
        if (outputTensorFile.is_open())
        {
            outputTensorFile << m_OutputBinding << ": ";
            std::copy(values.begin(), values.end(), std::ostream_iterator<T>(outputTensorFile, " "));
        }
        else
        {
            ARMNN_LOG(info) << "Output Tensor File: " << m_OutputTensorFile << " could not be opened!";
        }
        outputTensorFile.close();
    }
}

using TContainer =
    mapbox::util::variant<std::vector<float>, std::vector<int>, std::vector<unsigned char>, std::vector<int8_t>>;
using QuantizationParams = std::pair<float, int32_t>;

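// Fills tensorData either from the given data file or with zero-valued dummy data.
// The element type is selected by dataTypeStr; "float" data is quantized to QAsymmU8
// when quantization parameters are provided. Throws armnn::Exception for an
// unsupported data type string.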
void PopulateTensorWithData(TContainer& tensorData,
                            unsigned int numElements,
                            const std::string& dataTypeStr,
                            const armnn::Optional<QuantizationParams>& qParams,
                            const armnn::Optional<std::string>& dataFile)
{
    const bool readFromFile = dataFile.has_value() && !dataFile.value().empty();
    const bool quantizeData = qParams.has_value();

    std::ifstream inputTensorFile;
    if (readFromFile)
    {
        inputTensorFile = std::ifstream(dataFile.value());
    }

    if (dataTypeStr.compare("float") == 0)
    {
        if (quantizeData)
        {
            const float qScale = qParams.value().first;
            const int qOffset = qParams.value().second;

            tensorData = readFromFile ?
                         ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile, qScale, qOffset) :
                         GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
        }
        else
        {
            tensorData = readFromFile ?
                         ParseDataArray<armnn::DataType::Float32>(inputTensorFile) :
                         GenerateDummyTensorData<armnn::DataType::Float32>(numElements);
        }
    }
    else if (dataTypeStr.compare("int") == 0)
    {
        tensorData = readFromFile ?
                     ParseDataArray<armnn::DataType::Signed32>(inputTensorFile) :
                     GenerateDummyTensorData<armnn::DataType::Signed32>(numElements);
    }
    else if (dataTypeStr.compare("qsymms8") == 0)
    {
        tensorData = readFromFile ?
                     ParseDataArray<armnn::DataType::QSymmS8>(inputTensorFile) :
                     GenerateDummyTensorData<armnn::DataType::QSymmS8>(numElements);
    }
    else if (dataTypeStr.compare("qasymm8") == 0 || dataTypeStr.compare("qasymmu8") == 0)
    {
        tensorData = readFromFile ?
                     ParseDataArray<armnn::DataType::QAsymmU8>(inputTensorFile) :
                     GenerateDummyTensorData<armnn::DataType::QAsymmU8>(numElements);
    }
    else if (dataTypeStr.compare("qasymms8") == 0)
    {
        tensorData = readFromFile ?
                     ParseDataArray<armnn::DataType::QAsymmS8>(inputTensorFile) :
                     GenerateDummyTensorData<armnn::DataType::QAsymmS8>(numElements);
    }
    else
    {
        std::string errorMessage = "Unsupported tensor data type " + dataTypeStr;
        ARMNN_LOG(fatal) << errorMessage;

        inputTensorFile.close();
        throw armnn::Exception(errorMessage);
    }

    inputTensorFile.close();
}

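// Returns true if the given path exists and, when expectFile is set, refers to a regular file.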
bool ValidatePath(const std::string& file, const bool expectFile)
{
    if (!fs::exists(file))
    {
        std::cerr << "Given file path '" << file << "' does not exist" << std::endl;
        return false;
    }
    if (!fs::is_regular_file(file) && expectFile)
    {
        std::cerr << "Given file path '" << file << "' is not a regular file" << std::endl;
        return false;
    }
    return true;
}

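// Returns true only if every path in fileVec passes ValidatePath.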
bool ValidatePaths(const std::vector<std::string>& fileVec, const bool expectFile)
{
    bool allPathsValid = true;
    for (auto const& file : fileVec)
    {
        if (!ValidatePath(file, expectFile))
        {
            allPathsValid = false;
        }
    }
    return allPathsValid;
}