//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#define LOG_TAG "ArmnnDriver"

#include "Utils.hpp"
#include "Half.hpp"

#include <armnnUtils/Permute.hpp>

#include <armnn/Utils.hpp>

#include <cassert>
#include <cerrno>
#include <cinttypes>
#include <sstream>
#include <cstdio>
#include <cstring>
#include <time.h>

using namespace android;
using namespace android::hardware;
using namespace android::hidl::memory::V1_0;

namespace armnn_driver
{
const armnn::PermutationVector g_DontPermute{};

namespace
{

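// Permutes a 4D tensor from the Android NN layout into the ArmNN layout described by 'mappings',
// copying 'dataTypeSize' bytes per element from 'input' to 'output'.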
void SwizzleAndroidNn4dTensorToArmNn(const armnn::TensorShape& inTensorShape, const void* input,
                                     void* output, size_t dataTypeSize, const armnn::PermutationVector& mappings)
{
    assert(inTensorShape.GetNumDimensions() == 4U);

    armnnUtils::Permute(armnnUtils::Permuted(inTensorShape, mappings), mappings, input, output, dataTypeSize);
}

} // anonymous namespace

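// Type-aware wrapper around the helper above: selects the element size from the tensor's data type
// before performing the permutation. Unknown data types are logged and trigger an assert.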
void SwizzleAndroidNn4dTensorToArmNn(const armnn::TensorInfo& tensor, const void* input, void* output,
                                     const armnn::PermutationVector& mappings)
{
    assert(tensor.GetNumDimensions() == 4U);

    armnn::DataType dataType = tensor.GetDataType();
    switch (dataType)
    {
        case armnn::DataType::Float16:
        case armnn::DataType::Float32:
        case armnn::DataType::QAsymmU8:
        case armnn::DataType::QSymmS8:
            SwizzleAndroidNn4dTensorToArmNn(tensor.GetShape(), input, output,
                                            armnn::GetDataTypeSize(dataType), mappings);
            break;
        default:
            ALOGW("Unknown armnn::DataType for swizzling");
            assert(0);
    }
}

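// Resolves a DataLocation to a raw pointer inside one of the request's memory pools
// (pool base address plus the location's offset).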
void* GetMemoryFromPool(DataLocation location, const std::vector<android::nn::RunTimePoolInfo>& memPools)
{
    // Find the location within the pool.
    assert(location.poolIndex < memPools.size());

    const android::nn::RunTimePoolInfo& memPool = memPools[location.poolIndex];

    // The type android::nn::RunTimePoolInfo changed between Android O and Android P:
    // "buffer" was made private and must now be accessed via the accessor method "getBuffer".
#if defined(ARMNN_ANDROID_P) || defined(ARMNN_ANDROID_Q) // Use the new Android implementation.
    uint8_t* memPoolBuffer = memPool.getBuffer();
#else // Fall back to the old Android O implementation.
    uint8_t* memPoolBuffer = memPool.buffer;
#endif

    uint8_t* memory = memPoolBuffer + location.offset;

    return memory;
}

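// Converts a NeuralNetworks HAL V1_0 operand into an armnn::TensorInfo, mapping the operand type,
// dimensions and (scale, zeroPoint) quantization parameters. Unsupported types throw UnsupportedOperand.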
armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand)
{
    armnn::DataType type;

    switch (operand.type)
    {
        case V1_0::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_0::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_0::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_0::OperandType>(operand.type);
    }

    armnn::TensorInfo ret(operand.dimensions.size(), operand.dimensions.data(), type);

    ret.SetQuantizationScale(operand.scale);
    ret.SetQuantizationOffset(operand.zeroPoint);

    return ret;
}

#ifdef ARMNN_ANDROID_NN_V1_2 // Using ::android::hardware::neuralnetworks::V1_2

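// V1_2 overload: additionally handles FLOAT16 and the symmetric quantized types, including
// per-channel (per-axis) quantization carried in the operand's channelQuant extra parameters.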
armnn::TensorInfo GetTensorInfoForOperand(const V1_2::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;

    DataType type;
    switch (operand.type)
    {
        case V1_2::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_2::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_2::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_2::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_2::OperandType>(operand.type);
    }

    TensorInfo ret(operand.dimensions.size(), operand.dimensions.data(), type);
    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant.
        BOOST_ASSERT(operand.extraParams.getDiscriminator() ==
                     V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant);

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }

    return ret;
}

#endif

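// Returns a short human-readable summary of an operand: its dimensions followed by its type.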
std::string GetOperandSummary(const V1_0::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#ifdef ARMNN_ANDROID_NN_V1_2 // Using ::android::hardware::neuralnetworks::V1_2

std::string GetOperandSummary(const V1_2::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

using DumpElementFunction = void (*)(const armnn::ConstTensor& tensor,
                                     unsigned int elementIndex,
                                     std::ofstream& fileStream);

namespace
{
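// Writes a single tensor element to the stream as text. PrintableType allows small integer types
// (e.g. uint8_t) to be printed as numbers rather than characters.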
template <typename ElementType, typename PrintableType = ElementType>
void DumpTensorElement(const armnn::ConstTensor& tensor, unsigned int elementIndex, std::ofstream& fileStream)
{
    const ElementType* elements = reinterpret_cast<const ElementType*>(tensor.GetMemoryArea());
    fileStream << static_cast<PrintableType>(elements[elementIndex]) << ",";
}

constexpr const char* MemoryLayoutString(const armnn::ConstTensor& tensor)
{
    const char* str = "";

    switch (tensor.GetNumDimensions())
    {
        case 4: { str = "(BHWC) "; break; }
        case 3: { str = "(HWC) "; break; }
        case 2: { str = "(HW) "; break; }
        default: { str = ""; break; }
    }

    return str;
}
} // namespace

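// Dumps a tensor as text to "<dumpDir>/<requestName>_<tensorName>.dump". The header records the
// element count and the shape (annotated BHWC/HWC/HW where applicable); values are then written
// one channel at a time, row by row. As an illustrative sketch only, a 1x2x2x2 Float32 tensor
// produces a file shaped roughly like:
//
//   # Number of elements 8
//   # Dimensions (BHWC) [1,2,2,2]
//   # Batch 0
//   # Channel 0
//   <row of 2 values>
//   <row of 2 values>
//   # Channel 1
//   ...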
void DumpTensor(const std::string& dumpDir,
                const std::string& requestName,
                const std::string& tensorName,
                const armnn::ConstTensor& tensor)
{
    // The dump directory must exist in advance.
    const std::string fileName = boost::str(boost::format("%1%/%2%_%3%.dump") % dumpDir % requestName % tensorName);

    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    DumpElementFunction dumpElementFunction = nullptr;

    switch (tensor.GetDataType())
    {
        case armnn::DataType::Float32:
        {
            dumpElementFunction = &DumpTensorElement<float>;
            break;
        }
        case armnn::DataType::QAsymmU8:
        {
            dumpElementFunction = &DumpTensorElement<uint8_t, uint32_t>;
            break;
        }
        case armnn::DataType::Signed32:
        {
            dumpElementFunction = &DumpTensorElement<int32_t>;
            break;
        }
        case armnn::DataType::Float16:
        {
            dumpElementFunction = &DumpTensorElement<armnn::Half>;
            break;
        }
        default:
        {
            dumpElementFunction = nullptr;
        }
    }

    if (dumpElementFunction != nullptr)
    {
        const unsigned int numDimensions = tensor.GetNumDimensions();

        const unsigned int batch = (numDimensions == 4) ? tensor.GetShape()[numDimensions - 4] : 1;

        const unsigned int height = (numDimensions >= 3)
                                    ? tensor.GetShape()[numDimensions - 3]
                                    : (numDimensions >= 2) ? tensor.GetShape()[numDimensions - 2] : 1;

        const unsigned int width = (numDimensions >= 3)
                                   ? tensor.GetShape()[numDimensions - 2]
                                   : (numDimensions >= 1) ? tensor.GetShape()[numDimensions - 1] : 0;

        const unsigned int channels = (numDimensions >= 3) ? tensor.GetShape()[numDimensions - 1] : 1;

        fileStream << "# Number of elements " << tensor.GetNumElements() << std::endl;
        fileStream << "# Dimensions " << MemoryLayoutString(tensor);
        fileStream << "[" << tensor.GetShape()[0];
        for (unsigned int d = 1; d < numDimensions; d++)
        {
            fileStream << "," << tensor.GetShape()[d];
        }
        fileStream << "]" << std::endl;

        for (unsigned int e = 0, b = 0; b < batch; ++b)
        {
            if (numDimensions >= 4)
            {
                fileStream << "# Batch " << b << std::endl;
            }
            for (unsigned int c = 0; c < channels; c++)
            {
                if (numDimensions >= 3)
                {
                    fileStream << "# Channel " << c << std::endl;
                }
                for (unsigned int h = 0; h < height; h++)
                {
                    for (unsigned int w = 0; w < width; w++, e += channels)
                    {
                        (*dumpElementFunction)(tensor, e, fileStream);
                    }
                    fileStream << std::endl;
                }
                // Rewind the element index to the start of the next channel within this batch.
                e -= channels - 1;
                if (c < channels)
                {
                    e -= ((height * width) - 1) * channels;
                }
            }
            fileStream << std::endl;
        }
        fileStream << std::endl;
    }
    else
    {
        fileStream << "Cannot dump tensor elements: Unsupported data type "
                   << static_cast<unsigned int>(tensor.GetDataType()) << std::endl;
    }

    if (!fileStream.good())
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
}

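// Writes the ArmNN profiler output for the given network to "<dumpDir>/<networkId>_profiling.json".
// Does nothing unless GPU profiling is enabled and a dump directory has been configured.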
void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 const std::string& dumpDir,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler)
{
    // Check if profiling is required.
    if (!gpuProfilingEnabled)
    {
        return;
    }

    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return;
    }

    BOOST_ASSERT(profiler);

    // Set the name of the output profiling file.
    const std::string fileName = boost::str(boost::format("%1%/%2%_%3%.json")
                                            % dumpDir
                                            % std::to_string(networkId)
                                            % "profiling");

    // Open the output file for writing.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    // Write the profiling info to a JSON file.
    profiler->Print(fileStream);
}

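// Serializes the optimized network to a Graphviz .dot file named "<dumpDir>/<timestamp>_networkgraph.dot"
// and returns that file name (or an empty string if no dump directory or timestamp is available).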
std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                        const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    // Set the name of the output .dot file.
    fileName = boost::str(boost::format("%1%/%2%_networkgraph.dot")
                          % dumpDir
                          % timestamp);

    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());

    // Write the network graph to a dot file.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return fileName;
    }

    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
    return fileName;
}

bool IsDynamicTensor(const armnn::TensorInfo& outputInfo)
{
    // Dynamic tensors have at least one 0-sized dimension.
    return outputInfo.GetNumElements() == 0u;
}

std::string GetFileTimestamp()
{
    // Returns a timestamp used to name diagnostic files (the ArmNN serialized graph and the
    // getSupportedOperations.txt files).
    timespec ts;
    int iRet = clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
    std::stringstream ss;
    if (iRet == 0)
    {
        ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec);
    }
    else
    {
        ALOGW("clock_gettime failed with errno %s : %s", std::to_string(errno).c_str(), std::strerror(errno));
    }
    return ss.str();
}

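// Renames a graph .dot file that was exported under a timestamp-based name to
// "<dumpDir>/<networkId>_networkgraph.dot", so the file is keyed by network id instead.
// Failures are logged but are not fatal.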
void RenameGraphDotFile(const std::string& oldName, const std::string& dumpDir, const armnn::NetworkId networkId)
{
    if (dumpDir.empty())
    {
        return;
    }
    if (oldName.empty())
    {
        return;
    }
    const std::string newFileName = boost::str(boost::format("%1%/%2%_networkgraph.dot")
                                               % dumpDir
                                               % std::to_string(networkId));
    int iRet = rename(oldName.c_str(), newFileName.c_str());
    if (iRet != 0)
    {
        std::stringstream ss;
        ss << "rename of [" << oldName << "] to [" << newFileName << "] failed with errno " << std::to_string(errno)
           << " : " << std::strerror(errno);
        // Pass the message through a "%s" format so any '%' characters in the file names are not
        // interpreted as format specifiers.
        ALOGW("%s", ss.str().c_str());
    }
}

} // namespace armnn_driver