blob: 314ac25d450ffacab6982028e4e80d8d5cfc259d [file] [log] [blame]
telsoa015307bc12018-03-09 13:51:08 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beck93e48982018-09-05 13:05:09 +01003// SPDX-License-Identifier: MIT
telsoa015307bc12018-03-09 13:51:08 +00004//
5
6#define LOG_TAG "ArmnnDriver"
7
#include "Utils.hpp"

#include <Half.hpp>
#include <Permute.hpp>

#include <cassert>
#include <cerrno>
#include <cinttypes>
#include <cstdio>
#include <cstring>
#include <sstream>
#include <time.h>
19
20
telsoa015307bc12018-03-09 13:51:08 +000021
22using namespace android;
telsoa01ce3e84a2018-08-31 09:31:35 +010023using namespace android::hardware;
telsoa015307bc12018-03-09 13:51:08 +000024using namespace android::hidl::memory::V1_0;
25
26namespace armnn_driver
27{
28const armnn::PermutationVector g_DontPermute{};
29
30namespace
31{
32
33template <typename T>
34void SwizzleAndroidNn4dTensorToArmNn(const armnn::TensorShape& inTensorShape, const void* input,
35 void* output, const armnn::PermutationVector& mappings)
36{
37 const auto inputData = static_cast<const T*>(input);
38 const auto outputData = static_cast<T*>(output);
39
Matteo Martincigh2c444fc2019-01-07 10:18:47 +000040 armnnUtils::Permute(armnnUtils::Permuted(inTensorShape, mappings), mappings, inputData, outputData, sizeof(T));
telsoa015307bc12018-03-09 13:51:08 +000041}
42
43} // anonymous namespace
44
45void SwizzleAndroidNn4dTensorToArmNn(const armnn::TensorInfo& tensor, const void* input, void* output,
46 const armnn::PermutationVector& mappings)
47{
48 assert(tensor.GetNumDimensions() == 4U);
49
50 switch(tensor.GetDataType())
51 {
Mike Kelly3c673942019-07-25 09:26:06 +010052 case armnn::DataType::Float16:
53 SwizzleAndroidNn4dTensorToArmNn<armnn::Half>(tensor.GetShape(), input, output, mappings);
54 break;
telsoa015307bc12018-03-09 13:51:08 +000055 case armnn::DataType::Float32:
56 SwizzleAndroidNn4dTensorToArmNn<float>(tensor.GetShape(), input, output, mappings);
57 break;
58 case armnn::DataType::QuantisedAsymm8:
59 SwizzleAndroidNn4dTensorToArmNn<uint8_t>(tensor.GetShape(), input, output, mappings);
60 break;
61 default:
62 ALOGW("Unknown armnn::DataType for swizzling");
63 assert(0);
64 }
65}
66
/// Resolves a DataLocation (pool index + byte offset) to a raw pointer inside
/// one of the runtime memory pools.
/// @param location pool index and offset describing where the data lives
/// @param memPools the request's memory pools; poolIndex must be in range
/// @return pointer to the first byte of the referenced region
void* GetMemoryFromPool(DataLocation location, const std::vector<android::nn::RunTimePoolInfo>& memPools)
{
    // find the location within the pool
    assert(location.poolIndex < memPools.size());

    const android::nn::RunTimePoolInfo& memPool = memPools[location.poolIndex];

    // Type android::nn::RunTimePoolInfo has changed between Android O and Android P, where
    // "buffer" has been made private and must be accessed via the accessor method "getBuffer".
#if defined(ARMNN_ANDROID_P) || defined(ARMNN_ANDROID_Q) // Use the new Android implementation.
    uint8_t* memPoolBuffer = memPool.getBuffer();
#else // Fallback to the old Android O implementation.
    uint8_t* memPoolBuffer = memPool.buffer;
#endif

    // Byte-wise offset into the pool's backing buffer.
    uint8_t* memory = memPoolBuffer + location.offset;

    return memory;
}
86
Matthew Bentham912b3622019-05-03 15:49:14 +010087armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand)
telsoa015307bc12018-03-09 13:51:08 +000088{
89 armnn::DataType type;
90
91 switch (operand.type)
92 {
Matthew Bentham912b3622019-05-03 15:49:14 +010093 case V1_0::OperandType::TENSOR_FLOAT32:
telsoa015307bc12018-03-09 13:51:08 +000094 type = armnn::DataType::Float32;
95 break;
Matthew Bentham912b3622019-05-03 15:49:14 +010096 case V1_0::OperandType::TENSOR_QUANT8_ASYMM:
telsoa015307bc12018-03-09 13:51:08 +000097 type = armnn::DataType::QuantisedAsymm8;
98 break;
Matthew Bentham912b3622019-05-03 15:49:14 +010099 case V1_0::OperandType::TENSOR_INT32:
telsoa015307bc12018-03-09 13:51:08 +0000100 type = armnn::DataType::Signed32;
101 break;
102 default:
Mike Kellyb5fdf382019-06-11 16:35:25 +0100103 throw UnsupportedOperand<V1_0::OperandType>(operand.type);
telsoa015307bc12018-03-09 13:51:08 +0000104 }
105
106 armnn::TensorInfo ret(operand.dimensions.size(), operand.dimensions.data(), type);
107
108 ret.SetQuantizationScale(operand.scale);
109 ret.SetQuantizationOffset(operand.zeroPoint);
110
111 return ret;
112}
113
Mike Kellyb5fdf382019-06-11 16:35:25 +0100114#ifdef ARMNN_ANDROID_NN_V1_2 // Using ::android::hardware::neuralnetworks::V1_2
115
116armnn::TensorInfo GetTensorInfoForOperand(const V1_2::Operand& operand)
117{
118 armnn::DataType type;
119
120 switch (operand.type)
121 {
122 case V1_2::OperandType::TENSOR_FLOAT32:
123 type = armnn::DataType::Float32;
124 break;
Mike Kelly3c673942019-07-25 09:26:06 +0100125 case V1_2::OperandType::TENSOR_FLOAT16:
126 type = armnn::DataType::Float16;
127 break;
Mike Kellyb5fdf382019-06-11 16:35:25 +0100128 case V1_2::OperandType::TENSOR_QUANT8_ASYMM:
129 type = armnn::DataType::QuantisedAsymm8;
130 break;
Mike Kellyc7d0d442019-12-11 19:27:11 +0000131 case V1_2::OperandType::TENSOR_QUANT8_SYMM:
132 type = armnn::DataType::QuantisedSymm8;
133 break;
Mike Kellyb5fdf382019-06-11 16:35:25 +0100134 case V1_2::OperandType::TENSOR_QUANT16_SYMM:
135 type = armnn::DataType::QuantisedSymm16;
136 break;
137 case V1_2::OperandType::TENSOR_INT32:
138 type = armnn::DataType::Signed32;
139 break;
140 default:
141 throw UnsupportedOperand<V1_2::OperandType>(operand.type);
142 }
143
144 armnn::TensorInfo ret(operand.dimensions.size(), operand.dimensions.data(), type);
145
146 ret.SetQuantizationScale(operand.scale);
147 ret.SetQuantizationOffset(operand.zeroPoint);
148
149 return ret;
150}
151
152#endif
153
Matthew Bentham912b3622019-05-03 15:49:14 +0100154std::string GetOperandSummary(const V1_0::Operand& operand)
telsoa015307bc12018-03-09 13:51:08 +0000155{
156 return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
157 toString(operand.type);
158}
159
Mike Kellyb5fdf382019-06-11 16:35:25 +0100160#ifdef ARMNN_ANDROID_NN_V1_2 // Using ::android::hardware::neuralnetworks::V1_2
161
162std::string GetOperandSummary(const V1_2::Operand& operand)
163{
164 return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
165 toString(operand.type);
166}
167
168#endif
169
telsoa015307bc12018-03-09 13:51:08 +0000170using DumpElementFunction = void (*)(const armnn::ConstTensor& tensor,
171 unsigned int elementIndex,
172 std::ofstream& fileStream);
173
174namespace
175{
176template <typename ElementType, typename PrintableType = ElementType>
177void DumpTensorElement(const armnn::ConstTensor& tensor, unsigned int elementIndex, std::ofstream& fileStream)
178{
179 const ElementType* elements = reinterpret_cast<const ElementType*>(tensor.GetMemoryArea());
180 fileStream << static_cast<PrintableType>(elements[elementIndex]) << ",";
181}
182
183constexpr const char* MemoryLayoutString(const armnn::ConstTensor& tensor)
184{
185 const char* str = "";
186
187 switch (tensor.GetNumDimensions())
188 {
189 case 4: { str = "(BHWC) "; break; }
190 case 3: { str = "(HWC) "; break; }
191 case 2: { str = "(HW) "; break; }
192 default: { str = ""; break; }
193 }
194
195 return str;
196}
197} // namespace
198
199void DumpTensor(const std::string& dumpDir,
200 const std::string& requestName,
201 const std::string& tensorName,
202 const armnn::ConstTensor& tensor)
203{
204 // The dump directory must exist in advance.
205 const std::string fileName = boost::str(boost::format("%1%/%2%_%3%.dump") % dumpDir % requestName % tensorName);
206
207 std::ofstream fileStream;
208 fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
209
210 if (!fileStream.good())
211 {
212 ALOGW("Could not open file %s for writing", fileName.c_str());
213 return;
214 }
215
216 DumpElementFunction dumpElementFunction = nullptr;
217
218 switch (tensor.GetDataType())
219 {
220 case armnn::DataType::Float32:
221 {
222 dumpElementFunction = &DumpTensorElement<float>;
223 break;
224 }
225 case armnn::DataType::QuantisedAsymm8:
226 {
227 dumpElementFunction = &DumpTensorElement<uint8_t, uint32_t>;
228 break;
229 }
230 case armnn::DataType::Signed32:
231 {
232 dumpElementFunction = &DumpTensorElement<int32_t>;
233 break;
234 }
Jim Flynna3e5b692019-12-12 15:11:30 +0000235 case armnn::DataType::Float16:
236 {
237 dumpElementFunction = &DumpTensorElement<armnn::Half>;
238 break;
239 }
telsoa015307bc12018-03-09 13:51:08 +0000240 default:
241 {
242 dumpElementFunction = nullptr;
243 }
244 }
245
246 if (dumpElementFunction != nullptr)
247 {
248 const unsigned int numDimensions = tensor.GetNumDimensions();
249
250 const unsigned int batch = (numDimensions == 4) ? tensor.GetShape()[numDimensions - 4] : 1;
251
252 const unsigned int height = (numDimensions >= 3)
253 ? tensor.GetShape()[numDimensions - 3]
254 : (numDimensions >= 2) ? tensor.GetShape()[numDimensions - 2] : 1;
255
256 const unsigned int width = (numDimensions >= 3)
257 ? tensor.GetShape()[numDimensions - 2]
258 : (numDimensions >= 1) ? tensor.GetShape()[numDimensions - 1] : 0;
259
260 const unsigned int channels = (numDimensions >= 3) ? tensor.GetShape()[numDimensions - 1] : 1;
261
262 fileStream << "# Number of elements " << tensor.GetNumElements() << std::endl;
263 fileStream << "# Dimensions " << MemoryLayoutString(tensor);
264 fileStream << "[" << tensor.GetShape()[0];
265 for (unsigned int d = 1; d < numDimensions; d++)
266 {
267 fileStream << "," << tensor.GetShape()[d];
268 }
269 fileStream << "]" << std::endl;
270
271 for (unsigned int e = 0, b = 0; b < batch; ++b)
272 {
273 if (numDimensions >= 4)
274 {
275 fileStream << "# Batch " << b << std::endl;
276 }
277 for (unsigned int c = 0; c < channels; c++)
278 {
279 if (numDimensions >= 3)
280 {
281 fileStream << "# Channel " << c << std::endl;
282 }
283 for (unsigned int h = 0; h < height; h++)
284 {
285 for (unsigned int w = 0; w < width; w++, e += channels)
286 {
287 (*dumpElementFunction)(tensor, e, fileStream);
288 }
289 fileStream << std::endl;
290 }
291 e -= channels - 1;
292 if (c < channels)
293 {
294 e -= ((height * width) - 1) * channels;
295 }
296 }
297 fileStream << std::endl;
298 }
299 fileStream << std::endl;
300 }
301 else
302 {
303 fileStream << "Cannot dump tensor elements: Unsupported data type "
304 << static_cast<unsigned int>(tensor.GetDataType()) << std::endl;
305 }
306
307 if (!fileStream.good())
308 {
309 ALOGW("An error occurred when writing to file %s", fileName.c_str());
310 }
311}
312
telsoa01ce3e84a2018-08-31 09:31:35 +0100313void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
314 const std::string& dumpDir,
315 armnn::NetworkId networkId,
316 const armnn::IProfiler* profiler)
317{
318 // Check if profiling is required.
319 if (!gpuProfilingEnabled)
320 {
321 return;
322 }
323
324 // The dump directory must exist in advance.
325 if (dumpDir.empty())
326 {
327 return;
328 }
329
330 BOOST_ASSERT(profiler);
331
332 // Set the name of the output profiling file.
333 const std::string fileName = boost::str(boost::format("%1%/%2%_%3%.json")
334 % dumpDir
335 % std::to_string(networkId)
336 % "profiling");
337
338 // Open the ouput file for writing.
339 std::ofstream fileStream;
340 fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
341
342 if (!fileStream.good())
343 {
344 ALOGW("Could not open file %s for writing", fileName.c_str());
345 return;
346 }
347
348 // Write the profiling info to a JSON file.
349 profiler->Print(fileStream);
350}
351
Jim Flynn14557e72019-12-16 11:50:29 +0000352std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
353 const std::string& dumpDir)
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000354{
Jim Flynn14557e72019-12-16 11:50:29 +0000355 std::string fileName;
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000356 // The dump directory must exist in advance.
357 if (dumpDir.empty())
358 {
Jim Flynn14557e72019-12-16 11:50:29 +0000359 return fileName;
360 }
361
362 std::string timestamp = GetFileTimestamp();
363 if (timestamp.empty())
364 {
365 return fileName;
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000366 }
367
368 // Set the name of the output .dot file.
Jim Flynn14557e72019-12-16 11:50:29 +0000369 fileName = boost::str(boost::format("%1%/%2%_networkgraph.dot")
370 % dumpDir
371 % timestamp);
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000372
373 ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());
374
375 // Write the network graph to a dot file.
376 std::ofstream fileStream;
377 fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);
378
379 if (!fileStream.good())
380 {
381 ALOGW("Could not open file %s for writing", fileName.c_str());
Jim Flynn14557e72019-12-16 11:50:29 +0000382 return fileName;
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000383 }
384
385 if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
386 {
387 ALOGW("An error occurred when writing to file %s", fileName.c_str());
388 }
Jim Flynn14557e72019-12-16 11:50:29 +0000389 return fileName;
Jim Flynn4d3a24b2019-12-13 14:43:24 +0000390}
391
Aron Virginas-Tar573a8fa2019-07-23 14:01:37 +0100392bool IsDynamicTensor(const armnn::TensorInfo& outputInfo)
393{
394 // Dynamic tensors have at least one 0-sized dimension
395 return outputInfo.GetNumElements() == 0u;
396}
397
Jim Flynn14557e72019-12-16 11:50:29 +0000398std::string GetFileTimestamp()
399{
400 // used to get a timestamp to name diagnostic files (the ArmNN serialized graph
401 // and getSupportedOperations.txt files)
402 timespec ts;
403 int iRet = clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
404 std::stringstream ss;
405 if (iRet == 0)
406 {
407 ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec);
408 }
409 else
410 {
411 ALOGW("clock_gettime failed with errno %s : %s", std::to_string(errno).c_str(), std::strerror(errno));
412 }
413 return ss.str();
414}
415
416void RenameGraphDotFile(const std::string& oldName, const std::string& dumpDir, const armnn::NetworkId networkId)
417{
418 if (dumpDir.empty())
419 {
420 return;
421 }
422 if (oldName.empty())
423 {
424 return;
425 }
426 const std::string newFileName = boost::str(boost::format("%1%/%2%_networkgraph.dot")
427 % dumpDir
428 % std::to_string(networkId));
429 int iRet = rename(oldName.c_str(), newFileName.c_str());
430 if (iRet != 0)
431 {
432 std::stringstream ss;
433 ss << "rename of [" << oldName << "] to [" << newFileName << "] failed with errno " << std::to_string(errno)
434 << " : " << std::strerror(errno);
435 ALOGW(ss.str().c_str());
436 }
437}
438
439
440
telsoa015307bc12018-03-09 13:51:08 +0000441} // namespace armnn_driver