//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#define LOG_TAG "ArmnnDriver"

#include "Utils.hpp"
#include "Half.hpp"

#include <armnnSerializer/ISerializer.hpp>
#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/Permute.hpp>

#include <armnn/Utils.hpp>
#include <armnn/utility/Assert.hpp>
#include <log/log.h>

#include <cassert>
#include <cerrno>
#include <cinttypes>
#include <cstring> // for std::strerror used below
#include <sstream>
#include <cstdio>
#include <time.h>

using namespace android;
using namespace android::hardware;
using namespace android::hidl::memory::V1_0;

namespace armnn_driver
{
const armnn::PermutationVector g_DontPermute{};

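// Permutes a 4D Android NN tensor into Arm NN order: the TensorInfo is updated in place via
// armnnUtils::Permuted and the element data is rearranged from 'input' into 'output'.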
void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo& tensorInfo, const void* input, void* output,
                                     const armnn::PermutationVector& mappings)
{
    assert(tensorInfo.GetNumDimensions() == 4U);

    armnn::DataType dataType = tensorInfo.GetDataType();
    switch (dataType)
    {
        case armnn::DataType::Float16:
        case armnn::DataType::Float32:
        case armnn::DataType::QAsymmU8:
        case armnn::DataType::QSymmS8:
        case armnn::DataType::QAsymmS8:
            // First swizzle tensor info
            tensorInfo = armnnUtils::Permuted(tensorInfo, mappings);
            // Then swizzle tensor data
            armnnUtils::Permute(tensorInfo.GetShape(), mappings, input, output, armnn::GetDataTypeSize(dataType));
            break;
        default:
            ALOGW("Unknown armnn::DataType for swizzling");
            assert(0);
    }
}

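// Returns a pointer into the runtime memory pool identified by location.poolIndex,
// offset by location.offset.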
void* GetMemoryFromPool(V1_0::DataLocation location, const std::vector<android::nn::RunTimePoolInfo>& memPools)
{
    // find the location within the pool
    assert(location.poolIndex < memPools.size());

    const android::nn::RunTimePoolInfo& memPool = memPools[location.poolIndex];

    uint8_t* memPoolBuffer = memPool.getBuffer();

    uint8_t* memory = memPoolBuffer + location.offset;

    return memory;
}

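// Converts a NeuralNetworks V1_0 operand into an armnn::TensorInfo, mapping the operand type,
// dimensions (a value of 0 meaning "unspecified") and quantization parameters.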
armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand)
{
    using namespace armnn;
    DataType type;

    switch (operand.type)
    {
        case V1_0::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_0::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_0::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_0::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        bool dimensionsSpecificity[5] = { true, true, true, true, true };
        int count = 0;
        std::for_each(operand.dimensions.data(),
                      operand.dimensions.data() + operand.dimensions.size(),
                      [&](const unsigned int val)
                      {
                          if (val == 0)
                          {
                              dimensionsSpecificity[count] = false;
                          }
                          count++;
                      });

        TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
        ret = TensorInfo(tensorShape, type);
    }

    ret.SetQuantizationScale(operand.scale);
    ret.SetQuantizationOffset(operand.zeroPoint);

    return ret;
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

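// Converts a NeuralNetworks V1_2 operand into an armnn::TensorInfo. In addition to the V1_0 types,
// this handles per-channel quantization (TENSOR_QUANT8_SYMM_PER_CHANNEL) via the operand's extraParams.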
armnn::TensorInfo GetTensorInfoForOperand(const V1_2::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;

    DataType type;
    switch (operand.type)
    {
        case V1_2::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_2::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_2::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_2::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_2::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_2::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        bool dimensionsSpecificity[5] = { true, true, true, true, true };
        int count = 0;
        std::for_each(operand.dimensions.data(),
                      operand.dimensions.data() + operand.dimensions.size(),
                      [&](const unsigned int val)
                      {
                          if (val == 0)
                          {
                              dimensionsSpecificity[count] = false;
                          }
                          count++;
                      });

        TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
        ret = TensorInfo(tensorShape, type);
    }

    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant
        ARMNN_ASSERT(operand.extraParams.getDiscriminator() ==
                     V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant);

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }

    return ret;
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

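// Converts a NeuralNetworks V1_3 operand into an armnn::TensorInfo. This also covers signed 8-bit
// asymmetric quantization, scalar INT32 operands, and per-channel quantization.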
armnn::TensorInfo GetTensorInfoForOperand(const V1_3::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;
    bool isScalar = false;

    DataType type;
    switch (operand.type)
    {
        case V1_3::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_3::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_3::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_3::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_3::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        case V1_3::OperandType::INT32:
            type = armnn::DataType::Signed32;
            isScalar = true;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM_SIGNED:
            type = armnn::DataType::QAsymmS8;
            break;
        default:
            throw UnsupportedOperand<V1_3::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (isScalar)
    {
        ret = TensorInfo(TensorShape(armnn::Dimensionality::Scalar), type);
    }
    else
    {
        if (operand.dimensions.size() == 0)
        {
            TensorShape tensorShape(Dimensionality::NotSpecified);
            ret = TensorInfo(tensorShape, type);
        }
        else
        {
            bool dimensionsSpecificity[5] = { true, true, true, true, true };
            int count = 0;
            std::for_each(operand.dimensions.data(),
                          operand.dimensions.data() + operand.dimensions.size(),
                          [&](const unsigned int val)
                          {
                              if (val == 0)
                              {
                                  dimensionsSpecificity[count] = false;
                              }
                              count++;
                          });

            TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
            ret = TensorInfo(tensorShape, type);
        }
    }

    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant
        ARMNN_ASSERT(operand.extraParams.getDiscriminator() ==
                     V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant);

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }
    return ret;
}

#endif

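// Returns a short human-readable summary of an operand: its dimensions followed by its type.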
std::string GetOperandSummary(const V1_0::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

std::string GetOperandSummary(const V1_2::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

std::string GetOperandSummary(const V1_3::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

using DumpElementFunction = void (*)(const armnn::ConstTensor& tensor,
                                     unsigned int elementIndex,
                                     std::ofstream& fileStream);

namespace
{
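// Writes a single tensor element to the stream, casting it to PrintableType so that, for example,
// 8-bit values are printed as numbers rather than characters.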
template <typename ElementType, typename PrintableType = ElementType>
void DumpTensorElement(const armnn::ConstTensor& tensor, unsigned int elementIndex, std::ofstream& fileStream)
{
    const ElementType* elements = reinterpret_cast<const ElementType*>(tensor.GetMemoryArea());
    fileStream << static_cast<PrintableType>(elements[elementIndex]) << " ";
}

} // namespace

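// Writes the shape and all elements of a tensor to <dumpDir>/<requestName>_<tensorName>.dump.
// Tensors with unsupported data types or unspecified dimensions are reported but not dumped.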
void DumpTensor(const std::string& dumpDir,
                const std::string& requestName,
                const std::string& tensorName,
                const armnn::ConstTensor& tensor)
{
    // The dump directory must exist in advance.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (requestName + "_" + tensorName + ".dump");

    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    DumpElementFunction dumpElementFunction = nullptr;

    switch (tensor.GetDataType())
    {
        case armnn::DataType::Float32:
        {
            dumpElementFunction = &DumpTensorElement<float>;
            break;
        }
        case armnn::DataType::QAsymmU8:
        {
            dumpElementFunction = &DumpTensorElement<uint8_t, uint32_t>;
            break;
        }
        case armnn::DataType::Signed32:
        {
            dumpElementFunction = &DumpTensorElement<int32_t>;
            break;
        }
        case armnn::DataType::Float16:
        {
            dumpElementFunction = &DumpTensorElement<armnn::Half>;
            break;
        }
        case armnn::DataType::QAsymmS8:
        {
            dumpElementFunction = &DumpTensorElement<int8_t, int32_t>;
            break;
        }
        case armnn::DataType::Boolean:
        {
            dumpElementFunction = &DumpTensorElement<bool>;
            break;
        }
        default:
        {
            dumpElementFunction = nullptr;
        }
    }

    if (dumpElementFunction != nullptr)
    {
        const unsigned int numDimensions = tensor.GetNumDimensions();
        const armnn::TensorShape shape = tensor.GetShape();

        if (!shape.AreAllDimensionsSpecified())
        {
            fileStream << "Cannot dump tensor elements: not all dimensions are specified" << std::endl;
            return;
        }
        fileStream << "# Number of elements " << tensor.GetNumElements() << std::endl;

        if (numDimensions == 0)
        {
            fileStream << "# Shape []" << std::endl;
            return;
        }
        fileStream << "# Shape [" << shape[0];
        for (unsigned int d = 1; d < numDimensions; ++d)
        {
            fileStream << "," << shape[d];
        }
        fileStream << "]" << std::endl;
        fileStream << "Each line contains the data of each of the elements of dimension0. In NCHW and NHWC, each line"
                      " will be a batch" << std::endl << std::endl;

        // Split will create a new line after all elements of the first dimension
        // (in a 4, 3, 2, 3 tensor, there will be 4 lines of 18 elements)
        unsigned int split = 1;
        if (numDimensions == 1)
        {
            split = shape[0];
        }
        else
        {
            for (unsigned int i = 1; i < numDimensions; ++i)
            {
                split *= shape[i];
            }
        }

        // Print all elements in the tensor
        for (unsigned int elementIndex = 0; elementIndex < tensor.GetNumElements(); ++elementIndex)
        {
            (*dumpElementFunction)(tensor, elementIndex, fileStream);

            if ((elementIndex + 1) % split == 0)
            {
                fileStream << std::endl;
            }
        }
        fileStream << std::endl;
    }
    else
    {
        fileStream << "Cannot dump tensor elements: Unsupported data type "
                   << static_cast<unsigned int>(tensor.GetDataType()) << std::endl;
    }

    if (!fileStream.good())
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
}

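// Writes the profiler output for the given network to <dumpDir>/<networkId>_profiling.json,
// provided GPU profiling is enabled and a dump directory has been configured.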
void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 const std::string& dumpDir,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler)
{
    // Check if profiling is required.
    if (!gpuProfilingEnabled)
    {
        return;
    }

    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return;
    }

    ARMNN_ASSERT(profiler);

    // Set the name of the output profiling file.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (std::to_string(networkId) + "_profiling.json");

    // Open the output file for writing.
    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    // Write the profiling info to a JSON file.
    profiler->Print(fileStream);
}

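// Serializes the optimized network graph to a timestamped .dot file in dumpDir and returns the
// chosen file name, or an empty string when no dump directory or timestamp is available.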
std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                        const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    // Set the name of the output .dot file.
    fs::path dumpPath = dumpDir;
    fs::path tempFilePath = dumpPath / (timestamp + "_networkgraph.dot");
    fileName = tempFilePath.string();

    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());

    // Write the network graph to a dot file.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return fileName;
    }

    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
    return fileName;
}

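// Serializes the INetwork to a timestamped .armnn file in dumpDir using armnnSerializer and
// returns the chosen file name (empty when no dump directory or timestamp is available).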
std::string SerializeNetwork(const armnn::INetwork& network, const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    auto serializer(armnnSerializer::ISerializer::Create());

    // Serialize the Network
    serializer->Serialize(network);

    // Set the name of the output .armnn file.
    fs::path dumpPath = dumpDir;
    fs::path tempFilePath = dumpPath / (timestamp + "_network.armnn");
    fileName = tempFilePath.string();

    // Save serialized network to a file
    std::ofstream serializedFile(fileName, std::ios::out | std::ios::binary);
    bool serialized = serializer->SaveSerializedToStream(serializedFile);
    if (!serialized)
    {
        ALOGW("An error occurred when serializing to file %s", fileName.c_str());
    }
    return fileName;
}

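// A tensor is considered dynamic if its dimensionality is not specified or any dimension is unknown.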
bool IsDynamicTensor(const armnn::TensorInfo& tensorInfo)
{
    if (tensorInfo.GetShape().GetDimensionality() == armnn::Dimensionality::NotSpecified)
    {
        return true;
    }
    // Account for the usage of the TensorShape empty constructor
    if (tensorInfo.GetNumDimensions() == 0)
    {
        return true;
    }
    return !tensorInfo.GetShape().AreAllDimensionsSpecified();
}

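// Dynamic tensors are only supported when the driver is built against NeuralNetworks HAL 1.3.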
bool AreDynamicTensorsSupported()
{
#if defined(ARMNN_ANDROID_NN_V1_3)
    return true;
#else
    return false;
#endif
}

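// Returns true if the given operand type is one of the quantized tensor types for that HAL version.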
bool isQuantizedOperand(const V1_0::OperandType& operandType)
{
    if (operandType == V1_0::OperandType::TENSOR_QUANT8_ASYMM)
    {
        return true;
    }
    else
    {
        return false;
    }
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2
bool isQuantizedOperand(const V1_2::OperandType& operandType)
{
    if (operandType == V1_2::OperandType::TENSOR_QUANT8_ASYMM ||
        operandType == V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL ||
        operandType == V1_2::OperandType::TENSOR_QUANT8_SYMM ||
        operandType == V1_2::OperandType::TENSOR_QUANT16_SYMM)
    {
        return true;
    }
    else
    {
        return false;
    }
}
#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3
bool isQuantizedOperand(const V1_3::OperandType& operandType)
{
    if (operandType == V1_3::OperandType::TENSOR_QUANT8_ASYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_SYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT16_SYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_ASYMM_SIGNED)
    {
        return true;
    }
    else
    {
        return false;
    }
}
#endif

std::string GetFileTimestamp()
{
    // used to get a timestamp to name diagnostic files (the ArmNN serialized graph
    // and getSupportedOperations.txt files)
    timespec ts;
    int iRet = clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
    std::stringstream ss;
    if (iRet == 0)
    {
        ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec);
    }
    else
    {
        ALOGW("clock_gettime failed with errno %s : %s", std::to_string(errno).c_str(), std::strerror(errno));
    }
    return ss.str();
}

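// Renames the serialized network and dot graph files exported during compilation so that their
// names are based on the network id rather than the original timestamp.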
void RenameExportedFiles(const std::string& existingSerializedFileName,
                         const std::string& existingDotFileName,
                         const std::string& dumpDir,
                         const armnn::NetworkId networkId)
{
    if (dumpDir.empty())
    {
        return;
    }
    RenameFile(existingSerializedFileName, std::string("_network.armnn"), dumpDir, networkId);
    RenameFile(existingDotFileName, std::string("_networkgraph.dot"), dumpDir, networkId);
}

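// Renames an exported file to <dumpDir>/<networkId><extension>, logging a warning if rename() fails.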
void RenameFile(const std::string& existingName,
                const std::string& extension,
                const std::string& dumpDir,
                const armnn::NetworkId networkId)
{
    if (existingName.empty() || dumpDir.empty())
    {
        return;
    }

    fs::path dumpPath = dumpDir;
    const fs::path newFileName = dumpPath / (std::to_string(networkId) + extension);
    int iRet = rename(existingName.c_str(), newFileName.c_str());
    if (iRet != 0)
    {
        std::stringstream ss;
        ss << "rename of [" << existingName << "] to [" << newFileName << "] failed with errno "
           << std::to_string(errno) << " : " << std::strerror(errno);
        ALOGW(ss.str().c_str());
    }
}

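// Flushes (or, on pre-R Android, updates) every runtime memory pool so that output data written
// by the driver becomes visible to the client.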
void CommitPools(std::vector<::android::nn::RunTimePoolInfo>& memPools)
{
    if (memPools.empty())
    {
        return;
    }
    // Commit output buffers.
    // Note that we update *all* pools, even if they aren't actually used as outputs -
    // this is simpler and is what the CpuExecutor does.
    for (auto& pool : memPools)
    {
        // Type android::nn::RunTimePoolInfo has changed between Android P & Q and Android R, where
        // update() has been removed and flush() added.
#if defined(ARMNN_ANDROID_R) || defined(ARMNN_ANDROID_S) // Use the new Android implementation.
        pool.flush();
#else
        pool.update();
#endif
    }
}
} // namespace armnn_driver