//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#define LOG_TAG "ArmnnDriver"

#include "Utils.hpp"
#include "Half.hpp"

#include <armnnSerializer/ISerializer.hpp>
#include <armnnUtils/Permute.hpp>

#include <armnn/Utils.hpp>
#include <armnn/utility/Assert.hpp>
#include <Filesystem.hpp>
#include <log/log.h>

#include <cassert>
#include <cerrno>
#include <cinttypes>
#include <cstdio>
#include <cstring> // std::strerror
#include <sstream>
#include <time.h>

using namespace android;
using namespace android::hardware;
using namespace android::hidl::memory::V1_0;

namespace armnn_driver
{
const armnn::PermutationVector g_DontPermute{};

void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo& tensorInfo, const void* input, void* output,
                                     const armnn::PermutationVector& mappings)
{
    assert(tensorInfo.GetNumDimensions() == 4U);

    armnn::DataType dataType = tensorInfo.GetDataType();
    switch (dataType)
    {
        case armnn::DataType::Float16:
        case armnn::DataType::Float32:
        case armnn::DataType::QAsymmU8:
        case armnn::DataType::QSymmS8:
        case armnn::DataType::QAsymmS8:
            // First swizzle tensor info
            tensorInfo = armnnUtils::Permuted(tensorInfo, mappings);
            // Then swizzle tensor data
            armnnUtils::Permute(tensorInfo.GetShape(), mappings, input, output, armnn::GetDataTypeSize(dataType));
            break;
        default:
            ALOGW("Unknown armnn::DataType for swizzling");
            assert(0);
    }
}
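
// Illustrative usage only (not part of the original source): swizzle a 4-D Android NN tensor into
// the layout ArmNN expects. The mapping below is an assumption for this sketch; callers supply
// whichever armnn::PermutationVector matches their source and destination layouts.
//
//     armnn::PermutationVector toArmNn({ 0U, 2U, 3U, 1U }); // hypothetical NHWC -> NCHW mapping
//     SwizzleAndroidNn4dTensorToArmNn(tensorInfo, srcBuffer, dstBuffer, toArmNn);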

void* GetMemoryFromPool(V1_0::DataLocation location, const std::vector<android::nn::RunTimePoolInfo>& memPools)
{
    // find the location within the pool
    assert(location.poolIndex < memPools.size());

    const android::nn::RunTimePoolInfo& memPool = memPools[location.poolIndex];

    uint8_t* memPoolBuffer = memPool.getBuffer();

    uint8_t* memory = memPoolBuffer + location.offset;

    return memory;
}
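
// Illustrative usage only (names are hypothetical): resolve an operand's backing memory from the
// request pools that were previously wrapped in RunTimePoolInfo objects.
//
//     void* inputData = GetMemoryFromPool(inputArgument.location, requestPoolInfos);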

armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand)
{
    using namespace armnn;
    DataType type;

    switch (operand.type)
    {
        case V1_0::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_0::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_0::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_0::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        bool dimensionsSpecificity[5] = { true, true, true, true, true };
        int count = 0;
        std::for_each(operand.dimensions.data(),
                      operand.dimensions.data() + operand.dimensions.size(),
                      [&](const unsigned int val)
                      {
                          if (val == 0)
                          {
                              dimensionsSpecificity[count] = false;
                          }
                          count++;
                      });

        TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
        ret = TensorInfo(tensorShape, type);
    }

    ret.SetQuantizationScale(operand.scale);
    ret.SetQuantizationOffset(operand.zeroPoint);

    return ret;
}
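
// Note: the Android NN API uses a dimension value of 0 to mean "unknown". An operand with
// dimensions { 2, 0, 3 }, for example, yields a TensorInfo whose middle dimension is unspecified,
// and IsDynamicTensor() further down reports such a tensor as dynamic.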

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

armnn::TensorInfo GetTensorInfoForOperand(const V1_2::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;

    DataType type;
    switch (operand.type)
    {
        case V1_2::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_2::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_2::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_2::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_2::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_2::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        bool dimensionsSpecificity[5] = { true, true, true, true, true };
        int count = 0;
        std::for_each(operand.dimensions.data(),
                      operand.dimensions.data() + operand.dimensions.size(),
                      [&](const unsigned int val)
                      {
                          if (val == 0)
                          {
                              dimensionsSpecificity[count] = false;
                          }
                          count++;
                      });

        TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
        ret = TensorInfo(tensorShape, type);
    }

    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant
        ARMNN_ASSERT(operand.extraParams.getDiscriminator() ==
                     V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant);

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }

    return ret;
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

armnn::TensorInfo GetTensorInfoForOperand(const V1_3::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;
    bool isScalar = false;

    DataType type;
    switch (operand.type)
    {
        case V1_3::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_3::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_3::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_3::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_3::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        case V1_3::OperandType::INT32:
            type = armnn::DataType::Signed32;
            isScalar = true;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM_SIGNED:
            type = armnn::DataType::QAsymmS8;
            break;
        default:
            throw UnsupportedOperand<V1_3::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (isScalar)
    {
        ret = TensorInfo(TensorShape(armnn::Dimensionality::Scalar), type);
    }
    else
    {
        if (operand.dimensions.size() == 0)
        {
            TensorShape tensorShape(Dimensionality::NotSpecified);
            ret = TensorInfo(tensorShape, type);
        }
        else
        {
            bool dimensionsSpecificity[5] = { true, true, true, true, true };
            int count = 0;
            std::for_each(operand.dimensions.data(),
                          operand.dimensions.data() + operand.dimensions.size(),
                          [&](const unsigned int val)
                          {
                              if (val == 0)
                              {
                                  dimensionsSpecificity[count] = false;
                              }
                              count++;
                          });

            TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
            ret = TensorInfo(tensorShape, type);
        }
    }

    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant
        ARMNN_ASSERT(operand.extraParams.getDiscriminator() ==
                     V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant);

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }
    return ret;
}

#endif

std::string GetOperandSummary(const V1_0::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

std::string GetOperandSummary(const V1_2::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

std::string GetOperandSummary(const V1_3::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

using DumpElementFunction = void (*)(const armnn::ConstTensor& tensor,
                                     unsigned int elementIndex,
                                     std::ofstream& fileStream);

namespace
{
template <typename ElementType, typename PrintableType = ElementType>
void DumpTensorElement(const armnn::ConstTensor& tensor, unsigned int elementIndex, std::ofstream& fileStream)
{
    const ElementType* elements = reinterpret_cast<const ElementType*>(tensor.GetMemoryArea());
    fileStream << static_cast<PrintableType>(elements[elementIndex]) << " ";
}

} // namespace

void DumpTensor(const std::string& dumpDir,
                const std::string& requestName,
                const std::string& tensorName,
                const armnn::ConstTensor& tensor)
{
    // The dump directory must exist in advance.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (requestName + "_" + tensorName + ".dump");

    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    DumpElementFunction dumpElementFunction = nullptr;

    switch (tensor.GetDataType())
    {
        case armnn::DataType::Float32:
        {
            dumpElementFunction = &DumpTensorElement<float>;
            break;
        }
        case armnn::DataType::QAsymmU8:
        {
            dumpElementFunction = &DumpTensorElement<uint8_t, uint32_t>;
            break;
        }
        case armnn::DataType::Signed32:
        {
            dumpElementFunction = &DumpTensorElement<int32_t>;
            break;
        }
        case armnn::DataType::Float16:
        {
            dumpElementFunction = &DumpTensorElement<armnn::Half>;
            break;
        }
        case armnn::DataType::QAsymmS8:
        {
            dumpElementFunction = &DumpTensorElement<int8_t, int32_t>;
            break;
        }
        case armnn::DataType::Boolean:
        {
            dumpElementFunction = &DumpTensorElement<bool>;
            break;
        }
        default:
        {
            dumpElementFunction = nullptr;
        }
    }

    if (dumpElementFunction != nullptr)
    {
        const unsigned int numDimensions = tensor.GetNumDimensions();
        const armnn::TensorShape shape = tensor.GetShape();

        fileStream << "# Number of elements " << tensor.GetNumElements() << std::endl;
        fileStream << "# Shape [" << shape[0];
        for (unsigned int d = 1; d < numDimensions; ++d)
        {
            fileStream << "," << shape[d];
        }
        fileStream << "]" << std::endl;
        fileStream << "Each line contains the data of each of the elements of dimension 0. In NCHW and NHWC, each line"
                      " will be a batch" << std::endl << std::endl;

        // Split will create a new line after all elements of the first dimension
        // (in a 4, 3, 2, 3 tensor, there will be 4 lines of 18 elements).
        unsigned int split = 1;
        if (numDimensions == 1)
        {
            split = shape[0];
        }
        else
        {
            for (unsigned int i = 1; i < numDimensions; ++i)
            {
                split *= shape[i];
            }
        }

        // Print all elements in the tensor.
        for (unsigned int elementIndex = 0; elementIndex < tensor.GetNumElements(); ++elementIndex)
        {
            (*dumpElementFunction)(tensor, elementIndex, fileStream);

            if ((elementIndex + 1) % split == 0)
            {
                fileStream << std::endl;
            }
        }
        fileStream << std::endl;
    }
    else
    {
        fileStream << "Cannot dump tensor elements: Unsupported data type "
                   << static_cast<unsigned int>(tensor.GetDataType()) << std::endl;
    }

    if (!fileStream.good())
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
}
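
// Illustrative usage only (the path and names are hypothetical): dump a tensor produced by an
// execution request into a dump directory that already exists.
//
//     DumpTensor("/data/armnn-dumps", "request_0", "output_0", outputTensor);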

void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 const std::string& dumpDir,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler)
{
    // Check if profiling is required.
    if (!gpuProfilingEnabled)
    {
        return;
    }

    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return;
    }

    ARMNN_ASSERT(profiler);

    // Set the name of the output profiling file.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (std::to_string(networkId) + "_profiling.json");

    // Open the output file for writing.
    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    // Write the profiling info to a JSON file.
    profiler->Print(fileStream);
}

std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                        const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    // Set the name of the output .dot file.
    fs::path dumpPath = dumpDir;
    fs::path tempFilePath = dumpPath / (timestamp + "_networkgraph.dot");
    fileName = tempFilePath.string();

    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());

    // Write the network graph to a dot file.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return fileName;
    }

    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
    return fileName;
}

std::string SerializeNetwork(const armnn::INetwork& network, const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    auto serializer(armnnSerializer::ISerializer::Create());

    // Serialize the Network
    serializer->Serialize(network);

    // Set the name of the output .armnn file.
    fs::path dumpPath = dumpDir;
    fs::path tempFilePath = dumpPath / (timestamp + "_network.armnn");
    fileName = tempFilePath.string();

    // Save serialized network to a file
    std::ofstream serializedFile(fileName, std::ios::out | std::ios::binary);
    bool serialized = serializer->SaveSerializedToStream(serializedFile);
    if (!serialized)
    {
        ALOGW("An error occurred when serializing to file %s", fileName.c_str());
    }
    return fileName;
}
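
// Illustrative workflow only (variable names are hypothetical): the graph .dot file and the
// serialized network are written with timestamp-based names before a NetworkId exists, and can be
// renamed once the network has been loaded.
//
//     std::string dotFileName        = ExportNetworkGraphToDotFile(*optimizedNetwork, dumpDir);
//     std::string serializedFileName = SerializeNetwork(*network, dumpDir);
//     // ... after armnn::IRuntime::LoadNetwork() has assigned networkId ...
//     RenameExportedFiles(serializedFileName, dotFileName, dumpDir, networkId);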

bool IsDynamicTensor(const armnn::TensorInfo& tensorInfo)
{
    if (tensorInfo.GetShape().GetDimensionality() == armnn::Dimensionality::NotSpecified)
    {
        return true;
    }
    // Account for the usage of the TensorShape empty constructor
    if (tensorInfo.GetNumDimensions() == 0)
    {
        return true;
    }
    return !tensorInfo.GetShape().AreAllDimensionsSpecified();
}

bool AreDynamicTensorsSupported()
{
#if defined(ARMNN_ANDROID_NN_V1_3)
    return true;
#else
    return false;
#endif
}
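
// Illustrative note: dynamic tensors are only accepted when the driver is built against NN HAL 1.3
// (ARMNN_ANDROID_NN_V1_3, introduced with Android R); on earlier HAL versions the conversion code
// is expected to reject operations whose tensors are dynamic.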

std::string GetFileTimestamp()
{
    // used to get a timestamp to name diagnostic files (the ArmNN serialized graph
    // and getSupportedOperations.txt files)
    timespec ts;
    int iRet = clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
    std::stringstream ss;
    if (iRet == 0)
    {
        ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec);
    }
    else
    {
        ALOGW("clock_gettime failed with errno %s : %s", std::to_string(errno).c_str(), std::strerror(errno));
    }
    return ss.str();
}

void RenameExportedFiles(const std::string& existingSerializedFileName,
                         const std::string& existingDotFileName,
                         const std::string& dumpDir,
                         const armnn::NetworkId networkId)
{
    if (dumpDir.empty())
    {
        return;
    }
    RenameFile(existingSerializedFileName, std::string("_network.armnn"), dumpDir, networkId);
    RenameFile(existingDotFileName, std::string("_networkgraph.dot"), dumpDir, networkId);
}

void RenameFile(const std::string& existingName,
                const std::string& extension,
                const std::string& dumpDir,
                const armnn::NetworkId networkId)
{
    if (existingName.empty() || dumpDir.empty())
    {
        return;
    }

    fs::path dumpPath = dumpDir;
    const fs::path newFileName = dumpPath / (std::to_string(networkId) + extension);
    int iRet = rename(existingName.c_str(), newFileName.c_str());
    if (iRet != 0)
    {
        std::stringstream ss;
        ss << "rename of [" << existingName << "] to [" << newFileName << "] failed with errno "
           << std::to_string(errno) << " : " << std::strerror(errno);
        ALOGW("%s", ss.str().c_str());
    }
}

void CommitPools(std::vector<::android::nn::RunTimePoolInfo>& memPools)
{
    if (memPools.empty())
    {
        return;
    }
    // Commit output buffers.
    // Note that we update *all* pools, even if they aren't actually used as outputs -
    // this is simpler and is what the CpuExecutor does.
    for (auto& pool : memPools)
    {
        // Type android::nn::RunTimePoolInfo has changed between Android P & Q and Android R, where
        // update() has been removed and flush() added.
#if defined(ARMNN_ANDROID_R) || defined(ARMNN_ANDROID_S) // Use the new Android implementation.
        pool.flush();
#else
        pool.update();
#endif
    }
}
} // namespace armnn_driver