//
// Copyright © 2017-2021,2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#define LOG_TAG "ArmnnDriver"

#include "Utils.hpp"
#include "Half.hpp"

#include <armnnSerializer/ISerializer.hpp>
#include <armnnUtils/Filesystem.hpp>
#include <armnnUtils/Permute.hpp>

#include <armnn/Utils.hpp>
#include <log/log.h>

#include <cerrno>
#include <cinttypes>
#include <sstream>
#include <cstdio>
#include <time.h>

using namespace android;
using namespace android::hardware;
using namespace android::hidl::memory::V1_0;

namespace armnn_driver
{
const armnn::PermutationVector g_DontPermute{};

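// Permutes a 4D Android NN tensor into the layout described by 'mappings': the TensorInfo is
// updated in place and the tensor data is rearranged from 'input' into 'output'.
// Throws for non-4D tensors and for data types that cannot be swizzled.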
void SwizzleAndroidNn4dTensorToArmNn(armnn::TensorInfo& tensorInfo, const void* input, void* output,
                                     const armnn::PermutationVector& mappings)
{
    if (tensorInfo.GetNumDimensions() != 4U)
    {
        throw armnn::InvalidArgumentException("NumDimensions must be 4");
    }
    armnn::DataType dataType = tensorInfo.GetDataType();
    switch (dataType)
    {
        case armnn::DataType::Float16:
        case armnn::DataType::Float32:
        case armnn::DataType::QAsymmU8:
        case armnn::DataType::QSymmS16:
        case armnn::DataType::QSymmS8:
        case armnn::DataType::QAsymmS8:
            // First swizzle tensor info
            tensorInfo = armnnUtils::Permuted(tensorInfo, mappings);
            // Then swizzle tensor data
            armnnUtils::Permute(tensorInfo.GetShape(), mappings, input, output, armnn::GetDataTypeSize(dataType));
            break;
        default:
            throw armnn::InvalidArgumentException("Unknown DataType for swizzling");
    }
}

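// Returns a pointer to the data described by 'location' inside the runtime memory pools;
// throws if the pool index is out of range.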
void* GetMemoryFromPool(V1_0::DataLocation location, const std::vector<android::nn::RunTimePoolInfo>& memPools)
{
    // Find the location within the pool.
    if (location.poolIndex >= memPools.size())
    {
        throw armnn::InvalidArgumentException("The poolIndex is greater than the memPools size.");
    }

    const android::nn::RunTimePoolInfo& memPool = memPools[location.poolIndex];

    uint8_t* memPoolBuffer = memPool.getBuffer();

    uint8_t* memory = memPoolBuffer + location.offset;

    return memory;
}

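// Converts a V1_0 Android NN operand into an armnn::TensorInfo, mapping the operand type and
// copying the quantization scale and zero point.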
armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand)
{
    using namespace armnn;
    DataType type;

    switch (operand.type)
    {
        case V1_0::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_0::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_0::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_0::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        std::vector<unsigned char> dimensionsSpecificity(operand.dimensions.size(), true);

        for (unsigned int i = 0; i < static_cast<unsigned int>(operand.dimensions.size()); ++i)
        {
            auto dim = operand.dimensions[i];
            if (dim == 0)
            {
                dimensionsSpecificity[i] = false;
            }
        }
        TensorShape tensorShape(operand.dimensions.size(),
                                operand.dimensions.data(),
                                reinterpret_cast<const bool *>(dimensionsSpecificity.data()));
        ret = TensorInfo(tensorShape, type);
    }

    ret.SetQuantizationScale(operand.scale);
    ret.SetQuantizationOffset(operand.zeroPoint);

    return ret;
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

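// Converts a V1_2 Android NN operand into an armnn::TensorInfo. TENSOR_QUANT8_SYMM_PER_CHANNEL
// operands take their per-axis quantization parameters from extraParams.channelQuant().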
armnn::TensorInfo GetTensorInfoForOperand(const V1_2::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;

    DataType type;
    switch (operand.type)
    {
        case V1_2::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_2::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_2::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_2::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_2::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_2::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        default:
            throw UnsupportedOperand<V1_2::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (operand.dimensions.size() == 0)
    {
        TensorShape tensorShape(Dimensionality::NotSpecified);
        ret = TensorInfo(tensorShape, type);
    }
    else
    {
        bool dimensionsSpecificity[5] = { true, true, true, true, true };
        int count = 0;
        std::for_each(operand.dimensions.data(),
                      operand.dimensions.data() + operand.dimensions.size(),
                      [&](const unsigned int val)
                      {
                          if (val == 0)
                          {
                              dimensionsSpecificity[count] = false;
                          }
                          count++;
                      });

        TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
        ret = TensorInfo(tensorShape, type);
    }

    if (perChannel)
    {
        if (operand.extraParams.getDiscriminator() != V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant)
        {
            throw armnn::InvalidArgumentException("ExtraParams is expected to be of type channelQuant");
        }

        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }

    return ret;
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

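// Converts a V1_3 Android NN operand into an armnn::TensorInfo, additionally handling scalar INT32
// operands and the signed asymmetric quantized type (TENSOR_QUANT8_ASYMM_SIGNED) added in V1_3.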
armnn::TensorInfo GetTensorInfoForOperand(const V1_3::Operand& operand)
{
    using namespace armnn;
    bool perChannel = false;
    bool isScalar = false;

    DataType type;
    switch (operand.type)
    {
        case V1_3::OperandType::TENSOR_BOOL8:
            type = armnn::DataType::Boolean;
            break;
        case V1_3::OperandType::TENSOR_FLOAT32:
            type = armnn::DataType::Float32;
            break;
        case V1_3::OperandType::TENSOR_FLOAT16:
            type = armnn::DataType::Float16;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM:
            type = armnn::DataType::QAsymmU8;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL:
            perChannel = true;
            ARMNN_FALLTHROUGH;
        case V1_3::OperandType::TENSOR_QUANT8_SYMM:
            type = armnn::DataType::QSymmS8;
            break;
        case V1_3::OperandType::TENSOR_QUANT16_SYMM:
            type = armnn::DataType::QSymmS16;
            break;
        case V1_3::OperandType::TENSOR_INT32:
            type = armnn::DataType::Signed32;
            break;
        case V1_3::OperandType::INT32:
            type = armnn::DataType::Signed32;
            isScalar = true;
            break;
        case V1_3::OperandType::TENSOR_QUANT8_ASYMM_SIGNED:
            type = armnn::DataType::QAsymmS8;
            break;
        default:
            throw UnsupportedOperand<V1_3::OperandType>(operand.type);
    }

    TensorInfo ret;
    if (isScalar)
    {
        ret = TensorInfo(TensorShape(armnn::Dimensionality::Scalar), type);
    }
    else
    {
        if (operand.dimensions.size() == 0)
        {
            TensorShape tensorShape(Dimensionality::NotSpecified);
            ret = TensorInfo(tensorShape, type);
        }
        else
        {
            bool dimensionsSpecificity[5] = { true, true, true, true, true };
            int count = 0;
            std::for_each(operand.dimensions.data(),
                          operand.dimensions.data() + operand.dimensions.size(),
                          [&](const unsigned int val)
                          {
                              if (val == 0)
                              {
                                  dimensionsSpecificity[count] = false;
                              }
                              count++;
                          });

            TensorShape tensorShape(operand.dimensions.size(), operand.dimensions.data(), dimensionsSpecificity);
            ret = TensorInfo(tensorShape, type);
        }
    }

    if (perChannel)
    {
        // ExtraParams is expected to be of type channelQuant
        if (operand.extraParams.getDiscriminator() != V1_2::Operand::ExtraParams::hidl_discriminator::channelQuant)
        {
            throw armnn::InvalidArgumentException("ExtraParams is expected to be of type channelQuant");
        }
        auto perAxisQuantParams = operand.extraParams.channelQuant();

        ret.SetQuantizationScales(perAxisQuantParams.scales);
        ret.SetQuantizationDim(MakeOptional<unsigned int>(perAxisQuantParams.channelDim));
    }
    else
    {
        ret.SetQuantizationScale(operand.scale);
        ret.SetQuantizationOffset(operand.zeroPoint);
    }
    return ret;
}

#endif

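// Returns a one-line summary of an operand (its dimensions followed by its type) for logging.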
std::string GetOperandSummary(const V1_0::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2

std::string GetOperandSummary(const V1_2::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3

std::string GetOperandSummary(const V1_3::Operand& operand)
{
    return android::hardware::details::arrayToString(operand.dimensions, operand.dimensions.size()) + " " +
           toString(operand.type);
}

#endif

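// Helper machinery for DumpTensor: a per-type function pointer and a templated printer that writes
// a single tensor element to the dump file, cast to a printable type where needed.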
template <typename TensorType>
using DumpElementFunction = void (*)(const TensorType& tensor,
                                     unsigned int elementIndex,
                                     std::ofstream& fileStream);

namespace
{
template <typename TensorType, typename ElementType, typename PrintableType = ElementType>
void DumpTensorElement(const TensorType& tensor, unsigned int elementIndex, std::ofstream& fileStream)
{
    const ElementType* elements = reinterpret_cast<const ElementType*>(tensor.GetMemoryArea());
    fileStream << static_cast<PrintableType>(elements[elementIndex]) << " ";
}

} // namespace

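// Writes the contents of a tensor to <dumpDir>/<requestName>_<tensorName>.dump as text.
// The dump directory must already exist; unsupported data types and tensors with unspecified
// dimensions are reported in the file rather than dumped element by element.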
template <typename TensorType>
void DumpTensor(const std::string& dumpDir,
                const std::string& requestName,
                const std::string& tensorName,
                const TensorType& tensor)
{
    // The dump directory must exist in advance.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (requestName + "_" + tensorName + ".dump");

    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    DumpElementFunction<TensorType> dumpElementFunction = nullptr;

    switch (tensor.GetDataType())
    {
        case armnn::DataType::Float32:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, float>;
            break;
        }
        case armnn::DataType::QAsymmU8:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, uint8_t, uint32_t>;
            break;
        }
        case armnn::DataType::Signed32:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, int32_t>;
            break;
        }
        case armnn::DataType::Float16:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, armnn::Half>;
            break;
        }
        case armnn::DataType::QAsymmS8:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, int8_t, int32_t>;
            break;
        }
        case armnn::DataType::Boolean:
        {
            dumpElementFunction = &DumpTensorElement<TensorType, bool>;
            break;
        }
        default:
        {
            dumpElementFunction = nullptr;
        }
    }

    if (dumpElementFunction != nullptr)
    {
        const unsigned int numDimensions = tensor.GetNumDimensions();
        const armnn::TensorShape shape = tensor.GetShape();

        if (!shape.AreAllDimensionsSpecified())
        {
            fileStream << "Cannot dump tensor elements: not all dimensions are specified" << std::endl;
            return;
        }
        fileStream << "# Number of elements " << tensor.GetNumElements() << std::endl;

        if (numDimensions == 0)
        {
            fileStream << "# Shape []" << std::endl;
            return;
        }
        fileStream << "# Shape [" << shape[0];
        for (unsigned int d = 1; d < numDimensions; ++d)
        {
            fileStream << "," << shape[d];
        }
        fileStream << "]" << std::endl;
        fileStream << "Each line contains the data of each of the elements of dimension0. In NCHW and NHWC, each line"
                      " will be a batch" << std::endl << std::endl;

        // Split will create a new line after all elements of the first dimension
        // (in a 4, 3, 2, 3 tensor, there will be 4 lines of 18 elements)
        unsigned int split = 1;
        if (numDimensions == 1)
        {
            split = shape[0];
        }
        else
        {
            for (unsigned int i = 1; i < numDimensions; ++i)
            {
                split *= shape[i];
            }
        }

        // Print all elements in the tensor
        for (unsigned int elementIndex = 0; elementIndex < tensor.GetNumElements(); ++elementIndex)
        {
            (*dumpElementFunction)(tensor, elementIndex, fileStream);

            if ( (elementIndex + 1) % split == 0 )
            {
                fileStream << std::endl;
            }
        }
        fileStream << std::endl;
    }
    else
    {
        fileStream << "Cannot dump tensor elements: Unsupported data type "
                   << static_cast<unsigned int>(tensor.GetDataType()) << std::endl;
    }

    if (!fileStream.good())
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
}

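// Explicit instantiations of DumpTensor for the tensor types used by the driver.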
template void DumpTensor<armnn::ConstTensor>(const std::string& dumpDir,
                                             const std::string& requestName,
                                             const std::string& tensorName,
                                             const armnn::ConstTensor& tensor);

template void DumpTensor<armnn::Tensor>(const std::string& dumpDir,
                                        const std::string& requestName,
                                        const std::string& tensorName,
                                        const armnn::Tensor& tensor);

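// Writes the profiler's collected data for the given network to <dumpDir>/<networkId>_profiling.json
// when GPU profiling is enabled.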
void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 const std::string& dumpDir,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler)
{
    // Check if profiling is required.
    if (!gpuProfilingEnabled)
    {
        return;
    }

    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return;
    }

    if (!profiler)
    {
        ALOGW("profiler was null");
        return;
    }

    // Set the name of the output profiling file.
    fs::path dumpPath = dumpDir;
    const fs::path fileName = dumpPath / (std::to_string(networkId) + "_profiling.json");

    // Open the output file for writing.
    std::ofstream fileStream;
    fileStream.open(fileName.c_str(), std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    // Write the profiling info to a JSON file.
    profiler->Print(fileStream);
}

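// Exports the optimized network graph as a GraphViz .dot file named <timestamp>_networkgraph.dot in dumpDir.
// Returns the file path, or an empty string if no dump directory or timestamp is available.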
std::string ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                        const std::string& dumpDir)
{
    std::string fileName;
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return fileName;
    }

    std::string timestamp = GetFileTimestamp();
    if (timestamp.empty())
    {
        return fileName;
    }

    // Set the name of the output .dot file.
    fs::path dumpPath = dumpDir;
    fs::path tempFilePath = dumpPath / (timestamp + "_networkgraph.dot");
    fileName = tempFilePath.string();

    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());

    // Write the network graph to a dot file.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return fileName;
    }

    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
    return fileName;
}

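// Serializes the network with the ArmNN serializer. When data caching is active the serialized bytes
// are appended to dataCacheData; when a dump directory and timestamp are available the network is also
// written to a timestamped .armnn file whose name is returned.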
std::string SerializeNetwork(const armnn::INetwork& network,
                             const std::string& dumpDir,
                             std::vector<uint8_t>& dataCacheData,
                             bool dataCachingActive)
{
    std::string fileName;
    bool bSerializeToFile = true;
    if (dumpDir.empty())
    {
        bSerializeToFile = false;
    }
    else
    {
        std::string timestamp = GetFileTimestamp();
        if (timestamp.empty())
        {
            bSerializeToFile = false;
        }
    }
    if (!bSerializeToFile && !dataCachingActive)
    {
        return fileName;
    }

    auto serializer(armnnSerializer::ISerializer::Create());
    // Serialize the Network
    serializer->Serialize(network);
    if (dataCachingActive)
    {
        std::stringstream stream;
        auto serialized = serializer->SaveSerializedToStream(stream);
        if (serialized)
        {
            std::string const serializedString{stream.str()};
            std::copy(serializedString.begin(), serializedString.end(), std::back_inserter(dataCacheData));
        }
    }

    if (bSerializeToFile)
    {
        // Set the name of the output .armnn file.
        fs::path dumpPath = dumpDir;
        std::string timestamp = GetFileTimestamp();
        fs::path tempFilePath = dumpPath / (timestamp + "_network.armnn");
        fileName = tempFilePath.string();

        // Save serialized network to a file
        std::ofstream serializedFile(fileName, std::ios::out | std::ios::binary);
        auto serialized = serializer->SaveSerializedToStream(serializedFile);
        if (!serialized)
        {
            ALOGW("An error occurred when serializing to file %s", fileName.c_str());
        }
    }
    return fileName;
}

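// A tensor is treated as dynamic if its dimensionality is not specified, it has no dimensions,
// or any of its dimensions is unspecified.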
bool IsDynamicTensor(const armnn::TensorInfo& tensorInfo)
{
    if (tensorInfo.GetShape().GetDimensionality() == armnn::Dimensionality::NotSpecified)
    {
        return true;
    }
    // Account for the usage of the TensorShape empty constructor
    if (tensorInfo.GetNumDimensions() == 0)
    {
        return true;
    }
    return !tensorInfo.GetShape().AreAllDimensionsSpecified();
}

bool AreDynamicTensorsSupported()
{
#if defined(ARMNN_ANDROID_NN_V1_3)
    return true;
#else
    return false;
#endif
}

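// Returns true if the operand type is one of the quantized tensor types for the given HAL version.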
bool isQuantizedOperand(const V1_0::OperandType& operandType)
{
    if (operandType == V1_0::OperandType::TENSOR_QUANT8_ASYMM)
    {
        return true;
    }
    else
    {
        return false;
    }
}

#if defined(ARMNN_ANDROID_NN_V1_2) || defined(ARMNN_ANDROID_NN_V1_3) // Using ::android::hardware::neuralnetworks::V1_2
bool isQuantizedOperand(const V1_2::OperandType& operandType)
{
    if (operandType == V1_2::OperandType::TENSOR_QUANT8_ASYMM ||
        operandType == V1_2::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL ||
        operandType == V1_2::OperandType::TENSOR_QUANT8_SYMM ||
        operandType == V1_2::OperandType::TENSOR_QUANT16_SYMM)
    {
        return true;
    }
    else
    {
        return false;
    }
}
#endif

#ifdef ARMNN_ANDROID_NN_V1_3 // Using ::android::hardware::neuralnetworks::V1_3
bool isQuantizedOperand(const V1_3::OperandType& operandType)
{
    if (operandType == V1_3::OperandType::TENSOR_QUANT8_ASYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_SYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT16_SYMM ||
        operandType == V1_3::OperandType::TENSOR_QUANT8_ASYMM_SIGNED)
    {
        return true;
    }
    else
    {
        return false;
    }
}
#endif

std::string GetFileTimestamp()
{
    // used to get a timestamp to name diagnostic files (the ArmNN serialized graph
    // and getSupportedOperations.txt files)
    timespec ts;
    int iRet = clock_gettime(CLOCK_MONOTONIC_RAW, &ts);
    std::stringstream ss;
    if (iRet == 0)
    {
        ss << std::to_string(ts.tv_sec) << "_" << std::to_string(ts.tv_nsec);
    }
    else
    {
        ALOGW("clock_gettime failed with errno %s : %s", std::to_string(errno).c_str(), std::strerror(errno));
    }
    return ss.str();
}

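// Renames the previously exported .armnn and .dot files (named with a timestamp when they were created)
// so that they are prefixed with the network id once it is known.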
void RenameExportedFiles(const std::string& existingSerializedFileName,
                         const std::string& existingDotFileName,
                         const std::string& dumpDir,
                         const armnn::NetworkId networkId)
{
    if (dumpDir.empty())
    {
        return;
    }
    RenameFile(existingSerializedFileName, std::string("_network.armnn"), dumpDir, networkId);
    RenameFile(existingDotFileName, std::string("_networkgraph.dot"), dumpDir, networkId);
}

void RenameFile(const std::string& existingName,
                const std::string& extension,
                const std::string& dumpDir,
                const armnn::NetworkId networkId)
{
    if (existingName.empty() || dumpDir.empty())
    {
        return;
    }

    fs::path dumpPath = dumpDir;
    const fs::path newFileName = dumpPath / (std::to_string(networkId) + extension);
    int iRet = rename(existingName.c_str(), newFileName.c_str());
    if (iRet != 0)
    {
        std::stringstream ss;
        ss << "rename of [" << existingName << "] to [" << newFileName << "] failed with errno "
           << std::to_string(errno) << " : " << std::strerror(errno);
        // Pass the message through a "%s" format so it cannot be misinterpreted as a format string.
        ALOGW("%s", ss.str().c_str());
    }
}

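// Commits all runtime memory pools so that output data written by the driver is visible to the caller.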
void CommitPools(std::vector<::android::nn::RunTimePoolInfo>& memPools)
{
    if (memPools.empty())
    {
        return;
    }
    // Commit output buffers.
    // Note that we update *all* pools, even if they aren't actually used as outputs -
    // this is simpler and is what the CpuExecutor does.
    for (auto& pool : memPools)
    {
        // Type android::nn::RunTimePoolInfo has changed between Android P & Q and Android R, where
        // update() has been removed and flush() added.
#if defined(ARMNN_ANDROID_R) || defined(ARMNN_ANDROID_S) // Use the new Android implementation.
        pool.flush();
#else
        pool.update();
#endif
    }
}

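// Returns the size in bytes of the memory pool referenced by a request argument
// (0 for V1_3 pools that are not backed by hidl memory).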
size_t GetSize(const V1_0::Request& request, const V1_0::RequestArgument& requestArgument)
{
    return request.pools[requestArgument.location.poolIndex].size();
}

#ifdef ARMNN_ANDROID_NN_V1_3
size_t GetSize(const V1_3::Request& request, const V1_0::RequestArgument& requestArgument)
{
    if (request.pools[requestArgument.location.poolIndex].getDiscriminator() ==
        V1_3::Request::MemoryPool::hidl_discriminator::hidlMemory)
    {
        return request.pools[requestArgument.location.poolIndex].hidlMemory().size();
    }
    else
    {
        return 0;
    }
}
#endif

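// Checks that a request argument points at a valid pool and that its offset plus the tensor size
// fits inside that pool; logs an error and returns GENERAL_FAILURE otherwise.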
template <typename ErrorStatus, typename Request>
ErrorStatus ValidateRequestArgument(const Request& request,
                                    const armnn::TensorInfo& tensorInfo,
                                    const V1_0::RequestArgument& requestArgument,
                                    std::string descString)
{
    if (requestArgument.location.poolIndex >= request.pools.size())
    {
        std::string err = fmt::format("Invalid {} pool at index {} the pool index is greater than the number "
                                      "of available pools {}",
                                      descString, requestArgument.location.poolIndex, request.pools.size());
        ALOGE("%s", err.c_str());
        return ErrorStatus::GENERAL_FAILURE;
    }
    const size_t size = GetSize(request, requestArgument);
    size_t totalLength = tensorInfo.GetNumBytes();

    if (static_cast<size_t>(requestArgument.location.offset) + totalLength > size)
    {
        std::string err = fmt::format("Invalid {} pool at index {} the offset {} and length {} are greater "
                                      "than the pool size {}", descString, requestArgument.location.poolIndex,
                                      requestArgument.location.offset, totalLength, size);
        ALOGE("%s", err.c_str());
        return ErrorStatus::GENERAL_FAILURE;
    }
    return ErrorStatus::NONE;
}

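// Explicit instantiations of ValidateRequestArgument for the HAL versions supported by the driver.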
template V1_0::ErrorStatus ValidateRequestArgument<V1_0::ErrorStatus, V1_0::Request>(
    const V1_0::Request& request,
    const armnn::TensorInfo& tensorInfo,
    const V1_0::RequestArgument& requestArgument,
    std::string descString);

#ifdef ARMNN_ANDROID_NN_V1_3
template V1_3::ErrorStatus ValidateRequestArgument<V1_3::ErrorStatus, V1_3::Request>(
    const V1_3::Request& request,
    const armnn::TensorInfo& tensorInfo,
    const V1_0::RequestArgument& requestArgument,
    std::string descString);
#endif

} // namespace armnn_driver