//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include <armnn/ArmNN.hpp>

#include <CpuExecutor.h>
#include <HalInterfaces.h>
#include <NeuralNetworks.h>

#include <boost/format.hpp>
#include <log/log.h>

#include <vector>
#include <string>
#include <fstream>
#include <iomanip>

namespace V1_0 = ::android::hardware::neuralnetworks::V1_0;

namespace armnn_driver
{

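/// A permutation vector indicating that no permutation should be applied
/// (see the swizzling helpers below).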
extern const armnn::PermutationVector g_DontPermute;

class UnsupportedOperand: public std::runtime_error
{
public:
    UnsupportedOperand(const OperandType type)
        : std::runtime_error("Operand type is unsupported")
        , m_type(type)
    {}

    OperandType m_type;
};

/// Swizzles tensor data in @a input according to the dimension mappings.
void SwizzleAndroidNn4dTensorToArmNn(const armnn::TensorInfo& tensor, const void* input, void* output,
                                     const armnn::PermutationVector& mappings);
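
// Example usage (illustrative sketch; assumes 'inputInfo' describes a 4D NHWC tensor that should be
// swizzled to NCHW, with 'src'/'dst' pointing at appropriately sized buffers):
//
//     const armnn::PermutationVector nhwcToNchw({ 0, 2, 3, 1 });
//     SwizzleAndroidNn4dTensorToArmNn(inputInfo, src, dst, nhwcToNchw);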

/// Returns a pointer to a specific location in a pool
void* GetMemoryFromPool(DataLocation location,
                        const std::vector<android::nn::RunTimePoolInfo>& memPools);
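
// Example usage (illustrative sketch; 'requestArg' is assumed to be a V1_0::RequestArgument and
// 'memPools' to have been populated from the request's memory pools):
//
//     void* inputData = GetMemoryFromPool(requestArg.location, memPools);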

/// Can throw UnsupportedOperand
armnn::TensorInfo GetTensorInfoForOperand(const V1_0::Operand& operand);
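
// Example usage (illustrative sketch; 'operand' is assumed to be a V1_0::Operand from a HAL model):
//
//     try
//     {
//         const armnn::TensorInfo tensorInfo = GetTensorInfoForOperand(operand);
//     }
//     catch (const UnsupportedOperand& e)
//     {
//         ALOGW("Unsupported operand type: %d", static_cast<int>(e.m_type));
//     }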

std::string GetOperandSummary(const V1_0::Operand& operand);

template <typename HalModel>
std::string GetModelSummary(const HalModel& model)
{
    std::stringstream result;

    result << model.inputIndexes.size() << " input(s), " << model.operations.size() << " operation(s), " <<
        model.outputIndexes.size() << " output(s), " << model.operands.size() << " operand(s)" << std::endl;

    result << "Inputs: ";
    for (uint32_t i = 0; i < model.inputIndexes.size(); i++)
    {
        result << GetOperandSummary(model.operands[model.inputIndexes[i]]) << ", ";
    }
    result << std::endl;

    result << "Operations: ";
    for (uint32_t i = 0; i < model.operations.size(); i++)
    {
        result << toString(model.operations[i].type) << ", ";
    }
    result << std::endl;

    result << "Outputs: ";
    for (uint32_t i = 0; i < model.outputIndexes.size(); i++)
    {
        result << GetOperandSummary(model.operands[model.outputIndexes[i]]) << ", ";
    }
    result << std::endl;

    return result.str();
}
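
// Example usage (illustrative sketch; 'model' is assumed to be the HAL model handed to the driver,
// e.g. in prepareModel()):
//
//     ALOGV("%s", GetModelSummary(model).c_str());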

void DumpTensor(const std::string& dumpDir,
                const std::string& requestName,
                const std::string& tensorName,
                const armnn::ConstTensor& tensor);

void DumpJsonProfilingIfRequired(bool gpuProfilingEnabled,
                                 const std::string& dumpDir,
                                 armnn::NetworkId networkId,
                                 const armnn::IProfiler* profiler);
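
// Example usage (illustrative sketch; 'runtime', 'options' and 'netId' are assumed to be the driver's
// armnn::IRuntime instance, its DriverOptions and the armnn::NetworkId of the loaded network):
//
//     DumpJsonProfilingIfRequired(options.IsGpuProfilingEnabled(),
//                                 options.GetRequestInputsAndOutputsDumpDir(),
//                                 netId,
//                                 runtime->GetProfiler(netId).get());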

template <typename HalModel>
void ExportNetworkGraphToDotFile(const armnn::IOptimizedNetwork& optimizedNetwork,
                                 const std::string& dumpDir,
                                 const HalModel& model)
{
    // The dump directory must exist in advance.
    if (dumpDir.empty())
    {
        return;
    }

    // Get the memory address of the model and convert it to a hex string (of at least one character).
    size_t modelAddress = uintptr_t(&model);
    std::stringstream ss;
    ss << std::uppercase << std::hex << std::setfill('0') << std::setw(1) << modelAddress;
    std::string modelAddressHexString = ss.str();

    // Set the name of the output .dot file.
    const std::string fileName = boost::str(boost::format("%1%/networkgraph_%2%.dot")
                                            % dumpDir
                                            % modelAddressHexString);

    ALOGV("Exporting the optimized network graph to file: %s", fileName.c_str());

    // Write the network graph to a dot file.
    std::ofstream fileStream;
    fileStream.open(fileName, std::ofstream::out | std::ofstream::trunc);

    if (!fileStream.good())
    {
        ALOGW("Could not open file %s for writing", fileName.c_str());
        return;
    }

    if (optimizedNetwork.SerializeToDot(fileStream) != armnn::Status::Success)
    {
        ALOGW("An error occurred when writing to file %s", fileName.c_str());
    }
}
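
// Example usage (illustrative sketch; 'optNet' is assumed to be the armnn::IOptimizedNetworkPtr
// returned by armnn::Optimize() and 'dumpDir' a directory taken from the driver options):
//
//     ExportNetworkGraphToDotFile(*optNet, dumpDir, model);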

} // namespace armnn_driver