//
// Copyright © 2019 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "ParserFlatbuffersFixture.hpp"

#include <armnn/StrategyBase.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/NumericCast.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <layers/StandInLayer.hpp>

#include <sstream>
#include <vector>

TEST_SUITE("TensorflowLiteParser_Unsupported")
{
using namespace armnn;

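// Verifies that an unsupported operator has been substituted with a StandInLayer whose
// inputs and outputs match the expected TensorInfos.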
class StandInLayerVerifier : public StrategyBase<NoThrowStrategy>
{
public:
    StandInLayerVerifier(const std::vector<TensorInfo>& inputInfos,
                         const std::vector<TensorInfo>& outputInfos)
        : m_InputInfos(inputInfos)
        , m_OutputInfos(outputInfos) {}

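    // Invoked for every layer in the network; StandIn layers are checked against the
    // expected tensor infos, all other layer types fall through to the NoThrowStrategy default.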
    void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                         const armnn::BaseDescriptor& descriptor,
                         const std::vector<armnn::ConstTensor>& constants,
                         const char* name,
                         const armnn::LayerBindingId id = 0) override
    {
        armnn::IgnoreUnused(descriptor, constants, id);
        switch (layer->GetType())
        {
            case armnn::LayerType::StandIn:
            {
                auto standInDescriptor = static_cast<const armnn::StandInDescriptor&>(descriptor);
                unsigned int numInputs = armnn::numeric_cast<unsigned int>(m_InputInfos.size());
                CHECK(standInDescriptor.m_NumInputs == numInputs);
                CHECK(layer->GetNumInputSlots() == numInputs);

                unsigned int numOutputs = armnn::numeric_cast<unsigned int>(m_OutputInfos.size());
                CHECK(standInDescriptor.m_NumOutputs == numOutputs);
                CHECK(layer->GetNumOutputSlots() == numOutputs);

                const StandInLayer* standInLayer = PolymorphicDowncast<const StandInLayer*>(layer);
                for (unsigned int i = 0u; i < numInputs; ++i)
                {
                    const OutputSlot* connectedSlot = standInLayer->GetInputSlot(i).GetConnectedOutputSlot();
                    CHECK(connectedSlot != nullptr);

                    const TensorInfo& inputInfo = connectedSlot->GetTensorInfo();
                    CHECK(inputInfo == m_InputInfos[i]);
                }

                for (unsigned int i = 0u; i < numOutputs; ++i)
                {
                    const TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
                    CHECK(outputInfo == m_OutputInfos[i]);
                }
                break;
            }
            default:
            {
                m_DefaultStrategy.Apply(GetLayerTypeAsCString(layer->GetType()));
            }
        }
    }

private:
    std::vector<TensorInfo> m_InputInfos;
    std::vector<TensorInfo> m_OutputInfos;
};

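// Builds a TfLite model containing a single custom operator ("DummyCustomOperator") that the
// parser does not support, wired up to the given input and output tensors.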
class DummyCustomFixture : public ParserFlatbuffersFixture
{
public:
    explicit DummyCustomFixture(const std::vector<TensorInfo>& inputInfos,
                                const std::vector<TensorInfo>& outputInfos)
        : ParserFlatbuffersFixture()
        , m_StandInLayerVerifier(inputInfos, outputInfos)
    {
        const unsigned int numInputs = armnn::numeric_cast<unsigned int>(inputInfos.size());
        ARMNN_ASSERT(numInputs > 0);

        const unsigned int numOutputs = armnn::numeric_cast<unsigned int>(outputInfos.size());
        ARMNN_ASSERT(numOutputs > 0);

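        // Assemble the model JSON: one subgraph whose tensor list is the inputs followed by the outputs.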
        m_JsonString = R"(
            {
                "version": 3,
                "operator_codes": [{
                    "builtin_code": "CUSTOM",
                    "custom_code": "DummyCustomOperator"
                }],
                "subgraphs": [ {
                    "tensors": [)";

        // Add input tensors
        for (unsigned int i = 0u; i < numInputs; ++i)
        {
            const TensorInfo& inputInfo = inputInfos[i];
            m_JsonString += R"(
                        {
                            "shape": )" + GetTensorShapeAsString(inputInfo.GetShape()) + R"(,
                            "type": )" + GetDataTypeAsString(inputInfo.GetDataType()) + R"(,
                            "buffer": 0,
                            "name": "inputTensor)" + std::to_string(i) + R"(",
                            "quantization": {
                                "min": [ 0.0 ],
                                "max": [ 255.0 ],
                                "scale": [ )" + std::to_string(inputInfo.GetQuantizationScale()) + R"( ],
                                "zero_point": [ )" + std::to_string(inputInfo.GetQuantizationOffset()) + R"( ],
                            }
                        },)";
        }

        // Add output tensors
        for (unsigned int i = 0u; i < numOutputs; ++i)
        {
            const TensorInfo& outputInfo = outputInfos[i];
            m_JsonString += R"(
                        {
                            "shape": )" + GetTensorShapeAsString(outputInfo.GetShape()) + R"(,
                            "type": )" + GetDataTypeAsString(outputInfo.GetDataType()) + R"(,
                            "buffer": 0,
                            "name": "outputTensor)" + std::to_string(i) + R"(",
                            "quantization": {
                                "min": [ 0.0 ],
                                "max": [ 255.0 ],
                                "scale": [ )" + std::to_string(outputInfo.GetQuantizationScale()) + R"( ],
                                "zero_point": [ )" + std::to_string(outputInfo.GetQuantizationOffset()) + R"( ],
                            }
                        })";

            if (i + 1 < numOutputs)
            {
                m_JsonString += ",";
            }
        }

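        // Input tensors were emitted first, so their indices start at 0 and the output indices follow on.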
        const std::string inputIndices  = GetIndicesAsString(0u, numInputs - 1u);
        const std::string outputIndices = GetIndicesAsString(numInputs, numInputs + numOutputs - 1u);

        // Add dummy custom operator
        m_JsonString += R"(],
                    "inputs": )" + inputIndices + R"(,
                    "outputs": )" + outputIndices + R"(,
                    "operators": [
                        {
                            "opcode_index": 0,
                            "inputs": )" + inputIndices + R"(,
                            "outputs": )" + outputIndices + R"(,
                            "builtin_options_type": 0,
                            "custom_options": [ ],
                            "custom_options_format": "FLEXBUFFERS"
                        }
                    ],
                } ],
                "buffers" : [
                    { },
                    { }
                ]
            }
        )";

        ReadStringToBinary();
    }

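    // Parses the generated model and runs the verifier over the resulting network.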
    void RunTest()
    {
        INetworkPtr network = m_Parser->CreateNetworkFromBinary(m_GraphBinary);
        network->ExecuteStrategy(m_StandInLayerVerifier);
    }

private:
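    // Serialises a TensorShape as a JSON array, e.g. "[ 1, 2 ]".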
    static std::string GetTensorShapeAsString(const TensorShape& tensorShape)
    {
        std::stringstream stream;
        stream << "[ ";
        for (unsigned int i = 0u; i < tensorShape.GetNumDimensions(); ++i)
        {
            stream << tensorShape[i];
            if (i + 1 < tensorShape.GetNumDimensions())
            {
                stream << ",";
            }
            stream << " ";
        }
        stream << "]";

        return stream.str();
    }

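    // Maps an armnn::DataType to the equivalent TfLite tensor type string.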
    static std::string GetDataTypeAsString(DataType dataType)
    {
        switch (dataType)
        {
            case DataType::Float32:  return "FLOAT32";
            case DataType::QAsymmU8: return "UINT8";
            default:                 return "UNKNOWN";
        }
    }

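    // Serialises the inclusive index range [first, last] as a JSON array.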
    static std::string GetIndicesAsString(unsigned int first, unsigned int last)
    {
        std::stringstream stream;
        stream << "[ ";
        for (unsigned int i = first; i <= last; ++i)
        {
            stream << i;
            if (i + 1 <= last)
            {
                stream << ",";
            }
            stream << " ";
        }
        stream << "]";

        return stream.str();
    }

    StandInLayerVerifier m_StandInLayerVerifier;
};

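// Unsupported custom operator with a single input and a single output.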
class DummyCustom1Input1OutputFixture : public DummyCustomFixture
{
public:
    DummyCustom1Input1OutputFixture()
        : DummyCustomFixture({ TensorInfo({ 1, 1 }, DataType::Float32) },
                             { TensorInfo({ 2, 2 }, DataType::Float32) }) {}
};

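// Unsupported custom operator with two inputs and a single output.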
class DummyCustom2Inputs1OutputFixture : public DummyCustomFixture
{
public:
    DummyCustom2Inputs1OutputFixture()
        : DummyCustomFixture({ TensorInfo({ 1, 1 }, DataType::Float32), TensorInfo({ 2, 2 }, DataType::Float32) },
                             { TensorInfo({ 3, 3 }, DataType::Float32) }) {}
};

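// The parser should not fail on the unsupported custom operator; it should stand it in
// while preserving the expected input and output tensor infos.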
TEST_CASE_FIXTURE(DummyCustom1Input1OutputFixture, "UnsupportedCustomOperator1Input1Output")
{
    RunTest();
}

TEST_CASE_FIXTURE(DummyCustom2Inputs1OutputFixture, "UnsupportedCustomOperator2Inputs1Output")
{
    RunTest();
}

}