blob: b0bfdfc0167a1b4d060bbee10a5858ea78aa9d2f [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
5
#pragma once

#include "Schema.hpp"

#include <armnn/BackendRegistry.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/utility/Assert.hpp>

#include <armnnTfLiteParser/ITfLiteParser.hpp>

#include <ResolveType.hpp>

#include <test/TensorHelpers.hpp>

#include <fmt/format.h>
#include <doctest/doctest.h>

#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"
#include "flatbuffers/flexbuffers.h"

#include <schema_generated.h>

#include <algorithm>
#include <iostream>
#include <iterator>
#include <sstream>
#include <utility>
32
33using armnnTfLiteParser::ITfLiteParser;
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010034using armnnTfLiteParser::ITfLiteParserPtr;
telsoa01c577f2c2018-08-31 09:22:23 +010035
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010036using TensorRawPtr = const tflite::TensorT *;
telsoa01c577f2c2018-08-31 09:22:23 +010037struct ParserFlatbuffersFixture
38{
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000039 ParserFlatbuffersFixture() :
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010040 m_Parser(nullptr, &ITfLiteParser::Destroy),
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000041 m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
42 m_NetworkIdentifier(-1)
telsoa01c577f2c2018-08-31 09:22:23 +010043 {
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010044 ITfLiteParser::TfLiteParserOptions options;
45 options.m_StandInLayerForUnsupported = true;
Sadik Armagand109a4d2020-07-28 10:42:13 +010046 options.m_InferAndValidate = true;
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010047
48 m_Parser.reset(ITfLiteParser::CreateRaw(armnn::Optional<ITfLiteParser::TfLiteParserOptions>(options)));
telsoa01c577f2c2018-08-31 09:22:23 +010049 }
50
51 std::vector<uint8_t> m_GraphBinary;
Aron Virginas-Tarc975f922019-10-23 17:38:17 +010052 std::string m_JsonString;
53 ITfLiteParserPtr m_Parser;
54 armnn::IRuntimePtr m_Runtime;
55 armnn::NetworkId m_NetworkIdentifier;
telsoa01c577f2c2018-08-31 09:22:23 +010056
57 /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
58 /// so they don't need to be passed to the single-input-single-output overload of RunTest().
59 std::string m_SingleInputName;
60 std::string m_SingleOutputName;
61
62 void Setup()
63 {
64 bool ok = ReadStringToBinary();
65 if (!ok) {
66 throw armnn::Exception("LoadNetwork failed while reading binary input");
67 }
68
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000069 armnn::INetworkPtr network =
70 m_Parser->CreateNetworkFromBinary(m_GraphBinary);
71
72 if (!network) {
73 throw armnn::Exception("The parser failed to create an ArmNN network");
74 }
75
76 auto optimized = Optimize(*network, { armnn::Compute::CpuRef },
77 m_Runtime->GetDeviceSpec());
78 std::string errorMessage;
79
80 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
81
82 if (ret != armnn::Status::Success)
telsoa01c577f2c2018-08-31 09:22:23 +010083 {
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000084 throw armnn::Exception(
James Ward58dec6b2020-09-11 17:32:44 +010085 fmt::format("The runtime failed to load the network. "
86 "Error was: {}. in {} [{}:{}]",
87 errorMessage,
88 __func__,
89 __FILE__,
90 __LINE__));
telsoa01c577f2c2018-08-31 09:22:23 +010091 }
92 }
93
94 void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName)
95 {
96 // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
97 m_SingleInputName = inputName;
98 m_SingleOutputName = outputName;
99 Setup();
100 }
101
102 bool ReadStringToBinary()
103 {
Rob Hughesff3c4262019-12-20 17:43:16 +0000104 std::string schemafile(g_TfLiteSchemaText, g_TfLiteSchemaText + g_TfLiteSchemaText_len);
telsoa01c577f2c2018-08-31 09:22:23 +0100105
106 // parse schema first, so we can use it to parse the data after
107 flatbuffers::Parser parser;
108
Matthew Bentham6c8e8e72019-01-15 17:57:00 +0000109 bool ok = parser.Parse(schemafile.c_str());
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100110 ARMNN_ASSERT_MSG(ok, "Failed to parse schema file");
telsoa01c577f2c2018-08-31 09:22:23 +0100111
112 ok &= parser.Parse(m_JsonString.c_str());
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100113 ARMNN_ASSERT_MSG(ok, "Failed to parse json input");
telsoa01c577f2c2018-08-31 09:22:23 +0100114
115 if (!ok)
116 {
117 return false;
118 }
119
120 {
121 const uint8_t * bufferPtr = parser.builder_.GetBufferPointer();
122 size_t size = static_cast<size_t>(parser.builder_.GetSize());
123 m_GraphBinary.assign(bufferPtr, bufferPtr+size);
124 }
125 return ok;
126 }
127
128 /// Executes the network with the given input tensor and checks the result against the given output tensor.
keidav011b3e2ea2019-02-21 10:07:37 +0000129 /// This assumes the network has a single input and a single output.
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000130 template <std::size_t NumOutputDimensions,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000131 armnn::DataType ArmnnType>
telsoa01c577f2c2018-08-31 09:22:23 +0100132 void RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000133 const std::vector<armnn::ResolveType<ArmnnType>>& inputData,
134 const std::vector<armnn::ResolveType<ArmnnType>>& expectedOutputData);
telsoa01c577f2c2018-08-31 09:22:23 +0100135
136 /// Executes the network with the given input tensors and checks the results against the given output tensors.
137 /// This overload supports multiple inputs and multiple outputs, identified by name.
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000138 template <std::size_t NumOutputDimensions,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000139 armnn::DataType ArmnnType>
telsoa01c577f2c2018-08-31 09:22:23 +0100140 void RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000141 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType>>>& inputData,
142 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType>>>& expectedOutputData);
telsoa01c577f2c2018-08-31 09:22:23 +0100143
keidav011b3e2ea2019-02-21 10:07:37 +0000144 /// Multiple Inputs, Multiple Outputs w/ Variable Datatypes and different dimension sizes.
145 /// Executes the network with the given input tensors and checks the results against the given output tensors.
146 /// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
147 /// the input datatype to be different to the output
148 template <std::size_t NumOutputDimensions,
149 armnn::DataType ArmnnType1,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000150 armnn::DataType ArmnnType2>
keidav011b3e2ea2019-02-21 10:07:37 +0000151 void RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000152 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType1>>>& inputData,
Sadik Armagand109a4d2020-07-28 10:42:13 +0100153 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType2>>>& expectedOutputData,
154 bool isDynamic = false);
keidav011b3e2ea2019-02-21 10:07:37 +0000155
Sadik Armagan26868492021-01-22 14:25:31 +0000156 /// Multiple Inputs with different DataTypes, Multiple Outputs w/ Variable DataTypes
157 /// Executes the network with the given input tensors and checks the results against the given output tensors.
158 /// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
159 /// the input datatype to be different to the output
160 template <std::size_t NumOutputDimensions,
161 armnn::DataType inputType1,
162 armnn::DataType inputType2,
163 armnn::DataType outputType>
164 void RunTest(size_t subgraphId,
165 const std::map<std::string, std::vector<armnn::ResolveType<inputType1>>>& input1Data,
166 const std::map<std::string, std::vector<armnn::ResolveType<inputType2>>>& input2Data,
167 const std::map<std::string, std::vector<armnn::ResolveType<outputType>>>& expectedOutputData);
keidav011b3e2ea2019-02-21 10:07:37 +0000168
169 /// Multiple Inputs, Multiple Outputs w/ Variable Datatypes and different dimension sizes.
170 /// Executes the network with the given input tensors and checks the results against the given output tensors.
171 /// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
172 /// the input datatype to be different to the output
173 template<armnn::DataType ArmnnType1,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000174 armnn::DataType ArmnnType2>
keidav011b3e2ea2019-02-21 10:07:37 +0000175 void RunTest(std::size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000176 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType1>>>& inputData,
177 const std::map<std::string, std::vector<armnn::ResolveType<ArmnnType2>>>& expectedOutputData);
keidav011b3e2ea2019-02-21 10:07:37 +0000178
keidav01222c7532019-03-14 17:12:10 +0000179 static inline std::string GenerateDetectionPostProcessJsonString(
180 const armnn::DetectionPostProcessDescriptor& descriptor)
181 {
182 flexbuffers::Builder detectPostProcess;
183 detectPostProcess.Map([&]() {
184 detectPostProcess.Bool("use_regular_nms", descriptor.m_UseRegularNms);
185 detectPostProcess.Int("max_detections", descriptor.m_MaxDetections);
186 detectPostProcess.Int("max_classes_per_detection", descriptor.m_MaxClassesPerDetection);
187 detectPostProcess.Int("detections_per_class", descriptor.m_DetectionsPerClass);
188 detectPostProcess.Int("num_classes", descriptor.m_NumClasses);
189 detectPostProcess.Float("nms_score_threshold", descriptor.m_NmsScoreThreshold);
190 detectPostProcess.Float("nms_iou_threshold", descriptor.m_NmsIouThreshold);
191 detectPostProcess.Float("h_scale", descriptor.m_ScaleH);
192 detectPostProcess.Float("w_scale", descriptor.m_ScaleW);
193 detectPostProcess.Float("x_scale", descriptor.m_ScaleX);
194 detectPostProcess.Float("y_scale", descriptor.m_ScaleY);
195 });
196 detectPostProcess.Finish();
197
198 // Create JSON string
199 std::stringstream strStream;
200 std::vector<uint8_t> buffer = detectPostProcess.GetBuffer();
201 std::copy(buffer.begin(), buffer.end(),std::ostream_iterator<int>(strStream,","));
202
203 return strStream.str();
204 }
205
telsoa01c577f2c2018-08-31 09:22:23 +0100206 void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<int32_t>& shape,
207 tflite::TensorType tensorType, uint32_t buffer, const std::string& name,
208 const std::vector<float>& min, const std::vector<float>& max,
209 const std::vector<float>& scale, const std::vector<int64_t>& zeroPoint)
210 {
Sadik Armagan1625efc2021-06-10 18:24:34 +0100211 CHECK(tensors);
212 CHECK_EQ(shapeSize, tensors->shape.size());
213 CHECK(std::equal(shape.begin(), shape.end(), tensors->shape.begin(), tensors->shape.end()));
214 CHECK_EQ(tensorType, tensors->type);
215 CHECK_EQ(buffer, tensors->buffer);
216 CHECK_EQ(name, tensors->name);
217 CHECK(tensors->quantization);
218 CHECK(std::equal(min.begin(), min.end(), tensors->quantization.get()->min.begin(),
219 tensors->quantization.get()->min.end()));
220 CHECK(std::equal(max.begin(), max.end(), tensors->quantization.get()->max.begin(),
221 tensors->quantization.get()->max.end()));
222 CHECK(std::equal(scale.begin(), scale.end(), tensors->quantization.get()->scale.begin(),
223 tensors->quantization.get()->scale.end()));
224 CHECK(std::equal(zeroPoint.begin(), zeroPoint.end(),
telsoa01c577f2c2018-08-31 09:22:23 +0100225 tensors->quantization.get()->zero_point.begin(),
Sadik Armagan1625efc2021-06-10 18:24:34 +0100226 tensors->quantization.get()->zero_point.end()));
telsoa01c577f2c2018-08-31 09:22:23 +0100227 }
Sadik Armagan26868492021-01-22 14:25:31 +0000228
229private:
230 /// Fills the InputTensors with given input data
231 template <armnn::DataType dataType>
232 void FillInputTensors(armnn::InputTensors& inputTensors,
233 const std::map<std::string, std::vector<armnn::ResolveType<dataType>>>& inputData,
234 size_t subgraphId);
telsoa01c577f2c2018-08-31 09:22:23 +0100235};
236
Sadik Armagan26868492021-01-22 14:25:31 +0000237/// Fills the InputTensors with given input data
238template <armnn::DataType dataType>
239void ParserFlatbuffersFixture::FillInputTensors(
240 armnn::InputTensors& inputTensors,
241 const std::map<std::string, std::vector<armnn::ResolveType<dataType>>>& inputData,
242 size_t subgraphId)
243{
244 for (auto&& it : inputData)
245 {
246 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(subgraphId, it.first);
247 armnn::VerifyTensorInfoDataType(bindingInfo.second, dataType);
248 inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
249 }
250}
251
keidav011b3e2ea2019-02-21 10:07:37 +0000252/// Single Input, Single Output
253/// Executes the network with the given input tensor and checks the result against the given output tensor.
254/// This overload assumes the network has a single input and a single output.
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000255template <std::size_t NumOutputDimensions,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000256 armnn::DataType armnnType>
telsoa01c577f2c2018-08-31 09:22:23 +0100257void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000258 const std::vector<armnn::ResolveType<armnnType>>& inputData,
259 const std::vector<armnn::ResolveType<armnnType>>& expectedOutputData)
telsoa01c577f2c2018-08-31 09:22:23 +0100260{
keidav011b3e2ea2019-02-21 10:07:37 +0000261 RunTest<NumOutputDimensions, armnnType>(subgraphId,
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000262 { { m_SingleInputName, inputData } },
263 { { m_SingleOutputName, expectedOutputData } });
telsoa01c577f2c2018-08-31 09:22:23 +0100264}
265
keidav011b3e2ea2019-02-21 10:07:37 +0000266/// Multiple Inputs, Multiple Outputs
267/// Executes the network with the given input tensors and checks the results against the given output tensors.
268/// This overload supports multiple inputs and multiple outputs, identified by name.
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000269template <std::size_t NumOutputDimensions,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000270 armnn::DataType armnnType>
Nattapat Chaimanowong649dd952019-01-22 16:10:44 +0000271void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000272 const std::map<std::string, std::vector<armnn::ResolveType<armnnType>>>& inputData,
273 const std::map<std::string, std::vector<armnn::ResolveType<armnnType>>>& expectedOutputData)
telsoa01c577f2c2018-08-31 09:22:23 +0100274{
keidav011b3e2ea2019-02-21 10:07:37 +0000275 RunTest<NumOutputDimensions, armnnType, armnnType>(subgraphId, inputData, expectedOutputData);
276}
277
278/// Multiple Inputs, Multiple Outputs w/ Variable Datatypes
279/// Executes the network with the given input tensors and checks the results against the given output tensors.
280/// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
281/// the input datatype to be different to the output
282template <std::size_t NumOutputDimensions,
283 armnn::DataType armnnType1,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000284 armnn::DataType armnnType2>
keidav011b3e2ea2019-02-21 10:07:37 +0000285void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000286 const std::map<std::string, std::vector<armnn::ResolveType<armnnType1>>>& inputData,
Sadik Armagand109a4d2020-07-28 10:42:13 +0100287 const std::map<std::string, std::vector<armnn::ResolveType<armnnType2>>>& expectedOutputData,
288 bool isDynamic)
keidav011b3e2ea2019-02-21 10:07:37 +0000289{
Rob Hughesfc6bf052019-12-16 17:10:51 +0000290 using DataType2 = armnn::ResolveType<armnnType2>;
291
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000292 // Setup the armnn input tensors from the given vectors.
293 armnn::InputTensors inputTensors;
Sadik Armagan26868492021-01-22 14:25:31 +0000294 FillInputTensors<armnnType1>(inputTensors, inputData, subgraphId);
telsoa01c577f2c2018-08-31 09:22:23 +0100295
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000296 // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
Sadik Armagan483c8112021-06-01 09:24:52 +0100297 std::map<std::string, std::vector<DataType2>> outputStorage;
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000298 armnn::OutputTensors outputTensors;
299 for (auto&& it : expectedOutputData)
300 {
Narumol Prangnawarat386681a2019-04-29 16:40:55 +0100301 armnn::LayerBindingId outputBindingId = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first).first;
302 armnn::TensorInfo outputTensorInfo = m_Runtime->GetOutputTensorInfo(m_NetworkIdentifier, outputBindingId);
303
304 // Check that output tensors have correct number of dimensions (NumOutputDimensions specified in test)
305 auto outputNumDimensions = outputTensorInfo.GetNumDimensions();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100306 CHECK_MESSAGE((outputNumDimensions == NumOutputDimensions),
James Ward58dec6b2020-09-11 17:32:44 +0100307 fmt::format("Number of dimensions expected {}, but got {} for output layer {}",
308 NumOutputDimensions,
309 outputNumDimensions,
310 it.first));
Narumol Prangnawarat386681a2019-04-29 16:40:55 +0100311
312 armnn::VerifyTensorInfoDataType(outputTensorInfo, armnnType2);
Sadik Armagan483c8112021-06-01 09:24:52 +0100313 outputStorage.emplace(it.first, std::vector<DataType2>(outputTensorInfo.GetNumElements()));
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000314 outputTensors.push_back(
Narumol Prangnawarat386681a2019-04-29 16:40:55 +0100315 { outputBindingId, armnn::Tensor(outputTensorInfo, outputStorage.at(it.first).data()) });
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000316 }
telsoa01c577f2c2018-08-31 09:22:23 +0100317
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000318 m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
telsoa01c577f2c2018-08-31 09:22:23 +0100319
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +0000320 // Compare each output tensor to the expected values
321 for (auto&& it : expectedOutputData)
322 {
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100323 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
Sadik Armagan483c8112021-06-01 09:24:52 +0100324 auto outputExpected = it.second;
325 auto result = CompareTensors(outputExpected, outputStorage[it.first],
326 bindingInfo.second.GetShape(), bindingInfo.second.GetShape(),
327 false, isDynamic);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100328 CHECK_MESSAGE(result.m_Result, result.m_Message.str());
telsoa01c577f2c2018-08-31 09:22:23 +0100329 }
330}
keidav011b3e2ea2019-02-21 10:07:37 +0000331
332/// Multiple Inputs, Multiple Outputs w/ Variable Datatypes and different dimension sizes.
333/// Executes the network with the given input tensors and checks the results against the given output tensors.
334/// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
335/// the input datatype to be different to the output.
336template <armnn::DataType armnnType1,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000337 armnn::DataType armnnType2>
keidav011b3e2ea2019-02-21 10:07:37 +0000338void ParserFlatbuffersFixture::RunTest(std::size_t subgraphId,
Rob Hughesfc6bf052019-12-16 17:10:51 +0000339 const std::map<std::string, std::vector<armnn::ResolveType<armnnType1>>>& inputData,
340 const std::map<std::string, std::vector<armnn::ResolveType<armnnType2>>>& expectedOutputData)
keidav011b3e2ea2019-02-21 10:07:37 +0000341{
Rob Hughesfc6bf052019-12-16 17:10:51 +0000342 using DataType2 = armnn::ResolveType<armnnType2>;
343
keidav011b3e2ea2019-02-21 10:07:37 +0000344 // Setup the armnn input tensors from the given vectors.
345 armnn::InputTensors inputTensors;
Sadik Armagan26868492021-01-22 14:25:31 +0000346 FillInputTensors<armnnType1>(inputTensors, inputData, subgraphId);
keidav011b3e2ea2019-02-21 10:07:37 +0000347
348 armnn::OutputTensors outputTensors;
349 outputTensors.reserve(expectedOutputData.size());
350 std::map<std::string, std::vector<DataType2>> outputStorage;
351 for (auto&& it : expectedOutputData)
352 {
Jim Flynnb4d7eae2019-05-01 14:44:27 +0100353 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
keidav011b3e2ea2019-02-21 10:07:37 +0000354 armnn::VerifyTensorInfoDataType(bindingInfo.second, armnnType2);
355
356 std::vector<DataType2> out(it.second.size());
357 outputStorage.emplace(it.first, out);
358 outputTensors.push_back({ bindingInfo.first,
359 armnn::Tensor(bindingInfo.second,
360 outputStorage.at(it.first).data()) });
361 }
362
363 m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
364
365 // Checks the results.
366 for (auto&& it : expectedOutputData)
367 {
Rob Hughesfc6bf052019-12-16 17:10:51 +0000368 std::vector<armnn::ResolveType<armnnType2>> out = outputStorage.at(it.first);
keidav011b3e2ea2019-02-21 10:07:37 +0000369 {
370 for (unsigned int i = 0; i < out.size(); ++i)
371 {
Sadik Armagan1625efc2021-06-10 18:24:34 +0100372 CHECK(doctest::Approx(it.second[i]).epsilon(0.000001f) == out[i]);
keidav011b3e2ea2019-02-21 10:07:37 +0000373 }
374 }
375 }
Aron Virginas-Tarc975f922019-10-23 17:38:17 +0100376}
Sadik Armagan26868492021-01-22 14:25:31 +0000377
378/// Multiple Inputs with different DataTypes, Multiple Outputs w/ Variable DataTypes
379/// Executes the network with the given input tensors and checks the results against the given output tensors.
380/// This overload supports multiple inputs and multiple outputs, identified by name along with the allowance for
381/// the input datatype to be different to the output
382template <std::size_t NumOutputDimensions,
383 armnn::DataType inputType1,
384 armnn::DataType inputType2,
385 armnn::DataType outputType>
386void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
387 const std::map<std::string, std::vector<armnn::ResolveType<inputType1>>>& input1Data,
388 const std::map<std::string, std::vector<armnn::ResolveType<inputType2>>>& input2Data,
389 const std::map<std::string, std::vector<armnn::ResolveType<outputType>>>& expectedOutputData)
390{
391 using DataType2 = armnn::ResolveType<outputType>;
392
393 // Setup the armnn input tensors from the given vectors.
394 armnn::InputTensors inputTensors;
395 FillInputTensors<inputType1>(inputTensors, input1Data, subgraphId);
396 FillInputTensors<inputType2>(inputTensors, input2Data, subgraphId);
397
398 // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
Sadik Armagan483c8112021-06-01 09:24:52 +0100399 std::map<std::string, std::vector<DataType2>> outputStorage;
Sadik Armagan26868492021-01-22 14:25:31 +0000400 armnn::OutputTensors outputTensors;
401 for (auto&& it : expectedOutputData)
402 {
403 armnn::LayerBindingId outputBindingId = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first).first;
404 armnn::TensorInfo outputTensorInfo = m_Runtime->GetOutputTensorInfo(m_NetworkIdentifier, outputBindingId);
405
406 // Check that output tensors have correct number of dimensions (NumOutputDimensions specified in test)
407 auto outputNumDimensions = outputTensorInfo.GetNumDimensions();
Sadik Armagan1625efc2021-06-10 18:24:34 +0100408 CHECK_MESSAGE((outputNumDimensions == NumOutputDimensions),
Sadik Armagan26868492021-01-22 14:25:31 +0000409 fmt::format("Number of dimensions expected {}, but got {} for output layer {}",
410 NumOutputDimensions,
411 outputNumDimensions,
412 it.first));
413
414 armnn::VerifyTensorInfoDataType(outputTensorInfo, outputType);
Sadik Armagan483c8112021-06-01 09:24:52 +0100415 outputStorage.emplace(it.first, std::vector<DataType2>(outputTensorInfo.GetNumElements()));
Sadik Armagan26868492021-01-22 14:25:31 +0000416 outputTensors.push_back(
417 { outputBindingId, armnn::Tensor(outputTensorInfo, outputStorage.at(it.first).data()) });
418 }
419
420 m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
421
422 // Compare each output tensor to the expected values
423 for (auto&& it : expectedOutputData)
424 {
425 armnn::BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
Sadik Armagan483c8112021-06-01 09:24:52 +0100426 auto outputExpected = it.second;
427 auto result = CompareTensors(outputExpected, outputStorage[it.first],
428 bindingInfo.second.GetShape(), bindingInfo.second.GetShape(), false);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100429 CHECK_MESSAGE(result.m_Result, result.m_Message.str());
Sadik Armagan26868492021-01-22 14:25:31 +0000430 }
431}