//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "SchemaSerialize.hpp"
#include "test/TensorHelpers.hpp"

#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"

#include <ArmnnSchema_generated.h>
#include <armnn/IRuntime.hpp>
#include <armnnDeserializer/IDeserializer.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <ResolveType.hpp>

#include <fmt/format.h>

#include <map>
#include <string>
#include <vector>

using armnnDeserializer::IDeserializer;
using TensorRawPtr = armnnSerializer::TensorInfo*;

struct ParserFlatbuffersSerializeFixture
{
    ParserFlatbuffersSerializeFixture() :
        m_Parser(IDeserializer::Create()),
        m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
        m_NetworkIdentifier(-1)
    {
    }

    std::vector<uint8_t> m_GraphBinary;
    std::string m_JsonString;
    std::unique_ptr<IDeserializer, void (*)(IDeserializer* parser)> m_Parser;
    armnn::IRuntimePtr m_Runtime;
    armnn::NetworkId m_NetworkIdentifier;

    /// If SetupSingleInputSingleOutput() is called, these store the input and output name
    /// so they don't need to be passed to the single-input-single-output overload of RunTest().
    std::string m_SingleInputName;
    std::string m_SingleOutputName;

    void Setup()
    {
        bool ok = ReadStringToBinary();
        if (!ok)
        {
            throw armnn::Exception("LoadNetwork failed while reading binary input");
        }

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromBinary(m_GraphBinary);

        if (!network)
        {
            throw armnn::Exception("The parser failed to create an ArmNN network");
        }

        auto optimized = Optimize(*network, {armnn::Compute::CpuRef},
                                  m_Runtime->GetDeviceSpec());

        std::string errorMessage;
        armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);

        if (ret != armnn::Status::Success)
        {
            throw armnn::Exception(fmt::format("The runtime failed to load the network. "
                                               "Error was: {0}. in {1} [{2}:{3}]",
                                               errorMessage,
                                               __func__,
                                               __FILE__,
                                               __LINE__));
        }
    }

    void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName)
    {
        // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
        m_SingleInputName = inputName;
        m_SingleOutputName = outputName;
        Setup();
    }

    bool ReadStringToBinary()
    {
        std::string schemafile(&deserialize_schema_start, &deserialize_schema_end);

        // parse schema first, so we can use it to parse the data after
        flatbuffers::Parser parser;

        bool ok = parser.Parse(schemafile.c_str());
        ARMNN_ASSERT_MSG(ok, "Failed to parse schema file");

        ok &= parser.Parse(m_JsonString.c_str());
        ARMNN_ASSERT_MSG(ok, "Failed to parse json input");

        if (!ok)
        {
            return false;
        }

        {
            const uint8_t* bufferPtr = parser.builder_.GetBufferPointer();
            size_t size = static_cast<size_t>(parser.builder_.GetSize());
            m_GraphBinary.assign(bufferPtr, bufferPtr+size);
        }
        return ok;
    }

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// This overload assumes the network has a single input and a single output.
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnType,
             typename DataType = armnn::ResolveType<ArmnnType>>
    void RunTest(unsigned int layersId,
                 const std::vector<DataType>& inputData,
                 const std::vector<DataType>& expectedOutputData);

    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnInputType,
             armnn::DataType ArmnnOutputType,
             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
    void RunTest(unsigned int layersId,
                 const std::vector<InputDataType>& inputData,
                 const std::vector<OutputDataType>& expectedOutputData);

    /// Executes the network with the given input tensors and checks the results against the given output tensors.
    /// This overload supports multiple inputs and multiple outputs, identified by name.
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnType,
             typename DataType = armnn::ResolveType<ArmnnType>>
    void RunTest(unsigned int layersId,
                 const std::map<std::string, std::vector<DataType>>& inputData,
                 const std::map<std::string, std::vector<DataType>>& expectedOutputData);

    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnInputType,
             armnn::DataType ArmnnOutputType,
             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
    void RunTest(unsigned int layersId,
                 const std::map<std::string, std::vector<InputDataType>>& inputData,
                 const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData);

    void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<int32_t>& shape,
                      armnnSerializer::TensorInfo tensorType, const std::string& name,
                      const float scale, const int64_t zeroPoint)
    {
        armnn::IgnoreUnused(name);
        BOOST_CHECK_EQUAL(shapeSize, tensors->dimensions()->size());
        BOOST_CHECK_EQUAL_COLLECTIONS(shape.begin(), shape.end(),
                                      tensors->dimensions()->begin(), tensors->dimensions()->end());
        BOOST_CHECK_EQUAL(tensorType.dataType(), tensors->dataType());
        BOOST_CHECK_EQUAL(scale, tensors->quantizationScale());
        BOOST_CHECK_EQUAL(zeroPoint, tensors->quantizationOffset());
    }
};
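
// A minimal usage sketch (illustrative only, not part of the fixture): a test typically derives
// from this fixture, assigns a serialized-network JSON document matching the ArmnnSchema
// flatbuffers schema to m_JsonString, calls SetupSingleInputSingleOutput() or Setup(), and then
// invokes RunTest(). The "SimpleNetworkFixture" struct, layer names, layersId value and tensor
// data below are hypothetical placeholders chosen for illustration.
//
//     struct SimpleNetworkFixture : ParserFlatbuffersSerializeFixture
//     {
//         SimpleNetworkFixture()
//         {
//             m_JsonString = R"({ ... serialized network described in JSON ... })";
//             SetupSingleInputSingleOutput("InputLayer", "OutputLayer");
//         }
//     };
//
//     BOOST_FIXTURE_TEST_CASE(SimpleNetwork, SimpleNetworkFixture)
//     {
//         // One output dimension, Float32 data; 0 is a hypothetical layers index.
//         RunTest<1, armnn::DataType::Float32>(0, { 1.0f, 2.0f, 3.0f, 4.0f },
//                                                 { 1.0f, 2.0f, 3.0f, 4.0f });
//     }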

template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::vector<DataType>& inputData,
                                                const std::vector<DataType>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
}

template<std::size_t NumOutputDimensions,
         armnn::DataType ArmnnInputType,
         armnn::DataType ArmnnOutputType,
         typename InputDataType,
         typename OutputDataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::vector<InputDataType>& inputData,
                                                const std::vector<OutputDataType>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnInputType, ArmnnOutputType>(layersId,
                                                                  { { m_SingleInputName, inputData } },
                                                                  { { m_SingleOutputName, expectedOutputData } });
}

template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::map<std::string, std::vector<DataType>>& inputData,
                                                const std::map<std::string, std::vector<DataType>>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
}

template<std::size_t NumOutputDimensions,
         armnn::DataType ArmnnInputType,
         armnn::DataType ArmnnOutputType,
         typename InputDataType,
         typename OutputDataType>
void ParserFlatbuffersSerializeFixture::RunTest(
    unsigned int layersId,
    const std::map<std::string, std::vector<InputDataType>>& inputData,
    const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData)
{
    auto ConvertBindingInfo = [](const armnnDeserializer::BindingPointInfo& bindingInfo)
    {
        return std::make_pair(bindingInfo.m_BindingId, bindingInfo.m_TensorInfo);
    };

    // Set up the armnn input tensors from the given vectors.
    armnn::InputTensors inputTensors;
    for (auto&& it : inputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkInputBindingInfo(layersId, it.first));
        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnInputType);
        inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
    }

    // Allocate storage for the output tensors to be written to and set up the armnn output tensors.
    std::map<std::string, std::vector<OutputDataType>> outputStorage;
    armnn::OutputTensors outputTensors;
    for (auto&& it : expectedOutputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType);
        outputStorage.emplace(it.first, std::vector<OutputDataType>(bindingInfo.second.GetNumElements()));
        outputTensors.push_back(
            { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
    }

    m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

    // Compare each output tensor to the expected values.
    for (auto&& it : expectedOutputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
        auto outputExpected = it.second;
        auto result = CompareTensors(outputExpected, outputStorage[it.first],
                                     bindingInfo.second.GetShape(), bindingInfo.second.GetShape());
        BOOST_TEST(result.m_Result, result.m_Message.str());
    }
}