//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#pragma once

#include "SchemaSerialize.hpp"

#include <armnn/IRuntime.hpp>
#include <armnnDeserializer/IDeserializer.hpp>

#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>

#include <ResolveType.hpp>
#include "test/TensorHelpers.hpp"

#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"

#include <ArmnnSchema_generated.h>

using armnnDeserializer::IDeserializer;
using TensorRawPtr = armnnSerializer::TensorInfo*;

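/// Test fixture used by the ArmNN Deserializer unit tests.
/// A test fills m_JsonString with a JSON description of a serialized network, calls Setup() (or
/// SetupSingleInputSingleOutput()), and then calls one of the RunTest() overloads to execute the network
/// on the reference backend and compare its outputs against expected values.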
struct ParserFlatbuffersSerializeFixture
{
    ParserFlatbuffersSerializeFixture() :
        m_Parser(IDeserializer::Create()),
        m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
        m_NetworkIdentifier(-1)
    {
    }

    std::vector<uint8_t> m_GraphBinary;
    std::string m_JsonString;
    std::unique_ptr<IDeserializer, void (*)(IDeserializer* parser)> m_Parser;
    armnn::IRuntimePtr m_Runtime;
    armnn::NetworkId m_NetworkIdentifier;

    /// If SetupSingleInputSingleOutput() is called, these store the input and output names so they don't need
    /// to be passed to the single-input-single-output overloads of RunTest().
    std::string m_SingleInputName;
    std::string m_SingleOutputName;

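    /// Converts m_JsonString into a binary flatbuffer, deserializes it into an ArmNN network,
    /// optimizes the network for the reference backend (CpuRef) and loads it into the runtime.
    /// Throws armnn::Exception if any of these steps fails.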
    void Setup()
    {
        bool ok = ReadStringToBinary();
        if (!ok)
        {
            throw armnn::Exception("LoadNetwork failed while reading binary input");
        }

        armnn::INetworkPtr network =
            m_Parser->CreateNetworkFromBinary(m_GraphBinary);

        if (!network)
        {
            throw armnn::Exception("The parser failed to create an ArmNN network");
        }

        auto optimized = Optimize(*network, {armnn::Compute::CpuRef},
                                  m_Runtime->GetDeviceSpec());

        std::string errorMessage;
        armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, std::move(optimized), errorMessage);

        if (ret != armnn::Status::Success)
        {
            throw armnn::Exception(
                boost::str(
                    boost::format("The runtime failed to load the network. "
                                  "Error was: %1%. in %2% [%3%:%4%]") %
                    errorMessage %
                    __func__ %
                    __FILE__ %
                    __LINE__));
        }
    }

    void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName)
    {
        // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
        m_SingleInputName = inputName;
        m_SingleOutputName = outputName;
        Setup();
    }

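    /// Parses the embedded serializer schema (the deserialize_schema_start/end symbols from SchemaSerialize.hpp)
    /// and then parses m_JsonString against it, storing the resulting flatbuffer binary in m_GraphBinary.
    /// Returns false if either parse fails.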
    bool ReadStringToBinary()
    {
        std::string schemafile(&deserialize_schema_start, &deserialize_schema_end);

        // Parse the schema first, so we can use it to parse the JSON data afterwards.
        flatbuffers::Parser parser;

        bool ok = parser.Parse(schemafile.c_str());
        BOOST_ASSERT_MSG(ok, "Failed to parse schema file");

        ok &= parser.Parse(m_JsonString.c_str());
        BOOST_ASSERT_MSG(ok, "Failed to parse json input");

        if (!ok)
        {
            return false;
        }

        {
            const uint8_t* bufferPtr = parser.builder_.GetBufferPointer();
            size_t size = static_cast<size_t>(parser.builder_.GetSize());
            m_GraphBinary.assign(bufferPtr, bufferPtr + size);
        }
        return ok;
    }

    /// Executes the network with the given input tensor and checks the result against the given output tensor.
    /// This overload assumes the network has a single input and a single output.
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnType,
             typename DataType = armnn::ResolveType<ArmnnType>>
    void RunTest(unsigned int layersId,
                 const std::vector<DataType>& inputData,
                 const std::vector<DataType>& expectedOutputData);

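    /// As above, but allows the input and output tensors to use different ArmNN data types.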
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnInputType,
             armnn::DataType ArmnnOutputType,
             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
    void RunTest(unsigned int layersId,
                 const std::vector<InputDataType>& inputData,
                 const std::vector<OutputDataType>& expectedOutputData);

    /// Executes the network with the given input tensors and checks the results against the given output tensors.
    /// This overload supports multiple inputs and multiple outputs, identified by name.
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnType,
             typename DataType = armnn::ResolveType<ArmnnType>>
    void RunTest(unsigned int layersId,
                 const std::map<std::string, std::vector<DataType>>& inputData,
                 const std::map<std::string, std::vector<DataType>>& expectedOutputData);

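    /// As above, but allows the input and output tensors to use different ArmNN data types.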
    template<std::size_t NumOutputDimensions,
             armnn::DataType ArmnnInputType,
             armnn::DataType ArmnnOutputType,
             typename InputDataType = armnn::ResolveType<ArmnnInputType>,
             typename OutputDataType = armnn::ResolveType<ArmnnOutputType>>
    void RunTest(unsigned int layersId,
                 const std::map<std::string, std::vector<InputDataType>>& inputData,
                 const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData);

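    /// Checks a serialized tensor's shape, data type and quantization parameters against expected values.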
    void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<int32_t>& shape,
                      armnnSerializer::TensorInfo tensorType, const std::string& name,
                      const float scale, const int64_t zeroPoint)
    {
        boost::ignore_unused(name);
        BOOST_CHECK_EQUAL(shapeSize, tensors->dimensions()->size());
        BOOST_CHECK_EQUAL_COLLECTIONS(shape.begin(), shape.end(),
                                      tensors->dimensions()->begin(), tensors->dimensions()->end());
        BOOST_CHECK_EQUAL(tensorType.dataType(), tensors->dataType());
        BOOST_CHECK_EQUAL(scale, tensors->quantizationScale());
        BOOST_CHECK_EQUAL(zeroPoint, tensors->quantizationOffset());
    }
};

template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::vector<DataType>& inputData,
                                                const std::vector<DataType>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
}

template<std::size_t NumOutputDimensions,
         armnn::DataType ArmnnInputType,
         armnn::DataType ArmnnOutputType,
         typename InputDataType,
         typename OutputDataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::vector<InputDataType>& inputData,
                                                const std::vector<OutputDataType>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnInputType, ArmnnOutputType>(layersId,
                                                                  { { m_SingleInputName, inputData } },
                                                                  { { m_SingleOutputName, expectedOutputData } });
}

template<std::size_t NumOutputDimensions, armnn::DataType ArmnnType, typename DataType>
void ParserFlatbuffersSerializeFixture::RunTest(unsigned int layersId,
                                                const std::map<std::string, std::vector<DataType>>& inputData,
                                                const std::map<std::string, std::vector<DataType>>& expectedOutputData)
{
    RunTest<NumOutputDimensions, ArmnnType, ArmnnType, DataType, DataType>(layersId, inputData, expectedOutputData);
}

template<std::size_t NumOutputDimensions,
         armnn::DataType ArmnnInputType,
         armnn::DataType ArmnnOutputType,
         typename InputDataType,
         typename OutputDataType>
void ParserFlatbuffersSerializeFixture::RunTest(
    unsigned int layersId,
    const std::map<std::string, std::vector<InputDataType>>& inputData,
    const std::map<std::string, std::vector<OutputDataType>>& expectedOutputData)
{
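    // The deserializer reports bindings with its own BindingPointInfo struct; convert it to the
    // (binding id, tensor info) pair form used by armnn::BindingPointInfo.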
    auto ConvertBindingInfo = [](const armnnDeserializer::BindingPointInfo& bindingInfo)
    {
        return std::make_pair(bindingInfo.m_BindingId, bindingInfo.m_TensorInfo);
    };

    // Setup the armnn input tensors from the given vectors.
    armnn::InputTensors inputTensors;
    for (auto&& it : inputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkInputBindingInfo(layersId, it.first));
        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnInputType);
        inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
    }

    // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
    std::map<std::string, boost::multi_array<OutputDataType, NumOutputDimensions>> outputStorage;
    armnn::OutputTensors outputTensors;
    for (auto&& it : expectedOutputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
        armnn::VerifyTensorInfoDataType(bindingInfo.second, ArmnnOutputType);
        outputStorage.emplace(it.first, MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second));
        outputTensors.push_back(
            { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
    }

    m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

    // Compare each output tensor to the expected values.
    for (auto&& it : expectedOutputData)
    {
        armnn::BindingPointInfo bindingInfo = ConvertBindingInfo(
            m_Parser->GetNetworkOutputBindingInfo(layersId, it.first));
        auto outputExpected = MakeTensor<OutputDataType, NumOutputDimensions>(bindingInfo.second, it.second);
        BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
    }
}
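
// A minimal usage sketch (illustrative only, not part of the fixture): a deserializer test typically derives
// from this fixture, fills m_JsonString with the serialized network, and calls one of the RunTest() overloads.
// The fixture name, layer names, JSON contents and tensor values below are hypothetical placeholders.
//
//     struct SimpleNetworkFixture : ParserFlatbuffersSerializeFixture
//     {
//         SimpleNetworkFixture()
//         {
//             m_JsonString = R"( ... JSON matching ArmnnSchema.fbs goes here ... )";
//             SetupSingleInputSingleOutput("InputLayer", "OutputLayer");
//         }
//     };
//
//     BOOST_FIXTURE_TEST_CASE(DeserializeSimpleNetwork, SimpleNetworkFixture)
//     {
//         // 4-dimensional output, Float32 data on both input and output.
//         RunTest<4, armnn::DataType::Float32>(0, { 1.0f, 2.0f, 3.0f, 4.0f }, { 1.0f, 2.0f, 3.0f, 4.0f });
//     }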