blob: 676dc7120dd60aa9ae68348741d2fb33f5a2d946 [file] [log] [blame]
telsoa01c577f2c2018-08-31 09:22:23 +01001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa01c577f2c2018-08-31 09:22:23 +01004//
5
#pragma once

#include <boost/filesystem.hpp>
#include <boost/assert.hpp>
#include <boost/format.hpp>
#include <experimental/filesystem>
#include <armnn/IRuntime.hpp>
#include <armnn/TypesUtils.hpp>
#include "test/TensorHelpers.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <backendsCommon/BackendRegistry.hpp>

#include "flatbuffers/idl.h"
#include "flatbuffers/util.h"

#include <schema_generated.h>
#include <iostream>
#include <utility>
25
26using armnnTfLiteParser::ITfLiteParser;
27using TensorRawPtr = const tflite::TensorT *;
28
29struct ParserFlatbuffersFixture
30{
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000031 ParserFlatbuffersFixture() :
32 m_Parser(ITfLiteParser::Create()),
33 m_Runtime(armnn::IRuntime::Create(armnn::IRuntime::CreationOptions())),
34 m_NetworkIdentifier(-1)
telsoa01c577f2c2018-08-31 09:22:23 +010035 {
telsoa01c577f2c2018-08-31 09:22:23 +010036 }
37
38 std::vector<uint8_t> m_GraphBinary;
39 std::string m_JsonString;
40 std::unique_ptr<ITfLiteParser, void (*)(ITfLiteParser *parser)> m_Parser;
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000041 armnn::IRuntimePtr m_Runtime;
telsoa01c577f2c2018-08-31 09:22:23 +010042 armnn::NetworkId m_NetworkIdentifier;
43
44 /// If the single-input-single-output overload of Setup() is called, these will store the input and output name
45 /// so they don't need to be passed to the single-input-single-output overload of RunTest().
46 std::string m_SingleInputName;
47 std::string m_SingleOutputName;
48
49 void Setup()
50 {
51 bool ok = ReadStringToBinary();
52 if (!ok) {
53 throw armnn::Exception("LoadNetwork failed while reading binary input");
54 }
55
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000056 armnn::INetworkPtr network =
57 m_Parser->CreateNetworkFromBinary(m_GraphBinary);
58
59 if (!network) {
60 throw armnn::Exception("The parser failed to create an ArmNN network");
61 }
62
63 auto optimized = Optimize(*network, { armnn::Compute::CpuRef },
64 m_Runtime->GetDeviceSpec());
65 std::string errorMessage;
66
67 armnn::Status ret = m_Runtime->LoadNetwork(m_NetworkIdentifier, move(optimized), errorMessage);
68
69 if (ret != armnn::Status::Success)
telsoa01c577f2c2018-08-31 09:22:23 +010070 {
Aron Virginas-Tar1d67a6902018-11-19 10:58:30 +000071 throw armnn::Exception(
72 boost::str(
73 boost::format("The runtime failed to load the network. "
74 "Error was: %1%. in %2% [%3%:%4%]") %
75 errorMessage %
76 __func__ %
77 __FILE__ %
78 __LINE__));
telsoa01c577f2c2018-08-31 09:22:23 +010079 }
80 }
81
82 void SetupSingleInputSingleOutput(const std::string& inputName, const std::string& outputName)
83 {
84 // Store the input and output name so they don't need to be passed to the single-input-single-output RunTest().
85 m_SingleInputName = inputName;
86 m_SingleOutputName = outputName;
87 Setup();
88 }
89
90 bool ReadStringToBinary()
91 {
92 const char* schemafileName = getenv("ARMNN_TF_LITE_SCHEMA_PATH");
93 if (schemafileName == nullptr)
94 {
95 schemafileName = ARMNN_TF_LITE_SCHEMA_PATH;
96 }
97 std::string schemafile;
98
99 bool ok = flatbuffers::LoadFile(schemafileName, false, &schemafile);
100 BOOST_ASSERT_MSG(ok, "Couldn't load schema file " ARMNN_TF_LITE_SCHEMA_PATH);
101 if (!ok)
102 {
103 return false;
104 }
105
106 // parse schema first, so we can use it to parse the data after
107 flatbuffers::Parser parser;
108
109 ok &= parser.Parse(schemafile.c_str());
110 BOOST_ASSERT_MSG(ok, "Failed to parse schema file");
111
112 ok &= parser.Parse(m_JsonString.c_str());
113 BOOST_ASSERT_MSG(ok, "Failed to parse json input");
114
115 if (!ok)
116 {
117 return false;
118 }
119
120 {
121 const uint8_t * bufferPtr = parser.builder_.GetBufferPointer();
122 size_t size = static_cast<size_t>(parser.builder_.GetSize());
123 m_GraphBinary.assign(bufferPtr, bufferPtr+size);
124 }
125 return ok;
126 }
127
128 /// Executes the network with the given input tensor and checks the result against the given output tensor.
129 /// This overload assumes the network has a single input and a single output.
130 template <std::size_t NumOutputDimensions, typename DataType>
131 void RunTest(size_t subgraphId,
132 const std::vector<DataType>& inputData,
133 const std::vector<DataType>& expectedOutputData);
134
135 /// Executes the network with the given input tensors and checks the results against the given output tensors.
136 /// This overload supports multiple inputs and multiple outputs, identified by name.
137 template <std::size_t NumOutputDimensions, typename DataType>
138 void RunTest(size_t subgraphId,
139 const std::map<std::string, std::vector<DataType>>& inputData,
140 const std::map<std::string, std::vector<DataType>>& expectedOutputData);
141
142 void CheckTensors(const TensorRawPtr& tensors, size_t shapeSize, const std::vector<int32_t>& shape,
143 tflite::TensorType tensorType, uint32_t buffer, const std::string& name,
144 const std::vector<float>& min, const std::vector<float>& max,
145 const std::vector<float>& scale, const std::vector<int64_t>& zeroPoint)
146 {
147 BOOST_CHECK(tensors);
148 BOOST_CHECK_EQUAL(shapeSize, tensors->shape.size());
149 BOOST_CHECK_EQUAL_COLLECTIONS(shape.begin(), shape.end(), tensors->shape.begin(), tensors->shape.end());
150 BOOST_CHECK_EQUAL(tensorType, tensors->type);
151 BOOST_CHECK_EQUAL(buffer, tensors->buffer);
152 BOOST_CHECK_EQUAL(name, tensors->name);
153 BOOST_CHECK(tensors->quantization);
154 BOOST_CHECK_EQUAL_COLLECTIONS(min.begin(), min.end(), tensors->quantization.get()->min.begin(),
155 tensors->quantization.get()->min.end());
156 BOOST_CHECK_EQUAL_COLLECTIONS(max.begin(), max.end(), tensors->quantization.get()->max.begin(),
157 tensors->quantization.get()->max.end());
158 BOOST_CHECK_EQUAL_COLLECTIONS(scale.begin(), scale.end(), tensors->quantization.get()->scale.begin(),
159 tensors->quantization.get()->scale.end());
160 BOOST_CHECK_EQUAL_COLLECTIONS(zeroPoint.begin(), zeroPoint.end(),
161 tensors->quantization.get()->zero_point.begin(),
162 tensors->quantization.get()->zero_point.end());
163 }
164};
165
166template <std::size_t NumOutputDimensions, typename DataType>
167void ParserFlatbuffersFixture::RunTest(size_t subgraphId,
168 const std::vector<DataType>& inputData,
169 const std::vector<DataType>& expectedOutputData)
170{
171 RunTest<NumOutputDimensions, DataType>(subgraphId,
172 { { m_SingleInputName, inputData } },
173 { { m_SingleOutputName, expectedOutputData } });
174}
175
/// Executes the loaded network once and compares every named output against its expected values.
/// @param subgraphId          index of the tflite subgraph whose input/output bindings are queried.
/// @param inputData           map of input tensor name -> flat data vector.
/// @param expectedOutputData  map of output tensor name -> flat expected data vector.
template <std::size_t NumOutputDimensions, typename DataType>
void
ParserFlatbuffersFixture::RunTest(size_t subgraphId,
                                  const std::map<std::string, std::vector<DataType>>& inputData,
                                  const std::map<std::string, std::vector<DataType>>& expectedOutputData)
{
    using BindingPointInfo = std::pair<armnn::LayerBindingId, armnn::TensorInfo>;

    // Setup the armnn input tensors from the given vectors.
    armnn::InputTensors inputTensors;
    for (auto&& it : inputData)
    {
        BindingPointInfo bindingInfo = m_Parser->GetNetworkInputBindingInfo(subgraphId, it.first);
        // Assert the parser-reported element type matches DataType before wrapping the caller's buffer.
        armnn::VerifyTensorInfoDataType<DataType>(bindingInfo.second);
        inputTensors.push_back({ bindingInfo.first, armnn::ConstTensor(bindingInfo.second, it.second.data()) });
    }

    // Allocate storage for the output tensors to be written to and setup the armnn output tensors.
    // outputStorage must outlive EnqueueWorkload below: the runtime writes into these buffers.
    std::map<std::string, boost::multi_array<DataType, NumOutputDimensions>> outputStorage;
    armnn::OutputTensors outputTensors;
    for (auto&& it : expectedOutputData)
    {
        BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
        armnn::VerifyTensorInfoDataType<DataType>(bindingInfo.second);
        outputStorage.emplace(it.first, MakeTensor<DataType, NumOutputDimensions>(bindingInfo.second));
        outputTensors.push_back(
                { bindingInfo.first, armnn::Tensor(bindingInfo.second, outputStorage.at(it.first).data()) });
    }

    // Runs inference; results land in the outputStorage buffers registered just above.
    m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);

    // Compare each output tensor to the expected values
    for (auto&& it : expectedOutputData)
    {
        // NOTE(review): binding info is re-queried here; only .second (the TensorInfo) is used
        // to shape the expected tensor for the comparison.
        BindingPointInfo bindingInfo = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first);
        auto outputExpected = MakeTensor<DataType, NumOutputDimensions>(bindingInfo.second, it.second);
        BOOST_TEST(CompareTensors(outputExpected, outputStorage[it.first]));
    }
}