//
// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "SerializerTestUtils.hpp"
#include "../Serializer.hpp"

using armnnDeserializer::IDeserializer;
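
// Typical round-trip usage in the serializer tests (a sketch only; the network,
// layer name and tensor infos are placeholders for whatever a given test builds):
//
//     armnn::INetworkPtr deserializedNetwork = DeserializeNetwork(SerializeNetwork(*network));
//     LayerVerifierBase verifier("layerName", {inputInfo}, {outputInfo});
//     deserializedNetwork->ExecuteStrategy(verifier);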

LayerVerifierBase::LayerVerifierBase(const std::string& layerName,
                                     const std::vector<armnn::TensorInfo>& inputInfos,
                                     const std::vector<armnn::TensorInfo>& outputInfos)
    : m_LayerName(layerName)
    , m_InputTensorInfos(inputInfos)
    , m_OutputTensorInfos(outputInfos)
{}
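
// IStrategy callback invoked once per layer when a network is walked with
// ExecuteStrategy(). Input and Output layers are skipped; every other layer is
// checked against the expected name and tensor infos.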
void LayerVerifierBase::ExecuteStrategy(const armnn::IConnectableLayer* layer,
                                        const armnn::BaseDescriptor& descriptor,
                                        const std::vector<armnn::ConstTensor>& constants,
                                        const char* name,
                                        const armnn::LayerBindingId id)
{
    armnn::IgnoreUnused(descriptor, constants, id);
    switch (layer->GetType())
    {
        case armnn::LayerType::Input: break;
        case armnn::LayerType::Output: break;
        default:
        {
            VerifyNameAndConnections(layer, name);
        }
    }
}
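
// Checks that a layer's name, input/output slot counts, and the tensor info on
// each connection (shape, data type, quantization scale and offset) match the
// values the verifier was constructed with.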
void LayerVerifierBase::VerifyNameAndConnections(const armnn::IConnectableLayer* layer, const char* name)
{
    // Compare the layer name by value (name is a C string, m_LayerName a std::string)
    BOOST_TEST(name == m_LayerName);

    BOOST_TEST(layer->GetNumInputSlots() == m_InputTensorInfos.size());
    BOOST_TEST(layer->GetNumOutputSlots() == m_OutputTensorInfos.size());

    for (unsigned int i = 0; i < m_InputTensorInfos.size(); i++)
    {
        const armnn::IOutputSlot* connectedOutput = layer->GetInputSlot(i).GetConnection();
        BOOST_CHECK(connectedOutput);

        const armnn::TensorInfo& connectedInfo = connectedOutput->GetTensorInfo();
        BOOST_TEST(connectedInfo.GetShape() == m_InputTensorInfos[i].GetShape());
        BOOST_TEST(
            GetDataTypeName(connectedInfo.GetDataType()) == GetDataTypeName(m_InputTensorInfos[i].GetDataType()));

        BOOST_TEST(connectedInfo.GetQuantizationScale() == m_InputTensorInfos[i].GetQuantizationScale());
        BOOST_TEST(connectedInfo.GetQuantizationOffset() == m_InputTensorInfos[i].GetQuantizationOffset());
    }

    for (unsigned int i = 0; i < m_OutputTensorInfos.size(); i++)
    {
        const armnn::TensorInfo& outputInfo = layer->GetOutputSlot(i).GetTensorInfo();
        BOOST_TEST(outputInfo.GetShape() == m_OutputTensorInfos[i].GetShape());
        BOOST_TEST(
            GetDataTypeName(outputInfo.GetDataType()) == GetDataTypeName(m_OutputTensorInfos[i].GetDataType()));

        BOOST_TEST(outputInfo.GetQuantizationScale() == m_OutputTensorInfos[i].GetQuantizationScale());
        BOOST_TEST(outputInfo.GetQuantizationOffset() == m_OutputTensorInfos[i].GetQuantizationOffset());
    }
}
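
// Compares an expected and an actual constant tensor, either of which may be
// absent. When both exist, the shape, data type, size in bytes and the raw
// payload must match byte for byte.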
void LayerVerifierBase::VerifyConstTensors(const std::string& tensorName,
                                           const armnn::ConstTensor* expectedPtr,
                                           const armnn::ConstTensor* actualPtr)
{
    if (expectedPtr == nullptr)
    {
        BOOST_CHECK_MESSAGE(actualPtr == nullptr, tensorName + " should not exist");
    }
    else
    {
        BOOST_CHECK_MESSAGE(actualPtr != nullptr, tensorName + " should have been set");
        if (actualPtr != nullptr)
        {
            const armnn::TensorInfo& expectedInfo = expectedPtr->GetInfo();
            const armnn::TensorInfo& actualInfo = actualPtr->GetInfo();

            BOOST_CHECK_MESSAGE(expectedInfo.GetShape() == actualInfo.GetShape(),
                                tensorName + " shapes don't match");
            BOOST_CHECK_MESSAGE(
                GetDataTypeName(expectedInfo.GetDataType()) == GetDataTypeName(actualInfo.GetDataType()),
                tensorName + " data types don't match");

            BOOST_CHECK_MESSAGE(expectedPtr->GetNumBytes() == actualPtr->GetNumBytes(),
                                tensorName + " (GetNumBytes) data sizes do not match");
            if (expectedPtr->GetNumBytes() == actualPtr->GetNumBytes())
            {
                // Check that the data is identical byte for byte
                const char* expectedData = static_cast<const char*>(expectedPtr->GetMemoryArea());
                const char* actualData = static_cast<const char*>(actualPtr->GetMemoryArea());
                bool same = true;
                for (unsigned int i = 0; i < expectedPtr->GetNumBytes(); ++i)
                {
                    same = expectedData[i] == actualData[i];
                    if (!same)
                    {
                        break;
                    }
                }
                BOOST_CHECK_MESSAGE(same, tensorName + " data does not match");
            }
        }
    }
}
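
// Asserts that two constant tensors have the same shape, data type and element
// data. The data comparison dispatches on the data type; Float16 is not yet
// handled and fails the test.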
void CompareConstTensor(const armnn::ConstTensor& tensor1, const armnn::ConstTensor& tensor2)
{
    BOOST_TEST(tensor1.GetShape() == tensor2.GetShape());
    BOOST_TEST(GetDataTypeName(tensor1.GetDataType()) == GetDataTypeName(tensor2.GetDataType()));

    switch (tensor1.GetDataType())
    {
        case armnn::DataType::Float32:
            CompareConstTensorData<const float*>(
                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
            break;
        case armnn::DataType::QAsymmU8:
        case armnn::DataType::Boolean:
            CompareConstTensorData<const uint8_t*>(
                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
            break;
        case armnn::DataType::QSymmS8:
            CompareConstTensorData<const int8_t*>(
                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
            break;
        case armnn::DataType::Signed32:
            CompareConstTensorData<const int32_t*>(
                tensor1.GetMemoryArea(), tensor2.GetMemoryArea(), tensor1.GetNumElements());
            break;
        default:
            // Note that Float16 is not yet implemented
            BOOST_TEST_MESSAGE("Unexpected datatype");
            BOOST_TEST(false);
    }
}
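
// Builds an INetwork from the raw bytes produced by SerializeNetwork().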
armnn::INetworkPtr DeserializeNetwork(const std::string& serializerString)
{
    std::vector<std::uint8_t> const serializerVector{serializerString.begin(), serializerString.end()};
    return IDeserializer::Create()->CreateNetworkFromBinary(serializerVector);
}
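
// Serializes a network to a string of raw bytes so it can be fed back through
// DeserializeNetwork() for round-trip testing.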
std::string SerializeNetwork(const armnn::INetwork& network)
{
    armnnSerializer::ISerializerPtr serializer = armnnSerializer::ISerializer::Create();

    serializer->Serialize(network);

    std::stringstream stream;
    serializer->SaveSerializedToStream(stream);

    std::string serializerString{stream.str()};
    return serializerString;
}