//
// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
5#pragma once
6
7#include <ResolveType.hpp>
8
9#include <armnn/INetwork.hpp>
10
11#include <doctest/doctest.h>
12#include <CommonTestUtils.hpp>
13
14namespace
15{
16
17template<typename armnn::DataType DataType>
18armnn::INetworkPtr CreateBatchMatMulNetwork(const armnn::TensorShape& inputXShape,
19 const armnn::TensorShape& inputYShape,
20 const armnn::TensorShape& outputShape,
21 const float qScale = 1.0f,
22 const int32_t qOffset = 0)
23{
24 using namespace armnn;
25
26 INetworkPtr network(INetwork::Create());
27
28 TensorInfo inputXTensorInfo(inputXShape, DataType, qScale, qOffset, true);
29 TensorInfo inputYTensorInfo(inputYShape, DataType, qScale, qOffset, true);
30
31 TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);
32
33 BatchMatMulDescriptor batchMatMulDesc;
34 batchMatMulDesc.m_TransposeX = false;
35 batchMatMulDesc.m_TransposeY = true;
36
37 IConnectableLayer* batchMatMul = network->AddBatchMatMulLayer(batchMatMulDesc, "batchMatMul");
38 IConnectableLayer* inputX = network->AddInputLayer(0, "inputX");
39 IConnectableLayer* inputY = network->AddInputLayer(1, "inputY");
40 IConnectableLayer* output = network->AddOutputLayer(0, "output");
41
42 Connect(inputX, batchMatMul, inputXTensorInfo, 0, 0);
43 Connect(inputY, batchMatMul, inputYTensorInfo, 0, 1);
44 Connect(batchMatMul, output, outputTensorInfo, 0, 0);
45
46 return network;
47}
48
49template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
50void BatchMatMulEndToEnd(const std::vector<armnn::BackendId>& backends)
51{
52 using namespace armnn;
53
54 const TensorShape& inputXShape = { 2, 2, 2 };
55 const TensorShape& inputYShape = { 2, 2, 2 };
56 const TensorShape& outputShape = { 2, 2, 2 };
57
Teresa Charlin1fe6c812022-11-01 15:59:50 +000058 constexpr float qScale = 1.0f;
59 constexpr int32_t qOffset = 0;
60
61 INetworkPtr network = CreateBatchMatMulNetwork<ArmnnType>(inputXShape, inputYShape, outputShape, qScale, qOffset);
Teresa Charlina38da592022-10-31 22:09:23 +000062
63 CHECK(network);
64
Teresa Charlin1fe6c812022-11-01 15:59:50 +000065 std::vector<float> floatInputXData{ 1., 2.,
66 3., 4.,
Teresa Charlina38da592022-10-31 22:09:23 +000067
Teresa Charlin1fe6c812022-11-01 15:59:50 +000068 9., 10.,
69 11., 12. };
70 std::vector<T> inputXData = armnnUtils::QuantizedVector<T>(floatInputXData, qScale, qOffset);
Teresa Charlina38da592022-10-31 22:09:23 +000071
Teresa Charlin1fe6c812022-11-01 15:59:50 +000072 std::vector<float> floatInputYData{ 5., 7.,
73 6., 8.,
Teresa Charlina38da592022-10-31 22:09:23 +000074
Teresa Charlin1fe6c812022-11-01 15:59:50 +000075 13., 15.,
76 14., 16. };
77 std::vector<T> inputYData = armnnUtils::QuantizedVector<T>(floatInputYData, qScale, qOffset);
Teresa Charlina38da592022-10-31 22:09:23 +000078
Teresa Charlin1fe6c812022-11-01 15:59:50 +000079 std::vector<float> floatExpectedOutputData{ 19., 22.,
80 43., 50.,
Teresa Charlina38da592022-10-31 22:09:23 +000081
Teresa Charlin1fe6c812022-11-01 15:59:50 +000082 267., 286.,
83 323., 346. };
84 std::vector<T> expectedOutputData = armnnUtils::QuantizedVector<T>(floatExpectedOutputData, qScale, qOffset);
85
86 std::map<int, std::vector<T>> inputTensor = {{ 0, inputXData }, {1, inputYData}};
87 std::map<int, std::vector<T>> expectedOutput = { { 0, expectedOutputData } };
88
89 EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network), inputTensor, expectedOutput, backends);
Teresa Charlina38da592022-10-31 22:09:23 +000090}
91
92} // anonymous namespace