//
// Copyright © 2019 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "EndToEndTestImpl.hpp"
#include "LogSoftmaxEndToEndTestImpl.hpp"

#include <armnn/INetwork.hpp>

#include <TestUtils.hpp>

#include <doctest/doctest.h>

namespace {

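// Builds a minimal single-layer graph, Input -> LogSoftmax -> Output, with the given
// beta/axis settings and optional quantization parameters applied to the tensor infos.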
template <typename armnn::DataType DataType>
armnn::INetworkPtr CreateLogSoftmaxNetwork(const armnn::TensorShape& inputShape,
                                           const armnn::TensorShape& outputShape,
                                           const float beta,
                                           const int axis,
                                           const float qScale = 1.0f,
                                           const int32_t qOffset = 0)
{
    using namespace armnn;

    // Builds up the structure of the network.
    INetworkPtr net(INetwork::Create());

    TensorInfo inputTensorInfo(inputShape, DataType, qScale, qOffset, true);

    LogSoftmaxDescriptor logSoftmaxDesc;
    logSoftmaxDesc.m_Beta = beta;
    logSoftmaxDesc.m_Axis = axis;

    IConnectableLayer* logSoftmax = net->AddLogSoftmaxLayer(logSoftmaxDesc, "Log_Softmax");
    IConnectableLayer* input = net->AddInputLayer(0, "input");
    Connect(input, logSoftmax, inputTensorInfo, 0, 0);

    TensorInfo outputTensorInfo(outputShape, DataType, qScale, qOffset);
    IConnectableLayer* output = net->AddOutputLayer(0, "output");
    Connect(logSoftmax, output, outputTensorInfo, 0, 0);

    return net;
}

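// Runs the Float32 LogSoftmax network on each of the given backends and compares the
// results against expectedOutputData via EndToEndLayerTestImpl.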
void LogSoftmaxEndToEnd(const std::vector<armnn::BackendId>& backends,
                        armnn::TensorInfo& inputTensorInfo,
                        armnn::TensorInfo& outputTensorInfo,
                        std::vector<float>& inputData,
                        std::vector<float>& expectedOutputData,
                        const float beta,
                        const int axis)
{
    using namespace armnn;

    // Builds up the structure of the network
    INetworkPtr net = CreateLogSoftmaxNetwork<DataType::Float32>(inputTensorInfo.GetShape(),
                                                                 outputTensorInfo.GetShape(),
                                                                 beta,
                                                                 axis);

    CHECK(net);

    std::map<int, std::vector<float>> inputTensorData = { {0, inputData} };
    std::map<int, std::vector<float>> expectedOutputTensorData = { {0, expectedOutputData} };

    EndToEndLayerTestImpl<DataType::Float32, DataType::Float32>(std::move(net),
                                                                inputTensorData,
                                                                expectedOutputTensorData,
                                                                backends);
}

} // anonymous namespace

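// End-to-end test using a non-default beta (10.0f) and a positive axis (3, the innermost
// dimension of the 1x1x2x4 tensors).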
void LogSoftmaxEndToEndTest(const std::vector<armnn::BackendId>& defaultBackends)
{
    using namespace armnn;

    const float beta = 10.0f; // non-default beta
    const int axis = 3;       // positive axis

    const TensorShape inputShape{1, 1, 2, 4};
    TensorInfo inputTensorInfo(inputShape, DataType::Float32);

    const TensorShape outputShape{1, 1, 2, 4};
    TensorInfo outputTensorInfo(outputShape, DataType::Float32);

    std::vector<float> inputData = std::vector<float>({
        0.0f, -0.6f, 0.2f, 0.4f,
        0.3f, -0.2f, 1.0f, 0.1f
    });

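    // Reference values: log-softmax along the last axis with scaling by beta, i.e.
    // out[i] = beta * x[i] - log(sum_j(exp(beta * x[j]))), computed per row of 4 elements.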
    std::vector<float> expectedOutputData = std::vector<float>({
        -4.14297f, -10.14297f, -2.14297f, -0.14297f,
        -7.00104f, -12.00104f, -0.00104087f, -9.00104f
    });

    LogSoftmaxEndToEnd(defaultBackends,
                       inputTensorInfo,
                       outputTensorInfo,
                       inputData,
                       expectedOutputData,
                       beta,
                       axis);
}
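
// Usage sketch (illustrative only, not part of this file): a backend's end-to-end test
// suite would typically wrap this helper in a doctest case and pass in its backend list.
// The suite/case names below are assumptions for illustration.
//
// TEST_SUITE("RefEndToEnd")
// {
// TEST_CASE("RefLogSoftmaxEndToEndTestFloat32")
// {
//     LogSoftmaxEndToEndTest(std::vector<armnn::BackendId>{ armnn::Compute::CpuRef });
// }
// }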