//
// Copyright © 2019 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <CommonTestUtils.hpp>

#include <ResolveType.hpp>

#include <armnn/INetwork.hpp>

#include <armnn/utility/NumericCast.hpp>

#include <doctest/doctest.h>

#include <vector>

namespace
{

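// Builds a minimal network containing a single ElementwiseUnary layer:
// input (binding id 0) -> ElementwiseUnary(operation) -> output (binding id 0).
// qScale/qOffset are only meaningful when ArmnnTypeInput is a quantized data type.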
template<armnn::DataType ArmnnTypeInput>
INetworkPtr CreateElementwiseUnaryNetwork(const TensorShape& inputShape,
                                          const TensorShape& outputShape,
                                          UnaryOperation operation,
                                          const float qScale = 1.0f,
                                          const int32_t qOffset = 0)
{
    using namespace armnn;

    INetworkPtr net(INetwork::Create());

    ElementwiseUnaryDescriptor descriptor(operation);
    IConnectableLayer* elementwiseUnaryLayer = net->AddElementwiseUnaryLayer(descriptor, "elementwiseUnary");

    TensorInfo inputTensorInfo(inputShape, ArmnnTypeInput, qScale, qOffset, true);
    IConnectableLayer* input = net->AddInputLayer(armnn::numeric_cast<LayerBindingId>(0));
    Connect(input, elementwiseUnaryLayer, inputTensorInfo, 0, 0);

    TensorInfo outputTensorInfo(outputShape, ArmnnTypeInput, qScale, qOffset);
    IConnectableLayer* output = net->AddOutputLayer(0, "output");
    Connect(elementwiseUnaryLayer, output, outputTensorInfo, 0, 0);

    return net;
}

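// Runs the single-layer network above end-to-end on the given backends using a
// fixed 2x2x2x2 input. For quantized data types both the input and the
// caller-supplied expected output are quantized before the backend results are
// compared against them.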
template<armnn::DataType ArmnnInType,
         typename TInput = armnn::ResolveType<ArmnnInType>>
void ElementwiseUnarySimpleEndToEnd(const std::vector<BackendId>& backends,
                                    UnaryOperation operation,
                                    const std::vector<float> expectedOutput)
{
    using namespace armnn;

    const float   qScale  = IsQuantizedType<TInput>() ? 0.25f : 1.0f;
    const int32_t qOffset = IsQuantizedType<TInput>() ? 50 : 0;

    const TensorShape& inputShape  = { 2, 2, 2, 2 };
    const TensorShape& outputShape = { 2, 2, 2, 2 };

    // Builds up the structure of the network
    INetworkPtr net = CreateElementwiseUnaryNetwork<ArmnnInType>(inputShape, outputShape, operation, qScale, qOffset);

    CHECK(net);

    const std::vector<float> input({  1, -1, 1, 1,  5, -5, 5, 5,
                                     -3,  3, 3, 3,  4,  4, -4, 4 });

    // Quantize the input data and the expected output to the input data type
    std::vector<TInput> qInputData      = armnnUtils::QuantizedVector<TInput>(input, qScale, qOffset);
    std::vector<TInput> qExpectedOutput = armnnUtils::QuantizedVector<TInput>(expectedOutput, qScale, qOffset);

    std::map<int, std::vector<TInput>> inputTensorData    = {{ 0, qInputData }};
    std::map<int, std::vector<TInput>> expectedOutputData = {{ 0, qExpectedOutput }};

    EndToEndLayerTestImpl<ArmnnInType, ArmnnInType>(std::move(net), inputTensorData, expectedOutputData, backends);
}
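
// Illustrative usage from a backend test suite. The TEST_CASE name and the
// defaultBackends list are assumptions made by the including suite, not
// definitions provided by this header:
//
//     TEST_CASE("AbsEndToEndTestFloat32")
//     {
//         // Element-wise absolute value of the fixed input defined above.
//         std::vector<float> expectedOutput = { 1, 1, 1, 1,  5, 5, 5, 5,
//                                               3, 3, 3, 3,  4, 4, 4, 4 };
//         ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(defaultBackends,
//                                                                  UnaryOperation::Abs,
//                                                                  expectedOutput);
//     }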

} // anonymous namespace