//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <Graph.hpp>

#include <backendsCommon/MapWorkload.hpp>
#include <backendsCommon/UnmapWorkload.hpp>
#include <backendsCommon/WorkloadFactory.hpp>

#include <armnn/utility/IgnoreUnused.hpp>

#include <doctest/doctest.h>

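// Shared helpers for the backend IsLayerSupported tests: a dummy Graph,
// factories for dummy TensorInfo and WorkloadInfo objects, RAII DummyLayer
// wrappers for each layer type, and LayerTypePolicy traits that drive the
// per-layer test template below.
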
namespace
{
armnn::Graph dummyGraph;

// Make a dummy TensorInfo object.
template<armnn::DataType DataType>
armnn::TensorInfo MakeDummyTensorInfo()
{
    return armnn::TensorInfo({2,2,2,2}, DataType, 1.0, 0);
}

// Make a dummy WorkloadInfo using a dummy TensorInfo.
template<armnn::DataType DataType>
armnn::WorkloadInfo MakeDummyWorkloadInfo(unsigned int numInputs, unsigned int numOutputs)
{
    armnn::WorkloadInfo info;

    for (unsigned int i = 0; i < numInputs; i++)
    {
        info.m_InputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    for (unsigned int o = 0; o < numOutputs; o++)
    {
        info.m_OutputTensorInfos.push_back(MakeDummyTensorInfo<DataType>());
    }

    return info;
}

// Template class to create a dummy layer (2 parameters).
template<typename LayerType, typename DescType = typename LayerType::DescriptorType>
struct DummyLayer
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>(DescType(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};
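
// Illustrative usage: the wrapper adds the layer to dummyGraph on construction
// and erases it on destruction, e.g.
//     DummyLayer<armnn::ActivationLayer> activation; // default-constructed ActivationDescriptor
//     activation.m_Layer->GetNumInputSlots();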

// Template class to create a dummy layer (1 parameter).
template<typename LayerType>
struct DummyLayer<LayerType, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<LayerType>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    LayerType* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchNormalizationLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchNormalizationLayer>(armnn::BatchNormalizationDescriptor(), "");
        m_Layer->m_Mean = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Variance = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Beta = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Gamma = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchNormalizationLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::BatchToSpaceNdLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::BatchToSpaceNdLayer>(armnn::BatchToSpaceNdDescriptor(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::BatchToSpaceNdLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConstantLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::ConstantLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConstantLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::InputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::InputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::InputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::ConcatLayer>
{
    DummyLayer()
    {
        armnn::OriginsDescriptor desc(2);
        m_Layer = dummyGraph.AddLayer<armnn::ConcatLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::ConcatLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::MapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::MapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::MapLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::OutputLayer, armnn::LayerBindingId>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::OutputLayer>(armnn::LayerBindingId(), "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::OutputLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::SplitterLayer>
{
    DummyLayer()
    {
        armnn::ViewsDescriptor desc(1);
        m_Layer = dummyGraph.AddLayer<armnn::SplitterLayer>(desc, "");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::SplitterLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnmapLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::UnmapLayer>("");
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnmapLayer* m_Layer;
};

template <typename ConvolutionLayerType>
struct DummyConvolutionLayer
{
    DummyConvolutionLayer()
    {
        typename ConvolutionLayerType::DescriptorType desc;
        desc.m_StrideX = 1;
        desc.m_StrideY = 1;
        m_Layer = dummyGraph.AddLayer<ConvolutionLayerType>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_Bias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyConvolutionLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    ConvolutionLayerType* m_Layer;
};
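// Note: the explicit strides of 1 above avoid the descriptor default of 0,
// which would presumably be rejected when the dummy workload is validated.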

template<>
struct DummyLayer<armnn::Convolution2dLayer>
    : public DummyConvolutionLayer<armnn::Convolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DepthwiseConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::DepthwiseConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::TransposeConvolution2dLayer>
    : public DummyConvolutionLayer<armnn::TransposeConvolution2dLayer>
{
};

template<>
struct DummyLayer<armnn::DetectionPostProcessLayer>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::DetectionPostProcessLayer>(armnn::DetectionPostProcessDescriptor(), "");
        m_Layer->m_Anchors = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::DetectionPostProcessLayer* m_Layer;
};

template <typename LstmLayerType>
struct DummyLstmLayer
{
    DummyLstmLayer()
    {
        typename LstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

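        // CIFG is disabled, so the separate input-gate parameters
        // (held in m_CifgParameters) are populated below as well.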
        m_Layer = dummyGraph.AddLayer<LstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::LstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::LstmLayer>
    : public DummyLstmLayer<armnn::LstmLayer>
{
};

template <typename UnidirectionalSequenceLstmLayerType>
struct DummyUnidirectionalSequenceLstmLayer
{
    DummyUnidirectionalSequenceLstmLayer()
    {
        typename UnidirectionalSequenceLstmLayerType::DescriptorType desc;
        desc.m_CifgEnabled = false;

        m_Layer = dummyGraph.AddLayer<UnidirectionalSequenceLstmLayerType>(desc, "");
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));

        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyUnidirectionalSequenceLstmLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::UnidirectionalSequenceLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::UnidirectionalSequenceLstmLayer>
    : public DummyUnidirectionalSequenceLstmLayer<armnn::UnidirectionalSequenceLstmLayer>
{
};

template<>
struct DummyLayer<armnn::QLstmLayer>
{
    DummyLayer()
    {
        armnn::QLstmLayer::DescriptorType desc;
        desc.m_CifgEnabled = false;
        desc.m_PeepholeEnabled = true;
        desc.m_ProjectionEnabled = true;
        desc.m_LayerNormEnabled = true;

        m_Layer = dummyGraph.AddLayer<armnn::QLstmLayer>(desc, "qLstm");

        // Basic params
        m_Layer->m_BasicParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_BasicParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));

        m_Layer->m_BasicParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_BasicParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // CIFG optional params
        m_Layer->m_CifgParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_CifgParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Projection optional params
        m_Layer->m_ProjectionParameters.m_ProjectionWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS8));
        m_Layer->m_ProjectionParameters.m_ProjectionBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));

        // Peephole optional params
        m_Layer->m_PeepholeParameters.m_CellToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_PeepholeParameters.m_CellToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));

        // Layer normalization optional params
        m_Layer->m_LayerNormParameters.m_InputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_ForgetLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_CellLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
        m_Layer->m_LayerNormParameters.m_OutputLayerNormWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QSymmS16));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::QuantizedLstmLayer, void>
{
    DummyLayer()
    {
        m_Layer = dummyGraph.AddLayer<armnn::QuantizedLstmLayer>("");

        m_Layer->m_QuantizedLstmParameters.m_InputToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_InputToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_RecurrentToInputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToForgetWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToCellWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));
        m_Layer->m_QuantizedLstmParameters.m_RecurrentToOutputWeights = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::QAsymmU8));

        m_Layer->m_QuantizedLstmParameters.m_InputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_ForgetGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_CellBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
        m_Layer->m_QuantizedLstmParameters.m_OutputGateBias = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Signed32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::QuantizedLstmLayer* m_Layer;
};

template<>
struct DummyLayer<armnn::FullyConnectedLayer>
{
    DummyLayer()
    {
        armnn::FullyConnectedLayer::DescriptorType desc;
        m_Layer = dummyGraph.AddLayer<armnn::FullyConnectedLayer>(desc, "");
        m_Layer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(
            armnn::TensorInfo(armnn::TensorShape({1,1,1,1}), armnn::DataType::Float32));
    }

    ~DummyLayer()
    {
        dummyGraph.EraseLayer(m_Layer);
    }

    armnn::FullyConnectedLayer* m_Layer;
};

// Tag for giving each LayerType entry a unique strong type.
template<armnn::LayerType>
struct Tag{};
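
// Tag<Type> drives overload resolution of IsLayerSupportedTest below, letting
// individual layer types (e.g. Map and Unmap) provide their own overload, as in
//     IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>());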

#define DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return factory->Create##name(desc, info); \
    } \
};
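
// For example, DECLARE_LAYER_POLICY_2_PARAM(Activation) below generates a
// LayerTypePolicy<armnn::LayerType::Activation, DataType> whose
// MakeDummyWorkload calls factory->CreateActivation(desc, info).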

#define DECLARE_LAYER_POLICY_MAP_PARAM(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    using QueueDesc = armnn::name##QueueDescriptor; \
    using Workload = armnn::name##Workload; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = false; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory* factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory); \
        QueueDesc desc; \
        armnn::WorkloadInfo info = MakeDummyWorkloadInfo<DataType>(nIn, nOut); \
        return std::make_unique<armnn::name##Workload>(desc, info); \
    } \
};

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 1 parameter (name).
#define DECLARE_LAYER_POLICY_1_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, void)

// Define a layer policy specialization for use with the IsLayerSupported tests.
// Use this version for layers whose constructor takes 2 parameters (descriptor and name).
#define DECLARE_LAYER_POLICY_2_PARAM(name) DECLARE_LAYER_POLICY_CUSTOM_PARAM(name, armnn::name##Descriptor)

#define DECLARE_LAYER_POLICY_EXCEPTION(name, descType) \
template<armnn::DataType DataType> \
struct LayerTypePolicy<armnn::LayerType::name, DataType> \
{ \
    using Type = armnn::name##Layer; \
    using Desc = descType; \
    constexpr static const char* NameStr = #name; \
    constexpr static const bool IsException = true; \
    \
    static std::unique_ptr<armnn::IWorkload> MakeDummyWorkload(armnn::IWorkloadFactory *factory, \
        unsigned int nIn, unsigned int nOut) \
    { \
        IgnoreUnused(factory, nIn, nOut); \
        return std::unique_ptr<armnn::IWorkload>(); \
    } \
};

#define DECLARE_LAYER_POLICY_EXCEPTION_1_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, void)
#define DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(name) DECLARE_LAYER_POLICY_EXCEPTION(name, armnn::name##Descriptor)
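
// StandIn (declared below) uses the exception policy: it has no workload of its
// own, so MakeDummyWorkload returns nullptr and IsLayerSupportedTest skips it.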

// Layer policy template.
template<armnn::LayerType Type, armnn::DataType DataType>
struct LayerTypePolicy;

// Every entry in the armnn::LayerType enum must be accounted for below.
DECLARE_LAYER_POLICY_2_PARAM(Activation)

DECLARE_LAYER_POLICY_1_PARAM(Addition)

DECLARE_LAYER_POLICY_2_PARAM(ArgMinMax)

DECLARE_LAYER_POLICY_2_PARAM(BatchNormalization)

DECLARE_LAYER_POLICY_2_PARAM(BatchToSpaceNd)

DECLARE_LAYER_POLICY_1_PARAM(Cast)

DECLARE_LAYER_POLICY_2_PARAM(Comparison)

DECLARE_LAYER_POLICY_2_PARAM(Concat)

DECLARE_LAYER_POLICY_1_PARAM(Constant)

DECLARE_LAYER_POLICY_1_PARAM(ConvertBf16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp16ToFp32)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToBf16)

DECLARE_LAYER_POLICY_1_PARAM(ConvertFp32ToFp16)

DECLARE_LAYER_POLICY_2_PARAM(Convolution2d)

DECLARE_LAYER_POLICY_1_PARAM(MemCopy)

DECLARE_LAYER_POLICY_1_PARAM(MemImport)

DECLARE_LAYER_POLICY_1_PARAM(Debug)

DECLARE_LAYER_POLICY_2_PARAM(DepthToSpace)

DECLARE_LAYER_POLICY_2_PARAM(DepthwiseConvolution2d)

DECLARE_LAYER_POLICY_1_PARAM(Dequantize)

DECLARE_LAYER_POLICY_2_PARAM(DetectionPostProcess)

DECLARE_LAYER_POLICY_2_PARAM(ElementwiseUnary)

DECLARE_LAYER_POLICY_2_PARAM(FakeQuantization)

DECLARE_LAYER_POLICY_2_PARAM(Fill)

DECLARE_LAYER_POLICY_1_PARAM(Floor)

DECLARE_LAYER_POLICY_2_PARAM(FullyConnected)

DECLARE_LAYER_POLICY_2_PARAM(Gather)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Input, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(InstanceNormalization)

DECLARE_LAYER_POLICY_2_PARAM(L2Normalization)

DECLARE_LAYER_POLICY_2_PARAM(LogicalBinary)

DECLARE_LAYER_POLICY_2_PARAM(LogSoftmax)

DECLARE_LAYER_POLICY_2_PARAM(Lstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Map, void)

DECLARE_LAYER_POLICY_1_PARAM(Maximum)

DECLARE_LAYER_POLICY_2_PARAM(Mean)

DECLARE_LAYER_POLICY_1_PARAM(Merge)

DECLARE_LAYER_POLICY_1_PARAM(Minimum)

DECLARE_LAYER_POLICY_1_PARAM(Multiplication)

DECLARE_LAYER_POLICY_2_PARAM(Normalization)

DECLARE_LAYER_POLICY_CUSTOM_PARAM(Output, armnn::LayerBindingId)

DECLARE_LAYER_POLICY_2_PARAM(Pad)

DECLARE_LAYER_POLICY_1_PARAM(Quantize)

DECLARE_LAYER_POLICY_2_PARAM(Permute)

DECLARE_LAYER_POLICY_2_PARAM(Pooling2d)

DECLARE_LAYER_POLICY_2_PARAM(PreCompiled)

DECLARE_LAYER_POLICY_1_PARAM(Prelu)

DECLARE_LAYER_POLICY_2_PARAM(QLstm)

DECLARE_LAYER_POLICY_1_PARAM(QuantizedLstm)

DECLARE_LAYER_POLICY_1_PARAM(Division)

DECLARE_LAYER_POLICY_1_PARAM(Rank)

DECLARE_LAYER_POLICY_2_PARAM(Resize)

DECLARE_LAYER_POLICY_2_PARAM(Reshape)

DECLARE_LAYER_POLICY_1_PARAM(Shape)

DECLARE_LAYER_POLICY_2_PARAM(Slice)

DECLARE_LAYER_POLICY_2_PARAM(Softmax)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToBatchNd)

DECLARE_LAYER_POLICY_2_PARAM(SpaceToDepth)

DECLARE_LAYER_POLICY_2_PARAM(Splitter)

DECLARE_LAYER_POLICY_2_PARAM(Stack)

DECLARE_LAYER_POLICY_EXCEPTION_2_PARAM(StandIn)

DECLARE_LAYER_POLICY_2_PARAM(StridedSlice)

DECLARE_LAYER_POLICY_1_PARAM(Subtraction)

DECLARE_LAYER_POLICY_2_PARAM(Reduce)

DECLARE_LAYER_POLICY_1_PARAM(Switch)

DECLARE_LAYER_POLICY_2_PARAM(Transpose)

DECLARE_LAYER_POLICY_2_PARAM(TransposeConvolution2d)

DECLARE_LAYER_POLICY_2_PARAM(UnidirectionalSequenceLstm)

DECLARE_LAYER_POLICY_MAP_PARAM(Unmap, void)

// Generic implementation to get the number of input slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumInputs(const armnn::Layer& layer)
{
    return layer.GetNumInputSlots();
}

// Generic implementation to get the number of output slots for a given layer type.
template<armnn::LayerType Type>
unsigned int GetNumOutputs(const armnn::Layer& layer)
{
    return layer.GetNumOutputSlots();
}

template<>
unsigned int GetNumInputs<armnn::LayerType::Concat>(const armnn::Layer& layer)
{
    IgnoreUnused(layer);
    return 2;
}
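
// Concat reports two inputs here to match the two-view OriginsDescriptor used
// by DummyLayer<armnn::ConcatLayer> above.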
| 770 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 771 | // Tests that the IsLayerSupported() function returns the correct value. |
| 772 | // We determined the correct value by *trying* to create the relevant workload and seeing if it matches what we expect. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 773 | // Returns true if expectations are met, otherwise returns false. |
| 774 | template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type> |
| 775 | bool IsLayerSupportedTest(FactoryType *factory, Tag<Type>) |
| 776 | { |
| 777 | using LayerPolicy = LayerTypePolicy<Type, DataType>; |
| 778 | using LayerType = typename LayerPolicy::Type; |
| 779 | using LayerDesc = typename LayerPolicy::Desc; |
| 780 | DummyLayer<LayerType, LayerDesc> layer; |
| 781 | |
Derek Lamberti | b99ef39 | 2019-10-21 14:10:38 +0100 | [diff] [blame] | 782 | if (LayerPolicy::IsException) //Don't test exceptions to the rule. |
| 783 | { |
| 784 | return true; |
| 785 | } |
| 786 | |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 787 | unsigned int numIn = GetNumInputs<Type>(*layer.m_Layer); |
| 788 | unsigned int numOut = GetNumOutputs<Type>(*layer.m_Layer); |
| 789 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 790 | // Make another dummy layer just to make IsLayerSupported have valid inputs. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 791 | DummyLayer<armnn::ConstantLayer, void> previousLayer; |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 792 | // Set output of the previous layer to a dummy tensor. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 793 | armnn::TensorInfo output = MakeDummyTensorInfo<DataType>(); |
| 794 | previousLayer.m_Layer->GetOutputSlot(0).SetTensorInfo(output); |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 795 | // Connect all outputs of the previous layer to inputs of tested layer. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 796 | for (unsigned int i = 0; i < numIn; i++) |
| 797 | { |
| 798 | armnn::IOutputSlot& previousLayerOutputSlot = previousLayer.m_Layer->GetOutputSlot(0); |
| 799 | armnn::IInputSlot& layerInputSlot = layer.m_Layer->GetInputSlot(i); |
| 800 | previousLayerOutputSlot.Connect(layerInputSlot); |
| 801 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 802 | // Set outputs of tested layer to a dummy tensor. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 803 | for (unsigned int i = 0; i < numOut; i++) |
| 804 | { |
| 805 | layer.m_Layer->GetOutputSlot(0).SetTensorInfo(output); |
| 806 | } |
| 807 | |
| 808 | std::string layerName = LayerPolicy::NameStr; |
| 809 | std::string reasonIfUnsupported; |
| 810 | if (FactoryType::IsLayerSupported(*layer.m_Layer, DataType, reasonIfUnsupported)) |
| 811 | { |
| 812 | std::string errorMsg = " layer expected support but found none."; |
| 813 | try |
| 814 | { |
| 815 | bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() != nullptr; |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 816 | CHECK_MESSAGE(retVal, layerName << errorMsg); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 817 | return retVal; |
| 818 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 819 | catch(const armnn::InvalidArgumentException& e) |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 820 | { |
Jan Eilers | 8eb2560 | 2020-03-09 12:13:48 +0000 | [diff] [blame] | 821 | IgnoreUnused(e); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 822 | // This is ok since we throw InvalidArgumentException when creating the dummy workload. |
| 823 | return true; |
| 824 | } |
| 825 | catch(const std::exception& e) |
| 826 | { |
| 827 | errorMsg = e.what(); |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 828 | FAIL(layerName << ": " << errorMsg); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 829 | return false; |
| 830 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 831 | catch(...) |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 832 | { |
| 833 | errorMsg = "Unexpected error while testing support for "; |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 834 | FAIL(errorMsg << layerName); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 835 | return false; |
| 836 | } |
| 837 | } |
| 838 | else |
| 839 | { |
| 840 | std::string errorMsg = " layer expected no support (giving reason: " + reasonIfUnsupported + ") but found some."; |
| 841 | try |
| 842 | { |
| 843 | bool retVal = LayerPolicy::MakeDummyWorkload(factory, numIn, numOut).get() == nullptr; |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 844 | CHECK_MESSAGE(retVal, layerName << errorMsg); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 845 | return retVal; |
| 846 | } |
| 847 | // These two exceptions are ok: for workloads that are partially supported, attempting to instantiate them |
| 848 | // using parameters that make IsLayerSupported() return false should throw an |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 849 | // InvalidArgumentException or UnimplementedException. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 850 | catch(const armnn::InvalidArgumentException& e) |
| 851 | { |
Jan Eilers | 8eb2560 | 2020-03-09 12:13:48 +0000 | [diff] [blame] | 852 | IgnoreUnused(e); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 853 | return true; |
| 854 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 855 | catch(const armnn::UnimplementedException& e) |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 856 | { |
Jan Eilers | 8eb2560 | 2020-03-09 12:13:48 +0000 | [diff] [blame] | 857 | IgnoreUnused(e); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 858 | return true; |
| 859 | } |
| 860 | catch(const std::exception& e) |
| 861 | { |
| 862 | errorMsg = e.what(); |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 863 | FAIL(layerName << ": " << errorMsg); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 864 | return false; |
| 865 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 866 | catch(...) |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 867 | { |
| 868 | errorMsg = "Unexpected error while testing support for "; |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 869 | FAIL(errorMsg << layerName); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 870 | return false; |
| 871 | } |
| 872 | } |
| 873 | } |
| 874 | |
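| | // Map has a common workload shared by all backends, so there is no per-backend |
| | // support check to exercise; this tag-dispatch overload trivially passes. |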
Jim Flynn | 68db06f | 2020-10-06 10:14:50 +0100 | [diff] [blame] | 875 | template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type> |
| 876 | bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Map>) |
| 877 | { |
| 878 | IgnoreUnused(factory); |
| 879 | return true; |
| 880 | } |
| 881 | |
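| | // Likewise for Unmap: the common workload means no per-backend check is needed. |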
Jim Flynn | 3a40ea5 | 2020-10-08 11:42:30 +0100 | [diff] [blame] | 882 | template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type> |
| 883 | bool IsLayerSupportedTest(FactoryType *factory, Tag<armnn::LayerType::Unmap>) |
| 884 | { |
| 885 | IgnoreUnused(factory); |
| 886 | return true; |
| 887 | } |
| 888 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 889 | // Helper function to compute the next type in the LayerType enum. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 890 | constexpr armnn::LayerType NextType(armnn::LayerType type) |
| 891 | { |
| 892 | return static_cast<armnn::LayerType>(static_cast<int>(type)+1); |
| 893 | } |
| 894 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 895 | // Termination overload: ends the recursion over the LayerType enumeration by testing the final entry. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 896 | template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type> |
| 897 | bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<armnn::LayerType::LastLayer>) |
| 898 | { |
| 899 | return IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>()); |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 900 | } |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 901 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 902 | // Recursive function to test one entry in the LayerType enum and then iterate to the next entry. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 903 | template<typename FactoryType, armnn::DataType DataType, armnn::LayerType Type> |
| 904 | bool IsLayerSupportedTestsImpl(FactoryType *factory, Tag<Type>) |
| 905 | { |
| 906 | bool v = IsLayerSupportedTest<FactoryType, DataType, Type>(factory, Tag<Type>()); |
| 907 | |
| 908 | return v && |
| 909 | IsLayerSupportedTestsImpl<FactoryType, DataType, NextType(Type)> |
| 910 | (factory, Tag<NextType(Type)>()); |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 911 | } |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 912 | |
| 913 | // Entry point: walks every entry in the LayerType enum and checks each layer's IsLayerSupported result against workload creation. |
| 914 | template<typename FactoryType, armnn::DataType DataType> |
| 915 | bool IsLayerSupportedTests(FactoryType *factory) |
| 916 | { |
| 917 | return IsLayerSupportedTestsImpl<FactoryType, DataType>(factory, Tag<armnn::LayerType::FirstLayer>()); |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 918 | } |
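| |  |
| | // Example usage from a backend test suite (RefWorkloadFactory is shown purely for illustration): |
| | //   armnn::RefWorkloadFactory factory; |
| | //   CHECK(IsLayerSupportedTests<armnn::RefWorkloadFactory, armnn::DataType::Float32>(&factory)); |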
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 919 | |
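| | // Instantiates a dummy layer for the given LayerType and verifies that GetType() |
| | // reports the same enum value it was created with. |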
| 920 | template<armnn::LayerType Type> |
| 921 | bool TestLayerTypeMatches() |
| 922 | { |
| 923 | using LayerPolicy = LayerTypePolicy<Type, armnn::DataType::Float32>; |
| 924 | using LayerType = typename LayerPolicy::Type; |
| 925 | using LayerDesc = typename LayerPolicy::Desc; |
| 926 | DummyLayer<LayerType, LayerDesc> layer; |
| 927 | |
| 928 | std::stringstream ss; |
| 929 | ss << LayerPolicy::NameStr << " layer type does not match the expected layer type value."; |
| 930 | bool v = Type == layer.m_Layer->GetType(); |
Sadik Armagan | 1625efc | 2021-06-10 18:24:34 +0100 | [diff] [blame] | 931 | CHECK_MESSAGE(v, ss.str()); |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 932 | return v; |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 933 | } |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 934 | |
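| | // Termination overload: checks the last entry in the LayerType enum. |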
| 935 | template<armnn::LayerType Type> |
| 936 | bool LayerTypeMatchesTestImpl(Tag<armnn::LayerType::LastLayer>) |
| 937 | { |
| 938 | return TestLayerTypeMatches<Type>(); |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 939 | } |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 940 | |
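| | // Recursive overload: checks the current LayerType entry, then advances to the next. |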
| 941 | template<armnn::LayerType Type> |
| 942 | bool LayerTypeMatchesTestImpl(Tag<Type>) |
| 943 | { |
| 944 | return TestLayerTypeMatches<Type>() && |
| 945 | LayerTypeMatchesTestImpl<NextType(Type)>(Tag<NextType(Type)>()); |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 946 | } |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 947 | |
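| | // Builds a minimal input -> convert -> output graph and asks the backend whether |
| | // the conversion layer (e.g. ConvertFp16ToFp32Layer) is supported for the given data types. |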
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 948 | template<typename FactoryType, typename LayerType, armnn::DataType InputDataType, armnn::DataType OutputDataType> |
| 949 | bool IsConvertLayerSupportedTests(std::string& reasonIfUnsupported) |
| 950 | { |
| 951 | armnn::Graph graph; |
| 952 | LayerType* const layer = graph.AddLayer<LayerType>("LayerName"); |
| 953 | |
| 954 | armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input"); |
| 955 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output"); |
| 956 | |
| 957 | armnn::TensorInfo inputTensorInfo({1, 3, 2, 3}, InputDataType); |
| 958 | armnn::TensorInfo outputTensorInfo({1, 3, 2, 3}, OutputDataType); |
| 959 | |
| 960 | input->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 961 | input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo); |
| 962 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 963 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 964 | |
| 965 | bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported); |
| 966 | |
| 967 | return result; |
Matteo Martincigh | 59a950c | 2018-12-13 12:48:25 +0000 | [diff] [blame] | 968 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 969 | |
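| | // Builds a two-input LogicalOr graph with matching 1x1x1x4 operands and queries |
| | // backend support for the logical binary layer. |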
Matthew Bentham | 1f0ff35 | 2019-01-02 13:26:31 +0000 | [diff] [blame] | 970 | template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType> |
James Conroy | 177df1e | 2020-11-13 10:18:51 +0000 | [diff] [blame] | 971 | bool IsLogicalBinaryLayerSupportedTests(std::string& reasonIfUnsupported) |
| 972 | { |
| 973 | armnn::Graph graph; |
| 974 | armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalOr); |
| 975 | |
| 976 | armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0"); |
| 977 | armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1"); |
| 978 | |
| 979 | armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalOrLayer"); |
| 980 | |
| 981 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output1"); |
| 982 | |
| 983 | armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType); |
| 984 | armnn::TensorInfo inputTensorInfo1({1, 1, 1, 4}, InputDataType); |
| 985 | |
| 986 | armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType); |
| 987 | |
| 988 | input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 989 | input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); |
| 990 | |
| 991 | input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0); |
| 992 | input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1); |
| 993 | |
| 994 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 995 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 996 | |
| 997 | bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported); |
| 998 | |
| 999 | return result; |
| 1000 | } |
| 1001 | |
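| | // As above, but with a LogicalAnd layer whose 1x1x1x1 second operand must be |
| | // broadcast against the 1x1x1x4 first operand. |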
| 1002 | template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType> |
| 1003 | bool IsLogicalBinaryLayerBroadcastSupportedTests(std::string& reasonIfUnsupported) |
| 1004 | { |
| 1005 | armnn::Graph graph; |
| 1006 | armnn::LogicalBinaryDescriptor desc(armnn::LogicalBinaryOperation::LogicalAnd); |
| 1007 | |
| 1008 | armnn::Layer* const input0 = graph.AddLayer<armnn::InputLayer>(0, "input0"); |
| 1009 | armnn::Layer* const input1 = graph.AddLayer<armnn::InputLayer>(1, "input1"); |
| 1010 | |
| 1011 | armnn::Layer* const layer = graph.AddLayer<armnn::LogicalBinaryLayer>(desc, "logicalAndLayer"); |
| 1012 | |
| 1013 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output2"); |
| 1014 | |
| 1015 | armnn::TensorInfo inputTensorInfo0({1, 1, 1, 4}, InputDataType); |
| 1016 | armnn::TensorInfo inputTensorInfo1({1, 1, 1, 1}, InputDataType); |
| 1017 | |
| 1018 | armnn::TensorInfo outputTensorInfo({1, 1, 1, 4}, OutputDataType); |
| 1019 | |
| 1020 | input0->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 1021 | input1->GetOutputSlot(0).Connect(layer->GetInputSlot(1)); |
| 1022 | |
| 1023 | input0->GetOutputHandler(0).SetTensorInfo(inputTensorInfo0); |
| 1024 | input1->GetOutputHandler(0).SetTensorInfo(inputTensorInfo1); |
| 1025 | |
| 1026 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 1027 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 1028 | |
| 1029 | bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported); |
| 1030 | |
| 1031 | return result; |
| 1032 | } |
| 1033 | |
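| | // Builds a Mean layer that reduces a 4x3x2 input over axes {1, 0} to a 1-D output |
| | // of shape {2}, then queries backend support. |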
| 1034 | template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType> |
Matthew Bentham | 1f0ff35 | 2019-01-02 13:26:31 +0000 | [diff] [blame] | 1035 | bool IsMeanLayerSupportedTests(std::string& reasonIfUnsupported) |
| 1036 | { |
| 1037 | armnn::Graph graph; |
| 1038 | static const std::vector<unsigned> axes = {1, 0}; |
| 1039 | armnn::MeanDescriptor desc(axes, false); |
| 1040 | |
| 1041 | armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName"); |
| 1042 | |
| 1043 | armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input"); |
| 1044 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output"); |
| 1045 | |
| 1046 | armnn::TensorInfo inputTensorInfo({4, 3, 2}, InputDataType); |
| 1047 | armnn::TensorInfo outputTensorInfo({2}, OutputDataType); |
| 1048 | |
| 1049 | input->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 1050 | input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo); |
| 1051 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 1052 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 1053 | |
| 1054 | bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported); |
| 1055 | |
| 1056 | return result; |
| 1057 | } |
| 1058 | |
James Conroy | 4d1ff58 | 2019-06-10 17:06:39 +0100 | [diff] [blame] | 1059 | // Tests that Mean layer support is rejected when, with keepDims == true, the number of |
| 1060 | // input tensor dimensions does not match the number of output tensor dimensions. |
| 1061 | template<typename FactoryType, armnn::DataType InputDataType, armnn::DataType OutputDataType> |
| 1062 | bool IsMeanLayerNotSupportedTests(std::string& reasonIfUnsupported) |
| 1063 | { |
| 1064 | armnn::Graph graph; |
| 1065 | static const std::vector<unsigned> axes = {}; |
| 1066 | // Set keepDims == true |
| 1067 | armnn::MeanDescriptor desc(axes, true); |
| 1068 | |
| 1069 | armnn::Layer* const layer = graph.AddLayer<armnn::MeanLayer>(desc, "LayerName"); |
| 1070 | |
| 1071 | armnn::Layer* const input = graph.AddLayer<armnn::InputLayer>(0, "input"); |
| 1072 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "output"); |
| 1073 | |
| 1074 | // Mismatching number of tensor dimensions |
| 1075 | armnn::TensorInfo inputTensorInfo({1, 1, 1, 1}, InputDataType); |
| 1076 | armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType); |
| 1077 | |
| 1078 | input->GetOutputSlot(0).Connect(layer->GetInputSlot(0)); |
| 1079 | input->GetOutputHandler(0).SetTensorInfo(inputTensorInfo); |
| 1080 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 1081 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 1082 | |
| 1083 | bool result = FactoryType::IsLayerSupported(*layer, InputDataType, reasonIfUnsupported); |
| 1084 | |
| 1085 | return result; |
| 1086 | } |
| 1087 | |
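| | // Builds a Constant -> Output graph and queries backend support for the Constant layer. |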
Mike Kelly | 0886ac4 | 2020-04-27 09:55:40 +0100 | [diff] [blame] | 1088 | template<typename FactoryType, armnn::DataType OutputDataType> |
| 1089 | bool IsConstantLayerSupportedTests(std::string& reasonIfUnsupported) |
| 1090 | { |
| 1091 | armnn::Graph graph; |
| 1092 | |
| 1093 | armnn::Layer* const layer = graph.AddLayer<armnn::ConstantLayer>("ConstantLayerName"); |
| 1094 | armnn::Layer* const output = graph.AddLayer<armnn::OutputLayer>(0, "OutputLayerName"); |
| 1095 | |
| 1096 | armnn::TensorInfo outputTensorInfo({1, 1}, OutputDataType); |
| 1097 | |
| 1098 | layer->GetOutputSlot(0).Connect(output->GetInputSlot(0)); |
| 1099 | layer->GetOutputHandler(0).SetTensorInfo(outputTensorInfo); |
| 1100 | |
| 1101 | bool result = FactoryType::IsLayerSupported(*layer, OutputDataType, reasonIfUnsupported); |
| 1102 | |
| 1103 | return result; |
| 1104 | } |
Matthew Bentham | 1f0ff35 | 2019-01-02 13:26:31 +0000 | [diff] [blame] | 1105 | |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 1106 | } // anonymous namespace |