Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 1 | // |
Mike Kelly | 3ec3077 | 2023-03-08 13:47:17 +0000 | [diff] [blame] | 2 | // Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved. |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
| 4 | // |
| 5 | #pragma once |
| 6 | |
Colm Donelan | a98e79a | 2022-12-06 21:32:29 +0000 | [diff] [blame] | 7 | #include <armnn/Deprecated.hpp> |
| 8 | #include <armnn/Descriptors.hpp> |
| 9 | #include <armnn/Exceptions.hpp> |
| 10 | #include <armnn/IRuntime.hpp> |
| 11 | #include <armnn/Optional.hpp> |
| 12 | #include <armnn/Tensor.hpp> |
| 13 | #include <armnn/Types.hpp> |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 14 | #include <armnn/backends/IBackendInternal.hpp> |
| 15 | #include <armnn/backends/MemCopyWorkload.hpp> |
Colm Donelan | a98e79a | 2022-12-06 21:32:29 +0000 | [diff] [blame] | 16 | #include <armnn/backends/ITensorHandle.hpp> |
| 17 | #include <armnn/backends/IWorkload.hpp> |
| 18 | #include <armnn/backends/OptimizationViews.hpp> |
| 19 | #include <armnn/backends/SubgraphView.hpp> |
| 20 | #include <armnn/backends/WorkloadData.hpp> |
| 21 | #include <armnn/backends/WorkloadFactory.hpp> |
| 22 | #include <armnn/backends/WorkloadInfo.hpp> |
| 23 | #include <armnn/utility/IgnoreUnused.hpp> |
| 24 | #include <armnn/utility/PolymorphicDowncast.hpp> |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 25 | #include <armnnTestUtils/MockTensorHandle.hpp> |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 26 | #include <backendsCommon/LayerSupportBase.hpp> |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 27 | |
Colm Donelan | a98e79a | 2022-12-06 21:32:29 +0000 | [diff] [blame] | 28 | #include <client/include/CounterValue.hpp> |
| 29 | #include <client/include/ISendTimelinePacket.hpp> |
| 30 | #include <client/include/Timestamp.hpp> |
| 31 | #include <client/include/backends/IBackendProfiling.hpp> |
| 32 | #include <client/include/backends/IBackendProfilingContext.hpp> |
| 33 | #include <common/include/Optional.hpp> |
| 34 | |
| 35 | #include <atomic> |
| 36 | #include <cstdint> |
| 37 | #include <memory> |
| 38 | #include <string> |
| 39 | #include <utility> |
| 40 | #include <vector> |
| 41 | |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 42 | namespace armnn |
| 43 | { |
Colm Donelan | a98e79a | 2022-12-06 21:32:29 +0000 | [diff] [blame] | 44 | class BackendId; |
| 45 | class ICustomAllocator; |
| 46 | class MockMemoryManager; |
| 47 | struct LstmInputParamsInfo; |
| 48 | struct QuantizedLstmInputParamsInfo; |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 49 | |
| 50 | // A bare bones Mock backend to enable unit testing of simple tensor manipulation features. |
| 51 | class MockBackend : public IBackendInternal |
| 52 | { |
| 53 | public: |
| 54 | MockBackend() = default; |
| 55 | |
| 56 | ~MockBackend() = default; |
| 57 | |
| 58 | static const BackendId& GetIdStatic(); |
| 59 | |
| 60 | const BackendId& GetId() const override |
| 61 | { |
| 62 | return GetIdStatic(); |
| 63 | } |
| 64 | IBackendInternal::IWorkloadFactoryPtr |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 65 | CreateWorkloadFactory(const IBackendInternal::IMemoryManagerSharedPtr& memoryManager = nullptr) const override; |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 66 | |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 67 | IBackendInternal::ILayerSupportSharedPtr GetLayerSupport() const override; |
| 68 | |
| 69 | IBackendInternal::IMemoryManagerUniquePtr CreateMemoryManager() const override; |
| 70 | |
| 71 | IBackendInternal::IBackendContextPtr CreateBackendContext(const IRuntime::CreationOptions&) const override; |
| 72 | IBackendInternal::IBackendProfilingContextPtr |
| 73 | CreateBackendProfilingContext(const IRuntime::CreationOptions& creationOptions, |
| 74 | IBackendProfilingPtr& backendProfiling) override; |
| 75 | |
| 76 | OptimizationViews OptimizeSubgraphView(const SubgraphView& subgraph) const override; |
| 77 | |
| 78 | std::unique_ptr<ICustomAllocator> GetDefaultAllocator() const override; |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 79 | }; |
| 80 | |
| 81 | class MockWorkloadFactory : public IWorkloadFactory |
| 82 | { |
| 83 | |
| 84 | public: |
| 85 | explicit MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager); |
| 86 | MockWorkloadFactory(); |
| 87 | |
| 88 | ~MockWorkloadFactory() |
| 89 | {} |
| 90 | |
| 91 | const BackendId& GetBackendId() const override; |
| 92 | |
| 93 | bool SupportsSubTensors() const override |
| 94 | { |
| 95 | return false; |
| 96 | } |
| 97 | |
| 98 | ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateSubTensorHandle instead") |
| 99 | std::unique_ptr<ITensorHandle> CreateSubTensorHandle(ITensorHandle&, |
| 100 | TensorShape const&, |
| 101 | unsigned int const*) const override |
| 102 | { |
| 103 | return nullptr; |
| 104 | } |
| 105 | |
| 106 | ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead") |
| 107 | std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo, |
| 108 | const bool IsMemoryManaged = true) const override |
| 109 | { |
| 110 | IgnoreUnused(IsMemoryManaged); |
| 111 | return std::make_unique<MockTensorHandle>(tensorInfo, m_MemoryManager); |
| 112 | }; |
| 113 | |
| 114 | ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead") |
| 115 | std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo, |
| 116 | DataLayout dataLayout, |
| 117 | const bool IsMemoryManaged = true) const override |
| 118 | { |
| 119 | IgnoreUnused(dataLayout, IsMemoryManaged); |
| 120 | return std::make_unique<MockTensorHandle>(tensorInfo, static_cast<unsigned int>(MemorySource::Malloc)); |
| 121 | }; |
| 122 | |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 123 | std::unique_ptr<IWorkload> |
Cian McGriskin | 7894ef9 | 2023-08-01 14:04:09 +0100 | [diff] [blame] | 124 | CreateWorkload(LayerType type, const QueueDescriptor& descriptor, const WorkloadInfo& info) const override; |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 125 | |
| 126 | private: |
| 127 | mutable std::shared_ptr<MockMemoryManager> m_MemoryManager; |
| 128 | }; |
| 129 | |
// Scoped helper pairing construction/destruction side effects for the mock backend
// (presumably registration/deregistration with the backend registry — implementation
// lives in the .cpp; confirm there).
class MockBackendInitialiser
{
public:
    MockBackendInitialiser();
    ~MockBackendInitialiser();
};
| 136 | |
| 137 | class MockBackendProfilingContext : public arm::pipe::IBackendProfilingContext |
| 138 | { |
| 139 | public: |
| 140 | MockBackendProfilingContext(IBackendInternal::IBackendProfilingPtr& backendProfiling) |
| 141 | : m_BackendProfiling(std::move(backendProfiling)) |
| 142 | , m_CapturePeriod(0) |
| 143 | , m_IsTimelineEnabled(true) |
| 144 | {} |
| 145 | |
| 146 | ~MockBackendProfilingContext() = default; |
| 147 | |
| 148 | IBackendInternal::IBackendProfilingPtr& GetBackendProfiling() |
| 149 | { |
| 150 | return m_BackendProfiling; |
| 151 | } |
| 152 | |
| 153 | uint16_t RegisterCounters(uint16_t currentMaxGlobalCounterId) |
| 154 | { |
| 155 | std::unique_ptr<arm::pipe::IRegisterBackendCounters> counterRegistrar = |
| 156 | m_BackendProfiling->GetCounterRegistrationInterface(static_cast<uint16_t>(currentMaxGlobalCounterId)); |
| 157 | |
| 158 | std::string categoryName("MockCounters"); |
| 159 | counterRegistrar->RegisterCategory(categoryName); |
| 160 | |
| 161 | counterRegistrar->RegisterCounter(0, categoryName, 0, 0, 1.f, "Mock Counter One", "Some notional counter"); |
| 162 | |
| 163 | counterRegistrar->RegisterCounter(1, categoryName, 0, 0, 1.f, "Mock Counter Two", |
| 164 | "Another notional counter"); |
| 165 | |
| 166 | std::string units("microseconds"); |
| 167 | uint16_t nextMaxGlobalCounterId = |
| 168 | counterRegistrar->RegisterCounter(2, categoryName, 0, 0, 1.f, "Mock MultiCore Counter", |
| 169 | "A dummy four core counter", units, 4); |
| 170 | return nextMaxGlobalCounterId; |
| 171 | } |
| 172 | |
Jim Flynn | decd08b | 2022-03-13 22:35:46 +0000 | [diff] [blame] | 173 | arm::pipe::Optional<std::string> ActivateCounters(uint32_t capturePeriod, const std::vector<uint16_t>& counterIds) |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 174 | { |
| 175 | if (capturePeriod == 0 || counterIds.size() == 0) |
| 176 | { |
| 177 | m_ActiveCounters.clear(); |
| 178 | } |
| 179 | else if (capturePeriod == 15939u) |
| 180 | { |
Jim Flynn | decd08b | 2022-03-13 22:35:46 +0000 | [diff] [blame] | 181 | return arm::pipe::Optional<std::string>("ActivateCounters example test error"); |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 182 | } |
| 183 | m_CapturePeriod = capturePeriod; |
| 184 | m_ActiveCounters = counterIds; |
Jim Flynn | decd08b | 2022-03-13 22:35:46 +0000 | [diff] [blame] | 185 | return arm::pipe::Optional<std::string>(); |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 186 | } |
| 187 | |
| 188 | std::vector<arm::pipe::Timestamp> ReportCounterValues() |
| 189 | { |
| 190 | std::vector<arm::pipe::CounterValue> counterValues; |
| 191 | |
| 192 | for (auto counterId : m_ActiveCounters) |
| 193 | { |
| 194 | counterValues.emplace_back(arm::pipe::CounterValue{ counterId, counterId + 1u }); |
| 195 | } |
| 196 | |
| 197 | uint64_t timestamp = m_CapturePeriod; |
| 198 | return { arm::pipe::Timestamp{ timestamp, counterValues } }; |
| 199 | } |
| 200 | |
| 201 | bool EnableProfiling(bool) |
| 202 | { |
| 203 | auto sendTimelinePacket = m_BackendProfiling->GetSendTimelinePacket(); |
| 204 | sendTimelinePacket->SendTimelineEntityBinaryPacket(4256); |
| 205 | sendTimelinePacket->Commit(); |
| 206 | return true; |
| 207 | } |
| 208 | |
| 209 | bool EnableTimelineReporting(bool isEnabled) |
| 210 | { |
| 211 | m_IsTimelineEnabled = isEnabled; |
| 212 | return isEnabled; |
| 213 | } |
| 214 | |
| 215 | bool TimelineReportingEnabled() |
| 216 | { |
| 217 | return m_IsTimelineEnabled; |
| 218 | } |
| 219 | |
| 220 | private: |
| 221 | IBackendInternal::IBackendProfilingPtr m_BackendProfiling; |
| 222 | uint32_t m_CapturePeriod; |
| 223 | std::vector<uint16_t> m_ActiveCounters; |
| 224 | std::atomic<bool> m_IsTimelineEnabled; |
| 225 | }; |
| 226 | |
| 227 | class MockBackendProfilingService |
| 228 | { |
| 229 | public: |
| 230 | // Getter for the singleton instance |
| 231 | static MockBackendProfilingService& Instance() |
| 232 | { |
| 233 | static MockBackendProfilingService instance; |
| 234 | return instance; |
| 235 | } |
| 236 | |
| 237 | MockBackendProfilingContext* GetContext() |
| 238 | { |
| 239 | return m_sharedContext.get(); |
| 240 | } |
| 241 | |
| 242 | void SetProfilingContextPtr(std::shared_ptr<MockBackendProfilingContext> shared) |
| 243 | { |
| 244 | m_sharedContext = shared; |
| 245 | } |
| 246 | |
| 247 | private: |
| 248 | std::shared_ptr<MockBackendProfilingContext> m_sharedContext; |
| 249 | }; |
| 250 | |
| 251 | class MockLayerSupport : public LayerSupportBase |
| 252 | { |
| 253 | public: |
| 254 | bool IsLayerSupported(const LayerType& type, |
| 255 | const std::vector<TensorInfo>& infos, |
| 256 | const BaseDescriptor& descriptor, |
| 257 | const Optional<LstmInputParamsInfo>& /*lstmParamsInfo*/, |
| 258 | const Optional<QuantizedLstmInputParamsInfo>& /*quantizedLstmParamsInfo*/, |
| 259 | Optional<std::string&> reasonIfUnsupported) const override |
| 260 | { |
| 261 | switch(type) |
| 262 | { |
| 263 | case LayerType::Input: |
| 264 | return IsInputSupported(infos[0], reasonIfUnsupported); |
| 265 | case LayerType::Output: |
| 266 | return IsOutputSupported(infos[0], reasonIfUnsupported); |
| 267 | case LayerType::Addition: |
| 268 | return IsAdditionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported); |
| 269 | case LayerType::Convolution2d: |
| 270 | { |
| 271 | if (infos.size() != 4) |
| 272 | { |
| 273 | throw InvalidArgumentException("Invalid number of TransposeConvolution2d " |
| 274 | "TensorInfos. TensorInfos should be of format: " |
| 275 | "{input, output, weights, biases}."); |
| 276 | } |
| 277 | |
| 278 | auto desc = *(PolymorphicDowncast<const Convolution2dDescriptor*>(&descriptor)); |
| 279 | if (infos[3] == TensorInfo()) |
| 280 | { |
| 281 | return IsConvolution2dSupported(infos[0], |
| 282 | infos[1], |
| 283 | desc, |
| 284 | infos[2], |
| 285 | EmptyOptional(), |
| 286 | reasonIfUnsupported); |
| 287 | } |
| 288 | else |
| 289 | { |
| 290 | return IsConvolution2dSupported(infos[0], |
| 291 | infos[1], |
| 292 | desc, |
| 293 | infos[2], |
| 294 | infos[3], |
| 295 | reasonIfUnsupported); |
| 296 | } |
| 297 | } |
Mike Kelly | 3ec3077 | 2023-03-08 13:47:17 +0000 | [diff] [blame] | 298 | case LayerType::ElementwiseBinary: |
| 299 | { |
| 300 | auto elementwiseDesc = *(PolymorphicDowncast<const ElementwiseBinaryDescriptor*>(&descriptor)); |
| 301 | return (elementwiseDesc.m_Operation == BinaryOperation::Add); |
| 302 | } |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 303 | default: |
| 304 | return false; |
| 305 | } |
| 306 | } |
| 307 | |
| 308 | bool IsInputSupported(const TensorInfo& /*input*/, |
Francis Murtagh | 6627703 | 2023-07-28 13:01:32 +0100 | [diff] [blame] | 309 | Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 310 | { |
| 311 | return true; |
| 312 | } |
| 313 | |
| 314 | bool IsOutputSupported(const TensorInfo& /*input*/, |
Francis Murtagh | 6627703 | 2023-07-28 13:01:32 +0100 | [diff] [blame] | 315 | Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 316 | { |
| 317 | return true; |
| 318 | } |
| 319 | |
| 320 | bool IsAdditionSupported(const TensorInfo& /*input0*/, |
| 321 | const TensorInfo& /*input1*/, |
| 322 | const TensorInfo& /*output*/, |
Francis Murtagh | 6627703 | 2023-07-28 13:01:32 +0100 | [diff] [blame] | 323 | Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 324 | { |
| 325 | return true; |
| 326 | } |
| 327 | |
| 328 | bool IsConvolution2dSupported(const TensorInfo& /*input*/, |
| 329 | const TensorInfo& /*output*/, |
| 330 | const Convolution2dDescriptor& /*descriptor*/, |
| 331 | const TensorInfo& /*weights*/, |
| 332 | const Optional<TensorInfo>& /*biases*/, |
Francis Murtagh | 6627703 | 2023-07-28 13:01:32 +0100 | [diff] [blame] | 333 | Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const |
Cathal Corbett | 3464ba1 | 2022-03-04 11:36:39 +0000 | [diff] [blame] | 334 | { |
| 335 | return true; |
| 336 | } |
| 337 | }; |
| 338 | |
Colm Donelan | 17948b5 | 2022-02-01 23:37:04 +0000 | [diff] [blame] | 339 | } // namespace armnn |