//
// Copyright © 2022-2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/Deprecated.hpp>
#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/Optional.hpp>
#include <armnn/Tensor.hpp>
#include <armnn/Types.hpp>
#include <armnn/backends/IBackendInternal.hpp>
#include <armnn/backends/MemCopyWorkload.hpp>
#include <armnn/backends/ITensorHandle.hpp>
#include <armnn/backends/IWorkload.hpp>
#include <armnn/backends/OptimizationViews.hpp>
#include <armnn/backends/SubgraphView.hpp>
#include <armnn/backends/WorkloadData.hpp>
#include <armnn/backends/WorkloadFactory.hpp>
#include <armnn/backends/WorkloadInfo.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>
#include <armnnTestUtils/MockTensorHandle.hpp>
#include <backendsCommon/LayerSupportBase.hpp>

#include <client/include/CounterValue.hpp>
#include <client/include/ISendTimelinePacket.hpp>
#include <client/include/Timestamp.hpp>
#include <client/include/backends/IBackendProfiling.hpp>
#include <client/include/backends/IBackendProfilingContext.hpp>
#include <common/include/Optional.hpp>

#include <atomic>
#include <cstdint>
#include <memory>
#include <string>
#include <utility>
#include <vector>

namespace armnn
{
class BackendId;
class ICustomAllocator;
class MockMemoryManager;
struct LstmInputParamsInfo;
struct QuantizedLstmInputParamsInfo;

// A bare-bones mock backend to enable unit testing of simple tensor manipulation features.
class MockBackend : public IBackendInternal
{
public:
    MockBackend() = default;

    ~MockBackend() = default;

    static const BackendId& GetIdStatic();

    const BackendId& GetId() const override
    {
        return GetIdStatic();
    }
    IBackendInternal::IWorkloadFactoryPtr
        CreateWorkloadFactory(const IBackendInternal::IMemoryManagerSharedPtr& memoryManager = nullptr) const override;

    IBackendInternal::ILayerSupportSharedPtr GetLayerSupport() const override;

    IBackendInternal::IMemoryManagerUniquePtr CreateMemoryManager() const override;

    IBackendInternal::IBackendContextPtr CreateBackendContext(const IRuntime::CreationOptions&) const override;
    IBackendInternal::IBackendProfilingContextPtr
        CreateBackendProfilingContext(const IRuntime::CreationOptions& creationOptions,
                                      IBackendProfilingPtr& backendProfiling) override;

    OptimizationViews OptimizeSubgraphView(const SubgraphView& subgraph) const override;

    std::unique_ptr<ICustomAllocator> GetDefaultAllocator() const override;
};
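// Illustrative usage sketch (not part of the API declared above): a unit test can
// instantiate the mock backend directly and query the pieces it provides. The exact
// test wiring shown here is an assumption, but every call used is declared above.
//
//     MockBackend backend;
//     IBackendInternal::IMemoryManagerSharedPtr memoryManager = backend.CreateMemoryManager();
//     IBackendInternal::IWorkloadFactoryPtr factory           = backend.CreateWorkloadFactory(memoryManager);
//     IBackendInternal::ILayerSupportSharedPtr layerSupport   = backend.GetLayerSupport();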

class MockWorkloadFactory : public IWorkloadFactory
{

public:
    explicit MockWorkloadFactory(const std::shared_ptr<MockMemoryManager>& memoryManager);
    MockWorkloadFactory();

    ~MockWorkloadFactory()
    {}

    const BackendId& GetBackendId() const override;

    bool SupportsSubTensors() const override
    {
        return false;
    }

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateSubTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateSubTensorHandle(ITensorHandle&,
                                                         TensorShape const&,
                                                         unsigned int const*) const override
    {
        return nullptr;
    }

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      const bool IsMemoryManaged = true) const override
    {
        IgnoreUnused(IsMemoryManaged);
        return std::make_unique<MockTensorHandle>(tensorInfo, m_MemoryManager);
    };

    ARMNN_DEPRECATED_MSG("Use ITensorHandleFactory::CreateTensorHandle instead")
    std::unique_ptr<ITensorHandle> CreateTensorHandle(const TensorInfo& tensorInfo,
                                                      DataLayout dataLayout,
                                                      const bool IsMemoryManaged = true) const override
    {
        IgnoreUnused(dataLayout, IsMemoryManaged);
        return std::make_unique<MockTensorHandle>(tensorInfo, static_cast<unsigned int>(MemorySource::Malloc));
    };

    std::unique_ptr<IWorkload>
        CreateWorkload(LayerType type, const QueueDescriptor& descriptor, const WorkloadInfo& info) const override;

private:
    mutable std::shared_ptr<MockMemoryManager> m_MemoryManager;
};

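// Minimal sketch of exercising the factory in a test. The default constructor is
// assumed to set up its own MockMemoryManager; the CreateTensorHandle overload used
// here is the deprecated one declared above and simply wraps a MockTensorHandle.
//
//     MockWorkloadFactory factory;
//     TensorInfo tensorInfo({ 1, 1, 2, 2 }, DataType::Float32);
//     std::unique_ptr<ITensorHandle> handle = factory.CreateTensorHandle(tensorInfo);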
// RAII helper: its constructor and destructor are intended to register and deregister
// the MockBackend with the backend registry, so a test can scope the backend's availability.
class MockBackendInitialiser
{
public:
    MockBackendInitialiser();
    ~MockBackendInitialiser();
};

class MockBackendProfilingContext : public arm::pipe::IBackendProfilingContext
{
public:
    MockBackendProfilingContext(IBackendInternal::IBackendProfilingPtr& backendProfiling)
        : m_BackendProfiling(std::move(backendProfiling))
        , m_CapturePeriod(0)
        , m_IsTimelineEnabled(true)
    {}

    ~MockBackendProfilingContext() = default;

    IBackendInternal::IBackendProfilingPtr& GetBackendProfiling()
    {
        return m_BackendProfiling;
    }

    uint16_t RegisterCounters(uint16_t currentMaxGlobalCounterId)
    {
        std::unique_ptr<arm::pipe::IRegisterBackendCounters> counterRegistrar =
            m_BackendProfiling->GetCounterRegistrationInterface(static_cast<uint16_t>(currentMaxGlobalCounterId));

        std::string categoryName("MockCounters");
        counterRegistrar->RegisterCategory(categoryName);

        counterRegistrar->RegisterCounter(0, categoryName, 0, 0, 1.f, "Mock Counter One", "Some notional counter");

        counterRegistrar->RegisterCounter(1, categoryName, 0, 0, 1.f, "Mock Counter Two",
                                          "Another notional counter");

        std::string units("microseconds");
        uint16_t nextMaxGlobalCounterId =
            counterRegistrar->RegisterCounter(2, categoryName, 0, 0, 1.f, "Mock MultiCore Counter",
                                              "A dummy four core counter", units, 4);
        return nextMaxGlobalCounterId;
    }

    arm::pipe::Optional<std::string> ActivateCounters(uint32_t capturePeriod, const std::vector<uint16_t>& counterIds)
    {
        if (capturePeriod == 0 || counterIds.size() == 0)
        {
            m_ActiveCounters.clear();
        }
        else if (capturePeriod == 15939u)
        {
            return arm::pipe::Optional<std::string>("ActivateCounters example test error");
        }
        m_CapturePeriod = capturePeriod;
        m_ActiveCounters = counterIds;
        return arm::pipe::Optional<std::string>();
    }

    std::vector<arm::pipe::Timestamp> ReportCounterValues()
    {
        std::vector<arm::pipe::CounterValue> counterValues;

        for (auto counterId : m_ActiveCounters)
        {
            counterValues.emplace_back(arm::pipe::CounterValue{ counterId, counterId + 1u });
        }

        uint64_t timestamp = m_CapturePeriod;
        return { arm::pipe::Timestamp{ timestamp, counterValues } };
    }

    bool EnableProfiling(bool)
    {
        auto sendTimelinePacket = m_BackendProfiling->GetSendTimelinePacket();
        sendTimelinePacket->SendTimelineEntityBinaryPacket(4256);
        sendTimelinePacket->Commit();
        return true;
    }

    bool EnableTimelineReporting(bool isEnabled)
    {
        m_IsTimelineEnabled = isEnabled;
        return isEnabled;
    }

    bool TimelineReportingEnabled()
    {
        return m_IsTimelineEnabled;
    }

private:
    IBackendInternal::IBackendProfilingPtr m_BackendProfiling;
    uint32_t m_CapturePeriod;
    std::vector<uint16_t> m_ActiveCounters;
    std::atomic<bool> m_IsTimelineEnabled;
};
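// Sketch of the counter lifecycle this mock context supports (obtaining a valid
// IBackendProfilingPtr is assumed and outside the scope of this header). Note that a
// capture period of 15939u is treated by ActivateCounters above as a deliberate error
// trigger, so tests can exercise the failure path.
//
//     MockBackendProfilingContext context(backendProfiling);
//     uint16_t nextGlobalCounterId = context.RegisterCounters(/*currentMaxGlobalCounterId=*/0);
//     context.ActivateCounters(/*capturePeriod=*/1000u, { 0, 1 });
//     std::vector<arm::pipe::Timestamp> values = context.ReportCounterValues();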

class MockBackendProfilingService
{
public:
    // Getter for the singleton instance
    static MockBackendProfilingService& Instance()
    {
        static MockBackendProfilingService instance;
        return instance;
    }

    MockBackendProfilingContext* GetContext()
    {
        return m_sharedContext.get();
    }

    void SetProfilingContextPtr(std::shared_ptr<MockBackendProfilingContext> shared)
    {
        m_sharedContext = shared;
    }

private:
    std::shared_ptr<MockBackendProfilingContext> m_sharedContext;
};
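// Typical access pattern for the singleton above (sketch; backendProfiling is assumed
// to be a valid IBackendInternal::IBackendProfilingPtr):
//
//     auto context = std::make_shared<MockBackendProfilingContext>(backendProfiling);
//     MockBackendProfilingService::Instance().SetProfilingContextPtr(context);
//     MockBackendProfilingContext* activeContext = MockBackendProfilingService::Instance().GetContext();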

class MockLayerSupport : public LayerSupportBase
{
public:
    bool IsLayerSupported(const LayerType& type,
                          const std::vector<TensorInfo>& infos,
                          const BaseDescriptor& descriptor,
                          const Optional<LstmInputParamsInfo>& /*lstmParamsInfo*/,
                          const Optional<QuantizedLstmInputParamsInfo>& /*quantizedLstmParamsInfo*/,
                          Optional<std::string&> reasonIfUnsupported) const override
    {
        switch(type)
        {
            case LayerType::Input:
                return IsInputSupported(infos[0], reasonIfUnsupported);
            case LayerType::Output:
                return IsOutputSupported(infos[0], reasonIfUnsupported);
            case LayerType::Addition:
                return IsAdditionSupported(infos[0], infos[1], infos[2], reasonIfUnsupported);
            case LayerType::Convolution2d:
            {
                if (infos.size() != 4)
                {
                    throw InvalidArgumentException("Invalid number of Convolution2d "
                                                   "TensorInfos. TensorInfos should be of format: "
                                                   "{input, output, weights, biases}.");
                }

                auto desc = *(PolymorphicDowncast<const Convolution2dDescriptor*>(&descriptor));
                if (infos[3] == TensorInfo())
                {
                    return IsConvolution2dSupported(infos[0],
                                                    infos[1],
                                                    desc,
                                                    infos[2],
                                                    EmptyOptional(),
                                                    reasonIfUnsupported);
                }
                else
                {
                    return IsConvolution2dSupported(infos[0],
                                                    infos[1],
                                                    desc,
                                                    infos[2],
                                                    infos[3],
                                                    reasonIfUnsupported);
                }
            }
            case LayerType::ElementwiseBinary:
            {
                auto elementwiseDesc = *(PolymorphicDowncast<const ElementwiseBinaryDescriptor*>(&descriptor));
                return (elementwiseDesc.m_Operation == BinaryOperation::Add);
            }
            default:
                return false;
        }
    }

    bool IsInputSupported(const TensorInfo& /*input*/,
                          Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const
    {
        return true;
    }

    bool IsOutputSupported(const TensorInfo& /*input*/,
                           Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const
    {
        return true;
    }

    bool IsAdditionSupported(const TensorInfo& /*input0*/,
                             const TensorInfo& /*input1*/,
                             const TensorInfo& /*output*/,
                             Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const
    {
        return true;
    }

    bool IsConvolution2dSupported(const TensorInfo& /*input*/,
                                  const TensorInfo& /*output*/,
                                  const Convolution2dDescriptor& /*descriptor*/,
                                  const TensorInfo& /*weights*/,
                                  const Optional<TensorInfo>& /*biases*/,
                                  Optional<std::string&> /*reasonIfUnsupported = EmptyOptional()*/) const
    {
        return true;
    }
};
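// Sketch of a support query against MockLayerSupport; the three TensorInfos follow the
// { input0, input1, output } ordering that the Addition case above expects.
//
//     MockLayerSupport layerSupport;
//     TensorInfo info({ 1, 2, 2, 1 }, DataType::Float32);
//     std::string reason;
//     bool supported = layerSupport.IsLayerSupported(LayerType::Addition,
//                                                    { info, info, info },
//                                                    BaseDescriptor(),
//                                                    EmptyOptional(),
//                                                    EmptyOptional(),
//                                                    Optional<std::string&>(reason));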

} // namespace armnn