blob: b144c78889284d4b023a023f4dfd3fefd6854131 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
Finn Williamsf24effa2020-07-03 10:12:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
5#pragma once
6
7#include "LayerFwd.hpp"
8
Matteo Martincighe5b8eb92019-11-28 15:45:42 +00009#include <armnn/backends/ITensorHandleFactory.hpp>
10#include <OutputHandler.hpp>
Derek Lamberti84da38b2019-06-13 11:40:08 +010011#include <backendsCommon/TensorHandleFactoryRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000012#include <backendsCommon/WorkloadDataCollector.hpp>
13#include <backendsCommon/WorkloadInfo.hpp>
telsoa014fcda012018-03-09 14:13:49 +000014#include "InternalTypes.hpp"
surmeh01bceff2f2018-03-29 16:29:27 +010015#include "SerializeLayerParameters.hpp"
Rob Hughesd0b4aa92022-02-09 11:24:25 +000016#include "DllExport.hpp"
telsoa014fcda012018-03-09 14:13:49 +000017
18#include <armnn/Types.hpp>
19#include <armnn/Tensor.hpp>
20#include <armnn/INetwork.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000021#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010022#include <armnn/utility/NumericCast.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010023#include <armnn/utility/PolymorphicDowncast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000024
25#include <algorithm>
Jan Eilersbb446e52020-04-02 13:56:54 +010026#include <functional>
27#include <iostream>
28#include <list>
telsoa014fcda012018-03-09 14:13:49 +000029#include <memory>
30#include <string>
31#include <vector>
Colm Donelan0c479742021-12-10 12:43:54 +000032#include <armnn/backends/WorkloadData.hpp>
telsoa014fcda012018-03-09 14:13:49 +000033
telsoa014fcda012018-03-09 14:13:49 +000034namespace armnn
35{
36
37class IWorkload;
38class IWorkloadFactory;
39class Layer;
40class Graph;
41
42class InputSlot final : public IInputSlot
43{
44public:
45 explicit InputSlot(Layer& owner, unsigned int slotIndex)
46 : m_OwningLayer(owner)
47 , m_Connection(nullptr)
48 , m_SlotIndex(slotIndex)
49 {}
50
51 ~InputSlot();
52
53 Layer& GetOwningLayer() const { return m_OwningLayer; }
Francis Murtaghcea3d492022-06-27 12:44:50 +010054 unsigned int GetSlotIndex() const override { return m_SlotIndex; }
telsoa014fcda012018-03-09 14:13:49 +000055
56 const OutputSlot* GetConnectedOutputSlot() const { return m_Connection; }
57 OutputSlot* GetConnectedOutputSlot() { return m_Connection; }
58
Francis Murtagh9d74ba62022-01-19 16:31:58 +000059 const IConnectableLayer& GetOwningIConnectableLayer() const override;
Nabeel Ahmad09fa24d2022-06-16 13:55:00 +010060 IConnectableLayer& GetOwningIConnectableLayer() override;
Francis Murtagh9d74ba62022-01-19 16:31:58 +000061
telsoa01c577f2c2018-08-31 09:22:23 +010062 /// Links the slot to an output slot or breaks an existing link if passing nullptr.
telsoa014fcda012018-03-09 14:13:49 +000063 void SetConnection(OutputSlot* source)
64 {
65 if (m_Connection != nullptr && source != nullptr)
66 {
67 throw InvalidArgumentException("Tried to connect an output slot to an input slot, "
68 "but the latter already has a connection");
69 }
70 m_Connection = source;
71 }
72
telsoa01c577f2c2018-08-31 09:22:23 +010073 // Inserts single-output existing layer at this point in the graph.
telsoa014fcda012018-03-09 14:13:49 +000074 void Insert(Layer& layer);
75
Francis Murtaghcea3d492022-06-27 12:44:50 +010076 // InputSlot
telsoa014fcda012018-03-09 14:13:49 +000077
78 const IOutputSlot* GetConnection() const override;
79 IOutputSlot* GetConnection() override;
80
81private:
82 Layer& m_OwningLayer;
83 OutputSlot* m_Connection;
84 const unsigned int m_SlotIndex;
85};
86
/// An output connection point of a Layer. One OutputSlot may feed any number of
/// InputSlots, and also carries the metadata describing how its tensor is produced:
/// the OutputHandler, the tensor-handle factory id, and a per-connection EdgeStrategy.
class OutputSlot final : public IOutputSlot
{
public:
    explicit OutputSlot(Layer& owner, OutputHandler& outputHandler)
    : m_OwningLayer(owner)
    , m_OutputHandler(outputHandler)
    , m_TensorHandleFactoryId(ITensorHandleFactory::LegacyFactoryId)
    {}

    // Slots are tied to their owning layer: no copying, no move-assignment.
    OutputSlot(const OutputSlot&) = delete;
    OutputSlot& operator=(const OutputSlot&) = delete;
    OutputSlot& operator=(OutputSlot&&) = delete;

    // Move construction is kept so slots can be stored in a std::vector.
    OutputSlot(OutputSlot&&) = default;

    ~OutputSlot()
    {
        try
        {
            // Coverity fix: DisconnectAll() may throw uncaught exceptions.
            DisconnectAll();
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting all output slots: "
                      << e.what() << std::endl;
        }
    }

    /// The layer that owns this slot.
    Layer& GetOwningLayer() const { return m_OwningLayer; }

    const IConnectableLayer& GetOwningIConnectableLayer() const override;
    IConnectableLayer& GetOwningIConnectableLayer() override;

    LayerGuid GetOwningLayerGuid() const override;

    /// Handler describing how this slot's output tensor is materialised.
    const OutputHandler& GetOutputHandler() const { return m_OutputHandler; }
    OutputHandler& GetOutputHandler() { return m_OutputHandler; }

    int Connect(InputSlot& destination);
    void Disconnect(InputSlot& slot);

    /// All input slots currently fed by this output (non-owning pointers).
    const std::vector<InputSlot*>& GetConnections() const { return m_Connections; }
    /// Memory strategy per outgoing connection; parallel to GetConnections().
    const std::vector<EdgeStrategy>& GetEdgeStrategies() const { return m_EdgeStrategies; }

    bool ValidateTensorShape(const TensorShape& shape) const;

    // Disconnect all connections.
    void DisconnectAll();

    /// Moves all connections to another OutputSlot.
    void MoveAllConnections(OutputSlot& destination);

    // IOutputSlot

    unsigned int GetNumConnections() const override { return armnn::numeric_cast<unsigned int>(m_Connections.size()); }
    const InputSlot* GetConnection(unsigned int index) const override;
    InputSlot* GetConnection(unsigned int index) override;

    void SetTensorInfo(const TensorInfo& tensorInfo) override;
    const TensorInfo& GetTensorInfo() const override;
    bool IsTensorInfoSet() const override;

    // Interface overloads forward to the concrete InputSlot overloads above.
    int Connect(IInputSlot& destination) override
    {
        return Connect(*PolymorphicDowncast<InputSlot*>(&destination));
    }

    void Disconnect(IInputSlot& slot) override
    {
        return Disconnect(*PolymorphicDowncast<InputSlot*>(&slot));
    }

    unsigned int CalculateIndexOnOwner() const override;

    bool operator==(const OutputSlot& other) const;

    void SetTensorHandleFactory(const ITensorHandleFactory::FactoryId& id);
    ITensorHandleFactory::FactoryId GetTensorHandleFactoryId() const;

    void SetEdgeStrategy(unsigned int connectionIndex, EdgeStrategy strategy);
    EdgeStrategy GetEdgeStrategyForConnection(unsigned int connectionIdx) const;

private:
    void ValidateConnectionIndex(unsigned int index) const;

    Layer& m_OwningLayer;                   // Layer that owns this slot.
    OutputHandler& m_OutputHandler;         // Owned by the layer; describes tensor creation.
    std::vector<InputSlot*> m_Connections;  // Non-owning; the graph owns the slots.

    ITensorHandleFactory::FactoryId m_TensorHandleFactoryId; // Defaults to LegacyFactoryId.
    std::vector<EdgeStrategy> m_EdgeStrategies;              // One entry per connection.
};
183
telsoa01c577f2c2018-08-31 09:22:23 +0100184// InputSlot inlines that need OutputSlot declaration.
telsoa014fcda012018-03-09 14:13:49 +0000185
186inline InputSlot::~InputSlot()
187{
188 if (m_Connection != nullptr)
189 {
surmeh013537c2c2018-05-18 16:31:43 +0100190 try
191 {
192 // Coverity fix: Disconnect() may throw uncaught exceptions.
193 m_Connection->Disconnect(*this);
194 }
195 catch (const std::exception& e)
196 {
197 // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
198 // exception of type std::length_error.
199 // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
200 std::cerr << "WARNING: An error has occurred when disconnecting an input slot: "
201 << e.what() << std::endl;
202 }
telsoa014fcda012018-03-09 14:13:49 +0000203 }
204}
205
206inline const IOutputSlot* InputSlot::GetConnection() const { return GetConnectedOutputSlot(); }
207inline IOutputSlot* InputSlot::GetConnection() { return GetConnectedOutputSlot(); }
208
telsoa01c577f2c2018-08-31 09:22:23 +0100209
James Conroy1f58f032021-04-27 17:13:27 +0100210class ScopedTensorHandle;
telsoa01c577f2c2018-08-31 09:22:23 +0100211
telsoa014fcda012018-03-09 14:13:49 +0000212// Base layer class
213
214using LayerPriority = unsigned int;
Keith Davisdf04d232020-10-23 17:20:05 +0100215using AdditionalInfoObjectPtr = std::shared_ptr<void>;
telsoa014fcda012018-03-09 14:13:49 +0000216
217class Layer : public IConnectableLayer
218{
219public:
telsoa01c577f2c2018-08-31 09:22:23 +0100220 /// @param name - Optional name for the layer (may be nullptr).
telsoa014fcda012018-03-09 14:13:49 +0000221 Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, const char* name);
Derek Lamberti0cff1632018-09-18 16:02:25 +0100222 Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, DataLayout layout, const char* name);
telsoa014fcda012018-03-09 14:13:49 +0000223
Finn Williamsb454c5c2021-02-09 15:56:23 +0000224 void ExecuteStrategy(IStrategy& strategy) const override;
225
226
telsoa014fcda012018-03-09 14:13:49 +0000227 const std::string& GetNameStr() const
228 {
229 return m_LayerName;
230 }
231
232 const OutputHandler& GetOutputHandler(unsigned int i = 0) const
233 {
234 return m_OutputHandlers[i];
235 }
236
237 OutputHandler& GetOutputHandler(unsigned int i = 0)
238 {
239 return const_cast<OutputHandler&>(const_cast<const Layer*>(this)->GetOutputHandler(i));
240 }
241
Finn Williamsf24effa2020-07-03 10:12:03 +0100242 ShapeInferenceMethod GetShapeInferenceMethod() const { return m_ShapeInferenceMethod; };
Mike Kelly80512b02022-05-16 23:10:42 +0100243 bool GetAllowExpandedDims() const { return m_AllowExpandedDims; };
Finn Williamsf24effa2020-07-03 10:12:03 +0100244
telsoa014fcda012018-03-09 14:13:49 +0000245 const std::vector<InputSlot>& GetInputSlots() const { return m_InputSlots; }
246 const std::vector<OutputSlot>& GetOutputSlots() const { return m_OutputSlots; }
247
telsoa01c577f2c2018-08-31 09:22:23 +0100248 // Allows non-const access to input slots, but don't expose vector (vector size is fixed at layer construction).
telsoa014fcda012018-03-09 14:13:49 +0000249 std::vector<InputSlot>::iterator BeginInputSlots() { return m_InputSlots.begin(); }
250 std::vector<InputSlot>::iterator EndInputSlots() { return m_InputSlots.end(); }
251
telsoa01c577f2c2018-08-31 09:22:23 +0100252 // Allows non-const access to output slots, but don't expose vector (vector size is fixed at layer construction).
telsoa014fcda012018-03-09 14:13:49 +0000253 std::vector<OutputSlot>::iterator BeginOutputSlots() { return m_OutputSlots.begin(); }
254 std::vector<OutputSlot>::iterator EndOutputSlots() { return m_OutputSlots.end(); }
255
telsoa01c577f2c2018-08-31 09:22:23 +0100256 // Checks whether the outputs of this layer don't have any connection.
telsoa014fcda012018-03-09 14:13:49 +0000257 bool IsOutputUnconnected()
258 {
259 unsigned int numConnections = 0;
260
261 for (auto&& output : GetOutputSlots())
262 {
263 numConnections += output.GetNumConnections();
264 }
265
266 return (GetNumOutputSlots() > 0) && (numConnections == 0);
267 }
268
telsoa01c577f2c2018-08-31 09:22:23 +0100269 // Used for sorting.
telsoa014fcda012018-03-09 14:13:49 +0000270 void ResetPriority() const;
271 LayerPriority GetPriority() const;
272
Finn Williamsb454c5c2021-02-09 15:56:23 +0000273 LayerType GetType() const override { return m_Type; }
telsoa014fcda012018-03-09 14:13:49 +0000274
275 DataType GetDataType() const;
276
David Beck33f0ae02018-10-18 15:13:56 +0100277 const BackendId& GetBackendId() const { return m_BackendId; }
278 void SetBackendId(const BackendId& id) { m_BackendId = id; }
telsoa014fcda012018-03-09 14:13:49 +0000279
280 // Virtuals
281
Derek Lamberti94a88d22019-12-10 21:12:59 +0000282 virtual std::unique_ptr<IWorkload> CreateWorkload(const IWorkloadFactory& factory) const = 0;
telsoa014fcda012018-03-09 14:13:49 +0000283
David Monahan3fb7e102019-08-20 11:25:29 +0100284 virtual void CreateTensorHandles(const TensorHandleFactoryRegistry& registry,
285 const IWorkloadFactory& factory,
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +0100286 const bool IsMemoryManaged = true);
telsoa014fcda012018-03-09 14:13:49 +0000287
telsoa01c577f2c2018-08-31 09:22:23 +0100288 /// Creates a dynamically-allocated copy of this layer.
289 /// @param graph - The Graph into which this Layer is being cloned.
telsoa014fcda012018-03-09 14:13:49 +0000290 virtual Layer* Clone(Graph& graph) const = 0;
291
telsoa01c577f2c2018-08-31 09:22:23 +0100292 void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation& location) const;
293
Finn Williamsf24effa2020-07-03 10:12:03 +0100294 virtual void ValidateTensorShapesFromInputs() = 0;
telsoa014fcda012018-03-09 14:13:49 +0000295
telsoa01c577f2c2018-08-31 09:22:23 +0100296 std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;
297
298 /// Helper to serialize the layer parameters to string.
299 /// (currently used in DotSerializer and company).
Andre Ghattas23ae2ea2019-08-07 12:18:38 +0100300 virtual void SerializeLayerParameters(ParameterStringifyFunction& fn) const;
surmeh01bceff2f2018-03-29 16:29:27 +0100301
telsoa01c577f2c2018-08-31 09:22:23 +0100302 // Free up the constant source data
303 virtual void ReleaseConstantData();
304
305 template<typename Op>
306 void OperateOnConstantTensors(Op op)
307 {
308 for (auto constant : GetConstantTensorsByRef())
309 {
310 if (constant.get())
311 {
312 op(constant);
313 }
314 }
315 };
316
telsoa014fcda012018-03-09 14:13:49 +0000317 // IConnectableLayer
318
319 const char* GetName() const override { return m_LayerName.c_str(); }
320
321 unsigned int GetNumInputSlots() const override { return static_cast<unsigned int>(m_InputSlots.size()); }
322 unsigned int GetNumOutputSlots() const override { return static_cast<unsigned int>(m_OutputSlots.size()); }
323
324 const InputSlot& GetInputSlot(unsigned int index) const override { return m_InputSlots.at(index); }
325 InputSlot& GetInputSlot(unsigned int index) override { return m_InputSlots.at(index); }
326 const OutputSlot& GetOutputSlot(unsigned int index = 0) const override { return m_OutputSlots.at(index); }
327 OutputSlot& GetOutputSlot(unsigned int index = 0) override { return m_OutputSlots.at(index); }
328
surmeh01bceff2f2018-03-29 16:29:27 +0100329 void SetGuid(LayerGuid guid) { m_Guid = guid; }
330 LayerGuid GetGuid() const final { return m_Guid; }
331
telsoa01c577f2c2018-08-31 09:22:23 +0100332 void AddRelatedLayerName(const std::string layerName) { m_RelatedLayerNames.emplace_back(layerName); }
333
334 const std::list<std::string>& GetRelatedLayerNames() { return m_RelatedLayerNames; }
335
Derek Lamberti8106b7c2019-05-07 21:33:30 +0100336 virtual void Reparent(Graph& dest, std::list<Layer*>::const_iterator iterator) = 0;
Derek Lamberti4a9e24b2020-01-03 16:53:38 +0000337
338 void BackendSelectionHint(Optional<BackendId> backend) final
339 {
340 m_BackendHint = backend;
341 }
342 Optional<BackendId> GetBackendHint() const { return m_BackendHint; }
343
Finn Williamsf24effa2020-07-03 10:12:03 +0100344 void SetShapeInferenceMethod(ShapeInferenceMethod shapeInferenceMethod)
345 {
346 m_ShapeInferenceMethod = shapeInferenceMethod;
347 }
348
Mike Kelly80512b02022-05-16 23:10:42 +0100349 void SetAllowExpandedDims(bool allowExpandedDims)
350 {
351 m_AllowExpandedDims = allowExpandedDims;
352 }
353
Keith Davisdf04d232020-10-23 17:20:05 +0100354 template<typename T>
Mike Kelly7a0efa52020-11-17 13:55:01 +0000355 std::shared_ptr<T> GetAdditionalInformation() const
Keith Davisdf04d232020-10-23 17:20:05 +0100356 {
357 return std::static_pointer_cast<T>(m_AdditionalInfoObject);
358 }
359
360 void SetAdditionalInfoForObject(const AdditionalInfoObjectPtr& additionalInfo)
361 {
362 m_AdditionalInfoObject = additionalInfo;
363 }
364
Jim Flynne4665962022-01-31 16:08:53 +0000365 virtual const BaseDescriptor& GetParameters() const override { return m_NullDescriptor; }
366
telsoa014fcda012018-03-09 14:13:49 +0000367protected:
telsoa01c577f2c2018-08-31 09:22:23 +0100368 // Graph needs access to the virtual destructor.
telsoa014fcda012018-03-09 14:13:49 +0000369 friend class Graph;
370 virtual ~Layer() = default;
371
372 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000373 void CollectQueueDescriptorInputs(QueueDescriptor& descriptor, WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000374 {
375 WorkloadDataCollector dataCollector(descriptor.m_Inputs, info.m_InputTensorInfos);
Derek Lamberti94a88d22019-12-10 21:12:59 +0000376 CollectWorkloadInputs(dataCollector);
telsoa014fcda012018-03-09 14:13:49 +0000377 }
378
379 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000380 void CollectQueueDescriptorOutputs(QueueDescriptor& descriptor, WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000381 {
382 WorkloadDataCollector dataCollector(descriptor.m_Outputs, info.m_OutputTensorInfos);
Derek Lamberti94a88d22019-12-10 21:12:59 +0000383 CollectWorkloadOutputs(dataCollector);
telsoa014fcda012018-03-09 14:13:49 +0000384 }
385
Finn Williams87d0bda2020-07-03 10:12:03 +0100386 void ValidateAndCopyShape(const TensorShape& outputShape,
387 const TensorShape& inferredShape,
388 const ShapeInferenceMethod shapeInferenceMethod,
389 const std::string& layerName,
390 const unsigned int outputSlotIndex = 0);
391
392 void VerifyShapeInferenceType(const TensorShape& outputShape, ShapeInferenceMethod shapeInferenceMethod);
393
telsoa01c577f2c2018-08-31 09:22:23 +0100394 /// Helper function to reduce duplication in *Layer::CreateWorkload.
telsoa014fcda012018-03-09 14:13:49 +0000395 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000396 WorkloadInfo PrepInfoAndDesc(QueueDescriptor& descriptor) const
telsoa014fcda012018-03-09 14:13:49 +0000397 {
398 WorkloadInfo info;
Derek Lamberti94a88d22019-12-10 21:12:59 +0000399 CollectQueueDescriptorInputs(descriptor, info);
400 CollectQueueDescriptorOutputs(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000401 return info;
402 }
403
404 template <typename LayerType, typename ... Params>
405 LayerType* CloneBase(Graph& graph, Params&& ... params) const;
406
telsoa01c577f2c2018-08-31 09:22:23 +0100407 // Retrieve the Handles to the constants
Nikhil Raj2e241752022-02-01 16:42:15 +0000408 // Marking this as override and having this here keeps IConnectable abstract with only pure virtual function
409 virtual ConstantTensors GetConstantTensorsByRef() override {return ConstantTensors(); };
telsoa01c577f2c2018-08-31 09:22:23 +0100410
Keith Davisdf04d232020-10-23 17:20:05 +0100411 // "Blob"
412 AdditionalInfoObjectPtr m_AdditionalInfoObject;
413
414 // Utility method to set a pointer in the queueDescriptor to the "blob" location in the layer
415 void SetAdditionalInfo(QueueDescriptor& descriptor) const;
416
telsoa014fcda012018-03-09 14:13:49 +0000417private:
Derek Lamberti94a88d22019-12-10 21:12:59 +0000418 void CollectWorkloadInputs(WorkloadDataCollector& dataCollector) const;
419 void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector) const;
telsoa014fcda012018-03-09 14:13:49 +0000420
421protected:
422 std::vector<OutputHandler> m_OutputHandlers;
Finn Williamsf24effa2020-07-03 10:12:03 +0100423 ShapeInferenceMethod m_ShapeInferenceMethod;
telsoa014fcda012018-03-09 14:13:49 +0000424
425private:
426 const std::string m_LayerName;
427
428 std::vector<InputSlot> m_InputSlots;
429 std::vector<OutputSlot> m_OutputSlots;
430
431 const LayerType m_Type;
David Beck33f0ae02018-10-18 15:13:56 +0100432 BackendId m_BackendId;
Derek Lamberti4a9e24b2020-01-03 16:53:38 +0000433 Optional<BackendId> m_BackendHint;
telsoa014fcda012018-03-09 14:13:49 +0000434
telsoa01c577f2c2018-08-31 09:22:23 +0100435 /// Used for sorting.
telsoa014fcda012018-03-09 14:13:49 +0000436 mutable LayerPriority m_Priority = 0;
437 mutable bool m_Visiting = false;
surmeh01bceff2f2018-03-29 16:29:27 +0100438
Mike Kelly80512b02022-05-16 23:10:42 +0100439 bool m_AllowExpandedDims = false;
440
surmeh01bceff2f2018-03-29 16:29:27 +0100441 LayerGuid m_Guid;
telsoa01c577f2c2018-08-31 09:22:23 +0100442
443 std::list<std::string> m_RelatedLayerNames;
Finn Williamsf24effa2020-07-03 10:12:03 +0100444
Jim Flynne4665962022-01-31 16:08:53 +0000445 /// returned by layers which have no parameters associated with them.
446 /// has to be a member as it is returned as a const reference
447 /// declared static so that there is only ever one of them in memory
Rob Hughesd0b4aa92022-02-09 11:24:25 +0000448 ARMNN_DLLEXPORT static NullDescriptor m_NullDescriptor;
telsoa014fcda012018-03-09 14:13:49 +0000449};
450
telsoa01c577f2c2018-08-31 09:22:23 +0100451// A layer user-provided data can be bound to (e.g. inputs, outputs).
telsoa014fcda012018-03-09 14:13:49 +0000452class BindableLayer : public Layer
453{
454public:
455 BindableLayer(unsigned int numInputSlots,
456 unsigned int numOutputSlots,
457 LayerType type,
458 const char* name,
459 LayerBindingId id)
460 : Layer(numInputSlots, numOutputSlots, type, name)
461 , m_Id(id)
462 {
463 }
464
465 LayerBindingId GetBindingId() const { return m_Id; };
466
Finn Williamsb454c5c2021-02-09 15:56:23 +0000467 void ExecuteStrategy(IStrategy& strategy) const override
468 {
469 strategy.ExecuteStrategy(this, BaseDescriptor(), {}, GetName(), GetBindingId());
470 }
471
telsoa014fcda012018-03-09 14:13:49 +0000472protected:
473 ~BindableLayer() = default;
474
475private:
476 LayerBindingId m_Id;
477};
478
Nikhil Raj4d2eec02022-05-30 11:08:52 +0100479} //namespace armnn