blob: ecfa1d9a5bc0173e5b72ebbf9ba74e1b669a3642 [file] [log] [blame]
telsoa014fcda012018-03-09 14:13:49 +00001//
Finn Williamsf24effa2020-07-03 10:12:03 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
David Beckecb56cd2018-09-05 12:52:57 +01003// SPDX-License-Identifier: MIT
telsoa014fcda012018-03-09 14:13:49 +00004//
5#pragma once
6
7#include "LayerFwd.hpp"
8
Matteo Martincighe5b8eb92019-11-28 15:45:42 +00009#include <armnn/backends/ITensorHandleFactory.hpp>
10#include <OutputHandler.hpp>
Derek Lamberti84da38b2019-06-13 11:40:08 +010011#include <backendsCommon/TensorHandleFactoryRegistry.hpp>
Aron Virginas-Tarc9cc8042018-11-01 16:15:57 +000012#include <backendsCommon/WorkloadDataCollector.hpp>
13#include <backendsCommon/WorkloadInfo.hpp>
telsoa014fcda012018-03-09 14:13:49 +000014#include "InternalTypes.hpp"
surmeh01bceff2f2018-03-29 16:29:27 +010015#include "SerializeLayerParameters.hpp"
telsoa014fcda012018-03-09 14:13:49 +000016
17#include <armnn/Types.hpp>
18#include <armnn/Tensor.hpp>
19#include <armnn/INetwork.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000020#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010021#include <armnn/utility/NumericCast.hpp>
Jan Eilersbb446e52020-04-02 13:56:54 +010022#include <armnn/utility/PolymorphicDowncast.hpp>
telsoa014fcda012018-03-09 14:13:49 +000023
24#include <algorithm>
Jan Eilersbb446e52020-04-02 13:56:54 +010025#include <functional>
26#include <iostream>
27#include <list>
telsoa014fcda012018-03-09 14:13:49 +000028#include <memory>
29#include <string>
30#include <vector>
Colm Donelan0c479742021-12-10 12:43:54 +000031#include <armnn/backends/WorkloadData.hpp>
telsoa014fcda012018-03-09 14:13:49 +000032
telsoa014fcda012018-03-09 14:13:49 +000033namespace armnn
34{
35
36class IWorkload;
37class IWorkloadFactory;
38class Layer;
39class Graph;
40
/// An input connection point on a Layer. An InputSlot holds at most one
/// connection to an OutputSlot; the connection is stored as a raw pointer
/// because both slots are owned by their layers, which manage lifetime.
class InputSlot final : public IInputSlot
{
public:
    /// @param owner - Layer this slot belongs to; stored by reference, so it must outlive the slot.
    /// @param slotIndex - Fixed index of this slot within the owning layer's input slots.
    explicit InputSlot(Layer& owner, unsigned int slotIndex)
    : m_OwningLayer(owner)
    , m_Connection(nullptr)
    , m_SlotIndex(slotIndex)
    {}

    /// Disconnects from the connected OutputSlot (if any).
    /// Defined inline further down, once OutputSlot has been declared.
    ~InputSlot();

    Layer& GetOwningLayer() const { return m_OwningLayer; }
    unsigned int GetSlotIndex() const { return m_SlotIndex; }

    /// Returns the connected OutputSlot, or nullptr when unconnected.
    const OutputSlot* GetConnectedOutputSlot() const { return m_Connection; }
    OutputSlot* GetConnectedOutputSlot() { return m_Connection; }

    /// Links the slot to an output slot or breaks an existing link if passing nullptr.
    /// @throws InvalidArgumentException if the slot is already connected and source is not nullptr.
    void SetConnection(OutputSlot* source)
    {
        if (m_Connection != nullptr && source != nullptr)
        {
            throw InvalidArgumentException("Tried to connect an output slot to an input slot, "
                "but the latter already has a connection");
        }
        m_Connection = source;
    }

    // Inserts single-output existing layer at this point in the graph.
    void Insert(Layer& layer);

    // IInputSlot

    const IOutputSlot* GetConnection() const override;
    IOutputSlot* GetConnection() override;

private:
    Layer& m_OwningLayer;           // Layer that owns this slot.
    OutputSlot* m_Connection;       // Connected output slot; nullptr when unconnected.
    const unsigned int m_SlotIndex; // Position within the owning layer; fixed at construction.
};
82
/// An output connection point on a Layer. An OutputSlot fans out to any number
/// of InputSlots and owns per-connection edge strategies plus the id of the
/// tensor-handle factory used to materialize its output tensor.
class OutputSlot final : public IOutputSlot
{
public:
    /// @param owner - Layer this slot belongs to; stored by reference, so it must outlive the slot.
    /// @param outputHandler - Handler (owned by the layer) describing this slot's output tensor.
    explicit OutputSlot(Layer& owner, OutputHandler& outputHandler)
    : m_OwningLayer(owner)
    , m_OutputHandler(outputHandler)
    , m_TensorHandleFactoryId(ITensorHandleFactory::LegacyFactoryId)
    {}

    // Non-copyable and non-move-assignable (reference members); move construction
    // is kept so slots can live in a std::vector.
    OutputSlot(const OutputSlot&) = delete;
    OutputSlot& operator=(const OutputSlot&) = delete;
    OutputSlot& operator=(OutputSlot&&) = delete;

    OutputSlot(OutputSlot&&) = default;

    /// Breaks every outgoing connection before destruction.
    ~OutputSlot()
    {
        try
        {
            // Coverity fix: DisconnectAll() may throw uncaught exceptions.
            DisconnectAll();
        }
        catch (const std::exception& e)
        {
            // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
            // exception of type std::length_error.
            // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
            std::cerr << "WARNING: An error has occurred when disconnecting all output slots: "
                      << e.what() << std::endl;
        }
    }

    Layer& GetOwningLayer() const { return m_OwningLayer; }

    const IConnectableLayer& GetOwningIConnectableLayer() const override;

    LayerGuid GetOwningLayerGuid() const override;

    const OutputHandler& GetOutputHandler() const { return m_OutputHandler; }
    OutputHandler& GetOutputHandler() { return m_OutputHandler; }

    int Connect(InputSlot& destination);
    void Disconnect(InputSlot& slot);

    const std::vector<InputSlot*>& GetConnections() const { return m_Connections; }
    /// One EdgeStrategy per connection, parallel to GetConnections().
    const std::vector<EdgeStrategy>& GetEdgeStrategies() const { return m_EdgeStrategies; }

    bool ValidateTensorShape(const TensorShape& shape) const;

    // Disconnect all connections.
    void DisconnectAll();

    /// Moves all connections to another OutputSlot.
    void MoveAllConnections(OutputSlot& destination);

    // IOutputSlot

    unsigned int GetNumConnections() const override { return armnn::numeric_cast<unsigned int>(m_Connections.size()); }
    const InputSlot* GetConnection(unsigned int index) const override;
    InputSlot* GetConnection(unsigned int index) override;

    void SetTensorInfo(const TensorInfo& tensorInfo) override;
    const TensorInfo& GetTensorInfo() const override;
    bool IsTensorInfoSet() const override;

    // IOutputSlot overloads forward to the concrete InputSlot overloads above.
    int Connect(IInputSlot& destination) override
    {
        return Connect(*PolymorphicDowncast<InputSlot*>(&destination));
    }

    void Disconnect(IInputSlot& slot) override
    {
        return Disconnect(*PolymorphicDowncast<InputSlot*>(&slot));
    }

    unsigned int CalculateIndexOnOwner() const override;

    bool operator==(const OutputSlot& other) const;

    void SetTensorHandleFactory(const ITensorHandleFactory::FactoryId& id);
    ITensorHandleFactory::FactoryId GetTensorHandleFactoryId() const;

    void SetEdgeStrategy(unsigned int connectionIndex, EdgeStrategy strategy);
    EdgeStrategy GetEdgeStrategyForConnection(unsigned int connectionIdx) const;

private:
    void ValidateConnectionIndex(unsigned int index) const;

    Layer& m_OwningLayer;                   // Layer that owns this slot.
    OutputHandler& m_OutputHandler;         // Describes the tensor this slot produces.
    std::vector<InputSlot*> m_Connections;  // Non-owning; input slots unregister on disconnect.

    ITensorHandleFactory::FactoryId m_TensorHandleFactoryId;
    std::vector<EdgeStrategy> m_EdgeStrategies; // Parallel to m_Connections.
};
178
telsoa01c577f2c2018-08-31 09:22:23 +0100179// InputSlot inlines that need OutputSlot declaration.
telsoa014fcda012018-03-09 14:13:49 +0000180
181inline InputSlot::~InputSlot()
182{
183 if (m_Connection != nullptr)
184 {
surmeh013537c2c2018-05-18 16:31:43 +0100185 try
186 {
187 // Coverity fix: Disconnect() may throw uncaught exceptions.
188 m_Connection->Disconnect(*this);
189 }
190 catch (const std::exception& e)
191 {
192 // Coverity fix: BOOST_LOG_TRIVIAL (typically used to report errors) may throw an
193 // exception of type std::length_error.
194 // Using stderr instead in this context as there is no point in nesting try-catch blocks here.
195 std::cerr << "WARNING: An error has occurred when disconnecting an input slot: "
196 << e.what() << std::endl;
197 }
telsoa014fcda012018-03-09 14:13:49 +0000198 }
199}
200
201inline const IOutputSlot* InputSlot::GetConnection() const { return GetConnectedOutputSlot(); }
202inline IOutputSlot* InputSlot::GetConnection() { return GetConnectedOutputSlot(); }
203
telsoa01c577f2c2018-08-31 09:22:23 +0100204
James Conroy1f58f032021-04-27 17:13:27 +0100205class ScopedTensorHandle;
telsoa01c577f2c2018-08-31 09:22:23 +0100206
telsoa014fcda012018-03-09 14:13:49 +0000207// Base layer class
208
209using LayerPriority = unsigned int;
Keith Davisdf04d232020-10-23 17:20:05 +0100210using AdditionalInfoObjectPtr = std::shared_ptr<void>;
telsoa014fcda012018-03-09 14:13:49 +0000211
212class Layer : public IConnectableLayer
213{
214public:
telsoa01c577f2c2018-08-31 09:22:23 +0100215 /// @param name - Optional name for the layer (may be nullptr).
telsoa014fcda012018-03-09 14:13:49 +0000216 Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, const char* name);
Derek Lamberti0cff1632018-09-18 16:02:25 +0100217 Layer(unsigned int numInputSlots, unsigned int numOutputSlots, LayerType type, DataLayout layout, const char* name);
telsoa014fcda012018-03-09 14:13:49 +0000218
Finn Williamsb454c5c2021-02-09 15:56:23 +0000219 void ExecuteStrategy(IStrategy& strategy) const override;
220
221
telsoa014fcda012018-03-09 14:13:49 +0000222 const std::string& GetNameStr() const
223 {
224 return m_LayerName;
225 }
226
227 const OutputHandler& GetOutputHandler(unsigned int i = 0) const
228 {
229 return m_OutputHandlers[i];
230 }
231
232 OutputHandler& GetOutputHandler(unsigned int i = 0)
233 {
234 return const_cast<OutputHandler&>(const_cast<const Layer*>(this)->GetOutputHandler(i));
235 }
236
Finn Williamsf24effa2020-07-03 10:12:03 +0100237 ShapeInferenceMethod GetShapeInferenceMethod() const { return m_ShapeInferenceMethod; };
238
telsoa014fcda012018-03-09 14:13:49 +0000239 const std::vector<InputSlot>& GetInputSlots() const { return m_InputSlots; }
240 const std::vector<OutputSlot>& GetOutputSlots() const { return m_OutputSlots; }
241
telsoa01c577f2c2018-08-31 09:22:23 +0100242 // Allows non-const access to input slots, but don't expose vector (vector size is fixed at layer construction).
telsoa014fcda012018-03-09 14:13:49 +0000243 std::vector<InputSlot>::iterator BeginInputSlots() { return m_InputSlots.begin(); }
244 std::vector<InputSlot>::iterator EndInputSlots() { return m_InputSlots.end(); }
245
telsoa01c577f2c2018-08-31 09:22:23 +0100246 // Allows non-const access to output slots, but don't expose vector (vector size is fixed at layer construction).
telsoa014fcda012018-03-09 14:13:49 +0000247 std::vector<OutputSlot>::iterator BeginOutputSlots() { return m_OutputSlots.begin(); }
248 std::vector<OutputSlot>::iterator EndOutputSlots() { return m_OutputSlots.end(); }
249
telsoa01c577f2c2018-08-31 09:22:23 +0100250 // Checks whether the outputs of this layer don't have any connection.
telsoa014fcda012018-03-09 14:13:49 +0000251 bool IsOutputUnconnected()
252 {
253 unsigned int numConnections = 0;
254
255 for (auto&& output : GetOutputSlots())
256 {
257 numConnections += output.GetNumConnections();
258 }
259
260 return (GetNumOutputSlots() > 0) && (numConnections == 0);
261 }
262
telsoa01c577f2c2018-08-31 09:22:23 +0100263 // Used for sorting.
telsoa014fcda012018-03-09 14:13:49 +0000264 void ResetPriority() const;
265 LayerPriority GetPriority() const;
266
Finn Williamsb454c5c2021-02-09 15:56:23 +0000267 LayerType GetType() const override { return m_Type; }
telsoa014fcda012018-03-09 14:13:49 +0000268
269 DataType GetDataType() const;
270
David Beck33f0ae02018-10-18 15:13:56 +0100271 const BackendId& GetBackendId() const { return m_BackendId; }
272 void SetBackendId(const BackendId& id) { m_BackendId = id; }
telsoa014fcda012018-03-09 14:13:49 +0000273
274 // Virtuals
275
Derek Lamberti94a88d22019-12-10 21:12:59 +0000276 virtual std::unique_ptr<IWorkload> CreateWorkload(const IWorkloadFactory& factory) const = 0;
telsoa014fcda012018-03-09 14:13:49 +0000277
David Monahan3fb7e102019-08-20 11:25:29 +0100278 virtual void CreateTensorHandles(const TensorHandleFactoryRegistry& registry,
279 const IWorkloadFactory& factory,
Narumol Prangnawarate5f0b242021-05-07 17:52:36 +0100280 const bool IsMemoryManaged = true);
telsoa014fcda012018-03-09 14:13:49 +0000281
telsoa01c577f2c2018-08-31 09:22:23 +0100282 /// Creates a dynamically-allocated copy of this layer.
283 /// @param graph - The Graph into which this Layer is being cloned.
telsoa014fcda012018-03-09 14:13:49 +0000284 virtual Layer* Clone(Graph& graph) const = 0;
285
telsoa01c577f2c2018-08-31 09:22:23 +0100286 void VerifyLayerConnections(unsigned int expectedConnections, const CheckLocation& location) const;
287
Finn Williamsf24effa2020-07-03 10:12:03 +0100288 virtual void ValidateTensorShapesFromInputs() = 0;
telsoa014fcda012018-03-09 14:13:49 +0000289
telsoa01c577f2c2018-08-31 09:22:23 +0100290 std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const override;
291
292 /// Helper to serialize the layer parameters to string.
293 /// (currently used in DotSerializer and company).
Andre Ghattas23ae2ea2019-08-07 12:18:38 +0100294 virtual void SerializeLayerParameters(ParameterStringifyFunction& fn) const;
surmeh01bceff2f2018-03-29 16:29:27 +0100295
telsoa01c577f2c2018-08-31 09:22:23 +0100296 // Free up the constant source data
297 virtual void ReleaseConstantData();
298
299 template<typename Op>
300 void OperateOnConstantTensors(Op op)
301 {
302 for (auto constant : GetConstantTensorsByRef())
303 {
304 if (constant.get())
305 {
306 op(constant);
307 }
308 }
309 };
310
telsoa014fcda012018-03-09 14:13:49 +0000311 // IConnectableLayer
312
313 const char* GetName() const override { return m_LayerName.c_str(); }
314
315 unsigned int GetNumInputSlots() const override { return static_cast<unsigned int>(m_InputSlots.size()); }
316 unsigned int GetNumOutputSlots() const override { return static_cast<unsigned int>(m_OutputSlots.size()); }
317
318 const InputSlot& GetInputSlot(unsigned int index) const override { return m_InputSlots.at(index); }
319 InputSlot& GetInputSlot(unsigned int index) override { return m_InputSlots.at(index); }
320 const OutputSlot& GetOutputSlot(unsigned int index = 0) const override { return m_OutputSlots.at(index); }
321 OutputSlot& GetOutputSlot(unsigned int index = 0) override { return m_OutputSlots.at(index); }
322
surmeh01bceff2f2018-03-29 16:29:27 +0100323 void SetGuid(LayerGuid guid) { m_Guid = guid; }
324 LayerGuid GetGuid() const final { return m_Guid; }
325
telsoa01c577f2c2018-08-31 09:22:23 +0100326 void AddRelatedLayerName(const std::string layerName) { m_RelatedLayerNames.emplace_back(layerName); }
327
328 const std::list<std::string>& GetRelatedLayerNames() { return m_RelatedLayerNames; }
329
Derek Lamberti8106b7c2019-05-07 21:33:30 +0100330 virtual void Reparent(Graph& dest, std::list<Layer*>::const_iterator iterator) = 0;
Derek Lamberti4a9e24b2020-01-03 16:53:38 +0000331
332 void BackendSelectionHint(Optional<BackendId> backend) final
333 {
334 m_BackendHint = backend;
335 }
336 Optional<BackendId> GetBackendHint() const { return m_BackendHint; }
337
Finn Williamsf24effa2020-07-03 10:12:03 +0100338 void SetShapeInferenceMethod(ShapeInferenceMethod shapeInferenceMethod)
339 {
340 m_ShapeInferenceMethod = shapeInferenceMethod;
341 }
342
Keith Davisdf04d232020-10-23 17:20:05 +0100343 template<typename T>
Mike Kelly7a0efa52020-11-17 13:55:01 +0000344 std::shared_ptr<T> GetAdditionalInformation() const
Keith Davisdf04d232020-10-23 17:20:05 +0100345 {
346 return std::static_pointer_cast<T>(m_AdditionalInfoObject);
347 }
348
349 void SetAdditionalInfoForObject(const AdditionalInfoObjectPtr& additionalInfo)
350 {
351 m_AdditionalInfoObject = additionalInfo;
352 }
353
Jim Flynne4665962022-01-31 16:08:53 +0000354 virtual const BaseDescriptor& GetParameters() const override { return m_NullDescriptor; }
355
telsoa014fcda012018-03-09 14:13:49 +0000356protected:
telsoa01c577f2c2018-08-31 09:22:23 +0100357 // Graph needs access to the virtual destructor.
telsoa014fcda012018-03-09 14:13:49 +0000358 friend class Graph;
359 virtual ~Layer() = default;
360
361 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000362 void CollectQueueDescriptorInputs(QueueDescriptor& descriptor, WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000363 {
364 WorkloadDataCollector dataCollector(descriptor.m_Inputs, info.m_InputTensorInfos);
Derek Lamberti94a88d22019-12-10 21:12:59 +0000365 CollectWorkloadInputs(dataCollector);
telsoa014fcda012018-03-09 14:13:49 +0000366 }
367
368 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000369 void CollectQueueDescriptorOutputs(QueueDescriptor& descriptor, WorkloadInfo& info) const
telsoa014fcda012018-03-09 14:13:49 +0000370 {
371 WorkloadDataCollector dataCollector(descriptor.m_Outputs, info.m_OutputTensorInfos);
Derek Lamberti94a88d22019-12-10 21:12:59 +0000372 CollectWorkloadOutputs(dataCollector);
telsoa014fcda012018-03-09 14:13:49 +0000373 }
374
Finn Williams87d0bda2020-07-03 10:12:03 +0100375 void ValidateAndCopyShape(const TensorShape& outputShape,
376 const TensorShape& inferredShape,
377 const ShapeInferenceMethod shapeInferenceMethod,
378 const std::string& layerName,
379 const unsigned int outputSlotIndex = 0);
380
381 void VerifyShapeInferenceType(const TensorShape& outputShape, ShapeInferenceMethod shapeInferenceMethod);
382
telsoa01c577f2c2018-08-31 09:22:23 +0100383 /// Helper function to reduce duplication in *Layer::CreateWorkload.
telsoa014fcda012018-03-09 14:13:49 +0000384 template <typename QueueDescriptor>
Derek Lamberti94a88d22019-12-10 21:12:59 +0000385 WorkloadInfo PrepInfoAndDesc(QueueDescriptor& descriptor) const
telsoa014fcda012018-03-09 14:13:49 +0000386 {
387 WorkloadInfo info;
Derek Lamberti94a88d22019-12-10 21:12:59 +0000388 CollectQueueDescriptorInputs(descriptor, info);
389 CollectQueueDescriptorOutputs(descriptor, info);
telsoa014fcda012018-03-09 14:13:49 +0000390 return info;
391 }
392
393 template <typename LayerType, typename ... Params>
394 LayerType* CloneBase(Graph& graph, Params&& ... params) const;
395
telsoa01c577f2c2018-08-31 09:22:23 +0100396 // Retrieve the Handles to the constants
James Conroy1f58f032021-04-27 17:13:27 +0100397 using ConstantTensors = std::vector<std::reference_wrapper<std::shared_ptr<ConstTensorHandle>>>;
telsoa01c577f2c2018-08-31 09:22:23 +0100398 virtual ConstantTensors GetConstantTensorsByRef() {return ConstantTensors(); };
399
Keith Davisdf04d232020-10-23 17:20:05 +0100400 // "Blob"
401 AdditionalInfoObjectPtr m_AdditionalInfoObject;
402
403 // Utility method to set a pointer in the queueDescriptor to the "blob" location in the layer
404 void SetAdditionalInfo(QueueDescriptor& descriptor) const;
405
telsoa014fcda012018-03-09 14:13:49 +0000406private:
Derek Lamberti94a88d22019-12-10 21:12:59 +0000407 void CollectWorkloadInputs(WorkloadDataCollector& dataCollector) const;
408 void CollectWorkloadOutputs(WorkloadDataCollector& dataCollector) const;
telsoa014fcda012018-03-09 14:13:49 +0000409
410protected:
411 std::vector<OutputHandler> m_OutputHandlers;
Finn Williamsf24effa2020-07-03 10:12:03 +0100412 ShapeInferenceMethod m_ShapeInferenceMethod;
telsoa014fcda012018-03-09 14:13:49 +0000413
414private:
415 const std::string m_LayerName;
416
417 std::vector<InputSlot> m_InputSlots;
418 std::vector<OutputSlot> m_OutputSlots;
419
420 const LayerType m_Type;
David Beck33f0ae02018-10-18 15:13:56 +0100421 BackendId m_BackendId;
Derek Lamberti4a9e24b2020-01-03 16:53:38 +0000422 Optional<BackendId> m_BackendHint;
telsoa014fcda012018-03-09 14:13:49 +0000423
telsoa01c577f2c2018-08-31 09:22:23 +0100424 /// Used for sorting.
telsoa014fcda012018-03-09 14:13:49 +0000425 mutable LayerPriority m_Priority = 0;
426 mutable bool m_Visiting = false;
surmeh01bceff2f2018-03-29 16:29:27 +0100427
428 LayerGuid m_Guid;
telsoa01c577f2c2018-08-31 09:22:23 +0100429
430 std::list<std::string> m_RelatedLayerNames;
Finn Williamsf24effa2020-07-03 10:12:03 +0100431
Jim Flynne4665962022-01-31 16:08:53 +0000432 /// returned by layers which have no parameters associated with them.
433 /// has to be a member as it is returned as a const reference
434 /// declared static so that there is only ever one of them in memory
435 static NullDescriptor m_NullDescriptor;
telsoa014fcda012018-03-09 14:13:49 +0000436};
437
telsoa01c577f2c2018-08-31 09:22:23 +0100438// A layer user-provided data can be bound to (e.g. inputs, outputs).
telsoa014fcda012018-03-09 14:13:49 +0000439class BindableLayer : public Layer
440{
441public:
442 BindableLayer(unsigned int numInputSlots,
443 unsigned int numOutputSlots,
444 LayerType type,
445 const char* name,
446 LayerBindingId id)
447 : Layer(numInputSlots, numOutputSlots, type, name)
448 , m_Id(id)
449 {
450 }
451
452 LayerBindingId GetBindingId() const { return m_Id; };
453
Finn Williamsb454c5c2021-02-09 15:56:23 +0000454 void ExecuteStrategy(IStrategy& strategy) const override
455 {
456 strategy.ExecuteStrategy(this, BaseDescriptor(), {}, GetName(), GetBindingId());
457 }
458
telsoa014fcda012018-03-09 14:13:49 +0000459protected:
460 ~BindableLayer() = default;
461
462private:
463 LayerBindingId m_Id;
464};
465
466}