Laurent Carlier | 749294b | 2020-06-01 09:03:17 +0100 | [diff] [blame] | 1 | // |
Teresa Charlin | 5266473 | 2020-06-29 16:27:03 +0100 | [diff] [blame] | 2 | // Copyright © 2017 Arm Ltd and Contributors. All rights reserved. |
David Beck | ecb56cd | 2018-09-05 12:52:57 +0100 | [diff] [blame] | 3 | // SPDX-License-Identifier: MIT |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 4 | // |
| 5 | #pragma once |
| 6 | |
Finn Williams | f24effa | 2020-07-03 10:12:03 +0100 | [diff] [blame] | 7 | #include <armnn/BackendOptions.hpp> |
Jim Flynn | 906f946 | 2019-05-10 13:55:21 +0100 | [diff] [blame] | 8 | #include <armnn/Deprecated.hpp> |
David Beck | f0b4845 | 2018-10-19 15:20:56 +0100 | [diff] [blame] | 9 | #include <armnn/DescriptorsFwd.hpp> |
jimfly01 | e9e7bfd | 2019-01-24 22:29:33 +0000 | [diff] [blame] | 10 | #include <armnn/ILayerVisitor.hpp> |
Finn Williams | b454c5c | 2021-02-09 15:56:23 +0000 | [diff] [blame] | 11 | #include <armnn/IStrategy.hpp> |
Matthew Bentham | 313e1c8 | 2019-03-25 17:37:47 +0000 | [diff] [blame] | 12 | #include <armnn/NetworkFwd.hpp> |
| 13 | #include <armnn/Optional.hpp> |
| 14 | #include <armnn/TensorFwd.hpp> |
Jan Eilers | 6a71bb5 | 2021-10-26 17:41:18 +0100 | [diff] [blame] | 15 | #include <armnn/Logging.hpp> |
Nikhil Raj | 2e24175 | 2022-02-01 16:42:15 +0000 | [diff] [blame] | 16 | #include <armnn/backends/TensorHandle.hpp> |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 17 | |
| 18 | #include <memory> |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 19 | #include <vector> |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 20 | |
| 21 | namespace armnn |
| 22 | { |
/// @brief An input connection slot for a layer.
/// The input slot can be connected to an output slot of the preceding layer in the graph.
/// Only one connection to the input slot is allowed.
class IInputSlot
{
public:
    /// Returns the output slot this input slot is connected to (const view).
    virtual const IOutputSlot* GetConnection() const = 0;
    /// Returns the output slot this input slot is connected to.
    virtual IOutputSlot* GetConnection() = 0;
    /// Returns the layer that owns this input slot.
    virtual const IConnectableLayer& GetOwningIConnectableLayer() const = 0;

protected:
    /// Not user deletable.
    ~IInputSlot() {}
};
| 37 | |
| 38 | /// @brief An output connection slot for a layer. |
| 39 | /// The output slot may be connected to 1 or more input slots of subsequent layers in the graph. |
| 40 | class IOutputSlot |
| 41 | { |
| 42 | public: |
| 43 | virtual unsigned int GetNumConnections() const = 0; |
| 44 | virtual const IInputSlot* GetConnection(unsigned int index) const = 0; |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 45 | virtual IInputSlot* GetConnection(unsigned int outputindex) = 0; |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 46 | |
| 47 | virtual void SetTensorInfo(const TensorInfo& tensorInfo) = 0; |
| 48 | virtual const TensorInfo& GetTensorInfo() const = 0; |
| 49 | virtual bool IsTensorInfoSet() const = 0; |
| 50 | |
| 51 | virtual int Connect(IInputSlot& destination) = 0; |
| 52 | virtual void Disconnect(IInputSlot& slot) = 0; |
| 53 | |
Mike Kelly | 8c1701a | 2019-02-11 17:01:27 +0000 | [diff] [blame] | 54 | virtual unsigned int CalculateIndexOnOwner() const = 0; |
| 55 | |
| 56 | virtual LayerGuid GetOwningLayerGuid() const = 0; |
| 57 | |
Francis Murtagh | 56ccf68 | 2021-12-13 18:48:12 +0000 | [diff] [blame] | 58 | virtual const IConnectableLayer& GetOwningIConnectableLayer() const = 0; |
| 59 | |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 60 | protected: |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 61 | /// Not user deletable. |
| 62 | ~IOutputSlot() {} |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 63 | }; |
| 64 | |
/// @brief Interface for a layer that is connectable to other layers via InputSlots and OutputSlots.
class IConnectableLayer
{
public:
    /// Returns the name of the layer
    virtual const char* GetName() const = 0;

    /// Returns the number of connectable input slots
    virtual unsigned int GetNumInputSlots() const = 0;

    /// Returns the number of connectable output slots
    virtual unsigned int GetNumOutputSlots() const = 0;

    /// Get a const input slot handle by slot index
    virtual const IInputSlot& GetInputSlot(unsigned int index) const = 0;

    /// Get the input slot handle by slot index
    virtual IInputSlot& GetInputSlot(unsigned int index) = 0;

    /// Get the const output slot handle by slot index
    virtual const IOutputSlot& GetOutputSlot(unsigned int index) const = 0;

    /// Get the output slot handle by slot index
    virtual IOutputSlot& GetOutputSlot(unsigned int index) = 0;

    /// Infer the shape of the output(s) based on the provided input shape(s)
    virtual std::vector<TensorShape> InferOutputShapes(const std::vector<TensorShape>& inputShapes) const = 0;

    /// Returns the unique id of the layer
    virtual LayerGuid GetGuid() const = 0;

    // The Accept function needs to be wrapped in a no warn macro to avoid deprecation warnings from
    // the deprecated ILayerVisitor which is used in the function.
    ARMNN_NO_DEPRECATE_WARN_BEGIN
    /// Apply a visitor to this layer
    ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Accept is deprecated. The ILayerVisitor that works in conjunction with this "
                                      "Accept function is deprecated. Use IStrategy in combination with "
                                      "ExecuteStrategy instead, which is an ABI/API stable version of the "
                                      "visitor pattern.",
                                      "22.05")
    virtual void Accept(ILayerVisitor& visitor) const = 0;
    ARMNN_NO_DEPRECATE_WARN_END

    /// Apply a strategy to this layer. ABI/API-stable replacement for the deprecated Accept/ILayerVisitor pair.
    virtual void ExecuteStrategy(IStrategy& strategy) const = 0;

    /// Provide a hint for the optimizer as to which backend to prefer for this layer
    virtual void BackendSelectionHint(Optional<BackendId> backend) = 0;

    /// Returns the armnn::LayerType of this layer
    virtual LayerType GetType() const = 0;

    /// If the layer has a descriptor return it.
    /// The base descriptor can then be cast to the correct descriptor class.
    /// If the layer has no associated descriptor a struct of type NullDescriptor will be returned.
    /// Note: NullDescriptors can be detected because they return true when
    /// the BaseDescriptor IsNull function is invoked.
    virtual const BaseDescriptor& GetParameters() const = 0;

    /// Mutable references to the layer's constant tensor handles (e.g. weights/biases).
    using ConstantTensors = std::vector<std::reference_wrapper<std::shared_ptr<ConstTensorHandle>>>;

    /// Returns ConstantTensors of this Layer if it has any, otherwise returns empty vector.
    virtual ConstantTensors GetConstantTensorsByRef() = 0;

protected:
    /// Objects are not deletable via the handle
    ~IConnectableLayer() {}
};
| 133 | |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 134 | |
Jan Eilers | b1c62f1 | 2021-10-26 14:56:47 +0100 | [diff] [blame] | 135 | /// ArmNN performs an optimization on each model/network before it gets loaded for execution. OptimizerOptions provides |
| 136 | /// a set of features that allows the user to customize this optimization on a per model basis. |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 137 | struct OptimizerOptions |
| 138 | { |
Matteo Martincigh | 4912402 | 2019-01-11 13:25:59 +0000 | [diff] [blame] | 139 | OptimizerOptions() |
| 140 | : m_ReduceFp32ToFp16(false) |
| 141 | , m_Debug(false) |
Narumol Prangnawarat | bc7ffb5 | 2020-03-20 15:01:01 +0000 | [diff] [blame] | 142 | , m_ReduceFp32ToBf16(false) |
Teresa Charlin | cdc0149 | 2020-06-09 18:00:20 +0100 | [diff] [blame] | 143 | , m_shapeInferenceMethod(armnn::ShapeInferenceMethod::ValidateOnly) |
Narumol Prangnawarat | a2493a0 | 2020-08-19 14:39:07 +0100 | [diff] [blame] | 144 | , m_ImportEnabled(false) |
Sadik Armagan | 045f6be | 2020-09-10 13:37:32 +0100 | [diff] [blame] | 145 | , m_ModelOptions() |
Derek Lamberti | f1e0ad3 | 2021-10-13 18:02:25 +0100 | [diff] [blame] | 146 | , m_ProfilingEnabled(false) |
keidav01 | 738c2e6 | 2018-12-11 16:14:20 +0000 | [diff] [blame] | 147 | {} |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 148 | |
Sadik Armagan | 045f6be | 2020-09-10 13:37:32 +0100 | [diff] [blame] | 149 | OptimizerOptions(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16, bool importEnabled, |
James Conroy | a0f8b15 | 2022-06-21 11:31:47 +0000 | [diff] [blame^] | 150 | ModelOptions modelOptions = {}) |
Narumol Prangnawarat | ea063df | 2020-08-21 10:03:49 +0100 | [diff] [blame] | 151 | : m_ReduceFp32ToFp16(reduceFp32ToFp16) |
| 152 | , m_Debug(debug) |
| 153 | , m_ReduceFp32ToBf16(reduceFp32ToBf16) |
| 154 | , m_shapeInferenceMethod(armnn::ShapeInferenceMethod::ValidateOnly) |
| 155 | , m_ImportEnabled(importEnabled) |
Sadik Armagan | 045f6be | 2020-09-10 13:37:32 +0100 | [diff] [blame] | 156 | , m_ModelOptions(modelOptions) |
Derek Lamberti | f1e0ad3 | 2021-10-13 18:02:25 +0100 | [diff] [blame] | 157 | , m_ProfilingEnabled(false) |
Narumol Prangnawarat | ea063df | 2020-08-21 10:03:49 +0100 | [diff] [blame] | 158 | { |
| 159 | if (m_ReduceFp32ToFp16 && m_ReduceFp32ToBf16) |
| 160 | { |
| 161 | throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time."); |
| 162 | } |
| 163 | } |
| 164 | |
Teresa Charlin | cdc0149 | 2020-06-09 18:00:20 +0100 | [diff] [blame] | 165 | OptimizerOptions(bool reduceFp32ToFp16, bool debug, bool reduceFp32ToBf16 = false, |
Narumol Prangnawarat | a2493a0 | 2020-08-19 14:39:07 +0100 | [diff] [blame] | 166 | ShapeInferenceMethod shapeInferenceMethod = armnn::ShapeInferenceMethod::ValidateOnly, |
James Conroy | a0f8b15 | 2022-06-21 11:31:47 +0000 | [diff] [blame^] | 167 | bool importEnabled = false, ModelOptions modelOptions = {}) |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 168 | : m_ReduceFp32ToFp16(reduceFp32ToFp16) |
keidav01 | 738c2e6 | 2018-12-11 16:14:20 +0000 | [diff] [blame] | 169 | , m_Debug(debug) |
Narumol Prangnawarat | bc7ffb5 | 2020-03-20 15:01:01 +0000 | [diff] [blame] | 170 | , m_ReduceFp32ToBf16(reduceFp32ToBf16) |
Teresa Charlin | cdc0149 | 2020-06-09 18:00:20 +0100 | [diff] [blame] | 171 | , m_shapeInferenceMethod(shapeInferenceMethod) |
Narumol Prangnawarat | a2493a0 | 2020-08-19 14:39:07 +0100 | [diff] [blame] | 172 | , m_ImportEnabled(importEnabled) |
Sadik Armagan | 045f6be | 2020-09-10 13:37:32 +0100 | [diff] [blame] | 173 | , m_ModelOptions(modelOptions) |
Derek Lamberti | f1e0ad3 | 2021-10-13 18:02:25 +0100 | [diff] [blame] | 174 | , m_ProfilingEnabled(false) |
Narumol Prangnawarat | bc7ffb5 | 2020-03-20 15:01:01 +0000 | [diff] [blame] | 175 | { |
| 176 | if (m_ReduceFp32ToFp16 && m_ReduceFp32ToBf16) |
| 177 | { |
| 178 | throw InvalidArgumentException("BFloat16 and Float16 optimization cannot be enabled at the same time."); |
| 179 | } |
| 180 | } |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 181 | |
Jan Eilers | 6a71bb5 | 2021-10-26 17:41:18 +0100 | [diff] [blame] | 182 | const std::string ToString() const |
| 183 | { |
| 184 | std::stringstream stream; |
| 185 | stream << "OptimizerOptions: \n"; |
| 186 | stream << "\tReduceFp32ToFp16: " << m_ReduceFp32ToFp16 << "\n"; |
| 187 | stream << "\tReduceFp32ToBf16: " << m_ReduceFp32ToBf16 << "\n"; |
Jan Eilers | 17d34da | 2021-12-08 16:15:12 +0000 | [diff] [blame] | 188 | stream << "\tDebug: " << m_Debug << "\n"; |
| 189 | stream << "\tShapeInferenceMethod: " << |
Jan Eilers | 6a71bb5 | 2021-10-26 17:41:18 +0100 | [diff] [blame] | 190 | (m_shapeInferenceMethod == ShapeInferenceMethod::ValidateOnly ? "ValidateOnly" : "InferAndValidate") << "\n"; |
| 191 | stream << "\tImportEnabled: " << m_ImportEnabled << "\n"; |
| 192 | stream << "\tProfilingEnabled: " << m_ProfilingEnabled << "\n"; |
| 193 | |
| 194 | stream << "\tModelOptions: \n"; |
| 195 | for (auto optionsGroup : m_ModelOptions) |
| 196 | { |
| 197 | for (size_t i=0; i < optionsGroup.GetOptionCount(); i++) |
| 198 | { |
| 199 | const armnn::BackendOptions::BackendOption option = optionsGroup.GetOption(i); |
Jan Eilers | 17d34da | 2021-12-08 16:15:12 +0000 | [diff] [blame] | 200 | stream << "\t\tBackend: " << optionsGroup.GetBackendId() << "\n" |
| 201 | << "\t\t\tOption: " << option.GetName() << "\n" |
| 202 | << "\t\t\tValue: " << std::string(option.GetValue().ToString()) << "\n"; |
Jan Eilers | 6a71bb5 | 2021-10-26 17:41:18 +0100 | [diff] [blame] | 203 | } |
| 204 | } |
| 205 | |
| 206 | return stream.str(); |
| 207 | } |
| 208 | |
Jan Eilers | b1c62f1 | 2021-10-26 14:56:47 +0100 | [diff] [blame] | 209 | /// Reduces all Fp32 operators in the model to Fp16 for faster processing. |
| 210 | /// @Note This feature works best if all operators of the model are in Fp32. ArmNN will add conversion layers |
| 211 | /// between layers that weren't in Fp32 in the first place or if the operator is not supported in Fp16. |
| 212 | /// The overhead of these conversions can lead to a slower overall performance if too many conversions are |
| 213 | /// required. |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 214 | bool m_ReduceFp32ToFp16; |
keidav01 | 738c2e6 | 2018-12-11 16:14:20 +0000 | [diff] [blame] | 215 | |
| 216 | // Add debug data for easier troubleshooting |
| 217 | bool m_Debug; |
Narumol Prangnawarat | bc7ffb5 | 2020-03-20 15:01:01 +0000 | [diff] [blame] | 218 | |
Jan Eilers | b1c62f1 | 2021-10-26 14:56:47 +0100 | [diff] [blame] | 219 | /// Reduces all Fp32 operators in the model to Bf16 for faster processing. |
| 220 | /// @Note This feature works best if all operators of the model are in Fp32. ArmNN will add conversion layers |
| 221 | /// between layers that weren't in Fp32 in the first place or if the operator is not supported in Bf16. |
| 222 | /// The overhead of these conversions can lead to a slower overall performance if too many conversions are |
| 223 | /// required. |
Narumol Prangnawarat | bc7ffb5 | 2020-03-20 15:01:01 +0000 | [diff] [blame] | 224 | bool m_ReduceFp32ToBf16; |
Teresa Charlin | cdc0149 | 2020-06-09 18:00:20 +0100 | [diff] [blame] | 225 | |
| 226 | // Infer output size when not available |
| 227 | ShapeInferenceMethod m_shapeInferenceMethod; |
Narumol Prangnawarat | a2493a0 | 2020-08-19 14:39:07 +0100 | [diff] [blame] | 228 | |
| 229 | // Enable Import |
| 230 | bool m_ImportEnabled; |
Sadik Armagan | 045f6be | 2020-09-10 13:37:32 +0100 | [diff] [blame] | 231 | |
| 232 | // Enable Model Options |
| 233 | ModelOptions m_ModelOptions; |
Derek Lamberti | f1e0ad3 | 2021-10-13 18:02:25 +0100 | [diff] [blame] | 234 | |
| 235 | // Enable profiling dump of the optimizer phase |
| 236 | bool m_ProfilingEnabled; |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 237 | }; |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 238 | |
class IWorkloadFactory;
class NetworkImpl;
// Owning pointer types with function-pointer deleters; destruction goes through the library's
// own Destroy function (see INetwork::Destroy below) rather than a direct delete.
using INetworkPtr = std::unique_ptr<INetwork, void(*)(INetwork* network)>;
using IOptimizedNetworkPtr = std::unique_ptr<IOptimizedNetwork, void(*)(IOptimizedNetwork* network)>;

// Type-erased owning handle for a backend's pre-compiled blob (payload type is backend-defined,
// hence void with a caller-supplied deleter).
using CompiledBlobDeleter = std::function<void(const void*)>;
using CompiledBlobPtr = std::unique_ptr<void, CompiledBlobDeleter>;
| 246 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 247 | /// Main network class which provides the interface for building up a neural network. |
| 248 | /// This object is subsequently required by the IRuntime::Load() method. |
| 249 | class INetwork |
| 250 | { |
| 251 | public: |
| 252 | static INetwork* CreateRaw(NetworkOptions networkOptions = {}); |
| 253 | static INetworkPtr Create(NetworkOptions networkOptions = {}); |
| 254 | static void Destroy(INetwork* network); |
| 255 | |
| 256 | Status PrintGraph(); |
| 257 | |
| 258 | /// Adds an input layer to the network. |
| 259 | /// @param id - User generated id to uniquely identify a particular input. The same id needs to be specified. |
| 260 | /// when passing the inputs to the IRuntime::EnqueueWorkload() function. |
| 261 | /// @param name - Optional name for the layer. |
| 262 | /// @return - Interface for configuring the layer. |
| 263 | IConnectableLayer* AddInputLayer(LayerBindingId id, const char* name = nullptr); |
| 264 | |
| 265 | /// Adds an ArgMinMax layer to the network. |
| 266 | /// @param desc - Parameters for the L2 normalization operation. |
| 267 | /// @param name - Optional name for the layer. |
| 268 | /// @return - Interface for configuring the layer. |
| 269 | IConnectableLayer* AddArgMinMaxLayer(const ArgMinMaxDescriptor& desc, |
| 270 | const char* name = nullptr); |
| 271 | |
mathad01 | b392e98 | 2021-04-07 12:07:30 +0100 | [diff] [blame] | 272 | /// Adds a cast layer to the network. |
| 273 | /// @param name - Optional name for the layer. |
| 274 | /// @return - Interface for configuring the layer. |
| 275 | IConnectableLayer* AddCastLayer(const char* name = nullptr); |
| 276 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 277 | /// Add a Comparison layer to the network. |
| 278 | /// @param name - Optional name for the layer. |
| 279 | /// @param desc - Descriptor for the comparison operation. |
| 280 | /// @return - Interface for configuring the layer. |
| 281 | IConnectableLayer* AddComparisonLayer(const ComparisonDescriptor& comparisonDescriptor, |
| 282 | const char* name = nullptr); |
| 283 | |
| 284 | /// Adds a concatenation layer to the network. |
| 285 | /// @param concatDescriptor - ConcatDescriptor (synonym for OriginsDescriptor) to configure the concatenation |
| 286 | /// process. Number of Views must be equal to the number of inputs, and their order |
| 287 | /// must match - e.g. first view corresponds to the first input, second view to the |
| 288 | /// second input, etc.... |
| 289 | /// @param name - Optional name for the layer. |
| 290 | /// @return - Interface for configuring the layer. |
| 291 | IConnectableLayer* AddConcatLayer(const ConcatDescriptor& concatDescriptor, |
| 292 | const char* name = nullptr); |
| 293 | |
| 294 | /// Adds a 2D convolution layer to the network. |
| 295 | /// @param convolution2dDescriptor - Description of the 2D convolution layer. |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 296 | /// @param name - Optional name for the layer. |
| 297 | /// @return - Interface for configuring the layer. |
| 298 | IConnectableLayer* AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor, |
| 299 | const char* name = nullptr); |
| 300 | |
| 301 | /// Adds a 2D convolution layer to the network. |
| 302 | /// @param convolution2dDescriptor - Description of the 2D convolution layer. |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 303 | /// @param weights - Tensor for the weights data. |
| 304 | /// @param biases - Optional tensor for the bias data. If specified, must match the output tensor shape. |
| 305 | /// @param name - Optional name for the layer. |
| 306 | /// @return - Interface for configuring the layer. |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 307 | ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This AddConvolution2dLayer overload is deprecated", "22.08") |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 308 | IConnectableLayer* AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor, |
| 309 | const ConstTensor& weights, |
| 310 | const Optional<ConstTensor>& biases, |
| 311 | const char* name = nullptr); |
| 312 | |
Jan Eilers | 1b2654f | 2021-09-24 15:45:46 +0100 | [diff] [blame] | 313 | ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This AddConvolution2dLayer overload is deprecated", "22.08") |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 314 | IConnectableLayer* AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor, |
| 315 | const ConstTensor& weights, |
| 316 | const char* name = nullptr); |
| 317 | |
Jan Eilers | 1b2654f | 2021-09-24 15:45:46 +0100 | [diff] [blame] | 318 | ARMNN_DEPRECATED_MSG_REMOVAL_DATE("This AddConvolution2dLayer overload is deprecated", "22.08") |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 319 | IConnectableLayer* AddConvolution2dLayer(const Convolution2dDescriptor& convolution2dDescriptor, |
| 320 | const ConstTensor& weights, |
| 321 | const ConstTensor& biases, |
| 322 | const char* name = nullptr); |
| 323 | |
Matthew Sloyan | b63a311 | 2021-09-08 13:05:51 +0100 | [diff] [blame] | 324 | /// Adds a 3D convolution layer to the network. |
| 325 | /// @param convolution3dDescriptor - Description of the 3D convolution layer. |
Matthew Sloyan | b63a311 | 2021-09-08 13:05:51 +0100 | [diff] [blame] | 326 | /// @param name - Optional name for the layer. |
| 327 | /// @return - Interface for configuring the layer. |
| 328 | IConnectableLayer* AddConvolution3dLayer(const Convolution3dDescriptor& convolution3dDescriptor, |
Matthew Sloyan | b63a311 | 2021-09-08 13:05:51 +0100 | [diff] [blame] | 329 | const char* name = nullptr); |
| 330 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 331 | /// Adds a depth to space layer to the network. |
| 332 | /// @param depthToSpaceDescriptor - Parameters for the depth to space operation. |
| 333 | /// @param name - Optional name for the layer. |
| 334 | /// @return - Interface for configuring the layer. |
| 335 | IConnectableLayer* AddDepthToSpaceLayer(const DepthToSpaceDescriptor& depthToSpaceDescriptor, |
| 336 | const char* name = nullptr); |
| 337 | |
| 338 | /// Adds a 2D depthwise convolution layer to the network. |
| 339 | /// @param convolution2dDescriptor - Description of the 2D depthwise convolution layer. |
Cathal Corbett | 0690265 | 2022-04-14 17:55:11 +0100 | [diff] [blame] | 340 | /// @param name - Optional name for the layer. |
| 341 | /// @return - Interface for configuring the layer. |
Keith Davis | b4dd5cc | 2022-04-07 11:32:00 +0100 | [diff] [blame] | 342 | IConnectableLayer* AddDepthwiseConvolution2dLayer(const DepthwiseConvolution2dDescriptor& convolution2dDescriptor, |
| 343 | const char* name = nullptr); |
Cathal Corbett | 0690265 | 2022-04-14 17:55:11 +0100 | [diff] [blame] | 344 | |
| 345 | /// Adds a 2D depthwise convolution layer to the network. |
| 346 | /// @param convolution2dDescriptor - Description of the 2D depthwise convolution layer. |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 347 | /// @param weights - Tensor for the weights. Expected format: [channelMultiplier, inputChannels, height, width]. |
| 348 | /// @param biases Optional tensor for the bias data. If specified, must match the output tensor shape. |
| 349 | /// @param name - Optional name for the layer. |
| 350 | /// @return - Interface for configuring the layer. |
Cathal Corbett | 0690265 | 2022-04-14 17:55:11 +0100 | [diff] [blame] | 351 | ARMNN_DEPRECATED_MSG("This AddDepthwiseConvolution2dLayer overload is deprecated") |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 352 | IConnectableLayer* AddDepthwiseConvolution2dLayer( |
| 353 | const DepthwiseConvolution2dDescriptor& convolution2dDescriptor, |
| 354 | const ConstTensor& weights, |
| 355 | const Optional<ConstTensor>& biases, |
| 356 | const char* name = nullptr); |
| 357 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 358 | /// Adds a Dequantize layer to the network. |
| 359 | /// @return - Interface for configuring the layer. |
| 360 | IConnectableLayer* AddDequantizeLayer(const char* name = nullptr); |
| 361 | |
| 362 | /// Adds a Detection PostProcess layer to the network. |
| 363 | /// @param descriptor - Description of the Detection PostProcess layer. |
| 364 | /// @param anchors - Tensor for anchors. |
| 365 | /// @param name - Optional name for the layer. |
| 366 | /// @return - Interface for configuring the layer. |
| 367 | IConnectableLayer* AddDetectionPostProcessLayer( |
| 368 | const DetectionPostProcessDescriptor& descriptor, |
| 369 | const ConstTensor& anchors, |
| 370 | const char* name = nullptr); |
| 371 | |
| 372 | /// Add an ElementwiseUnary layer to the network. |
| 373 | /// @param name - Optional name for the layer. |
| 374 | /// @param desc - Descriptor for the elementwiseUnary operation. |
| 375 | /// @return - Interface for configuring the layer. |
| 376 | IConnectableLayer* AddElementwiseUnaryLayer(const ElementwiseUnaryDescriptor& elementwiseUnaryDescriptor, |
| 377 | const char* name = nullptr); |
| 378 | |
| 379 | /// Add an Fill layer to the network. |
| 380 | /// @param name - Optional name for the layer. |
| 381 | /// @param fillDescriptor - Descriptor for the fill operation. |
| 382 | /// @return - Interface for configuring the layer. |
| 383 | IConnectableLayer* AddFillLayer(const FillDescriptor& fillDescriptor, |
| 384 | const char* name = nullptr); |
| 385 | |
Matthew Sloyan | 81beae3 | 2021-07-13 19:46:11 +0100 | [diff] [blame] | 386 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 387 | /// Adds a fully connected layer to the network. |
| 388 | /// @param fullyConnectedDescriptor - Description of the fully connected layer. |
Sadik Armagan | f0a6dec | 2021-03-25 07:46:55 +0000 | [diff] [blame] | 389 | /// @return - Interface for configuring the layer. |
Matthew Sloyan | 57d2c7e | 2021-08-12 17:41:04 +0100 | [diff] [blame] | 390 | /// |
| 391 | /// @note Weights and biases are passed in as inputs. If they are constant tensors you can simply store |
| 392 | /// them in a ConstantLayer as seen below. A full example can be found in samples/SimpleSample.cpp. |
| 393 | /// |
| 394 | /// @code |
| 395 | /// // Make sure the IsConstant flag is set on the weightsInfo before passing it to the ConstTensor. |
| 396 | /// ConstTensor weights(weightsInfo, weightsData); |
| 397 | /// |
| 398 | /// // Constant layer that now holds weights data for FullyConnected |
| 399 | /// IConnectableLayer* const constantWeightsLayer = myNetwork->AddConstantLayer(weights, "weights"); |
| 400 | /// |
| 401 | /// FullyConnectedDescriptor fullyConnectedDesc; |
| 402 | /// IConnectableLayer* const fullyConnectedLayer = myNetwork->AddFullyConnectedLayer(fullyConnectedDesc, |
| 403 | /// "fully connected"); |
| 404 | /// IConnectableLayer* InputLayer = myNetwork->AddInputLayer(0); |
| 405 | /// InputLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(0)); |
| 406 | /// constantWeightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1)); |
| 407 | /// @endcode |
Sadik Armagan | f0a6dec | 2021-03-25 07:46:55 +0000 | [diff] [blame] | 408 | IConnectableLayer* AddFullyConnectedLayer(const FullyConnectedDescriptor& fullyConnectedDescriptor, |
Matthew Sloyan | 81beae3 | 2021-07-13 19:46:11 +0100 | [diff] [blame] | 409 | const char* name = nullptr); |
| 410 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 411 | /// Adds a permute layer to the network. |
| 412 | /// @param permuteDescriptor - PermuteDescriptor to configure the permute. |
| 413 | /// @param name - Optional name for the layer. |
| 414 | /// @return - Interface for configuring the layer. |
| 415 | IConnectableLayer* AddPermuteLayer(const PermuteDescriptor& permuteDescriptor, |
| 416 | const char* name = nullptr); |
| 417 | |
| 418 | /// Adds a batch to space ND layer to the network. |
| 419 | /// @param batchToSpaceNdDescriptor - Description of the layer. |
| 420 | /// @param name - Optional name for the layer. |
| 421 | /// @return - Interface for configuring the layer. |
| 422 | IConnectableLayer* AddBatchToSpaceNdLayer(const BatchToSpaceNdDescriptor& batchToSpaceNdDescriptor, |
| 423 | const char* name = nullptr); |
| 424 | |
Tamás Nyíri | 7b885b3 | 2021-10-26 14:47:57 +0100 | [diff] [blame] | 425 | /// Adds a 2D pooling layer to the network. |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 426 | /// @param pooling2dDescriptor - Pooling2dDescriptor to configure the pooling. |
| 427 | /// @param name - Optional name for the layer. |
| 428 | /// @return - Interface for configuring the layer. |
| 429 | IConnectableLayer* AddPooling2dLayer(const Pooling2dDescriptor& pooling2dDescriptor, |
| 430 | const char* name = nullptr); |
| 431 | |
Tamás Nyíri | 7b885b3 | 2021-10-26 14:47:57 +0100 | [diff] [blame] | 432 | /// Adds a 3D pooling layer to the network. |
| 433 | /// @param pooling3dDescriptor - Pooling3dDescriptor to configure the pooling. |
| 434 | /// @param name - Optional name for the layer. |
| 435 | /// @return - Interface for configuring the layer. |
| 436 | IConnectableLayer* AddPooling3dLayer(const Pooling3dDescriptor& pooling3dDescriptor, |
| 437 | const char* name = nullptr); |
| 438 | |
Cathal Corbett | 18655b8 | 2021-12-13 13:03:22 +0000 | [diff] [blame] | 439 | /// Adds a Precompiled layer to the network. |
| 440 | /// Method use is for backend users. |
| 441 | /// @param preCompiledDescriptor - PreCompiledDescriptor contains parameters for the Precompiled layer. |
| 442 | /// @param compiledBlobPtr - CompiledBlobPtr pre-compiled object set for the Precompiled layer. |
| 443 | /// @param backend - optional BackendId set for the Precompiled layer. |
| 444 | /// @return - Interface for configuring the layer. |
| 445 | IConnectableLayer* AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor, |
Cathal Corbett | 3ea0107 | 2022-01-06 10:29:43 +0000 | [diff] [blame] | 446 | CompiledBlobPtr compiledBlobPtr, |
Cathal Corbett | cbfd718 | 2021-12-15 17:12:59 +0000 | [diff] [blame] | 447 | const Optional<BackendId>& backend, |
| 448 | const char* name = nullptr); |
Cathal Corbett | 18655b8 | 2021-12-13 13:03:22 +0000 | [diff] [blame] | 449 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 450 | /// Adds an activation layer to the network. |
| 451 | /// @param activationDescriptor - ActivationDescriptor to configure the activation. |
| 452 | /// @param name - Optional name for the layer. |
| 453 | /// @return - Interface for configuring the layer. |
| 454 | IConnectableLayer* AddActivationLayer(const ActivationDescriptor& activationDescriptor, |
| 455 | const char* name = nullptr); |
| 456 | |
| 457 | /// Adds a normalization layer to the network. |
| 458 | /// @param normalizationDescriptor - NormalizationDescriptor to configure the normalization. |
| 459 | /// @param name - Optional name for the layer. |
| 460 | /// @return - Interface for configuring the layer. |
| 461 | IConnectableLayer* AddNormalizationLayer(const NormalizationDescriptor& normalizationDescriptor, |
| 462 | const char* name = nullptr); |
| 463 | |
| 464 | /// Adds a slice layer to the network. |
| 465 | /// @param sliceDescriptor - SliceDescriptor to configure the slice operation. |
| 466 | /// @param name - Optional name for the layer. |
| 467 | /// @return - Interface for configuring the layer. |
| 468 | IConnectableLayer* AddSliceLayer(const SliceDescriptor& sliceDescriptor, const char* name = nullptr); |
| 469 | |
| 470 | /// Adds a softmax layer to the network. |
| 471 | /// If the data type is QAsymm8, then the output quantization parameters |
| 472 | /// must have a scale of 1/256 and an offset of 0 |
| 473 | /// @param softmaxDescriptor - SoftmaxDescriptor to configure the softmax. |
| 474 | /// @param name - Optional name for the layer. |
| 475 | /// @return - Interface for configuring the layer. |
| 476 | IConnectableLayer* AddSoftmaxLayer(const SoftmaxDescriptor& softmaxDescriptor, |
| 477 | const char* name = nullptr); |
| 478 | |
| 479 | /// Adds a splitter layer to the network. |
| 480 | /// @param splitterDescriptor - ViewsDescriptor to configure the splitting process. |
| 481 | /// Number of Views must be equal to the number of outputs, |
| 482 | /// and their order must match - e.g. first view corresponds to |
| 483 | /// the first output, second view to the second output, etc.... |
| 484 | /// @param name - Optional name for the layer. |
| 485 | /// @return - Interface for configuring the layer. |
| 486 | IConnectableLayer* AddSplitterLayer(const ViewsDescriptor& splitterDescriptor, |
| 487 | const char* name = nullptr); |
| 488 | |
| 489 | /// Adds a merge layer to the network. |
| 490 | /// @param name - Optional name for the layer. |
| 491 | /// @return - Interface for configuring the layer. |
| 492 | IConnectableLayer* AddMergeLayer(const char* name = nullptr); |
| 493 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 494 | /// Adds an addition layer to the network. |
| 495 | /// @param name - Optional name for the layer. |
| 496 | /// @return - Interface for configuring the layer. |
| 497 | IConnectableLayer* AddAdditionLayer(const char* name = nullptr); |
| 498 | |
| 499 | /// Adds a multiplication layer to the network. |
| 500 | /// @param name - Optional name for the layer. |
| 501 | /// @return - Interface for configuring the layer. |
| 502 | IConnectableLayer* AddMultiplicationLayer(const char* name = nullptr); |
| 503 | |
| 504 | /// Adds a batch normalization layer to the network. |
| 505 | /// @param mean - Pre-calculated mean for each channel. |
| 506 | /// @param variance - Pre-calculated variance for each channel. |
| 507 | /// @param beta - Per-channel additive factor. |
| 508 | /// @param gamma - Per-channel multiplicative factor. |
| 509 | /// @return - Interface for configuring the layer. |
| 510 | /// @param name - Optional name for the layer. |
| 511 | IConnectableLayer* AddBatchNormalizationLayer(const BatchNormalizationDescriptor& desc, |
| 512 | const ConstTensor& mean, |
| 513 | const ConstTensor& variance, |
| 514 | const ConstTensor& beta, |
| 515 | const ConstTensor& gamma, |
| 516 | const char* name = nullptr); |
| 517 | |
| 518 | /// Adds a rank layer to the network. |
| 519 | /// @param name - Optional name for the layer. |
| 520 | /// @return - Interface for configuring the layer. |
| 521 | IConnectableLayer* AddRankLayer(const char* name = nullptr); |
| 522 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 523 | /// Adds a resize layer to the network. |
| 524 | /// @param resizeDescriptor - Parameters for the resize operation. |
| 525 | /// @param name - Optional name for the layer. |
| 526 | /// @return - Interface for configuring the layer. |
| 527 | IConnectableLayer* AddResizeLayer(const ResizeDescriptor& resizeDescriptor, |
| 528 | const char* name = nullptr); |
| 529 | |
| 530 | /// Adds a reduce layer to the network. |
| 531 | /// @param ReduceDescriptor - Parameters for the reduce operation. |
| 532 | /// @param name - Optional name for the layer. |
| 533 | /// @return - Interface for configuring the layer. |
| 534 | IConnectableLayer* AddReduceLayer(const ReduceDescriptor& reduceDescriptor, |
| 535 | const char* name = nullptr); |
| 536 | |
| 537 | /// Adds an instance normalization layer to the network. |
| 538 | /// @param desc - Parameters for the instance normalization operation. |
| 539 | /// @param name - Optional name for the layer. |
| 540 | /// @return - Interface for configuring the layer. |
| 541 | IConnectableLayer* AddInstanceNormalizationLayer(const InstanceNormalizationDescriptor& desc, |
| 542 | const char* name = nullptr); |
| 543 | |
| 544 | /// Adds an L2 normalization layer to the network. |
| 545 | /// Normalization is performed along dimension 1, but requires a 4d input. |
| 546 | /// @param desc - Parameters for the L2 normalization operation. |
| 547 | /// @param name - Optional name for the layer. |
| 548 | /// @return - Interface for configuring the layer. |
| 549 | IConnectableLayer* AddL2NormalizationLayer(const L2NormalizationDescriptor& desc, |
| 550 | const char* name = nullptr); |
| 551 | |
| 552 | /// Adds a log softmax layer to the network. |
| 553 | /// @param logSoftmaxDescriptor - LogSoftmaxDescriptor to configure the log softmax. |
| 554 | /// @param name - Optional name for the layer. |
| 555 | /// @return - Interface for configuring the layer. |
| 556 | IConnectableLayer* AddLogSoftmaxLayer(const LogSoftmaxDescriptor& logSoftmaxDescriptor, |
| 557 | const char* name = nullptr); |
| 558 | |
| 559 | /// Adds a layer with no inputs and a single output, which always corresponds to |
| 560 | /// the passed in constant tensor. |
| 561 | /// @param input - Tensor to be provided as the only output of the layer. The layer will maintain |
| 562 | /// its own copy of the tensor data, meaning the memory referenced by @a input can |
| 563 | /// be freed or reused after this function is called. |
| 564 | /// @param name - Optional name for the layer. |
| 565 | /// @return - Interface for configuring the layer. |
| 566 | IConnectableLayer* AddConstantLayer(const ConstTensor& input, |
| 567 | const char* name = nullptr); |
| 568 | |
| 569 | /// Adds a reshape layer to the network. |
| 570 | /// @param reshapeDescriptor - Parameters for the reshape operation. |
| 571 | /// @param name - Optional name for the layer. |
| 572 | /// @return - Interface for configuring the layer. |
| 573 | IConnectableLayer* AddReshapeLayer(const ReshapeDescriptor& reshapeDescriptor, |
| 574 | const char* name = nullptr); |
| 575 | |
Keith Davis | 3ae3f97 | 2021-05-21 16:33:48 +0100 | [diff] [blame] | 576 | /// Adds a shape layer to the network. |
| 577 | /// @param name - Optional name for the layer. |
| 578 | /// @return - Interface for configuring the layer. |
| 579 | IConnectableLayer* AddShapeLayer(const char* name = nullptr); |
| 580 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 581 | /// Adds a space to batch layer to the network. |
| 582 | /// @param spaceToBatchNdDescriptor - Parameters for the space to batch operation. |
| 583 | /// @param name - Optional name for the layer. |
| 584 | /// @return - Interface for configuring the layer. |
| 585 | IConnectableLayer* AddSpaceToBatchNdLayer(const SpaceToBatchNdDescriptor& spaceToBatchNdDescriptor, |
| 586 | const char* name = nullptr); |
| 587 | |
| 588 | /// Adds a space to depth layer to the network. |
| 589 | /// @param spaceToDepthDescriptor - Parameters for the space to depth operation. |
| 590 | /// @param name - Optional name for the layer. |
| 591 | /// @return - Interface for configuring the layer. |
| 592 | IConnectableLayer* AddSpaceToDepthLayer(const SpaceToDepthDescriptor& spaceToDepthDescriptor, |
| 593 | const char* name = nullptr); |
| 594 | |
| 595 | /// Adds a floor layer to the network. |
| 596 | /// @param name - Optional name for the layer. |
| 597 | /// @return - Interface for configuring the layer. |
| 598 | IConnectableLayer* AddFloorLayer(const char* name = nullptr); |
| 599 | |
| 600 | /// Adds an output layer to the network. |
| 601 | /// @param id - User generated id to uniquely identify a particular output. The same id needs to be specified |
| 602 | /// when passing the outputs to the IRuntime::EnqueueWorkload() function. |
| 603 | /// @param name - Optional name for the layer. |
| 604 | /// @return - Interface for configuring the layer. |
| 605 | IConnectableLayer* AddOutputLayer(LayerBindingId id, const char* name = nullptr); |
| 606 | |
| 607 | /// Add a Lstm layer to the network |
| 608 | /// @param descriptor - Parameters for the Lstm operation |
| 609 | /// @param params - Weights and biases for the LSTM cell |
| 610 | /// @param name - Optional name for the layer |
| 611 | /// @return - Interface for configuring the layer. |
| 612 | IConnectableLayer* AddLstmLayer(const LstmDescriptor& descriptor, |
| 613 | const LstmInputParams& params, |
| 614 | const char* name = nullptr); |
| 615 | |
| 616 | /// Adds a division layer to the network. |
| 617 | /// @param name - Optional name for the layer. |
| 618 | /// @return - Interface for configuring the layer. |
| 619 | IConnectableLayer* AddDivisionLayer(const char* name = nullptr); |
| 620 | |
| 621 | /// Adds a subtraction layer to the network. |
| 622 | /// @param name - Optional name for the layer. |
| 623 | /// @return - Interface for configuring the layer. |
| 624 | IConnectableLayer* AddSubtractionLayer(const char* name = nullptr); |
| 625 | |
| 626 | /// Add a Maximum layer to the network. |
| 627 | /// @param name - Optional name for the layer. |
| 628 | /// @return - Interface for configuring the layer. |
| 629 | IConnectableLayer* AddMaximumLayer(const char* name = nullptr); |
| 630 | |
| 631 | /// Add a Mean layer to the network. |
| 632 | /// @param meanDescriptor - Parameters for the mean operation. |
| 633 | /// @param name - Optional name for the layer. |
| 634 | /// @return - Interface for configuring the layer. |
| 635 | IConnectableLayer* AddMeanLayer(const MeanDescriptor& meanDescriptor, const char* name = nullptr); |
| 636 | |
| 637 | /// Adds a fully pad layer to the network. |
| 638 | /// @param paddings - n by 2 tensor, where n is the rank of the input tensor, |
| 639 | /// such that paddings[i,0] indicates the amount of padding to add in front of dimonsion i, and |
| 640 | /// paddings[i,1] indicates the amount of padding to add after the end of dimension i |
| 641 | /// @param name - Optional name for the layer. |
| 642 | /// @return - Interface for configuring the layer. |
| 643 | IConnectableLayer* AddPadLayer(const PadDescriptor& padDescriptor, |
| 644 | const char* name = nullptr); |
| 645 | |
| 646 | /// Add a quantize layer to the network |
| 647 | ///@param name - Optional name for the layer. |
| 648 | /// @return - Interface for configuring the layer. |
| 649 | IConnectableLayer* AddQuantizeLayer(const char* name = nullptr); |
| 650 | |
| 651 | /// Adds a strided slice layer to the network. |
| 652 | /// @param StridedSliceDescriptor - Parameters for the strided slice operation. |
| 653 | /// @param name - Optional name for the layer. |
| 654 | /// @return - Interface for configuring the layer. |
| 655 | IConnectableLayer* AddStridedSliceLayer(const StridedSliceDescriptor& stridedSliceDescriptor, |
| 656 | const char* name = nullptr); |
| 657 | |
| 658 | /// Add a Minimum layer to the network. |
| 659 | /// @param name - Optional name for the layer. |
| 660 | /// @return - Interface for configuring the layer. |
| 661 | IConnectableLayer* AddMinimumLayer(const char* name = nullptr); |
| 662 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 663 | /// Add Gather layer to the network. |
| 664 | /// @param descriptor - Description of the gather layer. |
| 665 | /// @param name - Optional name for the layer. |
| 666 | /// @return - Interface for configuring the layer. |
| 667 | IConnectableLayer* AddGatherLayer(const GatherDescriptor& descriptor, |
| 668 | const char* name = nullptr); |
| 669 | |
Teresa Charlin | b2d3ec5 | 2022-04-12 22:07:09 +0100 | [diff] [blame] | 670 | /// Add GatherNd layer to the network. |
| 671 | /// @param name - Optional name for the layer. |
| 672 | /// @return - Interface for configuring the layer. |
| 673 | IConnectableLayer* AddGatherNdLayer(const char* name = nullptr); |
| 674 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 675 | /// Adds a switch layer to the network. |
| 676 | /// @param name - Optional name for the layer. |
| 677 | /// @return - Interface for configuring the layer. |
| 678 | IConnectableLayer* AddSwitchLayer(const char* name = nullptr); |
| 679 | |
| 680 | /// Adds a PReLU layer to the network. |
| 681 | /// @param name - Optional name for the layer. |
| 682 | /// @return - Interface for configuring the layer. |
| 683 | IConnectableLayer* AddPreluLayer(const char* name = nullptr); |
| 684 | |
| 685 | /// Adds a 2D transpose convolution layer to the network. |
| 686 | /// @param descriptor - Description of the 2D transpose convolution layer. |
| 687 | /// @param weights - Tensor for the weights data. |
| 688 | /// @param biases - Optional tensor for the bias data. |
| 689 | /// @param name - Optional name for the layer. |
| 690 | /// @return - Interface for configuring the layer. |
| 691 | IConnectableLayer* AddTransposeConvolution2dLayer(const TransposeConvolution2dDescriptor& descriptor, |
| 692 | const ConstTensor& weights, |
| 693 | const Optional<ConstTensor>& biases, |
| 694 | const char* name = nullptr); |
| 695 | |
| 696 | /// Adds a transpose layer to the network. |
| 697 | /// @param transposeDescriptor - TransposeDescriptor to configure the transpose. |
| 698 | /// @param name - Optional name for the layer. |
| 699 | /// @return - Interface for configuring the layer. |
| 700 | IConnectableLayer* AddTransposeLayer(const TransposeDescriptor& transposeDescriptor, |
| 701 | const char* name = nullptr); |
| 702 | |
| 703 | /// Adds a stack layer to the network. |
| 704 | /// @param descriptor - Description of the stack layer. |
| 705 | /// @param name - Optional name for the layer. |
| 706 | /// @return - Interface for configuring the layer. |
| 707 | IConnectableLayer* AddStackLayer(const StackDescriptor& descriptor, |
| 708 | const char* name = nullptr); |
| 709 | |
| 710 | /// Add a stand-in layer for a type unknown to the Arm NN framework. |
| 711 | /// Note: Due to the nature of this layer, no validation can be performed by the framework. |
| 712 | /// Furthermore, Any model containing this layer cannot make use of dynamic tensors since the |
| 713 | /// tensor sizes cannot be inferred. |
| 714 | /// @descriptor - Descriptor for the StandIn layer. |
| 715 | /// @return - Interface for configuring the layer. |
| 716 | IConnectableLayer* AddStandInLayer(const StandInDescriptor& descriptor, |
| 717 | const char* name = nullptr); |
| 718 | |
| 719 | /// Add a QuantizedLstm layer to the network |
| 720 | /// @param params - The weights and biases for the Quantized LSTM cell |
| 721 | /// @param name - Optional name for the layer |
| 722 | /// @return - Interface for configuring the layer. |
| 723 | IConnectableLayer* AddQuantizedLstmLayer(const QuantizedLstmInputParams& params, |
| 724 | const char* name = nullptr); |
| 725 | |
| 726 | /// Add a QLstm layer to the network |
| 727 | /// @param descriptor - Parameters for the QLstm operation |
| 728 | /// @param params - Weights and biases for the layer |
| 729 | /// @param name - Optional name for the layer |
| 730 | /// @return - Interface for configuring the layer. |
| 731 | IConnectableLayer* AddQLstmLayer(const QLstmDescriptor& descriptor, |
| 732 | const LstmInputParams& params, |
| 733 | const char* name = nullptr); |
| 734 | |
| 735 | /// Adds a Logical Binary layer to the network. |
| 736 | /// @param descriptor - Description of the Logical Binary layer. |
| 737 | /// @param name - Optional name for the layer. |
| 738 | /// @return - Interface for configuring the layer. |
| 739 | IConnectableLayer* AddLogicalBinaryLayer(const LogicalBinaryDescriptor& descriptor, |
| 740 | const char* name = nullptr); |
| 741 | |
Narumol Prangnawarat | 8ed39ae | 2021-07-15 16:16:25 +0100 | [diff] [blame] | 742 | /// Add a UnidirectionalSequenceLstm layer to the network |
| 743 | /// @param descriptor - Parameters for the UnidirectionalSequenceLstm operation |
| 744 | /// @param params - Weights and biases for the UnidirectionalSequenceLstm |
| 745 | /// @param name - Optional name for the layer |
| 746 | /// @return - Interface for configuring the layer. |
| 747 | IConnectableLayer* AddUnidirectionalSequenceLstmLayer(const UnidirectionalSequenceLstmDescriptor& descriptor, |
| 748 | const LstmInputParams& params, |
| 749 | const char* name = nullptr); |
| 750 | |
Simon Obute | 51f6777 | 2021-09-03 15:50:13 +0100 | [diff] [blame] | 751 | /// Add a ChannelShuffle layer to the network |
| 752 | /// @param descriptor - Parameters for the ChannelShuffle operation |
| 753 | /// @param name - Optional name for the layer |
| 754 | /// @return - Interface for configuring the layer |
| 755 | IConnectableLayer* AddChannelShuffleLayer(const ChannelShuffleDescriptor& descriptor, |
| 756 | const char* name = nullptr); |
| 757 | |
Jan Eilers | 1b2654f | 2021-09-24 15:45:46 +0100 | [diff] [blame] | 758 | // The Accept function needs to be wrapped in a no warn macro to avoid deprecation warnings from |
| 759 | // the deprecated ILayerVisitor which is used in the function. |
| 760 | ARMNN_NO_DEPRECATE_WARN_BEGIN |
| 761 | /// Apply a visitor to this layer |
| 762 | ARMNN_DEPRECATED_MSG_REMOVAL_DATE("Accept is deprecated. The ILayerVisitor that works in conjunction with this " |
| 763 | "Accept function is deprecated. Use IStrategy in combination with " |
| 764 | "ExecuteStrategy instead, which is an ABI/API stable version of the " |
| 765 | "visitor pattern.", |
| 766 | "22.05") |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 767 | void Accept(ILayerVisitor& visitor) const; |
Jan Eilers | 1b2654f | 2021-09-24 15:45:46 +0100 | [diff] [blame] | 768 | ARMNN_NO_DEPRECATE_WARN_END |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 769 | |
| 770 | void ExecuteStrategy(IStrategy& strategy) const; |
| 771 | |
protected:
    /// Non-public destructor: prevents clients from deleting a network directly through an INetwork pointer.
    ~INetwork();

    // Friends that require access to the private implementation (pNetworkImpl):
    // testing helpers and the optimizer entry point.
    friend void VisitLayersTopologically(const INetwork* inputNetwork, IStrategy& strategy);
    friend class TestConnectionPreservation;
    friend TensorInfo GetInputTensorInfo(const INetwork* network);
    friend IOptimizedNetworkPtr Optimize(const INetwork& network,
                                         const std::vector<BackendId>& backendPreferences,
                                         const IDeviceSpec& deviceSpec,
                                         const OptimizerOptions& options,
                                         Optional<std::vector<std::string>&> messages);

    /// Non-public constructor; networks are created via the public factory (see class API).
    INetwork(NetworkOptions networkOptions = {});

    /// pImpl: the concrete network implementation, hidden to keep this interface ABI stable.
    std::unique_ptr<NetworkImpl> pNetworkImpl;
};
| 788 | |
Mike Kelly | 386ff1a | 2021-03-29 15:04:50 +0100 | [diff] [blame] | 789 | namespace experimental |
| 790 | { |
Sadik Armagan | a004251 | 2021-03-30 11:05:36 +0100 | [diff] [blame] | 791 | class AsyncNetworkImpl; |
Mike Kelly | 386ff1a | 2021-03-29 15:04:50 +0100 | [diff] [blame] | 792 | class WorkingMemHandle; |
| 793 | } |
| 794 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 795 | struct BackendSettings; |
| 796 | struct OptimizationResult; |
| 797 | class OptimizedNetworkImpl; |
Derek Lamberti | e155bbf | 2021-10-13 14:32:12 +0100 | [diff] [blame] | 798 | class IProfiler; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 799 | class IOptimizedNetwork |
| 800 | { |
| 801 | public: |
| 802 | static void Destroy(IOptimizedNetwork* network); |
| 803 | |
| 804 | Status PrintGraph(); |
| 805 | Status SerializeToDot(std::ostream& stream) const; |
| 806 | |
Cathal Corbett | 5aa9fd7 | 2022-02-25 15:33:28 +0000 | [diff] [blame] | 807 | arm::pipe::ProfilingGuid GetGuid() const; |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 808 | |
Sadik Armagan | b7851f9 | 2021-10-06 16:37:02 +0100 | [diff] [blame] | 809 | size_t GetNumInputs() const; |
| 810 | size_t GetNumOutputs() const; |
| 811 | |
Mike Kelly | 0d677db | 2021-06-27 22:39:21 +0100 | [diff] [blame] | 812 | // Creates a copy of the IOptimizedNetwork. The IOptimizedNetwork will not be reoptimized, |
| 813 | // the provided ModelOptions will only be used when creating a LoadedNetwork. |
| 814 | IOptimizedNetwork(const IOptimizedNetwork& other, const ModelOptions& modelOptions); |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 815 | IOptimizedNetwork(std::unique_ptr<Graph> graph); |
| 816 | IOptimizedNetwork(std::unique_ptr<OptimizedNetworkImpl> impl); |
| 817 | ~IOptimizedNetwork(); |
| 818 | |
Derek Lamberti | e155bbf | 2021-10-13 14:32:12 +0100 | [diff] [blame] | 819 | const std::shared_ptr<IProfiler>& GetProfiler() const; |
| 820 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 821 | protected: |
| 822 | friend class LoadedNetwork; |
Mike Kelly | 386ff1a | 2021-03-29 15:04:50 +0100 | [diff] [blame] | 823 | |
Sadik Armagan | a004251 | 2021-03-30 11:05:36 +0100 | [diff] [blame] | 824 | friend class experimental::AsyncNetworkImpl; |
Mike Kelly | 386ff1a | 2021-03-29 15:04:50 +0100 | [diff] [blame] | 825 | friend class experimental::WorkingMemHandle; |
| 826 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 827 | friend Graph& GetGraphForTesting(IOptimizedNetwork* optNetPtr); |
| 828 | friend ModelOptions& GetModelOptionsForTesting(IOptimizedNetwork* optNetPtr); |
| 829 | friend IOptimizedNetworkPtr Optimize(const INetwork& inNetwork, |
| 830 | const std::vector<BackendId>& backendPreferences, |
| 831 | const IDeviceSpec& deviceSpec, |
| 832 | const OptimizerOptions& options, |
| 833 | Optional<std::vector<std::string>&> messages); |
Cathal Corbett | a3f4fba | 2022-03-21 09:27:08 +0000 | [diff] [blame] | 834 | friend IOptimizedNetworkPtr Optimize(const Graph& inGraph, |
| 835 | const std::vector<BackendId>& backendPreferences, |
| 836 | const IDeviceSpec& deviceSpec, |
| 837 | const OptimizerOptions& options, |
| 838 | Optional<std::vector<std::string>&> messages); |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 839 | |
Francis Murtagh | 3d2b4b2 | 2021-02-15 18:23:17 +0000 | [diff] [blame] | 840 | IOptimizedNetwork(std::unique_ptr<Graph> graph, const ModelOptions& modelOptions); |
| 841 | |
| 842 | std::unique_ptr<OptimizedNetworkImpl> pOptimizedNetworkImpl; |
| 843 | }; |
| 844 | |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 845 | /// Create an optimized version of the network |
| 846 | /// @param network INetwork description of the network to be optimized. |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 847 | /// @param backendPreferences The choice of the backend ordered by user preferences. |
| 848 | /// @param deviceSpec DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec() |
Rob Hughes | 2321443 | 2019-11-05 11:27:36 +0000 | [diff] [blame] | 849 | /// @param messages If there are failures or warnings a string describing same will be added to the vector |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 850 | /// @param options OptimizerOptions object with optimizer configuration options |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 851 | /// @return An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from |
| 852 | /// armnn::Exception if process fails. |
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 853 | |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 854 | IOptimizedNetworkPtr Optimize(const INetwork& network, |
David Beck | f0b4845 | 2018-10-19 15:20:56 +0100 | [diff] [blame] | 855 | const std::vector<BackendId>& backendPreferences, |
telsoa01 | c577f2c | 2018-08-31 09:22:23 +0100 | [diff] [blame] | 856 | const IDeviceSpec& deviceSpec, |
jimfly01 | 6b0b53d | 2018-10-08 14:43:01 +0100 | [diff] [blame] | 857 | const OptimizerOptions& options = OptimizerOptions(), |
Rob Hughes | 2321443 | 2019-11-05 11:27:36 +0000 | [diff] [blame] | 858 | Optional<std::vector<std::string>&> messages = EmptyOptional()); |
Cathal Corbett | a3f4fba | 2022-03-21 09:27:08 +0000 | [diff] [blame] | 859 | |
/// Create an optimized version of the network
/// @param inGraph Graph to be optimized.
/// @param backendPreferences The choice of the backend ordered by user preferences.
/// @param deviceSpec DeviceSpec object as queried from the runtime. See IRuntime::GetDeviceSpec()
/// @param options OptimizerOptions object with optimizer configuration options
/// @param messages If there are failures or warnings a string describing same will be added to the vector
/// @return An IOptimizedNetworkPtr interface to the optimized network, throws an exception derived from
/// armnn::Exception if process fails.

IOptimizedNetworkPtr Optimize(const Graph& inGraph,
                              const std::vector<BackendId>& backendPreferences,
                              const IDeviceSpec& deviceSpec,
                              const OptimizerOptions& options,
                              Optional<std::vector<std::string>&> messages = EmptyOptional());
telsoa01 | 4fcda01 | 2018-03-09 14:13:49 +0000 | [diff] [blame] | 874 | } //namespace armnn |