blob: 54bdf3698290621139dabf6754d55c22c62b3819 [file] [log] [blame]
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +00001//
2// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
6#include <armnn_delegate.hpp>
Ryan OSheaac9607f2023-04-03 11:33:33 +01007#include <OpaqueDelegateUtils.hpp>
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +00008
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +00009#include "Activation.hpp"
10#include "ArgMinMax.hpp"
11#include "BatchMatMul.hpp"
12#include "BatchSpace.hpp"
Idriss Chaouchcbf79292023-09-08 11:18:16 +010013#include "BroadcastTo.hpp"
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +000014#include "Comparison.hpp"
15#include "Convolution.hpp"
16#include "Control.hpp"
17#include "ElementwiseBinary.hpp"
18#include "ElementwiseUnary.hpp"
19#include "Fill.hpp"
20#include "FullyConnected.hpp"
21#include "Gather.hpp"
22#include "GatherNd.hpp"
23#include "LogicalBinary.hpp"
24#include "Lstm.hpp"
25#include "Normalization.hpp"
26#include "Pack.hpp"
27#include "Pad.hpp"
28#include "Pooling.hpp"
29#include "Prelu.hpp"
30#include "Quantization.hpp"
31#include "Redefine.hpp"
32#include "Reduce.hpp"
33#include "Resize.hpp"
Tracy Narine7306bbe2023-07-17 16:06:26 +010034#include "ReverseV2.hpp"
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +000035#include "Round.hpp"
36#include "Shape.hpp"
37#include "Slice.hpp"
38#include "StridedSlice.hpp"
39#include "Softmax.hpp"
40#include "SpaceDepth.hpp"
41#include "Split.hpp"
Tianle Cheng92ce35c2023-07-25 16:41:00 +010042#include "Tile.hpp"
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +000043#include "Transpose.hpp"
44#include "UnidirectionalSequenceLstm.hpp"
45#include "Unpack.hpp"
46
47#include <armnn/utility/IgnoreUnused.hpp>
48#include <armnnUtils/Filesystem.hpp>
49#include <armnn/utility/Timer.hpp>
50#include <flatbuffers/flatbuffers.h>
51#include <tensorflow/lite/context_util.h>
52#include <tensorflow/lite/schema/schema_generated.h>
53#include <tensorflow/lite/minimal_logging.h>
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +000054
#include <algorithm>
#include <iostream>
#include <regex>
#include <sstream>
#include <string>
#include <utility>
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +000059
60namespace armnnOpaqueDelegate
61{
62
// Registers the Arm NN delegate with TfLite's DelegatePluginRegistry under the
// name "armnn_delegate", so it can be instantiated through the stable plugin API.
// The Register object is intentionally heap-allocated and never freed: this is
// the usual static-registration idiom, where the registration must live for the
// whole process lifetime.
static auto* g_delegate_plugin_ArmnnDelegatePlugin_ =
        new tflite::delegates::DelegatePluginRegistry::Register("armnn_delegate",
                                                                ArmnnDelegatePlugin::New);
66
Teresa Charlin19ad8162023-10-04 11:17:03 +010067armnnDelegate::DelegateOptions ParseArmNNSettings(const tflite::TFLiteSettings* tfLiteSettings)
68{
69 const tflite::ArmNNSettings* settings = tfLiteSettings->armnn_settings();
70 ARMNN_THROW_INVALIDARG_MSG_IF_FALSE(settings,
71 "The passed TFLiteSettings did not contain a valid ArmNNSettings");
72
73 // Extract settings fields
74 bool fastmath = settings->fastmath();
75 std::string backends_str = (settings->backends()) ? settings->backends()->str() : "";
76 const ::flatbuffers::String* additional_parameters = settings->additional_parameters();
77
78 // Build additional parameters string
79 std::string additional_parameters_str;
80 if (additional_parameters)
81 {
82 additional_parameters_str = additional_parameters->str();
83
84 // Apply a regex to remove spaces around the = and , signs
85 std::regex regex_equals_str("[ ]*=[ ]*");
86 std::regex regex_comma_str("[ ]*,[ ]*");
87 additional_parameters_str = std::regex_replace(additional_parameters_str, regex_equals_str, "=");
88 additional_parameters_str = std::regex_replace(additional_parameters_str, regex_comma_str, ",");
89 }
90
91 // Build a std::pair list of option names and values
92 std::vector<std::pair<std::string, std::string>> options;
93 options.emplace_back(std::pair<std::string, std::string>("backends", backends_str));
94 options.emplace_back(std::pair<std::string, std::string>("enable-fast-math", (fastmath) ? "true" : "false"));
95
96 std::stringstream additional_parameters_ss(additional_parameters_str);
97 while (additional_parameters_ss.good())
98 {
99 std::string option_str;
100 getline( additional_parameters_ss, option_str, ',' );
101 size_t n = option_str.find("=");
102 if (n != std::string::npos)
103 {
104 std::string name = option_str.substr(0, n);
105 std::string value = option_str.substr(n + 1, std::string::npos);
106 options.emplace_back(std::pair<std::string, std::string>(name, value));
107 }
108 }
109
110 // Build the key and value lists to pass into the constructor of the DelegateOptions
111 size_t num_options = options.size();
112 std::unique_ptr<const char*> options_keys = std::unique_ptr<const char*>(new const char*[num_options + 1]);
113 std::unique_ptr<const char*> options_values = std::unique_ptr<const char*>(new const char*[num_options + 1]);
114
115 for (size_t i=0; i<num_options; ++i)
116 {
117 options_keys.get()[i] = options[i].first.c_str();
118 options_values.get()[i] = options[i].second.c_str();
119 }
120
121 // Finally call the constructor
122 armnnDelegate::DelegateOptions delegateOptions = armnnDelegate::DelegateOptions(options_keys.get(),
123 options_values.get(),
124 num_options,
125 nullptr);
126
127 return delegateOptions;
128}
129
/// Constructs the delegate: configures Arm NN logging (if requested), obtains
/// the process-wide shared runtime and filters the requested backends down to
/// those the runtime actually supports.
/// @param options Delegate options (backends, logging severity, runtime options).
/// @throws armnn::InvalidArgumentException if none of the requested backends is known.
ArmnnOpaqueDelegate::ArmnnOpaqueDelegate(armnnDelegate::DelegateOptions options)
    : m_Options(std::move(options))
{
    // Configures logging for ARMNN
    if (m_Options.IsLoggingEnabled())
    {
        armnn::ConfigureLogging(true, true, m_Options.GetLoggingSeverity());
    }
    // Create/Get the static ArmNN Runtime. Note that the m_Runtime will be shared by all armnn_delegate
    // instances so the RuntimeOptions cannot be altered for different armnn_delegate instances.
    m_Runtime = GetRuntime(m_Options.GetRuntimeOptions());
    std::vector<armnn::BackendId> backends;
    if (m_Runtime)
    {
        const armnn::BackendIdSet supportedDevices = m_Runtime->GetDeviceSpec().GetSupportedBackends();
        for (auto& backend : m_Options.GetBackends())
        {
            if (std::find(supportedDevices.cbegin(), supportedDevices.cend(), backend) == supportedDevices.cend())
            {
                // Unknown backends are logged and skipped rather than treated as fatal;
                // the delegate only fails below if NO requested backend is usable.
                TFLITE_LOG_PROD(tflite::TFLITE_LOG_INFO,
                                "TfLiteArmnnOpaqueDelegate: Requested unknown backend %s", backend.Get().c_str());
            }
            else
            {
                backends.push_back(backend);
                TFLITE_LOG_PROD(tflite::TFLITE_LOG_INFO,
                                "TfLiteArmnnOpaqueDelegate: Added backend %s", backend.Get().c_str());
            }
        }
    }

    if (backends.empty())
    {
        // No known backend specified
        throw armnn::InvalidArgumentException("TfLiteArmnnOpaqueDelegate: No known backend specified.");
    }
    // Replace the requested backend list with the validated subset.
    m_Options.SetBackends(backends);

    TFLITE_LOG_PROD_ONCE(tflite::TFLITE_LOG_INFO, "TfLiteArmnnOpaqueDelegate: Created TfLite ArmNN delegate.");
}
170
/// TfLite delegate "prepare" entry point. Identifies the nodes Arm NN can run,
/// builds an external registration whose init/free/prepare/invoke callbacks
/// route into ArmnnSubgraph, and asks TfLite to replace those node subsets
/// with the delegate kernel.
/// @param tfLiteContext  TfLite opaque context for the current graph.
/// @param tfLiteDelegate The opaque delegate; its data payload is our ArmnnOpaqueDelegate.
/// @param data           Unused (required by the TfLite callback signature).
/// @return kTfLiteOk on success, kTfLiteError if node discovery or registration fails.
TfLiteStatus DoPrepare(TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueDelegate* tfLiteDelegate, void* data)
{
    // We are required to have the void* data parameter in the function signature, but we don't actually use it.
    armnn::IgnoreUnused(data);

    TfLiteIntArray* supportedOperators =
            static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>
                    (TfLiteOpaqueDelegateGetData(tfLiteDelegate))->IdentifyOperatorsToDelegate(tfLiteContext);
    if(supportedOperators == nullptr)
    {
        return kTfLiteError;
    }

    // ArmNN Opaque Delegate Registration
    TfLiteRegistrationExternal* kernelRegistration =
            TfLiteRegistrationExternalCreate(kTfLiteBuiltinDelegate,
                                             "armnn_delegate",
                                             /*version=*/OPAQUE_DELEGATE_MAJOR_VERSION);
    if(kernelRegistration == nullptr)
    {
        return kTfLiteError;
    }

    // Init callback: builds an ArmnnSubgraph from the delegate params TfLite hands us.
    // The returned pointer becomes the node's user data.
    TfLiteRegistrationExternalSetInit(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, const char* buffer, size_t length) -> void*
        {
            armnn::IgnoreUnused(length);
            // For a delegate kernel, `buffer` is actually a TfLiteOpaqueDelegateParams*.
            const TfLiteOpaqueDelegateParams* parameters =
                    reinterpret_cast<const TfLiteOpaqueDelegateParams*>(buffer);
            if(parameters == nullptr)
            {
                TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                          "TfLiteArmnnOpaqueDelegate: Unable to get parameters.");
                return nullptr;
            }

            return static_cast<void*>(
                    ArmnnSubgraph::Create(tfLiteContext,
                                          parameters,
                                          static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>(
                                                  parameters->delegate->opaque_delegate_builder->data)));
        }
    );

    // Free callback: destroys the ArmnnSubgraph created in the init callback.
    TfLiteRegistrationExternalSetFree(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, void* buffer) -> void
        {
            armnn::IgnoreUnused(tfLiteContext);
            if (buffer != nullptr)
            {
                delete static_cast<ArmnnSubgraph*>(buffer);
            }
        }
    );

    // Prepare callback: forwards to ArmnnSubgraph::Prepare (currently a no-op).
    TfLiteRegistrationExternalSetPrepare(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode) -> TfLiteStatus
        {
            void* userData = TfLiteOpaqueNodeGetUserData(tfLiteNode);
            if (userData == nullptr)
            {
                return kTfLiteError;
            }
            return static_cast<ArmnnSubgraph*>(userData)->Prepare(tfLiteContext);
        }
    );

    // Invoke callback: runs the Arm NN network for this node subset.
    TfLiteRegistrationExternalSetInvoke(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode) -> TfLiteStatus
        {
            void* userData = TfLiteOpaqueNodeGetUserData(tfLiteNode);
            if (userData == nullptr)
            {
                return kTfLiteError;
            }

            return static_cast<ArmnnSubgraph*>(userData)->Invoke(tfLiteContext, tfLiteNode);
        }
    );

    const TfLiteStatus status =
            TfLiteOpaqueContextReplaceNodeSubsetsWithDelegateKernels(
                    tfLiteContext, kernelRegistration, supportedOperators, tfLiteDelegate);

    // IdentifyOperatorsToDelegate transferred ownership of this array to us.
    TfLiteIntArrayFree(supportedOperators);
    return status;
}
262
Teresa Charlin3e4b6082023-10-19 19:13:29 +0100263TfLiteOpaqueDelegate* TfLiteArmnnOpaqueDelegateCreate(armnnDelegate::DelegateOptions options)
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +0000264{
Francis Murtaghc4fb0dd2023-03-16 17:01:56 +0000265 auto* armnnDelegate = new ::armnnOpaqueDelegate::ArmnnOpaqueDelegate(options);
266 return TfLiteOpaqueDelegateCreate(armnnDelegate->GetDelegateBuilder());
267}
268
269::armnnDelegate::DelegateOptions TfLiteArmnnDelegateOptionsDefault()
270{
271 ::armnnDelegate::DelegateOptions options(armnn::Compute::CpuRef);
272 return options;
273}
274
275void TfLiteArmnnOpaqueDelegateDelete(TfLiteOpaqueDelegate* tfLiteDelegate)
276{
277 if (tfLiteDelegate != nullptr)
278 {
279 delete static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>(TfLiteOpaqueDelegateGetData(tfLiteDelegate));
280 TfLiteOpaqueDelegateDelete(tfLiteDelegate);
281 }
282}
283
/// Returns the opaque delegate's version string (OPAQUE_DELEGATE_VERSION is
/// defined at build time).
const std::string ArmnnOpaqueDelegate::GetVersion() {
    return OPAQUE_DELEGATE_VERSION;
}
287
/// Walks the graph's execution plan and returns the indices of the nodes that
/// Arm NN can execute (sorted ascending). The caller owns the returned
/// TfLiteIntArray and must free it with TfLiteIntArrayFree.
/// @param tfLiteContext TfLite opaque context for the current graph.
/// @return Array of delegable node indices, or nullptr if the execution plan
///         or the result array could not be obtained.
/// @throws armnn::Exception if unsupported operators exist while TfLite
///         runtime fallback is disabled in the delegate options.
TfLiteIntArray* ArmnnOpaqueDelegate::IdentifyOperatorsToDelegate(TfLiteOpaqueContext* tfLiteContext)
{
    TfLiteIntArray* executionPlan = nullptr;
    if (TfLiteOpaqueContextGetExecutionPlan(tfLiteContext, &executionPlan) != kTfLiteOk)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext, "TfLiteArmnnOpaqueDelegate: Unable to get graph execution plan.");
        return nullptr;
    }

    // Delegate data with null network
    DelegateData delegateData(m_Options.GetBackends());

    // Allocate for the worst case (every node supported); `size` is then used
    // as a cursor and only the first `size` entries are meaningful.
    TfLiteIntArray* nodesToDelegate = TfLiteIntArrayCreate(executionPlan->size);
    if (nodesToDelegate == nullptr)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                  "TfLiteArmnnOpaqueDelegate: Unable to create int array from execution plan.");
        return nullptr;
    }
    nodesToDelegate->size = 0;

    // Builtin codes of operators we could not delegate (deduplicated for logging).
    std::set<int32_t> unsupportedOperators;

    for (int i = 0; i < executionPlan->size; ++i)
    {
        const int nodeIndex = executionPlan->data[i];

        // If TfLiteOpaqueNodes can be delegated to ArmNN
        TfLiteOpaqueNode* tfLiteNode = nullptr;
        TfLiteRegistrationExternal* tfLiteRegistration = nullptr;

        if (TfLiteOpaqueContextGetNodeAndRegistration(
                tfLiteContext, nodeIndex, &tfLiteNode, &tfLiteRegistration) != kTfLiteOk)
        {
            TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                      "TfLiteArmnnOpaqueDelegate: Unable to get node and registration for node %d.",
                                      nodeIndex);
            continue;
        }

        // VisitNode with a null network only validates support; exceptions are
        // downgraded to "unsupported" so one bad node can't abort discovery.
        TfLiteStatus visitStatus;
        try
        {
            visitStatus = ArmnnSubgraph::VisitNode(
                    delegateData, tfLiteContext, tfLiteRegistration, tfLiteNode, nodeIndex);
        }
        catch(std::exception& ex)
        {
            ARMNN_LOG(error) << "ArmNN Failed to visit node with error: " << ex.what();
            visitStatus = kTfLiteError;
            TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                      "Exception text: %s",
                                      ex.what());
        }

        if (visitStatus != kTfLiteOk)
        {
            // node is not supported by ArmNN
            unsupportedOperators.insert(TfLiteRegistrationExternalGetBuiltInCode(tfLiteRegistration));
            continue;
        }

        nodesToDelegate->data[nodesToDelegate->size++] = nodeIndex;
    }

    for (std::set<int32_t>::iterator it=unsupportedOperators.begin(); it!=unsupportedOperators.end(); ++it)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                  "Operator %s [%d] is not supported by armnn_opaque_delegate.",
                                  tflite::EnumNameBuiltinOperator(tflite::BuiltinOperator(*it)),
                                  *it);
    }

    if (!unsupportedOperators.empty() && m_Options.TfLiteRuntimeFallbackDisabled())
    {
        std::stringstream exMessage;
        exMessage << "TfLiteArmnnOpaqueDelegate: There are unsupported operators in the model. ";
        exMessage << "Not falling back to TfLite Runtime as fallback is disabled. ";
        exMessage << "This should only be disabled under test conditions.";
        throw armnn::Exception(exMessage.str());
    }
    if (nodesToDelegate->size == 0)
    {
        ARMNN_LOG(info) << "No operators in this model are supported by the Arm NN TfLite delegate." <<
                        " The model will be executed entirely by TfLite runtime.";
    }

    std::sort(&nodesToDelegate->data[0], &nodesToDelegate->data[nodesToDelegate->size]);
    return nodesToDelegate;
}
378
/// Adds an Arm NN input layer for each non-constant input tensor of the subgraph
/// and records the output slot so later nodes can connect to it.
/// @param delegateData  Holds the network under construction and per-tensor output slots.
/// @param tfLiteContext TfLite opaque context used to look up tensors by index.
/// @param inputs        Tensor indices that are inputs to this subgraph.
/// @param inputBindings Out-param: (bindingId, tensorInfo) pairs used at Invoke time.
/// @return kTfLiteOk on success; kTfLiteError if a tensor cannot be retrieved.
TfLiteStatus ArmnnSubgraph::AddInputLayer(DelegateData& delegateData,
                                          TfLiteOpaqueContext* tfLiteContext,
                                          const TfLiteIntArray* inputs,
                                          std::vector<armnn::BindingPointInfo>& inputBindings)
{
    const size_t numInputs = static_cast<size_t>(inputs->size);
    for (unsigned int i = 0; i < numInputs; ++i)
    {
        const int32_t tensorId = inputs->data[i];
        const TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, tensorId);

        // NOTE(review): this checks only for null, while AddOutputLayer uses
        // IsValid(tensor) — confirm whether the stricter check is wanted here too.
        if(!tensor)
        {
            return kTfLiteError;
        }

        // Do not create bindings for constant inputs
        if (TfLiteOpaqueTensorGetAllocationType(tensor) == kTfLiteMmapRo)
        {
            continue;
        }

        // The TfLite tensor index doubles as the Arm NN binding id.
        auto bindingId = static_cast<armnn::LayerBindingId>((tensorId));
        armnn::IConnectableLayer* layer = delegateData.m_Network->AddInputLayer(bindingId);

        auto tensorInfo = GetTensorInfoForTfLiteOpaqueTensor(tensor);
        armnn::IOutputSlot& outputSlot = layer->GetOutputSlot(0);
        outputSlot.SetTensorInfo(tensorInfo);

        // Store for creating connections
        delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)] = &outputSlot;

        inputBindings.push_back(std::make_pair(bindingId, tensorInfo));
    }

    return kTfLiteOk;
}
416
/// Adds an Arm NN output layer for each output tensor of the subgraph and
/// connects it to the slot the producing node registered earlier.
/// @param delegateData   Holds the network under construction and per-tensor output slots.
/// @param tfLiteContext  TfLite opaque context used to look up tensors by index.
/// @param outputs        Tensor indices that are outputs of this subgraph.
/// @param outputBindings Out-param: (bindingId, tensorInfo) pairs used at Invoke time.
/// @return kTfLiteOk on success; kTfLiteError if a tensor is invalid or has no
///         registered producer slot.
TfLiteStatus ArmnnSubgraph::AddOutputLayer(DelegateData& delegateData,
                                           TfLiteOpaqueContext* tfLiteContext,
                                           const TfLiteIntArray* outputs,
                                           std::vector<armnn::BindingPointInfo>& outputBindings)
{
    const size_t numOutputs = static_cast<size_t>(outputs->size);
    for (unsigned int i = 0; i < numOutputs; ++i)
    {
        const int32_t tensorId = outputs->data[i];
        const TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, tensorId);

        if(!IsValid(tensor))
        {
            return kTfLiteError;
        }

        // The TfLite tensor index doubles as the Arm NN binding id.
        auto bindingId = static_cast<armnn::LayerBindingId>((tensorId));
        armnn::IConnectableLayer* layer = delegateData.m_Network->AddOutputLayer(bindingId);

        auto tensorInfo = GetTensorInfoForTfLiteOpaqueTensor(tensor);

        // No producer registered this slot — the graph is malformed from our perspective.
        if (delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)] == nullptr)
        {
            return kTfLiteError;
        }

        delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)]->Connect(layer->GetInputSlot(0));
        outputBindings.push_back(std::make_pair(bindingId, tensorInfo));
    }

    return kTfLiteOk;
}
449
450ArmnnSubgraph* ArmnnSubgraph::Create(TfLiteOpaqueContext* tfLiteContext,
451 const TfLiteOpaqueDelegateParams* parameters,
452 const ArmnnOpaqueDelegate* delegate)
453{
454 const auto startTime = armnn::GetTimeNow();
455 ARMNN_LOG(info) << "ArmnnSubgraph creation";
456
457 TfLiteIntArray* executionPlan;
458 if (TfLiteOpaqueContextGetExecutionPlan(tfLiteContext, &executionPlan) != kTfLiteOk)
459 {
460 return nullptr;
461 }
462
463 // Initialize DelegateData holds network and output slots information
464 DelegateData delegateData(delegate->m_Options.GetBackends());
465
466 // Build ArmNN Network
John Mcloughlinc5ee0d72023-03-24 12:07:25 +0000467 armnn::NetworkOptions networkOptions = delegate->m_Options.GetOptimizerOptions().GetModelOptions();
Ryan OSheaac9607f2023-04-03 11:33:33 +0100468 armnn::NetworkId networkId;
469 delegateData.m_Network = armnn::INetwork::Create(networkOptions);
470
471 delegateData.m_OutputSlotForNode = std::vector<armnn::IOutputSlot*>(
472 TfLiteOpaqueContextGetNumTensors(tfLiteContext), nullptr);
473
474 std::vector<armnn::BindingPointInfo> inputBindings;
475 std::vector<armnn::BindingPointInfo> outputBindings;
476
477 // Add input layer
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100478 if (AddInputLayer(delegateData, tfLiteContext, parameters->input_tensors, inputBindings) != kTfLiteOk)
Ryan OSheaac9607f2023-04-03 11:33:33 +0100479 {
480 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to add Inputs to the network!");
481 }
482
483 // Parse TfLite delegate nodes to ArmNN
484 const auto parseStartTime = armnn::GetTimeNow();
485 for (int i = 0; i < parameters->nodes_to_replace->size; ++i)
486 {
487 const int nodeIndex = parameters->nodes_to_replace->data[i];
488
489 TfLiteOpaqueNode* tfLiteNode = nullptr;
490 TfLiteRegistrationExternal* tfLiteRegistration = nullptr;
491 if (TfLiteOpaqueContextGetNodeAndRegistration(
492 tfLiteContext, nodeIndex, &tfLiteNode, &tfLiteRegistration) != kTfLiteOk)
493 {
494 throw armnn::Exception(&"TfLiteArmnnOpaqueDelegate: Unable to get node registration: " [ nodeIndex]);
495 }
496
497 if (VisitNode(delegateData, tfLiteContext, tfLiteRegistration, tfLiteNode, nodeIndex) != kTfLiteOk)
498 {
499 throw armnn::Exception(&"TfLiteArmnnOpaqueDelegate: Unable to parse node: " [ nodeIndex]);
500 }
501 }
502 ARMNN_LOG(info) << "Parse nodes to ArmNN time: " << std::setprecision(2)
503 << std::fixed << armnn::GetTimeDuration(parseStartTime).count() << " ms";
504
505 // Add Output layer
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100506 if (AddOutputLayer(delegateData, tfLiteContext, parameters->output_tensors, outputBindings) != kTfLiteOk)
Ryan OSheaac9607f2023-04-03 11:33:33 +0100507 {
508 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to add Outputs to the network!");
509 }
510
511 // Optimize ArmNN network
512 armnn::IOptimizedNetworkPtr optNet(nullptr, nullptr);
513 try
514 {
515 const auto optimizeStartTime = armnn::GetTimeNow();
516 optNet = armnn::Optimize(*(delegateData.m_Network.get()),
517 delegate->m_Options.GetBackends(),
518 delegate->m_Runtime->GetDeviceSpec(),
519 delegate->m_Options.GetOptimizerOptions());
520 ARMNN_LOG(info) << "Optimize ArmnnSubgraph time: " << std::setprecision(2)
521 << std::fixed << armnn::GetTimeDuration(optimizeStartTime).count() << " ms";
522 }
523 catch (std::exception& ex)
524 {
525 std::stringstream exMessage;
526 exMessage << "TfLiteArmnnOpaqueDelegate: Exception (" << ex.what() << ") caught from optimize.";
527 throw armnn::Exception(exMessage.str());
528 }
529 if (!optNet)
530 {
531 // Optimize failed
532 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to optimize the network!");
533 }
534
535 // If set, we will serialize the optimized model into a dot file.
536 const std::string serializeToDotFile = delegate->m_Options.GetSerializeToDot();
537 if (!serializeToDotFile.empty())
538 {
539 ARMNN_LOG(info) << "Writing graph to dot file: " << serializeToDotFile;
540 fs::path filename = serializeToDotFile;
541 std::fstream file(filename.c_str(), std::ios_base::out);
542 optNet->SerializeToDot(file);
543 }
544
545 try
546 {
547 const auto loadStartTime = armnn::GetTimeNow();
548
549 // Load graph into runtime
550 std::string errorMessage;
551 armnn::Status loadingStatus;
552 armnn::MemorySource inputSource = armnn::MemorySource::Undefined;
553 armnn::MemorySource outputSource = armnn::MemorySource::Undefined;
554 // There's a bit of an assumption here that the delegate will only support Malloc memory source.
John Mcloughlinc5ee0d72023-03-24 12:07:25 +0000555 if (delegate->m_Options.GetOptimizerOptions().GetImportEnabled())
Ryan OSheaac9607f2023-04-03 11:33:33 +0100556 {
557 inputSource = armnn::MemorySource::Malloc;
558 }
John Mcloughlinc5ee0d72023-03-24 12:07:25 +0000559 if (delegate->m_Options.GetOptimizerOptions().GetExportEnabled())
Ryan OSheaac9607f2023-04-03 11:33:33 +0100560 {
561 outputSource = armnn::MemorySource::Malloc;
562 }
563 armnn::INetworkProperties networkProperties(false,
564 inputSource,
565 outputSource,
566 delegate->m_Options.GetInternalProfilingState(),
567 delegate->m_Options.GetInternalProfilingDetail());
568 loadingStatus = delegate->m_Runtime->LoadNetwork(networkId,
569 std::move(optNet),
570 errorMessage,
571 networkProperties);
572 if (loadingStatus != armnn::Status::Success)
573 {
574 // Network load failed.
575 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Network could not be loaded: " + errorMessage);
576 }
577
578 ARMNN_LOG(info) << "Load ArmnnSubgraph time: " << std::setprecision(2)
579 << std::fixed << armnn::GetTimeDuration(loadStartTime).count() << " ms";
580 }
581 catch (std::exception& ex)
582 {
583 std::stringstream exMessage;
584 exMessage << "TfLiteArmnnOpaqueDelegate: Exception (" << ex.what() << ") caught from LoadNetwork.";
585 throw armnn::Exception(exMessage.str());
586 }
587
588 // Register debug callback function
589 if (delegate->m_Options.GetDebugCallbackFunction().has_value())
590 {
591 delegate->m_Runtime->RegisterDebugCallback(networkId, delegate->m_Options.GetDebugCallbackFunction().value());
592 }
593
594 ARMNN_LOG(info) << "Overall ArmnnSubgraph creation time: " << std::setprecision(2)
595 << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms\n";
596
597 // Create a new SubGraph with networkId and runtime
598 return new ArmnnSubgraph(networkId, delegate->m_Runtime, inputBindings, outputBindings);
599}
600
/// TfLite "prepare" hook for the delegate kernel. The Arm NN network was
/// already optimized and loaded into the runtime in Create(), so there is
/// nothing left to do here.
/// @return Always kTfLiteOk.
TfLiteStatus ArmnnSubgraph::Prepare(TfLiteOpaqueContext* tfLiteContext)
{
    armnn::IgnoreUnused(tfLiteContext);
    return kTfLiteOk;
}
606
607TfLiteStatus ArmnnSubgraph::Invoke(TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode)
608{
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100609 // Get array of input indices, inputIndexArray is set from the TfLiteOpaqueNodeInputs function
610 // This function turns inputIndexArray into an int array of indices. These indices point to the tensors for
611 // each input slot in the node.
612 const int* inputIndexArray;
Ryan OSheaac9607f2023-04-03 11:33:33 +0100613 int numInputs;
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100614 if(TfLiteOpaqueNodeInputs(tfLiteNode, &inputIndexArray, &numInputs) != kTfLiteOk)
Ryan OSheaac9607f2023-04-03 11:33:33 +0100615 {
616 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to load subgraph inputs!");
617 }
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100618 // Prepare inputs
619 armnn::InputTensors inputTensors;
620 size_t inputIndex = 0;
Ryan OSheaac9607f2023-04-03 11:33:33 +0100621 for (int inputIdx = 0; inputIdx < numInputs; inputIdx++)
622 {
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100623 TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, inputIndexArray[inputIdx]);
Ryan OSheaac9607f2023-04-03 11:33:33 +0100624
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100625 if(!IsValid(tensor))
Ryan OSheaac9607f2023-04-03 11:33:33 +0100626 {
627 return kTfLiteError;
628 }
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100629 // If tensor is not read only
Ryan OSheaac9607f2023-04-03 11:33:33 +0100630 if (TfLiteOpaqueTensorGetAllocationType(tensor) != kTfLiteMmapRo)
631 {
632 const armnn::BindingPointInfo& inputBinding = m_InputBindings[inputIndex];
633 armnn::TensorInfo inputTensorInfo = inputBinding.second;
634 inputTensorInfo.SetConstant(true);
635 const armnn::ConstTensor inputTensor(inputTensorInfo, TfLiteOpaqueTensorData(tensor));
Narumol Prangnawarat46e574e2023-05-05 16:39:05 +0100636 inputTensors.emplace_back(inputIndexArray[inputIdx], inputTensor);
Ryan OSheaac9607f2023-04-03 11:33:33 +0100637
638 ++inputIndex;
639 }
640 }
641
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100642 // Get array of output indices, outputIndexArray is set from the TfLiteOpaqueNodeOutputs function
643 // This function turns outputIndexArray into an int array of indices. These indices point to the tensors for
644 // each output slot in the node.
645 const int* outputIndexArray;
Ryan OSheaac9607f2023-04-03 11:33:33 +0100646 int numOutputs;
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100647 if(TfLiteOpaqueNodeOutputs(tfLiteNode, &outputIndexArray, &numOutputs) != kTfLiteOk)
Ryan OSheaac9607f2023-04-03 11:33:33 +0100648 {
649 throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to load subgraph outputs!");
650 }
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100651 // Assign the tensors from the outputIndexArray to the armnn BindingPointInfo
652 armnn::OutputTensors outputTensors;
Ryan OSheaac9607f2023-04-03 11:33:33 +0100653 for (int outputIdx = 0; outputIdx < numOutputs; outputIdx++)
654 {
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100655 const armnn::BindingPointInfo& outputBinding = m_OutputBindings[outputIdx];
656 TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, outputIndexArray[outputIdx]);
657 if(!IsValid(tensor))
Ryan OSheaac9607f2023-04-03 11:33:33 +0100658 {
659 return kTfLiteError;
660 }
661
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100662 const armnn::Tensor outputTensor(outputBinding.second, reinterpret_cast<TfLiteTensor*>(tensor)->data
663 .data);
664 outputTensors.emplace_back(outputIndexArray[outputIdx], outputTensor);
Ryan OSheaac9607f2023-04-03 11:33:33 +0100665 }
666
667 // Run graph
David Monahan727d0172023-10-04 10:16:24 +0100668 try
Ryan OSheaac9607f2023-04-03 11:33:33 +0100669 {
David Monahan727d0172023-10-04 10:16:24 +0100670 auto status = m_Runtime->EnqueueWorkload(m_NetworkId, inputTensors, outputTensors);
671 // The delegate holds its own Arm NN runtime so this is our last chance to print internal profiling data.
672 std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkId);
673 if (profiler && profiler->IsProfilingEnabled())
674 {
675 profiler->Print(std::cout);
676 }
677 return (status == armnn::Status::Success) ? kTfLiteOk : kTfLiteError;
Ryan OSheaac9607f2023-04-03 11:33:33 +0100678 }
David Monahan727d0172023-10-04 10:16:24 +0100679 catch (armnn::InvalidArgumentException& ex)
680 {
David Monahan8df6bf32023-11-10 09:55:33 +0000681 std::stringstream exMessage;
682 exMessage << "ArmNN Failed to EnqueueWorkload with error: " << ex.what();
683 ARMNN_LOG(error) << exMessage.str();
684 TFLITE_LOG_PROD_ONCE(tflite::TFLITE_LOG_INFO, exMessage.str().c_str());
David Monahan727d0172023-10-04 10:16:24 +0100685 // This should really be kTfLiteDelegateError but the Delegate Test Suite expects kTfLiteError so we return
686 // that instead
687 return kTfLiteError;
688 }
689
Ryan OSheaac9607f2023-04-03 11:33:33 +0100690}
691
692TfLiteStatus ArmnnSubgraph::VisitNode(DelegateData& delegateData,
693 TfLiteOpaqueContext* tfLiteContext,
694 TfLiteRegistrationExternal* tfLiteRegistration,
695 TfLiteOpaqueNode* tfLiteNode,
696 int nodeIndex)
697{
698 switch (TfLiteRegistrationExternalGetBuiltInCode(tfLiteRegistration))
699 {
Teresa Charlinf69ae562023-04-27 14:42:23 +0100700 case kTfLiteBuiltinAbs:
701 return VisitElementwiseUnaryOperator(delegateData,
702 tfLiteContext,
703 tfLiteNode,
704 nodeIndex,
705 kTfLiteBuiltinAbs,
706 armnn::UnaryOperation::Abs);
David Monahan6c53f9f2023-04-27 15:21:19 +0100707 case kTfLiteBuiltinAdd:
708 return VisitElementwiseBinaryOperator(delegateData,
709 tfLiteContext,
710 tfLiteNode,
711 nodeIndex,
712 kTfLiteBuiltinAdd);
John Mcloughlin559d9092023-04-26 20:14:47 +0100713 case kTfLiteBuiltinArgMax:
714 return VisitArgMinMaxOperator(delegateData,
715 tfLiteContext,
716 tfLiteNode,
717 nodeIndex,
718 kTfLiteBuiltinArgMax);
719 case kTfLiteBuiltinArgMin:
720 return VisitArgMinMaxOperator(delegateData,
721 tfLiteContext,
722 tfLiteNode,
723 nodeIndex,
724 kTfLiteBuiltinArgMin);
Matthew Sloyan48ec8132023-04-27 17:04:47 +0100725 case kTfLiteBuiltinAveragePool2d:
726 return VisitPooling2dOperator(delegateData,
727 tfLiteContext,
728 tfLiteNode,
729 nodeIndex,
730 kTfLiteBuiltinAveragePool2d);
John Mcloughlin0422cf22023-04-27 16:55:00 +0100731 case kTfLiteBuiltinBatchMatmul:
732 return VisitBatchMatMulOperator(delegateData,
733 tfLiteContext,
734 tfLiteNode,
735 nodeIndex,
736 kTfLiteBuiltinBatchMatmul);
Idriss Chaouchcbf79292023-09-08 11:18:16 +0100737 case kTfLiteBuiltinBroadcastTo:
738 return VisitBroadcastToOperator(delegateData,
739 tfLiteContext,
740 tfLiteNode,
741 nodeIndex,
742 kTfLiteBuiltinBroadcastTo);
Kevin May81b66f32023-04-26 14:55:36 +0100743 case kTfLiteBuiltinBatchToSpaceNd:
744 return VisitBatchToSpaceNdOperator(delegateData,
745 tfLiteContext,
746 tfLiteNode,
747 nodeIndex,
748 kTfLiteBuiltinBatchToSpaceNd);
Ryan OSheaa37ccb02023-04-11 10:54:07 +0100749 case kTfLiteBuiltinCast:
750 return VisitCastOperator(delegateData,
751 tfLiteContext,
752 tfLiteNode,
753 nodeIndex,
754 kTfLiteBuiltinCast);
Teresa Charlinf69ae562023-04-27 14:42:23 +0100755 case kTfLiteBuiltinCeil:
756 return VisitElementwiseUnaryOperator(delegateData,
757 tfLiteContext,
758 tfLiteNode,
759 nodeIndex,
760 kTfLiteBuiltinCeil,
761 armnn::UnaryOperation::Ceil);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100762 case kTfLiteBuiltinConcatenation:
763 return VisitControlOperator(delegateData,
764 tfLiteContext,
765 tfLiteNode,
766 nodeIndex,
767 kTfLiteBuiltinConcatenation);
Matthew Sloyan080ffd82023-04-24 12:53:04 +0100768 case kTfLiteBuiltinConv2d:
769 return VisitConvolutionOperator(delegateData,
770 tfLiteContext,
771 tfLiteNode,
772 nodeIndex,
773 kTfLiteBuiltinConv2d);
Francis Murtagh3a9e7ba2023-04-26 15:58:39 +0100774 case kTfLiteBuiltinConv3d:
775 return VisitConvolutionOperator(delegateData,
776 tfLiteContext,
777 tfLiteNode,
778 nodeIndex,
779 kTfLiteBuiltinConv3d);
Matthew Sloyan48ec8132023-04-27 17:04:47 +0100780 case kTfLiteBuiltinCustom:
781 {
782 // Custom operators are defined by the name rather than the builtin code.
783 // Parse the custom_name param in the registration to point to the correct visitor function.
784 std::string customOperatorName = TfLiteRegistrationExternalGetCustomName(tfLiteRegistration);
785 if ( customOperatorName == "AveragePool3D" )
786 {
787 return VisitPooling3dOperator(delegateData,
788 tfLiteContext,
789 tfLiteNode,
790 nodeIndex,
791 customOperatorName);
792 }
793 else if (customOperatorName == "MaxPool3D")
794 {
795 return VisitPooling3dOperator(delegateData,
796 tfLiteContext,
797 tfLiteNode,
798 nodeIndex,
799 customOperatorName);
800 }
801 // Invalid or unsupported custom operator
802 return kTfLiteError;
803 }
Matthew Sloyan080ffd82023-04-24 12:53:04 +0100804 case kTfLiteBuiltinDepthwiseConv2d:
805 return VisitConvolutionOperator(delegateData,
806 tfLiteContext,
807 tfLiteNode,
808 nodeIndex,
809 kTfLiteBuiltinDepthwiseConv2d);
Francis Murtagh36d94ef2023-04-28 14:05:43 +0100810 case kTfLiteBuiltinDequantize:
811 return VisitDequantizeOperator(delegateData,
812 tfLiteContext,
813 tfLiteNode,
814 nodeIndex,
815 kTfLiteBuiltinDequantize);
David Monahan6c53f9f2023-04-27 15:21:19 +0100816 case kTfLiteBuiltinDiv:
817 return VisitElementwiseBinaryOperator(delegateData,
818 tfLiteContext,
819 tfLiteNode,
820 nodeIndex,
821 kTfLiteBuiltinDiv);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100822 case kTfLiteBuiltinEqual:
823 return VisitComparisonOperator(delegateData,
824 tfLiteContext,
825 tfLiteNode,
826 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +0100827 kTfLiteBuiltinEqual,
828 armnn::ComparisonOperation::Equal);
Teresa Charlin42362962023-04-28 14:23:33 +0100829 case kTfLiteBuiltinDepthToSpace:
830 return VisitDepthToSpaceOperator(delegateData,
831 tfLiteContext,
832 tfLiteNode,
833 nodeIndex,
834 kTfLiteBuiltinDepthToSpace);
835 case kTfLiteBuiltinElu:
836 return VisitActivationOperator(delegateData,
837 tfLiteContext,
838 tfLiteNode,
839 nodeIndex,
840 kTfLiteBuiltinElu);
Teresa Charlinf69ae562023-04-27 14:42:23 +0100841 case kTfLiteBuiltinExp:
842 return VisitElementwiseUnaryOperator(delegateData,
843 tfLiteContext,
844 tfLiteNode,
845 nodeIndex,
846 kTfLiteBuiltinExp,
847 armnn::UnaryOperation::Exp);
Matthew Sloyan3504e422023-05-03 13:53:02 +0100848 case kTfLiteBuiltinExpandDims:
849 return VisitExpandDimsOperator(delegateData,
850 tfLiteContext,
851 tfLiteNode,
852 nodeIndex,
853 kTfLiteBuiltinExpandDims);
Ryan OShea59f8f652023-05-11 20:37:53 +0100854 case kTfLiteBuiltinFill:
855 return VisitFillOperator(delegateData,
856 tfLiteContext,
857 tfLiteNode,
858 nodeIndex,
859 kTfLiteBuiltinFill);
Matthew Sloyan48ec8132023-04-27 17:04:47 +0100860 case kTfLiteBuiltinFloor:
861 return VisitFloorOperator(delegateData,
862 tfLiteContext,
863 tfLiteNode,
864 nodeIndex,
865 kTfLiteBuiltinFloor);
David Monahan6c53f9f2023-04-27 15:21:19 +0100866 case kTfLiteBuiltinFloorDiv:
867 return VisitElementwiseBinaryOperator(delegateData,
868 tfLiteContext,
869 tfLiteNode,
870 nodeIndex,
871 kTfLiteBuiltinFloorDiv);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +0100872 case kTfLiteBuiltinFullyConnected:
873 return VisitFullyConnectedOperator(delegateData,
874 tfLiteContext,
875 tfLiteNode,
876 nodeIndex,
877 kTfLiteBuiltinFullyConnected);
Kevin Mayb2831c52023-04-26 17:27:24 +0100878 case kTfLiteBuiltinGather:
879 return VisitGatherOperator(delegateData,
880 tfLiteContext,
881 tfLiteNode,
882 nodeIndex,
883 kTfLiteBuiltinGather);
884 case kTfLiteBuiltinGatherNd:
885 return VisitGatherNdOperator(delegateData,
886 tfLiteContext,
887 tfLiteNode,
888 nodeIndex,
889 kTfLiteBuiltinGatherNd);
Teresa Charlin077cddb2023-09-15 15:19:21 +0100890 case kTfLiteBuiltinGelu:
891 return VisitActivationOperator(delegateData,
892 tfLiteContext,
893 tfLiteNode,
894 nodeIndex,
895 kTfLiteBuiltinGelu);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100896 case kTfLiteBuiltinGreater:
897 return VisitComparisonOperator(delegateData,
898 tfLiteContext,
899 tfLiteNode,
900 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +0100901 kTfLiteBuiltinGreater,
902 armnn::ComparisonOperation::Greater);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100903 case kTfLiteBuiltinGreaterEqual:
904 return VisitComparisonOperator(delegateData,
905 tfLiteContext,
906 tfLiteNode,
907 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +0100908 kTfLiteBuiltinGreaterEqual,
909 armnn::ComparisonOperation::GreaterOrEqual);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +0100910 case kTfLiteBuiltinHardSwish:
911 return VisitActivationOperator(delegateData,
912 tfLiteContext,
913 tfLiteNode,
914 nodeIndex,
915 kTfLiteBuiltinHardSwish);
Teresa Charlinf69ae562023-04-27 14:42:23 +0100916 case kTfLiteBuiltinL2Normalization:
917 return VisitL2NormalizationOperator(delegateData,
918 tfLiteContext,
919 tfLiteNode,
920 nodeIndex,
921 kTfLiteBuiltinL2Normalization);
Matthew Sloyan48ec8132023-04-27 17:04:47 +0100922 case kTfLiteBuiltinL2Pool2d:
923 return VisitPooling2dOperator(delegateData,
924 tfLiteContext,
925 tfLiteNode,
926 nodeIndex,
927 kTfLiteBuiltinL2Pool2d);
Tianle Chengae931732023-07-28 11:53:04 +0100928 case kTfLiteBuiltinLeakyRelu:
929 return VisitActivationOperator(delegateData,
930 tfLiteContext,
931 tfLiteNode,
932 nodeIndex,
933 kTfLiteBuiltinLeakyRelu);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100934 case kTfLiteBuiltinLess:
935 return VisitComparisonOperator(delegateData,
936 tfLiteContext,
937 tfLiteNode,
938 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +0100939 kTfLiteBuiltinLess,
940 armnn::ComparisonOperation::Less);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +0100941 case kTfLiteBuiltinLessEqual:
942 return VisitComparisonOperator(delegateData,
943 tfLiteContext,
944 tfLiteNode,
945 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +0100946 kTfLiteBuiltinLessEqual,
947 armnn::ComparisonOperation::LessOrEqual);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +0100948 case kTfLiteBuiltinLogistic:
949 return VisitActivationOperator(delegateData,
950 tfLiteContext,
951 tfLiteNode,
952 nodeIndex,
953 kTfLiteBuiltinLogistic);
Teresa Charlinf69ae562023-04-27 14:42:23 +0100954 case kTfLiteBuiltinLocalResponseNormalization:
955 return VisitLocalResponseNormalizationOperator(delegateData,
956 tfLiteContext,
957 tfLiteNode,
958 nodeIndex,
959 kTfLiteBuiltinLocalResponseNormalization);
960 case kTfLiteBuiltinLog:
961 return VisitElementwiseUnaryOperator(delegateData,
962 tfLiteContext,
963 tfLiteNode,
964 nodeIndex,
965 kTfLiteBuiltinLog,
966 armnn::UnaryOperation::Log);
967 case kTfLiteBuiltinLogicalAnd:
968 return VisitLogicalBinaryOperator(delegateData,
969 tfLiteContext,
970 tfLiteNode,
971 nodeIndex,
972 kTfLiteBuiltinLogicalAnd,
973 armnn::LogicalBinaryOperation::LogicalAnd);
974 case kTfLiteBuiltinLogicalNot:
975 return VisitElementwiseUnaryOperator(delegateData,
976 tfLiteContext,
977 tfLiteNode,
978 nodeIndex,
979 kTfLiteBuiltinLogicalNot,
980 armnn::UnaryOperation::LogicalNot);
981 case kTfLiteBuiltinLogicalOr:
982 return VisitLogicalBinaryOperator(delegateData,
983 tfLiteContext,
984 tfLiteNode,
985 nodeIndex,
986 kTfLiteBuiltinLogicalOr,
987 armnn::LogicalBinaryOperation::LogicalOr);
Teresa Charlin42362962023-04-28 14:23:33 +0100988 case kTfLiteBuiltinLogSoftmax:
989 return VisitSoftmaxOperator(delegateData,
990 tfLiteContext,
991 tfLiteNode,
992 nodeIndex,
993 kTfLiteBuiltinLogSoftmax);
Matthew Sloyan48ec8132023-04-27 17:04:47 +0100994 case kTfLiteBuiltinLstm:
995 return VisitLstmOperator(delegateData,
996 tfLiteContext,
997 tfLiteNode,
998 nodeIndex,
999 kTfLiteBuiltinLstm);
1000 case kTfLiteBuiltinMaxPool2d:
1001 return VisitPooling2dOperator(delegateData,
1002 tfLiteContext,
1003 tfLiteNode,
1004 nodeIndex,
1005 kTfLiteBuiltinMaxPool2d);
David Monahan6c53f9f2023-04-27 15:21:19 +01001006 case kTfLiteBuiltinMaximum:
1007 return VisitElementwiseBinaryOperator(delegateData,
1008 tfLiteContext,
1009 tfLiteNode,
1010 nodeIndex,
1011 kTfLiteBuiltinMaximum);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +01001012 case kTfLiteBuiltinMean:
1013 return VisitControlOperator(delegateData,
1014 tfLiteContext,
1015 tfLiteNode,
1016 nodeIndex,
1017 kTfLiteBuiltinMean);
David Monahan6c53f9f2023-04-27 15:21:19 +01001018 case kTfLiteBuiltinMinimum:
1019 return VisitElementwiseBinaryOperator(delegateData,
1020 tfLiteContext,
1021 tfLiteNode,
1022 nodeIndex,
1023 kTfLiteBuiltinMinimum);
Ryan OShea59f8f652023-05-11 20:37:53 +01001024 case kTfLiteBuiltinMirrorPad:
1025 return VisitPadOperator(delegateData,
1026 tfLiteContext,
1027 tfLiteNode,
1028 nodeIndex,
1029 kTfLiteBuiltinMirrorPad);
David Monahan6c53f9f2023-04-27 15:21:19 +01001030 case kTfLiteBuiltinMul:
1031 return VisitElementwiseBinaryOperator(delegateData,
1032 tfLiteContext,
1033 tfLiteNode,
1034 nodeIndex,
1035 kTfLiteBuiltinMul);
Teresa Charlinf69ae562023-04-27 14:42:23 +01001036 case kTfLiteBuiltinNeg:
1037 return VisitElementwiseUnaryOperator(delegateData,
1038 tfLiteContext,
1039 tfLiteNode,
1040 nodeIndex,
1041 kTfLiteBuiltinNeg,
1042 armnn::UnaryOperation::Neg);
Matthew Sloyan2b04ec32023-04-26 11:42:46 +01001043 case kTfLiteBuiltinNotEqual:
1044 return VisitComparisonOperator(delegateData,
1045 tfLiteContext,
1046 tfLiteNode,
1047 nodeIndex,
Teresa Charlinf69ae562023-04-27 14:42:23 +01001048 kTfLiteBuiltinNotEqual,
1049 armnn::ComparisonOperation::NotEqual);
Teresa Charlinecebb0f2023-04-27 21:37:56 +01001050 case kTfLiteBuiltinPack:
1051 return VisitPackOperator(delegateData,
1052 tfLiteContext,
1053 tfLiteNode,
1054 nodeIndex,
1055 kTfLiteBuiltinPack);
1056 case kTfLiteBuiltinPad:
1057 return VisitPadOperator(delegateData,
1058 tfLiteContext,
1059 tfLiteNode,
1060 nodeIndex,
1061 kTfLiteBuiltinPad);
1062 case kTfLiteBuiltinPadv2:
1063 return VisitPadOperator(delegateData,
1064 tfLiteContext,
1065 tfLiteNode,
1066 nodeIndex,
1067 kTfLiteBuiltinPadv2);
John Mcloughlin0ec00872023-05-15 17:03:49 +01001068 case kTfLiteBuiltinPow:
1069 return VisitElementwiseBinaryOperator(delegateData,
1070 tfLiteContext,
1071 tfLiteNode,
1072 nodeIndex,
1073 kTfLiteBuiltinPow);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +01001074 case kTfLiteBuiltinPrelu:
1075 return VisitPreluOperator(delegateData,
1076 tfLiteContext,
1077 tfLiteNode,
1078 nodeIndex,
1079 kTfLiteBuiltinPrelu);
Francis Murtagh36d94ef2023-04-28 14:05:43 +01001080 case kTfLiteBuiltinQuantize:
1081 return VisitQuantizeOperator(delegateData,
1082 tfLiteContext,
1083 tfLiteNode,
1084 nodeIndex,
1085 kTfLiteBuiltinQuantize);
John Mcloughlin083586d2023-04-28 18:36:52 +01001086 case kTfLiteBuiltinReduceMax:
1087 return VisitReduceOperator(delegateData,
1088 tfLiteContext,
1089 tfLiteNode,
1090 nodeIndex,
1091 kTfLiteBuiltinReduceMax);
1092 case kTfLiteBuiltinReduceMin:
1093 return VisitReduceOperator(delegateData,
1094 tfLiteContext,
1095 tfLiteNode,
1096 nodeIndex,
1097 kTfLiteBuiltinReduceMin);
1098 case kTfLiteBuiltinReduceProd:
1099 return VisitReduceOperator(delegateData,
1100 tfLiteContext,
1101 tfLiteNode,
1102 nodeIndex,
1103 kTfLiteBuiltinReduceProd);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +01001104 case kTfLiteBuiltinRelu:
1105 return VisitActivationOperator(delegateData,
1106 tfLiteContext,
1107 tfLiteNode,
1108 nodeIndex,
1109 kTfLiteBuiltinRelu);
1110 case kTfLiteBuiltinReluN1To1:
1111 return VisitActivationOperator(delegateData,
1112 tfLiteContext,
1113 tfLiteNode,
1114 nodeIndex,
1115 kTfLiteBuiltinReluN1To1);
1116 case kTfLiteBuiltinRelu6:
1117 return VisitActivationOperator(delegateData,
1118 tfLiteContext,
1119 tfLiteNode,
1120 nodeIndex,
1121 kTfLiteBuiltinRelu6);
Matthew Sloyanc49aacc2023-04-28 17:27:26 +01001122 case kTfLiteBuiltinReshape:
1123 return VisitReshapeOperator(delegateData,
1124 tfLiteContext,
1125 tfLiteNode,
1126 nodeIndex,
1127 kTfLiteBuiltinReshape);
John Mcloughlin083586d2023-04-28 18:36:52 +01001128 case kTfLiteBuiltinResizeNearestNeighbor:
1129 return VisitResizeOperator(delegateData,
1130 tfLiteContext,
1131 tfLiteNode,
1132 nodeIndex,
1133 kTfLiteBuiltinResizeNearestNeighbor);
1134 case kTfLiteBuiltinResizeBilinear:
1135 return VisitResizeOperator(delegateData,
1136 tfLiteContext,
1137 tfLiteNode,
1138 nodeIndex,
1139 kTfLiteBuiltinResizeBilinear);
Tracy Narine7306bbe2023-07-17 16:06:26 +01001140 case kTfLiteBuiltinReverseV2:
1141 return VisitReverseV2Operator(delegateData,
1142 tfLiteContext,
1143 tfLiteNode,
1144 nodeIndex,
1145 kTfLiteBuiltinReverseV2);
Teresa Charlinf69ae562023-04-27 14:42:23 +01001146 case kTfLiteBuiltinRsqrt:
1147 return VisitElementwiseUnaryOperator(delegateData,
1148 tfLiteContext,
1149 tfLiteNode,
1150 nodeIndex,
1151 kTfLiteBuiltinRsqrt,
1152 armnn::UnaryOperation::Rsqrt);
John Mcloughlin0422cf22023-04-27 16:55:00 +01001153 case kTfLiteBuiltinShape:
1154 return VisitShapeOperator(delegateData,
1155 tfLiteContext,
1156 tfLiteNode,
1157 nodeIndex,
1158 kTfLiteBuiltinShape);
Teresa Charlinf69ae562023-04-27 14:42:23 +01001159 case kTfLiteBuiltinSin:
1160 return VisitElementwiseUnaryOperator(delegateData,
1161 tfLiteContext,
1162 tfLiteNode,
1163 nodeIndex,
1164 kTfLiteBuiltinSin,
1165 armnn::UnaryOperation::Sin);
Teresa Charlin86b03572023-04-28 13:19:12 +01001166 case kTfLiteBuiltinSlice:
1167 return VisitSliceOperator(delegateData,
1168 tfLiteContext,
1169 tfLiteNode,
1170 nodeIndex,
1171 kTfLiteBuiltinSlice);
Teresa Charlin42362962023-04-28 14:23:33 +01001172 case kTfLiteBuiltinSoftmax:
1173 return VisitSoftmaxOperator(delegateData,
1174 tfLiteContext,
1175 tfLiteNode,
1176 nodeIndex,
1177 kTfLiteBuiltinSoftmax);
Kevin May81b66f32023-04-26 14:55:36 +01001178 case kTfLiteBuiltinSpaceToBatchNd:
1179 return VisitSpaceToBatchNdOperator(delegateData,
1180 tfLiteContext,
1181 tfLiteNode,
1182 nodeIndex,
1183 kTfLiteBuiltinSpaceToBatchNd);
Teresa Charlin42362962023-04-28 14:23:33 +01001184 case kTfLiteBuiltinSpaceToDepth:
1185 return VisitSpaceToDepthOperator(delegateData,
1186 tfLiteContext,
1187 tfLiteNode,
1188 nodeIndex,
1189 kTfLiteBuiltinSpaceToDepth);
David Monahanc833cef2023-05-03 15:53:03 +01001190 case kTfLiteBuiltinSplit:
1191 return VisitSplitOperator(delegateData,
1192 tfLiteContext,
1193 tfLiteNode,
1194 nodeIndex,
1195 kTfLiteBuiltinSplit);
1196 case kTfLiteBuiltinSplitV:
1197 return VisitSplitVOperator(delegateData,
1198 tfLiteContext,
1199 tfLiteNode,
1200 nodeIndex,
1201 kTfLiteBuiltinSplitV);
John Mcloughlin0ec00872023-05-15 17:03:49 +01001202 case kTfLiteBuiltinSquaredDifference:
1203 return VisitElementwiseBinaryOperator(delegateData,
1204 tfLiteContext,
1205 tfLiteNode,
1206 nodeIndex,
1207 kTfLiteBuiltinSquaredDifference);
David Monahan6c53f9f2023-04-27 15:21:19 +01001208 case kTfLiteBuiltinSub:
1209 return VisitElementwiseBinaryOperator(delegateData,
1210 tfLiteContext,
1211 tfLiteNode,
1212 nodeIndex,
1213 kTfLiteBuiltinSub);
Teresa Charlinf69ae562023-04-27 14:42:23 +01001214 case kTfLiteBuiltinSqrt:
1215 return VisitElementwiseUnaryOperator(delegateData,
1216 tfLiteContext,
1217 tfLiteNode,
1218 nodeIndex,
1219 kTfLiteBuiltinSqrt,
1220 armnn::UnaryOperation::Sqrt);
Matthew Sloyan3504e422023-05-03 13:53:02 +01001221 case kTfLiteBuiltinSqueeze:
1222 return VisitSqueezeOperator(delegateData,
1223 tfLiteContext,
1224 tfLiteNode,
1225 nodeIndex,
1226 kTfLiteBuiltinSqueeze);
Teresa Charlin86b03572023-04-28 13:19:12 +01001227 case kTfLiteBuiltinStridedSlice:
1228 return VisitStridedSliceOperator(delegateData,
1229 tfLiteContext,
1230 tfLiteNode,
1231 nodeIndex,
1232 kTfLiteBuiltinStridedSlice);
John Mcloughlin083586d2023-04-28 18:36:52 +01001233 case kTfLiteBuiltinSum:
1234 return VisitReduceOperator(delegateData,
1235 tfLiteContext,
1236 tfLiteNode,
1237 nodeIndex,
1238 kTfLiteBuiltinSum);
Matthew Sloyan0bd4c622023-04-27 11:48:26 +01001239 case kTfLiteBuiltinTanh:
1240 return VisitActivationOperator(delegateData,
1241 tfLiteContext,
1242 tfLiteNode,
1243 nodeIndex,
1244 kTfLiteBuiltinTanh);
Tianle Cheng92ce35c2023-07-25 16:41:00 +01001245 case kTfLiteBuiltinTile:
1246 return VisitTileOperator(delegateData,
1247 tfLiteContext,
1248 tfLiteNode,
1249 nodeIndex,
1250 kTfLiteBuiltinTile);
Teresa Charlin42362962023-04-28 14:23:33 +01001251 case kTfLiteBuiltinTranspose:
1252 return VisitTransposeOperator(delegateData,
Tianle Cheng92ce35c2023-07-25 16:41:00 +01001253 tfLiteContext,
1254 tfLiteNode,
1255 nodeIndex,
1256 kTfLiteBuiltinTranspose);
Francis Murtagh3a9e7ba2023-04-26 15:58:39 +01001257 case kTfLiteBuiltinTransposeConv:
1258 return VisitConvolutionOperator(delegateData,
1259 tfLiteContext,
1260 tfLiteNode,
1261 nodeIndex,
1262 kTfLiteBuiltinTransposeConv);
Matthew Sloyan74be13e2023-05-03 17:34:00 +01001263 case kTfLiteBuiltinUnidirectionalSequenceLstm:
1264 return VisitUnidirectionalSequenceLstmOperator(delegateData,
1265 tfLiteContext,
1266 tfLiteNode,
1267 nodeIndex,
1268 kTfLiteBuiltinUnidirectionalSequenceLstm);
Teresa Charlinecebb0f2023-04-27 21:37:56 +01001269 case kTfLiteBuiltinUnpack:
1270 return VisitUnpackOperator(delegateData,
1271 tfLiteContext,
1272 tfLiteNode,
1273 nodeIndex,
1274 kTfLiteBuiltinUnpack);
Ryan OSheaac9607f2023-04-03 11:33:33 +01001275 default:
1276 return kTfLiteError;
1277 }
1278}
Francis Murtagh3a9e7ba2023-04-26 15:58:39 +01001279} // armnnOpaqueDelegate namespace