//
// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn_delegate.hpp>
#include <OpaqueDelegateUtils.hpp>

#include "Activation.hpp"
#include "ArgMinMax.hpp"
#include "BatchMatMul.hpp"
#include "BatchSpace.hpp"
#include "Comparison.hpp"
#include "Convolution.hpp"
#include "Control.hpp"
#include "ElementwiseBinary.hpp"
#include "ElementwiseUnary.hpp"
#include "Fill.hpp"
#include "FullyConnected.hpp"
#include "Gather.hpp"
#include "GatherNd.hpp"
#include "LogicalBinary.hpp"
#include "Lstm.hpp"
#include "Normalization.hpp"
#include "Pack.hpp"
#include "Pad.hpp"
#include "Pooling.hpp"
#include "Prelu.hpp"
#include "Quantization.hpp"
#include "Redefine.hpp"
#include "Reduce.hpp"
#include "Resize.hpp"
#include "ReverseV2.hpp"
#include "Round.hpp"
#include "Shape.hpp"
#include "Slice.hpp"
#include "StridedSlice.hpp"
#include "Softmax.hpp"
#include "SpaceDepth.hpp"
#include "Split.hpp"
#include "Tile.hpp"
#include "Transpose.hpp"
#include "UnidirectionalSequenceLstm.hpp"
#include "Unpack.hpp"

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnnUtils/Filesystem.hpp>
#include <armnn/utility/Timer.hpp>
#include <flatbuffers/flatbuffers.h>
#include <tensorflow/lite/context_util.h>
#include <tensorflow/lite/schema/schema_generated.h>
#include <tensorflow/lite/minimal_logging.h>

#include <algorithm>
#include <iostream>
#include <sstream>

namespace armnnOpaqueDelegate
{

static auto* g_delegate_plugin_ArmnnDelegatePlugin_ =
    new tflite::delegates::DelegatePluginRegistry::Register("armnn_delegate",
                                                            ArmnnDelegatePlugin::New);

ArmnnOpaqueDelegate::ArmnnOpaqueDelegate(armnnDelegate::DelegateOptions options)
    : m_Options(std::move(options))
{
    // Configures logging for ARMNN
    if (m_Options.IsLoggingEnabled())
    {
        armnn::ConfigureLogging(true, true, m_Options.GetLoggingSeverity());
    }
    // Create/Get the static ArmNN Runtime. Note that m_Runtime will be shared by all armnn_delegate
    // instances so the RuntimeOptions cannot be altered for different armnn_delegate instances.
    m_Runtime = GetRuntime(m_Options.GetRuntimeOptions());
    std::vector<armnn::BackendId> backends;
    if (m_Runtime)
    {
        const armnn::BackendIdSet supportedDevices = m_Runtime->GetDeviceSpec().GetSupportedBackends();
        for (auto& backend : m_Options.GetBackends())
        {
            if (std::find(supportedDevices.cbegin(), supportedDevices.cend(), backend) == supportedDevices.cend())
            {
                TFLITE_LOG_PROD(tflite::TFLITE_LOG_INFO,
                                "TfLiteArmnnOpaqueDelegate: Requested unknown backend %s", backend.Get().c_str());
            }
            else
            {
                backends.push_back(backend);
            }
        }
    }

    if (backends.empty())
    {
        // No known backend specified
        throw armnn::InvalidArgumentException("TfLiteArmnnOpaqueDelegate: No known backend specified.");
    }
    m_Options.SetBackends(backends);

    TFLITE_LOG_PROD_ONCE(tflite::TFLITE_LOG_INFO, "TfLiteArmnnOpaqueDelegate: Created TfLite ArmNN delegate.");
}
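
// The constructor above filters the requested backends against what the shared runtime supports:
// unknown backend names are logged and dropped, and InvalidArgumentException is thrown only if none
// remain. A minimal, illustrative usage sketch (not part of this file's API; it assumes the public
// armnnDelegate::DelegateOptions constructor and SetBackends() overloads shown here are available):
//
//     armnnDelegate::DelegateOptions options(armnn::Compute::GpuAcc);
//     options.SetBackends({ "GpuAcc", "CpuRef" });                  // preferred order, CpuRef as fallback
//     armnnOpaqueDelegate::ArmnnOpaqueDelegate delegate(options);   // throws if no known backend remains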

TfLiteStatus DoPrepare(TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueDelegate* tfLiteDelegate, void* data)
{
    // We are required to have the void* data parameter in the function signature, but we don't actually use it.
    armnn::IgnoreUnused(data);

    TfLiteIntArray* supportedOperators =
        static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>
            (TfLiteOpaqueDelegateGetData(tfLiteDelegate))->IdentifyOperatorsToDelegate(tfLiteContext);
    if (supportedOperators == nullptr)
    {
        return kTfLiteError;
    }

    // ArmNN Opaque Delegate Registration
    TfLiteRegistrationExternal* kernelRegistration =
        TfLiteRegistrationExternalCreate(kTfLiteBuiltinDelegate,
                                         "armnn_delegate",
                                         /*version=*/OPAQUE_DELEGATE_MAJOR_VERSION);
    if (kernelRegistration == nullptr)
    {
        return kTfLiteError;
    }

    TfLiteRegistrationExternalSetInit(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, const char* buffer, size_t length) -> void*
        {
            armnn::IgnoreUnused(length);
            const TfLiteOpaqueDelegateParams* parameters =
                reinterpret_cast<const TfLiteOpaqueDelegateParams*>(buffer);
            if (parameters == nullptr)
            {
                TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                          "TfLiteArmnnOpaqueDelegate: Unable to get parameters.");
                return nullptr;
            }

            return static_cast<void*>(
                ArmnnSubgraph::Create(tfLiteContext,
                                      parameters,
                                      static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>(
                                          parameters->delegate->opaque_delegate_builder->data)));
        }
    );

    TfLiteRegistrationExternalSetFree(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, void* buffer) -> void
        {
            armnn::IgnoreUnused(tfLiteContext);
            if (buffer != nullptr)
            {
                delete static_cast<ArmnnSubgraph*>(buffer);
            }
        }
    );

    TfLiteRegistrationExternalSetPrepare(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode) -> TfLiteStatus
        {
            void* userData = TfLiteOpaqueNodeGetUserData(tfLiteNode);
            if (userData == nullptr)
            {
                return kTfLiteError;
            }
            return static_cast<ArmnnSubgraph*>(userData)->Prepare(tfLiteContext);
        }
    );

    TfLiteRegistrationExternalSetInvoke(
        kernelRegistration,
        [](TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode) -> TfLiteStatus
        {
            void* userData = TfLiteOpaqueNodeGetUserData(tfLiteNode);
            if (userData == nullptr)
            {
                return kTfLiteError;
            }

            return static_cast<ArmnnSubgraph*>(userData)->Invoke(tfLiteContext, tfLiteNode);
        }
    );

    const TfLiteStatus status =
        TfLiteOpaqueContextReplaceNodeSubsetsWithDelegateKernels(
            tfLiteContext, kernelRegistration, supportedOperators, tfLiteDelegate);

    TfLiteIntArrayFree(supportedOperators);
    return status;
}
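
// For context: DoPrepare matches the TfLiteOpaqueDelegateBuilder::Prepare callback signature, so the
// delegate object built by TfLiteArmnnOpaqueDelegateCreate (below) is expected to wire it up roughly as
// in the following sketch. This is illustrative only; the actual wiring lives in
// ArmnnOpaqueDelegate::GetDelegateBuilder(), and the field names assume TensorFlow Lite's
// TfLiteOpaqueDelegateBuilder struct:
//
//     TfLiteOpaqueDelegateBuilder builder{};
//     builder.data    = armnnDelegate;   // retrieved later via TfLiteOpaqueDelegateGetData()
//     builder.Prepare = DoPrepare;       // called by TfLite when the delegate is applied to a graph
//     TfLiteOpaqueDelegate* tfLiteDelegate = TfLiteOpaqueDelegateCreate(&builder);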

TfLiteOpaqueDelegate* TfLiteArmnnOpaqueDelegateCreate(const void* settings)
{
    // This method will always create an Opaque Delegate with default settings until
    // there is a DelegateOptions constructor that can parse the void* settings.
    armnn::IgnoreUnused(settings);
    auto options = TfLiteArmnnDelegateOptionsDefault();
    auto* armnnDelegate = new ::armnnOpaqueDelegate::ArmnnOpaqueDelegate(options);
    return TfLiteOpaqueDelegateCreate(armnnDelegate->GetDelegateBuilder());
}

::armnnDelegate::DelegateOptions TfLiteArmnnDelegateOptionsDefault()
{
    ::armnnDelegate::DelegateOptions options(armnn::Compute::CpuRef);
    return options;
}

void TfLiteArmnnOpaqueDelegateDelete(TfLiteOpaqueDelegate* tfLiteDelegate)
{
    if (tfLiteDelegate != nullptr)
    {
        delete static_cast<::armnnOpaqueDelegate::ArmnnOpaqueDelegate*>(TfLiteOpaqueDelegateGetData(tfLiteDelegate));
        TfLiteOpaqueDelegateDelete(tfLiteDelegate);
    }
}
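
// Typical lifecycle of the three entry points above, shown as an illustrative sketch only. It assumes
// the TensorFlow Lite C API (TfLiteInterpreterOptionsAddDelegate et al.); error handling is omitted and
// interpreter/model creation is outside the scope of this file:
//
//     TfLiteOpaqueDelegate* armnnDelegate = TfLiteArmnnOpaqueDelegateCreate(/*settings=*/nullptr);
//     TfLiteInterpreterOptionsAddDelegate(interpreterOptions, armnnDelegate);
//     // ... build the interpreter from interpreterOptions, allocate tensors, invoke ...
//     TfLiteArmnnOpaqueDelegateDelete(armnnDelegate);   // only after the interpreter has been destroyed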

const std::string ArmnnOpaqueDelegate::GetVersion()
{
    return OPAQUE_DELEGATE_VERSION;
}

TfLiteIntArray* ArmnnOpaqueDelegate::IdentifyOperatorsToDelegate(TfLiteOpaqueContext* tfLiteContext)
{
    TfLiteIntArray* executionPlan = nullptr;
    if (TfLiteOpaqueContextGetExecutionPlan(tfLiteContext, &executionPlan) != kTfLiteOk)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext, "TfLiteArmnnOpaqueDelegate: Unable to get graph execution plan.");
        return nullptr;
    }

    // Delegate data with null network
    DelegateData delegateData(m_Options.GetBackends());

    TfLiteIntArray* nodesToDelegate = TfLiteIntArrayCreate(executionPlan->size);
    if (nodesToDelegate == nullptr)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                  "TfLiteArmnnOpaqueDelegate: Unable to create int array from execution plan.");
        return nullptr;
    }
    nodesToDelegate->size = 0;

    std::set<int32_t> unsupportedOperators;

    for (int i = 0; i < executionPlan->size; ++i)
    {
        const int nodeIndex = executionPlan->data[i];

        // If TfLiteOpaqueNodes can be delegated to ArmNN
        TfLiteOpaqueNode* tfLiteNode = nullptr;
        TfLiteRegistrationExternal* tfLiteRegistration = nullptr;

        if (TfLiteOpaqueContextGetNodeAndRegistration(
                tfLiteContext, nodeIndex, &tfLiteNode, &tfLiteRegistration) != kTfLiteOk)
        {
            TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                      "TfLiteArmnnOpaqueDelegate: Unable to get node and registration for node %d.",
                                      nodeIndex);
            continue;
        }

        TfLiteStatus visitStatus;
        try
        {
            visitStatus = ArmnnSubgraph::VisitNode(
                delegateData, tfLiteContext, tfLiteRegistration, tfLiteNode, nodeIndex);
        }
        catch (std::exception& ex)
        {
            ARMNN_LOG(error) << "ArmNN Failed to visit node with error: " << ex.what();
            visitStatus = kTfLiteError;
        }

        if (visitStatus != kTfLiteOk)
        {
            // Node is not supported by ArmNN
            unsupportedOperators.insert(TfLiteRegistrationExternalGetBuiltInCode(tfLiteRegistration));
            continue;
        }

        nodesToDelegate->data[nodesToDelegate->size++] = nodeIndex;
    }

    for (std::set<int32_t>::iterator it = unsupportedOperators.begin(); it != unsupportedOperators.end(); ++it)
    {
        TF_LITE_OPAQUE_KERNEL_LOG(tfLiteContext,
                                  "Operator %s [%d] is not supported by armnn_opaque_delegate.",
                                  tflite::EnumNameBuiltinOperator(tflite::BuiltinOperator(*it)),
                                  *it);
    }

    if (!unsupportedOperators.empty() && m_Options.TfLiteRuntimeFallbackDisabled())
    {
        std::stringstream exMessage;
        exMessage << "TfLiteArmnnOpaqueDelegate: There are unsupported operators in the model. ";
        exMessage << "Not falling back to TfLite Runtime as fallback is disabled. ";
        exMessage << "This should only be disabled under test conditions.";
        throw armnn::Exception(exMessage.str());
    }
    if (nodesToDelegate->size == 0)
    {
        ARMNN_LOG(info) << "No operators in this model are supported by the Arm NN TfLite delegate."
                        << " The model will be executed entirely by TfLite runtime.";
    }

    std::sort(&nodesToDelegate->data[0], &nodesToDelegate->data[nodesToDelegate->size]);
    return nodesToDelegate;
}

TfLiteStatus ArmnnSubgraph::AddInputLayer(DelegateData& delegateData,
                                          TfLiteOpaqueContext* tfLiteContext,
                                          const TfLiteIntArray* inputs,
                                          std::vector<armnn::BindingPointInfo>& inputBindings)
{
    const size_t numInputs = static_cast<size_t>(inputs->size);
    for (unsigned int i = 0; i < numInputs; ++i)
    {
        const int32_t tensorId = inputs->data[i];
        const TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, tensorId);

        if (!tensor)
        {
            return kTfLiteError;
        }

        // Do not create bindings for constant inputs
        if (TfLiteOpaqueTensorGetAllocationType(tensor) == kTfLiteMmapRo)
        {
            continue;
        }

        auto bindingId = static_cast<armnn::LayerBindingId>(tensorId);
        armnn::IConnectableLayer* layer = delegateData.m_Network->AddInputLayer(bindingId);

        auto tensorInfo = GetTensorInfoForTfLiteOpaqueTensor(tensor);
        armnn::IOutputSlot& outputSlot = layer->GetOutputSlot(0);
        outputSlot.SetTensorInfo(tensorInfo);

        // Store for creating connections
        delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)] = &outputSlot;

        inputBindings.push_back(std::make_pair(bindingId, tensorInfo));
    }

    return kTfLiteOk;
}

TfLiteStatus ArmnnSubgraph::AddOutputLayer(DelegateData& delegateData,
                                           TfLiteOpaqueContext* tfLiteContext,
                                           const TfLiteIntArray* outputs,
                                           std::vector<armnn::BindingPointInfo>& outputBindings)
{
    const size_t numOutputs = static_cast<size_t>(outputs->size);
    for (unsigned int i = 0; i < numOutputs; ++i)
    {
        const int32_t tensorId = outputs->data[i];
        const TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, tensorId);

        if (!IsValid(tensor))
        {
            return kTfLiteError;
        }

        auto bindingId = static_cast<armnn::LayerBindingId>(tensorId);
        armnn::IConnectableLayer* layer = delegateData.m_Network->AddOutputLayer(bindingId);

        auto tensorInfo = GetTensorInfoForTfLiteOpaqueTensor(tensor);
        ARMNN_ASSERT(delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)] != nullptr);
        delegateData.m_OutputSlotForNode[static_cast<unsigned long>(tensorId)]->Connect(layer->GetInputSlot(0));
        outputBindings.push_back(std::make_pair(bindingId, tensorInfo));
    }

    return kTfLiteOk;
}

ArmnnSubgraph* ArmnnSubgraph::Create(TfLiteOpaqueContext* tfLiteContext,
                                     const TfLiteOpaqueDelegateParams* parameters,
                                     const ArmnnOpaqueDelegate* delegate)
{
    const auto startTime = armnn::GetTimeNow();
    ARMNN_LOG(info) << "ArmnnSubgraph creation";

    TfLiteIntArray* executionPlan;
    if (TfLiteOpaqueContextGetExecutionPlan(tfLiteContext, &executionPlan) != kTfLiteOk)
    {
        return nullptr;
    }

    // Initialize DelegateData, which holds the network and output slot information
    DelegateData delegateData(delegate->m_Options.GetBackends());

    // Build ArmNN Network
    armnn::NetworkOptions networkOptions = delegate->m_Options.GetOptimizerOptions().GetModelOptions();
    armnn::NetworkId networkId;
    delegateData.m_Network = armnn::INetwork::Create(networkOptions);

    delegateData.m_OutputSlotForNode = std::vector<armnn::IOutputSlot*>(
        TfLiteOpaqueContextGetNumTensors(tfLiteContext), nullptr);

    std::vector<armnn::BindingPointInfo> inputBindings;
    std::vector<armnn::BindingPointInfo> outputBindings;

    // Add input layer
    if (AddInputLayer(delegateData, tfLiteContext, parameters->input_tensors, inputBindings) != kTfLiteOk)
    {
        throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to add Inputs to the network!");
    }

    // Parse TfLite delegate nodes to ArmNN
    const auto parseStartTime = armnn::GetTimeNow();
    for (int i = 0; i < parameters->nodes_to_replace->size; ++i)
    {
        const int nodeIndex = parameters->nodes_to_replace->data[i];

        TfLiteOpaqueNode* tfLiteNode = nullptr;
        TfLiteRegistrationExternal* tfLiteRegistration = nullptr;
        if (TfLiteOpaqueContextGetNodeAndRegistration(
                tfLiteContext, nodeIndex, &tfLiteNode, &tfLiteRegistration) != kTfLiteOk)
        {
            throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to get node registration: " +
                                   std::to_string(nodeIndex));
        }

        if (VisitNode(delegateData, tfLiteContext, tfLiteRegistration, tfLiteNode, nodeIndex) != kTfLiteOk)
        {
            throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to parse node: " + std::to_string(nodeIndex));
        }
    }
    ARMNN_LOG(info) << "Parse nodes to ArmNN time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(parseStartTime).count() << " ms";

    // Add output layer
    if (AddOutputLayer(delegateData, tfLiteContext, parameters->output_tensors, outputBindings) != kTfLiteOk)
    {
        throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to add Outputs to the network!");
    }

    // Optimize ArmNN network
    armnn::IOptimizedNetworkPtr optNet(nullptr, nullptr);
    try
    {
        const auto optimizeStartTime = armnn::GetTimeNow();
        optNet = armnn::Optimize(*(delegateData.m_Network.get()),
                                 delegate->m_Options.GetBackends(),
                                 delegate->m_Runtime->GetDeviceSpec(),
                                 delegate->m_Options.GetOptimizerOptions());
        ARMNN_LOG(info) << "Optimize ArmnnSubgraph time: " << std::setprecision(2)
                        << std::fixed << armnn::GetTimeDuration(optimizeStartTime).count() << " ms";
    }
    catch (std::exception& ex)
    {
        std::stringstream exMessage;
        exMessage << "TfLiteArmnnOpaqueDelegate: Exception (" << ex.what() << ") caught from optimize.";
        throw armnn::Exception(exMessage.str());
    }
    if (!optNet)
    {
        // Optimize failed
        throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to optimize the network!");
    }

    // If set, we will serialize the optimized model into a dot file.
    const std::string serializeToDotFile = delegate->m_Options.GetSerializeToDot();
    if (!serializeToDotFile.empty())
    {
        ARMNN_LOG(info) << "Writing graph to dot file: " << serializeToDotFile;
        fs::path filename = serializeToDotFile;
        std::fstream file(filename.c_str(), std::ios_base::out);
        optNet->SerializeToDot(file);
    }

    try
    {
        const auto loadStartTime = armnn::GetTimeNow();

        // Load graph into runtime
        std::string errorMessage;
        armnn::Status loadingStatus;
        armnn::MemorySource inputSource = armnn::MemorySource::Undefined;
        armnn::MemorySource outputSource = armnn::MemorySource::Undefined;
        // There's a bit of an assumption here that the delegate will only support Malloc memory source.
        if (delegate->m_Options.GetOptimizerOptions().GetImportEnabled())
        {
            inputSource = armnn::MemorySource::Malloc;
        }
        if (delegate->m_Options.GetOptimizerOptions().GetExportEnabled())
        {
            outputSource = armnn::MemorySource::Malloc;
        }
        armnn::INetworkProperties networkProperties(false,
                                                    inputSource,
                                                    outputSource,
                                                    delegate->m_Options.GetInternalProfilingState(),
                                                    delegate->m_Options.GetInternalProfilingDetail());
        loadingStatus = delegate->m_Runtime->LoadNetwork(networkId,
                                                         std::move(optNet),
                                                         errorMessage,
                                                         networkProperties);
        if (loadingStatus != armnn::Status::Success)
        {
            // Network load failed.
            throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Network could not be loaded: " + errorMessage);
        }

        ARMNN_LOG(info) << "Load ArmnnSubgraph time: " << std::setprecision(2)
                        << std::fixed << armnn::GetTimeDuration(loadStartTime).count() << " ms";
    }
    catch (std::exception& ex)
    {
        std::stringstream exMessage;
        exMessage << "TfLiteArmnnOpaqueDelegate: Exception (" << ex.what() << ") caught from LoadNetwork.";
        throw armnn::Exception(exMessage.str());
    }

    // Register debug callback function
    if (delegate->m_Options.GetDebugCallbackFunction().has_value())
    {
        delegate->m_Runtime->RegisterDebugCallback(networkId, delegate->m_Options.GetDebugCallbackFunction().value());
    }

    ARMNN_LOG(info) << "Overall ArmnnSubgraph creation time: " << std::setprecision(2)
                    << std::fixed << armnn::GetTimeDuration(startTime).count() << " ms\n";

    // Create a new SubGraph with networkId and runtime
    return new ArmnnSubgraph(networkId, delegate->m_Runtime, inputBindings, outputBindings);
}

TfLiteStatus ArmnnSubgraph::Prepare(TfLiteOpaqueContext* tfLiteContext)
{
    armnn::IgnoreUnused(tfLiteContext);
    return kTfLiteOk;
}

TfLiteStatus ArmnnSubgraph::Invoke(TfLiteOpaqueContext* tfLiteContext, TfLiteOpaqueNode* tfLiteNode)
{
    // Get the array of input indices. TfLiteOpaqueNodeInputs sets inputIndexArray to point to an int
    // array of tensor indices, one for each input slot in the node.
    const int* inputIndexArray;
    int numInputs;
    if (TfLiteOpaqueNodeInputs(tfLiteNode, &inputIndexArray, &numInputs) != kTfLiteOk)
    {
        throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to load subgraph inputs!");
    }
    // Prepare inputs
    armnn::InputTensors inputTensors;
    size_t inputIndex = 0;
    for (int inputIdx = 0; inputIdx < numInputs; inputIdx++)
    {
        TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, inputIndexArray[inputIdx]);

        if (!IsValid(tensor))
        {
            return kTfLiteError;
        }
        // If tensor is not read only
        if (TfLiteOpaqueTensorGetAllocationType(tensor) != kTfLiteMmapRo)
        {
            const armnn::BindingPointInfo& inputBinding = m_InputBindings[inputIndex];
            armnn::TensorInfo inputTensorInfo = inputBinding.second;
            inputTensorInfo.SetConstant(true);
            const armnn::ConstTensor inputTensor(inputTensorInfo, TfLiteOpaqueTensorData(tensor));
            inputTensors.emplace_back(inputIndexArray[inputIdx], inputTensor);

            ++inputIndex;
        }
    }

    // Get the array of output indices. TfLiteOpaqueNodeOutputs sets outputIndexArray to point to an int
    // array of tensor indices, one for each output slot in the node.
    const int* outputIndexArray;
    int numOutputs;
    if (TfLiteOpaqueNodeOutputs(tfLiteNode, &outputIndexArray, &numOutputs) != kTfLiteOk)
    {
        throw armnn::Exception("TfLiteArmnnOpaqueDelegate: Unable to load subgraph outputs!");
    }
    // Assign the tensors from the outputIndexArray to the armnn BindingPointInfo
    armnn::OutputTensors outputTensors;
    for (int outputIdx = 0; outputIdx < numOutputs; outputIdx++)
    {
        const armnn::BindingPointInfo& outputBinding = m_OutputBindings[outputIdx];
        TfLiteOpaqueTensor* tensor = TfLiteOpaqueContextGetOpaqueTensor(tfLiteContext, outputIndexArray[outputIdx]);
        if (!IsValid(tensor))
        {
            return kTfLiteError;
        }

        const armnn::Tensor outputTensor(outputBinding.second, TfLiteOpaqueTensorData(tensor));
        outputTensors.emplace_back(outputIndexArray[outputIdx], outputTensor);
    }

    // Run graph
    auto status = m_Runtime->EnqueueWorkload(m_NetworkId, inputTensors, outputTensors);
    // The delegate holds its own Arm NN runtime so this is our last chance to print internal profiling data.
    std::shared_ptr<armnn::IProfiler> profiler = m_Runtime->GetProfiler(m_NetworkId);
    if (profiler && profiler->IsProfilingEnabled())
    {
        profiler->Print(std::cout);
    }
    return (status == armnn::Status::Success) ? kTfLiteOk : kTfLiteError;
}

TfLiteStatus ArmnnSubgraph::VisitNode(DelegateData& delegateData,
                                      TfLiteOpaqueContext* tfLiteContext,
                                      TfLiteRegistrationExternal* tfLiteRegistration,
                                      TfLiteOpaqueNode* tfLiteNode,
                                      int nodeIndex)
{
    switch (TfLiteRegistrationExternalGetBuiltInCode(tfLiteRegistration))
    {
        case kTfLiteBuiltinAbs:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinAbs,
                                                 armnn::UnaryOperation::Abs);
        case kTfLiteBuiltinAdd:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinAdd);
        case kTfLiteBuiltinArgMax:
            return VisitArgMinMaxOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinArgMax);
        case kTfLiteBuiltinArgMin:
            return VisitArgMinMaxOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinArgMin);
        case kTfLiteBuiltinAveragePool2d:
            return VisitPooling2dOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinAveragePool2d);
        case kTfLiteBuiltinBatchMatmul:
            return VisitBatchMatMulOperator(delegateData,
                                            tfLiteContext,
                                            tfLiteNode,
                                            nodeIndex,
                                            kTfLiteBuiltinBatchMatmul);
        case kTfLiteBuiltinBatchToSpaceNd:
            return VisitBatchToSpaceNdOperator(delegateData,
                                               tfLiteContext,
                                               tfLiteNode,
                                               nodeIndex,
                                               kTfLiteBuiltinBatchToSpaceNd);
        case kTfLiteBuiltinCast:
            return VisitCastOperator(delegateData,
                                     tfLiteContext,
                                     tfLiteNode,
                                     nodeIndex,
                                     kTfLiteBuiltinCast);
        case kTfLiteBuiltinCeil:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinCeil,
                                                 armnn::UnaryOperation::Ceil);
        case kTfLiteBuiltinConcatenation:
            return VisitControlOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinConcatenation);
        case kTfLiteBuiltinConv2d:
            return VisitConvolutionOperator(delegateData,
                                            tfLiteContext,
                                            tfLiteNode,
                                            nodeIndex,
                                            kTfLiteBuiltinConv2d);
        case kTfLiteBuiltinConv3d:
            return VisitConvolutionOperator(delegateData,
                                            tfLiteContext,
                                            tfLiteNode,
                                            nodeIndex,
                                            kTfLiteBuiltinConv3d);
        case kTfLiteBuiltinCustom:
        {
            // Custom operators are defined by the name rather than the builtin code.
            // Parse the custom_name param in the registration to point to the correct visitor function.
            std::string customOperatorName = TfLiteRegistrationExternalGetCustomName(tfLiteRegistration);
            if (customOperatorName == "AveragePool3D")
            {
                return VisitPooling3dOperator(delegateData,
                                              tfLiteContext,
                                              tfLiteNode,
                                              nodeIndex,
                                              customOperatorName);
            }
            else if (customOperatorName == "MaxPool3D")
            {
                return VisitPooling3dOperator(delegateData,
                                              tfLiteContext,
                                              tfLiteNode,
                                              nodeIndex,
                                              customOperatorName);
            }
            // Invalid or unsupported custom operator
            return kTfLiteError;
        }
        case kTfLiteBuiltinDepthwiseConv2d:
            return VisitConvolutionOperator(delegateData,
                                            tfLiteContext,
                                            tfLiteNode,
                                            nodeIndex,
                                            kTfLiteBuiltinDepthwiseConv2d);
        case kTfLiteBuiltinDequantize:
            return VisitDequantizeOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinDequantize);
        case kTfLiteBuiltinDiv:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinDiv);
        case kTfLiteBuiltinEqual:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinEqual,
                                           armnn::ComparisonOperation::Equal);
        case kTfLiteBuiltinDepthToSpace:
            return VisitDepthToSpaceOperator(delegateData,
                                             tfLiteContext,
                                             tfLiteNode,
                                             nodeIndex,
                                             kTfLiteBuiltinDepthToSpace);
        case kTfLiteBuiltinElu:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinElu);
        case kTfLiteBuiltinExp:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinExp,
                                                 armnn::UnaryOperation::Exp);
        case kTfLiteBuiltinExpandDims:
            return VisitExpandDimsOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinExpandDims);
        case kTfLiteBuiltinFill:
            return VisitFillOperator(delegateData,
                                     tfLiteContext,
                                     tfLiteNode,
                                     nodeIndex,
                                     kTfLiteBuiltinFill);
        case kTfLiteBuiltinFloor:
            return VisitFloorOperator(delegateData,
                                      tfLiteContext,
                                      tfLiteNode,
                                      nodeIndex,
                                      kTfLiteBuiltinFloor);
        case kTfLiteBuiltinFloorDiv:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinFloorDiv);
        case kTfLiteBuiltinFullyConnected:
            return VisitFullyConnectedOperator(delegateData,
                                               tfLiteContext,
                                               tfLiteNode,
                                               nodeIndex,
                                               kTfLiteBuiltinFullyConnected);
        case kTfLiteBuiltinGather:
            return VisitGatherOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinGather);
        case kTfLiteBuiltinGatherNd:
            return VisitGatherNdOperator(delegateData,
                                         tfLiteContext,
                                         tfLiteNode,
                                         nodeIndex,
                                         kTfLiteBuiltinGatherNd);
        case kTfLiteBuiltinGreater:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinGreater,
                                           armnn::ComparisonOperation::Greater);
        case kTfLiteBuiltinGreaterEqual:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinGreaterEqual,
                                           armnn::ComparisonOperation::GreaterOrEqual);
        case kTfLiteBuiltinHardSwish:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinHardSwish);
        case kTfLiteBuiltinL2Normalization:
            return VisitL2NormalizationOperator(delegateData,
                                                tfLiteContext,
                                                tfLiteNode,
                                                nodeIndex,
                                                kTfLiteBuiltinL2Normalization);
        case kTfLiteBuiltinL2Pool2d:
            return VisitPooling2dOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinL2Pool2d);
        case kTfLiteBuiltinLeakyRelu:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinLeakyRelu);
        case kTfLiteBuiltinLess:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinLess,
                                           armnn::ComparisonOperation::Less);
        case kTfLiteBuiltinLessEqual:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinLessEqual,
                                           armnn::ComparisonOperation::LessOrEqual);
        case kTfLiteBuiltinLogistic:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinLogistic);
        case kTfLiteBuiltinLocalResponseNormalization:
            return VisitLocalResponseNormalizationOperator(delegateData,
                                                           tfLiteContext,
                                                           tfLiteNode,
                                                           nodeIndex,
                                                           kTfLiteBuiltinLocalResponseNormalization);
        case kTfLiteBuiltinLog:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinLog,
                                                 armnn::UnaryOperation::Log);
        case kTfLiteBuiltinLogicalAnd:
            return VisitLogicalBinaryOperator(delegateData,
                                              tfLiteContext,
                                              tfLiteNode,
                                              nodeIndex,
                                              kTfLiteBuiltinLogicalAnd,
                                              armnn::LogicalBinaryOperation::LogicalAnd);
        case kTfLiteBuiltinLogicalNot:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinLogicalNot,
                                                 armnn::UnaryOperation::LogicalNot);
        case kTfLiteBuiltinLogicalOr:
            return VisitLogicalBinaryOperator(delegateData,
                                              tfLiteContext,
                                              tfLiteNode,
                                              nodeIndex,
                                              kTfLiteBuiltinLogicalOr,
                                              armnn::LogicalBinaryOperation::LogicalOr);
        case kTfLiteBuiltinLogSoftmax:
            return VisitSoftmaxOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinLogSoftmax);
        case kTfLiteBuiltinLstm:
            return VisitLstmOperator(delegateData,
                                     tfLiteContext,
                                     tfLiteNode,
                                     nodeIndex,
                                     kTfLiteBuiltinLstm);
        case kTfLiteBuiltinMaxPool2d:
            return VisitPooling2dOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinMaxPool2d);
        case kTfLiteBuiltinMaximum:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinMaximum);
        case kTfLiteBuiltinMean:
            return VisitControlOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinMean);
        case kTfLiteBuiltinMinimum:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinMinimum);
        case kTfLiteBuiltinMirrorPad:
            return VisitPadOperator(delegateData,
                                    tfLiteContext,
                                    tfLiteNode,
                                    nodeIndex,
                                    kTfLiteBuiltinMirrorPad);
        case kTfLiteBuiltinMul:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinMul);
        case kTfLiteBuiltinNeg:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinNeg,
                                                 armnn::UnaryOperation::Neg);
        case kTfLiteBuiltinNotEqual:
            return VisitComparisonOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinNotEqual,
                                           armnn::ComparisonOperation::NotEqual);
        case kTfLiteBuiltinPack:
            return VisitPackOperator(delegateData,
                                     tfLiteContext,
                                     tfLiteNode,
                                     nodeIndex,
                                     kTfLiteBuiltinPack);
        case kTfLiteBuiltinPad:
            return VisitPadOperator(delegateData,
                                    tfLiteContext,
                                    tfLiteNode,
                                    nodeIndex,
                                    kTfLiteBuiltinPad);
        case kTfLiteBuiltinPadv2:
            return VisitPadOperator(delegateData,
                                    tfLiteContext,
                                    tfLiteNode,
                                    nodeIndex,
                                    kTfLiteBuiltinPadv2);
        case kTfLiteBuiltinPow:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinPow);
        case kTfLiteBuiltinPrelu:
            return VisitPreluOperator(delegateData,
                                      tfLiteContext,
                                      tfLiteNode,
                                      nodeIndex,
                                      kTfLiteBuiltinPrelu);
        case kTfLiteBuiltinQuantize:
            return VisitQuantizeOperator(delegateData,
                                         tfLiteContext,
                                         tfLiteNode,
                                         nodeIndex,
                                         kTfLiteBuiltinQuantize);
        case kTfLiteBuiltinReduceMax:
            return VisitReduceOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinReduceMax);
        case kTfLiteBuiltinReduceMin:
            return VisitReduceOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinReduceMin);
        case kTfLiteBuiltinReduceProd:
            return VisitReduceOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinReduceProd);
        case kTfLiteBuiltinRelu:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinRelu);
        case kTfLiteBuiltinReluN1To1:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinReluN1To1);
        case kTfLiteBuiltinRelu6:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinRelu6);
        case kTfLiteBuiltinReshape:
            return VisitReshapeOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinReshape);
        case kTfLiteBuiltinResizeNearestNeighbor:
            return VisitResizeOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinResizeNearestNeighbor);
        case kTfLiteBuiltinResizeBilinear:
            return VisitResizeOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinResizeBilinear);
        case kTfLiteBuiltinReverseV2:
            return VisitReverseV2Operator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinReverseV2);
        case kTfLiteBuiltinRsqrt:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinRsqrt,
                                                 armnn::UnaryOperation::Rsqrt);
        case kTfLiteBuiltinShape:
            return VisitShapeOperator(delegateData,
                                      tfLiteContext,
                                      tfLiteNode,
                                      nodeIndex,
                                      kTfLiteBuiltinShape);
        case kTfLiteBuiltinSin:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinSin,
                                                 armnn::UnaryOperation::Sin);
        case kTfLiteBuiltinSlice:
            return VisitSliceOperator(delegateData,
                                      tfLiteContext,
                                      tfLiteNode,
                                      nodeIndex,
                                      kTfLiteBuiltinSlice);
        case kTfLiteBuiltinSoftmax:
            return VisitSoftmaxOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinSoftmax);
        case kTfLiteBuiltinSpaceToBatchNd:
            return VisitSpaceToBatchNdOperator(delegateData,
                                               tfLiteContext,
                                               tfLiteNode,
                                               nodeIndex,
                                               kTfLiteBuiltinSpaceToBatchNd);
        case kTfLiteBuiltinSpaceToDepth:
            return VisitSpaceToDepthOperator(delegateData,
                                             tfLiteContext,
                                             tfLiteNode,
                                             nodeIndex,
                                             kTfLiteBuiltinSpaceToDepth);
        case kTfLiteBuiltinSplit:
            return VisitSplitOperator(delegateData,
                                      tfLiteContext,
                                      tfLiteNode,
                                      nodeIndex,
                                      kTfLiteBuiltinSplit);
        case kTfLiteBuiltinSplitV:
            return VisitSplitVOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinSplitV);
        case kTfLiteBuiltinSquaredDifference:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinSquaredDifference);
        case kTfLiteBuiltinSub:
            return VisitElementwiseBinaryOperator(delegateData,
                                                  tfLiteContext,
                                                  tfLiteNode,
                                                  nodeIndex,
                                                  kTfLiteBuiltinSub);
        case kTfLiteBuiltinSqrt:
            return VisitElementwiseUnaryOperator(delegateData,
                                                 tfLiteContext,
                                                 tfLiteNode,
                                                 nodeIndex,
                                                 kTfLiteBuiltinSqrt,
                                                 armnn::UnaryOperation::Sqrt);
        case kTfLiteBuiltinSqueeze:
            return VisitSqueezeOperator(delegateData,
                                        tfLiteContext,
                                        tfLiteNode,
                                        nodeIndex,
                                        kTfLiteBuiltinSqueeze);
        case kTfLiteBuiltinStridedSlice:
            return VisitStridedSliceOperator(delegateData,
                                             tfLiteContext,
                                             tfLiteNode,
                                             nodeIndex,
                                             kTfLiteBuiltinStridedSlice);
        case kTfLiteBuiltinSum:
            return VisitReduceOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinSum);
        case kTfLiteBuiltinTanh:
            return VisitActivationOperator(delegateData,
                                           tfLiteContext,
                                           tfLiteNode,
                                           nodeIndex,
                                           kTfLiteBuiltinTanh);
        case kTfLiteBuiltinTile:
            return VisitTileOperator(delegateData,
                                     tfLiteContext,
                                     tfLiteNode,
                                     nodeIndex,
                                     kTfLiteBuiltinTile);
        case kTfLiteBuiltinTranspose:
            return VisitTransposeOperator(delegateData,
                                          tfLiteContext,
                                          tfLiteNode,
                                          nodeIndex,
                                          kTfLiteBuiltinTranspose);
        case kTfLiteBuiltinTransposeConv:
            return VisitConvolutionOperator(delegateData,
                                            tfLiteContext,
                                            tfLiteNode,
                                            nodeIndex,
                                            kTfLiteBuiltinTransposeConv);
        case kTfLiteBuiltinUnidirectionalSequenceLstm:
            return VisitUnidirectionalSequenceLstmOperator(delegateData,
                                                           tfLiteContext,
                                                           tfLiteNode,
                                                           nodeIndex,
                                                           kTfLiteBuiltinUnidirectionalSequenceLstm);
        case kTfLiteBuiltinUnpack:
            return VisitUnpackOperator(delegateData,
                                       tfLiteContext,
                                       tfLiteNode,
                                       nodeIndex,
                                       kTfLiteBuiltinUnpack);
        default:
            return kTfLiteError;
    }
}
} // armnnOpaqueDelegate namespace