//
// Copyright © 2023 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn_delegate.hpp>

#include <tensorflow/lite/c/common.h>
#include <tensorflow/lite/core/model.h>
#include <tensorflow/lite/interpreter.h>
#include <tensorflow/lite/kernels/register.h>

// Assumed locations of DelegatePluginRegistry and the generated TFLiteSettings;
// on some TF Lite versions these paths differ, or armnn_delegate.hpp already
// pulls them in transitively.
#include <tensorflow/lite/acceleration/configuration/configuration_generated.h>
#include <tensorflow/lite/acceleration/configuration/delegate_registry.h>

#include <iostream>
#include <memory>
int main()
{
    // Load the TF Lite model from file; BuildFromFile returns nullptr on failure.
    std::unique_ptr<tflite::FlatBufferModel> model =
        tflite::FlatBufferModel::BuildFromFile("./simple_conv2d_1_op.tflite");
    if (!model)
    {
        std::cout << "Failed to load TfLite model from: ./simple_conv2d_1_op.tflite" << std::endl;
        return -1;
    }

    // Build an interpreter for the model, initially without any delegate.
    // InterpreterBuilder allocates the interpreter itself, so there is no
    // need to pre-construct one.
    std::unique_ptr<tflite::Interpreter> tfLiteInterpreter;
    tflite::ops::builtin::BuiltinOpResolver resolver;
    tflite::InterpreterBuilder builder(*model, resolver);
    if (builder(&tfLiteInterpreter) != kTfLiteOk)
    {
        std::cout << "Error loading the model into the TfLiteInterpreter." << std::endl;
        return -1;
    }
    // Use default settings until options have been enabled.
    flatbuffers::FlatBufferBuilder flatBufferBuilder;
    tflite::TFLiteSettingsBuilder tfliteSettingsBuilder(flatBufferBuilder);
    flatbuffers::Offset<tflite::TFLiteSettings> tfliteSettings = tfliteSettingsBuilder.Finish();
    flatBufferBuilder.Finish(tfliteSettings);
    const tflite::TFLiteSettings* settings =
        flatbuffers::GetRoot<tflite::TFLiteSettings>(flatBufferBuilder.GetBufferPointer());
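    // Delegate options (for example which Arm NN backends to prefer) would be
    // set on tfliteSettingsBuilder before calling Finish(); the fields
    // available depend on the TF Lite configuration schema version, so they
    // are left out here.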

    // Look up the Arm NN delegate plugin in the registry; CreateByName returns
    // nullptr if no plugin was registered under that name.
    std::unique_ptr<tflite::delegates::DelegatePluginInterface> delegatePlugIn =
        tflite::delegates::DelegatePluginRegistry::CreateByName("armnn_delegate", *settings);
    if (!delegatePlugIn)
    {
        std::cout << "Unable to find the armnn_delegate plugin in the registry." << std::endl;
        return -1;
    }

    // Create Armnn Opaque Delegate from Armnn Delegate Plugin
    tflite::delegates::TfLiteDelegatePtr armnnDelegate = delegatePlugIn->Create();
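    // TfLiteDelegatePtr is a std::unique_ptr with a custom deleter, so
    // armnnDelegate owns the delegate; it must stay alive for as long as the
    // interpreter uses it.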

    // Add the delegate to the builder and rebuild the interpreter so that
    // supported parts of the graph are handed over to Arm NN.
    builder.AddDelegate(armnnDelegate.get());
    if (builder(&tfLiteInterpreter) != kTfLiteOk)
    {
        std::cout << "Unable to add the Arm NN delegate to the TfLite runtime." << std::endl;
        return -1;
    }

    if (tfLiteInterpreter->AllocateTensors() != kTfLiteOk)
    {
        std::cout << "Failed to allocate tensors in the TfLiteInterpreter." << std::endl;
        return -1;
    }

    // The input tensors should be populated here before running inference;
    // Invoke() will still execute on uninitialised data, but the results are
    // then meaningless.
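    // A minimal sketch of doing so, assuming the model's first input is
    // float32 (plausible for a one-op conv2d model); adjust the element type
    // and fill values to match the real model.
    if (float* inputData = tfLiteInterpreter->typed_input_tensor<float>(0))
    {
        const TfLiteTensor* inputTensor = tfLiteInterpreter->input_tensor(0);
        const size_t numElements = inputTensor->bytes / sizeof(float);
        for (size_t i = 0; i < numElements; ++i)
        {
            inputData[i] = 1.0f;
        }
    }
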
    // Run inference; ops supported by the delegate execute through Arm NN.
    int status = tfLiteInterpreter->Invoke();
    if (status != kTfLiteOk)
    {
        std::cout << "Inference failed." << std::endl;
        return -1;
    }
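
    // Reading the result back: a sketch assuming a single float32 output
    // tensor, which may not match the real model's output type.
    if (const float* outputData = tfLiteInterpreter->typed_output_tensor<float>(0))
    {
        std::cout << "First output value: " << outputData[0] << std::endl;
    }

    return 0;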
}