//
// Copyright © 2020 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include <armnn/INetwork.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/Utils.hpp>
#include <armnn/Descriptors.hpp>

#include <iostream>

/// A simple example of using the ArmNN SDK API with the standalone sample dynamic backend.
/// In this example, an addition layer is used to add two input tensors and produce a single output tensor.
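/// Note: this sample assumes the SampleDynamic backend has been built and is discoverable by the
/// runtime's dynamic backend loading; if it is not, the Optimize step below will not succeed.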
int main()
{
    using namespace armnn;

    // Construct ArmNN network
    armnn::NetworkId networkIdentifier;
    INetworkPtr myNetwork = INetwork::Create();

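    // Add the layers: two input layers, an addition layer that consumes both inputs, and one output layer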
    IConnectableLayer* input0 = myNetwork->AddInputLayer(0);
    IConnectableLayer* input1 = myNetwork->AddInputLayer(1);
    IConnectableLayer* add = myNetwork->AddAdditionLayer();
    IConnectableLayer* output = myNetwork->AddOutputLayer(0);

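    // Connect the output slot of each layer to the input slot of the next to build the graph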
    input0->GetOutputSlot(0).Connect(add->GetInputSlot(0));
    input1->GetOutputSlot(0).Connect(add->GetInputSlot(1));
    add->GetOutputSlot(0).Connect(output->GetInputSlot(0));

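    // Every tensor in this example is a 2x1 Float32 tensor; set this shape on each producing output slot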
    TensorInfo tensorInfo(TensorShape({2, 1}), DataType::Float32);
    input0->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    input1->GetOutputSlot(0).SetTensorInfo(tensorInfo);
    add->GetOutputSlot(0).SetTensorInfo(tensorInfo);

    // Create ArmNN runtime
    IRuntime::CreationOptions options; // default options
    armnn::IRuntimePtr run(armnn::IRuntime::Create(options));

    // Optimise ArmNN network for the preferred backends; only the SampleDynamic backend is requested here
    armnn::IOptimizedNetworkPtr optNet = Optimize(*myNetwork, {"SampleDynamic"}, run->GetDeviceSpec());
    if (!optNet)
    {
        // This shouldn't happen for this simple sample as long as the SampleDynamic backend is available.
        // But in general usage Optimize could fail if none of the backends available at runtime
        // can support the model that has been provided.
        std::cerr << "Error: Failed to optimise the input network." << std::endl;
        return 1;
    }

    // Load the optimised graph into the runtime; networkIdentifier is set to the id used for execution
    run->LoadNetwork(networkIdentifier, std::move(optNet));

    // Input data and a buffer to receive the output
    std::vector<float> input0Data
    {
        5.0f, 3.0f
    };
    std::vector<float> input1Data
    {
        10.0f, 8.0f
    };
    std::vector<float> outputData(2);

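    // ConstTensor expects its TensorInfo to be flagged as constant, so query the runtime's
    // input tensor info and mark it before wrapping the input buffers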
    TensorInfo inputTensorInfo = run->GetInputTensorInfo(networkIdentifier, 0);
    inputTensorInfo.SetConstant(true);
    InputTensors inputTensors
    {
        {0, armnn::ConstTensor(inputTensorInfo, input0Data.data())},
        {1, armnn::ConstTensor(inputTensorInfo, input1Data.data())}
    };
    OutputTensors outputTensors
    {
        {0, armnn::Tensor(run->GetOutputTensorInfo(networkIdentifier, 0), outputData.data())}
    };

    // Execute the network; EnqueueWorkload runs inference synchronously and fills the output tensors
    run->EnqueueWorkload(networkIdentifier, inputTensors, outputTensors);

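    // With the inputs above, the element-wise sums are {15,11}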
    std::cout << "Addition operator result is {" << outputData[0] << "," << outputData[1] << "}" << std::endl;
    return 0;
}