blob: 4c0bf503df27441e7cea19b350bb2541b30a063d [file] [log] [blame]
/*
2 * Copyright (c) 2021 Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Licensed under the Apache License, Version 2.0 (the License); you may
7 * not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19/****************************************************************************
20 * Includes
21 ****************************************************************************/
22
23// NPU driver
24#include "ethosu_driver.h"
25// Inference process
26#include "inference_process.hpp"
27// System includes
28#include <stdio.h>
29#include <vector>
30
31// Model data
32#include "input.h"
33#include "model.h"
34#include "output.h"
35
36using namespace std;
37
38/****************************************************************************
39 * InferenceJob
40 ****************************************************************************/
41
// Size of the TFLu tensor arena; may be overridden from the build system.
#ifndef TENSOR_ARENA_SIZE
#define TENSOR_ARENA_SIZE 2000000
#endif

// Scratch memory handed to TensorFlow Lite Micro for tensor allocations.
// Placed in a dedicated .bss section and 16-byte aligned as required by the NPU.
__attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];

// Inference runner backed by the arena above.
InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);

// Destination buffer for the inference result, sized to match the reference
// output so the two can be compared byte for byte.
uint8_t outputData[sizeof(expectedOutputData)] __attribute__((aligned(16), section("output_data_sec")));
Anton Moberg456566d2021-03-17 10:19:26 +010051
52int runInference() {
53 // Load inference data
54 vector<InferenceProcess::DataPtr> input;
55 input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));
56
57 vector<InferenceProcess::DataPtr> output;
58 output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));
59
60 vector<InferenceProcess::DataPtr> expected;
61 expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));
62
63 // Create job
64 InferenceProcess::InferenceJob job(string(modelName),
65 InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
66 input,
67 output,
68 expected,
69 512,
70 std::vector<uint8_t>(4),
71 false);
72
73 // Run job
74 bool failed = inferenceProcess.runJob(job);
75 printf("Status of executed job: ");
76 printf(failed ? "Failed\n" : "Success\n");
77
78 return failed;
79}
80
81int main() {
82 int ret = runInference();
83 return ret;
Jonathan Strandbergd2afc512021-03-19 10:31:18 +010084}