/*
* Copyright (c) 2021 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/****************************************************************************
* Includes
****************************************************************************/
// NPU driver
#include "ethosu_driver.h"
// Inference process
#include "inference_process.hpp"
// System includes
#include <stdio.h>
#include <string>
#include <vector>
// Model data
#include "input.h"
#include "model.h"
#include "output.h"
using namespace std;
/****************************************************************************
* InferenceJob
****************************************************************************/
#ifndef TENSOR_ARENA_SIZE
#define TENSOR_ARENA_SIZE 2000000
#endif
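
// Because of the #ifndef guard above, the arena size can be overridden at
// compile time, e.g. by passing -DTENSOR_ARENA_SIZE=<bytes> to the compiler
// (exact mechanism is build-system specific).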
// Tensor arena for the TFLu interpreter, placed in a dedicated .bss section
// so the linker script can locate it in a suitable RAM region
__attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];

InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);

// Output buffer, sized to match the reference output from output.h
uint8_t outputData[sizeof(expectedOutputData)] __attribute__((aligned(16), section("output_data_sec")));
int runInference() {
    // Load inference data
    vector<InferenceProcess::DataPtr> input;
    input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));

    vector<InferenceProcess::DataPtr> output;
    output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));

    vector<InferenceProcess::DataPtr> expected;
    expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));

    // Create job
    InferenceProcess::InferenceJob job(string(modelName),
                                       InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
                                       input,
                                       output,
                                       expected,
                                       512,                     // numBytesToPrint
                                       std::vector<uint8_t>(4), // pmuEventConfig: four event slots, all zero
                                       false);                  // pmuCycleCounterEnable

    // Run job
    bool failed = inferenceProcess.runJob(job);
    printf("Status of executed job: %s\n", failed ? "Failed" : "Success");

    return failed;
}
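
/****************************************************************************
 * runInferenceRepeated
 *
 * Illustrative sketch, not part of the original example: reruns the same
 * job several times, e.g. as a simple soak test. It reuses only the
 * InferenceJob/runJob API already exercised in runInference() above; the
 * trailing constructor arguments follow the same pattern, and the iteration
 * count and function name are arbitrary choices for illustration.
 ****************************************************************************/
int runInferenceRepeated(size_t iterations) {
    int failures = 0;

    for (size_t i = 0; i < iterations; i++) {
        vector<InferenceProcess::DataPtr> input;
        input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));

        vector<InferenceProcess::DataPtr> output;
        output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));

        vector<InferenceProcess::DataPtr> expected;
        expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));

        InferenceProcess::InferenceJob job(string(modelName),
                                           InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
                                           input,
                                           output,
                                           expected,
                                           0,                       // numBytesToPrint: skip output dump on reruns
                                           std::vector<uint8_t>(4), // pmuEventConfig
                                           false);                  // pmuCycleCounterEnable

        if (inferenceProcess.runJob(job)) {
            failures++;
        }
    }

    printf("Repeated run: %d of %u iterations failed\n", failures, (unsigned)iterations);

    return failures;
}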
int main() {
    int ret = runInference();
    return ret;
}