/*
* Copyright (c) 2021 Arm Limited. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "hal.h"
#include "ImageUtils.hpp"
#include "MobileNetModel.hpp"
#include "TensorFlowLiteMicro.hpp"
#include "TestData_img_class.hpp"
#include <catch.hpp>
using namespace test;
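
/* Copies the test image into the model's input tensor, converting it to signed
 * data if the model requires it, and runs a single inference. */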
bool RunInference(arm::app::Model& model, const int8_t imageData[])
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
                            inputTensor->bytes :
                            IFM_DATA_SIZE;
    memcpy(inputTensor->data.data, imageData, copySz);

    if (model.IsDataSigned()) {
        /* Convert the copied image data in place when the model expects signed (int8) input. */
        convertImgIoInt8(inputTensor->data.data, copySz);
    }

    return model.RunInference();
}
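
/* Runs inference for the test image at the given index and checks the output
 * tensor element-by-element against the golden reference vector. */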
template<typename T>
void TestInference(int imageIdx, arm::app::Model& model, T tolerance) {
    auto image = get_ifm_data_array(imageIdx);
    auto goldenFV = get_ofm_data_array(imageIdx);

    REQUIRE(RunInference(model, image));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);
    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);

    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

    /* Every output element must match the golden reference within the given tolerance. */
    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE(static_cast<int>(tensorData[i]) ==
                Approx(static_cast<int>((T)goldenFV[i])).epsilon(tolerance));
    }
}
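
/* Covers both sequential inferences on a single initialised model and inference
 * with model re-init for each test vector. */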
TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNeV2 Uint8", "[MobileNetV2]")
{
    SECTION("Executing inferences sequentially")
    {
        arm::app::MobileNetModel model{};

        REQUIRE_FALSE(model.IsInited());
        REQUIRE(model.Init());
        REQUIRE(model.IsInited());

        for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
            TestInference<uint8_t>(i, model, 1);
        }
    }
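
    /* Construct and initialise a fresh model instance for every test vector. */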
    for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
        DYNAMIC_SECTION("Executing inference with re-init")
        {
            arm::app::MobileNetModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init());
            REQUIRE(model.IsInited());

            TestInference<uint8_t>(i, model, 1);
        }
    }
}