//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "../MobileNetSsdInferenceTest.hpp"

#include "armnnTfLiteParser/ITfLiteParser.hpp"

#include <algorithm>
#include <cstdlib>
#include <iostream>
#include <iterator>
#include <memory>

using namespace armnnTfLiteParser;
13
14int main(int argc, char* argv[])
15{
16 int retVal = EXIT_FAILURE;
17 try
18 {
19 using DataType = float;
20 using Parser = armnnTfLiteParser::ITfLiteParser;
21 using Model = InferenceModel<Parser, DataType>;
22
23 armnn::TensorShape inputTensorShape({ 1, 300, 300, 3 });
24
25 std::vector<const char*> inputLayerNames =
26 {
27 "normalized_input_image_tensor"
28 };
29
30 std::vector<const char*> outputLayerNames =
31 {
32 "TFLite_Detection_PostProcess",
33 "TFLite_Detection_PostProcess:1",
34 "TFLite_Detection_PostProcess:2",
35 "TFLite_Detection_PostProcess:3"
36 };
37
38 retVal = InferenceTestMain(argc, argv, { 0 },
39 [&inputTensorShape, inputLayerNames, outputLayerNames]()
40 {
41 return make_unique<MobileNetSsdTestCaseProvider<Model>>(
42 [&]
43 (typename Model::CommandLineOptions modelOptions)
44 {
45 if (!ValidateDirectory(modelOptions.m_ModelDir))
46 {
47 return std::unique_ptr<Model>();
48 }
49
50 typename Model::Params modelParams;
51 modelParams.m_ModelPath =
52 modelOptions.m_ModelDir + "ssd_mobilenet_v1.tflite";
53
54 std::copy(inputLayerNames.begin(), inputLayerNames.end(),
55 std::back_inserter(modelParams.m_InputBindings));
56
57 std::copy(outputLayerNames.begin(), outputLayerNames.end(),
58 std::back_inserter(modelParams.m_OutputBindings));
59
60 modelParams.m_InputShapes = { inputTensorShape };
61 modelParams.m_IsModelBinary = true;
Aron Virginas-Tar339bcae2019-01-31 16:44:26 +000062 modelParams.m_ComputeDevices = modelOptions.GetComputeDevicesAsBackendIds();
Aron Virginas-Tard089b742019-01-29 11:09:51 +000063 modelParams.m_VisualizePostOptimizationModel = modelOptions.m_VisualizePostOptimizationModel;
64 modelParams.m_EnableFp16TurboMode = modelOptions.m_EnableFp16TurboMode;
65
66 return std::make_unique<Model>(modelParams);
67 });
68 });
69 }
70 catch (const std::exception& e)
71 {
72 std::cerr << "WARNING: " << *argv << ": An error has occurred when running "
73 "the classifier inference tests: " << e.what() << std::endl;
74 }
75 return retVal;
76}