/*
 * SPDX-FileCopyrightText: Copyright 2022-2023 Arm Limited and/or its affiliates <open-source-office@arm.com>
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <ethosu.hpp>
#include <uapi/ethosu.h>

#include <cstring>
#include <iostream>
#include <list>
#include <memory>
#include <sstream>
#include <stdio.h>
#include <string>
#include <unistd.h>
#include <vector>

#include "input.h"
#include "model.h"
#include "output.h"
#include "test_assertions.hpp"

using namespace EthosU;

namespace {

// Timeout passed to Inference::wait(): 60 seconds expressed in nanoseconds.
int64_t defaultTimeout = 60000000000;

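/*
 * Submit a single inference without waiting on it, verify that it reports
 * RUNNING, cancel it, then verify that it reports ABORTED and that a
 * subsequent wait() returns without timing out.
 */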
void testCancelInference(const Device &device) {
    try {
        auto network = std::make_shared<Network>(device, networkModelData, sizeof(networkModelData));

        std::vector<std::shared_ptr<Buffer>> inputBuffers;
        std::vector<std::shared_ptr<Buffer>> outputBuffers;

        auto inputBuffer = std::make_shared<Buffer>(device, sizeof(inputData));
        std::memcpy(inputBuffer->data(), inputData, sizeof(inputData));

        inputBuffers.push_back(inputBuffer);
        outputBuffers.push_back(std::make_shared<Buffer>(device, sizeof(expectedOutputData)));
        std::vector<uint8_t> enabledCounters(Inference::getMaxPmuEventCounters());

        auto inference = std::make_shared<Inference>(network,
                                                     inputBuffers.begin(),
                                                     inputBuffers.end(),
                                                     outputBuffers.begin(),
                                                     outputBuffers.end(),
                                                     enabledCounters,
                                                     false);

        // The inference has only been queued, not waited upon, so it should
        // still be in flight.
        InferenceStatus status = inference->status();
        TEST_ASSERT(status == InferenceStatus::RUNNING);

        // Cancelling a running inference should succeed and mark it aborted.
        bool success = inference->cancel();
        TEST_ASSERT(success);

        status = inference->status();
        TEST_ASSERT(status == InferenceStatus::ABORTED);

        // Waiting on a cancelled inference should return without timing out.
        bool timedout = inference->wait(defaultTimeout);
        TEST_ASSERT(!timedout);

    } catch (std::exception &e) { throw TestFailureException("Inference cancel test: ", e.what()); }
}

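/*
 * Queue five inferences back to back without waiting on any of them, then
 * submit a sixth; the driver is expected to reject it (REJECTED), presumably
 * because its inference queue is already full. Finally, cancel the five
 * queued inferences and check that each one ends up ABORTED.
 */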
void testRejectInference(const Device &device) {
    try {
        auto network = std::make_shared<Network>(device, networkModelData, sizeof(networkModelData));

        std::vector<std::shared_ptr<Buffer>> inputBuffers;
        std::vector<std::shared_ptr<Buffer>> outputBuffers;

        auto inputBuffer = std::make_shared<Buffer>(device, sizeof(inputData));
        std::memcpy(inputBuffer->data(), inputData, sizeof(inputData));

        inputBuffers.push_back(inputBuffer);
        outputBuffers.push_back(std::make_shared<Buffer>(device, sizeof(expectedOutputData)));
        std::vector<uint8_t> enabledCounters(Inference::getMaxPmuEventCounters());

        std::shared_ptr<Inference> inferences[5];

        // Queue five inferences without waiting on any of them.
        for (int i = 0; i < 5; i++) {
            inferences[i] = std::make_shared<Inference>(network,
                                                        inputBuffers.begin(),
                                                        inputBuffers.end(),
                                                        outputBuffers.begin(),
                                                        outputBuffers.end(),
                                                        enabledCounters,
                                                        false);

            InferenceStatus status = inferences[i]->status();
            TEST_ASSERT(status == InferenceStatus::RUNNING);
        }

        // One more inference on top of the outstanding ones should be rejected.
        auto inference = std::make_shared<Inference>(network,
                                                     inputBuffers.begin(),
                                                     inputBuffers.end(),
                                                     outputBuffers.begin(),
                                                     outputBuffers.end(),
                                                     enabledCounters,
                                                     false);

        bool timedout = inference->wait(defaultTimeout);
        TEST_ASSERT(!timedout);

        InferenceStatus status = inference->status();
        TEST_ASSERT(status == InferenceStatus::REJECTED);

        // Cancel the queued inferences and verify that each one is aborted.
        for (int i = 0; i < 5; i++) {
            bool success = inferences[i]->cancel();
            TEST_ASSERT(success);

            InferenceStatus status = inferences[i]->status();
            TEST_ASSERT(status == InferenceStatus::ABORTED);

            bool timedout = inferences[i]->wait(defaultTimeout);
            TEST_ASSERT(!timedout);
        }
    } catch (std::exception &e) { throw TestFailureException("Inference reject test: ", e.what()); }
}

} // namespace

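/*
 * Run both tests against a default-constructed Device; a test failure is
 * reported on stderr and makes the program exit with a non-zero status.
 */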
int main() {
    Device device;

    try {
        testCancelInference(device);
        testRejectInference(device);
    } catch (TestFailureException &e) {
        std::cerr << "Test failure: " << e.what() << std::endl;
        return 1;
    }

    return 0;
}