//
// Copyright © 2017 Arm Ltd. All rights reserved.
// See LICENSE file in the project root for full license information.
//
#include "DriverTestHelpers.hpp"

#include <cinttypes>
#include <cstring>

#include <log/log.h>

#include <boost/test/unit_test.hpp>
8
9namespace android
10{
11namespace hardware
12{
13namespace neuralnetworks
14{
15namespace V1_0
16{
17
18std::ostream& operator<<(std::ostream& os, ErrorStatus stat)
19{
20 return os << static_cast<int>(stat);
21}
22
23} // namespace android::hardware::neuralnetworks::V1_0
24} // namespace android::hardware::neuralnetworks
25} // namespace android::hardware
26} // namespace android
27
28
29namespace driverTestHelpers
30{
31
32Return<void> ExecutionCallback::notify(ErrorStatus status)
33{
34 (void)status;
35 ALOGI("ExecutionCallback::notify invoked");
36 std::lock_guard<std::mutex> executionLock(mMutex);
37 mNotified = true;
38 mCondition.notify_one();
39 return Void();
40}
41
42Return<void> ExecutionCallback::wait()
43{
44 ALOGI("ExecutionCallback::wait invoked");
45 std::unique_lock<std::mutex> executionLock(mMutex);
46 while (!mNotified)
47 {
48 mCondition.wait(executionLock);
49 }
50 mNotified = false;
51 return Void();
52}
53
54Return<void> PreparedModelCallback::notify(ErrorStatus status,
55 const android::sp<IPreparedModel>& preparedModel)
56{
57 m_ErrorStatus = status;
58 m_PreparedModel = preparedModel;
59 return Void();
60}
61
62// lifted from common/Utils.cpp
63hidl_memory allocateSharedMemory(int64_t size)
64{
65 hidl_memory memory;
66
67 const std::string& type = "ashmem";
68 android::sp<IAllocator> allocator = IAllocator::getService(type);
69 allocator->allocate(size, [&](bool success, const hidl_memory& mem) {
70 if (!success)
71 {
72 ALOGE("unable to allocate %li bytes of %s", size, type.c_str());
73 }
74 else
75 {
76 memory = mem;
77 }
78 });
79
80 return memory;
81}
82
83android::sp<IMemory> AddPoolAndGetData(uint32_t size, Request& request)
84{
85 hidl_memory pool;
86
87 android::sp<IAllocator> allocator = IAllocator::getService("ashmem");
88 allocator->allocate(sizeof(float) * size, [&](bool success, const hidl_memory& mem) {
89 BOOST_TEST(success);
90 pool = mem;
91 });
92
93 request.pools.resize(request.pools.size() + 1);
94 request.pools[request.pools.size() - 1] = pool;
95
96 android::sp<IMemory> mapped = mapMemory(pool);
97 mapped->update();
98 return mapped;
99}
100
101void AddPoolAndSetData(uint32_t size, Request& request, const float* data)
102{
103 android::sp<IMemory> memory = AddPoolAndGetData(size, request);
104
105 float* dst = static_cast<float*>(static_cast<void*>(memory->getPointer()));
106
107 memcpy(dst, data, size * sizeof(float));
108}
109
110void AddOperand(Model& model, const Operand& op)
111{
112 model.operands.resize(model.operands.size() + 1);
113 model.operands[model.operands.size() - 1] = op;
114}
115
116void AddIntOperand(Model& model, int32_t value)
117{
118 DataLocation location = {};
119 location.offset = model.operandValues.size();
120 location.length = sizeof(int32_t);
121
122 Operand op = {};
123 op.type = OperandType::INT32;
124 op.dimensions = hidl_vec<uint32_t>{};
125 op.lifetime = OperandLifeTime::CONSTANT_COPY;
126 op.location = location;
127
128 model.operandValues.resize(model.operandValues.size() + location.length);
129 *reinterpret_cast<int32_t*>(&model.operandValues[location.offset]) = value;
130
131 AddOperand(model, op);
132}
133
134void AddInputOperand(Model& model, hidl_vec<uint32_t> dimensions)
135{
136 Operand op = {};
137 op.type = OperandType::TENSOR_FLOAT32;
138 op.dimensions = dimensions;
139 op.lifetime = OperandLifeTime::MODEL_INPUT;
140
141 AddOperand(model, op);
142
143 model.inputIndexes.resize(model.inputIndexes.size() + 1);
144 model.inputIndexes[model.inputIndexes.size() - 1] = model.operands.size() - 1;
145}
146
147void AddOutputOperand(Model& model, hidl_vec<uint32_t> dimensions)
148{
149 Operand op = {};
150 op.type = OperandType::TENSOR_FLOAT32;
151 op.dimensions = dimensions;
152 op.lifetime = OperandLifeTime::MODEL_OUTPUT;
153
154 AddOperand(model, op);
155
156 model.outputIndexes.resize(model.outputIndexes.size() + 1);
157 model.outputIndexes[model.outputIndexes.size() - 1] = model.operands.size() - 1;
158}
159
160
161android::sp<IPreparedModel> PrepareModelWithStatus(const Model& model,
162 armnn_driver::ArmnnDriver& driver,
163 ErrorStatus & prepareStatus,
164 ErrorStatus expectedStatus)
165{
166
167 android::sp<PreparedModelCallback> cb(new PreparedModelCallback());
168 driver.prepareModel(model, cb);
169
170 prepareStatus = cb->GetErrorStatus();
171 BOOST_TEST(prepareStatus == expectedStatus);
172 if (expectedStatus == ErrorStatus::NONE)
173 {
174 BOOST_TEST((cb->GetPreparedModel() != nullptr));
175 }
176 return cb->GetPreparedModel();
177}
178
179android::sp<IPreparedModel> PrepareModel(const Model& model,
180 armnn_driver::ArmnnDriver& driver)
181{
182 ErrorStatus prepareStatus = ErrorStatus::NONE;
183 return PrepareModelWithStatus(model, driver, prepareStatus);
184}
185
186ErrorStatus Execute(android::sp<IPreparedModel> preparedModel,
187 const Request& request,
188 ErrorStatus expectedStatus)
189{
190 android::sp<ExecutionCallback> cb(new ExecutionCallback());
191 ErrorStatus execStatus = preparedModel->execute(request, cb);
192 BOOST_TEST(execStatus == expectedStatus);
193 ALOGI("Execute: waiting for callback to be invoked");
194 cb->wait();
195 return execStatus;
196}
197
198android::sp<ExecutionCallback> ExecuteNoWait(android::sp<IPreparedModel> preparedModel, const Request& request)
199{
200 android::sp<ExecutionCallback> cb(new ExecutionCallback());
201 BOOST_TEST(preparedModel->execute(request, cb) == ErrorStatus::NONE);
202 ALOGI("ExecuteNoWait: returning callback object");
203 return cb;
204}
205
206template<>
207OperandType TypeToOperandType<float>()
208{
209 return OperandType::TENSOR_FLOAT32;
210};
211
212template<>
213OperandType TypeToOperandType<int32_t>()
214{
215 return OperandType::TENSOR_INT32;
216};
217
218} // namespace driverTestHelpers