//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Descriptors.hpp>
#include <armnn/IRuntime.hpp>
#include <armnn/INetwork.hpp>
#include <Runtime.hpp>
#include <armnn/TypesUtils.hpp>

#include <LabelsAndEventClasses.hpp>
#include <test/ProfilingTestUtils.hpp>

#include <HeapProfiling.hpp>
#include <LeakChecking.hpp>

#ifdef WITH_VALGRIND
#include <valgrind/memcheck.h>
#endif

#include <boost/test/unit_test.hpp>
#include "RuntimeTests.hpp"

namespace armnn
{

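// Test helper that pre-reserves the runtime's internal m_LoadedNetworks container. The Valgrind
// leak test below calls this before loading a network, so that the container's own allocation is
// not counted as memory that is only released when the runtime is destroyed (see RuntimeMemoryLeak).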
void RuntimeLoadedNetworksReserve(armnn::Runtime* runtime)
{
    runtime->m_LoadedNetworks.reserve(1);
}

}

BOOST_AUTO_TEST_SUITE(Runtime)

BOOST_AUTO_TEST_CASE(RuntimeUnloadNetwork)
{
    // Build 2 mock-networks and load them into the runtime.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Mock network 1.
    armnn::NetworkId networkIdentifier1 = 1;
    armnn::INetworkPtr mockNetwork1(armnn::INetwork::Create());
    mockNetwork1->AddInputLayer(0, "test layer");
    std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef};
    runtime->LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime->GetDeviceSpec()));

    // Mock network 2.
    armnn::NetworkId networkIdentifier2 = 2;
    armnn::INetworkPtr mockNetwork2(armnn::INetwork::Create());
    mockNetwork2->AddInputLayer(0, "test layer");
    runtime->LoadNetwork(networkIdentifier2, Optimize(*mockNetwork2, backends, runtime->GetDeviceSpec()));

    // Unloads one by its networkID.
    BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Success);

    BOOST_TEST(runtime->UnloadNetwork(networkIdentifier1) == armnn::Status::Failure);
}

// Note: in the current builds we don't do Valgrind- and gperftools-based leak checking at the
// same time, so in practice WITH_VALGRIND and ARMNN_LEAK_CHECKING_ENABLED are mutually exclusive.
// The Valgrind tests can stay for x86 builds, but on HiKey Valgrind is just way too slow
// to be integrated into the CI system.

#ifdef ARMNN_LEAK_CHECKING_ENABLED

struct DisableGlobalLeakChecking
{
    DisableGlobalLeakChecking()
    {
        ARMNN_LOCAL_LEAK_CHECKING_ONLY();
    }
};

BOOST_GLOBAL_FIXTURE(DisableGlobalLeakChecking);

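// Note: the leak counters asserted below are only populated when the binary runs under the
// gperftools heap checker, e.g. (the exact binary path depends on the build tree):
//   HEAPCHECK=draconian armnn/test/UnitTests --run_test=Runtime/RuntimeHeapMemoryUsageSanityChecks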
BOOST_AUTO_TEST_CASE(RuntimeHeapMemoryUsageSanityChecks)
{
    BOOST_TEST(ARMNN_LEAK_CHECKER_IS_ACTIVE());
    {
        ARMNN_SCOPED_LEAK_CHECKER("Sanity_Check_Outer");
        {
            ARMNN_SCOPED_LEAK_CHECKER("Sanity_Check_Inner");
            BOOST_TEST(ARMNN_NO_LEAKS_IN_SCOPE() == true);
            std::unique_ptr<char[]> dummyAllocation(new char[1000]);
            BOOST_CHECK_MESSAGE(ARMNN_NO_LEAKS_IN_SCOPE() == false,
                                "A leak of 1000 bytes is expected here. "
                                "Please make sure environment variable: HEAPCHECK=draconian is set!");
            BOOST_TEST(ARMNN_BYTES_LEAKED_IN_SCOPE() == 1000);
            BOOST_TEST(ARMNN_OBJECTS_LEAKED_IN_SCOPE() == 1);
        }
        BOOST_TEST(ARMNN_NO_LEAKS_IN_SCOPE());
        BOOST_TEST(ARMNN_BYTES_LEAKED_IN_SCOPE() == 0);
        BOOST_TEST(ARMNN_OBJECTS_LEAKED_IN_SCOPE() == 0);
    }
}

#endif // ARMNN_LEAK_CHECKING_ENABLED

// Note: this part of the code is due to be removed when we fully trust the gperftools-based results.
#ifdef WITH_VALGRIND
// Run with the following command to get all the amazing output (in the devenv/build folder) :)
// valgrind --leak-check=full --show-leak-kinds=all --log-file=Valgrind_Memcheck_Leak_Report.txt armnn/test/UnitTests
BOOST_AUTO_TEST_CASE(RuntimeMemoryLeak)
{
    // From the Valgrind documentation:

    // "Definitely lost" means that no pointer to the block can be found. The block is classified as "lost",
    // because the programmer could not possibly have freed it at program exit, since no pointer to it exists.
    unsigned long leakedBefore = 0;
    unsigned long leakedAfter = 0;

    // "Still reachable" means a start-pointer or chain of start-pointers to the block is found. Since the block
    // is still pointed at, the programmer could, at least in principle, have freed it before program exit.
    // We want to test this in case memory is not freed as early as it could have been.
    unsigned long reachableBefore = 0;
    unsigned long reachableAfter = 0;

    // Needed as out params but we don't test them.
    unsigned long dubious = 0;
    unsigned long suppressed = 0;

    armnn::NetworkId networkIdentifier1 = 1;

    // Ensure that the runtime is large enough before checking for memory leaks.
    // Otherwise, when loading the network, it will automatically reserve memory that won't be released
    // until destruction.
    armnn::IRuntime::CreationOptions options;
    armnn::Runtime runtime(options);
    armnn::RuntimeLoadedNetworksReserve(&runtime);

    {
        std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef};

        std::unique_ptr<armnn::Network> mockNetwork1 = std::make_unique<armnn::Network>();
        mockNetwork1->AddInputLayer(0, "test layer");

        // Warm-up load/unload pair to put the runtime in a stable state (memory-wise).
        runtime.LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime.GetDeviceSpec()));
        runtime.UnloadNetwork(networkIdentifier1);

        // Checks for leaks before we load the network and record them so that we can see the delta after unloading.
        VALGRIND_DO_QUICK_LEAK_CHECK;
        VALGRIND_COUNT_LEAKS(leakedBefore, dubious, reachableBefore, suppressed);

        // The actual test.
        runtime.LoadNetwork(networkIdentifier1, Optimize(*mockNetwork1, backends, runtime.GetDeviceSpec()));
        runtime.UnloadNetwork(networkIdentifier1);

        VALGRIND_DO_ADDED_LEAK_CHECK;
        VALGRIND_COUNT_LEAKS(leakedAfter, dubious, reachableAfter, suppressed);
    }

    // If we're not running under Valgrind, these vars will have been initialised to 0, so this will always pass.
    BOOST_TEST(leakedBefore == leakedAfter);
    BOOST_TEST(reachableBefore == reachableAfter);

    // These calls are needed because VALGRIND_COUNT_LEAKS is a macro that assigns to its parameters:
    // they are assigned to but never read, which the compiler would otherwise flag as set-but-unused.
    boost::ignore_unused(dubious);
    boost::ignore_unused(suppressed);
}
#endif // WITH_VALGRIND

BOOST_AUTO_TEST_CASE(RuntimeCpuRef)
{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
}

BOOST_AUTO_TEST_CASE(RuntimeFallbackToCpuRef)
{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Allow fallback to CpuRef.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuAcc, armnn::Compute::CpuRef };
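    // Note: the list above is in preference order; CpuAcc is tried first and CpuRef handles any layer it rejects.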
    // Optimize the network.
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);
}

BOOST_AUTO_TEST_CASE(IVGCVSW_1929_QuantizedSoftmaxIssue)
{
    // Test for issue reported by Chris Nix in https://jira.arm.com/browse/IVGCVSW-1929
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());
    armnn::IConnectableLayer* input = net->AddInputLayer(
        0,
        "input"
    );
    armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(
        armnn::SoftmaxDescriptor(),
        "softmax"
    );
    armnn::IConnectableLayer* output = net->AddOutputLayer(
        0,
        "output"
    );

    input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
    softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(
        armnn::TensorShape({ 1, 5 }),
        armnn::DataType::QuantisedAsymm8,
        1.0f/255,
        0
    ));

    softmax->GetOutputSlot(0).SetTensorInfo(armnn::TensorInfo(
        armnn::TensorShape({ 1, 5 }),
        armnn::DataType::QuantisedAsymm8
    ));
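    // The softmax output above deliberately has no quantisation scale set; that omission is what this
    // test exercises. Supplying a scale (for example 1.0f / 256 for an 8-bit softmax output) would be
    // expected to let the optimisation succeed.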

    std::vector<armnn::BackendId> backends = {armnn::Compute::CpuRef};
    std::vector<std::string> errMessages;
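    // Use the Optimize overload that reports error messages, so the failure reason can be asserted on below.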
276 armnn::IOptimizedNetworkPtr optNet = Optimize(
277 *net,
278 backends,
279 runtime->GetDeviceSpec(),
280 OptimizerOptions(),
281 errMessages
282 );
David Beckf0b48452018-10-19 15:20:56 +0100283
jimfly016b0b53d2018-10-08 14:43:01 +0100284 BOOST_TEST(errMessages.size() == 1);
285 BOOST_TEST(errMessages[0] ==
286 "ERROR: output 0 of layer Softmax (softmax) is of type "
287 "Quantized 8 bit but its scale parameter has not been set");
288 BOOST_TEST(!optNet);
289}
290
BOOST_AUTO_TEST_CASE(ProfilingDisable)
{
    using namespace armnn;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0);

    // This layer configuration isn't supported by CpuAcc; it should fall back to CpuRef.
    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor);

    IConnectableLayer* output = net->AddOutputLayer(0);

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);

    profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
    profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
    auto readableBuffer = bufferManager.GetReadableBuffer();

    // Profiling is not enabled, so the post-optimisation structure should not be created.
    BOOST_TEST(!readableBuffer);
}

BOOST_AUTO_TEST_CASE(ProfilingEnableCpuRef)
{
    using namespace armnn;
    using namespace armnn::profiling;

    // Create runtime in which the test will run.
    armnn::IRuntime::CreationOptions options;
    options.m_ProfilingOptions.m_EnableProfiling = true;
    armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));

    // Build up the structure of the network.
    INetworkPtr net(INetwork::Create());

    IConnectableLayer* input = net->AddInputLayer(0, "input");

    NormalizationDescriptor descriptor;
    IConnectableLayer* normalize = net->AddNormalizationLayer(descriptor, "normalization");

    IConnectableLayer* output = net->AddOutputLayer(0, "output");

    input->GetOutputSlot(0).Connect(normalize->GetInputSlot(0));
    normalize->GetOutputSlot(0).Connect(output->GetInputSlot(0));

    input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));
    normalize->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 4 }, DataType::Float32));

    // Optimize the network.
    std::vector<armnn::BackendId> backends = { armnn::Compute::CpuRef };
    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());

    ProfilingGuid optNetGuid = optNet->GetGuid();

    // Load it into the runtime. It should succeed.
    armnn::NetworkId netId;
    BOOST_TEST(runtime->LoadNetwork(netId, std::move(optNet)) == Status::Success);

    profiling::ProfilingServiceRuntimeHelper profilingServiceHelper;
    profiling::BufferManager& bufferManager = profilingServiceHelper.GetProfilingBufferManager();
    auto readableBuffer = bufferManager.GetReadableBuffer();

    // Profiling is enabled, so the post-optimisation structure should be created.
    BOOST_CHECK(readableBuffer != nullptr);

    unsigned int size = readableBuffer->GetSize();
    BOOST_CHECK(size == 1356);
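    // Note: the expected size is tied to the exact set of timeline packets verified below; if the packet
    // format or the packets emitted at network-load time change, this constant will need updating.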

    const unsigned char* readableData = readableBuffer->GetReadableData();
    BOOST_CHECK(readableData != nullptr);

    unsigned int offset = 0;

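    // Each Verify* helper below checks one packet starting at 'offset' and advances 'offset' past it,
    // so these assertions also verify the order in which the packets were written.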
    // Post-optimisation network
    // Network entity
    VerifyTimelineEntityBinaryPacket(optNetGuid, readableData, offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           optNetGuid,
                                           LabelsAndEventClasses::NETWORK_GUID,
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // Input layer
    // Input layer entity
    VerifyTimelineEntityBinaryPacket(input->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacket(EmptyOptional(), "input", readableData, offset);

    // Entity - Name relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           input->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Name label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::NAME_GUID,
                                           readableData,
                                           offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           input->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // Network - Input layer relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           optNetGuid,
                                           input->GetGuid(),
                                           readableData,
                                           offset);

    // Normalization layer
    // Normalization layer entity
    VerifyTimelineEntityBinaryPacket(normalize->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacket(EmptyOptional(), "normalization", readableData, offset);

    // Entity - Name relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           normalize->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Name label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::NAME_GUID,
                                           readableData,
                                           offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           normalize->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // Network - Normalize layer relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           optNetGuid,
                                           normalize->GetGuid(),
                                           readableData,
                                           offset);

    // Input layer - Normalize layer relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           input->GetGuid(),
                                           normalize->GetGuid(),
                                           readableData,
                                           offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::CONNECTION_GUID,
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // Normalization workload
    // Normalization workload entity
    VerifyTimelineEntityBinaryPacket(EmptyOptional(), readableData, offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // BackendId entity
    VerifyTimelineLabelBinaryPacket(EmptyOptional(), "CpuRef", readableData, offset);

    // Entity - BackendId relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // BackendId label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::BACKENDID_GUID,
                                           readableData,
                                           offset);

    // Normalize layer - Normalize workload relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           normalize->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Output layer
    // Output layer entity
    VerifyTimelineEntityBinaryPacket(output->GetGuid(), readableData, offset);

    // Name entity
    VerifyTimelineLabelBinaryPacket(EmptyOptional(), "output", readableData, offset);

    // Entity - Name relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           output->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Name label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::NAME_GUID,
                                           readableData,
                                           offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           output->GetGuid(),
                                           EmptyOptional(),
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    // Network - Output layer relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           optNetGuid,
                                           output->GetGuid(),
                                           readableData,
                                           offset);

    // Normalize layer - Output layer relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::RetentionLink,
                                           EmptyOptional(),
                                           normalize->GetGuid(),
                                           output->GetGuid(),
                                           readableData,
                                           offset);

    // Entity - Type relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::CONNECTION_GUID,
                                           readableData,
                                           offset);

    // Type label relationship
    VerifyTimelineRelationshipBinaryPacket(ProfilingRelationshipType::LabelLink,
                                           EmptyOptional(),
                                           EmptyOptional(),
                                           LabelsAndEventClasses::TYPE_GUID,
                                           readableData,
                                           offset);

    bufferManager.MarkRead(readableBuffer);
}

BOOST_AUTO_TEST_CASE(ProfilingPostOptimisationStructureCpuRef)
{
    VerifyPostOptimisationStructureTestImpl(armnn::Compute::CpuRef);
}

BOOST_AUTO_TEST_SUITE_END()