//
// Copyright © 2020 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//
5
#include <Filesystem.hpp>

#include <cl/test/ClContextControlFixture.hpp>

#include <doctest/doctest.h>

#include <fstream>
13
14namespace
15{
16
17armnn::INetworkPtr CreateNetwork()
18{
19 // Builds up the structure of the network.
20 armnn::INetworkPtr net(armnn::INetwork::Create());
21
22 armnn::IConnectableLayer* input = net->AddInputLayer(0, "input");
23 armnn::IConnectableLayer* softmax = net->AddSoftmaxLayer(armnn::SoftmaxDescriptor(), "softmax");
24 armnn::IConnectableLayer* output = net->AddOutputLayer(0, "output");
25
26 input->GetOutputSlot(0).Connect(softmax->GetInputSlot(0));
27 softmax->GetOutputSlot(0).Connect(output->GetInputSlot(0));
28
29 // Sets the input and output tensors
30 armnn::TensorInfo inputTensorInfo(armnn::TensorShape({1, 5}), armnn::DataType::QAsymmU8, 10000.0f, 1);
31 input->GetOutputSlot(0).SetTensorInfo(inputTensorInfo);
32
33 armnn::TensorInfo outputTensorInfo(armnn::TensorShape({1, 5}), armnn::DataType::QAsymmU8, 1.0f/255.0f, 0);
34 softmax->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
35
36 return net;
37}
38
39void RunInference(armnn::NetworkId& netId, armnn::IRuntimePtr& runtime, std::vector<uint8_t>& outputData)
40{
41 // Creates structures for input & output.
42 std::vector<uint8_t> inputData
43 {
44 1, 10, 3, 200, 5 // Some inputs - one of which is sufficiently larger than the others to saturate softmax.
45 };
46
47 armnn::InputTensors inputTensors
48 {
49 {0, armnn::ConstTensor(runtime->GetInputTensorInfo(netId, 0), inputData.data())}
50 };
51
52 armnn::OutputTensors outputTensors
53 {
54 {0, armnn::Tensor(runtime->GetOutputTensorInfo(netId, 0), outputData.data())}
55 };
56
57 // Run inference.
58 runtime->EnqueueWorkload(netId, inputTensors, outputTensors);
59}
60
// Slurps the entire contents of the named file in binary mode.
// Yields an empty vector when the file is missing or cannot be opened.
std::vector<char> ReadBinaryFile(const std::string& binaryFileName)
{
    std::ifstream stream(binaryFileName, std::ios::binary);
    std::vector<char> contents((std::istreambuf_iterator<char>(stream)),
                               std::istreambuf_iterator<char>());
    return contents;
}
66
67} // anonymous namespace
68
Sadik Armagan1625efc2021-06-10 18:24:34 +010069TEST_CASE_FIXTURE(ClContextControlFixture, "ClContextSerializerTest")
Matthew Sloyan80fbcd52021-01-07 13:28:47 +000070{
71 // Get tmp directory and create blank file.
72 fs::path filePath = armnnUtils::Filesystem::NamedTempFile("Armnn-CachedNetworkFileTest-TempFile.bin");
73 std::string const filePathString{filePath.string()};
74 std::ofstream file { filePathString };
75
76 // Create runtime in which test will run
77 armnn::IRuntime::CreationOptions options;
78 armnn::IRuntimePtr runtime(armnn::IRuntime::Create(options));
79
80 std::vector<armnn::BackendId> backends = {armnn::Compute::GpuAcc};
81
82 // Create two networks.
83 // net1 will serialize and save context to file.
84 // net2 will deserialize context saved from net1 and load.
85 armnn::INetworkPtr net1 = CreateNetwork();
86 armnn::INetworkPtr net2 = CreateNetwork();
87
88 // Add specific optimizerOptions to each network.
89 armnn::OptimizerOptions optimizerOptions1;
90 armnn::OptimizerOptions optimizerOptions2;
91 armnn::BackendOptions modelOptions1("GpuAcc",
92 {{"SaveCachedNetwork", true}, {"CachedNetworkFilePath", filePathString}});
93 armnn::BackendOptions modelOptions2("GpuAcc",
94 {{"SaveCachedNetwork", false}, {"CachedNetworkFilePath", filePathString}});
95 optimizerOptions1.m_ModelOptions.push_back(modelOptions1);
96 optimizerOptions2.m_ModelOptions.push_back(modelOptions2);
97
98 armnn::IOptimizedNetworkPtr optNet1 = armnn::Optimize(
99 *net1, backends, runtime->GetDeviceSpec(), optimizerOptions1);
100 armnn::IOptimizedNetworkPtr optNet2 = armnn::Optimize(
101 *net2, backends, runtime->GetDeviceSpec(), optimizerOptions2);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100102 CHECK(optNet1);
103 CHECK(optNet2);
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000104
105 // Cached file should be empty until net1 is loaded into runtime.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100106 CHECK(fs::is_empty(filePathString));
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000107
108 // Load net1 into the runtime.
109 armnn::NetworkId netId1;
Sadik Armagan1625efc2021-06-10 18:24:34 +0100110 CHECK(runtime->LoadNetwork(netId1, std::move(optNet1)) == armnn::Status::Success);
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000111
112 // File should now exist and not be empty. It has been serialized.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100113 CHECK(fs::exists(filePathString));
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000114 std::vector<char> dataSerialized = ReadBinaryFile(filePathString);
Sadik Armagan1625efc2021-06-10 18:24:34 +0100115 CHECK(dataSerialized.size() != 0);
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000116
117 // Load net2 into the runtime using file and deserialize.
118 armnn::NetworkId netId2;
Sadik Armagan1625efc2021-06-10 18:24:34 +0100119 CHECK(runtime->LoadNetwork(netId2, std::move(optNet2)) == armnn::Status::Success);
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000120
121 // Run inference and get output data.
122 std::vector<uint8_t> outputData1(5);
123 RunInference(netId1, runtime, outputData1);
124
125 std::vector<uint8_t> outputData2(5);
126 RunInference(netId2, runtime, outputData2);
127
128 // Compare outputs from both networks.
Sadik Armagan1625efc2021-06-10 18:24:34 +0100129 CHECK(std::equal(outputData1.begin(), outputData1.end(), outputData2.begin(), outputData2.end()));
Matthew Sloyan80fbcd52021-01-07 13:28:47 +0000130
131 // Remove temp file created.
132 fs::remove(filePath);
133}