//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <armnn/Tensor.hpp>

#include <armnn/backends/IBackendInternal.hpp>
#include <armnn/backends/IMemoryManager.hpp>
#include <armnn/backends/Workload.hpp>
#include <armnn/backends/WorkloadInfo.hpp>

namespace armnn
{
class ITensorHandle;
} // namespace armnn

namespace
{

22template <typename QueueDescriptor>
23void AddInputToWorkload(QueueDescriptor& descriptor,
24 armnn::WorkloadInfo& info,
25 const armnn::TensorInfo& tensorInfo,
26 armnn::ITensorHandle* tensorHandle)
27{
28 descriptor.m_Inputs.push_back(tensorHandle);
29 info.m_InputTensorInfos.push_back(tensorInfo);
30}
31
32template <typename QueueDescriptor>
33void AddOutputToWorkload(QueueDescriptor& descriptor,
34 armnn::WorkloadInfo& info,
35 const armnn::TensorInfo& tensorInfo,
36 armnn::ITensorHandle* tensorHandle)
37{
38 descriptor.m_Outputs.push_back(tensorHandle);
39 info.m_OutputTensorInfos.push_back(tensorInfo);
40}
41
42template <typename QueueDescriptor>
43void SetWorkloadInput(QueueDescriptor& descriptor,
44 armnn::WorkloadInfo& info,
45 unsigned int index,
46 const armnn::TensorInfo& tensorInfo,
47 armnn::ITensorHandle* tensorHandle)
48{
49 descriptor.m_Inputs[index] = tensorHandle;
50 info.m_InputTensorInfos[index] = tensorInfo;
51}
52
53template <typename QueueDescriptor>
54void SetWorkloadOutput(QueueDescriptor& descriptor,
55 armnn::WorkloadInfo& info,
56 unsigned int index,
57 const armnn::TensorInfo& tensorInfo,
58 armnn::ITensorHandle* tensorHandle)
59{
60 descriptor.m_Outputs[index] = tensorHandle;
61 info.m_OutputTensorInfos[index] = tensorInfo;
62}
63
64inline void ExecuteWorkload(armnn::IWorkload& workload,
65 const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
66 bool memoryManagementRequested = true)
67{
68 const bool manageMemory = memoryManager && memoryManagementRequested;
69
70 // Acquire working memory (if needed)
71 if (manageMemory)
72 {
73 memoryManager->Acquire();
74 }
75
76 // Perform PostAllocationConfiguration
77 workload.PostAllocationConfigure();
78
79 // Execute the workload
80 workload.Execute();
81
82 // Release working memory (if needed)
83 if (manageMemory)
84 {
85 memoryManager->Release();
86 }
87}
88
89inline armnn::Optional<armnn::DataType> GetBiasTypeFromWeightsType(armnn::Optional<armnn::DataType> weightsType)
90{
91 if (!weightsType)
92 {
93 return weightsType;
94 }
95
96 switch(weightsType.value())
97 {
98 case armnn::DataType::BFloat16:
99 case armnn::DataType::Float16:
100 case armnn::DataType::Float32:
101 return weightsType;
102 case armnn::DataType::QAsymmS8:
103 case armnn::DataType::QAsymmU8:
104 case armnn::DataType::QSymmS8:
105 case armnn::DataType::QSymmS16:
106 return armnn::DataType::Signed32;
107 default:
108 ARMNN_ASSERT_MSG(false, "GetBiasTypeFromWeightsType(): Unsupported data type.");
109 }
110 return armnn::EmptyOptional();
111}
} // anonymous namespace