//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "RefFullyConnectedWorkload.hpp"

#include "FullyConnected.hpp"
#include "RefWorkloadUtils.hpp"

#include "Profiling.hpp"

namespace armnn
{
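// When the weights are constant, the constructor copies the weight (and optional bias) tensors into
// ScopedTensorHandles owned by the workload and builds float decoders over the mapped data up front,
// so they do not need to be re-created on every execution.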
RefFullyConnectedWorkload::RefFullyConnectedWorkload(
    const FullyConnectedQueueDescriptor& descriptor, const WorkloadInfo& info)
        : BaseWorkload<FullyConnectedQueueDescriptor>(descriptor, info)
{
    if (descriptor.m_Parameters.m_ConstantWeights)
    {
        m_Weight = std::make_unique<ScopedTensorHandle>(*(descriptor.m_Weight));
        const TensorInfo& rWeightInfo = m_Weight->GetTensorInfo();
        m_WeightShape = rWeightInfo.GetShape();
        m_WeightDecoder = MakeDecoder<float>(rWeightInfo, m_Weight->Map(true));

        if (descriptor.m_Parameters.m_BiasEnabled)
        {
            m_Bias = std::make_unique<ScopedTensorHandle>(*(descriptor.m_Bias));
            const TensorInfo& biasInfo = m_Bias->GetTensorInfo();
            m_BiasDecoder = MakeDecoder<float>(biasInfo, m_Bias->Map(true));
        }
    }
}

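// Configures the workload from the tensor handles held in m_Data (the workload's own inputs/outputs).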
void RefFullyConnectedWorkload::PostAllocationConfigure()
{
    PostAllocationConfigure(m_Data.m_Inputs, m_Data.m_Outputs);
}

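// Reads the input and output shapes from the supplied tensor handles and, when the weights are not constant,
// creates decoders for the weight (inputs[1]) and optional bias (inputs[2]) tensors. Also pre-computes the
// number of activations per batch entry (the product of all input dimensions after the first).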
void RefFullyConnectedWorkload::PostAllocationConfigure(std::vector<ITensorHandle*> inputs,
                                                        std::vector<ITensorHandle*> outputs)
{
    const TensorInfo& inputInfo = GetTensorInfo(inputs[0]);
    ARMNN_ASSERT(inputInfo.GetNumDimensions() > 1);
    m_InputShape = inputInfo.GetShape();

    if (!m_Data.m_Parameters.m_ConstantWeights)
    {
        const TensorInfo& rWeightInfo = GetTensorInfo(inputs[1]);
        ARMNN_ASSERT(rWeightInfo.GetNumDimensions() > 1);
        m_WeightShape = rWeightInfo.GetShape();
        m_WeightDecoder = MakeDecoder<float>(rWeightInfo);

        if (m_Data.m_Parameters.m_BiasEnabled)
        {
            const TensorInfo& biasInfo = GetTensorInfo(inputs[2]);
            m_BiasDecoder = MakeDecoder<float>(biasInfo);
        }
    }

    const TensorInfo& outputInfo = GetTensorInfo(outputs[0]);
    m_OutputShape = outputInfo.GetShape();

    m_NumActivations = 1; // Total number of activations in the input.
    for (unsigned int i = 1; i < inputInfo.GetNumDimensions(); i++)
    {
        m_NumActivations *= inputInfo.GetShape()[i];
    }
}

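// Synchronous execution path: runs against the workload's own input and output tensor handles.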
void RefFullyConnectedWorkload::Execute() const
{
    Execute(m_Data.m_Inputs, m_Data.m_Outputs);
}

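// Asynchronous execution path: re-runs PostAllocationConfigure against the caller-supplied working memory
// before executing, since those tensor handles can differ from the ones held in m_Data.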
void RefFullyConnectedWorkload::ExecuteAsync(WorkingMemDescriptor &workingMemDescriptor)
{
    PostAllocationConfigure(workingMemDescriptor.m_Inputs, workingMemDescriptor.m_Outputs);

    Execute(workingMemDescriptor.m_Inputs, workingMemDescriptor.m_Outputs);
}

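// Shared execution body used by both Execute() and ExecuteAsync().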
void RefFullyConnectedWorkload::Execute(std::vector<ITensorHandle*> inputs, std::vector<ITensorHandle*> outputs) const
{
    ARMNN_SCOPED_PROFILING_EVENT(Compute::CpuRef, "RefFullyConnectedWorkload_Execute");

    std::unique_ptr<Decoder<float>> inputDecoder = MakeDecoder<float>(GetTensorInfo(inputs[0]), inputs[0]->Map());
    std::unique_ptr<Encoder<float>> OutputEncoder = MakeEncoder<float>(GetTensorInfo(outputs[0]), outputs[0]->Map());

    if (!m_Data.m_Parameters.m_ConstantWeights)
    {
        m_WeightDecoder->Reset(inputs[1]->Map());
        if (m_Data.m_Parameters.m_BiasEnabled)
        {
            m_BiasDecoder->Reset(inputs[2]->Map());
        }
    }

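    // Delegate to the reference FullyConnected implementation (declared in FullyConnected.hpp), which
    // computes output = input x weights (optionally transposed), plus bias when enabled, per batch entry.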
    FullyConnected(m_InputShape,
                   *inputDecoder,
                   m_OutputShape,
                   *OutputEncoder,
                   m_WeightShape,
                   *m_WeightDecoder,
                   *m_BiasDecoder,
                   m_Data.m_Parameters.m_BiasEnabled,
                   m_NumActivations,
                   m_Data.m_Parameters.m_TransposeWeightMatrix);
}

} //namespace armnn