IVGCVSW-5816 Constant memory access

 * Add new class ManagedConstTensorHandle that unmaps the underlying constant tensor handle when it goes out of scope (sketched below)
 * Integrate it into the existing layers that hold constant tensors
 * Add unit tests

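For reviewers, a minimal standalone sketch of the unmap-on-scope-exit behaviour the
new class provides. The names and types below are illustrative stand-ins only, not
the armnn API; the real ManagedConstTensorHandle is added elsewhere in this change.

    #include <iostream>
    #include <memory>

    // Stand-in for a const tensor handle; the real armnn type differs.
    struct FakeConstTensorHandle
    {
        const void* Map(bool /*blocking*/ = true) { std::cout << "Map\n"; return &m_Data; }
        void Unmap() { std::cout << "Unmap\n"; }
        int m_Data = 0;
    };

    // RAII wrapper: maps on request and unmaps automatically on scope exit.
    // It tolerates a null handle, since layers construct it before checking
    // whether an optional constant (e.g. a bias) is actually present.
    class ManagedHandleSketch
    {
    public:
        explicit ManagedHandleSketch(std::shared_ptr<FakeConstTensorHandle> handle)
            : m_Handle(std::move(handle)) {}

        const void* Map(bool blocking = true)
        {
            if (!m_Handle) { return nullptr; }
            const void* ptr = m_Handle->Map(blocking);
            m_Mapped = true;
            return ptr;
        }

        ~ManagedHandleSketch()
        {
            if (m_Mapped) { m_Handle->Unmap(); }
        }

    private:
        std::shared_ptr<FakeConstTensorHandle> m_Handle;
        bool m_Mapped = false;
    };

    int main()
    {
        auto raw = std::make_shared<FakeConstTensorHandle>();
        {
            ManagedHandleSketch managed(raw);
            managed.Map();   // prints "Map"
        }                    // wrapper leaves scope here and prints "Unmap"
        return 0;
    }
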
Signed-off-by: Francis Murtagh <francis.murtagh@arm.com>
Change-Id: I0a05e14e438804b37e9862e76b5ca329483f6b45
diff --git a/src/armnn/layers/BatchNormalizationLayer.cpp b/src/armnn/layers/BatchNormalizationLayer.cpp
index 680d9e5..83ed45a 100644
--- a/src/armnn/layers/BatchNormalizationLayer.cpp
+++ b/src/armnn/layers/BatchNormalizationLayer.cpp
@@ -72,20 +72,31 @@
 
 void BatchNormalizationLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor meanTensor(m_Mean->GetTensorInfo(), m_Mean->Map(true));
-    ConstTensor varianceTensor(m_Variance->GetTensorInfo(), m_Variance->Map(true));
-    ConstTensor betaTensor(m_Beta->GetTensorInfo(), m_Beta->Map(true));
-    ConstTensor gammaTensor(m_Gamma->GetTensorInfo(), m_Gamma->Map(true));
+    ManagedConstTensorHandle managedMean(m_Mean);
+    ManagedConstTensorHandle managedVariance(m_Variance);
+    ManagedConstTensorHandle managedBeta(m_Beta);
+    ManagedConstTensorHandle managedGamma(m_Gamma);
+
+    ConstTensor meanTensor(managedMean.GetTensorInfo(), managedMean.Map());
+    ConstTensor varianceTensor(managedVariance.GetTensorInfo(), managedVariance.Map());
+    ConstTensor betaTensor(managedBeta.GetTensorInfo(), managedBeta.Map());
+    ConstTensor gammaTensor(managedGamma.GetTensorInfo(), managedGamma.Map());
+
     visitor.VisitBatchNormalizationLayer(
             this, GetParameters(), meanTensor, varianceTensor, betaTensor, gammaTensor, GetName());
 }
 
 void BatchNormalizationLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_Mean->GetTensorInfo(), m_Mean->Map(true)},
-                                                   {m_Variance->GetTensorInfo(), m_Variance->Map(true)},
-                                                   {m_Beta->GetTensorInfo(), m_Beta->Map(true)},
-                                                   {m_Gamma->GetTensorInfo(), m_Gamma->Map(true)} };
+    ManagedConstTensorHandle managedMean(m_Mean);
+    ManagedConstTensorHandle managedVariance(m_Variance);
+    ManagedConstTensorHandle managedBeta(m_Beta);
+    ManagedConstTensorHandle managedGamma(m_Gamma);
+
+    std::vector<armnn::ConstTensor> constTensors { { managedMean.GetTensorInfo(), managedMean.Map() },
+                                                   { managedVariance.GetTensorInfo(), managedVariance.Map() },
+                                                   { managedBeta.GetTensorInfo(), managedBeta.Map() },
+                                                   { managedGamma.GetTensorInfo(), managedGamma.Map() } };
 
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
 }
diff --git a/src/armnn/layers/ConstantLayer.cpp b/src/armnn/layers/ConstantLayer.cpp
index 8ae34b6..eb28187 100644
--- a/src/armnn/layers/ConstantLayer.cpp
+++ b/src/armnn/layers/ConstantLayer.cpp
@@ -64,14 +64,16 @@
 
 void ConstantLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor layerOutputTensor(m_LayerOutput->GetTensorInfo(), m_LayerOutput->Map(true)) ;
+    ManagedConstTensorHandle managedLayerOutput(m_LayerOutput);
+    ConstTensor layerOutputTensor(managedLayerOutput.GetTensorInfo(), managedLayerOutput.Map());
     visitor.VisitConstantLayer(this, layerOutputTensor, GetName());
 }
 
 void ConstantLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_LayerOutput->GetTensorInfo(), m_LayerOutput->Map(true)} };
-    strategy.ExecuteStrategy(this, BaseDescriptor(), constTensors, GetName());
+    ManagedConstTensorHandle managedLayerOutput(m_LayerOutput);
+    ConstTensor layerOutputTensor(managedLayerOutput.GetTensorInfo(), managedLayerOutput.Map());
+    strategy.ExecuteStrategy(this, BaseDescriptor(), { layerOutputTensor }, GetName());
 }
 
 } // namespace armnn
diff --git a/src/armnn/layers/Convolution2dLayer.cpp b/src/armnn/layers/Convolution2dLayer.cpp
index cf7cf0f..d7a7a33 100644
--- a/src/armnn/layers/Convolution2dLayer.cpp
+++ b/src/armnn/layers/Convolution2dLayer.cpp
@@ -145,12 +145,14 @@
 
 void Convolution2dLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)) ;
-    Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
 
+    Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+        ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
         optionalBiasTensor = Optional<ConstTensor>(biasTensor);
     }
 
@@ -159,11 +161,13 @@
 
 void Convolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
 
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
     }
 
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
index 0b2114a..3511ab5 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
@@ -153,12 +153,14 @@
 
 void DepthwiseConvolution2dLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true));
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
     Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
 
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+        ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
         optionalBiasTensor = Optional<ConstTensor>(biasTensor);
     }
 
@@ -167,11 +169,13 @@
 
 void DepthwiseConvolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
 
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
     }
 
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/DetectionPostProcessLayer.cpp b/src/armnn/layers/DetectionPostProcessLayer.cpp
index e5bbeca..b508617 100644
--- a/src/armnn/layers/DetectionPostProcessLayer.cpp
+++ b/src/armnn/layers/DetectionPostProcessLayer.cpp
@@ -80,14 +80,16 @@
 
 void DetectionPostProcessLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor anchorTensor(m_Anchors->GetTensorInfo(), m_Anchors->GetConstTensor<void>());
+    ManagedConstTensorHandle managedAnchors(m_Anchors);
+    ConstTensor anchorTensor(managedAnchors.GetTensorInfo(), managedAnchors.Map());
     visitor.VisitDetectionPostProcessLayer(this, GetParameters(), anchorTensor, GetName());
+    m_Anchors->Unmap();
 }
 
 void DetectionPostProcessLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_Anchors->GetTensorInfo(), m_Anchors->GetConstTensor<void>()} };
-
+    ManagedConstTensorHandle managedAnchors(m_Anchors);
+    std::vector<armnn::ConstTensor> constTensors { {managedAnchors.GetTensorInfo(), managedAnchors.Map()} };
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
 }
 
diff --git a/src/armnn/layers/FullyConnectedLayer.cpp b/src/armnn/layers/FullyConnectedLayer.cpp
index 44c8920..79d56c0 100644
--- a/src/armnn/layers/FullyConnectedLayer.cpp
+++ b/src/armnn/layers/FullyConnectedLayer.cpp
@@ -103,17 +103,21 @@
 {
     Optional<ConstTensor> optionalWeightsTensor = EmptyOptional();
     Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
-    if(GetParameters().m_ConstantWeights)
+
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    ManagedConstTensorHandle managedBias(m_Bias);
+    if (GetParameters().m_ConstantWeights)
     {
-        ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->GetConstTensor<void>());
+        ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
         optionalWeightsTensor = Optional<ConstTensor>(weightsTensor);
 
         if (GetParameters().m_BiasEnabled)
         {
-            ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->GetConstTensor<void>());
+            ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
             optionalBiasTensor = Optional<ConstTensor>(biasTensor);
         }
     }
+
     visitor.VisitFullyConnectedLayer(this,
                                      GetParameters(),
                                      optionalWeightsTensor.value(),
@@ -124,12 +128,15 @@
 void FullyConnectedLayer::ExecuteStrategy(IStrategy& strategy) const
 {
     std::vector <armnn::ConstTensor> constTensors;
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    ManagedConstTensorHandle managedBias(m_Bias);
+
     if(GetParameters().m_ConstantWeights)
     {
-        constTensors.emplace_back(ConstTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedWeight.GetTensorInfo(), managedWeight.Map()));
         if (GetParameters().m_BiasEnabled)
         {
-            constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
         }
     }
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/armnn/layers/LstmLayer.cpp b/src/armnn/layers/LstmLayer.cpp
index 0eeb2f8..403d911 100644
--- a/src/armnn/layers/LstmLayer.cpp
+++ b/src/armnn/layers/LstmLayer.cpp
@@ -303,35 +303,65 @@
 void LstmLayer::Accept(ILayerVisitor& visitor) const
 {
     LstmInputParams inputParams;
+    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+    // Cifg parameters
+    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+    // Projection parameters
+    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+    // Peephole parameters
+    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+    // Layer normalisation parameters
+    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
+
     ConstTensor inputToInputWeightsTensor;
     if (m_CifgParameters.m_InputToInputWeights != nullptr)
     {
-        ConstTensor inputToInputWeightsTensorCopy(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
-                                                  m_CifgParameters.m_InputToInputWeights->Map(true));
+        ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+                                                  managedInputToInputWeights.Map());
         inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
         inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
     }
     ConstTensor inputToForgetWeightsTensor;
     if (m_BasicParameters.m_InputToForgetWeights != nullptr)
     {
-        ConstTensor inputToForgetWeightsTensorCopy(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                                   m_BasicParameters.m_InputToForgetWeights->Map(true));
+        ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+                                                   managedInputToForgetWeights.Map());
         inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
         inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
     }
     ConstTensor inputToCellWeightsTensor;
     if (m_BasicParameters.m_InputToCellWeights != nullptr)
     {
-        ConstTensor inputToCellWeightsTensorCopy(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
-                                                 m_BasicParameters.m_InputToCellWeights->Map(true));
+        ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+                                                 managedInputToCellWeights.Map());
         inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
         inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
     }
     ConstTensor inputToOutputWeightsTensor;
     if (m_BasicParameters.m_InputToOutputWeights != nullptr)
     {
-        ConstTensor inputToOutputWeightsTensorCopy(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                                   m_BasicParameters.m_InputToOutputWeights->Map(true));
+        ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+                                                   managedInputToOutputWeights.Map());
         inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
         inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
     }
@@ -339,8 +369,8 @@
     if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
     {
         ConstTensor recurrentToInputWeightsTensorCopy(
-                m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                m_CifgParameters.m_RecurrentToInputWeights->Map(true));
+                managedRecurrentToInputWeights.GetTensorInfo(),
+                managedRecurrentToInputWeights.Map());
         recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
         inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
     }
@@ -348,8 +378,8 @@
     if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
     {
         ConstTensor recurrentToForgetWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToForgetWeights->Map(true));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map());
         recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
         inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
     }
@@ -357,8 +387,8 @@
     if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
     {
         ConstTensor recurrentToCellWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToCellWeights->Map(true));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map());
         recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
         inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
     }
@@ -366,112 +396,112 @@
     if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
     {
         ConstTensor recurrentToOutputWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToOutputWeights->Map(true));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map());
         recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
         inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
     }
     ConstTensor cellToInputWeightsTensor;
     if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
     {
-        ConstTensor cellToInputWeightsTensorCopy(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
-                                                 m_PeepholeParameters.m_CellToInputWeights->Map(true));
+        ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
+                                                 managedCellToInputWeights.Map());
         cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
         inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
     }
     ConstTensor cellToForgetWeightsTensor;
     if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
     {
-        ConstTensor cellToForgetWeightsTensorCopy(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToForgetWeights->Map(true));
+        ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
+                                                  managedCellToForgetWeights.Map());
         cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
         inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
     }
     ConstTensor cellToOutputWeightsTensor;
     if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
     {
-        ConstTensor cellToOutputWeightsTensorCopy(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToOutputWeights->Map(true));
+        ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
+                                                  managedCellToOutputWeights.Map());
         cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
         inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
     }
     ConstTensor inputGateBiasTensor;
     if (m_CifgParameters.m_InputGateBias != nullptr)
     {
-        ConstTensor inputGateBiasTensorCopy(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
-                                        m_CifgParameters.m_InputGateBias->Map(true));
+        ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+                                        managedInputGateBias.Map());
         inputGateBiasTensor = inputGateBiasTensorCopy;
         inputParams.m_InputGateBias = &inputGateBiasTensor;
     }
     ConstTensor forgetGateBiasTensor;
     if (m_BasicParameters.m_ForgetGateBias != nullptr)
     {
-        ConstTensor forgetGateBiasTensorCopy(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
-                                             m_BasicParameters.m_ForgetGateBias->Map(true));
+        ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+                                             managedForgetGateBias.Map());
         forgetGateBiasTensor = forgetGateBiasTensorCopy;
         inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
     }
     ConstTensor cellBiasTensor;
     if (m_BasicParameters.m_CellBias != nullptr)
     {
-        ConstTensor cellBiasTensorCopy(m_BasicParameters.m_CellBias->GetTensorInfo(),
-                                       m_BasicParameters.m_CellBias->Map(true));
+        ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+                                       managedCellBias.Map());
         cellBiasTensor = cellBiasTensorCopy;
         inputParams.m_CellBias = &cellBiasTensor;
     }
     ConstTensor outputGateBias;
     if (m_BasicParameters.m_OutputGateBias != nullptr)
     {
-        ConstTensor outputGateBiasCopy(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
-                                       m_BasicParameters.m_OutputGateBias->Map(true));
+        ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+                                       managedOutputGateBias.Map());
         outputGateBias = outputGateBiasCopy;
         inputParams.m_OutputGateBias = &outputGateBias;
     }
     ConstTensor projectionWeightsTensor;
     if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
     {
-        ConstTensor projectionWeightsTensorCopy(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
-                                                m_ProjectionParameters.m_ProjectionWeights->Map(true));
+        ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
+                                                managedProjectionWeights.Map());
         projectionWeightsTensor = projectionWeightsTensorCopy;
         inputParams.m_ProjectionWeights = &projectionWeightsTensor;
     }
     ConstTensor projectionBiasTensor;
     if (m_ProjectionParameters.m_ProjectionBias != nullptr)
     {
-        ConstTensor projectionBiasTensorCopy(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
-                                             m_ProjectionParameters.m_ProjectionBias->Map(true));
+        ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
+                                             managedProjectionBias.Map());
         projectionBiasTensor = projectionBiasTensorCopy;
         inputParams.m_ProjectionBias = &projectionBiasTensor;
     }
     ConstTensor inputLayerNormTensor;
     if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
     {
-        ConstTensor inputLayerNormTensorCopy(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
-                                             m_LayerNormParameters.m_InputLayerNormWeights->Map(true));
+        ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
+                                             managedInputLayerNormWeights.Map());
         inputLayerNormTensor = inputLayerNormTensorCopy;
         inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
     }
     ConstTensor forgetLayerNormTensor;
     if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
     {
-        ConstTensor forgetLayerNormTensorCopy(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
-                                             m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true));
+        ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
+                                             managedForgetLayerNormWeights.Map());
         forgetLayerNormTensor = forgetLayerNormTensorCopy;
         inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
     }
     ConstTensor cellLayerNormTensor;
     if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
     {
-        ConstTensor cellLayerNormTensorCopy(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_CellLayerNormWeights->Map(true));
+        ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
+                                              managedCellLayerNormWeights.Map());
         cellLayerNormTensor = cellLayerNormTensorCopy;
         inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
     }
     ConstTensor outputLayerNormTensor;
     if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
     {
-        ConstTensor outputLayerNormTensorCopy(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
-                                            m_LayerNormParameters.m_OutputLayerNormWeights->Map(true));
+        ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
+                                            managedOutputLayerNormWeights.Map());
         outputLayerNormTensor = outputLayerNormTensorCopy;
         inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
     }
@@ -486,54 +516,84 @@
 
     LstmDescriptor descriptor = GetParameters();
 
+    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+    // Cifg parameters
+    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+    // Projection parameters
+    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+    // Peephole parameters
+    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+    // Layer normalisation parameters
+    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
+
     // First add mandatory/basic parameters
     if (m_BasicParameters.m_InputToForgetWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToForgetWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+                                              managedInputToForgetWeights.Map()));
     }
     if (m_BasicParameters.m_InputToCellWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToCellWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+                                              managedInputToCellWeights.Map()));
     }
     if (m_BasicParameters.m_InputToOutputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToOutputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+                                              managedInputToOutputWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToForgetWeights->Map(true)));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToCellWeights->Map(true)));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToOutputWeights->Map(true)));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map()));
     }
     if (m_BasicParameters.m_ForgetGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
-                                              m_BasicParameters.m_ForgetGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+                                              managedForgetGateBias.Map()));
     }
     if (m_BasicParameters.m_CellBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_CellBias->GetTensorInfo(),
-                                              m_BasicParameters.m_CellBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+                                              managedCellBias.Map()));
     }
     if (m_BasicParameters.m_OutputGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
-                                              m_BasicParameters.m_OutputGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+                                              managedOutputGateBias.Map()));
     }
 
     // Add cifg parameters
@@ -541,19 +601,19 @@
     {
         if (m_CifgParameters.m_InputToInputWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
-                                                  m_CifgParameters.m_InputToInputWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+                                                  managedInputToInputWeights.Map()));
         }
         if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
         {
             constTensors.emplace_back(ConstTensor(
-                    m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                    m_CifgParameters.m_RecurrentToInputWeights->Map(true)));
+                    managedRecurrentToInputWeights.GetTensorInfo(),
+                    managedRecurrentToInputWeights.Map()));
         }
         if (m_CifgParameters.m_InputGateBias != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
-                                                  m_CifgParameters.m_InputGateBias->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+                                                  managedInputGateBias.Map()));
         }
     }
 
@@ -564,19 +624,19 @@
         {
             if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
             {
-                constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
-                                                      m_PeepholeParameters.m_CellToInputWeights->Map(true)));
+                constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
+                                                      managedCellToInputWeights.Map()));
             }
         }
         if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToForgetWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
+                                                  managedCellToForgetWeights.Map()));
         }
         if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToOutputWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
+                                                  managedCellToOutputWeights.Map()));
         }
     }
 
@@ -585,13 +645,13 @@
     {
         if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
-                                                  m_ProjectionParameters.m_ProjectionWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
+                                                  managedProjectionWeights.Map()));
         }
         if (m_ProjectionParameters.m_ProjectionBias != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
-                                                  m_ProjectionParameters.m_ProjectionBias->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
+                                                  managedProjectionBias.Map()));
         }
     }
 
@@ -602,24 +662,24 @@
         {
             if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
             {
-                constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
-                                                      m_LayerNormParameters.m_InputLayerNormWeights->Map(true)));
+                constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
+                                                      managedInputLayerNormWeights.Map()));
             }
         }
         if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
-                                                  m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
+                                                  managedForgetLayerNormWeights.Map()));
         }
         if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
-                                                  m_LayerNormParameters.m_CellLayerNormWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
+                                                  managedCellLayerNormWeights.Map()));
         }
         if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
         {
-            constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
-                                                  m_LayerNormParameters.m_OutputLayerNormWeights->Map(true)));
+            constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
+                                                  managedOutputLayerNormWeights.Map()));
         }
     }
 
diff --git a/src/armnn/layers/QLstmLayer.cpp b/src/armnn/layers/QLstmLayer.cpp
index 16aa718..72b020f 100644
--- a/src/armnn/layers/QLstmLayer.cpp
+++ b/src/armnn/layers/QLstmLayer.cpp
@@ -305,12 +305,41 @@
 void QLstmLayer::Accept(ILayerVisitor& visitor) const
 {
     LstmInputParams inputParams;
+    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+    // Cifg parameters
+    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+    // Projection parameters
+    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+    // Peephole parameters
+    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+    // Layer normalisation parameters
+    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
 
     ConstTensor inputToInputWeightsTensor;
     if (m_CifgParameters.m_InputToInputWeights != nullptr)
     {
-        ConstTensor inputToInputWeightsTensorCopy(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
-                                                  m_CifgParameters.m_InputToInputWeights->Map(true));
+        ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+                                                  managedInputToInputWeights.Map());
         inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
         inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
     }
@@ -318,8 +347,8 @@
     ConstTensor inputToForgetWeightsTensor;
     if (m_BasicParameters.m_InputToForgetWeights != nullptr)
     {
-        ConstTensor inputToForgetWeightsTensorCopy(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                                   m_BasicParameters.m_InputToForgetWeights->Map(true));
+        ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+                                                   managedInputToForgetWeights.Map());
         inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
         inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
     }
@@ -327,8 +356,8 @@
     ConstTensor inputToCellWeightsTensor;
     if (m_BasicParameters.m_InputToCellWeights != nullptr)
     {
-        ConstTensor inputToCellWeightsTensorCopy(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
-                                                 m_BasicParameters.m_InputToCellWeights->Map(true));
+        ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+                                                 managedInputToCellWeights.Map());
         inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
         inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
     }
@@ -336,8 +365,8 @@
     ConstTensor inputToOutputWeightsTensor;
     if (m_BasicParameters.m_InputToOutputWeights != nullptr)
     {
-        ConstTensor inputToOutputWeightsTensorCopy(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                                   m_BasicParameters.m_InputToOutputWeights->Map(true));
+        ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+                                                   managedInputToOutputWeights.Map());
         inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
         inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
     }
@@ -346,8 +375,8 @@
     if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
     {
         ConstTensor recurrentToInputWeightsTensorCopy(
-                m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                m_CifgParameters.m_RecurrentToInputWeights->Map(true));
+                managedRecurrentToInputWeights.GetTensorInfo(),
+                managedRecurrentToInputWeights.Map());
         recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
         inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
     }
@@ -356,8 +385,8 @@
     if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
     {
         ConstTensor recurrentToForgetWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToForgetWeights->Map(true));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map());
         recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
         inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
     }
@@ -366,8 +395,8 @@
     if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
     {
         ConstTensor recurrentToCellWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToCellWeights->Map(true));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map());
         recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
         inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
     }
@@ -376,8 +405,8 @@
     if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
     {
         ConstTensor recurrentToOutputWeightsTensorCopy(
-                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToOutputWeights->Map(true));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map());
         recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
         inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
     }
@@ -385,8 +414,8 @@
     ConstTensor cellToInputWeightsTensor;
     if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
     {
-        ConstTensor cellToInputWeightsTensorCopy(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
-                                                 m_PeepholeParameters.m_CellToInputWeights->Map(true));
+        ConstTensor cellToInputWeightsTensorCopy(managedCellToInputWeights.GetTensorInfo(),
+                                                 managedCellToInputWeights.Map());
         cellToInputWeightsTensor = cellToInputWeightsTensorCopy;
         inputParams.m_CellToInputWeights = &cellToInputWeightsTensor;
     }
@@ -394,8 +423,8 @@
     ConstTensor cellToForgetWeightsTensor;
     if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
     {
-        ConstTensor cellToForgetWeightsTensorCopy(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToForgetWeights->Map(true));
+        ConstTensor cellToForgetWeightsTensorCopy(managedCellToForgetWeights.GetTensorInfo(),
+                                                  managedCellToForgetWeights.Map());
         cellToForgetWeightsTensor = cellToForgetWeightsTensorCopy;
         inputParams.m_CellToForgetWeights = &cellToForgetWeightsTensor;
     }
@@ -403,8 +432,8 @@
     ConstTensor cellToOutputWeightsTensor;
     if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
     {
-        ConstTensor cellToOutputWeightsTensorCopy(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
-                                                  m_PeepholeParameters.m_CellToOutputWeights->Map(true));
+        ConstTensor cellToOutputWeightsTensorCopy(managedCellToOutputWeights.GetTensorInfo(),
+                                                  managedCellToOutputWeights.Map());
         cellToOutputWeightsTensor = cellToOutputWeightsTensorCopy;
         inputParams.m_CellToOutputWeights = &cellToOutputWeightsTensor;
     }
@@ -412,8 +441,8 @@
     ConstTensor inputGateBiasTensor;
     if (m_CifgParameters.m_InputGateBias != nullptr)
     {
-        ConstTensor inputGateBiasTensorCopy(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
-                                            m_CifgParameters.m_InputGateBias->Map(true));
+        ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+                                            managedInputGateBias.Map());
         inputGateBiasTensor = inputGateBiasTensorCopy;
         inputParams.m_InputGateBias = &inputGateBiasTensor;
     }
@@ -421,8 +450,8 @@
     ConstTensor forgetGateBiasTensor;
     if (m_BasicParameters.m_ForgetGateBias != nullptr)
     {
-        ConstTensor forgetGateBiasTensorCopy(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
-                                             m_BasicParameters.m_ForgetGateBias->Map(true));
+        ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+                                             managedForgetGateBias.Map());
         forgetGateBiasTensor = forgetGateBiasTensorCopy;
         inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
     }
@@ -430,8 +459,8 @@
     ConstTensor cellBiasTensor;
     if (m_BasicParameters.m_CellBias != nullptr)
     {
-        ConstTensor cellBiasTensorCopy(m_BasicParameters.m_CellBias->GetTensorInfo(),
-                                       m_BasicParameters.m_CellBias->Map(true));
+        ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+                                       managedCellBias.Map());
         cellBiasTensor = cellBiasTensorCopy;
         inputParams.m_CellBias = &cellBiasTensor;
     }
@@ -439,8 +468,8 @@
     ConstTensor outputGateBias;
     if (m_BasicParameters.m_OutputGateBias != nullptr)
     {
-        ConstTensor outputGateBiasCopy(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
-                                       m_BasicParameters.m_OutputGateBias->Map(true));
+        ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+                                       managedOutputGateBias.Map());
         outputGateBias = outputGateBiasCopy;
         inputParams.m_OutputGateBias = &outputGateBias;
     }
@@ -448,8 +477,8 @@
     ConstTensor projectionWeightsTensor;
     if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
     {
-        ConstTensor projectionWeightsTensorCopy(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
-                                                m_ProjectionParameters.m_ProjectionWeights->Map(true));
+        ConstTensor projectionWeightsTensorCopy(managedProjectionWeights.GetTensorInfo(),
+                                                managedProjectionWeights.Map());
         projectionWeightsTensor = projectionWeightsTensorCopy;
         inputParams.m_ProjectionWeights = &projectionWeightsTensor;
     }
@@ -457,8 +486,8 @@
     ConstTensor projectionBiasTensor;
     if (m_ProjectionParameters.m_ProjectionBias != nullptr)
     {
-        ConstTensor projectionBiasTensorCopy(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
-                                             m_ProjectionParameters.m_ProjectionBias->Map(true));
+        ConstTensor projectionBiasTensorCopy(managedProjectionBias.GetTensorInfo(),
+                                             managedProjectionBias.Map());
         projectionBiasTensor = projectionBiasTensorCopy;
         inputParams.m_ProjectionBias = &projectionBiasTensor;
     }
@@ -466,8 +495,8 @@
     ConstTensor inputLayerNormTensor;
     if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
     {
-        ConstTensor inputLayerNormTensorCopy(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
-                                             m_LayerNormParameters.m_InputLayerNormWeights->Map(true));
+        ConstTensor inputLayerNormTensorCopy(managedInputLayerNormWeights.GetTensorInfo(),
+                                             managedInputLayerNormWeights.Map());
         inputLayerNormTensor = inputLayerNormTensorCopy;
         inputParams.m_InputLayerNormWeights = &inputLayerNormTensor;
     }
@@ -475,8 +504,8 @@
     ConstTensor forgetLayerNormTensor;
     if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
     {
-        ConstTensor forgetLayerNormTensorCopy(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true));
+        ConstTensor forgetLayerNormTensorCopy(managedForgetLayerNormWeights.GetTensorInfo(),
+                                              managedForgetLayerNormWeights.Map());
         forgetLayerNormTensor = forgetLayerNormTensorCopy;
         inputParams.m_ForgetLayerNormWeights = &forgetLayerNormTensor;
     }
@@ -484,8 +513,8 @@
     ConstTensor cellLayerNormTensor;
     if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
     {
-        ConstTensor cellLayerNormTensorCopy(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
-                                            m_LayerNormParameters.m_CellLayerNormWeights->Map(true));
+        ConstTensor cellLayerNormTensorCopy(managedCellLayerNormWeights.GetTensorInfo(),
+                                            managedCellLayerNormWeights.Map());
         cellLayerNormTensor = cellLayerNormTensorCopy;
         inputParams.m_CellLayerNormWeights = &cellLayerNormTensor;
     }
@@ -493,8 +522,8 @@
     ConstTensor outputLayerNormTensor;
     if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
     {
-        ConstTensor outputLayerNormTensorCopy(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_OutputLayerNormWeights->Map(true));
+        ConstTensor outputLayerNormTensorCopy(managedOutputLayerNormWeights.GetTensorInfo(),
+                                              managedOutputLayerNormWeights.Map());
         outputLayerNormTensor = outputLayerNormTensorCopy;
         inputParams.m_OutputLayerNormWeights = &outputLayerNormTensor;
     }
@@ -507,124 +536,153 @@
 void QLstmLayer::ExecuteStrategy(IStrategy& strategy) const
 {
     std::vector<ConstTensor> constTensors;
+    ManagedConstTensorHandle managedInputToForgetWeights(m_BasicParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_BasicParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_BasicParameters.m_InputToOutputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_BasicParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_BasicParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_BasicParameters.m_RecurrentToOutputWeights);
+    ManagedConstTensorHandle managedForgetGateBias(m_BasicParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_BasicParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_BasicParameters.m_OutputGateBias);
+
+    // Cifg parameters
+    ManagedConstTensorHandle managedInputToInputWeights(m_CifgParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_CifgParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedInputGateBias(m_CifgParameters.m_InputGateBias);
+
+    // Projection parameters
+    ManagedConstTensorHandle managedProjectionWeights(m_ProjectionParameters.m_ProjectionWeights);
+    ManagedConstTensorHandle managedProjectionBias(m_ProjectionParameters.m_ProjectionBias);
+
+    // Peephole parameters
+    ManagedConstTensorHandle managedCellToInputWeights(m_PeepholeParameters.m_CellToInputWeights);
+    ManagedConstTensorHandle managedCellToForgetWeights(m_PeepholeParameters.m_CellToForgetWeights);
+    ManagedConstTensorHandle managedCellToOutputWeights(m_PeepholeParameters.m_CellToOutputWeights);
+
+    // Layer normalisation parameters
+    ManagedConstTensorHandle managedInputLayerNormWeights(m_LayerNormParameters.m_InputLayerNormWeights);
+    ManagedConstTensorHandle managedForgetLayerNormWeights(m_LayerNormParameters.m_ForgetLayerNormWeights);
+    ManagedConstTensorHandle managedCellLayerNormWeights(m_LayerNormParameters.m_CellLayerNormWeights);
+    ManagedConstTensorHandle managedOutputLayerNormWeights(m_LayerNormParameters.m_OutputLayerNormWeights);
 
     // First add mandatory/basic parameters
     if (m_BasicParameters.m_InputToForgetWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToForgetWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+                                              managedInputToForgetWeights.Map()));
     }
     if (m_BasicParameters.m_InputToCellWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToCellWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToCellWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+                                              managedInputToCellWeights.Map()));
     }
     if (m_BasicParameters.m_InputToOutputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                              m_BasicParameters.m_InputToOutputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+                                              managedInputToOutputWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToForgetWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToForgetWeights->Map(true)));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToCellWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToCellWeights->Map(true)));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map()));
     }
     if (m_BasicParameters.m_RecurrentToOutputWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_BasicParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_BasicParameters.m_RecurrentToOutputWeights->Map(true)));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map()));
     }
     if (m_BasicParameters.m_ForgetGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_ForgetGateBias->GetTensorInfo(),
-                                              m_BasicParameters.m_ForgetGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+                                              managedForgetGateBias.Map()));
     }
     if (m_BasicParameters.m_CellBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_CellBias->GetTensorInfo(),
-                                              m_BasicParameters.m_CellBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+                                              managedCellBias.Map()));
     }
     if (m_BasicParameters.m_OutputGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_BasicParameters.m_OutputGateBias->GetTensorInfo(),
-                                              m_BasicParameters.m_OutputGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+                                              managedOutputGateBias.Map()));
     }
 
     // Add Cifg parameters
     if (m_CifgParameters.m_InputToInputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputToInputWeights->GetTensorInfo(),
-                                              m_CifgParameters.m_InputToInputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+                                              managedInputToInputWeights.Map()));
     }
     if (m_CifgParameters.m_RecurrentToInputWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_CifgParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                m_CifgParameters.m_RecurrentToInputWeights->Map(true)));
+                managedRecurrentToInputWeights.GetTensorInfo(),
+                managedRecurrentToInputWeights.Map()));
     }
     if (m_CifgParameters.m_InputGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_CifgParameters.m_InputGateBias->GetTensorInfo(),
-                                              m_CifgParameters.m_InputGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+                                              managedInputGateBias.Map()));
     }
 
     // Add peephole parameters
     if (m_PeepholeParameters.m_CellToInputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToInputWeights->GetTensorInfo(),
-                                              m_PeepholeParameters.m_CellToInputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellToInputWeights.GetTensorInfo(),
+                                              managedCellToInputWeights.Map()));
     }
     if (m_PeepholeParameters.m_CellToForgetWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToForgetWeights->GetTensorInfo(),
-                                              m_PeepholeParameters.m_CellToForgetWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellToForgetWeights.GetTensorInfo(),
+                                              managedCellToForgetWeights.Map()));
     }
     if (m_PeepholeParameters.m_CellToOutputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_PeepholeParameters.m_CellToOutputWeights->GetTensorInfo(),
-                                              m_PeepholeParameters.m_CellToOutputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellToOutputWeights.GetTensorInfo(),
+                                              managedCellToOutputWeights.Map()));
     }
 
     // Add projection parameters
     if (m_ProjectionParameters.m_ProjectionWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionWeights->GetTensorInfo(),
-                                              m_ProjectionParameters.m_ProjectionWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedProjectionWeights.GetTensorInfo(),
+                                              managedProjectionWeights.Map()));
     }
     if (m_ProjectionParameters.m_ProjectionBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_ProjectionParameters.m_ProjectionBias->GetTensorInfo(),
-                                              m_ProjectionParameters.m_ProjectionBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedProjectionBias.GetTensorInfo(),
+                                              managedProjectionBias.Map()));
     }
 
     // Add norm parameters
     if (m_LayerNormParameters.m_InputLayerNormWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_InputLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_InputLayerNormWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputLayerNormWeights.GetTensorInfo(),
+                                              managedInputLayerNormWeights.Map()));
     }
     if (m_LayerNormParameters.m_ForgetLayerNormWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_ForgetLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_ForgetLayerNormWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedForgetLayerNormWeights.GetTensorInfo(),
+                                              managedForgetLayerNormWeights.Map()));
     }
     if (m_LayerNormParameters.m_CellLayerNormWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_CellLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_CellLayerNormWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellLayerNormWeights.GetTensorInfo(),
+                                              managedCellLayerNormWeights.Map()));
     }
     if (m_LayerNormParameters.m_OutputLayerNormWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_LayerNormParameters.m_OutputLayerNormWeights->GetTensorInfo(),
-                                              m_LayerNormParameters.m_OutputLayerNormWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedOutputLayerNormWeights.GetTensorInfo(),
+                                              managedOutputLayerNormWeights.Map()));
     }
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
 }
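
The QLstm changes above all follow one pattern for optional parameters: a ManagedConstTensorHandle is constructed up front even when the underlying shared_ptr may be null, and Map() is only called behind the existing null check on the raw member. A minimal sketch of the pattern, with a hypothetical m_SomeParameters.m_OptionalWeights member standing in for any of the optional tensors above (illustrative only, not part of this patch):

    // The shared_ptr may be null for an optional parameter; constructing the handle is still safe.
    ManagedConstTensorHandle managedOptionalWeights(m_SomeParameters.m_OptionalWeights);

    if (m_SomeParameters.m_OptionalWeights != nullptr)
    {
        // Map() is only reached when the handle wraps a real tensor.
        constTensors.emplace_back(ConstTensor(managedOptionalWeights.GetTensorInfo(),
                                              managedOptionalWeights.Map()));
    }
    // When managedOptionalWeights goes out of scope its destructor calls Unmap(),
    // but only if Map() was actually called.
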
diff --git a/src/armnn/layers/QuantizedLstmLayer.cpp b/src/armnn/layers/QuantizedLstmLayer.cpp
index a1ff985..4d0dab9 100644
--- a/src/armnn/layers/QuantizedLstmLayer.cpp
+++ b/src/armnn/layers/QuantizedLstmLayer.cpp
@@ -173,12 +173,27 @@
 {
     QuantizedLstmInputParams inputParams;
 
+    ManagedConstTensorHandle managedInputToInputWeights(m_QuantizedLstmParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedInputToForgetWeights(m_QuantizedLstmParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_QuantizedLstmParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_QuantizedLstmParameters.m_InputToOutputWeights);
+
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_QuantizedLstmParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_QuantizedLstmParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_QuantizedLstmParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_QuantizedLstmParameters.m_RecurrentToOutputWeights);
+
+    ManagedConstTensorHandle managedInputGateBias(m_QuantizedLstmParameters.m_InputGateBias);
+    ManagedConstTensorHandle managedForgetGateBias(m_QuantizedLstmParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_QuantizedLstmParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_QuantizedLstmParameters.m_OutputGateBias);
+
     // InputToX weight tensors
     ConstTensor inputToInputWeightsTensor;
     if (m_QuantizedLstmParameters.m_InputToInputWeights != nullptr)
     {
-        ConstTensor inputToInputWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo(),
-                                                  m_QuantizedLstmParameters.m_InputToInputWeights->Map(true));
+        ConstTensor inputToInputWeightsTensorCopy(managedInputToInputWeights.GetTensorInfo(),
+                                                  managedInputToInputWeights.Map());
         inputToInputWeightsTensor = inputToInputWeightsTensorCopy;
         inputParams.m_InputToInputWeights = &inputToInputWeightsTensor;
     }
@@ -186,8 +201,8 @@
     ConstTensor inputToForgetWeightsTensor;
     if (m_QuantizedLstmParameters.m_InputToForgetWeights != nullptr)
     {
-        ConstTensor inputToForgetWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                                   m_QuantizedLstmParameters.m_InputToForgetWeights->Map(true));
+        ConstTensor inputToForgetWeightsTensorCopy(managedInputToForgetWeights.GetTensorInfo(),
+                                                   managedInputToForgetWeights.Map());
         inputToForgetWeightsTensor = inputToForgetWeightsTensorCopy;
         inputParams.m_InputToForgetWeights = &inputToForgetWeightsTensor;
     }
@@ -195,8 +210,8 @@
     ConstTensor inputToCellWeightsTensor;
     if (m_QuantizedLstmParameters.m_InputToCellWeights != nullptr)
     {
-        ConstTensor inputToCellWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo(),
-                                                 m_QuantizedLstmParameters.m_InputToCellWeights->Map(true));
+        ConstTensor inputToCellWeightsTensorCopy(managedInputToCellWeights.GetTensorInfo(),
+                                                 managedInputToCellWeights.Map());
         inputToCellWeightsTensor = inputToCellWeightsTensorCopy;
         inputParams.m_InputToCellWeights = &inputToCellWeightsTensor;
     }
@@ -204,8 +219,8 @@
     ConstTensor inputToOutputWeightsTensor;
     if (m_QuantizedLstmParameters.m_InputToOutputWeights != nullptr)
     {
-        ConstTensor inputToOutputWeightsTensorCopy(m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                                   m_QuantizedLstmParameters.m_InputToOutputWeights->Map(true));
+        ConstTensor inputToOutputWeightsTensorCopy(managedInputToOutputWeights.GetTensorInfo(),
+                                                   managedInputToOutputWeights.Map());
         inputToOutputWeightsTensor = inputToOutputWeightsTensorCopy;
         inputParams.m_InputToOutputWeights = &inputToOutputWeightsTensor;
     }
@@ -215,8 +230,8 @@
     if (m_QuantizedLstmParameters.m_RecurrentToInputWeights != nullptr)
     {
         ConstTensor recurrentToInputWeightsTensorCopy(
-                m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToInputWeights->Map(true));
+                managedRecurrentToInputWeights.GetTensorInfo(),
+                managedRecurrentToInputWeights.Map());
         recurrentToInputWeightsTensor = recurrentToInputWeightsTensorCopy;
         inputParams.m_RecurrentToInputWeights = &recurrentToInputWeightsTensor;
     }
@@ -225,8 +240,8 @@
     if (m_QuantizedLstmParameters.m_RecurrentToForgetWeights != nullptr)
     {
         ConstTensor recurrentToForgetWeightsTensorCopy(
-                m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToForgetWeights->Map(true));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map());
         recurrentToForgetWeightsTensor = recurrentToForgetWeightsTensorCopy;
         inputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeightsTensor;
     }
@@ -235,8 +250,8 @@
     if (m_QuantizedLstmParameters.m_RecurrentToCellWeights != nullptr)
     {
         ConstTensor recurrentToCellWeightsTensorCopy(
-                m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToCellWeights->Map(true));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map());
         recurrentToCellWeightsTensor = recurrentToCellWeightsTensorCopy;
         inputParams.m_RecurrentToCellWeights = &recurrentToCellWeightsTensor;
     }
@@ -245,8 +260,8 @@
     if (m_QuantizedLstmParameters.m_RecurrentToOutputWeights != nullptr)
     {
         ConstTensor recurrentToOutputWeightsTensorCopy(
-                m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToOutputWeights->Map(true));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map());
         recurrentToOutputWeightsTensor = recurrentToOutputWeightsTensorCopy;
         inputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeightsTensor;
     }
@@ -255,8 +270,8 @@
     ConstTensor inputGateBiasTensor;
     if (m_QuantizedLstmParameters.m_InputGateBias != nullptr)
     {
-        ConstTensor inputGateBiasTensorCopy(m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo(),
-                                            m_QuantizedLstmParameters.m_InputGateBias->Map(true));
+        ConstTensor inputGateBiasTensorCopy(managedInputGateBias.GetTensorInfo(),
+                                            managedInputGateBias.Map());
         inputGateBiasTensor = inputGateBiasTensorCopy;
         inputParams.m_InputGateBias = &inputGateBiasTensor;
     }
@@ -264,8 +279,8 @@
     ConstTensor forgetGateBiasTensor;
     if (m_QuantizedLstmParameters.m_ForgetGateBias != nullptr)
     {
-        ConstTensor forgetGateBiasTensorCopy(m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo(),
-                                             m_QuantizedLstmParameters.m_ForgetGateBias->Map(true));
+        ConstTensor forgetGateBiasTensorCopy(managedForgetGateBias.GetTensorInfo(),
+                                             managedForgetGateBias.Map());
         forgetGateBiasTensor = forgetGateBiasTensorCopy;
         inputParams.m_ForgetGateBias = &forgetGateBiasTensor;
     }
@@ -273,8 +288,8 @@
     ConstTensor cellBiasTensor;
     if (m_QuantizedLstmParameters.m_CellBias != nullptr)
     {
-        ConstTensor cellBiasTensorCopy(m_QuantizedLstmParameters.m_CellBias->GetTensorInfo(),
-                                       m_QuantizedLstmParameters.m_CellBias->Map(true));
+        ConstTensor cellBiasTensorCopy(managedCellBias.GetTensorInfo(),
+                                       managedCellBias.Map());
         cellBiasTensor = cellBiasTensorCopy;
         inputParams.m_CellBias = &cellBiasTensor;
     }
@@ -282,8 +297,8 @@
     ConstTensor outputGateBiasTensor;
     if (m_QuantizedLstmParameters.m_OutputGateBias != nullptr)
     {
-        ConstTensor outputGateBiasCopy(m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo(),
-                                       m_QuantizedLstmParameters.m_OutputGateBias->Map(true));
+        ConstTensor outputGateBiasCopy(managedOutputGateBias.GetTensorInfo(),
+                                       managedOutputGateBias.Map());
         outputGateBiasTensor = outputGateBiasCopy;
         inputParams.m_OutputGateBias = &outputGateBiasTensor;
     }
@@ -295,83 +310,98 @@
 {
     std::vector<ConstTensor> constTensors;
 
+    ManagedConstTensorHandle managedInputToInputWeights(m_QuantizedLstmParameters.m_InputToInputWeights);
+    ManagedConstTensorHandle managedInputToForgetWeights(m_QuantizedLstmParameters.m_InputToForgetWeights);
+    ManagedConstTensorHandle managedInputToCellWeights(m_QuantizedLstmParameters.m_InputToCellWeights);
+    ManagedConstTensorHandle managedInputToOutputWeights(m_QuantizedLstmParameters.m_InputToOutputWeights);
+
+    ManagedConstTensorHandle managedRecurrentToInputWeights(m_QuantizedLstmParameters.m_RecurrentToInputWeights);
+    ManagedConstTensorHandle managedRecurrentToForgetWeights(m_QuantizedLstmParameters.m_RecurrentToForgetWeights);
+    ManagedConstTensorHandle managedRecurrentToCellWeights(m_QuantizedLstmParameters.m_RecurrentToCellWeights);
+    ManagedConstTensorHandle managedRecurrentToOutputWeights(m_QuantizedLstmParameters.m_RecurrentToOutputWeights);
+
+    ManagedConstTensorHandle managedInputGateBias(m_QuantizedLstmParameters.m_InputGateBias);
+    ManagedConstTensorHandle managedForgetGateBias(m_QuantizedLstmParameters.m_ForgetGateBias);
+    ManagedConstTensorHandle managedCellBias(m_QuantizedLstmParameters.m_CellBias);
+    ManagedConstTensorHandle managedOutputGateBias(m_QuantizedLstmParameters.m_OutputGateBias);
+
     // InputToX weight tensors
     if (m_QuantizedLstmParameters.m_InputToInputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToInputWeights->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_InputToInputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToInputWeights.GetTensorInfo(),
+                                              managedInputToInputWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_InputToForgetWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToForgetWeights->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_InputToForgetWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToForgetWeights.GetTensorInfo(),
+                                              managedInputToForgetWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_InputToCellWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToCellWeights->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_InputToCellWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToCellWeights.GetTensorInfo(),
+                                              managedInputToCellWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_InputToOutputWeights != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputToOutputWeights->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_InputToOutputWeights->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputToOutputWeights.GetTensorInfo(),
+                                              managedInputToOutputWeights.Map()));
     }
 
     // RecurrentToX weight tensors
     if (m_QuantizedLstmParameters.m_RecurrentToInputWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_QuantizedLstmParameters.m_RecurrentToInputWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToInputWeights->Map(true)));
+                managedRecurrentToInputWeights.GetTensorInfo(),
+                managedRecurrentToInputWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_RecurrentToForgetWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_QuantizedLstmParameters.m_RecurrentToForgetWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToForgetWeights->Map(true)));
+                managedRecurrentToForgetWeights.GetTensorInfo(),
+                managedRecurrentToForgetWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_RecurrentToCellWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_QuantizedLstmParameters.m_RecurrentToCellWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToCellWeights->Map(true)));
+                managedRecurrentToCellWeights.GetTensorInfo(),
+                managedRecurrentToCellWeights.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_RecurrentToOutputWeights != nullptr)
     {
         constTensors.emplace_back(ConstTensor(
-                m_QuantizedLstmParameters.m_RecurrentToOutputWeights->GetTensorInfo(),
-                m_QuantizedLstmParameters.m_RecurrentToOutputWeights->Map(true)));
+                managedRecurrentToOutputWeights.GetTensorInfo(),
+                managedRecurrentToOutputWeights.Map()));
     }
 
     // Bias tensors
     if (m_QuantizedLstmParameters.m_InputGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_InputGateBias->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_InputGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedInputGateBias.GetTensorInfo(),
+                                              managedInputGateBias.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_ForgetGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_ForgetGateBias->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_ForgetGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedForgetGateBias.GetTensorInfo(),
+                                              managedForgetGateBias.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_CellBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_CellBias->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_CellBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedCellBias.GetTensorInfo(),
+                                              managedCellBias.Map()));
     }
 
     if (m_QuantizedLstmParameters.m_OutputGateBias != nullptr)
     {
-        constTensors.emplace_back(ConstTensor(m_QuantizedLstmParameters.m_OutputGateBias->GetTensorInfo(),
-                                              m_QuantizedLstmParameters.m_OutputGateBias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedOutputGateBias.GetTensorInfo(),
+                                              managedOutputGateBias.Map()));
     }
 
 
diff --git a/src/armnn/layers/TransposeConvolution2dLayer.cpp b/src/armnn/layers/TransposeConvolution2dLayer.cpp
index 8f6908e..c0a7dfa 100644
--- a/src/armnn/layers/TransposeConvolution2dLayer.cpp
+++ b/src/armnn/layers/TransposeConvolution2dLayer.cpp
@@ -123,12 +123,14 @@
 
 void TransposeConvolution2dLayer::Accept(ILayerVisitor& visitor) const
 {
-    ConstTensor weightsTensor(m_Weight->GetTensorInfo(), m_Weight->Map(true)) ;
-    Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    ConstTensor weightsTensor(managedWeight.GetTensorInfo(), managedWeight.Map());
 
+    Optional<ConstTensor> optionalBiasTensor = EmptyOptional();
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        ConstTensor biasTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true));
+        ConstTensor biasTensor(managedBias.GetTensorInfo(), managedBias.Map());
         optionalBiasTensor = Optional<ConstTensor>(biasTensor);
     }
 
@@ -137,11 +139,13 @@
 
 void TransposeConvolution2dLayer::ExecuteStrategy(IStrategy& strategy) const
 {
-    std::vector<armnn::ConstTensor> constTensors { {m_Weight->GetTensorInfo(), m_Weight->Map(true)} };
+    ManagedConstTensorHandle managedWeight(m_Weight);
+    std::vector<armnn::ConstTensor> constTensors { { managedWeight.GetTensorInfo(), managedWeight.Map() } };
 
+    ManagedConstTensorHandle managedBias(m_Bias);
     if (GetParameters().m_BiasEnabled)
     {
-        constTensors.emplace_back(ConstTensor(m_Bias->GetTensorInfo(), m_Bias->Map(true)));
+        constTensors.emplace_back(ConstTensor(managedBias.GetTensorInfo(), managedBias.Map()));
     }
 
     strategy.ExecuteStrategy(this, GetParameters(), constTensors, GetName());
diff --git a/src/backends/backendsCommon/CpuTensorHandle.hpp b/src/backends/backendsCommon/CpuTensorHandle.hpp
index a300fe0..fdd2439 100644
--- a/src/backends/backendsCommon/CpuTensorHandle.hpp
+++ b/src/backends/backendsCommon/CpuTensorHandle.hpp
@@ -175,4 +175,71 @@
 template <>
 void* CpuTensorHandle::GetTensor() const;
 
+class ManagedConstTensorHandle
+{
+
+public:
+    explicit ManagedConstTensorHandle(std::shared_ptr<ConstCpuTensorHandle> ptr)
+        : m_Mapped(false)
+        , m_TensorHandle(std::move(ptr)) {}
+
+    /// Maps the tensor data (blocking by default); the destructor unmaps it when this handle goes out of scope (RAII).
+    const void* Map(bool blocking = true)
+    {
+        if (m_TensorHandle)
+        {
+            auto pRet = m_TensorHandle->Map(blocking);
+            m_Mapped = true;
+            return pRet;
+        }
+        else
+        {
+            throw armnn::Exception("Attempting to Map null TensorHandle");
+        }
+
+    }
+
+    // Delete copy constructor as it's unnecessary
+    ManagedConstTensorHandle(const ManagedConstTensorHandle& other) = delete;
+
+    // Delete copy assignment as it's unnecessary
+    ManagedConstTensorHandle& operator=(const ManagedConstTensorHandle& other) = delete;
+
+    // Delete move assignment as it's unnecessary
+    ManagedConstTensorHandle& operator=(ManagedConstTensorHandle&& other) noexcept = delete;
+
+    ~ManagedConstTensorHandle()
+    {
+        // The wrapped pointer may be null (e.g. an optional bias), so only unmap if a tensor handle exists.
+        if (m_TensorHandle)
+        {
+            Unmap();
+        }
+    }
+
+    void Unmap()
+    {
+        // Only unmap if mapped and TensorHandle exists.
+        if (m_Mapped && m_TensorHandle)
+        {
+            m_TensorHandle->Unmap();
+            m_Mapped = false;
+        }
+    }
+
+    const TensorInfo& GetTensorInfo() const
+    {
+        return m_TensorHandle->GetTensorInfo();
+    }
+
+    bool IsMapped() const
+    {
+        return m_Mapped;
+    }
+
+private:
+    bool m_Mapped;
+    std::shared_ptr<ConstCpuTensorHandle> m_TensorHandle;
+};
+
 } // namespace armnn
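
The new class is self-contained RAII: Map() records that the memory is mapped, and the destructor unmaps it, so callers no longer need to pair Map(true) with an explicit Unmap(). A minimal usage sketch, assuming a hypothetical ReadConstant helper (illustrative only, not part of this patch):

    void ReadConstant(std::shared_ptr<ConstCpuTensorHandle> handle)
    {
        ManagedConstTensorHandle managed(handle);

        // Map() returns a pointer to the constant data and marks the handle as mapped.
        ConstTensor tensor(managed.GetTensorInfo(), managed.Map());

        // ... read from tensor ...

        // No explicit Unmap() needed: the destructor unmaps when 'managed' leaves scope.
    }
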
diff --git a/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp b/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
index 0d45952..56a794e 100644
--- a/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
+++ b/src/backends/backendsCommon/test/DefaultAsyncExecuteTest.cpp
@@ -243,7 +243,6 @@
     ValidateTensor(workingMemDescriptor2.m_Inputs[0], expectedExecuteval2);
 }
 
-
 BOOST_AUTO_TEST_SUITE_END()
 
 }
\ No newline at end of file
diff --git a/src/backends/reference/test/RefTensorHandleTests.cpp b/src/backends/reference/test/RefTensorHandleTests.cpp
index 1ef6de9..b04d9d6 100644
--- a/src/backends/reference/test/RefTensorHandleTests.cpp
+++ b/src/backends/reference/test/RefTensorHandleTests.cpp
@@ -167,6 +167,39 @@
     ARMNN_ASSERT(!(handleFactory.SupportsInPlaceComputation()));
 }
 
+BOOST_AUTO_TEST_CASE(TestManagedConstTensorHandle)
+{
+    // Initialize arguments
+    void* mem = nullptr;
+    TensorInfo info;
+
+    // Use PassthroughCpuTensorHandle as the other handle types are abstract
+    auto passThroughHandle = std::make_shared<PassthroughCpuTensorHandle>(info, mem);
+
+    // Check the managed handle starts with m_Mapped unset and that Map() sets it
+    ManagedConstTensorHandle managedHandle(passThroughHandle);
+    BOOST_CHECK(!managedHandle.IsMapped());
+    managedHandle.Map();
+    BOOST_CHECK(managedHandle.IsMapped());
+
+    // Test it can then be unmapped
+    managedHandle.Unmap();
+    BOOST_CHECK(!managedHandle.IsMapped());
+
+    // Check GetTensorInfo() returns the info the handle was constructed with
+    BOOST_CHECK(managedHandle.GetTensorInfo() == info);
+
+    // Test that nullptr tensor handle doesn't get mapped
+    ManagedConstTensorHandle managedHandleNull(nullptr);
+    BOOST_CHECK(!managedHandleNull.IsMapped());
+    BOOST_CHECK_THROW(managedHandleNull.Map(), armnn::Exception);
+    BOOST_CHECK(!managedHandleNull.IsMapped());
+
+    // Check Unmap() is a no-op when nothing is mapped
+    managedHandleNull.Unmap();
+    BOOST_CHECK(!managedHandleNull.IsMapped());
+}
+
 #if !defined(__ANDROID__)
 // Only run these tests on non Android platforms
 BOOST_AUTO_TEST_CASE(CheckSourceType)