IVGCVSW-6873 Import inputs but don't export outputs fails.

Only one bool is used to indicate whether inputs should be imported.
However, it's possible for the user to want to import inputs but not
export outputs. In addition, it's possible for a user to enable import
during optimization but then pass a memory source that does not require
import.

* Add m_ExportEnabled to INetwork.hpp.
* Modify Network::LoadNetwork to consider both m_ImportEnabled
  and m_ExportEnabled.
* Add ValidateSourcesMatchOptimizedNetwork to LoadedNetwork to validate
  import options between optimize and network load.
* Update the TfLite delegate to consider the exportEnabled flag in the
  optimizer.
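
For illustration only, a minimal sketch of the intended usage: import
inputs without exporting outputs. It reuses the net/backends/runtime/netId
setup shown in the tests below; the INetworkProperties constructor and
MemorySource values are assumptions based on the existing runtime API, not
part of this patch.

    // Sketch: request input import only; outputs are copied, not exported.
    armnn::OptimizerOptions optimizerOptions;
    optimizerOptions.m_ImportEnabled = true;
    optimizerOptions.m_ExportEnabled = false;

    armnn::IOptimizedNetworkPtr optNet =
        armnn::Optimize(*net, backends, runtime->GetDeviceSpec(), optimizerOptions);

    // At load time the memory sources should match what was requested during
    // optimization; ValidateSourcesMatchOptimizedNetwork rejects mismatches.
    // (Assumed INetworkProperties signature: asyncEnabled, input source, output source.)
    armnn::INetworkProperties networkProperties(false,
                                                armnn::MemorySource::Malloc,     // import inputs
                                                armnn::MemorySource::Undefined); // no output export
    std::string errorMessage;
    runtime->LoadNetwork(netId, std::move(optNet), errorMessage, networkProperties);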

!armnn-internal-tests:425350
Signed-off-by: Colm Donelan <colm.donelan@arm.com>
Change-Id: I776eab81595898e43f91ab40306962eae61329f4
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index 77901df..cc5aa23 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -204,7 +204,9 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -269,7 +271,10 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    optimizedOptions.m_ExportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -340,7 +345,10 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    optimizedOptions.m_ExportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -424,7 +432,9 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -514,7 +524,9 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ExportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -601,7 +613,10 @@
     input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32, 0.0f, 0, true));
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    optimizedOptions.m_ExportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -694,7 +709,10 @@
     activation->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));
 
     // Optimize the network
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
+    OptimizerOptions optimizedOptions;
+    optimizedOptions.m_ImportEnabled = true;
+    optimizedOptions.m_ExportEnabled = true;
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
 
     // Loads it into the runtime.
     NetworkId netId;