Revert "IVGCVSW-6873 Import inputs but don't export outputs fails."

This reverts commit 03bf98a8bc51ad20eef4b9ca5fbf6ce15e063721.

Reason for revert: Caused failures in tests located in an internal repo.
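
For reference, a minimal sketch (not part of the patch) contrasting the two
Optimize() call forms this revert toggles between in the affected tests. The
wrapper function and its parameters are illustrative only, and the
m_ExportEnabled member exists only while the reverted change is in the tree:

    // Illustrative only: the form removed by this revert (explicit
    // OptimizerOptions with import/export enabled) versus the
    // default-options form the tests return to.
    #include <armnn/ArmNN.hpp>

    armnn::IOptimizedNetworkPtr OptimizeForTest(const armnn::INetwork& net,
                                                const std::vector<armnn::BackendId>& backends,
                                                const armnn::IRuntimePtr& runtime,
                                                bool withImportExport)
    {
        if (withImportExport)
        {
            // Pre-revert form: import/export explicitly enabled.
            armnn::OptimizerOptions options;
            options.m_ImportEnabled = true;
            options.m_ExportEnabled = true; // member removed by this revert
            return armnn::Optimize(net, backends, runtime->GetDeviceSpec(), options);
        }
        // Post-revert form: default OptimizerOptions.
        return armnn::Optimize(net, backends, runtime->GetDeviceSpec());
    }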

Change-Id: If35cb0ede349b270e4e7827324382e09455d8cfa
diff --git a/src/backends/backendsCommon/test/CompatibilityTests.cpp b/src/backends/backendsCommon/test/CompatibilityTests.cpp
index 9c85ffc..c69a4b5 100644
--- a/src/backends/backendsCommon/test/CompatibilityTests.cpp
+++ b/src/backends/backendsCommon/test/CompatibilityTests.cpp
@@ -73,7 +73,7 @@
     graph.TopologicalSort();
 
     std::vector<std::string> errors;
-    auto result = SelectTensorHandleStrategy(graph, backends, registry, true, true, errors);
+    auto result = SelectTensorHandleStrategy(graph, backends, registry, true, errors);
 
     CHECK(result.m_Error == false);
     CHECK(result.m_Warning == false);
diff --git a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
index cc5aa23..77901df 100644
--- a/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/EndToEndTestImpl.hpp
@@ -204,9 +204,7 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -271,10 +269,7 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -345,10 +340,7 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
     CHECK(optNet);
 
     // Loads it into the runtime.
@@ -432,9 +424,7 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -524,9 +514,7 @@
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
     // optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -613,10 +601,7 @@
     input->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32, 0.0f, 0, true));
     pooling->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 1, 4 }, DataType::Float32));
 
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
 
     INFO("Load Network");
     // Load it into the runtime. It should pass.
@@ -709,10 +694,7 @@
     activation->GetOutputSlot(0).SetTensorInfo(TensorInfo({ 1, 1, 4, 1 }, DataType::Float32));
 
     // Optimize the network
-    OptimizerOptions optimizedOptions;
-    optimizedOptions.m_ImportEnabled = true;
-    optimizedOptions.m_ExportEnabled = true;
-    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec(), optimizedOptions);
+    IOptimizedNetworkPtr optNet = Optimize(*net, backends, runtime->GetDeviceSpec());
 
     // Loads it into the runtime.
     NetworkId netId;
diff --git a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
index cd865de..bcea061 100644
--- a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
@@ -421,7 +421,7 @@
 
     std::vector<armnn::BackendId> preferredBackends { "CpuRef" };
     armnn::ModelOptions modelOptions;
-    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions, false);
+    armnn::OptimizerOptions optimizerOptions(false, false, false, false, modelOptions);
     std::vector<std::string> errorMessages;
 
     // optimize the network.