IVGCVSW-6929 Support for models with implicit expanded
dimensions
* Added allow-expanded-dims to TFLite parser and ArmNN delegate
* If true ArmNN will disregard dimensions with a size of 1 when
validating tensor shapes. Tensor sizes must still match.
* This allows us to support models where tensors have expanded
dimensions (i.e. extra dimensions with a size of 1).
* Fixed a bug in Network where it assumed that only the first backend
  option in the list could be a ShapeInferenceMethod option.
* Fixed a bug where a Graph's m_ShapeInferenceMethod was lost when the
  Graph was copied or moved.
* Changed Delegate to pass "infer-output-shape", "allow-expanded-dims"
and other BackendOptions through to the Network during construction.
Signed-off-by: Mike Kelly <mike.kelly@arm.com>
Change-Id: Ibe7c5ae6597796fc9164cb07bd372bd7f8f8cacf
diff --git a/delegate/src/DelegateOptions.cpp b/delegate/src/DelegateOptions.cpp
index 9413a46..f3e13c9 100644
--- a/delegate/src/DelegateOptions.cpp
+++ b/delegate/src/DelegateOptions.cpp
@@ -156,6 +156,24 @@
{
optimizerOptions.m_Debug = armnn::stringUtils::StringToBool(options_values[i]);
}
+ // Infer output-shape
+ else if (std::string(options_keys[i]) == std::string("infer-output-shape"))
+ {
+ armnn::BackendOptions backendOption("ShapeInferenceMethod",
+ {
+ { "InferAndValidate", armnn::stringUtils::StringToBool(options_values[i]) }
+ });
+ optimizerOptions.m_ModelOptions.push_back(backendOption);
+ }
+ // Allow expanded dims
+ else if (std::string(options_keys[i]) == std::string("allow-expanded-dims"))
+ {
+ armnn::BackendOptions backendOption("AllowExpandedDims",
+ {
+ { "AllowExpandedDims", armnn::stringUtils::StringToBool(options_values[i]) }
+ });
+ optimizerOptions.m_ModelOptions.push_back(backendOption);
+ }
// Process memory-import
else if (std::string(options_keys[i]) == std::string("memory-import"))
{
diff --git a/delegate/src/armnn_delegate.cpp b/delegate/src/armnn_delegate.cpp
index 4d71f26..6e1a91f 100644
--- a/delegate/src/armnn_delegate.cpp
+++ b/delegate/src/armnn_delegate.cpp
@@ -308,7 +308,7 @@
DelegateData delegateData(delegate->m_Options.GetBackends());
// Build ArmNN Network
- armnn::NetworkOptions networkOptions = {};
+ armnn::NetworkOptions networkOptions = delegate->m_Options.GetOptimizerOptions().m_ModelOptions;
armnn::NetworkId networkId;
delegateData.m_Network = armnn::INetwork::Create(networkOptions);