IVGCVSW-2993: Investigate TfLite Parser test output shape validation
* Added a check after enqueuing the workload to ensure that the outputs have the correct number of dimensions
* OutputTensors cannot be used for this, as in RunTest we're specifically creating these with the expected number of dimensions
Signed-off-by: Nina Drozd <nina.drozd@arm.com>
Change-Id: Ib6e5a138240e2f639f462f58caa72ae760e2b406
diff --git a/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp b/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
index 95c6e85..9b443c3 100644
--- a/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
+++ b/src/armnnTfLiteParser/test/ParserFlatbuffersFixture.hpp
@@ -280,6 +280,21 @@
m_Runtime->EnqueueWorkload(m_NetworkIdentifier, inputTensors, outputTensors);
+ // Check that output tensors have correct number of dimensions (NumOutputDimensions specified in test)
+ // after running the workload
+ for (auto&& it : expectedOutputData)
+ {
+ armnn::LayerBindingId outputBindingId = m_Parser->GetNetworkOutputBindingInfo(subgraphId, it.first).first;
+ auto outputNumDimensions = m_Runtime->GetOutputTensorInfo(
+ m_NetworkIdentifier, outputBindingId).GetNumDimensions();
+
+ BOOST_CHECK_MESSAGE((outputNumDimensions == NumOutputDimensions),
+ boost::str(boost::format("Number of dimensions expected %1%, but got %2% for output layer %3%")
+ % NumOutputDimensions
+ % outputNumDimensions
+ % it.first));
+ }
+
// Compare each output tensor to the expected values
for (auto&& it : expectedOutputData)
{