MLECO-3148 Enable CTest-driven testing

Signed-off-by: Eanna O Cathain <eanna.ocathain@arm.com>
Change-Id: Ica67662d20b0c02b75418f79d4ba2f4d18373310
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 150c668..7453176 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -50,6 +50,8 @@
 
 include(${CMAKE_SCRIPTS_DIR}/source_gen_utils.cmake)
 
+enable_testing()
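+# Enable CTest support; the native platform build configuration registers the
+# per-use-case unit tests with add_test().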
+
 if (${CMAKE_BINARY_DIR} STREQUAL ${CMAKE_SOURCE_DIR})
     message(FATAL_ERROR "Source and build are in the same directory")
 else()
@@ -150,6 +152,7 @@
 list(REMOVE_ITEM USE_CASES "" ${EXCLUDED_USE_CASES})
 message(STATUS "Use-cases excluded by platform configuration: ${EXCLUDED_USE_CASES}")
 message(STATUS "Building use-cases: ${USE_CASES}.")
+
 foreach(use_case ${USE_CASES})
 
     set(SRC_USE_CASE "")
diff --git a/docs/sections/testing_benchmarking.md b/docs/sections/testing_benchmarking.md
index 2641049..41ac8df 100644
--- a/docs/sections/testing_benchmarking.md
+++ b/docs/sections/testing_benchmarking.md
@@ -38,18 +38,58 @@
 ├── ethos-u-<usecase1>
 └── ethos-u-<usecase2>
 ```
 
-To execute unit-tests for a specific use-case, in addition to the common tests, use:
+To list all of the available tests, run the following command from the `<build folder>`:
 
 ```commandline
-arm_ml_embedded_evaluation_kit-<use_case>-tests
+ctest -N
+```
+
+Sample output:
+```commandline
+Test #1: ad-tests
+Test #2: asr-tests
+Test #3: img_class-tests
+Test #4: kws-tests
+Test #5: kws_asr-tests
+Test #6: noise_reduction-tests
+Test #7: object_detection-tests
+Test #8: vww-tests
+
+Total Tests: 8
 ```
 
+To execute a specific test from the list above, run the following command from the `<build folder>`:
+
+```commandline
+ctest -R <test_name>
+```
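+
+The `<test_name>` argument to `ctest -R` is treated as a regular expression, so a partial name selects every matching test. For example, the following would run both `kws-tests` and `kws_asr-tests` (assuming both use-cases have been built):
+
+```commandline
+ctest -R kws
+```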
+
+To execute every test that has been built, run the following command from the `<build folder>`:
+
+```commandline
+ctest
+```
+
+Sample output:
 ```log
-INFO - native platform initialised
-...
-===============================================================================
-   All tests passed (37 assertions in 7 test cases)
+    Start 1: ad-tests
+1/8 Test #1: ad-tests .........................   Passed    0.17 sec
+    Start 2: asr-tests
+2/8 Test #2: asr-tests ........................   Passed    3.04 sec
+    Start 3: img_class-tests
+3/8 Test #3: img_class-tests ..................   Passed    0.49 sec
+    Start 4: kws-tests
+4/8 Test #4: kws-tests ........................   Passed    7.52 sec
+    Start 5: kws_asr-tests
+5/8 Test #5: kws_asr-tests ....................   Passed    2.85 sec
+    Start 6: noise_reduction-tests
+6/8 Test #6: noise_reduction-tests ............   Passed   16.41 sec
+    Start 7: object_detection-tests
+7/8 Test #7: object_detection-tests ...........   Passed    0.58 sec
+    Start 8: vww-tests
+8/8 Test #8: vww-tests ........................   Passed    0.07 sec
+
+Total Test time (real) =  34.71 sec
 ```
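+
+By default, CTest prints only a pass/fail summary for each test. If a test fails and more detail is needed, the standard `--output-on-failure` option makes CTest print the full output of any failing test:
+
+```commandline
+ctest --output-on-failure
+```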
 
 > **Note:** Test outputs could contain `[ERROR]` messages. This is OK as they come from negative scenario tests.
diff --git a/scripts/cmake/platforms/native/build_configuration.cmake b/scripts/cmake/platforms/native/build_configuration.cmake
index 63f9491..d53439c 100644
--- a/scripts/cmake/platforms/native/build_configuration.cmake
+++ b/scripts/cmake/platforms/native/build_configuration.cmake
@@ -110,5 +110,6 @@
         target_compile_definitions(${TEST_TARGET_NAME} PRIVATE
                 "ACTIVATION_BUF_SZ=${${use_case}_ACTIVATION_BUF_SZ}"
                 TESTS)
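+        # Register the test executable with CTest so that it can be listed and
+        # run from the build folder with `ctest` (see docs/sections/testing_benchmarking.md).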
+        add_test(NAME "${use_case}-tests" COMMAND ${TEST_TARGET_NAME})
     endif ()
 endfunction()
\ No newline at end of file