author     David Wagner <david.wagner@intel.com>    2015-03-19 16:53:50 +0100
committer  Eric Laurent <elaurent@google.com>       2015-04-24 13:39:11 -0700
commit     b2742cf39a7ca6c8a1e25698e2065258da1d1a2f
tree       96e4661eecf3d2c1de2a8c435ee7ecb805f2bb32
parent     0bc363e6b3760abc2c9a500b6de1f4d32889d9e7
Add unit tests for the Tokenizer class
Signed-off-by: David Wagner <david.wagner@intel.com>
-rw-r--r--  CMakeLists.txt                   1
-rw-r--r--  test/tokenizer/CMakeLists.txt   48
-rw-r--r--  test/tokenizer/Test.cpp        116
3 files changed, 165 insertions(+), 0 deletions(-)
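
Before the diff itself, here is a minimal sketch of the Tokenizer interface and semantics that the new tests exercise, inferred only from the calls made in test/tokenizer/Test.cpp below. The constructor signature, the exact default delimiter set and the member names are assumptions; the real class is the one provided by the pfw_utility library and included via "Tokenizer.h".

// Sketch only: illustrates the behaviour checked by the tests, not the
// actual Tokenizer implementation linked from pfw_utility.
#include <string>
#include <vector>

class Tokenizer
{
public:
    // Default delimiters assumed to be whitespace (" \n\t"), as suggested by
    // the "Multiple separators in a row" scenario.
    Tokenizer(const std::string &input, const std::string &delimiters = " \n\t")
        : mInput(input), mDelimiters(delimiters), mPosition(0) {}

    // Returns the next token, or an empty string once the input is exhausted;
    // consecutive delimiters never produce empty tokens.
    std::string next()
    {
        size_t begin = mInput.find_first_not_of(mDelimiters, mPosition);
        if (begin == std::string::npos) {
            mPosition = mInput.length();
            return "";
        }
        size_t end = mInput.find_first_of(mDelimiters, begin);
        if (end == std::string::npos) {
            end = mInput.length();
        }
        mPosition = end;
        return mInput.substr(begin, end - begin);
    }

    // Returns all remaining tokens; an empty input yields an empty vector.
    std::vector<std::string> split()
    {
        std::vector<std::string> tokens;
        for (std::string token = next(); !token.empty(); token = next()) {
            tokens.push_back(token);
        }
        return tokens;
    }

private:
    std::string mInput;
    std::string mDelimiters;
    size_t mPosition;
};

Note that next() signals exhaustion by returning an empty string rather than throwing, which is why each next() sequence in the tests ends with CHECK(tokenizer.next() == "").
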
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0cddfd3..e42b6be 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -57,6 +57,7 @@ configure_file(CTestCustom.cmake ${CMAKE_BINARY_DIR} COPYONLY)
add_subdirectory(test/test-platform)
add_subdirectory(test/test-fixed-point-parameter)
+add_subdirectory(test/tokenizer)

option(BASH_COMPLETION "Install bash completion configuration" ON)
if (BASH_COMPLETION)
diff --git a/test/tokenizer/CMakeLists.txt b/test/tokenizer/CMakeLists.txt
new file mode 100644
index 0000000..f68f6a3
--- /dev/null
+++ b/test/tokenizer/CMakeLists.txt
@@ -0,0 +1,48 @@
+# Copyright (c) 2015, Intel Corporation
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# 1. Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation and/or
+# other materials provided with the distribution.
+#
+# 3. Neither the name of the copyright holder nor the names of its contributors
+# may be used to endorse or promote products derived from this software without
+# specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+if(BUILD_TESTING)
+ # Add catch unit test framework
+ # TODO: use gtest, as it is the team recommendation.
+ # Unfortunately, gtest is hard to set up because it is not distributed in binary form;
+ # catch is a single header, so it is very easy to use.
+ # Catch can be downloaded from:
+ # https://raw.github.com/philsquared/Catch/master/single_include/catch.hpp
+ # Then append the download folder to the CMAKE_INCLUDE_PATH variable or
+ # copy the header to a standard location (/usr/include on most Linux distributions).
+ find_path(CATCH_HEADER catch.hpp)
+ include_directories(${CATCH_HEADER})
+
+ # Add unit test
+ add_executable(tokenizerTest Test.cpp)
+
+ include_directories(${PROJECT_SOURCE_DIR}/utility)
+ target_link_libraries(tokenizerTest pfw_utility)
+
+ add_test(tokenizerTest tokenizerTest)
+endif()
diff --git a/test/tokenizer/Test.cpp b/test/tokenizer/Test.cpp
new file mode 100644
index 0000000..14f9ea4
--- /dev/null
+++ b/test/tokenizer/Test.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (c) 2015, Intel Corporation
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without modification,
+ * are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation and/or
+ * other materials provided with the distribution.
+ *
+ * 3. Neither the name of the copyright holder nor the names of its contributors
+ * may be used to endorse or promote products derived from this software without
+ * specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
+ * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "Tokenizer.h"
+
+#define CATCH_CONFIG_MAIN // This tells Catch to provide a main()
+#include <catch.hpp>
+
+#include <string>
+#include <vector>
+
+using std::string;
+using std::vector;
+
+SCENARIO("Tokenizer tests") {
+ GIVEN("A default tokenizer") {
+
+ GIVEN("A trivial string") {
+ Tokenizer tokenizer("a bcd ef");
+
+ THEN("next() api should work") {
+ CHECK(tokenizer.next() == "a");
+ CHECK(tokenizer.next() == "bcd");
+ CHECK(tokenizer.next() == "ef");
+ CHECK(tokenizer.next() == "");
+ }
+ THEN("split() api should work") {
+ vector<string> expected;
+ expected.push_back("a");
+ expected.push_back("bcd");
+ expected.push_back("ef");
+
+ CHECK(tokenizer.split() == expected);
+ }
+ }
+
+ GIVEN("An empty string") {
+ Tokenizer tokenizer("");
+
+ THEN("next() api should work") {
+ CHECK(tokenizer.next() == "");
+ }
+ THEN("split() api should work") {
+ // An empty input must yield an empty token list
+
+ CHECK(tokenizer.split().empty());
+ }
+ }
+
+ GIVEN("A slash-separated string and tokenizer") {
+ Tokenizer tokenizer("/a/bcd/ef g/h/", "/");
+
+ THEN("next() api should work") {
+ CHECK(tokenizer.next() == "a");
+ CHECK(tokenizer.next() == "bcd");
+ CHECK(tokenizer.next() == "ef g");
+ CHECK(tokenizer.next() == "h");
+ CHECK(tokenizer.next() == "");
+ }
+ THEN("split() api should work") {
+ vector<string> expected;
+ expected.push_back("a");
+ expected.push_back("bcd");
+ expected.push_back("ef g");
+ expected.push_back("h");
+
+ CHECK(tokenizer.split() == expected);
+ }
+ }
+
+ GIVEN("Multiple separators in a row") {
+ Tokenizer tokenizer(" a \n\t bc ");
+
+ THEN("next() api should work") {
+ CHECK(tokenizer.next() == "a");
+ CHECK(tokenizer.next() == "bc");
+ CHECK(tokenizer.next() == "");
+ }
+ THEN("split() api should work") {
+ vector<string> expected;
+ expected.push_back("a");
+ expected.push_back("bc");
+
+ CHECK(tokenizer.split() == expected);
+ }
+ }
+ }
+
+}