diff options
author | 2019-09-29 21:35:26 +0300 | |
---|---|---|
committer | 2019-10-02 13:39:31 +0300 | |
commit | 43a8e9effec37af18b037cd3dce57a601af7b8ab (patch) | |
tree | f0e56e3abd2e730ce7a99f1e1db2dd58e249e456 | |
parent | 7f71a13f1c99b5ac5c218f2a1a50a8545cfee7e9 (diff) | |
download | newsboat-43a8e9effec37af18b037cd3dce57a601af7b8ab.tar.gz newsboat-43a8e9effec37af18b037cd3dce57a601af7b8ab.tar.zst newsboat-43a8e9effec37af18b037cd3dce57a601af7b8ab.zip |
Test how tokenize_quoted treats pound sign
-rw-r--r-- | test/utils.cpp | 56 |
1 file changed, 56 insertions, 0 deletions
diff --git a/test/utils.cpp b/test/utils.cpp index b1dd9b50..b5e9c037 100644 --- a/test/utils.cpp +++ b/test/utils.cpp @@ -150,6 +150,62 @@ TEST_CASE("tokenize_quoted() doesn't un-escape escaped backticks", "[utils]") REQUIRE(tokens[1] == "\\`foobar `bla`\\`"); } +TEST_CASE("tokenize_quoted stops tokenizing once it found a # character " + "(outside of double quotes)", + "[utils]") +{ + std::vector<std::string> tokens; + + SECTION("A string consisting of just a comment") + { + tokens = utils::tokenize_quoted("# just a comment"); + REQUIRE(tokens.empty()); + } + + SECTION("A string with one quoted substring") + { + tokens = utils::tokenize_quoted(R"#("a test substring" # !!!)#"); + REQUIRE(tokens.size() == 1); + REQUIRE(tokens[0] == "a test substring"); + } + + SECTION("A string with two quoted substrings") + { + tokens = utils::tokenize_quoted(R"#("first sub" "snd" # comment)#"); + REQUIRE(tokens.size() == 2); + REQUIRE(tokens[0] == "first sub"); + REQUIRE(tokens[1] == "snd"); + } + + SECTION("A comment containing # character") + { + tokens = utils::tokenize_quoted(R"#(one # a comment with # char)#"); + REQUIRE(tokens.size() == 1); + REQUIRE(tokens[0] == "one"); + } + + SECTION("A # character inside quoted substring is ignored") + { + tokens = utils::tokenize_quoted(R"#(this "will # be" ignored)#"); + REQUIRE(tokens.size() == 3); + REQUIRE(tokens[0] == "this"); + REQUIRE(tokens[1] == "will # be"); + REQUIRE(tokens[2] == "ignored"); + } +} + +TEST_CASE("tokenize_quoted does not consider escaped pound sign (\\#) " + "a beginning of a comment", + "[utils]") +{ + const auto tokens = utils::tokenize_quoted(R"#(one \# two three # ???)#"); + REQUIRE(tokens.size() == 4); + REQUIRE(tokens[0] == "one"); + REQUIRE(tokens[1] == "\\#"); + REQUIRE(tokens[2] == "two"); + REQUIRE(tokens[3] == "three"); +} + TEST_CASE("tokenize_nl() split a string into delimiters and fields", "[utils]") { std::vector<std::string> tokens; |