diff options
author | Tobias Markmann <tm@ayena.de> | 2016-03-31 14:57:35 (GMT) |
---|---|---|
committer | Tobias Markmann <tm@ayena.de> | 2016-03-31 14:57:35 (GMT) |
commit | cfbdb43d2cadd40aa87338d41548e4bf89e146e6 (patch) | |
tree | 18d94153a302445196fc0c18586abf44a1ce4a38 /Sluift/UnitTest | |
parent | 1d545a4a7fb877f021508094b88c1f17b30d8b4e (diff) | |
download | swift-cfbdb43d2cadd40aa87338d41548e4bf89e146e6.zip swift-cfbdb43d2cadd40aa87338d41548e4bf89e146e6.tar.bz2 |
Convert tabs to 4 spaces for all source files
Removed trailing spaces and whitespace on empty lines
in the process.
Changed CheckTabs.py tool to disallow hard tabs in source
files.
Test-Information:
Manually checked 30 random files that the conversion worked
as expected.
Change-Id: I874f99d617bd3d2bb55f02d58f22f58f9b094480
Diffstat (limited to 'Sluift/UnitTest')
-rw-r--r-- | Sluift/UnitTest/TokenizeTest.cpp | 76 |
1 file changed, 38 insertions, 38 deletions
diff --git a/Sluift/UnitTest/TokenizeTest.cpp b/Sluift/UnitTest/TokenizeTest.cpp index fb7dbbd..cd617b5 100644 --- a/Sluift/UnitTest/TokenizeTest.cpp +++ b/Sluift/UnitTest/TokenizeTest.cpp @@ -12,52 +12,52 @@ using namespace Swift; class TokenizeTest : public CppUnit::TestFixture { - CPPUNIT_TEST_SUITE(TokenizeTest); - CPPUNIT_TEST(testTokenize); - CPPUNIT_TEST(testTokenize); - CPPUNIT_TEST(testTokenize_String); - CPPUNIT_TEST(testTokenize_IncompleteString); - CPPUNIT_TEST(testTokenize_Identifier); - CPPUNIT_TEST_SUITE_END(); + CPPUNIT_TEST_SUITE(TokenizeTest); + CPPUNIT_TEST(testTokenize); + CPPUNIT_TEST(testTokenize); + CPPUNIT_TEST(testTokenize_String); + CPPUNIT_TEST(testTokenize_IncompleteString); + CPPUNIT_TEST(testTokenize_Identifier); + CPPUNIT_TEST_SUITE_END(); - public: - void testTokenize() { - std::vector<std::string> tokens = Lua::tokenize("foo.bar + 1.23 - bam"); + public: + void testTokenize() { + std::vector<std::string> tokens = Lua::tokenize("foo.bar + 1.23 - bam"); - CPPUNIT_ASSERT_EQUAL(7, static_cast<int>(tokens.size())); - CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); - CPPUNIT_ASSERT_EQUAL(std::string("."), tokens[1]); - CPPUNIT_ASSERT_EQUAL(std::string("bar"), tokens[2]); - CPPUNIT_ASSERT_EQUAL(std::string("+"), tokens[3]); - CPPUNIT_ASSERT_EQUAL(std::string("1.23"), tokens[4]); - CPPUNIT_ASSERT_EQUAL(std::string("-"), tokens[5]); - CPPUNIT_ASSERT_EQUAL(std::string("bam"), tokens[6]); - } + CPPUNIT_ASSERT_EQUAL(7, static_cast<int>(tokens.size())); + CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); + CPPUNIT_ASSERT_EQUAL(std::string("."), tokens[1]); + CPPUNIT_ASSERT_EQUAL(std::string("bar"), tokens[2]); + CPPUNIT_ASSERT_EQUAL(std::string("+"), tokens[3]); + CPPUNIT_ASSERT_EQUAL(std::string("1.23"), tokens[4]); + CPPUNIT_ASSERT_EQUAL(std::string("-"), tokens[5]); + CPPUNIT_ASSERT_EQUAL(std::string("bam"), tokens[6]); + } - void testTokenize_String() { - std::vector<std::string> tokens = Lua::tokenize(" foo .. \"1234\\\"bla blo\""); + void testTokenize_String() { + std::vector<std::string> tokens = Lua::tokenize(" foo .. \"1234\\\"bla blo\""); - CPPUNIT_ASSERT_EQUAL(3, static_cast<int>(tokens.size())); - CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); - CPPUNIT_ASSERT_EQUAL(std::string(".."), tokens[1]); - CPPUNIT_ASSERT_EQUAL(std::string("\"1234\\\"bla blo\""), tokens[2]); - } + CPPUNIT_ASSERT_EQUAL(3, static_cast<int>(tokens.size())); + CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); + CPPUNIT_ASSERT_EQUAL(std::string(".."), tokens[1]); + CPPUNIT_ASSERT_EQUAL(std::string("\"1234\\\"bla blo\""), tokens[2]); + } - void testTokenize_IncompleteString() { - std::vector<std::string> tokens = Lua::tokenize("\"1234"); + void testTokenize_IncompleteString() { + std::vector<std::string> tokens = Lua::tokenize("\"1234"); - CPPUNIT_ASSERT_EQUAL(1, static_cast<int>(tokens.size())); - CPPUNIT_ASSERT_EQUAL(std::string("\"1234"), tokens[0]); - } + CPPUNIT_ASSERT_EQUAL(1, static_cast<int>(tokens.size())); + CPPUNIT_ASSERT_EQUAL(std::string("\"1234"), tokens[0]); + } - void testTokenize_Identifier() { - std::vector<std::string> tokens = Lua::tokenize("foo.bar_baz"); + void testTokenize_Identifier() { + std::vector<std::string> tokens = Lua::tokenize("foo.bar_baz"); - CPPUNIT_ASSERT_EQUAL(3, static_cast<int>(tokens.size())); - CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); - CPPUNIT_ASSERT_EQUAL(std::string("."), tokens[1]); - CPPUNIT_ASSERT_EQUAL(std::string("bar_baz"), tokens[2]); - } + CPPUNIT_ASSERT_EQUAL(3, static_cast<int>(tokens.size())); + CPPUNIT_ASSERT_EQUAL(std::string("foo"), tokens[0]); + CPPUNIT_ASSERT_EQUAL(std::string("."), tokens[1]); + CPPUNIT_ASSERT_EQUAL(std::string("bar_baz"), tokens[2]); + } }; |