/*
 * Copyright (c) 2013 Remko Tronçon
 * Licensed under the GNU General Public License.
 * See the COPYING file for more information.
 */

#include <Sluift/tokenize.h>

#include <boost/tokenizer.hpp>
#include <cctype>
#include <string>
#include <vector>

using namespace Swift;

namespace {
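	// TokenizerFunction for boost::tokenizer: extracts the next Lua-style token
	// (string literal, identifier, number, dot sequence, or a single symbol
	// character) from the range [next, end) and advances 'next' past it.
	// reset() is required by the concept; this functor keeps no state.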
	struct LuaTokenizeFunctor {
		void reset() {
		}

		template<typename InputIterator, typename Token>
		bool operator()(InputIterator& next, InputIterator& end, Token& result) {
			while (next != end && std::isspace(static_cast<unsigned char>(*next))) {
				++next;
			}
			if (next == end) {
				return false;
			}

			std::vector<char> token;
			char c = *next++;
			token.push_back(c);

			// String literal
			if (c == '\'' || c == '"') {
				char quote = c;
				bool inEscape = false;
				for (; next != end; ++next) {
					c = *next;
					token.push_back(c);
					if (inEscape) {
						inEscape = false;
					}
					else if (c == '\\') {
						inEscape = true;
					}
					else if (c == quote) {
						break;
					}
				}
				if (next != end) {
					++next;
				}
			}
			// Identifier
			else if (std::isalpha(static_cast<unsigned char>(c)) || c == '_') {
				while (next != end && (std::isalnum(static_cast<unsigned char>(*next)) || *next == '_')) {
					token.push_back(*next);
					++next;
				}
			}
			// Number: consume everything up to the next whitespace
			else if (std::isdigit(static_cast<unsigned char>(c))) {
				while (next != end && !std::isspace(static_cast<unsigned char>(*next))) {
					token.push_back(*next);
					++next;
				}
			}
			// Dots
			else if (c == '.') {
				while (next != end && *next == '.') {
					token.push_back(*next);
					++next;
				}
			}
			
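			// For any other character (operator or punctuation), the single
			// character pushed above is the whole token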
			result = Token(&token[0], token.size());
			return true;
		}
	};
}


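// Splits 'input' into Lua-style tokens by letting boost::tokenizer drive the
// functor above until it reports end of input.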
std::vector<std::string> Lua::tokenize(const std::string& input) {
	boost::tokenizer<LuaTokenizeFunctor> tokenizer(input);
	return std::vector<std::string>(tokenizer.begin(), tokenizer.end());
}
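
// Illustrative usage sketch (not part of the original file):
//
//   std::vector<std::string> tokens = Lua::tokenize("client:send('hello world')");
//   // -> "client", ":", "send", "(", "'hello world'", ")"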