author    Kevin Smith <git@kismith.co.uk>  2013-01-12 18:41:34 (GMT)
committer Swift Review <review@swift.im>   2013-01-13 10:36:26 (GMT)
commit    f3bc816af1b0d61452de973963e453bf3b3f95a2 (patch)
tree      e895f8afa3580e6cff6f5ad2017d45bf147a17c2 /3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl
parent    188fc285c6555eadd3c9d50ab8a94adcade78d89 (diff)
download  swift-f3bc816af1b0d61452de973963e453bf3b3f95a2.zip
          swift-f3bc816af1b0d61452de973963e453bf3b3f95a2.tar.bz2
Adding in the spirit Boost stuff
Change-Id: I4f127ce61667243b64081b0aa309028d5077045f
Diffstat (limited to '3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl')
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor.hpp               300
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor_data.hpp          552
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator.hpp              121
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator_tokenizer.hpp    255
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/lexer.hpp                 399
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/semantic_action_data.hpp  121
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/token.hpp                 654
-rw-r--r--  3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/wrap_action.hpp           154
8 files changed, 2556 insertions, 0 deletions
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor.hpp
new file mode 100644
index 0000000..79e5f07
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor.hpp
@@ -0,0 +1,300 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_LEXER_FUNCTOR_NOV_18_2007_1112PM)
+#define BOOST_SPIRIT_LEX_LEXER_FUNCTOR_NOV_18_2007_1112PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/mpl/bool.hpp>
+#include <boost/detail/iterator.hpp>
+#include <boost/detail/workaround.hpp>
+#include <boost/spirit/home/lex/lexer/pass_flags.hpp>
+#include <boost/assert.hpp>
+
+#if 0 != __COMO_VERSION__ || !BOOST_WORKAROUND(BOOST_MSVC, <= 1310)
+#define BOOST_SPIRIT_STATIC_EOF 1
+#define BOOST_SPIRIT_EOF_PREFIX static
+#else
+#define BOOST_SPIRIT_EOF_PREFIX
+#endif
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ ///////////////////////////////////////////////////////////////////////////
+ //
+ //  functor is a template usable as the functor object for the
+ //  multi_pass iterator, allowing a lexertl based dfa to be wrapped in an
+ //  iterator based interface.
+ //
+ // Token: the type of the tokens produced by this functor
+ // this needs to expose a constructor with the following
+ // prototype:
+ //
+ // Token(std::size_t id, std::size_t state,
+ // Iterator start, Iterator end)
+ //
+ // where 'id' is the token id, 'state' is the lexer state
+ // this token has been matched in, and 'start' and 'end'
+ // mark the start and the end of the token with respect
+ // to the underlying character stream.
+ // FunctorData:
+ // this is expected to encapsulate the shared part of the
+ // functor (see lex/lexer/lexertl/functor_data.hpp for an
+ // example and documentation).
+ // Iterator: the type of the underlying iterator
+ // SupportsActors:
+ // this is expected to be a mpl::bool_, if mpl::true_ the
+ // functor invokes functors which (optionally) have
+ // been attached to the token definitions.
+ // SupportsState:
+ // this is expected to be a mpl::bool_, if mpl::true_ the
+ // functor supports different lexer states,
+ // otherwise no lexer state is supported.
+ //
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Token
+ , template <typename, typename, typename, typename> class FunctorData
+ , typename Iterator = typename Token::iterator_type
+ , typename SupportsActors = mpl::false_
+ , typename SupportsState = typename Token::has_state>
+ class functor
+ {
+ public:
+ typedef typename
+ boost::detail::iterator_traits<Iterator>::value_type
+ char_type;
+
+ private:
+ // Needed by compilers not implementing the resolution to DR45. For
+ // reference, see
+ // http://www.open-std.org/JTC1/SC22/WG21/docs/cwg_defects.html#45.
+ typedef typename Token::token_value_type token_value_type;
+ friend class FunctorData<Iterator, SupportsActors, SupportsState
+ , token_value_type>;
+
+ // Helper template that assigns a value on scope exit
+ template <typename T>
+ struct assign_on_exit
+ {
+ assign_on_exit(T& dst, T const& src)
+ : dst_(dst), src_(src) {}
+
+ ~assign_on_exit()
+ {
+ dst_ = src_;
+ }
+
+ T& dst_;
+ T const& src_;
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ assign_on_exit& operator= (assign_on_exit const&);
+ };
+
+ public:
+ functor()
+#if defined(__PGI)
+ : eof()
+#endif
+ {}
+
+#if BOOST_WORKAROUND(BOOST_MSVC, <= 1310)
+ // somehow VC7.1 needs this (meaningless) assignment operator
+ functor& operator=(functor const& rhs)
+ {
+ return *this;
+ }
+#endif
+
+ ///////////////////////////////////////////////////////////////////////
+ // interface to the iterator_policies::split_functor_input policy
+ typedef Token result_type;
+ typedef functor unique;
+ typedef FunctorData<Iterator, SupportsActors, SupportsState
+ , token_value_type> shared;
+
+ BOOST_SPIRIT_EOF_PREFIX result_type const eof;
+
+ ///////////////////////////////////////////////////////////////////////
+ typedef Iterator iterator_type;
+ typedef typename shared::semantic_actions_type semantic_actions_type;
+ typedef typename shared::next_token_functor next_token_functor;
+ typedef typename shared::get_state_name_type get_state_name_type;
+
+ // this is needed to wrap the semantic actions in a proper way
+ typedef typename shared::wrap_action_type wrap_action_type;
+
+ ///////////////////////////////////////////////////////////////////////
+ template <typename MultiPass>
+ static result_type& get_next(MultiPass& mp, result_type& result)
+ {
+ typedef typename result_type::id_type id_type;
+
+ shared& data = mp.shared()->ftor;
+ for(;;)
+ {
+ if (data.get_first() == data.get_last())
+#if defined(BOOST_SPIRIT_STATIC_EOF)
+ return result = eof;
+#else
+ return result = mp.ftor.eof;
+#endif
+
+ data.reset_value();
+ Iterator end = data.get_first();
+ std::size_t unique_id = boost::lexer::npos;
+ bool prev_bol = false;
+
+ // lexer matching might change state
+ std::size_t state = data.get_state();
+ std::size_t id = data.next(end, unique_id, prev_bol);
+
+ if (boost::lexer::npos == id) { // no match
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ std::string next;
+ Iterator it = data.get_first();
+ for (std::size_t i = 0; i < 10 && it != data.get_last(); ++it, ++i)
+ next += *it;
+
+ std::cerr << "Not matched, in state: " << state
+ << ", lookahead: >" << next << "<" << std::endl;
+#endif
+ return result = result_type(0);
+ }
+ else if (0 == id) { // EOF reached
+#if defined(BOOST_SPIRIT_STATIC_EOF)
+ return result = eof;
+#else
+ return result = mp.ftor.eof;
+#endif
+ }
+
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ {
+ std::string next;
+ Iterator it = end;
+ for (std::size_t i = 0; i < 10 && it != data.get_last(); ++it, ++i)
+ next += *it;
+
+ std::cerr << "Matched: " << id << ", in state: "
+ << state << ", string: >"
+ << std::basic_string<char_type>(data.get_first(), end) << "<"
+ << ", lookahead: >" << next << "<" << std::endl;
+ if (data.get_state() != state) {
+ std::cerr << "Switched to state: "
+ << data.get_state() << std::endl;
+ }
+ }
+#endif
+ // account for a possibly pending lex::more(), i.e. moving
+ // data.first_ back to the start of the previously matched token.
+ bool adjusted = data.adjust_start();
+
+ // set the end of the matched input sequence in the token data
+ data.set_end(end);
+
+ // invoke attached semantic actions, if defined, might change
+ // state, id, data.first_, and/or end
+ BOOST_SCOPED_ENUM(pass_flags) pass =
+ data.invoke_actions(state, id, unique_id, end);
+
+ if (data.has_value()) {
+ // return matched token using the token value as set before
+ // using data.set_value(), advancing 'data.first_' past the
+ // matched sequence
+ assign_on_exit<Iterator> on_exit(data.get_first(), end);
+ return result = result_type(id_type(id), state, data.get_value());
+ }
+ else if (pass_flags::pass_normal == pass) {
+ // return matched token, advancing 'data.first_' past the
+ // matched sequence
+ assign_on_exit<Iterator> on_exit(data.get_first(), end);
+ return result = result_type(id_type(id), state, data.get_first(), end);
+ }
+ else if (pass_flags::pass_fail == pass) {
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ std::cerr << "Matching forced to fail" << std::endl;
+#endif
+ // if the data.first_ got adjusted above, revert this adjustment
+ if (adjusted)
+ data.revert_adjust_start();
+
+ // one of the semantic actions signaled no-match
+ data.reset_bol(prev_bol);
+ if (state != data.get_state())
+ continue; // retry matching if state has changed
+
+ // if the state is unchanged repeating the match wouldn't
+ // move the input forward, causing an infinite loop
+ return result = result_type(0);
+ }
+
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ std::cerr << "Token ignored, continuing matching" << std::endl;
+#endif
+ // if this token needs to be ignored, just repeat the matching,
+ // while starting right after the current match
+ data.get_first() = end;
+ }
+ }
+
+ // set_state() and get_state() are propagated up to the iterator interface,
+ // allowing the current lexer state to be manipulated through any of the
+ // exposed iterators.
+ template <typename MultiPass>
+ static std::size_t set_state(MultiPass& mp, std::size_t state)
+ {
+ std::size_t oldstate = mp.shared()->ftor.get_state();
+ mp.shared()->ftor.set_state(state);
+
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ std::cerr << "Switching state from: " << oldstate
+ << " to: " << state
+ << std::endl;
+#endif
+ return oldstate;
+ }
+
+ template <typename MultiPass>
+ static std::size_t get_state(MultiPass& mp)
+ {
+ return mp.shared()->ftor.get_state();
+ }
+
+ template <typename MultiPass>
+ static std::size_t
+ map_state(MultiPass const& mp, char_type const* statename)
+ {
+ return mp.shared()->ftor.get_state_id(statename);
+ }
+
+ // we don't need this, but it must be there
+ template <typename MultiPass>
+ static void destroy(MultiPass const&) {}
+ };
+
+#if defined(BOOST_SPIRIT_STATIC_EOF)
+ ///////////////////////////////////////////////////////////////////////////
+ // eof token
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Token
+ , template <typename, typename, typename, typename> class FunctorData
+ , typename Iterator, typename SupportsActors, typename SupportsState>
+ typename functor<Token, FunctorData, Iterator, SupportsActors, SupportsState>::result_type const
+ functor<Token, FunctorData, Iterator, SupportsActors, SupportsState>::eof =
+ typename functor<Token, FunctorData, Iterator, SupportsActors
+ , SupportsState>::result_type();
+#endif
+
+}}}}
+
+#undef BOOST_SPIRIT_EOF_PREFIX
+#undef BOOST_SPIRIT_STATIC_EOF
+
+#endif
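A note on the assign_on_exit idiom used by functor::get_next() above: the returned token is constructed from the current [first, end) range, and only afterwards is data.first_ advanced past the matched sequence, because the assignment happens in the helper's destructor. A minimal, self-contained sketch of that idiom (all names below are illustrative, not part of the header):

    #include <cassert>
    #include <string>

    // The destination is written only when the helper leaves scope, mirroring
    // how assign_on_exit<Iterator> is used in functor::get_next() above.
    template <typename T>
    struct assign_on_exit
    {
        assign_on_exit(T& dst, T const& src) : dst_(dst), src_(src) {}
        ~assign_on_exit() { dst_ = src_; }

        T& dst_;
        T const& src_;
    };

    std::string take_token(std::string::const_iterator& first,
                           std::string::const_iterator end)
    {
        assign_on_exit<std::string::const_iterator> on_exit(first, end);
        return std::string(first, end);   // built before 'first' is advanced
    }

    int main()
    {
        std::string input("hello world");
        std::string::const_iterator it = input.begin();
        std::string tok = take_token(it, it + 5);
        assert(tok == "hello");
        assert(*it == ' ');               // 'it' now points just past the match
        return 0;
    }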
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor_data.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor_data.hpp
new file mode 100644
index 0000000..207b374
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/functor_data.hpp
@@ -0,0 +1,552 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_LEXER_FUNCTOR_DATA_JUN_10_2009_0954AM)
+#define BOOST_SPIRIT_LEX_LEXER_FUNCTOR_DATA_JUN_10_2009_0954AM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/spirit/home/qi/detail/assign_to.hpp>
+#include <boost/spirit/home/support/detail/lexer/generator.hpp>
+#include <boost/spirit/home/support/detail/lexer/rules.hpp>
+#include <boost/spirit/home/support/detail/lexer/state_machine.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/iterator_tokenizer.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/semantic_action_data.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/wrap_action.hpp>
+#include <boost/mpl/bool.hpp>
+#include <boost/optional.hpp>
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ namespace detail
+ {
+ ///////////////////////////////////////////////////////////////////////
+ template <typename Iterator, typename HasActors, typename HasState
+ , typename TokenValue>
+ class data; // no default specialization
+
+ ///////////////////////////////////////////////////////////////////////
+ // neither supports state, nor actors
+ template <typename Iterator, typename TokenValue>
+ class data<Iterator, mpl::false_, mpl::false_, TokenValue>
+ {
+ protected:
+ typedef typename
+ boost::detail::iterator_traits<Iterator>::value_type
+ char_type;
+
+ public:
+ typedef Iterator base_iterator_type;
+ typedef iterator_range<Iterator> token_value_type;
+ typedef token_value_type get_value_type;
+ typedef std::size_t state_type;
+ typedef char_type const* state_name_type;
+ typedef unused_type semantic_actions_type;
+ typedef detail::wrap_action<unused_type, Iterator, data, std::size_t>
+ wrap_action_type;
+
+ typedef unused_type next_token_functor;
+ typedef unused_type get_state_name_type;
+
+ // initialize the shared data
+ template <typename IterData>
+ data (IterData const& data_, Iterator& first, Iterator const& last)
+ : first_(first), last_(last)
+ , state_machine_(data_.state_machine_)
+ , rules_(data_.rules_)
+ , bol_(data_.state_machine_.data()._seen_BOL_assertion) {}
+
+ // The following functions are used by the implementation of the
+ // placeholder '_state'.
+ template <typename Char>
+ void set_state_name (Char const*)
+ {
+// some (random) versions of gcc instantiate this function even if it's not
+// needed leading to false static asserts
+#if !defined(__GNUC__)
+ // If you see a compile time assertion below you're probably
+ // using a token type not supporting lexer states (the 3rd
+ // template parameter of the token is mpl::false_), but your
+ // code uses state changes anyways.
+ BOOST_STATIC_ASSERT(false);
+#endif
+ }
+ char_type const* get_state_name() const { return rules_.initial(); }
+ std::size_t get_state_id (char_type const*) const
+ {
+ return 0;
+ }
+
+ // The function get_eoi() is used by the implementation of the
+ // placeholder '_eoi'.
+ Iterator const& get_eoi() const { return last_; }
+
+ // The function less() is used by the implementation of the support
+ // function lex::less(). Its functionality is equivalent to flex'
+ // function yyless(): it returns an iterator positioned to the
+ // nth input character beyond the current start iterator (i.e. by
+ // assigning the return value to the placeholder '_end' it is
+ // possible to return all but the first n characters of the current
+ // token back to the input stream).
+ //
+ // This function does nothing as long as no semantic actions are
+ // used.
+ Iterator const& less(Iterator const& it, int)
+ {
+ // The following assertion fires most likely because you are
+ // using lexer semantic actions without using the actor_lexer
+ // as the base class for your token definition class.
+ BOOST_ASSERT(false &&
+ "Are you using lexer semantic actions without using the "
+ "actor_lexer base?");
+ return it;
+ }
+
+ // The function more() is used by the implementation of the support
+ // function lex::more(). Its functionality is equivalent to flex'
+ // function yymore(): it tells the lexer that the next time it
+ // matches a rule, the corresponding token should be appended onto
+ // the current token value rather than replacing it.
+ //
+ // These functions do nothing as long as no semantic actions are
+ // used.
+ void more()
+ {
+ // The following assertion fires most likely because you are
+ // using lexer semantic actions without using the actor_lexer
+ // as the base class for your token definition class.
+ BOOST_ASSERT(false &&
+ "Are you using lexer semantic actions without using the "
+ "actor_lexer base?");
+ }
+ bool adjust_start() { return false; }
+ void revert_adjust_start() {}
+
+ // The function lookahead() is used by the implementation of the
+ // support function lex::lookahead. It can be used to implement
+ // lookahead for lexer engines not supporting constructs like flex'
+ // a/b (match a, but only when followed by b):
+ //
+ // This function does nothing as long as no semantic actions are
+ // used.
+ bool lookahead(std::size_t, std::size_t /*state*/ = std::size_t(~0))
+ {
+ // The following assertion fires most likely because you are
+ // using lexer semantic actions without using the actor_lexer
+ // as the base class for your token definition class.
+ BOOST_ASSERT(false &&
+ "Are you using lexer semantic actions without using the "
+ "actor_lexer base?");
+ return false;
+ }
+
+ // the functions next, invoke_actions, and get_state are used by
+ // the functor implementation below
+
+ // The function next() tries to match the next token from the
+ // underlying input sequence.
+ std::size_t next(Iterator& end, std::size_t& unique_id, bool& prev_bol)
+ {
+ prev_bol = bol_;
+
+ typedef basic_iterator_tokeniser<Iterator> tokenizer;
+ return tokenizer::next(state_machine_, bol_, end, last_
+ , unique_id);
+ }
+
+ // nothing to invoke, so this is empty
+ BOOST_SCOPED_ENUM(pass_flags) invoke_actions(std::size_t
+ , std::size_t, std::size_t, Iterator const&)
+ {
+ return pass_flags::pass_normal; // always accept
+ }
+
+ std::size_t get_state() const { return 0; }
+ void set_state(std::size_t) {}
+
+ void set_end(Iterator const& it) {}
+
+ Iterator& get_first() { return first_; }
+ Iterator const& get_first() const { return first_; }
+ Iterator const& get_last() const { return last_; }
+
+ iterator_range<Iterator> get_value() const
+ {
+ return iterator_range<Iterator>(first_, last_);
+ }
+ bool has_value() const { return false; }
+ void reset_value() {}
+
+ void reset_bol(bool bol) { bol_ = bol; }
+
+ protected:
+ Iterator& first_;
+ Iterator last_;
+
+ boost::lexer::basic_state_machine<char_type> const& state_machine_;
+ boost::lexer::basic_rules<char_type> const& rules_;
+
+ bool bol_; // helper storing whether last character was \n
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ data& operator= (data const&);
+ };
+
+ ///////////////////////////////////////////////////////////////////////
+ // doesn't support lexer semantic actions, but supports state
+ template <typename Iterator, typename TokenValue>
+ class data<Iterator, mpl::false_, mpl::true_, TokenValue>
+ : public data<Iterator, mpl::false_, mpl::false_, TokenValue>
+ {
+ protected:
+ typedef data<Iterator, mpl::false_, mpl::false_, TokenValue> base_type;
+ typedef typename base_type::char_type char_type;
+
+ public:
+ typedef Iterator base_iterator_type;
+ typedef iterator_range<Iterator> token_value_type;
+ typedef token_value_type get_value_type;
+ typedef typename base_type::state_type state_type;
+ typedef typename base_type::state_name_type state_name_type;
+ typedef typename base_type::semantic_actions_type
+ semantic_actions_type;
+
+ // initialize the shared data
+ template <typename IterData>
+ data (IterData const& data_, Iterator& first, Iterator const& last)
+ : base_type(data_, first, last)
+ , state_(0) {}
+
+ // The following functions are used by the implementation of the
+ // placeholder '_state'.
+ void set_state_name (char_type const* new_state)
+ {
+ std::size_t state_id = this->rules_.state(new_state);
+
+ // If the following assertion fires you've probably been using
+ // a lexer state name which was not defined in your token
+ // definition.
+ BOOST_ASSERT(state_id != boost::lexer::npos);
+
+ if (state_id != boost::lexer::npos)
+ state_ = state_id;
+ }
+ char_type const* get_state_name() const
+ {
+ return this->rules_.state(state_);
+ }
+ std::size_t get_state_id (char_type const* state) const
+ {
+ return this->rules_.state(state);
+ }
+
+ // the functions next() and get_state() are used by the functor
+ // implementation below
+
+ // The function next() tries to match the next token from the
+ // underlying input sequence.
+ std::size_t next(Iterator& end, std::size_t& unique_id, bool& prev_bol)
+ {
+ prev_bol = this->bol_;
+
+ typedef basic_iterator_tokeniser<Iterator> tokenizer;
+ return tokenizer::next(this->state_machine_, state_,
+ this->bol_, end, this->get_eoi(), unique_id);
+ }
+
+ std::size_t& get_state() { return state_; }
+ void set_state(std::size_t state) { state_ = state; }
+
+ protected:
+ std::size_t state_;
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ data& operator= (data const&);
+ };
+
+ ///////////////////////////////////////////////////////////////////////
+ // does support lexer semantic actions, may support state
+ template <typename Iterator, typename HasState, typename TokenValue>
+ class data<Iterator, mpl::true_, HasState, TokenValue>
+ : public data<Iterator, mpl::false_, HasState, TokenValue>
+ {
+ public:
+ typedef semantic_actions<Iterator, HasState, data>
+ semantic_actions_type;
+
+ protected:
+ typedef data<Iterator, mpl::false_, HasState, TokenValue> base_type;
+ typedef typename base_type::char_type char_type;
+ typedef typename semantic_actions_type::functor_wrapper_type
+ functor_wrapper_type;
+
+ public:
+ typedef Iterator base_iterator_type;
+ typedef TokenValue token_value_type;
+ typedef TokenValue const& get_value_type;
+ typedef typename base_type::state_type state_type;
+ typedef typename base_type::state_name_type state_name_type;
+
+ typedef detail::wrap_action<functor_wrapper_type
+ , Iterator, data, std::size_t> wrap_action_type;
+
+ template <typename IterData>
+ data (IterData const& data_, Iterator& first, Iterator const& last)
+ : base_type(data_, first, last)
+ , actions_(data_.actions_), hold_()
+ , value_(iterator_range<Iterator>(first, last))
+ , has_value_(false), has_hold_(false) {}
+
+ // invoke attached semantic actions, if defined
+ BOOST_SCOPED_ENUM(pass_flags) invoke_actions(std::size_t state
+ , std::size_t& id, std::size_t unique_id, Iterator& end)
+ {
+ return actions_.invoke_actions(state, id, unique_id, end, *this);
+ }
+
+ // The function less() is used by the implementation of the support
+ // function lex::less(). Its functionality is equivalent to flex'
+ // function yyless(): it returns an iterator positioned to the
+ // nth input character beyond the current start iterator (i.e. by
+ // assigning the return value to the placeholder '_end' it is
+ // possible to return all but the first n characters of the current
+ // token back to the input stream).
+ Iterator const& less(Iterator& it, int n)
+ {
+ it = this->get_first();
+ std::advance(it, n);
+ return it;
+ }
+
+ // The function more() is used by the implementation of the support
+ // function lex::more(). Its functionality is equivalent to flex'
+ // function yymore(): it tells the lexer that the next time it
+ // matches a rule, the corresponding token should be appended onto
+ // the current token value rather than replacing it.
+ void more()
+ {
+ hold_ = this->get_first();
+ has_hold_ = true;
+ }
+
+ // The function lookahead() is used by the implementation of the
+ // support function lex::lookahead. It can be used to implement
+ // lookahead for lexer engines not supporting constructs like flex'
+ // a/b (match a, but only when followed by b)
+ bool lookahead(std::size_t id, std::size_t state = std::size_t(~0))
+ {
+ Iterator end = end_;
+ std::size_t unique_id = boost::lexer::npos;
+ bool bol = this->bol_;
+
+ if (std::size_t(~0) == state)
+ state = this->state_;
+
+ typedef basic_iterator_tokeniser<Iterator> tokenizer;
+ return id == tokenizer::next(this->state_machine_, state,
+ bol, end, this->get_eoi(), unique_id);
+ }
+
+ // The adjust_start() and revert_adjust_start() are helper
+ // functions needed to implement the functionality required for
+ // lex::more(). They are called from the functor implementation in functor.hpp.
+ bool adjust_start()
+ {
+ if (!has_hold_)
+ return false;
+
+ std::swap(this->get_first(), hold_);
+ has_hold_ = false;
+ return true;
+ }
+ void revert_adjust_start()
+ {
+ // this will be called only if adjust_start above returned true
+ std::swap(this->get_first(), hold_);
+ has_hold_ = true;
+ }
+
+ TokenValue const& get_value() const
+ {
+ if (!has_value_) {
+ value_ = iterator_range<Iterator>(this->get_first(), end_);
+ has_value_ = true;
+ }
+ return value_;
+ }
+ template <typename Value>
+ void set_value(Value const& val)
+ {
+ value_ = val;
+ has_value_ = true;
+ }
+ void set_end(Iterator const& it)
+ {
+ end_ = it;
+ }
+ bool has_value() const { return has_value_; }
+ void reset_value() { has_value_ = false; }
+
+ protected:
+ semantic_actions_type const& actions_;
+ Iterator hold_; // iterator needed to support lex::more()
+ Iterator end_; // iterator pointing to end of matched token
+ mutable TokenValue value_; // token value to use
+ mutable bool has_value_; // 'true' if value_ is valid
+ bool has_hold_; // 'true' if hold_ is valid
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ data& operator= (data const&);
+ };
+
+ ///////////////////////////////////////////////////////////////////////
+ // does support lexer semantic actions, may support state, is used for
+ // position_token exposing exactly one type
+ template <typename Iterator, typename HasState, typename TokenValue>
+ class data<Iterator, mpl::true_, HasState, boost::optional<TokenValue> >
+ : public data<Iterator, mpl::false_, HasState, TokenValue>
+ {
+ public:
+ typedef semantic_actions<Iterator, HasState, data>
+ semantic_actions_type;
+
+ protected:
+ typedef data<Iterator, mpl::false_, HasState, TokenValue> base_type;
+ typedef typename base_type::char_type char_type;
+ typedef typename semantic_actions_type::functor_wrapper_type
+ functor_wrapper_type;
+
+ public:
+ typedef Iterator base_iterator_type;
+ typedef boost::optional<TokenValue> token_value_type;
+ typedef boost::optional<TokenValue> const& get_value_type;
+ typedef typename base_type::state_type state_type;
+ typedef typename base_type::state_name_type state_name_type;
+
+ typedef detail::wrap_action<functor_wrapper_type
+ , Iterator, data, std::size_t> wrap_action_type;
+
+ template <typename IterData>
+ data (IterData const& data_, Iterator& first, Iterator const& last)
+ : base_type(data_, first, last)
+ , actions_(data_.actions_), hold_()
+ , has_value_(false), has_hold_(false)
+ {
+ spirit::traits::assign_to(first, last, value_);
+ has_value_ = true;
+ }
+
+ // invoke attached semantic actions, if defined
+ BOOST_SCOPED_ENUM(pass_flags) invoke_actions(std::size_t state
+ , std::size_t& id, std::size_t unique_id, Iterator& end)
+ {
+ return actions_.invoke_actions(state, id, unique_id, end, *this);
+ }
+
+ // The function less() is used by the implementation of the support
+ // function lex::less(). Its functionality is equivalent to flex'
+ // function yyless(): it returns an iterator positioned to the
+ // nth input character beyond the current start iterator (i.e. by
+ // assigning the return value to the placeholder '_end' it is
+ // possible to return all but the first n characters of the current
+ // token back to the input stream).
+ Iterator const& less(Iterator& it, int n)
+ {
+ it = this->get_first();
+ std::advance(it, n);
+ return it;
+ }
+
+ // The function more() is used by the implementation of the support
+ // function lex::more(). Its functionality is equivalent to flex'
+ // function yymore(): it tells the lexer that the next time it
+ // matches a rule, the corresponding token should be appended onto
+ // the current token value rather than replacing it.
+ void more()
+ {
+ hold_ = this->get_first();
+ has_hold_ = true;
+ }
+
+ // The function lookahead() is used by the implementation of the
+ // support function lex::lookahead. It can be used to implement
+ // lookahead for lexer engines not supporting constructs like flex'
+ // a/b (match a, but only when followed by b)
+ bool lookahead(std::size_t id, std::size_t state = std::size_t(~0))
+ {
+ Iterator end = end_;
+ std::size_t unique_id = boost::lexer::npos;
+ bool bol = this->bol_;
+
+ if (std::size_t(~0) == state)
+ state = this->state_;
+
+ typedef basic_iterator_tokeniser<Iterator> tokenizer;
+ return id == tokenizer::next(this->state_machine_, state,
+ bol, end, this->get_eoi(), unique_id);
+ }
+
+ // The adjust_start() and revert_adjust_start() are helper
+ // functions needed to implement the functionality required for
+ // lex::more(). They are called from the functor implementation in functor.hpp.
+ bool adjust_start()
+ {
+ if (!has_hold_)
+ return false;
+
+ std::swap(this->get_first(), hold_);
+ has_hold_ = false;
+ return true;
+ }
+ void revert_adjust_start()
+ {
+ // this will be called only if adjust_start above returned true
+ std::swap(this->get_first(), hold_);
+ has_hold_ = true;
+ }
+
+ token_value_type const& get_value() const
+ {
+ if (!has_value_) {
+ spirit::traits::assign_to(this->get_first(), end_, value_);
+ has_value_ = true;
+ }
+ return value_;
+ }
+ template <typename Value>
+ void set_value(Value const& val)
+ {
+ value_ = val;
+ has_value_ = true;
+ }
+ void set_end(Iterator const& it)
+ {
+ end_ = it;
+ }
+ bool has_value() const { return has_value_; }
+ void reset_value() { has_value_ = false; }
+
+ protected:
+ semantic_actions_type const& actions_;
+ Iterator hold_; // iterator needed to support lex::more()
+ Iterator end_; // iterator pointing to end of matched token
+ mutable token_value_type value_; // token value to use
+ mutable bool has_value_; // 'true' if value_ is valid
+ bool has_hold_; // 'true' if hold_ is valid
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ data& operator= (data const&);
+ };
+ }
+}}}}
+
+#endif
+
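The less() members above implement the flex yyless() analogue described in their comments: they return an iterator positioned n characters past the start of the current match, so assigning that iterator to the '_end' placeholder hands everything after the first n characters back to the input. A tiny sketch of just that arithmetic (names are illustrative, not part of the header):

    #include <cassert>
    #include <iterator>
    #include <string>

    // Illustrative stand-in for data<...>::less(): advance a copy of the
    // match start by n characters and hand it back as the new match end.
    template <typename Iterator>
    Iterator keep_first_n(Iterator first, int n)
    {
        std::advance(first, n);
        return first;
    }

    int main()
    {
        std::string match("integer");
        // Keep only "int"; conceptually "eger" goes back to the input stream.
        std::string kept(match.begin(), keep_first_n(match.begin(), 3));
        assert(kept == "int");
        return 0;
    }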
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator.hpp
new file mode 100644
index 0000000..f2793ba
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator.hpp
@@ -0,0 +1,121 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_LEXER_ITERATOR_MAR_16_2007_0353PM)
+#define BOOST_SPIRIT_LEX_LEXER_ITERATOR_MAR_16_2007_0353PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#if defined(BOOST_SPIRIT_DEBUG)
+#include <boost/spirit/home/support/iterators/detail/buf_id_check_policy.hpp>
+#else
+#include <boost/spirit/home/support/iterators/detail/no_check_policy.hpp>
+#endif
+#include <boost/spirit/home/support/iterators/detail/split_functor_input_policy.hpp>
+#include <boost/spirit/home/support/iterators/detail/ref_counted_policy.hpp>
+#include <boost/spirit/home/support/iterators/detail/split_std_deque_policy.hpp>
+#include <boost/spirit/home/support/iterators/multi_pass.hpp>
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename FunctorData>
+ struct make_multi_pass
+ {
+ // Divide the given functor type into its components (unique and
+ // shared) and build a std::pair from these parts
+ typedef std::pair<typename FunctorData::unique
+ , typename FunctorData::shared> functor_data_type;
+
+ // This is the result type returned from the iterator
+ typedef typename FunctorData::result_type result_type;
+
+ // Compose the multi_pass iterator policy type from the appropriate
+ // policies
+ typedef iterator_policies::split_functor_input input_policy;
+ typedef iterator_policies::ref_counted ownership_policy;
+#if defined(BOOST_SPIRIT_DEBUG)
+ typedef iterator_policies::buf_id_check check_policy;
+#else
+ typedef iterator_policies::no_check check_policy;
+#endif
+ typedef iterator_policies::split_std_deque storage_policy;
+
+ typedef iterator_policies::default_policy<
+ ownership_policy, check_policy, input_policy, storage_policy>
+ policy_type;
+
+ // Compose the multi_pass iterator from the policy
+ typedef spirit::multi_pass<functor_data_type, policy_type> type;
+ };
+
+ ///////////////////////////////////////////////////////////////////////////
+ // lexer_iterator exposes an iterator for a lexertl based dfa (lexer)
+ // The template parameters have the same semantics as described for the
+ // functor above.
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Functor>
+ class iterator : public make_multi_pass<Functor>::type
+ {
+ public:
+ typedef typename Functor::unique unique_functor_type;
+ typedef typename Functor::shared shared_functor_type;
+
+ typedef typename Functor::iterator_type base_iterator_type;
+ typedef typename Functor::result_type token_type;
+
+ private:
+ typedef typename make_multi_pass<Functor>::functor_data_type
+ functor_type;
+ typedef typename make_multi_pass<Functor>::type base_type;
+ typedef typename Functor::char_type char_type;
+
+ public:
+ // create a new iterator encapsulating the lexer object to be used
+ // for tokenization
+ template <typename IteratorData>
+ iterator(IteratorData const& iterdata_, base_iterator_type& first
+ , base_iterator_type const& last, char_type const* state = 0)
+ : base_type(functor_type(unique_functor_type()
+ , shared_functor_type(iterdata_, first, last)))
+ {
+ set_state(map_state(state));
+ }
+
+ // create an end iterator usable for end of range checking
+ iterator() {}
+
+ // (wash): < mgaunard> T it; T it2 = ++it; doesn't compile
+ // < mgaunard> this gets fixed by adding
+ iterator(const base_type& base)
+ : base_type(base) { }
+
+ // set the new required state for the underlying lexer object
+ std::size_t set_state(std::size_t state)
+ {
+ return unique_functor_type::set_state(*this, state);
+ }
+
+ // get the current state for the underlying lexer object
+ std::size_t get_state()
+ {
+ return unique_functor_type::get_state(*this);
+ }
+
+ // map the given state name to a corresponding state id as understood
+ // by the underlying lexer object
+ std::size_t map_state(char_type const* statename)
+ {
+ return (0 != statename)
+ ? unique_functor_type::map_state(*this, statename)
+ : 0;
+ }
+ };
+
+}}}}
+
+#endif
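For orientation, this is roughly how the iterator defined above is driven from user code, in the style of the Spirit.Lex examples. The token definition class, the token pattern, and the typedef names are assumptions for illustration, not part of this commit:

    #include <boost/spirit/include/lex_lexertl.hpp>
    #include <cstddef>
    #include <iostream>
    #include <string>

    namespace lex = boost::spirit::lex;

    // Minimal token definitions: words and whitespace.
    template <typename Lexer>
    struct word_tokens : lex::lexer<Lexer>
    {
        word_tokens()
        {
            word = "[a-zA-Z]+";
            this->self = word | lex::token_def<>("[ \t\n]+");
        }
        lex::token_def<> word;
    };

    int main()
    {
        typedef lex::lexertl::token<char const*> token_type;
        typedef lex::lexertl::lexer<token_type> lexer_type;

        word_tokens<lexer_type> tokens;
        std::string input("one two three");
        char const* first = input.c_str();
        char const* last = first + input.size();

        // begin()/end() hand back the multi_pass based iterator defined above
        lexer_type::iterator_type it = tokens.begin(first, last);
        lexer_type::iterator_type end = tokens.end();

        std::size_t words = 0;
        while (it != end && token_is_valid(*it)) {
            if (it->id() == tokens.word.id())
                ++words;
            ++it;
        }
        std::cout << words << " words\n";   // expected output: 3 words
        return 0;
    }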
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator_tokenizer.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator_tokenizer.hpp
new file mode 100644
index 0000000..31dffce
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/iterator_tokenizer.hpp
@@ -0,0 +1,255 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEXERTL_ITERATOR_TOKENISER_MARCH_22_2007_0859AM)
+#define BOOST_SPIRIT_LEXERTL_ITERATOR_TOKENISER_MARCH_22_2007_0859AM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/detail/iterator.hpp>
+#include <boost/spirit/home/support/detail/lexer/state_machine.hpp>
+#include <boost/spirit/home/support/detail/lexer/consts.hpp>
+#include <boost/spirit/home/support/detail/lexer/size_t.hpp>
+#include <boost/spirit/home/support/detail/lexer/char_traits.hpp>
+#include <vector>
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ ///////////////////////////////////////////////////////////////////////////
+ template<typename Iterator>
+ class basic_iterator_tokeniser
+ {
+ public:
+ typedef std::vector<std::size_t> size_t_vector;
+ typedef typename boost::detail::iterator_traits<Iterator>::value_type
+ char_type;
+
+ static std::size_t next (
+ boost::lexer::basic_state_machine<char_type> const& state_machine_
+ , std::size_t &dfa_state_, bool& bol_, Iterator &start_token_
+ , Iterator const& end_, std::size_t& unique_id_)
+ {
+ if (start_token_ == end_)
+ {
+ unique_id_ = boost::lexer::npos;
+ return 0;
+ }
+
+ bool bol = bol_;
+ boost::lexer::detail::internals const& internals_ =
+ state_machine_.data();
+
+ again:
+ std::size_t const* lookup_ = &internals_._lookup[dfa_state_]->
+ front ();
+ std::size_t dfa_alphabet_ = internals_._dfa_alphabet[dfa_state_];
+ std::size_t const* dfa_ = &internals_._dfa[dfa_state_]->front ();
+
+ std::size_t const* ptr_ = dfa_ + dfa_alphabet_;
+ Iterator curr_ = start_token_;
+ bool end_state_ = *ptr_ != 0;
+ std::size_t id_ = *(ptr_ + boost::lexer::id_index);
+ std::size_t uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ std::size_t end_start_state_ = dfa_state_;
+ bool end_bol_ = bol_;
+ Iterator end_token_ = start_token_;
+
+ while (curr_ != end_)
+ {
+ std::size_t const BOL_state_ = ptr_[boost::lexer::bol_index];
+ std::size_t const EOL_state_ = ptr_[boost::lexer::eol_index];
+
+ if (BOL_state_ && bol)
+ {
+ ptr_ = &dfa_[BOL_state_ * dfa_alphabet_];
+ }
+ else if (EOL_state_ && *curr_ == '\n')
+ {
+ ptr_ = &dfa_[EOL_state_ * dfa_alphabet_];
+ }
+ else
+ {
+ typedef typename
+ boost::detail::iterator_traits<Iterator>::value_type
+ value_type;
+ typedef typename
+ boost::lexer::char_traits<value_type>::index_type
+ index_type;
+
+ index_type index =
+ boost::lexer::char_traits<value_type>::call(*curr_++);
+ bol = (index == '\n') ? true : false;
+ std::size_t const state_ = ptr_[
+ lookup_[static_cast<std::size_t>(index)]];
+
+ if (state_ == 0)
+ {
+ break;
+ }
+
+ ptr_ = &dfa_[state_ * dfa_alphabet_];
+ }
+
+ if (*ptr_)
+ {
+ end_state_ = true;
+ id_ = *(ptr_ + boost::lexer::id_index);
+ uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ end_start_state_ = *(ptr_ + boost::lexer::state_index);
+ end_bol_ = bol;
+ end_token_ = curr_;
+ }
+ }
+
+ std::size_t const EOL_state_ = ptr_[boost::lexer::eol_index];
+
+ if (EOL_state_ && curr_ == end_)
+ {
+ ptr_ = &dfa_[EOL_state_ * dfa_alphabet_];
+
+ if (*ptr_)
+ {
+ end_state_ = true;
+ id_ = *(ptr_ + boost::lexer::id_index);
+ uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ end_start_state_ = *(ptr_ + boost::lexer::state_index);
+ end_bol_ = bol;
+ end_token_ = curr_;
+ }
+ }
+
+ if (end_state_) {
+ // return longest match
+ dfa_state_ = end_start_state_;
+ start_token_ = end_token_;
+
+ if (id_ == 0)
+ {
+ bol = end_bol_;
+ goto again;
+ }
+ else
+ {
+ bol_ = end_bol_;
+ }
+ }
+ else {
+ bol_ = (*start_token_ == '\n') ? true : false;
+ id_ = boost::lexer::npos;
+ uid_ = boost::lexer::npos;
+ }
+
+ unique_id_ = uid_;
+ return id_;
+ }
+
+ ///////////////////////////////////////////////////////////////////////
+ static std::size_t next (
+ boost::lexer::basic_state_machine<char_type> const& state_machine_
+ , bool& bol_, Iterator &start_token_, Iterator const& end_
+ , std::size_t& unique_id_)
+ {
+ if (start_token_ == end_)
+ {
+ unique_id_ = boost::lexer::npos;
+ return 0;
+ }
+
+ bool bol = bol_;
+ std::size_t const* lookup_ = &state_machine_.data()._lookup[0]->front();
+ std::size_t dfa_alphabet_ = state_machine_.data()._dfa_alphabet[0];
+ std::size_t const* dfa_ = &state_machine_.data()._dfa[0]->front ();
+ std::size_t const* ptr_ = dfa_ + dfa_alphabet_;
+
+ Iterator curr_ = start_token_;
+ bool end_state_ = *ptr_ != 0;
+ std::size_t id_ = *(ptr_ + boost::lexer::id_index);
+ std::size_t uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ bool end_bol_ = bol_;
+ Iterator end_token_ = start_token_;
+
+ while (curr_ != end_)
+ {
+ std::size_t const BOL_state_ = ptr_[boost::lexer::bol_index];
+ std::size_t const EOL_state_ = ptr_[boost::lexer::eol_index];
+
+ if (BOL_state_ && bol)
+ {
+ ptr_ = &dfa_[BOL_state_ * dfa_alphabet_];
+ }
+ else if (EOL_state_ && *curr_ == '\n')
+ {
+ ptr_ = &dfa_[EOL_state_ * dfa_alphabet_];
+ }
+ else
+ {
+ typedef typename
+ boost::detail::iterator_traits<Iterator>::value_type
+ value_type;
+ typedef typename
+ boost::lexer::char_traits<value_type>::index_type
+ index_type;
+
+ index_type index =
+ boost::lexer::char_traits<value_type>::call(*curr_++);
+ bol = (index == '\n') ? true : false;
+ std::size_t const state_ = ptr_[
+ lookup_[static_cast<std::size_t>(index)]];
+
+ if (state_ == 0)
+ {
+ break;
+ }
+
+ ptr_ = &dfa_[state_ * dfa_alphabet_];
+ }
+
+ if (*ptr_)
+ {
+ end_state_ = true;
+ id_ = *(ptr_ + boost::lexer::id_index);
+ uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ end_bol_ = bol;
+ end_token_ = curr_;
+ }
+ }
+
+ std::size_t const EOL_state_ = ptr_[boost::lexer::eol_index];
+
+ if (EOL_state_ && curr_ == end_)
+ {
+ ptr_ = &dfa_[EOL_state_ * dfa_alphabet_];
+
+ if (*ptr_)
+ {
+ end_state_ = true;
+ id_ = *(ptr_ + boost::lexer::id_index);
+ uid_ = *(ptr_ + boost::lexer::unique_id_index);
+ end_bol_ = bol;
+ end_token_ = curr_;
+ }
+ }
+
+ if (end_state_) {
+ // return longest match
+ bol_ = end_bol_;
+ start_token_ = end_token_;
+ }
+ else {
+ bol_ = *start_token_ == '\n';
+ id_ = boost::lexer::npos;
+ uid_ = boost::lexer::npos;
+ }
+
+ unique_id_ = uid_;
+ return id_;
+ }
+ };
+
+}}}}
+
+#endif
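Both next() overloads above implement the same longest-match policy: keep stepping the DFA, remember the most recent accepting position (and its id), and when the walk dies fall back to that position, or report npos if no accepting state was ever reached. A much-simplified, self-contained sketch of that loop with a hand-written toy DFA (everything here is illustrative, not the lexertl table layout):

    #include <cassert>
    #include <cstddef>
    #include <string>

    std::size_t const npos = static_cast<std::size_t>(-1);

    // Toy DFA: state 1 is the start state, state 2 accepts [a-z]+ as id 1,
    // state 0 is the dead state.
    std::size_t dfa_next(std::size_t state, char ch)
    {
        if ((state == 1 || state == 2) && ch >= 'a' && ch <= 'z')
            return 2;
        return 0;
    }
    std::size_t dfa_id(std::size_t state) { return state == 2 ? 1 : npos; }

    // Longest-match scan, shaped like basic_iterator_tokeniser::next():
    // remember the last accepting position and rewind to it on failure.
    std::size_t next_token(std::string::const_iterator& first,
                           std::string::const_iterator last,
                           std::string& value)
    {
        std::size_t state = 1;
        std::size_t matched_id = npos;
        std::string::const_iterator curr = first;
        std::string::const_iterator end_token = first;

        while (curr != last)
        {
            state = dfa_next(state, *curr++);
            if (state == 0)
                break;                      // dead state: stop scanning
            if (dfa_id(state) != npos)
            {
                matched_id = dfa_id(state); // longest match seen so far
                end_token = curr;
            }
        }

        if (matched_id != npos)
        {
            value.assign(first, end_token);
            first = end_token;              // advance past the match
        }
        return matched_id;
    }

    int main()
    {
        std::string input("abc 123");
        std::string::const_iterator it = input.begin();
        std::string tok;
        assert(next_token(it, input.end(), tok) == 1 && tok == "abc");
        assert(next_token(it, input.end(), tok) == npos);   // ' ' has no rule
        return 0;
    }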
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/lexer.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/lexer.hpp
new file mode 100644
index 0000000..0f8af55
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/lexer.hpp
@@ -0,0 +1,399 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_LEXER_MAR_17_2007_0139PM)
+#define BOOST_SPIRIT_LEX_LEXER_MAR_17_2007_0139PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <iosfwd>
+
+#include <boost/spirit/home/support/detail/lexer/generator.hpp>
+#include <boost/spirit/home/support/detail/lexer/rules.hpp>
+#include <boost/spirit/home/support/detail/lexer/consts.hpp>
+#include <boost/spirit/home/support/unused.hpp>
+
+#include <boost/spirit/home/lex/lexer/lexertl/token.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/functor.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/functor_data.hpp>
+#include <boost/spirit/home/lex/lexer/lexertl/iterator.hpp>
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+#include <boost/spirit/home/support/detail/lexer/debug.hpp>
+#endif
+
+#include <boost/foreach.hpp>
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ ///////////////////////////////////////////////////////////////////////////
+ namespace detail
+ {
+ ///////////////////////////////////////////////////////////////////////
+ // The must_escape function checks if the given character value needs
+ // to be preceded by a backslash character to disable its special
+ // meaning in the context of a regular expression
+ ///////////////////////////////////////////////////////////////////////
+ template <typename Char>
+ inline bool must_escape(Char c)
+ {
+ // FIXME: more needed?
+ switch (c) {
+ case '+': case '/': case '*': case '?':
+ case '|':
+ case '(': case ')':
+ case '[': case ']':
+ case '{': case '}':
+ case '.':
+ case '^': case '$':
+ case '\\':
+ case '"':
+ return true;
+
+ default:
+ break;
+ }
+ return false;
+ }
+
+ ///////////////////////////////////////////////////////////////////////
+ // The escape function returns the string representation of the given
+ // character value, possibly escaped with a backslash character, to
+ // allow it being safely used in a regular expression definition.
+ ///////////////////////////////////////////////////////////////////////
+ template <typename Char>
+ inline std::basic_string<Char> escape(Char ch)
+ {
+ std::basic_string<Char> result(1, ch);
+ if (detail::must_escape(ch))
+ {
+ typedef typename std::basic_string<Char>::size_type size_type;
+ result.insert((size_type)0, 1, '\\');
+ }
+ return result;
+ }
+
+ ///////////////////////////////////////////////////////////////////////
+ //
+ ///////////////////////////////////////////////////////////////////////
+ inline boost::lexer::regex_flags map_flags(unsigned int flags)
+ {
+ unsigned int retval = boost::lexer::none;
+ if (flags & match_flags::match_not_dot_newline)
+ retval |= boost::lexer::dot_not_newline;
+ if (flags & match_flags::match_icase)
+ retval |= boost::lexer::icase;
+
+ return boost::lexer::regex_flags(retval);
+ }
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Lexer, typename F>
+ bool generate_static(Lexer const&
+ , std::basic_ostream<typename Lexer::char_type>&
+ , typename Lexer::char_type const*, F);
+
+ ///////////////////////////////////////////////////////////////////////////
+ //
+ // Every lexer type to be used as a lexer for Spirit has to conform to
+ // the following public interface:
+ //
+ // typedefs:
+ // iterator_type The type of the iterator exposed by this lexer.
+ // token_type The type of the tokens returned from the exposed
+ // iterators.
+ //
+ // functions:
+ // default constructor
+ // Since lexers are instantiated as base classes
+ // only it might be a good idea to make this
+ // constructor protected.
+ // begin, end Return a pair of iterators, when dereferenced
+ // returning the sequence of tokens recognized in
+ // the input stream given as the parameters to the
+ // begin() function.
+ // add_token Should add the definition of a token to be
+ // recognized by this lexer.
+ // clear Should delete all current token definitions
+ // associated with the given state of this lexer
+ // object.
+ //
+ // template parameters:
+ // Iterator The type of the iterator used to access the
+ // underlying character stream.
+ // Token The type of the tokens to be returned from the
+ // exposed token iterator.
+ // Functor The type of the InputPolicy to use to instantiate
+ // the multi_pass iterator type to be used as the
+ // token iterator (returned from begin()/end()).
+ //
+ ///////////////////////////////////////////////////////////////////////////
+
+ ///////////////////////////////////////////////////////////////////////////
+ //
+ // The lexer class is an implementation of a Spirit.Lex lexer on
+ // top of Ben Hanson's lexertl library as outlined above (For more
+ // information about lexertl go here: http://www.benhanson.net/lexertl.html).
+ //
+ // This class is supposed to be used as the first and only template
+ // parameter while instantiating instances of a lex::lexer class.
+ //
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Token = token<>
+ , typename Iterator = typename Token::iterator_type
+ , typename Functor = functor<Token, lexertl::detail::data, Iterator> >
+ class lexer
+ {
+ private:
+ struct dummy { void true_() {} };
+ typedef void (dummy::*safe_bool)();
+
+ static std::size_t const all_states_id = static_cast<std::size_t>(-2);
+
+ public:
+ operator safe_bool() const
+ { return initialized_dfa_ ? &dummy::true_ : 0; }
+
+ typedef typename boost::detail::iterator_traits<Iterator>::value_type
+ char_type;
+ typedef std::basic_string<char_type> string_type;
+
+ typedef boost::lexer::basic_rules<char_type> basic_rules_type;
+
+ // Every lexer type to be used as a lexer for Spirit has to conform to
+ // a public interface.
+ typedef Token token_type;
+ typedef typename Token::id_type id_type;
+ typedef iterator<Functor> iterator_type;
+
+ private:
+ // this type is purely used for the iterator_type construction below
+ struct iterator_data_type
+ {
+ typedef typename Functor::semantic_actions_type semantic_actions_type;
+
+ iterator_data_type(
+ boost::lexer::basic_state_machine<char_type> const& sm
+ , boost::lexer::basic_rules<char_type> const& rules
+ , semantic_actions_type const& actions)
+ : state_machine_(sm), rules_(rules), actions_(actions)
+ {}
+
+ boost::lexer::basic_state_machine<char_type> const& state_machine_;
+ boost::lexer::basic_rules<char_type> const& rules_;
+ semantic_actions_type const& actions_;
+
+ private:
+ // silence MSVC warning C4512: assignment operator could not be generated
+ iterator_data_type& operator= (iterator_data_type const&);
+ };
+
+ public:
+ // Return the start iterator usable for iterating over the generated
+ // tokens.
+ iterator_type begin(Iterator& first, Iterator const& last
+ , char_type const* initial_state = 0) const
+ {
+ if (!init_dfa()) // never minimize DFA for dynamic lexers
+ return iterator_type();
+
+ iterator_data_type iterator_data(state_machine_, rules_, actions_);
+ return iterator_type(iterator_data, first, last, initial_state);
+ }
+
+ // Return the end iterator usable to stop iterating over the generated
+ // tokens.
+ iterator_type end() const
+ {
+ return iterator_type();
+ }
+
+ protected:
+ // Lexer instances can be created by means of a derived class only.
+ lexer(unsigned int flags)
+ : flags_(detail::map_flags(flags))
+ , rules_(flags_)
+ , initialized_dfa_(false)
+ {}
+
+ public:
+ // interface for token definition management
+ std::size_t add_token(char_type const* state, char_type tokendef,
+ std::size_t token_id, char_type const* targetstate)
+ {
+ add_state(state);
+ initialized_dfa_ = false;
+ if (state == all_states())
+ return rules_.add(state, detail::escape(tokendef), token_id, rules_.dot());
+
+ if (0 == targetstate)
+ targetstate = state;
+ else
+ add_state(targetstate);
+ return rules_.add(state, detail::escape(tokendef), token_id, targetstate);
+ }
+ std::size_t add_token(char_type const* state, string_type const& tokendef,
+ std::size_t token_id, char_type const* targetstate)
+ {
+ add_state(state);
+ initialized_dfa_ = false;
+ if (state == all_states())
+ return rules_.add(state, tokendef, token_id, rules_.dot());
+
+ if (0 == targetstate)
+ targetstate = state;
+ else
+ add_state(targetstate);
+ return rules_.add(state, tokendef, token_id, targetstate);
+ }
+
+ // interface for pattern definition management
+ void add_pattern (char_type const* state, string_type const& name,
+ string_type const& patterndef)
+ {
+ add_state(state);
+ rules_.add_macro(name.c_str(), patterndef);
+ initialized_dfa_ = false;
+ }
+
+ boost::lexer::rules const& get_rules() const { return rules_; }
+
+ void clear(char_type const* state)
+ {
+ std::size_t s = rules_.state(state);
+ if (boost::lexer::npos != s)
+ rules_.clear(state);
+ initialized_dfa_ = false;
+ }
+ std::size_t add_state(char_type const* state)
+ {
+ if (state == all_states())
+ return all_states_id;
+
+ std::size_t stateid = rules_.state(state);
+ if (boost::lexer::npos == stateid) {
+ stateid = rules_.add_state(state);
+ initialized_dfa_ = false;
+ }
+ return stateid;
+ }
+ string_type initial_state() const
+ {
+ return string_type(rules_.initial());
+ }
+ string_type all_states() const
+ {
+ return string_type(rules_.all_states());
+ }
+
+ // Register a semantic action with the given id
+ template <typename F>
+ void add_action(std::size_t unique_id, std::size_t state, F act)
+ {
+ // If you see an error here stating add_action is not a member of
+ // fusion::unused_type then you are probably having semantic actions
+ // attached to at least one token in the lexer definition without
+ // using the lex::lexertl::actor_lexer<> as its base class.
+ typedef typename Functor::wrap_action_type wrapper_type;
+ if (state == all_states_id) {
+ // add the action to all known states
+ typedef typename
+ basic_rules_type::string_size_t_map::value_type
+ state_type;
+
+ std::size_t states = rules_.statemap().size();
+ BOOST_FOREACH(state_type const& s, rules_.statemap()) {
+ for (std::size_t j = 0; j < states; ++j)
+ actions_.add_action(unique_id + j, s.second, wrapper_type::call(act));
+ }
+ }
+ else {
+ actions_.add_action(unique_id, state, wrapper_type::call(act));
+ }
+ }
+// template <typename F>
+// void add_action(std::size_t unique_id, char_type const* state, F act)
+// {
+// typedef typename Functor::wrap_action_type wrapper_type;
+// actions_.add_action(unique_id, add_state(state), wrapper_type::call(act));
+// }
+
+ // We do not minimize the state machine by default anymore because
+ // Ben said: "If you can afford to generate a lexer at runtime, there
+ // is little point in calling minimise."
+ // Go figure.
+ bool init_dfa(bool minimize = false) const
+ {
+ if (!initialized_dfa_) {
+ state_machine_.clear();
+ typedef boost::lexer::basic_generator<char_type> generator;
+ generator::build (rules_, state_machine_);
+ if (minimize)
+ generator::minimise (state_machine_);
+
+#if defined(BOOST_SPIRIT_LEXERTL_DEBUG)
+ boost::lexer::debug::dump(state_machine_, std::cerr);
+#endif
+ initialized_dfa_ = true;
+
+// // release memory held by rules description
+// basic_rules_type rules;
+// rules.init_state_info(rules_); // preserve states
+// std::swap(rules, rules_);
+ }
+ return true;
+ }
+
+ private:
+ // lexertl specific data
+ mutable boost::lexer::basic_state_machine<char_type> state_machine_;
+ boost::lexer::regex_flags flags_;
+ /*mutable*/ basic_rules_type rules_;
+
+ typename Functor::semantic_actions_type actions_;
+ mutable bool initialized_dfa_;
+
+ // generator functions must be able to access members directly
+ template <typename Lexer, typename F>
+ friend bool generate_static(Lexer const&
+ , std::basic_ostream<typename Lexer::char_type>&
+ , typename Lexer::char_type const*, F);
+ };
+
+ ///////////////////////////////////////////////////////////////////////////
+ //
+ // The actor_lexer class is another implementation of a Spirit.Lex
+ // lexer on top of Ben Hanson's lexertl library as outlined above (For
+ // more information about lexertl go here:
+ // http://www.benhanson.net/lexertl.html).
+ //
+ // The only difference to the lexer class above is that token_def
+ // definitions may have semantic (lexer) actions attached while being
+ // defined:
+ //
+ // int w;
+ // token_def word = "[^ \t\n]+";
+ // self = word[++ref(w)]; // see example: word_count_lexer
+ //
+ // This class is supposed to be used as the first and only template
+ // parameter while instantiating instances of a lex::lexer class.
+ //
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Token = token<>
+ , typename Iterator = typename Token::iterator_type
+ , typename Functor = functor<Token, lexertl::detail::data, Iterator, mpl::true_> >
+ class actor_lexer : public lexer<Token, Iterator, Functor>
+ {
+ protected:
+ // Lexer instances can be created by means of a derived class only.
+ actor_lexer(unsigned int flags)
+ : lexer<Token, Iterator, Functor>(flags) {}
+ };
+
+}}}}
+
+#endif
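The actor_lexer described above exists so that token definitions can carry semantic actions, exactly as in the `self = word[++ref(w)]` line of the comment. A hedged, self-contained word-count sketch in that style (class and variable names are assumptions, modeled on the Spirit.Lex word_count_lexer example):

    #include <boost/spirit/include/lex_lexertl.hpp>
    #include <boost/spirit/include/phoenix_core.hpp>
    #include <boost/spirit/include/phoenix_operator.hpp>
    #include <cstddef>
    #include <iostream>
    #include <string>

    namespace lex = boost::spirit::lex;

    // One token with an attached action that bumps a counter on every match.
    template <typename Lexer>
    struct counting_tokens : lex::lexer<Lexer>
    {
        counting_tokens() : count(0)
        {
            using boost::phoenix::ref;
            word = "[a-zA-Z]+";
            this->self = word[++ref(count)]
                       | lex::token_def<>("[ \t\n]+");
        }
        lex::token_def<> word;
        std::size_t count;
    };

    int main()
    {
        typedef lex::lexertl::token<char const*> token_type;
        // actor_lexer<> is required as soon as actions are attached
        typedef lex::lexertl::actor_lexer<token_type> lexer_type;

        counting_tokens<lexer_type> tokens;
        std::string input("one two three");
        char const* first = input.c_str();
        char const* last = first + input.size();

        bool ok = lex::tokenize(first, last, tokens);
        std::cout << (ok ? tokens.count : 0u) << " words\n";   // expected: 3 words
        return 0;
    }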
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/semantic_action_data.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/semantic_action_data.hpp
new file mode 100644
index 0000000..30748c5
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/semantic_action_data.hpp
@@ -0,0 +1,121 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_LEXER_SEMANTIC_ACTION_DATA_JUN_10_2009_0417PM)
+#define BOOST_SPIRIT_LEX_LEXER_SEMANTIC_ACTION_DATA_JUN_10_2009_0417PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/spirit/home/lex/lexer/pass_flags.hpp>
+#include <boost/mpl/bool.hpp>
+#include <boost/function.hpp>
+#include <vector>
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ namespace detail
+ {
+ ///////////////////////////////////////////////////////////////////////
+ template <typename Iterator, typename SupportsState, typename Data>
+ struct semantic_actions;
+
+ // This specialization of semantic_actions will be used if the token
+ // type (lexer definition) does not support states, which simplifies
+ // the data structures used to store the semantic action function
+ // objects.
+ template <typename Iterator, typename Data>
+ struct semantic_actions<Iterator, mpl::false_, Data>
+ {
+ typedef void functor_type(Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&, std::size_t&, Data&);
+ typedef boost::function<functor_type> functor_wrapper_type;
+
+ // add a semantic action function object
+ template <typename F>
+ void add_action(std::size_t unique_id, std::size_t, F act)
+ {
+ if (actions_.size() <= unique_id)
+ actions_.resize(unique_id + 1);
+
+ actions_[unique_id] = act;
+ }
+
+ // try to invoke a semantic action for the given token (unique_id)
+ BOOST_SCOPED_ENUM(pass_flags) invoke_actions(std::size_t /*state*/
+ , std::size_t& id, std::size_t unique_id, Iterator& end
+ , Data& data) const
+ {
+ // if there is nothing to invoke, continue with 'match'
+ if (unique_id >= actions_.size() || !actions_[unique_id])
+ return pass_flags::pass_normal;
+
+ // Note: all arguments might be changed by the invoked semantic
+ // action
+ BOOST_SCOPED_ENUM(pass_flags) match = pass_flags::pass_normal;
+ actions_[unique_id](data.get_first(), end, match, id, data);
+ return match;
+ }
+
+ std::vector<functor_wrapper_type> actions_;
+ };
+
+ // This specialization of semantic_actions will be used if the token
+ // type (lexer definition) needs to support states, which requires a
+ // more complex data structure for storing the semantic action
+ // function objects.
+ template <typename Iterator, typename Data>
+ struct semantic_actions<Iterator, mpl::true_, Data>
+ {
+ typedef void functor_type(Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&, std::size_t&, Data&);
+ typedef boost::function<functor_type> functor_wrapper_type;
+
+ // add a semantic action function object
+ template <typename F>
+ void add_action(std::size_t unique_id, std::size_t state, F act)
+ {
+ if (actions_.size() <= state)
+ actions_.resize(state + 1);
+
+ std::vector<functor_wrapper_type>& actions (actions_[state]);
+ if (actions.size() <= unique_id)
+ actions.resize(unique_id + 1);
+
+ actions[unique_id] = act;
+ }
+
+ // try to invoke a semantic action for the given token (unique_id)
+ BOOST_SCOPED_ENUM(pass_flags) invoke_actions(std::size_t state
+ , std::size_t& id, std::size_t unique_id, Iterator& end
+ , Data& data) const
+ {
+ // if there is no action defined for this state, return match
+ if (state >= actions_.size())
+ return pass_flags::pass_normal;
+
+ // if there is nothing to invoke, continue with 'match'
+ std::vector<functor_wrapper_type> const& actions = actions_[state];
+ if (unique_id >= actions.size() || !actions[unique_id])
+ return pass_flags::pass_normal;
+
+ // set token value
+ data.set_end(end);
+
+ // Note: all arguments might be changed by the invoked semantic
+ // action
+ BOOST_SCOPED_ENUM(pass_flags) match = pass_flags::pass_normal;
+ actions[unique_id](data.get_first(), end, match, id, data);
+ return match;
+ }
+
+ std::vector<std::vector<functor_wrapper_type> > actions_;
+ };
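+
+ ///////////////////////////////////////////////////////////////////////
+ // Illustrative sketch only (all names below are hypothetical): the
+ // stateful specialization above stores its actions indexed as
+ // actions_[state][unique_id], so registering and later invoking an
+ // action roughly amounts to:
+ //
+ // semantic_actions<Iterator, mpl::true_, Data> sa;
+ // sa.add_action(unique_id, state, act); // grows both vectors on demand
+ //
+ // // called by the lexer functor for every match:
+ // BOOST_SCOPED_ENUM(pass_flags) r =
+ // sa.invoke_actions(state, id, unique_id, end, data);
+ // // r stays pass_normal if no action is registered for this slot
+ ///////////////////////////////////////////////////////////////////////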
+ }
+
+}}}}
+
+#endif
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/token.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/token.hpp
new file mode 100644
index 0000000..90961af
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/token.hpp
@@ -0,0 +1,654 @@
+// Copyright (c) 2001-2011 Hartmut Kaiser
+//
+// Distributed under the Boost Software License, Version 1.0. (See accompanying
+// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+#if !defined(BOOST_SPIRIT_LEX_TOKEN_FEB_10_2008_0751PM)
+#define BOOST_SPIRIT_LEX_TOKEN_FEB_10_2008_0751PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/config.hpp>
+#include <boost/detail/workaround.hpp>
+#include <boost/spirit/home/qi/detail/assign_to.hpp>
+#include <boost/spirit/home/support/attributes.hpp>
+#include <boost/spirit/home/support/argument.hpp>
+#include <boost/spirit/home/support/detail/lexer/generator.hpp>
+#include <boost/spirit/home/support/detail/lexer/rules.hpp>
+#include <boost/spirit/home/support/detail/lexer/consts.hpp>
+#include <boost/spirit/home/support/utree/utree_traits_fwd.hpp>
+#include <boost/spirit/home/lex/lexer/terminals.hpp>
+#include <boost/fusion/include/vector.hpp>
+#include <boost/fusion/include/at.hpp>
+#include <boost/fusion/include/value_at.hpp>
+#include <boost/detail/iterator.hpp>
+#include <boost/variant.hpp>
+#include <boost/mpl/bool.hpp>
+#include <boost/mpl/vector.hpp>
+#include <boost/mpl/is_sequence.hpp>
+#include <boost/mpl/begin.hpp>
+#include <boost/mpl/insert.hpp>
+#include <boost/mpl/vector.hpp>
+#include <boost/mpl/if.hpp>
+#include <boost/mpl/or.hpp>
+#include <boost/type_traits/is_same.hpp>
+#include <boost/range/iterator_range.hpp>
+#if !BOOST_WORKAROUND(BOOST_MSVC, <= 1300)
+#include <boost/static_assert.hpp>
+#endif
+
+#if defined(BOOST_SPIRIT_DEBUG)
+#include <iosfwd>
+#endif
+
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ ///////////////////////////////////////////////////////////////////////////
+ //
+ // The token is the type of the objects returned by default by the
+ // iterator.
+ //
+ // template parameters:
+ // Iterator The type of the iterator used to access the
+ // underlying character stream.
+ // AttributeTypes An mpl sequence containing all the different
+ // token value types to be supported by this
+ // token type.
+ // HasState An mpl::bool_ indicating whether this token type
+ // should support lexer states.
+ // Idtype The type to use for the token id (defaults to
+ // std::size_t).
+ //
+ // It is possible to use other token types with the spirit::lex
+ // framework as well. If you plan to use a different type as your token
+ // type, you'll need to expose the following things from your token type
+ // to make it compatible with spirit::lex:
+ //
+ // typedefs
+ // iterator_type The type of the iterator used to access the
+ // underlying character stream.
+ //
+ // id_type The type of the token id used.
+ //
+ // methods
+ // default constructor
+ // This should initialize the token as an end of
+ // input token.
+ // constructors The prototypes of the other required
+ // constructors should be:
+ //
+ // token(int)
+ // This constructor should initialize the token as
+ // an invalid token (not carrying any specific
+ // values)
+ //
+ // where: the int is used as a tag only and its value is
+ // ignored
+ //
+ // and:
+ //
+ // token(Idtype id, std::size_t state,
+ // iterator_type first, iterator_type last);
+ //
+ // where: id: token id
+ // state: lexer state this token was matched in
+ // first, last: pair of iterators marking the matched
+ // range in the underlying input stream
+ //
+ // accessors
+ // id() return the token id of the matched input sequence
+ // id(newid) set the token id of the token instance
+ //
+ // state() return the lexer state this token was matched in
+ //
+ // value() return the token value
+ //
+ // Additionally, you will have to implement a couple of helper functions
+ // in the same namespace as the token type: a comparison operator==() to
+ // compare your token instances, a token_is_valid() function, and the
+ // specializations of the Spirit customization point
+ // assign_to_attribute_from_value shown below. A minimal sketch of such
+ // a custom token type is given after the forward declaration below.
+ //
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Iterator = char const*
+ , typename AttributeTypes = mpl::vector0<>
+ , typename HasState = mpl::true_
+ , typename Idtype = std::size_t>
+ struct token;
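+
+ ///////////////////////////////////////////////////////////////////////////
+ // A minimal sketch of a custom token type satisfying the requirements
+ // listed above (illustrative only, 'my_token' is a hypothetical name;
+ // the helper functions and the assign_to_attribute_from_value
+ // specializations mentioned above still have to be provided separately):
+ //
+ // template <typename Iterator = char const*>
+ // struct my_token
+ // {
+ // typedef Iterator iterator_type;
+ // typedef std::size_t id_type;
+ //
+ // // end of input token
+ // my_token() : id_(boost::lexer::npos), state_(0) {}
+ // // invalid token, the int is a tag only
+ // explicit my_token(int) : id_(0), state_(0) {}
+ // my_token(id_type id, std::size_t state,
+ // iterator_type first, iterator_type last)
+ // : id_(id), state_(state), value_(first, last) {}
+ //
+ // id_type id() const { return id_; }
+ // void id(id_type newid) { id_ = newid; }
+ // std::size_t state() const { return state_; }
+ // std::pair<Iterator, Iterator> const& value() const { return value_; }
+ //
+ // id_type id_;
+ // std::size_t state_;
+ // std::pair<Iterator, Iterator> value_;
+ // };
+ ///////////////////////////////////////////////////////////////////////////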
+
+ ///////////////////////////////////////////////////////////////////////////
+ // This specialization of the token type doesn't contain any item data and
+ // doesn't support working with lexer states.
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Iterator, typename Idtype>
+ struct token<Iterator, lex::omit, mpl::false_, Idtype>
+ {
+ typedef Iterator iterator_type;
+ typedef mpl::false_ has_state;
+ typedef Idtype id_type;
+ typedef unused_type token_value_type;
+
+ // default constructed tokens correspond to EOI tokens
+ token() : id_(id_type(boost::lexer::npos)) {}
+
+ // construct an invalid token
+ explicit token(int) : id_(id_type(0)) {}
+
+ token(id_type id, std::size_t) : id_(id) {}
+
+ token(id_type id, std::size_t, token_value_type)
+ : id_(id) {}
+
+ token_value_type& value() { static token_value_type u; return u; }
+ token_value_type const& value() const { return unused; }
+
+#if defined(BOOST_SPIRIT_DEBUG)
+ token(id_type id, std::size_t, Iterator const& first
+ , Iterator const& last)
+ : matched_(first, last)
+ , id_(id) {}
+#else
+ token(id_type id, std::size_t, Iterator const&, Iterator const&)
+ : id_(id) {}
+#endif
+
+ // this default conversion operator is needed to allow the direct
+ // usage of tokens in conjunction with the primitive parsers defined
+ // in Qi
+ operator id_type() const { return id_; }
+
+ // Retrieve or set the token id of this token instance.
+ id_type id() const { return id_; }
+ void id(id_type newid) { id_ = newid; }
+
+ std::size_t state() const { return 0; } // always '0' (INITIAL state)
+
+ bool is_valid() const
+ {
+ return 0 != id_ && id_type(boost::lexer::npos) != id_;
+ }
+
+#if defined(BOOST_SPIRIT_DEBUG)
+#if BOOST_WORKAROUND(BOOST_MSVC, == 1600)
+ // workaround for MSVC10 which has problems copying a default
+ // constructed iterator_range
+ token& operator= (token const& rhs)
+ {
+ if (this != &rhs)
+ {
+ id_ = rhs.id_;
+ if (is_valid())
+ matched_ = rhs.matched_;
+ }
+ return *this;
+ }
+#endif
+ std::pair<Iterator, Iterator> matched_;
+#endif
+
+ protected:
+ id_type id_; // token id, 0 if nothing has been matched
+ };
+
+#if defined(BOOST_SPIRIT_DEBUG)
+ template <typename Char, typename Traits, typename Iterator
+ , typename AttributeTypes, typename HasState, typename Idtype>
+ inline std::basic_ostream<Char, Traits>&
+ operator<< (std::basic_ostream<Char, Traits>& os
+ , token<Iterator, AttributeTypes, HasState, Idtype> const& t)
+ {
+ if (t.is_valid()) {
+ Iterator end = t.matched_.second;
+ for (Iterator it = t.matched_.first; it != end; ++it)
+ os << *it;
+ }
+ else {
+ os << "<invalid token>";
+ }
+ return os;
+ }
+#endif
+
+ ///////////////////////////////////////////////////////////////////////////
+ // This specialization of the token type doesn't contain any item data but
+ // supports working with lexer states.
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Iterator, typename Idtype>
+ struct token<Iterator, lex::omit, mpl::true_, Idtype>
+ : token<Iterator, lex::omit, mpl::false_, Idtype>
+ {
+ private:
+ typedef token<Iterator, lex::omit, mpl::false_, Idtype> base_type;
+
+ public:
+ typedef typename base_type::id_type id_type;
+ typedef Iterator iterator_type;
+ typedef mpl::true_ has_state;
+ typedef unused_type token_value_type;
+
+ // default constructed tokens correspond to EOI tokens
+ token() : state_(boost::lexer::npos) {}
+
+ // construct an invalid token
+ explicit token(int) : base_type(0), state_(boost::lexer::npos) {}
+
+ token(id_type id, std::size_t state)
+ : base_type(id, boost::lexer::npos), state_(state) {}
+
+ token(id_type id, std::size_t state, token_value_type)
+ : base_type(id, boost::lexer::npos, unused)
+ , state_(state) {}
+
+ token(id_type id, std::size_t state
+ , Iterator const& first, Iterator const& last)
+ : base_type(id, boost::lexer::npos, first, last)
+ , state_(state) {}
+
+ std::size_t state() const { return state_; }
+
+#if defined(BOOST_SPIRIT_DEBUG) && BOOST_WORKAROUND(BOOST_MSVC, == 1600)
+ // workaround for MSVC10 which has problems copying a default
+ // constructed iterator_range
+ token& operator= (token const& rhs)
+ {
+ if (this != &rhs)
+ {
+ this->base_type::operator=(static_cast<base_type const&>(rhs));
+ state_ = rhs.state_;
+ }
+ return *this;
+ }
+#endif
+
+ protected:
+ std::size_t state_; // lexer state this token was matched in
+ };
+
+ ///////////////////////////////////////////////////////////////////////////
+ // The generic version of the token type derives from the
+ // specialization above and adds a single data member holding the item
+ // data carried by the token instance.
+ ///////////////////////////////////////////////////////////////////////////
+ namespace detail
+ {
+ ///////////////////////////////////////////////////////////////////////
+ // Meta-function to calculate the type of the variant data item to be
+ // stored with each token instance.
+ //
+ // Note: The iterator pair needs to be the first type in the list of
+ // types supported by the generated variant type (this is used
+ // to identify whether the stored data item in a particular
+ // token instance still needs to be converted from the pair of
+ // iterators; see the first of the assign_to_attribute_from_value
+ // specializations below).
+ ///////////////////////////////////////////////////////////////////////
+ template <typename IteratorPair, typename AttributeTypes>
+ struct token_value_typesequence
+ {
+ typedef typename mpl::insert<
+ AttributeTypes
+ , typename mpl::begin<AttributeTypes>::type
+ , IteratorPair
+ >::type sequence_type;
+ typedef typename make_variant_over<sequence_type>::type type;
+ };
+
+ ///////////////////////////////////////////////////////////////////////
+ // The type of the data item stored with a token instance is defined
+ // by the template parameter 'AttributeTypes' and may be:
+ //
+ // lex::omit: no data item is stored with the token
+ // instance (this is handled by the
+ // specializations of the token class
+ // below)
+ // mpl::vector0<>: each token instance stores a pair of
+ // iterators pointing to the matched input
+ // sequence
+ // mpl::vector<...>: each token instance stores a variant able to
+ // hold the pair of iterators pointing
+ // to the matched input sequence, or any of the
+ // types specified in the mpl::vector<>
+ //
+ // All this is done to ensure the token type is as small (in terms
+ // of its byte-size) as possible.
+ ///////////////////////////////////////////////////////////////////////
+ template <typename IteratorPair, typename AttributeTypes>
+ struct token_value_type
+ : mpl::eval_if<
+ mpl::or_<
+ is_same<AttributeTypes, mpl::vector0<> >
+ , is_same<AttributeTypes, mpl::vector<> > >
+ , mpl::identity<IteratorPair>
+ , token_value_typesequence<IteratorPair, AttributeTypes> >
+ {};
+ }
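+
+ ///////////////////////////////////////////////////////////////////////////
+ // For illustration only (hypothetical instantiations): with
+ // iterpair = iterator_range<char const*> the metafunction above yields
+ // roughly
+ //
+ // detail::token_value_type<iterpair, mpl::vector0<> >::type
+ // // --> iterator_range<char const*>
+ // detail::token_value_type<iterpair, mpl::vector<int, std::string> >::type
+ // // --> boost::variant<iterator_range<char const*>, int, std::string>
+ //
+ // i.e. the iterator pair is always the first alternative of the variant.
+ ///////////////////////////////////////////////////////////////////////////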
+
+ template <typename Iterator, typename AttributeTypes, typename HasState
+ , typename Idtype>
+ struct token : token<Iterator, lex::omit, HasState, Idtype>
+ {
+ private: // precondition assertions
+#if !BOOST_WORKAROUND(BOOST_MSVC, <= 1300)
+ BOOST_STATIC_ASSERT((mpl::is_sequence<AttributeTypes>::value ||
+ is_same<AttributeTypes, lex::omit>::value));
+#endif
+ typedef token<Iterator, lex::omit, HasState, Idtype> base_type;
+
+ protected:
+ // If no additional token value types are given, the token will
+ // hold the plain pair of iterators pointing to the matched range
+ // in the underlying input sequence. Otherwise the token value is
+ // stored as a variant and will again hold the pair of iterators but
+ // is able to hold any of the given data types as well. The conversion
+ // from the iterator pair to the required data type is done when it is
+ // accessed for the first time.
+ typedef iterator_range<Iterator> iterpair_type;
+
+ public:
+ typedef typename base_type::id_type id_type;
+ typedef typename detail::token_value_type<
+ iterpair_type, AttributeTypes
+ >::type token_value_type;
+
+ typedef Iterator iterator_type;
+
+ // default constructed tokens correspond to EOI tokens
+ token() : value_(iterpair_type(iterator_type(), iterator_type())) {}
+
+ // construct an invalid token
+ explicit token(int)
+ : base_type(0)
+ , value_(iterpair_type(iterator_type(), iterator_type())) {}
+
+ token(id_type id, std::size_t state, token_value_type const& value)
+ : base_type(id, state, value)
+ , value_(value) {}
+
+ token(id_type id, std::size_t state, Iterator const& first
+ , Iterator const& last)
+ : base_type(id, state, first, last)
+ , value_(iterpair_type(first, last)) {}
+
+ token_value_type& value() { return value_; }
+ token_value_type const& value() const { return value_; }
+
+#if BOOST_WORKAROUND(BOOST_MSVC, == 1600)
+ // workaround for MSVC10 which has problems copying a default
+ // constructed iterator_range
+ token& operator= (token const& rhs)
+ {
+ if (this != &rhs)
+ {
+ this->base_type::operator=(static_cast<base_type const&>(rhs));
+ if (this->is_valid())
+ value_ = rhs.value_;
+ }
+ return *this;
+ }
+#endif
+
+ protected:
+ token_value_type value_; // token value, by default a pair of iterators
+ };
+
+ ///////////////////////////////////////////////////////////////////////////
+ // tokens are considered equal if their ids match (these are unique)
+ template <typename Iterator, typename AttributeTypes, typename HasState
+ , typename Idtype>
+ inline bool
+ operator== (token<Iterator, AttributeTypes, HasState, Idtype> const& lhs,
+ token<Iterator, AttributeTypes, HasState, Idtype> const& rhs)
+ {
+ return lhs.id() == rhs.id();
+ }
+
+ ///////////////////////////////////////////////////////////////////////////
+ // This overload is needed by the multi_pass/functor_input_policy to
+ // validate a token instance. It has to be defined in the same namespace
+ // as the token class itself to allow ADL to find it.
+ ///////////////////////////////////////////////////////////////////////////
+ template <typename Iterator, typename AttributeTypes, typename HasState
+ , typename Idtype>
+ inline bool
+ token_is_valid(token<Iterator, AttributeTypes, HasState, Idtype> const& t)
+ {
+ return t.is_valid();
+ }
+}}}}
+
+namespace boost { namespace spirit { namespace traits
+{
+ ///////////////////////////////////////////////////////////////////////////
+ // We have to provide specializations for the customization point
+ // assign_to_attribute_from_value to allow extracting the needed value
+ // from the token.
+ ///////////////////////////////////////////////////////////////////////////
+
+ // This is called from the parse function of token_def if the token_def
+ // has been defined to carry a special attribute type
+ template <typename Attribute, typename Iterator, typename AttributeTypes
+ , typename HasState, typename Idtype>
+ struct assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ {
+ static void
+ call(lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> const& t
+ , Attribute& attr)
+ {
+ // The goal of this function is to avoid converting the pair of
+ // iterators (to the matched character sequence) into the token value
+ // of the required type more than once. For this purpose it checks
+ // whether the stored value type is still the default one (the pair
+ // of iterators) and, if so, replaces the pair of iterators with the
+ // converted value to be returned from subsequent calls.
+
+ if (0 == t.value().which()) {
+ // first access to the token value
+ typedef iterator_range<Iterator> iterpair_type;
+ iterpair_type const& ip = boost::get<iterpair_type>(t.value());
+
+ // Interestingly enough we use the assign_to() framework defined in
+ // Spirit.Qi, which allows converting the pair of iterators to almost
+ // any required type (assign_to(), if available, uses the standard
+ // Spirit parsers to do the conversion).
+ spirit::traits::assign_to(ip.begin(), ip.end(), attr);
+
+ // If you get an error during the compilation of the following
+ // assignment expression, you probably forgot to list one or more
+ // types used as token value types (in your token_def<...>
+ // definitions) in your definition of the token class. I.e. any token
+ // value type used for a token_def<...> definition has to be listed
+ // during the declaration of the token type to use. For instance let's
+ // assume we have two token_def's:
+ //
+ // token_def<int> number; number = "...";
+ // token_def<std::string> identifier; identifier = "...";
+ //
+ // Then you'll have to use the following token type definition
+ // (assuming you are using the token class):
+ //
+ // typedef mpl::vector<int, std::string> token_values;
+ // typedef token<base_iter_type, token_values> token_type;
+ //
+ // where: base_iter_type is the iterator type used to expose the
+ // underlying input stream.
+ //
+ // This token_type has to be used as the second template parameter
+ // to the lexer class:
+ //
+ // typedef lexer<base_iter_type, token_type> lexer_type;
+ //
+ // again, assuming you're using the lexer<> template for your
+ // tokenization.
+
+ typedef lex::lexertl::token<
+ Iterator, AttributeTypes, HasState, Idtype> token_type;
+ spirit::traits::assign_to(
+ attr, const_cast<token_type&>(t).value()); // re-assign value
+ }
+ else {
+ // reuse the already assigned value
+ spirit::traits::assign_to(boost::get<Attribute>(t.value()), attr);
+ }
+ }
+ };
+
+ template <typename Attribute, typename Iterator, typename AttributeTypes
+ , typename HasState, typename Idtype>
+ struct assign_to_container_from_value<Attribute
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ : assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ {};
+
+ template <typename Iterator, typename AttributeTypes
+ , typename HasState, typename Idtype>
+ struct assign_to_container_from_value<utree
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ : assign_to_attribute_from_value<utree
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ {};
+
+ template <typename Iterator>
+ struct assign_to_container_from_value<
+ iterator_range<Iterator>, iterator_range<Iterator> >
+ {
+ static void
+ call(iterator_range<Iterator> const& val, iterator_range<Iterator>& attr)
+ {
+ attr = val;
+ }
+ };
+
+ // These are called from the parse function of token_def if the token type
+ // has no special attribute type assigned
+ template <typename Attribute, typename Iterator, typename HasState
+ , typename Idtype>
+ struct assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, mpl::vector0<>, HasState, Idtype> >
+ {
+ static void
+ call(lex::lexertl::token<Iterator, mpl::vector0<>, HasState, Idtype> const& t
+ , Attribute& attr)
+ {
+ // The default type returned by the token_def parser component (if
+ // it has no token value type assigned) is the pair of iterators
+ // to the matched character sequence.
+ spirit::traits::assign_to(t.value().begin(), t.value().end(), attr);
+ }
+ };
+
+// template <typename Attribute, typename Iterator, typename HasState
+// , typename Idtype>
+// struct assign_to_container_from_value<Attribute
+// , lex::lexertl::token<Iterator, mpl::vector0<>, HasState, Idtype> >
+// : assign_to_attribute_from_value<Attribute
+// , lex::lexertl::token<Iterator, mpl::vector0<>, HasState, Idtype> >
+// {};
+
+ // same as above but using mpl::vector<> instead of mpl::vector0<>
+ template <typename Attribute, typename Iterator, typename HasState
+ , typename Idtype>
+ struct assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, mpl::vector<>, HasState, Idtype> >
+ {
+ static void
+ call(lex::lexertl::token<Iterator, mpl::vector<>, HasState, Idtype> const& t
+ , Attribute& attr)
+ {
+ // The default type returned by the token_def parser component (if
+ // it has no token value type assigned) is the pair of iterators
+ // to the matched character sequence.
+ spirit::traits::assign_to(t.value().begin(), t.value().end(), attr);
+ }
+ };
+
+// template <typename Attribute, typename Iterator, typename HasState
+// , typename Idtype>
+// struct assign_to_container_from_value<Attribute
+// , lex::lexertl::token<Iterator, mpl::vector<>, HasState, Idtype> >
+// : assign_to_attribute_from_value<Attribute
+// , lex::lexertl::token<Iterator, mpl::vector<>, HasState, Idtype> >
+// {};
+
+ // This is called from the parse function of token_def if the token
+ // value type has been explicitly omitted (i.e. no attribute value is
+ // used), which essentially means that every attribute gets initialized
+ // using default constructed values.
+ template <typename Attribute, typename Iterator, typename HasState
+ , typename Idtype>
+ struct assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, lex::omit, HasState, Idtype> >
+ {
+ static void
+ call(lex::lexertl::token<Iterator, lex::omit, HasState, Idtype> const& t
+ , Attribute& attr)
+ {
+ // do nothing
+ }
+ };
+
+ template <typename Attribute, typename Iterator, typename HasState
+ , typename Idtype>
+ struct assign_to_container_from_value<Attribute
+ , lex::lexertl::token<Iterator, lex::omit, HasState, Idtype> >
+ : assign_to_attribute_from_value<Attribute
+ , lex::lexertl::token<Iterator, lex::omit, HasState, Idtype> >
+ {};
+
+ // This is called from the parse function of lexer_def_
+ template <typename Iterator, typename AttributeTypes, typename HasState
+ , typename Idtype_, typename Idtype>
+ struct assign_to_attribute_from_value<
+ fusion::vector2<Idtype_, iterator_range<Iterator> >
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ {
+ static void
+ call(lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> const& t
+ , fusion::vector2<Idtype_, iterator_range<Iterator> >& attr)
+ {
+ // The type returned by the lexer_def_ parser components is a
+ // fusion::vector containing the token id of the matched token
+ // and the pair of iterators to the matched character sequence.
+ typedef iterator_range<Iterator> iterpair_type;
+ typedef fusion::vector2<Idtype_, iterator_range<Iterator> >
+ attribute_type;
+
+ iterpair_type const& ip = boost::get<iterpair_type>(t.value());
+ attr = attribute_type(t.id(), ip);
+ }
+ };
+
+ template <typename Iterator, typename AttributeTypes, typename HasState
+ , typename Idtype_, typename Idtype>
+ struct assign_to_container_from_value<
+ fusion::vector2<Idtype_, iterator_range<Iterator> >
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ : assign_to_attribute_from_value<
+ fusion::vector2<Idtype_, iterator_range<Iterator> >
+ , lex::lexertl::token<Iterator, AttributeTypes, HasState, Idtype> >
+ {};
+
+ ///////////////////////////////////////////////////////////////////////////
+ // Overload debug output for a single token; this integrates lexer
+ // tokens with Qi's simple_trace debug facilities
+ template <typename Iterator, typename Attribute, typename HasState
+ , typename Idtype>
+ struct token_printer_debug<
+ lex::lexertl::token<Iterator, Attribute, HasState, Idtype> >
+ {
+ typedef lex::lexertl::token<Iterator, Attribute, HasState, Idtype> token_type;
+
+ template <typename Out>
+ static void print(Out& out, token_type const& val)
+ {
+ out << '[';
+ spirit::traits::print_token(out, val.value());
+ out << ']';
+ }
+ };
+}}}
+
+#endif
diff --git a/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/wrap_action.hpp b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/wrap_action.hpp
new file mode 100644
index 0000000..e128d27
--- /dev/null
+++ b/3rdParty/Boost/src/boost/spirit/home/lex/lexer/lexertl/wrap_action.hpp
@@ -0,0 +1,154 @@
+/*=============================================================================
+ Copyright (c) 2001-2011 Joel de Guzman
+ Copyright (c) 2001-2011 Hartmut Kaiser
+ http://spirit.sourceforge.net/
+
+ Distributed under the Boost Software License, Version 1.0. (See accompanying
+ file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+=============================================================================*/
+#if !defined(BOOST_SPIRIT_WRAP_ACTION_APR_19_2008_0103PM)
+#define BOOST_SPIRIT_WRAP_ACTION_APR_19_2008_0103PM
+
+#if defined(_MSC_VER)
+#pragma once
+#endif
+
+#include <boost/spirit/include/phoenix_core.hpp>
+#include <boost/spirit/include/phoenix_bind.hpp>
+#include <boost/spirit/include/phoenix_scope.hpp>
+
+#include <boost/spirit/home/support/attributes.hpp>
+#include <boost/spirit/home/lex/lexer/pass_flags.hpp>
+
+///////////////////////////////////////////////////////////////////////////////
+namespace boost { namespace spirit { namespace lex { namespace lexertl
+{
+ namespace detail
+ {
+ template <typename FunctionType, typename Iterator, typename Context
+ , typename IdType>
+ struct wrap_action
+ {
+ // plain functions with 5 arguments and function objects (including
+ // phoenix actors) are not touched at all
+ template <typename F>
+ static FunctionType call(F const& f)
+ {
+ return f;
+ }
+
+ // semantic actions with 4 arguments
+ template <typename F>
+ static void arg4_action(F* f, Iterator& start, Iterator& end
+ , BOOST_SCOPED_ENUM(pass_flags)& pass, IdType& id
+ , Context const&)
+ {
+ f(start, end, pass, id);
+ }
+
+ template <typename A0, typename A1, typename A2, typename A3>
+ static FunctionType call(void (*f)(A0, A1, A2, A3))
+ {
+ void (*pf)(void(*)(A0, A1, A2, A3)
+ , Iterator&, Iterator&, BOOST_SCOPED_ENUM(pass_flags)&
+ , IdType&, Context const&) = &wrap_action::arg4_action;
+
+ using phoenix::arg_names::_1;
+ using phoenix::arg_names::_2;
+ using phoenix::arg_names::_3;
+ using phoenix::arg_names::_4;
+ using phoenix::arg_names::_5;
+ return phoenix::bind(pf, f, _1, _2, _3, _4, _5);
+ }
+
+ // semantic actions with 3 arguments
+ template <typename F>
+ static void arg3_action(F* f, Iterator& start, Iterator& end
+ , BOOST_SCOPED_ENUM(pass_flags)& pass, IdType
+ , Context const&)
+ {
+ f(start, end, pass);
+ }
+
+ template <typename A0, typename A1, typename A2>
+ static FunctionType call(void (*f)(A0, A1, A2))
+ {
+ void (*pf)(void(*)(A0, A1, A2), Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&, IdType
+ , Context const&) = &wrap_action::arg3_action;
+
+ using phoenix::arg_names::_1;
+ using phoenix::arg_names::_2;
+ using phoenix::arg_names::_3;
+ using phoenix::arg_names::_4;
+ using phoenix::arg_names::_5;
+ return phoenix::bind(pf, f, _1, _2, _3, _4, _5);
+ }
+
+ // semantic actions with 2 arguments
+ template <typename F>
+ static void arg2_action(F* f, Iterator& start, Iterator& end
+ , BOOST_SCOPED_ENUM(pass_flags)&, IdType, Context const&)
+ {
+ f (start, end);
+ }
+
+ template <typename A0, typename A1>
+ static FunctionType call(void (*f)(A0, A1))
+ {
+ void (*pf)(void(*)(A0, A1), Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&
+ , IdType, Context const&) = &wrap_action::arg2_action;
+
+ using phoenix::arg_names::_1;
+ using phoenix::arg_names::_2;
+ using phoenix::arg_names::_3;
+ using phoenix::arg_names::_4;
+ using phoenix::arg_names::_5;
+ return phoenix::bind(pf, f, _1, _2, _3, _4, _5);
+ }
+
+ // we assume that either both iterators are to be passed to the
+ // semantic action or no iterator at all (i.e. it's not possible
+ // to have a lexer semantic action function taking a single argument).
+
+ // semantic actions with 0 arguments
+ template <typename F>
+ static void arg0_action(F* f, Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&, IdType, Context const&)
+ {
+ f();
+ }
+
+ static FunctionType call(void (*f)())
+ {
+ void (*pf)(void(*)(), Iterator&, Iterator&
+ , BOOST_SCOPED_ENUM(pass_flags)&
+ , IdType, Context const&) = &arg0_action;
+
+ using phoenix::arg_names::_1;
+ using phoenix::arg_names::_2;
+ using phoenix::arg_names::_3;
+ using phoenix::arg_names::_4;
+ using phoenix::arg_names::_5;
+ return phoenix::bind(pf, f, _1, _2, _3, _4, _5);
+ }
+ };
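+
+ ///////////////////////////////////////////////////////////////////////
+ // For illustration only ('on_token' is a hypothetical name): plain
+ // functions taking all five arguments, as well as function objects and
+ // phoenix actors, are forwarded unchanged by call(); the overloads
+ // above adapt the reduced signatures to that interface:
+ //
+ // void on_token(Iterator&, Iterator&,
+ // BOOST_SCOPED_ENUM(pass_flags)&, IdType&); // arg4_action
+ // void on_token(Iterator&, Iterator&,
+ // BOOST_SCOPED_ENUM(pass_flags)&); // arg3_action
+ // void on_token(Iterator&, Iterator&); // arg2_action
+ // void on_token(); // arg0_action
+ ///////////////////////////////////////////////////////////////////////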
+
+ // specialization that skips the wrapping for lexer types that do not
+ // support semantic actions
+ template <typename Iterator, typename Context, typename Idtype>
+ struct wrap_action<unused_type, Iterator, Context, Idtype>
+ {
+ // plain function objects are not touched at all
+ template <typename F>
+ static F const& call(F const& f)
+ {
+ return f;
+ }
+ };
+ }
+
+}}}}
+
+#endif