// lexer_debug_support.cpp -- Boost.Spirit lexer/parser debug-support example.
// Copyright (c) 2001-2011 Hartmut Kaiser
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)

// Uncomment to enable lexertl debug output:
// #define BOOST_SPIRIT_LEXERTL_DEBUG 1
#include <boost/config/warning_disable.hpp>

#include <iostream>
#include <string>

#include <boost/spirit/include/lex_lexertl.hpp>
#include <boost/spirit/include/qi.hpp>
#include <boost/spirit/include/phoenix.hpp>

namespace lex = boost::spirit::lex;
namespace qi = boost::spirit::qi;
namespace phoenix = boost::phoenix;
///////////////////////////////////////////////////////////////////////////////
// Token definitions for the toy language: the keywords `float`/`int`,
// float/int literals, identifiers, whitespace, and the literal '='.
template <typename Lexer>
struct language_tokens : lex::lexer<Lexer>
{
    language_tokens()
    {
        // Regular-expression patterns for each token.
        tok_float = "float";
        tok_int = "int";
        floatlit = "[0-9]+\\.[0-9]*";
        intlit = "[0-9]+";
        ws = "[ \t\n]+";
        identifier = "[a-zA-Z_][a-zA-Z_0-9]*";

        // Whitespace is matched but suppressed: the semantic action sets
        // pass_ignore, so these tokens never reach the parser. (This action
        // is why main() must use an actor_lexer.)
        this->self = ws [lex::_pass = lex::pass_flags::pass_ignore];

        // NOTE: order matters -- the keyword tokens must be registered before
        // `identifier`, otherwise "float"/"int" would be matched as plain
        // identifiers. Likewise floatlit before intlit so "3.4" is not split.
        this->self += tok_float | tok_int | floatlit | intlit | identifier;
        this->self += lex::char_('=');
    }

    // Keyword tokens: no attribute value needed.
    lex::token_def<> tok_float, tok_int;
    // Skipped whitespace (see pass_ignore above).
    lex::token_def<> ws;
    // Literal tokens expose their parsed value as the token attribute
    // (must match the mpl::vector2<double, int> in the token type).
    lex::token_def<double> floatlit;
    lex::token_def<int> intlit;
    lex::token_def<> identifier;
};
///////////////////////////////////////////////////////////////////////////////
// Grammar over the token stream: parses one or more typed declarations of
// the form `float name = <floatlit>` or `int name = <intlit>`.
template <typename Iterator>
struct language_grammar : qi::grammar<Iterator>
{
    // Takes the lexer so its token_defs can be used as parser primitives.
    template <typename Lexer>
    language_grammar(language_tokens<Lexer> const& tok)
      : language_grammar::base_type(declarations)
    {
        declarations = +number;

        // The type keyword selects which literal kind must follow '='.
        number =
                tok.tok_float >> tok.identifier >> '=' >> tok.floatlit
            |   tok.tok_int >> tok.identifier >> '=' >> tok.intlit
            ;

        // Name the rules and enable Spirit.Qi's rule-debugging output,
        // which traces each rule's match attempts on std::cerr.
        declarations.name("declarations");
        number.name("number");
        debug(declarations);
        debug(number);
    }

    qi::rule<Iterator> declarations;
    qi::rule<Iterator> number;
};
  56. ///////////////////////////////////////////////////////////////////////////////
  57. int main()
  58. {
  59. // iterator type used to expose the underlying input stream
  60. typedef std::string::iterator base_iterator_type;
  61. // lexer type
  62. typedef lex::lexertl::actor_lexer<
  63. lex::lexertl::token<
  64. base_iterator_type, boost::mpl::vector2<double, int>
  65. > > lexer_type;
  66. // iterator type exposed by the lexer
  67. typedef language_tokens<lexer_type>::iterator_type iterator_type;
  68. // now we use the types defined above to create the lexer and grammar
  69. // object instances needed to invoke the parsing process
  70. language_tokens<lexer_type> tokenizer; // Our lexer
  71. language_grammar<iterator_type> g (tokenizer); // Our parser
  72. // Parsing is done based on the token stream, not the character
  73. // stream read from the input.
  74. std::string str ("float f = 3.4\nint i = 6\n");
  75. base_iterator_type first = str.begin();
  76. bool r = lex::tokenize_and_parse(first, str.end(), tokenizer, g);
  77. if (r) {
  78. std::cout << "-------------------------\n";
  79. std::cout << "Parsing succeeded\n";
  80. std::cout << "-------------------------\n";
  81. }
  82. else {
  83. std::string rest(first, str.end());
  84. std::cout << "-------------------------\n";
  85. std::cout << "Parsing failed\n";
  86. std::cout << "stopped at: \"" << rest << "\"\n";
  87. std::cout << "-------------------------\n";
  88. }
  89. std::cout << "Bye... :-) \n\n";
  90. return 0;
  91. }