<?php $include_dir="/home/hyper-archives/boost-commit/include"; include("$include_dir/msg-header.inc") ?>
Subject: [Boost-commit] svn:boost r52768 - in trunk/libs/spirit/test: . lex
From: hartmut.kaiser_at_[hidden]
Date: 2009-05-05 09:00:57
Author: hkaiser
Date: 2009-05-05 09:00:56 EDT (Tue, 05 May 2009)
New Revision: 52768
URL: http://svn.boost.org/trac/boost/changeset/52768
Log:
Spirit: Added a new test case
Added:
   trunk/libs/spirit/test/lex/regression001.cpp   (contents, props changed)
Text files modified: 
   trunk/libs/spirit/test/Jamfile |     2 ++                                      
   1 files changed, 2 insertions(+), 0 deletions(-)
Modified: trunk/libs/spirit/test/Jamfile
==============================================================================
--- trunk/libs/spirit/test/Jamfile	(original)
+++ trunk/libs/spirit/test/Jamfile	2009-05-05 09:00:56 EDT (Tue, 05 May 2009)
@@ -106,6 +106,8 @@
     [ run lex/lexertl5.cpp                  : : : : ]
     [ run lex/state_switcher_test.cpp       : : : : ]
 
+    [ run lex/regression001.cpp             : : : : lex_regression001 ]
+
     ;
 
 }
Added: trunk/libs/spirit/test/lex/regression001.cpp
==============================================================================
--- (empty file)
+++ trunk/libs/spirit/test/lex/regression001.cpp	2009-05-05 09:00:56 EDT (Tue, 05 May 2009)
@@ -0,0 +1,58 @@
+
+#include <boost/spirit/include/lex_lexertl.hpp>
+
+#include <iostream>
+#include <string>
+
+using namespace boost::spirit;
+using namespace boost::spirit::lex;
+
+typedef const char * base_iterator;
+
+///////////////////////////////////////////////////////////////////////////////
+//  Token definition
+///////////////////////////////////////////////////////////////////////////////
+// Minimal token set for the regression test: 'eol' matches a single newline
+// and 'any' matches a maximal run of non-newline characters, so together the
+// two patterns cover every possible input character.
+template <typename Lexer>
+struct position_helper_tokens : lexer<Lexer> 
+{
+    // Registers both token definitions with the lexer on construction.
+    position_helper_tokens()
+    {
+        // define tokens and associate them with the lexer
+        eol = "\n";
+        any = "[^\n]+";
+
+        // associate tokens with the lexer
+        this->self
+            =   eol
+            |   any
+            ;
+    }
+
+    // token_def<> carries no attribute value -- the test only iterates tokens
+    token_def<> any, eol;
+};
+
+// Regression test driver: tokenizes a fixed in-memory string and discards
+// every token. Success is simply running to completion and returning 0 --
+// there are no assertions; a hang or crash is the failure mode.
+int main(int argc, char* argv[])
+{
+    // hard-coded test input (no file I/O despite argv being declared;
+    // "test" contains no newline, so it should match the 'any' pattern)
+    std::string str ("test");
+
+    // token type over const char* iterators; 'omitted' suppresses the token
+    // value, and boost::mpl::false_ presumably disables per-token lexer
+    // state support -- verify against the lexertl::token documentation
+    typedef lexertl::token<base_iterator, omitted, boost::mpl::false_> token_type;
+
+    // lexer type
+    typedef lexertl::actor_lexer<token_type> lexer_type;
+
+    // create the lexer object instance needed to invoke the lexical analysis
+    position_helper_tokens<lexer_type> position_helper_lexer;
+
+    // tokenize the given string, all generated tokens are discarded
+    base_iterator first = str.c_str(); 
+    base_iterator last = &first[str.size()];
+
+    for(lexer_type::iterator_type i = position_helper_lexer.begin(first, last); 
+        i != position_helper_lexer.end(); i++ )
+    {
+        // intentionally empty -- iterating alone exercises the lexer
+    }
+    return 0;
+}
+