From: grafikrobot_at_[hidden]
Date: 2008-04-13 19:55:59
Author: grafik
Date: 2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
New Revision: 44388
URL: http://svn.boost.org/trac/boost/changeset/44388
Log:
Move tokenizer tests into canonical test subdir structure.
Added:
   trunk/libs/tokenizer/test/
   trunk/libs/tokenizer/test/Jamfile.v2   (contents, props changed)
   trunk/libs/tokenizer/test/examples.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/examples.cpp
   trunk/libs/tokenizer/test/simple_example_1.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/simple_example_1.cpp
   trunk/libs/tokenizer/test/simple_example_2.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/simple_example_2.cpp
   trunk/libs/tokenizer/test/simple_example_3.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/simple_example_3.cpp
   trunk/libs/tokenizer/test/simple_example_4.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/simple_example_4.cpp
   trunk/libs/tokenizer/test/simple_example_5.cpp
      - copied unchanged from r44376, /trunk/libs/tokenizer/simple_example_5.cpp
Removed:
   trunk/libs/tokenizer/examples.cpp
   trunk/libs/tokenizer/simple_example_1.cpp
   trunk/libs/tokenizer/simple_example_2.cpp
   trunk/libs/tokenizer/simple_example_3.cpp
   trunk/libs/tokenizer/simple_example_4.cpp
   trunk/libs/tokenizer/simple_example_5.cpp
Text files modified: 
   trunk/status/Jamfile.v2 |    10 +---------                              
   1 file changed, 1 insertion(+), 9 deletions(-)
Deleted: trunk/libs/tokenizer/examples.cpp
==============================================================================
--- trunk/libs/tokenizer/examples.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,156 +0,0 @@
-// Boost tokenizer examples  -------------------------------------------------//
-
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org for updates, documentation, and revision history.
-
-#include <iostream>
-#include <iterator>
-#include <string>
-#include <boost/tokenizer.hpp>
-#include <boost/array.hpp>
-
-#include <boost/test/minimal.hpp>
-
-int test_main( int /*argc*/, char* /*argv*/[] )
-{
-  using namespace std;
-  using namespace boost;
-  
-  // Use tokenizer
-  {
-        const string test_string = ";;Hello|world||-foo--bar;yow;baz|";
-        string answer[] = { "Hello", "world",  "foo", "bar", "yow",  "baz" };
-        typedef tokenizer<char_separator<char> > Tok;
-        char_separator<char> sep("-;|");
-        Tok t(test_string, sep);
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));       
-  }
-  {
-        const string test_string = ";;Hello|world||-foo--bar;yow;baz|";
-        string answer[] = { "", "", "Hello", "|", "world", "|", "", "|", "",
-                                                "foo", "", "bar", "yow", "baz", "|", "" };
-        typedef tokenizer<char_separator<char> > Tok;
-        char_separator<char> sep("-;", "|", boost::keep_empty_tokens);
-        Tok t(test_string, sep);
-    BOOST_REQUIRE(equal(t.begin(), t.end(), answer));
-  }
-  {
-    const string test_string = "This,,is, a.test..";
-    string answer[] = {"This","is","a","test"};
-    typedef tokenizer<> Tok;
-    Tok t(test_string);
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));
-  }
-
-  {
-    const string test_string = "Field 1,\"embedded,comma\",quote \\\", escape \\\\";
-    string answer[] = {"Field 1","embedded,comma","quote \""," escape \\"};
-    typedef tokenizer<escaped_list_separator<char> > Tok;
-    Tok t(test_string);
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));
-
-  }
-
-  {
-    const string test_string = ",1,;2\\\";3\\;,4,5^\\,\'6,7\';";
-    string answer[] = {"","1","","2\"","3;","4","5\\","6,7",""};
-    typedef tokenizer<escaped_list_separator<char> > Tok;
-    escaped_list_separator<char> sep("\\^",",;","\"\'");
-    Tok t(test_string,sep);
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));
-
-  }
-
-  {
-    const string test_string = "12252001";
-    string answer[] = {"12","25","2001"};
-    typedef tokenizer<offset_separator > Tok;
-    boost::array<int,3> offsets = {{2,2,4}};
-    offset_separator func(offsets.begin(),offsets.end());
-    Tok t(test_string,func);
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));
-
-  }
-
-  // Use token_iterator_generator
-  {
-    
-    const string test_string = "This,,is, a.test..";
-    string answer[] = {"This","is","a","test"};
-    typedef token_iterator_generator<char_delimiters_separator<char> >::type Iter;
-    Iter begin = make_token_iterator<string>(test_string.begin(),
-      test_string.end(),char_delimiters_separator<char>());
-    Iter end;
-    BOOST_REQUIRE(equal(begin,end,answer));
-  }
-
-  {
-    const string test_string = "Field 1,\"embedded,comma\",quote \\\", escape \\\\";
-    string answer[] = {"Field 1","embedded,comma","quote \""," escape \\"};
-    typedef token_iterator_generator<escaped_list_separator<char> >::type Iter;
-    Iter begin = make_token_iterator<string>(test_string.begin(),
-      test_string.end(),escaped_list_separator<char>());
-    Iter begin_c(begin);
-    Iter end;
-    BOOST_REQUIRE(equal(begin,end,answer));
-
-    while(begin_c != end)
-    {
-       BOOST_REQUIRE(begin_c.at_end() == 0);
-       ++begin_c;
-    }
-    BOOST_REQUIRE(begin_c.at_end());
-
-  }
-
-  {
-    const string test_string = "12252001";
-    string answer[] = {"12","25","2001"};
-    typedef token_iterator_generator<offset_separator>::type Iter;
-    boost::array<int,3> offsets = {{2,2,4}};
-    offset_separator func(offsets.begin(),offsets.end());
-    Iter begin = make_token_iterator<string>(test_string.begin(),
-      test_string.end(),func);
-    Iter end= make_token_iterator<string>(test_string.end(),
-      test_string.end(),func);
-    BOOST_REQUIRE(equal(begin,end,answer));
-
-  }
-  
-  // Test copying
-  {
-    const string test_string = "abcdef";
-    token_iterator_generator<offset_separator>::type beg, end, other;
-    boost::array<int,3> ar = {{1,2,3}};
-    offset_separator f(ar.begin(),ar.end());
-    beg = make_token_iterator<string>(test_string.begin(),test_string.end(),f);
-    
-    ++beg;
-    other = beg;
-    ++other;
-
-    BOOST_REQUIRE(*beg=="bc");
-    BOOST_REQUIRE(*other=="def");
-    
-    other = make_token_iterator<string>(test_string.begin(),
-        test_string.end(),f);
-
-    BOOST_REQUIRE(*other=="a");
-  }
-
-  // Test non-default constructed char_delimiters_separator
-  {
-    const string test_string = "how,are you, doing";
-    string answer[] = {"how",",","are you",","," doing"};
-    tokenizer<> t(test_string,char_delimiters_separator<char>(true,",",""));
-    BOOST_REQUIRE(equal(t.begin(),t.end(),answer));
-  }
-
-  return 0;
-}
-
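
The least obvious of the moved tests is the char_separator case with boost::keep_empty_tokens above. A minimal standalone sketch of that behaviour, assuming only <boost/tokenizer.hpp> and no Boost.Test harness, would look like this:

    #include <iostream>
    #include <string>
    #include <boost/tokenizer.hpp>

    int main()
    {
        const std::string s = ";;Hello|world||-foo--bar;yow;baz|";

        // Drop '-' and ';', keep '|' as its own token, and report empty
        // tokens instead of silently skipping them.
        boost::char_separator<char> sep("-;", "|", boost::keep_empty_tokens);
        typedef boost::tokenizer<boost::char_separator<char> > Tok;
        Tok tok(s, sep);

        for (Tok::iterator it = tok.begin(); it != tok.end(); ++it)
            std::cout << '"' << *it << "\"\n";

        return 0;
    }

Run on the test string, this prints the same sequence as the answer array in the deleted test: "", "", "Hello", "|", "world", "|", "", "|", "", "foo", "", "bar", "yow", "baz", "|", "".
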
Deleted: trunk/libs/tokenizer/simple_example_1.cpp
==============================================================================
--- trunk/libs/tokenizer/simple_example_1.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,25 +0,0 @@
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org/libs/tokenizer for documenation
-
-
-// simple_example_1.cpp
-#include<iostream>
-#include<boost/tokenizer.hpp>
-#include<string>
-
-int main(){
-   using namespace std;
-   using namespace boost;
-   string s = "This is,  a test";
-   tokenizer<> tok(s);
-   for(tokenizer<>::iterator beg=tok.begin(); beg!=tok.end();++beg){
-       cout << *beg << "\n";
-   }
-   return 0;
-}
-
Deleted: trunk/libs/tokenizer/simple_example_2.cpp
==============================================================================
--- trunk/libs/tokenizer/simple_example_2.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,24 +0,0 @@
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org/libs/tokenizer for documenation
-
-// simple_example_2.cpp
-#include<iostream>
-#include<boost/tokenizer.hpp>
-#include<string>
-
-int main(){
-   using namespace std;
-   using namespace boost;
-   string s = "Field 1,\"putting quotes around fields, allows commas\",Field 3";
-   tokenizer<escaped_list_separator<char> > tok(s);
-   for(tokenizer<escaped_list_separator<char> >::iterator beg=tok.begin(); beg!=tok.end();++beg){
-       cout << *beg << "\n";
-   }
-   return 0;
-}
-
Deleted: trunk/libs/tokenizer/simple_example_3.cpp
==============================================================================
--- trunk/libs/tokenizer/simple_example_3.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,25 +0,0 @@
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org/libs/tokenizer for documenation
-
-// simple_example_3.cpp
-#include<iostream>
-#include<boost/tokenizer.hpp>
-#include<string>
-
-int main(){
-   using namespace std;
-   using namespace boost;
-   string s = "12252001";
-   int offsets[] = {2,2,4};
-   offset_separator f(offsets, offsets+3);
-   tokenizer<offset_separator> tok(s,f);
-   for(tokenizer<offset_separator>::iterator beg=tok.begin(); beg!=tok.end();++beg){
-       cout << *beg << "\n";
-   }
-   return 0;
-}
Deleted: trunk/libs/tokenizer/simple_example_4.cpp
==============================================================================
--- trunk/libs/tokenizer/simple_example_4.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,24 +0,0 @@
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org/libs/tokenizer for documenation
-
-// simple_example_4.cpp
-#include<iostream>
-#include<boost/tokenizer.hpp>
-#include<string>
-
-int main(){
-   using namespace std;
-   using namespace boost;
-   string s = "This is,  a test";
-   tokenizer<char_delimiters_separator<char> > tok(s);
-   for(tokenizer<char_delimiters_separator<char> >::iterator beg=tok.begin(); beg!=tok.end();++beg){
-       cout << *beg << "\n";
-   }
-   return 0;
-}
-
Deleted: trunk/libs/tokenizer/simple_example_5.cpp
==============================================================================
--- trunk/libs/tokenizer/simple_example_5.cpp	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
+++ (empty file)
@@ -1,34 +0,0 @@
-// (c) Copyright John R. Bandela 2001. 
-
-// Distributed under the Boost Software License, Version 1.0. (See
-// accompanying file LICENSE_1_0.txt or copy at
-// http://www.boost.org/LICENSE_1_0.txt)
-
-// See http://www.boost.org/libs/tokenizer for documenation
-
-/// simple_example_5.cpp
-#include<iostream>
-#include<boost/token_iterator.hpp>
-#include<string>
-
-#ifdef __BORLANDC__
-// compiler bug fix:
-template class boost::token_iterator_generator<boost::offset_separator>::type;
-#endif
-
-int main(){
-   using namespace std;
-   using namespace boost;
-   string s = "12252001";
-   int offsets[] = {2,2,4};
-   offset_separator f(offsets, offsets+3);
-   typedef token_iterator_generator<offset_separator>::type Iter;
-   Iter beg = make_token_iterator<string>(s.begin(),s.end(),f);
-   Iter end = make_token_iterator<string>(s.end(),s.end(),f); 
-   // The above statement could also have been what is below
-   // Iter end;
-   for(;beg!=end;++beg){
-     cout << *beg << "\n";
-   }
-   return 0;
-}
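
The comment near the end of the deleted simple_example_5.cpp notes that the explicit past-the-end iterator could instead be default-constructed. A minimal sketch of that variant, reusing the same offset_separator setup, would be:

    #include <iostream>
    #include <string>
    #include <boost/token_iterator.hpp>

    int main()
    {
        using namespace boost;

        std::string s = "12252001";
        int offsets[] = {2, 2, 4};
        offset_separator f(offsets, offsets + 3);

        typedef token_iterator_generator<offset_separator>::type Iter;
        Iter beg = make_token_iterator<std::string>(s.begin(), s.end(), f);
        Iter end;  // default-constructed iterator serves as past-the-end

        for (; beg != end; ++beg)
            std::cout << *beg << "\n";  // prints 12, 25, 2001

        return 0;
    }
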
Added: trunk/libs/tokenizer/test/Jamfile.v2
==============================================================================
--- (empty file)
+++ trunk/libs/tokenizer/test/Jamfile.v2	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
@@ -0,0 +1,15 @@
+#~ Copyright Rene Rivera 2008
+#~ Distributed under the Boost Software License, Version 1.0.
+#~ (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
+
+import testing ;
+
+test-suite tokenizer
+:   [ run examples.cpp
+        /boost/test//boost_test_exec_monitor/<link>static ]
+    [ run simple_example_1.cpp ]
+    [ run simple_example_2.cpp ]
+    [ run simple_example_3.cpp ]
+    [ run simple_example_4.cpp ]
+    [ run simple_example_5.cpp ]
+    ;
Modified: trunk/status/Jamfile.v2
==============================================================================
--- trunk/status/Jamfile.v2	(original)
+++ trunk/status/Jamfile.v2	2008-04-13 19:55:58 EDT (Sun, 13 Apr 2008)
@@ -89,6 +89,7 @@
 build-project ../libs/test/test ;              # test-suite test
 build-project ../libs/thread/test ;            # test-suite thread
 build-project ../libs/timer/test ;             # test-suite timer
+build-project ../libs/tokenizer/test ;         # test-suite tokenizer
 build-project ../libs/tr1/test ;               # test-suite tr1
 build-project ../libs/tuple/test ;             # test-suite tuple
 build-project ../libs/type_traits/test ;       # test-suite type_traits
@@ -148,12 +149,3 @@
           [ compile-fail libs/config/test/threads/test_thread_fail1.cpp ]
           [ compile-fail libs/config/test/threads/test_thread_fail2.cpp ]
         ;
-
-    test-suite tokenizer
-        : [ run libs/tokenizer/examples.cpp test_exec_monitor ]
-          [ run libs/tokenizer/simple_example_1.cpp ]
-          [ run libs/tokenizer/simple_example_2.cpp ]
-          [ run libs/tokenizer/simple_example_3.cpp ]
-          [ run libs/tokenizer/simple_example_4.cpp ]
-          [ run libs/tokenizer/simple_example_5.cpp ]
-        ;