PH Logo
Logo interpreter modeled after UCB Logo.
/Users/paul/Documents/phlogo/core/tokenizertest.cpp
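// Unit tests for the phlogo Tokenizer (Boost.Test).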
#define BOOST_TEST_MAIN
#include <boost/test/unit_test.hpp>

#include "Tokenizer.h"
#include "Verbs.h"
#include "Exceptions.h"

using namespace std;
using namespace phlogo;
using namespace boost;

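// Integer literals should come back from tokenizeToString unchanged.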
BOOST_AUTO_TEST_CASE( numbers )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1"), "1");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("20"), "20");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1000"), "1000");
}

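// Infix + is rewritten to prefix form ($+ a b); plain numbers around it stay put.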
BOOST_AUTO_TEST_CASE( discrimination )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 1"), "1 1");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 + 1"), "$+ 1 1");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 1 + 1 1"), "1 $+ 1 1 1");
}

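// Parenthesised subexpressions keep their parentheses as separate tokens.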
BOOST_AUTO_TEST_CASE( subexpr )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 (1) 1"), "1 ( 1 ) 1");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 + 1 (2 2) 1"), "$+ 1 1 ( 2 2 ) 1");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 (1 + 1) 1"), "1 ( $+ 1 1 ) 1");
}

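// Infix arithmetic is converted to prefix whether or not the operator has spaces around it.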
BOOST_AUTO_TEST_CASE( numberexpr )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1+2"), "$+ 1 2");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1+(3+4)"), "$+ 1 ( $+ 3 4 )");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("2 + 3+100"), "$+ $+ 2 3 100");
}

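// Quoted words lose their leading quote; list [] and array {} contents come back comma-separated, nesting preserved.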
BOOST_AUTO_TEST_CASE( tokens )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 2"), "1 2");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("\"abced \"fghi"), "abced fghi");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[2]"), "[2]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[2 3]"), "[2,3]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[aaa bbb]"), "[aaa,bbb]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[[aaa bbb] [ccc ddd]]"), "[[aaa,bbb],[ccc,ddd]]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{2}"), "{2}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{2 3}"), "{2,3}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{aaa bbb}"), "{aaa,bbb}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{{aaa bbb} {ccc ddd}}"), "{{aaa,bbb},{ccc,ddd}}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{[aaa bbb] {ccc ddd}}"), "{[aaa,bbb],{ccc,ddd}}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[{aaa bbb} {ccc ddd}]"), "[{aaa,bbb},{ccc,ddd}]");
}

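// Known verbs such as print and make are replaced by $-prefixed indices ($1.1, $1.3).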
BOOST_AUTO_TEST_CASE( smoke )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("print \"X"), "$1.1 X");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("print \"X print \"Y"), "$1.1 X $1.1 Y");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("print \"X print \"Y print \"Z"), "$1.1 X $1.1 Y $1.1 Z");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("print \"X make \"Y [2]"), "$1.1 X $1.3 Y [2]");
}

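// An unquoted word that is not a known verb should raise dont_know_how_exception.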
BOOST_AUTO_TEST_CASE( error )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        try {
                tk.tokenizeToString("print hello");
                BOOST_CHECK( false );
        }
        catch ( dont_know_how_exception &x ) {
        }
}

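// Everything after a semicolon is a comment and is dropped.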
BOOST_AUTO_TEST_CASE( comments )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("print \"hello ; This is a comment"), "$1.1 hello");
}

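// A parenthesised call keeps all of its numeric arguments, e.g. (array 4 1).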
BOOST_AUTO_TEST_CASE( multinumericargs )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("print (array 4 1)"), "$1.1 ( $2.7 4 1 )");
}

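// Nested lists and arrays are compressed to comma-separated form with no extra whitespace.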
BOOST_AUTO_TEST_CASE( compressLists )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("[hello world]"), "[hello,world]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[hello]"), "[hello]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("[[[hello world] x]]"), "[[[hello,world],x]]");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{{{hello world} x}}"), "{{{hello,world},x}}");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("{{[hello world] x}}"), "{{[hello,world],x}}");
}

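// Quote marks on words inside a list are stripped.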
BOOST_AUTO_TEST_CASE( literalList )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("[\"hello \"world]"), "[hello,world]");
}

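// Extra whitespace around an operator does not change the prefix rewrite.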
BOOST_AUTO_TEST_CASE( tokenizeExpression1 )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("1+2"), "$+ 1 2");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1+  2"), "$+ 1 2");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1+   2"), "$+ 1 2");

        // but leave numerics with spaces alone.
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 2"), "1 2");

        // leave variables with spaces alone
        BOOST_CHECK_EQUAL(tk.tokenizeToString("1 :X"), "1 :X");

        // and literals with spaces
        BOOST_CHECK_EQUAL(tk.tokenizeToString("\"aaa :X"), "aaa :X");
}

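// Verbs called with no arguments inside parentheses tokenize to their bare indices.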
BOOST_AUTO_TEST_CASE( tokenizeExpression2 )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("(gensym)"), "$2.13");
        BOOST_CHECK_EQUAL(tk.tokenizeToString("(gensym gensym word \"X \"Y)"), "$2.13 $2.13 $2.1 X Y");
}

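// A parenthesised verb call can serve as the left operand of an infix operator.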
BOOST_AUTO_TEST_CASE( tokenizeExpression3 )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("(thing :var)+1"), "$+ ( $1.4 :var ) 1");
}

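// A list argument inside a parenthesised verb call comes through in compressed form.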
BOOST_AUTO_TEST_CASE( tokenizeExpressionWithLists )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("(mdarray [1 2])"), "$2.8 [1,2]");
}

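// Variable names may contain a dot.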
BOOST_AUTO_TEST_CASE( varhasdot )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString(":dot.var"), ":dot.var");
}

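// Variable names may contain digits.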
BOOST_AUTO_TEST_CASE( varhasnum )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString(":word1"), ":word1");
}

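// Quoted words may contain a dot.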
BOOST_AUTO_TEST_CASE( literalhasdot )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        BOOST_CHECK_EQUAL(tk.tokenizeToString("\"dot.var"), "dot.var");
}

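// Unterminated or mismatched list brackets should raise dont_know_how_exception.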
BOOST_AUTO_TEST_CASE( listErrors )
{
        Verbs verbs(0, 0);
        Tokenizer tk(&verbs);

        try {
                tk.tokenizeToString("[hello world");
                BOOST_CHECK( false );
        }
        catch ( dont_know_how_exception &x ) {
        }

        try {
                tk.tokenizeToString("{[hello world}");
                // The mismatched close brace should also throw; fail the test if it does not.
                BOOST_CHECK( false );
        }
        catch ( dont_know_how_exception &x ) {
        }
}