Comparison: trunk/src/dil/Lexer.d @ 487:bccca748d745

Added 'tokenize' command.
author Aziz Köksal <aziz.koeksal@gmail.com>
date Sat, 01 Dec 2007 20:20:44 +0100
parents ea8c7459f1c4
children a7291d3ee9d7

--- trunk/src/dil/Lexer.d  486:bd176bc73e43
+++ trunk/src/dil/Lexer.d  487:bccca748d745
@@ -2533,10 +2533,23 @@
     while (nextToken() != TOK.EOF)
     {}
     return head;
   }
 
+  /// Scan the whole text until EOF is encountered.
+  void scanAll()
+  {
+    while (nextToken() != TOK.EOF)
+    {}
+  }
+
+  /// HEAD -> Newline -> First Token
+  Token* firstToken()
+  {
+    return this.head.next.next;
+  }
+
   static void loadKeywords(ref Identifier[string] table)
   {
     foreach(k; keywords)
       table[k.str] = k;
   }