changeset 619:933cd8d24467

Renamed Parser.lx to Parser.lexer.
author Aziz Köksal <aziz.koeksal@gmail.com>
date Fri, 11 Jan 2008 00:49:05 +0100
parents 07946b379006
children f15b054bb27e
files trunk/src/cmd/Generate.d trunk/src/dil/parser/ImportParser.d trunk/src/dil/parser/Parser.d trunk/src/dil/semantic/Module.d
diffstat 4 files changed, 15 insertions(+), 14 deletions(-)
--- a/trunk/src/cmd/Generate.d	Fri Jan 11 00:42:35 2008 +0100
+++ b/trunk/src/cmd/Generate.d	Fri Jan 11 00:49:05 2008 +0100
@@ -297,7 +297,7 @@
   auto sourceText = loadFile(filePath);
   auto parser = new Parser(sourceText, filePath);
   auto root = parser.start();
-  auto lx = parser.lx;
+  auto lx = parser.lexer;
 
   auto token = lx.head;
 
--- a/trunk/src/dil/parser/ImportParser.d	Fri Jan 11 00:42:35 2008 +0100
+++ b/trunk/src/dil/parser/ImportParser.d	Fri Jan 11 00:49:05 2008 +0100
@@ -62,7 +62,7 @@
     uint level = 1;
     while (1)
     {
-      lx.peek(next);
+      lexer.peek(next);
       if (next.type == opening)
         ++level;
       else if (next.type == closing && --level == 0)
--- a/trunk/src/dil/parser/Parser.d	Fri Jan 11 00:42:35 2008 +0100
+++ b/trunk/src/dil/parser/Parser.d	Fri Jan 11 00:49:05 2008 +0100
@@ -25,7 +25,7 @@
 +/
 class Parser
 {
-  Lexer lx;
+  Lexer lexer;
   Token* token; /// Current non-whitespace token.
   Token* prevToken; /// Previous non-whitespace token.
 
@@ -34,6 +34,7 @@
 
   ImportDeclaration[] imports; /// ImportDeclarations in the source text.
 
+  // Attributes are evaluated in the parsing phase.
   LinkageType linkageType;
   Protection protection;
   StorageClass storageClass;
@@ -51,7 +52,7 @@
   this(char[] srcText, string filePath, InfoManager infoMan = null)
   {
     this.infoMan = infoMan;
-    lx = new Lexer(srcText, filePath, infoMan);
+    lexer = new Lexer(srcText, filePath, infoMan);
   }
 
   protected void init()
@@ -65,8 +66,8 @@
     prevToken = token;
     do
     {
-      lx.nextToken();
-      token = lx.token;
+      lexer.nextToken();
+      token = lexer.token;
     } while (token.isWhitespace) // Skip whitespace
   }
 
@@ -116,7 +117,7 @@
       // Restore members.
       token      = oldToken;
       prevToken  = oldPrevToken;
-      lx.token   = oldToken;
+      lexer.token   = oldToken;
       errorCount = oldCount;
       success = false;
     }
@@ -146,7 +147,7 @@
   {
     Token* next = token;
     do
-      lx.peek(next);
+      lexer.peek(next);
     while (next.isWhitespace) // Skip whitespace
     return next.type;
   }
@@ -155,7 +156,7 @@
   {
     assert(next !is null);
     do
-      lx.peek(next);
+      lexer.peek(next);
     while (next.isWhitespace) // Skip whitespace
     return next.type;
   }
@@ -2728,7 +2729,7 @@
       if (!trying)
       {
         // Insert a dummy token and don't consume current one.
-        begin = lx.insertEmptyTokenBefore(token);
+        begin = lexer.insertEmptyTokenBefore(token);
         this.prevToken = begin;
       }
     }
@@ -3430,7 +3431,7 @@
         if (!trying)
         {
           // Insert a dummy token and don't consume current one.
-          begin = lx.insertEmptyTokenBefore(token);
+          begin = lexer.insertEmptyTokenBefore(token);
           this.prevToken = begin;
         }
       }
@@ -3597,14 +3598,14 @@
   Loop:
     while (1)
     {
-      lx.peek(next);
+      lexer.peek(next);
       switch (next.type)
       {
       case T.RParen:
         if (--level == 0)
         { // Last, closing parentheses found.
           do
-            lx.peek(next);
+            lexer.peek(next);
           while (next.isWhitespace)
           break Loop;
         }
--- a/trunk/src/dil/semantic/Module.d	Fri Jan 11 00:42:35 2008 +0100
+++ b/trunk/src/dil/semantic/Module.d	Fri Jan 11 00:49:05 2008 +0100
@@ -100,7 +100,7 @@
   /// Returns true if there are errors in the source file.
   bool hasErrors()
   {
-    return parser.errors.length || parser.lx.errors.length;
+    return parser.errors.length || parser.lexer.errors.length;
   }
 
   string[] getImports()