diff parser/Parser.d @ 144:6e6355fb5f0f

- Parse nested attributes.
- Create classes and interfaces in the AST.
- Update AstPrinter to print attributes, classes and interfaces.
(An example of the newly accepted syntax is sketched just below the header.)
author Anders Johnsen <skabet@gmail.com>
date Mon, 21 Jul 2008 17:41:40 +0200
parents d76cc5cad4fc
children 8c09fdaa724e
line wrap: on
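
To make the summary above concrete, the fragment below sketches the kind of source this revision is meant to accept: attributes applied to a single declaration, to the rest of a scope via ':', and to a braced block, plus class and interface declarations with base lists. This is an illustrative sketch written against the grammar implemented in this file, not a sample from the repository; the identifiers (example, counter, IComparable, IPrintable, Shape, Circle) are invented and the // comments are annotations for the reader.

    module example;

    public:                                         // ':' attribute, covers the rest of this scope
    static {                                        // braced attribute, covers the enclosed declarations
        int counter = 0;
    }

    interface IPrintable : IComparable              // super-interface list, via actOnInterfaceBaseClass
    {
        int print();
    }

    private final class Circle : Shape, IPrintable  // base class and interfaces, via actOnClassBaseClass
    {
        int radius = 2;                             // member declaration with initializer
    }

Attributes collected before a declaration end up in the Attribute handed to parseDecl; a following ':' or '{' decides how long they stay in effect through the new Att nesting levels (Single, Scope, All).
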
line diff
--- a/parser/Parser.d	Mon Jul 21 01:05:20 2008 +0200
+++ b/parser/Parser.d	Mon Jul 21 17:41:40 2008 +0200
@@ -35,163 +35,105 @@
         this.action = act;
 
         Module m;
-        if (lexer.peek.type == Tok.Module)
+        if ( isa(Tok.Module) )
         {
-            Token _module = lexer.next;
+            Token _module = next;
             ModuleName name = parseModuleName();
             m = action.actOnModule(_module, sm.getText(name.asRange()));
             require(Tok.Seperator);
         }
         else
         {
-            SLoc loc = lexer.peek.location;
+            SLoc loc = peek.location;
             m = action.actOnImplicitModule(loc, sm.getFile(loc));
         }
 
-        Attribute att;
-        while (lexer.peek.type != Tok.EOF)
-            foreach (d; parseDeclDef(&att))
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) )
+        {
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            foreach (d; parseDeclDef(nes[$-1].a))
                 action.actOnModuleDecl(m, d);
 
+            nes = parseAttributeScope(nes);
+        }
+
         return m;
     }
 
 private:
-    Decl[] parseDeclDef(Attribute* a)
+    Decl[] parseDeclDef(Attribute a)
     {
-        Token t = lexer.peek;
-        if (t.type == Tok.Import)
+        if ( isa (Tok.Import) )
             return parseImports();
-        else
-            return [parseDecl(a)];
+
+        return [parseDecl(a)];
     }
 
-    Decl parseDecl(Attribute* all_res)
+    Decl parseDecl(Attribute att)
     {
-        Token t = lexer.peek;
-
-        Attribute att = *all_res;
-
-        while ( t.isAttribute )
-        {
-            Attribute* a = &att;
-            bool sco;
-            if(lexer.peek(1).type == Tok.Colon)
-                sco = true;
-
-LSwitch:    switch(t.type)
-            {
-                case Tok.Public:
-                    a.setProtection(Protection.Public);
-                    break;
-                case Tok.Private:
-                    a.setProtection(Protection.Private);
-                    break;
-                case Tok.Package:
-                    a.setProtection(Protection.Package);
-                    break;
-                case Tok.Protected:
-                    a.setProtection(Protection.Protected);
-                    break;
-                case Tok.Export:
-                    a.setProtection(Protection.Export);
-                    break;
-                case Tok.Static:
-                    a.setStatic;
-                    break;
-                case Tok.Final:
-                    a.setFinal;
-                    break;
-                case Tok.Const:
-                    a.setConst;
-                    break;
-                case Tok.Abstract:
-                    a.setAbstract;
-                    break;
-                case Tok.Override:
-                    a.setOverride;
-                    break;
-                case Tok.Depracted:
-                    a.setDepracted;
-                    break;
-                case Tok.Auto:
-                    a.setAuto;
-                    break;
-                case Tok.Extern:
-                    Extern e = parseLinkageType;
-                    a.setExtern(e);
-                    break;
-            }
-            if(sco)
-            {
-                sco = false;
-                a = all_res;
-                lexer.next;
-                goto LSwitch;
-            }
-            lexer.next;
-            t = lexer.peek;
-        }
+        Token t = peek;
 
         if (t.isBasicType || t.isIdentifier)
         {
             Id type;
             Id iden;
             int len = peekParseType;
-            if(lexer.peek(len).type == Tok.Identifier && len != 0)
+            if (peek(len).type == Tok.Identifier && len != 0)
             {
                 type = parseType;
 parseDeclAfterInvalidType:
                 iden = Id(require(Tok.Identifier));
-                Token next = lexer.peek();
-                if (next.type == Tok.Seperator)
+                if ( isa(Tok.Seperator) )
                 {
-                    Token sep = lexer.next();
+                    Token sep = next;
                     return action.actOnDeclarator(type, iden, null, att);
                 }
-                else if (next.type == Tok.Assign)
+                else if ( isa(Tok.Assign) )
                 {
-                    Token assign = lexer.next();
+                    Token assign = next();
                     Exp exp = parseExpression();
                     require(Tok.Seperator);
                     return action.actOnDeclarator(type, iden, exp, att);
                 }
-                else if (next.type == Tok.OpenParentheses)
+                else if ( isa(Tok.OpenParentheses) )
                     return parseFunc(type, iden, att);
                 else
                     messages.report(UnexpectedTok, next.location).arg(next.getType);
                 return null;
             }
-            t = lexer.peek(len);
+            t = peek(len);
             messages.report(InvalidDeclType, t.location)
                 .arg(sm.getText(t.asRange));
             while(len--)
-                lexer.next;
-            while(lexer.peek.type != Tok.Identifier)
-                lexer.next;
-            type = Id(lexer.peek);
+                next;
+            while(peek.type != Tok.Identifier)
+                next;
+            type = Id(peek);
             goto parseDeclAfterInvalidType;
         }
         else if (t.type == Tok.Struct)
         {
-            Id type = Id(lexer.next);
+            Id type = Id(next);
             Id iden = Id(require(Tok.Identifier));
             
             return parseStruct(type, iden, att);
         }
         else if (t.type == Tok.Class)
         {
-            Id type = Id(lexer.next);
+            Id type = Id(next);
             Id iden = Id(require(Tok.Identifier));
             
             return parseClass(type, iden, att);
         }
         else if (t.type == Tok.Interface)
         {
-            Id type = Id(lexer.next);
+            Id type = Id(next);
             Id iden = Id(require(Tok.Identifier));
             
-            return parseClass(type, iden, att);
+            return parseInterface(type, iden, att);
         }
         messages.report(UnexpectedTok, t.location)
             .arg(t.getType)
@@ -202,18 +144,18 @@
     Extern parseLinkageType()
     {
         Extern e = Extern.D;
-        if(lexer.peek(1).type != Tok.OpenParentheses)
+        if(peek(1).type != Tok.OpenParentheses)
             return e;
 
-        lexer.next; lexer.next;
+        next; next;
 
         Token t = require(Tok.Identifier);
 
         switch(sm.getText(t.asRange))
         {
             case "C":
-                if (lexer.peek(0).type == Tok.Plus && 
-                    lexer.peek(1).type == Tok.Plus)
+                if (peek(0).type == Tok.Plus && 
+                    peek(1).type == Tok.Plus)
                     e = Extern.CPlusPlus;
                 else
                     e = Extern.C;
@@ -248,16 +190,16 @@
         void addToRes(Decl d) { res ~= d; }
 
         bool done = false;
-        while (!done && !on_a(Tok.Seperator))
+        while (!done && !isa(Tok.Seperator))
         {
             ModuleName mod = parseModuleName();
-            Token tok = lexer.peek;
+            Token tok = peek;
             switch (tok.type)
             {
                 case Tok.Comma:
                     // import A, B.C;
                     // parse another module-name
-                    lexer.next();
+                    next();
                     res ~= action.actOnImport(_import, mod, null);
                     break;
                 case Tok.Assign:
@@ -271,7 +213,7 @@
                     }
                     //if (isStatic)
                     //    error("Static imports cannot be renamed");
-                    lexer.next();
+                    next();
                     Id name = mod.id;
                     mod = parseModuleName();
                     // create from mod and rename to `name`
@@ -280,7 +222,7 @@
                 case Tok.Colon:
                     // import A : a;
                     // selective imports, potentially import A : print = a
-                    lexer.next();
+                    next();
                     Decl d = action.actOnImport(_import, mod, null);
                     // do-while on a comma:
                     //   add explicit symbol
@@ -313,8 +255,8 @@
         require(Tok.Seperator);
         return res.safe();
 Lerror:
-        while (!on_a (Tok.Seperator))
-            lexer.next();
+        while (!isa (Tok.Seperator))
+            next();
         return res.safe();
     }
 
@@ -325,37 +267,42 @@
     {
         auto decl = action.actOnDeclarator(type, iden, null, att);
 
-        if (lexer.peek.type == Tok.Colon)
+        if (peek.type == Tok.Colon)
             // SuperInterfaces
         {
-            lexer.next; // Remove colon.
+            next; // Remove colon.
 
-            Token identifier;
+            Id identifier;
 
             // The identifier
-            identifier = require(Tok.Identifier);
+            identifier = Id(require(Tok.Identifier));
 
-            // FIXME: Register Interface here
+            action.actOnInterfaceBaseClass(decl, identifier);
 
             // We should now have an optional list of items, each starting ','
-            while (lexer.peek.type == Tok.Comma)
+            while (peek.type == Tok.Comma)
             {
-                lexer.next; // Remove comma
+                next; // Remove comma
 
                 // The identifier
-                identifier = require(Tok.Identifier);
+                identifier = Id(require(Tok.Identifier));
 
-                // FIXME: Register Interface here
+                action.actOnInterfaceBaseClass(decl, identifier);
             }
         }
 
         require(Tok.OpenBrace);
 
-        Attribute a;
-        while(lexer.peek.isBasicType || lexer.peek.isIdentifier || lexer.peek.isAttribute)
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
         {
-            auto m_decl = parseDecl(&a);
-            action.actOnStructMember(decl, m_decl); 
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            auto m_decl = parseDecl(nes[$-1].a);
+            action.actOnInterfaceMember(decl, m_decl);
+
+            nes = parseAttributeScope(nes);
         }
 
         require(Tok.CloseBrace);
@@ -370,41 +317,49 @@
     {
         auto decl = action.actOnDeclarator(type, iden, null, att);
 
-        if (lexer.peek.type == Tok.Colon)
+        if (peek.type == Tok.Colon)
             // BaseClassList - Super class and interfaces(in that order)
         {
-            lexer.next; // Remove colon.
+            next; // Remove colon.
 
-            Token protection, identifier;
+            Token protection;
+            Id    identifier;
 
             // First we expect an optional protection level.
-            if (lexer.peek.isBaseClassProtection)
-                protection = lexer.next;
+            if (peek.isBaseClassProtection)
+                protection = next;
             // Then the identifier
-            identifier = require(Tok.Identifier);
+            identifier = Id(require(Tok.Identifier));
 
-            // FIXME: Register Interface here
+            action.actOnClassBaseClass(decl, identifier);
 
             // We should now have an optional list of items, each starting ','
-            while (lexer.peek.type == Tok.Comma)
+            while (peek.type == Tok.Comma)
             {
-                lexer.next; // Remove comma
+                next; // Remove comma
 
                 // First we expect an optional protection level.
-                if (lexer.peek.isBaseClassProtection)
-                    protection = lexer.next;
+                if (peek.isBaseClassProtection)
+                    protection = next;
                 // Then the identifier
-                identifier = require(Tok.Identifier);
+                identifier = Id(require(Tok.Identifier));
+
+                action.actOnClassBaseClass(decl, identifier);
             }
         }
 
         require(Tok.OpenBrace);
 
-        Attribute a;
-        while(lexer.peek.isBasicType || lexer.peek.isIdentifier || lexer.peek.isAttribute)
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
         {
-            auto m_decl = parseDecl(&a);
-            action.actOnStructMember(decl, m_decl); // FIXME: Should call actOnClassMember
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            auto m_decl = parseDecl(nes[$-1].a);
+            action.actOnClassMember(decl, m_decl);
+
+            nes = parseAttributeScope(nes);
         }
 
         require(Tok.CloseBrace);
@@ -421,11 +376,16 @@
 
         require(Tok.OpenBrace);
 
-        Attribute a;
-        while(lexer.peek.isBasicType || lexer.peek.isIdentifier || lexer.peek.isAttribute)
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
         {
-            auto m_decl = parseDecl(&a);
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            auto m_decl = parseDecl(nes[$-1].a);
             action.actOnStructMember(decl, m_decl); 
+
+            nes = parseAttributeScope(nes);
         }
 
         require(Tok.CloseBrace);
@@ -433,48 +393,162 @@
         return decl;
     }
 
-    /**
-      Parse statements.
+    Att[] parseAttributeInit()
+    {
+        Att[] nes;
+        nes ~= Att();
+        nes[0].nested = Scope;
+        return nes;
+    }
 
-      This is the place to attack!
-     */
-    Stmt parseStatement()
+    Att[] parseAttributeScope(Att[] nes)
     {
-        Token t = lexer.peek;
+        while ( nes[$-1].nested == Single )
+            nes.length = nes.length - 1;
 
-        if (t.isReturn)
+        while ( isa(Tok.CloseBrace) && nes.length > 1)
         {
-            Token ret = lexer.next;
-            Exp exp;
-            if (lexer.peek.type != Tok.Seperator)
-                exp = parseExpression();
-            require(Tok.Seperator);
-            return action.actOnReturnStmt(ret, exp);
+            while ( nes.length > 1 )
+            {
+                if( nes[$-1].nested == Scope )
+                {
+                    nes.length = nes.length - 1;
+                    next;
+                    break;
+                }
+                nes.length = nes.length - 1;
+            }
+        }
+
+        return nes;
+    }
+
+    Att parseAttribute(Att last)
+    {
+        Att _parseAttribute(Att last)
+        {
+            Att a = last;
+            a.nested = Single;
 
-            /*
-               if (cond)
-               single statement | compound statement
-               [else
-               single statement | compound statement]
-             */
+            switch(peek.type)
+            {
+                case Tok.Public:
+                    a.a.setProtection(Protection.Public);
+                    break;
+                case Tok.Private:
+                    a.a.setProtection(Protection.Private);
+                    break;
+                case Tok.Package:
+                    a.a.setProtection(Protection.Package);
+                    break;
+                case Tok.Protected:
+                    a.a.setProtection(Protection.Protected);
+                    break;
+                case Tok.Export:
+                    a.a.setProtection(Protection.Export);
+                    break;
+                case Tok.Static:
+                    a.a.setStatic;
+                    break;
+                case Tok.Final:
+                    a.a.setFinal;
+                    break;
+                case Tok.Const:
+                    a.a.setConst;
+                    break;
+                case Tok.Abstract:
+                    a.a.setAbstract;
+                    break;
+                case Tok.Override:
+                    a.a.setOverride;
+                    break;
+                case Tok.Depracted:
+                    a.a.setDepracted;
+                    break;
+                case Tok.Auto:
+                    a.a.setAuto;
+                    break;
+                case Tok.Extern:
+                    Extern e = parseLinkageType;
+                    a.a.setExtern(e);
+                    break;
+            }
+            next;
+
+            return a;
         }
-        else if (t.isIf)
+
+        Att a = _parseAttribute(last);
+
+        while (peek.isAttribute)
         {
-            Token _if = lexer.next();
+            a = parseAttribute(a);
+        }
 
-            require(Tok.OpenParentheses);
-            Exp cond = parseExpression();
-            require(Tok.CloseParentheses);
+        if (peek.type == Tok.Colon)
+        {
+            a.nested = All;
+            next;
+        }
+        else if (peek.type == Tok.OpenBrace)
+        {
+            a.nested = Scope;
+            next;
+        }
+
+        return a;
+    }
 
-            Stmt thenB = parseSingleOrCompoundStatement();
+    enum : uint { Single, Scope, All }
+
+    struct Att
+    {
+        Attribute a;
+        uint nested;
+    }
+
+    /**
+      Parse statements.
+
+      This is the place to attack!
+     */
+    Stmt parseStatement()
+    {
+        Token t = peek;
 
-            // if there is no else part we use the if as token, to have
-            // something than can be passed along
-            Token _else = _if;
-            Stmt elseB;
-            if (lexer.peek.type == Tok.Else)
+        if (t.isReturn)
+        {
+            Token ret = next;
+            Exp exp;
+            if (peek.type != Tok.Seperator)
+                exp = parseExpression();
+            require(Tok.Seperator);
+            return action.actOnReturnStmt(ret, exp);
+
+            /*
+               if (cond)
+               single statement | compound statement
+               [else
+               single statement | compound statement]
+             */
+        }
+        else if (t.isIf)
+        {
+            Token _if = next();
+
+            require(Tok.OpenParentheses);
+            Exp cond = parseExpression();
+            require(Tok.CloseParentheses);
+
+            Stmt thenB = parseSingleOrCompoundStatement();
+
+            // if there is no else part we use the if as the token, to have
+            // something that can be passed along
+            Token _else = _if;
+            Stmt elseB;
+            if (peek.type == Tok.Else)
             {
-                _else = lexer.next;
+                _else = next;
                 elseB = parseSingleOrCompoundStatement();
             }
 
@@ -487,7 +561,7 @@
         }
         else if (t.isWhile)
         {
-            Token _while = lexer.next;
+            Token _while = next;
             require(Tok.OpenParentheses);
             Exp cond = parseExpression();
             require(Tok.CloseParentheses);
@@ -506,14 +580,14 @@
         }
         else if (t.isBasicType || t.isIdentifier)
         {
-            Token iden = lexer.peek;
-            Token n = lexer.peek(1);
+            Token iden = peek;
+            Token n = peek(1);
             // Must be an decl, if we start with a basic type, or two
             // identifiers in a row
             if ( n.type == Tok.Star || n.type == Tok.OpenBracket)
             {
                 int len = peekParseType;
-                if(lexer.peek(len).type == Tok.Identifier && len != 0)
+                if(peek(len).type == Tok.Identifier && len != 0)
                     return action.actOnDeclStmt(parseVarDecl());
 
                 Exp exp = parseExpression();
@@ -531,7 +605,7 @@
         }
         else if(t.isSwitch)
         {
-            lexer.next;
+            next;
             require(Tok.OpenParentheses);
             auto target = parseExpression();
             auto res = action.actOnStartOfSwitchStmt(target);
@@ -544,18 +618,18 @@
                 {
                     require(Tok.Colon);
                     statements.length = 0;
-                    while (lexer.peek.type != Tok.Case
-                            && lexer.peek.type != Tok.Default
-                            && lexer.peek.type != Tok.CloseBrace)
+                    while (peek.type != Tok.Case
+                            && peek.type != Tok.Default
+                            && peek.type != Tok.CloseBrace)
                         statements ~= parseStatement();
                     action.actOnDefaultStmt(res, statements);
                     continue;
                 }
 
-                Token _case = lexer.peek;
+                Token _case = peek;
                 if (_case.type != Tok.Case)
                     break;
-                lexer.next();
+                next();
 
                 Exp[] literals;
                 do
@@ -563,21 +637,21 @@
                     Exp e = parseExpression();
 //                    IntegerLit lit = cast(IntegerLit)e;
 //                    if (lit is null)
-//                        messages.report(CaseValueMustBeInt, lexer.peek.location).arg(lexer.next.getType);
+//                        messages.report(CaseValueMustBeInt, peek.location).arg(next.getType);
 //                    else
                     literals ~= e;
                 }
                 while (skip(Tok.Comma));
                 require(Tok.Colon);
 
-                while (lexer.peek.type != Tok.Case
-                        && lexer.peek.type != Tok.Default
-                        && lexer.peek.type != Tok.CloseBrace)
+                while (peek.type != Tok.Case
+                        && peek.type != Tok.Default
+                        && peek.type != Tok.CloseBrace)
                     statements ~= parseStatement();
 
                 action.actOnCaseStmt(res, literals, statements);
 
-                if (lexer.peek.type == Tok.CloseBrace)
+                if (peek.type == Tok.CloseBrace)
                     break;
             }
             require(Tok.CloseBrace);
@@ -591,7 +665,7 @@
                 require(Tok.Seperator);
                 return action.actOnExprStmt(exp);
             }
-            messages.report(UnexpectedBeginStmt, lexer.peek.location).arg(lexer.next.getType);
+            messages.report(UnexpectedBeginStmt, peek.location).arg(next.getType);
             return null;
         }
         messages.report(UnexpectedTok, t.location);
@@ -603,7 +677,7 @@
         // manually hardcoded to only support "type id [= exp];"
         // as that is the only thing the codegen understands
         Id type = parseType;
-        Id id = Id(lexer.next);
+        Id id = Id(next);
         Exp init;
         if (skip(Tok.Assign))
             init = parseExpression();
@@ -621,9 +695,9 @@
         Decl func = action.actOnStartOfFunctionDef(type, name, att);
         parseFuncArgs(func);
 
-        if(lexer.peek.type == Tok.Seperator)
+        if(peek.type == Tok.Seperator)
         {
-            lexer.next;
+            next;
             return func;
         }
         Stmt stmt = parseCompoundStatement();
@@ -640,16 +714,16 @@
     {
         require(Tok.OpenParentheses); // Remove the "(" token.
 
-        while(lexer.peek.type != Tok.CloseParentheses)
+        while(peek.type != Tok.CloseParentheses)
         {
             auto t = parseType();
             Id i;
-            if(lexer.peek.type == Tok.Identifier)
+            if(peek.type == Tok.Identifier)
                 i = parseIdentifier();
             action.addFuncArg(func, t, i);
 
-            if(lexer.peek.type == Tok.Comma)
-                lexer.next;
+            if(peek.type == Tok.Comma)
+                next;
         }
 
         require(Tok.CloseParentheses); // Remove the ")"
@@ -661,7 +735,7 @@
      */
     Stmt parseSingleOrCompoundStatement()
     {
-        if (lexer.peek.type == Tok.OpenBrace)
+        if (peek.type == Tok.OpenBrace)
             return parseCompoundStatement();
         return parseStatement();
     }
@@ -676,7 +750,7 @@
     {
         Token lbrace = require(Tok.OpenBrace);
         SmallArray!(Stmt, 32) stmts; // Try to use the stack only
-        while (lexer.peek.type != Tok.CloseBrace)
+        while (peek.type != Tok.CloseBrace)
             stmts ~= parseStatement();
         Token rbrace = require(Tok.CloseBrace);
         return action.actOnCompoundStmt(lbrace, rbrace, stmts.unsafe());
@@ -684,7 +758,7 @@
 
     Id parseIdentifier()
     {
-        Token tok = lexer.next;
+        Token tok = next;
 
         if (tok.type is Tok.Identifier)
             return Id(tok);
@@ -701,8 +775,8 @@
         while (skip(Tok.Dot))
         {
             mod.packages ~= id;
-            if (lexer.peek.type != Tok.Identifier) {
-                messages.report(ExpectedIdAfterPackage, lexer.peek.location);
+            if (peek.type != Tok.Identifier) {
+                messages.report(ExpectedIdAfterPackage, peek.location);
                 goto Lerror;
             }
             id = parseIdentifier();
@@ -711,7 +785,7 @@
         return mod;
 Lerror:
         while (!skip(Tok.Seperator))
-            lexer.next();
+            next();
         return mod;
     }
 
@@ -721,7 +795,7 @@
      */
     Id parseType()
     {
-        Token type = lexer.next;
+        Token type = next;
 
         Id currentType;
 
@@ -729,19 +803,19 @@
             messages.report(InvalidType, type.location);
 
         currentType = Id(type);
-        type = lexer.peek;
+        type = peek;
 
         while(type.type == Tok.Star || type.type == Tok.OpenBracket)
         {
             if(type.type == Tok.Star)
             {
                 currentType = PointerId(currentType);
-                lexer.next;
+                next;
             }
             else
             {
-                lexer.next;
-                if(lexer.peek.type == Tok.Integer)
+                next;
+                if(peek.type == Tok.Integer)
                     currentType = StaticArrayId(
                             currentType, 
                             action.actOnNumericConstant(
@@ -749,7 +823,7 @@
                 require(Tok.CloseBracket);
                 
             }
-            type = lexer.peek;
+            type = peek;
         }
 
         return currentType;
@@ -758,7 +832,7 @@
     int peekParseType()
     {
         int i;
-        Token type = lexer.peek(i);
+        Token type = peek(i);
 
         Id currentType;
 
@@ -766,7 +840,7 @@
             return 0;
 
         currentType = Id(type);
-        type = lexer.peek(++i);
+        type = peek(++i);
 
         while(type.type == Tok.Star || type.type == Tok.OpenBracket)
         {
@@ -776,20 +850,20 @@
             }
             else
             {
-                if(lexer.peek(i++).type != Tok.OpenBracket)
+                if(peek(i++).type != Tok.OpenBracket)
                     return 0;
-                if(lexer.peek(i).type == Tok.Integer)
+                if(peek(i).type == Tok.Integer)
                 {
                     i++;
-                    if(lexer.peek(i++).type != Tok.CloseBracket)    
+                    if(peek(i++).type != Tok.CloseBracket)    
                         return 0;
                 }
                 else
-                    if(lexer.peek(i++).type != Tok.CloseBracket)
+                    if(peek(i++).type != Tok.CloseBracket)
                         return 0;
                 
             }
-            type = lexer.peek(i);
+            type = peek(i);
         }
 
         return i;
@@ -799,22 +873,22 @@
     // -- Expression parsing -- //
     Exp parsePostfixExp(Exp target)
     {
-        switch(lexer.peek.type)
+        switch(peek.type)
         {
             case Tok.Dot:
-                switch(lexer.peek(1).type)
+                switch(peek(1).type)
                 {
                     case Tok.Identifier:
-                        Token op = lexer.next;
-                        Id member = Id(lexer.next);
+                        Token op = next;
+                        Id member = Id(next);
                         Exp exp = action.actOnMemberReference(target, op.location, member);
                         return parsePostfixExp(exp);
                     default:
-                        Token t = lexer.peek(1);
+                        Token t = peek(1);
                         messages.report(ExpectedIdAfterDot, t.location);
                 }
             case Tok.OpenBracket:
-                Token open = lexer.next;
+                Token open = next;
                 Exp index = parseExpression();
                 Token close = require(Tok.CloseBracket);
                 return action.actOnIndexEpr(target, open, index, close);
@@ -826,15 +900,15 @@
     Exp parseExpression(int p = 0)
     {
         auto exp = P();
-        Token next = lexer.peek();
+        Token n = peek();
         BinOp* op = null;
-        while ((op = binary(next.type)) != null && op.prec >= p)
+        while ((op = binary(n.type)) != null && op.prec >= p)
         {
-            lexer.next();
+            next();
             int q = op.leftAssoc? 1 + op.prec : op.prec;
             auto exp2 = parseExpression(q);
-            exp = action.actOnBinaryOp(next.location, op.operator, exp, exp2);
-            next = lexer.peek();
+            exp = action.actOnBinaryOp(n.location, op.operator, exp, exp2);
+            n = peek();
         }
 
         return exp;
@@ -842,46 +916,46 @@
 
     Exp P()
     {
-        Token next = lexer.next();
-        if (auto op = unary(next.type))
-            return action.actOnUnaryOp(next, parseExpression(op.prec));
-        else if (next.type == Tok.OpenParentheses)
+        Token n = next();
+        if (auto op = unary(n.type))
+            return action.actOnUnaryOp(n, parseExpression(op.prec));
+        else if (n.type == Tok.OpenParentheses)
         {
             auto e = parseExpression(0);
             require(Tok.CloseParentheses);
             return e;
         }
-        else if (next.type == Tok.Identifier)
+        else if (n.type == Tok.Identifier)
         {
-            Exp value = action.actOnIdentifierExp(Id(next));
+            Exp value = action.actOnIdentifierExp(Id(n));
             Exp iden = parsePostfixExp(value);
-            switch(lexer.peek.type)
+            switch(peek.type)
             {
                 case Tok.OpenParentheses:
-                    Token lp = lexer.next;
+                    Token lp = next;
                     SmallArray!(Exp, 8) args;
-                    while(lexer.peek.type != Tok.CloseParentheses)
+                    while(peek.type != Tok.CloseParentheses)
                     {
-                        if(lexer.peek.type == Tok.Comma)
-                            lexer.next;
+                        if(peek.type == Tok.Comma)
+                            next;
                         args ~= parseExpression();
                     }
 
-                    Token rp = lexer.next();
+                    Token rp = next();
                     return action.actOnCallExpr(iden, lp, args.unsafe(), rp);
 
                 default:
                     return iden;
             }
         }
-        else if (next.type == Tok.Cast)
-            return parseCast(next);
-        else if (next.type == Tok.Integer)
-            return action.actOnNumericConstant(next);
-        else if (next.type == Tok.String)
-            return action.actOnStringExp(next);
+        else if (n.type == Tok.Cast)
+            return parseCast(n);
+        else if (n.type == Tok.Integer)
+            return action.actOnNumericConstant(n);
+        else if (n.type == Tok.String)
+            return action.actOnStringExp(n);
 
-        messages.report(ExpectedExp, next.location)
+        messages.report(ExpectedExp, n.location)
             .fatal(ExitLevel.Parser);
         return null;
     }
@@ -889,13 +963,13 @@
     Exp parseCast(ref Token _cast)
     {
         require(Tok.OpenParentheses);
-        auto next = lexer.next;
-        if(!next.isBasicType && !next.isIdentifier)
-            messages.report(ExpectedCastType, next.location);
+        auto n = next;
+        if(!n.isBasicType && !n.isIdentifier)
+            messages.report(ExpectedCastType, n.location);
         
         require(Tok.CloseParentheses);
         auto exp = P();
-        return action.actOnCastExpr(_cast, Id(next), exp);
+        return action.actOnCastExpr(_cast, Id(n), exp);
     }
 
     struct UnOp
@@ -980,27 +1054,37 @@
 
     Token require(Tok t)
     {
-        if (lexer.peek().type != t)
-            messages.report(UnexpectedTokSingle, lexer.peek.location)
-                .arg(lexer.peek.getType)
+        if (peek().type != t)
+            messages.report(UnexpectedTokSingle, peek.location)
+                .arg(peek.getType)
                 .arg(t);
-        return lexer.next();
+        return next();
     }
 
     bool skip(Tok t)
     {
-        if (lexer.peek().type != t)
+        if (peek().type != t)
             return false;
-        lexer.next();
+        next();
         return true;
     }
 
-    bool on_a(Tok t)
+    bool isa(Tok t)
     {
-        return lexer.peek.type == t;
+        return peek.type == t;
     }
 
-    Lexer lexer;
+    Token next()
+    {
+        return lexer.next;
+    }
+
+    Token peek(int i = 0)
+    {
+        return lexer.peek(i);
+    }
+
+    Lexer lexer;
     SourceManager sm;
 }