diff src/parser/Parser.d @ 207:e0551773a005

Added the correct version.
author Anders Johnsen <skabet@gmail.com>
date Tue, 12 Aug 2008 18:19:34 +0200
parents d3c148ca429b
children 42e663451371
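
For orientation: the bulk of this change threads a basic.Attribute through every declaration and keeps a small stack of Att records so the parser knows how long each attribute applies: Single for a prefix like "static int x;", Scope for "private { ... }", and All for "public:". The stand-alone model below only mirrors that stack discipline; it is a hypothetical sketch written against present-day D, and names such as attributeInit, pushAttribute and afterDeclaration are illustrative, not code from this repository.

// A minimal model of the Att stack used by parseModule/parseStruct/parseClass.
// Only the nesting rules of parseAttributeInit/parseAttribute/parseAttributeScope
// are mirrored here; the Attribute payload is reduced to a text label.
module attrsketch;

import std.stdio;

enum : uint { Single, Scope, All }   // same kinds as the diff's anonymous enum

struct Att
{
    string label;   // stand-in for basic.Attribute
    uint nested;
}

// parseAttributeInit: the surrounding scope behaves like an already-open '{'.
Att[] attributeInit()
{
    return [Att("", Scope)];
}

// parseAttribute, reduced: copy the current attribute set, add one flag, and
// let the caller say how long it lives based on the token that follows it
// (Single: "static int x;", Scope: "private { ... }", All: "public:").
void pushAttribute(ref Att[] nes, string flag, uint nested)
{
    Att a = nes[$ - 1];
    a.label ~= " " ~ flag;
    a.nested = nested;
    nes ~= a;
}

// parseAttributeScope, reduced: Single entries die with the declaration they
// prefixed; a '}' pops everything up to and including the nearest Scope entry,
// while All entries pushed below that Scope survive.
void afterDeclaration(ref Att[] nes, bool sawCloseBrace)
{
    while (nes[$ - 1].nested == Single)
        nes.length = nes.length - 1;

    if (sawCloseBrace)
        while (nes.length > 1)
        {
            bool wasScope = nes[$ - 1].nested == Scope;
            nes.length = nes.length - 1;
            if (wasScope)
                break;
        }
}

void main()
{
    auto nes = attributeInit();
    pushAttribute(nes, "private", All);     // "private:"
    pushAttribute(nes, "static", Single);   // "static int x;"
    writeln("before: ", nes[$ - 1].label);  // " private static"
    afterDeclaration(nes, false);
    writeln("after:  ", nes[$ - 1].label);  // " private" -- the All entry survives
}
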
--- a/src/parser/Parser.d	Tue Aug 12 18:14:56 2008 +0200
+++ b/src/parser/Parser.d	Tue Aug 12 18:19:34 2008 +0200
@@ -5,7 +5,8 @@
 
 import parser.Action;
 
-import basic.Message;
+import basic.Message,
+       basic.Attribute;
 
 import basic.SmallArray,
        basic.SourceManager;
@@ -34,91 +35,151 @@
         this.action = act;
 
         Module m;
-        if (lexer.peek.type == Tok.Module)
+        if (peek.type == Tok.Module)
         {
-            Token _module = lexer.next;
+            Token _module = next();
             ModuleName name = parseModuleName();
             m = action.actOnModule(_module, sm.getText(name.asRange()));
             require(Tok.Seperator);
         }
         else
         {
-            SLoc loc = lexer.peek.location;
+            SLoc loc = peek.location;
             m = action.actOnImplicitModule(loc, sm.getFile(loc));
         }
 
-        while (lexer.peek.type != Tok.EOF)
-            foreach (d; parseDeclDef())
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) )
+        {
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            foreach (d; parseDeclDef(nes[$-1].a))
                 action.actOnModuleDecl(m, d);
 
+            nes = parseAttributeScope(nes);
+        }
+
         return m;
     }
 
 private:
-    Decl[] parseDeclDef()
+    Decl[] parseDeclDef(Attribute a)
     {
-        Token t = lexer.peek;
-        if (t.type == Tok.Import)
+        if ( isa (Tok.Import) )
             return parseImports();
-        else
-            return [parseDecl()];
+
+        return [parseDecl(a)];
     }
 
-    Decl parseDecl()
+    Decl parseDecl(Attribute att)
     {
-        Token t = lexer.peek;
-
-        if (t.isBasicType || t.isIdentifier)
+        switch(peek.type)
         {
-            Id type;
-            Id iden;
-            int len = peekParseType;
-            if(lexer.peek(len).type == Tok.Identifier && len != 0)
-            {
-                type = parseType;
-parseDeclAfterInvalidType:
-                iden = Id(require(Tok.Identifier));
-                Token next = lexer.peek();
-                if (next.type == Tok.Seperator)
-                {
-                    Token sep = lexer.next();
-                    return action.actOnDeclarator(type, iden, null);
-                }
-                else if (next.type == Tok.Assign)
+            case Tok.Struct:
+                Id type = Id(next());
+                Id iden = Id(require(Tok.Identifier));            
+                return parseStruct(type, iden, att);
+
+            case Tok.Class:
+                Id type = Id(next());
+                Id iden = Id(require(Tok.Identifier));            
+                return parseClass(type, iden, att);
+
+            case Tok.Interface:
+                Id type = Id(next());
+                Id iden = Id(require(Tok.Identifier));
+                return parseInterface(type, iden, att);
+
+            case Tok.Alias:
+                next();
+                auto decl = parseDecl(Attribute());
+                return action.actOnAliasDecl(decl, att);
+
+            case Tok.Identifier:
+                Id type = parseType;
+                Id iden = Id(require(Tok.Identifier));
+                
+                switch(peek.type)
                 {
-                    Token assign = lexer.next();
-                    Exp exp = parseExpression();
-                    require(Tok.Seperator);
-                    return action.actOnDeclarator(type, iden, exp);
+                    case Tok.Seperator:
+                        Token sep = next();
+                        return action.actOnDeclarator(type, iden, null, att);
+
+                    case Tok.Assign:
+                        Token assign = next();
+                        Exp exp = parseExpression();
+                        require(Tok.Seperator);
+                        return action.actOnDeclarator(type, iden, exp, att);
+
+                    case Tok.OpenParentheses:
+                        return parseFunc(type, iden, att);
+
+                    default:
+                        auto n1 = next();
+                        isEOF(type.tok);
+                        messages.report(UnexpectedTok, n1.location).arg(n1.get(sm));
+                        return action.actOnDeclarator(type, iden, null, att);
                 }
-                else if (next.type == Tok.OpenParentheses)
-                    return parseFunc(type, iden);
-                else
-                    messages.report(UnexpectedTok, next.location).arg(next.getType);
-            }
-            t = lexer.peek(len);
-            messages.report(InvalidDeclType, t.location)
-                .arg(sm.getText(t.asRange));
-            while(len--)
-                lexer.next;
-            while(lexer.peek.type != Tok.Identifier)
-                lexer.next;
-            type = Id(lexer.peek);
-            goto parseDeclAfterInvalidType;
+                messages.report(InvalidDeclType, peek.location)
+                    .arg(sm.getText(peek.asRange));
+
+            default:
+                if (peek.isBasicType)
+                    goto case Tok.Identifier;
+
+                messages.report(UnexpectedTok, peek.location)
+                    .arg(sm.getText(peek.asRange));
+
+                next();
+                return null;
         }
-        else if (t.type == Tok.Struct)
-        {
-            Id type = Id(lexer.next);
-            Id iden = Id(require(Tok.Identifier));
-            
-            return parseStruct(type, iden);
-        }
-        messages.report(UnexpectedTok, t.location)
-            .arg(t.getType)
+        messages.report(UnexpectedTok, peek.location)
+            .arg(peek.get(sm))
             .arg(Tok.Identifier)
             .fatal(ExitLevel.Parser);
     }
 
+    Extern parseLinkageType()
+    {
+        Extern e = Extern.D;
+        if(peek(1).type != Tok.OpenParentheses)
+            return e;
+
+        next(); next();
+
+        Token t = require(Tok.Identifier);
+
+        switch(sm.getText(t.asRange))
+        {
+            case "C":
+                if (peek(0).type == Tok.Plus && 
+                    peek(1).type == Tok.Plus)
+                    e = Extern.CPlusPlus;
+                else
+                    e = Extern.C;
+                break;
+            case "D":
+                break;
+            case "Windows":
+                e = Extern.Windows;
+                break;
+            case "Pascal":
+                e = Extern.Pascal;
+                break;
+            case "System":
+                e = Extern.System;
+                break;
+            default:
+                messages.report(UnexpectedLinkType, t.location);
+        }
+
+        if (!isa(Tok.CloseParentheses))
+            messages.report(UnexpectedTokSingle, peek.location);
+
+        return e;
+    }
+
     /**
       Parse a series of imports belonging to a single import token.
      */
@@ -129,16 +190,16 @@
         void addToRes(Decl d) { res ~= d; }
 
         bool done = false;
-        while (!done && !on_a(Tok.Seperator))
+        while (!done && !isa(Tok.Seperator))
         {
             ModuleName mod = parseModuleName();
-            Token tok = lexer.peek;
+            Token tok = peek;
             switch (tok.type)
             {
                 case Tok.Comma:
                     // import A, B.C;
                     // parse another module-name
-                    lexer.next();
+                    next();
                     res ~= action.actOnImport(_import, mod, null);
                     break;
                 case Tok.Assign:
@@ -152,7 +213,7 @@
                     }
                     //if (isStatic)
                     //    error("Static imports cannot be renamed");
-                    lexer.next();
+                    next();
                     Id name = mod.id;
                     mod = parseModuleName();
                     // create from mod and rename to `name`
@@ -161,7 +222,7 @@
                 case Tok.Colon:
                     // import A : a;
                     // selective imports, potentially import A : print = a
-                    lexer.next();
+                    next();
                     Decl d = action.actOnImport(_import, mod, null);
                     // do-while on a comma:
                     //   add explicit symbol
@@ -194,42 +255,147 @@
         require(Tok.Seperator);
         return res.safe();
 Lerror:
-        while (!on_a (Tok.Seperator))
-            lexer.next();
+        while (!isa (Tok.Seperator))
+            next();
         return res.safe();
     }
 
     /**
+      Parse interface
+     */
+    Decl parseInterface(Id type, Id iden, Attribute att)
+    {
+        auto decl = action.actOnDeclarator(type, iden, null, att);
+
+        if (peek.type == Tok.Colon)
+            // SuperInterfaces
+        {
+            next(); // Remove colon.
+
+            Id identifier;
+
+            // The identifier
+            identifier = Id(require(Tok.Identifier));
+
+            action.actOnInterfaceBaseClass(decl, identifier);
+
+            // We should now have an optional list of items, each starting ','
+            while (peek.type == Tok.Comma)
+            {
+                next(); // Remove comma
+
+                // The identifier
+                identifier = Id(require(Tok.Identifier));
+
+                action.actOnInterfaceBaseClass(decl, identifier);
+            }
+        }
+
+        require(Tok.OpenBrace);
+
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
+        {
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            auto m_decl = parseDecl(nes[$-1].a);
+            action.actOnInterfaceMember(decl, m_decl);
+
+            nes = parseAttributeScope(nes);
+        }
+
+        require(Tok.CloseBrace);
+        
+        return decl;
+    }
+
+    /**
+      Parse class
+     */
+    Decl parseClass(Id type, Id iden, Attribute att)
+    {
+        auto decl = action.actOnDeclarator(type, iden, null, att);
+
+        if (peek.type == Tok.Colon)
+            // BaseClassList - Super class and interfaces (in that order)
+        {
+            next(); // Remove colon.
+
+            Token protection;
+            Id    identifier;
+
+            // First we expect an optional protection level.
+            if (peek.isBaseClassProtection)
+                protection = next();
+            // Then the identifier
+            identifier = Id(require(Tok.Identifier));
+
+            action.actOnClassBaseClass(decl, identifier);
+
+            // We should now have an optional list of items, each starting ','
+            while (peek.type == Tok.Comma)
+            {
+                next(); // Remove comma
+
+                // First we expect an optional protection level.
+                if (peek.isBaseClassProtection)
+                    protection = next();
+                // Then the identifier
+                identifier = Id(require(Tok.Identifier));
+
+                action.actOnClassBaseClass(decl, identifier);
+            }
+        }
+
+        require(Tok.OpenBrace);
+
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
+        {
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            switch(peek.type)
+            {
+                case Tok.This:
+                    auto id = Id(next);
+                    auto m_decl = parseFunc(iden, id, nes[$-1].a);
+                    action.actOnClassMember(decl, m_decl);
+                    break;
+                    
+                default:
+                    auto m_decl = parseDecl(nes[$-1].a);
+                    action.actOnClassMember(decl, m_decl);
+            }
+
+            nes = parseAttributeScope(nes);
+        }
+
+        require(Tok.CloseBrace);
+        
+        return decl;
+    }
+
+    /**
       Parse struct
      */
-    Decl parseStruct(Id type, Id iden)
+    Decl parseStruct(Id type, Id iden, Attribute att)
     {
-        auto decl = action.actOnDeclarator(type, iden, null);
+        auto decl = action.actOnDeclarator(type, iden, null, att);
 
         require(Tok.OpenBrace);
 
-        while(lexer.peek.isBasicType || lexer.peek.isIdentifier)
+        auto nes = parseAttributeInit;
+        while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) )
         {
-            auto m_decl = parseDecl();
+            while ( peek.isAttribute )
+                nes ~= parseAttribute(nes[$-1]);
+
+            auto m_decl = parseDecl(nes[$-1].a);
             action.actOnStructMember(decl, m_decl); 
-/*            Id var_type = Id(lexer.next);
-            Id var_iden = Id(require(Tok.Identifier));
-            Token next = lexer.peek();
-            if (next.type == Tok.Seperator)
-            {
-                Token sep = lexer.next();
-                action.actOnStructMember(decl, var_type, var_iden, null);
-                continue;
-            }
-            else if (next.type == Tok.Assign)
-            {
-                Token assign = lexer.next();
-                Exp exp = parseExpression();
-                require(Tok.Seperator);
-                action.actOnStructMember(decl, var_type, var_iden, exp);
-                continue;
-            }
-            messages.report(UnexpectedTok, next.location).arg(next.getType);*/
+
+            nes = parseAttributeScope(nes);
         }
 
         require(Tok.CloseBrace);
@@ -237,6 +403,125 @@
         return decl;
     }
 
+    Att[] parseAttributeInit()
+    {
+        Att[] nes;
+        nes ~= Att();
+        nes[0].nested = Scope;
+        return nes;
+    }
+
+    Att[] parseAttributeScope(Att[] nes)
+    {
+        while ( nes[$-1].nested == Single )
+            nes.length = nes.length - 1;
+
+        while ( isa(Tok.CloseBrace) && nes.length > 1)
+        {
+            while ( nes.length > 1 )
+            {
+                if( nes[$-1].nested == Scope )
+                {
+                    nes.length = nes.length - 1;
+                    next();
+                    break;
+                }
+                nes.length = nes.length - 1;
+            }
+        }
+
+        return nes;
+    }
+
+    Att parseAttribute(Att last)
+    {
+        Att _parseAttribute(Att last)
+        {
+            Att a = last;
+            a.nested = Single;
+
+            switch(peek.type)
+            {
+                case Tok.Public:
+                    a.a.setProtection(Protection.Public);
+                    break;
+                case Tok.Private:
+                    a.a.setProtection(Protection.Private);
+                    break;
+                case Tok.Package:
+                    a.a.setProtection(Protection.Package);
+                    break;
+                case Tok.Protected:
+                    a.a.setProtection(Protection.Protected);
+                    break;
+                case Tok.Export:
+                    a.a.setProtection(Protection.Export);
+                    break;
+                case Tok.Static:
+                    a.a.setStatic;
+                    break;
+                case Tok.Final:
+                    a.a.setFinal;
+                    break;
+                case Tok.Const:
+                    a.a.setConst;
+                    break;
+                case Tok.Abstract:
+                    a.a.setAbstract;
+                    break;
+                case Tok.Override:
+                    a.a.setOverride;
+                    break;
+                case Tok.Deprecated:
+                    a.a.setDeprecated;
+                    break;
+                case Tok.Auto:
+                    a.a.setAuto;
+                    break;
+                case Tok.Extern:
+                    Extern e = parseLinkageType;
+                    a.a.setExtern(e);
+                    break;
+            }
+            next();
+
+            return a;
+        }
+
+        Att a = _parseAttribute(last);
+
+        while (peek.isAttribute)
+        {
+            a = parseAttribute(a);
+        }
+
+        if (peek.type == Tok.Colon)
+        {
+            a.nested = All;
+            next();
+        }
+        else if  (peek.type == Tok.OpenBrace)
+        {
+            a.nested = Scope;
+            next();
+        }
+
+        return a;
+    }
+
+    enum : uint
+    { 
+        Single, 
+        Scope, 
+        All 
+    }
+
+    struct Att
+    {
+        Attribute a;
+        uint nested;
+    }
+
     /**
       Parse statements.
 
@@ -244,26 +529,18 @@
      */
     Stmt parseStatement()
     {
-        Token t = lexer.peek;
-
-        switch(t.type)
+        switch (peek.type)
         {
             case Tok.Return:
-                Token ret = lexer.next;
+                Token ret = next();
                 Exp exp;
-                if (lexer.peek.type != Tok.Seperator)
+                if (peek.type != Tok.Seperator)
                     exp = parseExpression();
                 require(Tok.Seperator);
                 return action.actOnReturnStmt(ret, exp);
 
-            /*
-               if (cond)
-                single statement | compound statement
-               [else
-                single statement | compound statement]
-             */
             case Tok.If:
-                Token _if = lexer.next();
+                Token _if = next();
 
                 require(Tok.OpenParentheses);
                 Exp cond = parseExpression();
@@ -275,107 +552,185 @@
                 // something that can be passed along
                 Token _else = _if;
                 Stmt elseB;
-                if (lexer.peek.type == Tok.Else)
+                if (peek.type == Tok.Else)
                 {
-                    _else = lexer.next;
+                    _else = next();
                     elseB = parseSingleOrCompoundStatement();
                 }
-
                 return action.actOnIfStmt(_if, cond, thenB, _else, elseB);
 
-            /*
-               while (cond)
-                single statement | compound statement
-             */
             case Tok.While:
-                Token _while = lexer.next;
+                Token _while = next();
                 require(Tok.OpenParentheses);
                 Exp cond = parseExpression();
                 require(Tok.CloseParentheses);
                 Stmt bodyStmt = parseSingleOrCompoundStatement();
                 return action.actOnWhileStmt(_while, cond, bodyStmt);
 
-            /*
-               One of four things:
-               A declaration of a function/variable `type id ...`
-               A direct assignment `id = exp;`
-               An indirect assignment `id.id = exp`
-               Some sort of free standing expression
+            case Tok.For:
+                Token _for = next();
+                require(Tok.OpenParentheses);
+                Stmt init;
+                if ( isa(Tok.Seperator))
+                    require(Tok.Seperator);
+                else
+                    init = parseStatement();
+
+                Exp cond;
+                if ( !isa(Tok.Seperator))
+                    cond = parseExpression();
+                require(Tok.Seperator);
 
-               The assignments should be handled as binary expressions?
-             */
-            case Tok.Identifier:
-                Token iden = lexer.peek;
-                Token n = lexer.peek(1);
-                // Must be an decl, if we start with a basic type, or two
-                // identifiers in a row
-                if (iden.isBasicType() || iden.isIdentifier())
+                Exp incre;
+                if ( !isa(Tok.CloseParentheses))
+                    incre = parseExpression();
+                require(Tok.CloseParentheses);
+
+                Stmt bodyStmt = parseSingleOrCompoundStatement();
+                return action.actOnForStmt(_for, init, cond, incre, bodyStmt);
+
+            case Tok.Switch:                
+                auto t = next();
+                require(Tok.OpenParentheses);
+                auto target = parseExpression();
+                auto res = action.actOnStartOfSwitchStmt(t, target);
+                require(Tok.CloseParentheses);
+                require(Tok.OpenBrace);
+                while (true)
                 {
-                    if ( n.type == Tok.Star || n.type == Tok.OpenBracket)
+                    Stmt[] statements;
+                    if (isa(Tok.Default))
                     {
-                        int len = peekParseType;
-                        if(lexer.peek(len).type == Tok.Identifier && len != 0)
-                            return action.actOnDeclStmt(parseVarDecl());
+                        Token _default = next();
+                        require(Tok.Colon);
+                        statements.length = 0;
+                        while (peek.type != Tok.Case
+                                && peek.type != Tok.Default
+                                && peek.type != Tok.CloseBrace)
+                            statements ~= parseStatement();
+                        action.actOnDefaultStmt(res, _default, statements);
+                        continue;
+                    }
+
+                    Token _case = peek;
+                    if (_case.type != Tok.Case)
+                        break;
+                    next();
+
+                    Exp[] literals;
+                    do
+                    {
+                        Exp e = parseExpression();
+                        literals ~= e;
+                    }
+                    while (skip(Tok.Comma));
+                    require(Tok.Colon);
 
-                        Exp exp = parseExpression();
-                        require(Tok.Seperator);
-                        return action.actOnExprStmt(exp);
-                    }
-                        
-                    if (n.isIdentifier())
-                        return action.actOnDeclStmt(parseVarDecl());
+                    while (peek.type != Tok.Case
+                            && peek.type != Tok.Default
+                            && peek.type != Tok.CloseBrace)
+                        statements ~= parseStatement();
+
+                    action.actOnCaseStmt(res, _case, literals, statements);
 
-                    // Expression: a.b, a = b, a(b) etc.
-                    Exp exp = parseExpression();
-                    require(Tok.Seperator);
-                    return action.actOnExprStmt(exp);
+                    if (peek.type == Tok.CloseBrace)
+                        break;
+                }
+                require(Tok.CloseBrace);
+                return res;
+        
+            case Tok.Star:
+                auto exp = parseExpression();
+                require(Tok.Seperator);
+                return action.actOnExprStmt(exp);
+
+            case Tok.Identifier:
+                // An identifier followed by 'function', another identifier or
+                // a '*' can only start a declaration; 'a * b;' makes no sense as a statement.
+
+                if (isa(Tok.Function, 1)   ||
+                    isa(Tok.Identifier, 1) ||
+                    isa(Tok.Star, 1))      
+                {
+                    Attribute a;
+                    return action.actOnDeclStmt(parseDecl(a));
                 }
 
-            case Tok.Switch:
-                messages.report(UnexpectedTok, lexer.peek.location).arg(lexer.next.getType);
-                return null;
+                if (isa(Tok.OpenBracket, 1))
+                {
+                    int i = 1;
+                    while (isa(Tok.OpenBracket, i)  || 
+                           isa(Tok.Star, i)         ||
+                           isa(Tok.Identifier, i))
+                    {
+                        if (isa(Tok.Identifier, i))
+                            return action.actOnDeclStmt(parseDecl(Attribute()));
+
+                        i++;
+                        if (isa(Tok.Star,i-1))
+                            continue;
+                        // Must be OpenBracket here..
+
+                        if (isa(Tok.Integer, i))
+                            i++;
+                        else
+                            if (isa(Tok.CloseBracket, i))
+                                return action.actOnDeclStmt(parseDecl(Attribute()));
+                            else
+                                i++;
+
+                        if (!isa(Tok.CloseBracket, i))
+                            break;
+                        i++;
+                    }
+                    if (isa(Tok.Function, i))
+                        return action.actOnDeclStmt(parseDecl(Attribute()));
+                }
+
+                // Expression: a.b, a = b, a(b) etc.
+                Exp exp = parseExpression();
+                require(Tok.Seperator);
+                return action.actOnExprStmt(exp);
+            
+            case Tok.Void: // And all basic types
+                return action.actOnDeclStmt(parseVarDecl());
 
             default:
-                if (t.isBasicType())
-                    goto case Tok.Identifier;
-                if (t.type == Tok.Star)
-                {
-                    auto exp = parseExpression();
-                    require(Tok.Seperator);
-                    return action.actOnExprStmt(exp);
-                }
-                messages.report(UnexpectedBeginStmt, lexer.peek.location).arg(lexer.next.getType);
+                if (peek.isBasicType)
+                    goto case Tok.Void;
+
+                messages.report(UnexpectedBeginStmt, peek.location).arg(peek.get(sm));
+                require(Tok.Seperator);
                 return null;
         }
-        messages.report(UnexpectedTok, t.location);
-        return null;
     }
 
     Decl parseVarDecl()
     {
         // manually hardcoded to only support "type id [= exp];"
         // as that is the only thing the codegen understands
-        Id type = parseType;
-        Id id = Id(lexer.next);
+        Id type = parseType();
+        Id id = Id(next());
         Exp init;
         if (skip(Tok.Assign))
             init = parseExpression();
         require(Tok.Seperator);
-        Decl d = action.actOnDeclarator(type, id, init);
+        Attribute att;
+        Decl d = action.actOnDeclarator(type, id, init, att);
         return d;
     }
 
     /**
       Parses a function/method given the already parsed return type and name
      */
-    Decl parseFunc(ref Id type, ref Id name)
+    Decl parseFunc(ref Id type, ref Id name, Attribute att)
     {
-        Decl func = action.actOnStartOfFunctionDef(type, name);
+        Decl func = action.actOnStartOfFunctionDef(type, name, att);
         parseFuncArgs(func);
 
-        if(lexer.peek.type == Tok.Seperator)
+        if(peek.type == Tok.Seperator)
         {
-            lexer.next;
+            next();
             return func;
         }
         Stmt stmt = parseCompoundStatement();
@@ -392,16 +747,16 @@
     {
         require(Tok.OpenParentheses); // Remove the "(" token.
 
-        while(lexer.peek.type != Tok.CloseParentheses)
+        while(peek.type != Tok.CloseParentheses)
         {
             auto t = parseType();
             Id i;
-            if(lexer.peek.type == Tok.Identifier)
+            if(peek.type == Tok.Identifier)
                 i = parseIdentifier();
             action.addFuncArg(func, t, i);
 
-            if(lexer.peek.type == Tok.Comma)
-                lexer.next;
+            if(peek.type == Tok.Comma)
+                next();
         }
 
         require(Tok.CloseParentheses); // Remove the ")"
@@ -413,7 +768,7 @@
      */
     Stmt parseSingleOrCompoundStatement()
     {
-        if (lexer.peek.type == Tok.OpenBrace)
+        if (peek.type == Tok.OpenBrace)
             return parseCompoundStatement();
         return parseStatement();
     }
@@ -421,14 +776,14 @@
     /**
       Parses a function-body or similar, expects an opening brace to be the
       current token.
-      
+
       Will consume both the starting { and ending }
      */
     Stmt parseCompoundStatement()
     {
         Token lbrace = require(Tok.OpenBrace);
         SmallArray!(Stmt, 32) stmts; // Try to use the stack only
-        while (lexer.peek.type != Tok.CloseBrace)
+        while ( !isa(Tok.CloseBrace) && !isa(Tok.EOF) )
             stmts ~= parseStatement();
         Token rbrace = require(Tok.CloseBrace);
         return action.actOnCompoundStmt(lbrace, rbrace, stmts.unsafe());
@@ -436,13 +791,13 @@
 
     Id parseIdentifier()
     {
-        Token tok = lexer.next;
+        Token tok = next();
 
         if (tok.type is Tok.Identifier)
             return Id(tok);
 
         messages.report(UnexpectedTokSingle, tok.location)
-            .arg(tok.getType)
+            .arg(tok.get(sm))
             .arg(Tok.Identifier);
     }
 
@@ -453,8 +808,8 @@
         while (skip(Tok.Dot))
         {
             mod.packages ~= id;
-            if (lexer.peek.type != Tok.Identifier) {
-                messages.report(ExpectedIdAfterPackage, lexer.peek.location);
+            if (peek.type != Tok.Identifier) {
+                messages.report(ExpectedIdAfterPackage, peek.location);
                 goto Lerror;
             }
             id = parseIdentifier();
@@ -463,7 +818,7 @@
         return mod;
 Lerror:
         while (!skip(Tok.Seperator))
-            lexer.next();
+            next();
         return mod;
     }
 
@@ -473,7 +828,7 @@
      */
     Id parseType()
     {
-        Token type = lexer.next;
+        Token type = next();
 
         Id currentType;
 
@@ -481,92 +836,81 @@
             messages.report(InvalidType, type.location);
 
         currentType = Id(type);
-        type = lexer.peek;
 
-        while(type.type == Tok.Star || type.type == Tok.OpenBracket)
+        while(true)
         {
-            if(type.type == Tok.Star)
-            {
-                currentType = PointerId(currentType);
-                lexer.next;
-            }
-            else
+            switch(peek.type)
             {
-                lexer.next;
-                if(lexer.peek.type == Tok.Integer)
-                    currentType = ArrayId(currentType, action.actOnNumericConstant(require(Tok.Integer)));
-                require(Tok.CloseBracket);
-                
-            }
-            type = lexer.peek;
-        }
+                case Tok.Star:
+                    currentType = PointerTypeId(currentType);
+                    next();
+                    break;
+                case Tok.OpenBracket:
+                    next();
+                    if (isa(Tok.Integer))
+                        currentType = StaticArrayTypeId(
+                                currentType, 
+                                action.actOnNumericConstant(
+                                    require(Tok.Integer)));
+                    require(Tok.CloseBracket);
+                    break;
+                case Tok.Function:
+                    next();
 
-        return currentType;
-    }
 
-    int peekParseType()
-    {
-        int i;
-        Token type = lexer.peek(i);
-
-        Id currentType;
+                    require(Tok.OpenParentheses); // Remove the "(" token.
 
-        if ( !(type.isBasicType || type.type == Tok.Identifier) )
-            return 0;
+                    DeclT[] decls;
 
-        currentType = Id(type);
-        type = lexer.peek(++i);
+                    while(peek.type != Tok.CloseParentheses)
+                    {
+                        auto t = parseType();
+                        Id i;
+                        if(peek.type == Tok.Identifier)
+                            i = parseIdentifier();
 
-        while(type.type == Tok.Star || type.type == Tok.OpenBracket)
-        {
-            if(type.type == Tok.Star)
-            {
-                i++;
+                        // Act on function type param
+                        decls ~= action.actOnDeclarator(t, i, null, Attribute());
+
+                        if(peek.type == Tok.Comma)
+                            next();
+                    }
+
+                    currentType = FunctionTypeId(currentType, decls);
+
+                    require(Tok.CloseParentheses); // Remove the ")"
+                    break;
+                default:
+                    goto end;
             }
-            else
-            {
-                if(lexer.peek(i++).type != Tok.OpenBracket)
-                    return 0;
-                if(lexer.peek(i).type == Tok.Integer)
-                {
-                    i++;
-                    if(lexer.peek(i++).type != Tok.CloseBracket)    
-                        return 0;
-                }
-                else
-                    if(lexer.peek(i++).type != Tok.CloseBracket)
-                        return 0;
-                
-            }
-            type = lexer.peek(i);
         }
-
-        return i;
+end:
+        return currentType;
     }
 
 private:
     // -- Expression parsing -- //
     Exp parsePostfixExp(Exp target)
     {
-        switch(lexer.peek.type)
+        switch(peek.type)
         {
             case Tok.Dot:
-                switch(lexer.peek(1).type)
+                switch(peek(1).type)
                 {
                     case Tok.Identifier:
-                        Token op = lexer.next;
-                        Id member = Id(lexer.next);
+                        Token op = next();
+                        Id member = Id(next());
                         Exp exp = action.actOnMemberReference(target, op.location, member);
                         return parsePostfixExp(exp);
                     default:
-                        Token t = lexer.peek(1);
+                        Token t = peek(1);
                         messages.report(ExpectedIdAfterDot, t.location);
                 }
             case Tok.OpenBracket:
-                Token open = lexer.next;
+                Token open = next();
                 Exp index = parseExpression();
                 Token close = require(Tok.CloseBracket);
-                return action.actOnIndexEpr(target, open, index, close);
+                return action.actOnIndexExpr(target, open, index, close);
             default:
                 return target;
         }
@@ -575,15 +919,15 @@
     Exp parseExpression(int p = 0)
     {
         auto exp = P();
-        Token next = lexer.peek();
+        Token n = peek();
         BinOp* op = null;
-        while ((op = binary(next.type)) != null && op.prec >= p)
+        while ((op = binary(n.type)) != null && op.prec >= p)
         {
-            lexer.next();
+            next();
             int q = op.leftAssoc? 1 + op.prec : op.prec;
             auto exp2 = parseExpression(q);
-            exp = action.actOnBinaryOp(next.location, op.operator, exp, exp2);
-            next = lexer.peek();
+            exp = action.actOnBinaryOp(n.location, op.operator, exp, exp2);
+            n = peek();
         }
 
         return exp;
@@ -591,46 +935,109 @@
 
     Exp P()
     {
-        Token next = lexer.next();
-        if (auto op = unary(next.type))
-            return action.actOnUnaryOp(next, parseExpression(op.prec));
-        else if (next.type == Tok.OpenParentheses)
+        Token n = next();
+        if (auto op = unary(n.type))
+            return action.actOnUnaryOp(n, parseExpression(op.prec));
+        else if (n.type == Tok.OpenParentheses)
         {
             auto e = parseExpression(0);
             require(Tok.CloseParentheses);
             return e;
         }
-        else if (next.type == Tok.Identifier)
+        else if (n.type == Tok.Identifier)
         {
-            Exp value = action.actOnIdentifierExp(Id(next));
+            Exp value = action.actOnIdentifierExp(Id(n));
             Exp iden = parsePostfixExp(value);
-            switch(lexer.peek.type)
+            switch(peek.type)
             {
                 case Tok.OpenParentheses:
-                    Token lp = lexer.next;
+                    Token lp = next();
                     SmallArray!(Exp, 8) args;
-                    while(lexer.peek.type != Tok.CloseParentheses)
+                    while(peek.type != Tok.CloseParentheses)
                     {
-                        if(lexer.peek.type == Tok.Comma)
-                            lexer.next;
+                        if(peek.type == Tok.Comma)
+                            next();
                         args ~= parseExpression();
                     }
 
-                    Token rp = lexer.next();
+                    Token rp = next();
                     return action.actOnCallExpr(iden, lp, args.unsafe(), rp);
 
                 default:
                     return iden;
             }
         }
-        else if (next.type == Tok.Cast)
-            return parseCast(next);
-        else if (next.type == Tok.Integer)
-            return action.actOnNumericConstant(next);
-        else if (next.type == Tok.String)
-            return action.actOnStringExp(next);
+        else if (n.type == Tok.Null)
+            return action.actOnNullExpr(n.location);
+        else if (n.type == Tok.Cast)
+            return parseCast(n);
+        else if (n.type == Tok.Integer)
+            return action.actOnNumericConstant(n);
+        else if (n.type == Tok.String)
+            return action.actOnStringExp(n);
+        else if (n.type == Tok.OpenBracket)
+        {
+            // Array literals
+            Exp[] exps;
+            exps ~= parseExpression();
+
+            while (isa(Tok.Comma))
+            {
+                next();
+
+                if (isa(Tok.CloseBracket))
+                    break;
+                exps ~= parseExpression();
+            }
+            scope e = require(Tok.CloseBracket);
+            return action.actOnArrayLiteralExpr(exps, n.location, e.location);
+        }
+        else if (n.type == Tok.New)
+        {
+            Exp[] allocator_args;
+            Exp[] constructor_args;
+
+            if ( isa(Tok.OpenParentheses))
+            {
+                next(); // Remove OpenParentheses
 
-        messages.report(ExpectedExp, next.location)
+                if ( !isa(Tok.CloseParentheses ) )
+                {
+                    allocator_args ~= parseExpression;
+                
+                    while ( isa(Tok.Comma) )
+                    {
+                        next(); // Remove Comma 
+
+                        allocator_args ~= parseExpression;
+                    }
+                }
+                require(Tok.CloseParentheses);
+            }
+
+            auto type = parseType;
+
+            if ( isa(Tok.OpenParentheses))
+            {
+                next(); // Remove OpenParentheses
+
+                if ( !isa(Tok.CloseParentheses ) )
+                {
+                    constructor_args ~= parseExpression;
+
+                    while ( isa(Tok.Comma) )
+                    {
+                        next(); // Remove Comma 
+
+                        constructor_args ~= parseExpression;
+                    }
+                }
+                require(Tok.CloseParentheses);
+            }
+            return action.actOnNewExpr(type, allocator_args, constructor_args);
+        }
+
+        messages.report(ExpectedExp, n.location)
             .fatal(ExitLevel.Parser);
         return null;
     }
@@ -638,13 +1045,13 @@
     Exp parseCast(ref Token _cast)
     {
         require(Tok.OpenParentheses);
-        auto next = lexer.next;
-        if(!next.isBasicType && !next.isIdentifier)
-            messages.report(ExpectedCastType, next.location);
-        
+        auto n = next();
+        if(!n.isBasicType && !n.isIdentifier)
+            messages.report(ExpectedCastType, n.location);
+
         require(Tok.CloseParentheses);
         auto exp = P();
-        return action.actOnCastExpr(_cast, Id(next), exp);
+        return action.actOnCastExpr(_cast, Id(n), exp);
     }
 
     struct UnOp
@@ -654,9 +1061,10 @@
     }
 
     static const UnOp[] _unary =
-    [
+        [
         {Tok.Minus, 4},
-        {Tok.Star, 4}
+        {Tok.Star, 4},
+        {Tok.And, 4}
     ];
     UnOp* unary(Tok t)
     {
@@ -675,8 +1083,28 @@
     }
 
     static const BinOp[] _binary =
-    [
-        {Tok.Assign,    1, false, Operator.Assign},
+        [
+        {Tok.Assign,        1, false, Operator.Assign},
+        {Tok.PlusAssign,    1, false, Operator.AddAssign},
+        {Tok.MinusAssign,   1, false, Operator.SubAssign},
+        {Tok.StarAssign,    1, false, Operator.MulAssign},
+        {Tok.SlashAssign,   1, false, Operator.DivAssign},
+        {Tok.PercentAssign, 1, false, Operator.ModAssign},
+
+        // =, += etc. 1
+        // (need special-case for the ternary operator at this level)
+        // ||, 2
+        // &&, 3
+        // |, 4
+        // &, 5
+        // ^, 6
+        // ==, !=, is, !is, 7
+        // <, <= etc, 7
+        // in, 7
+        // <<, >>, >>>, 8
+        // +, -, ~, 9
+        // *, /, %, 10
+        // unary operators here
 
         {Tok.Eq,        2, true, Operator.Eq},
         {Tok.Ne,        2, true, Operator.Ne},
@@ -691,7 +1119,11 @@
 
         {Tok.Star,      5, true, Operator.Mul},
         {Tok.Slash,     5, true, Operator.Div},
-        {Tok.Percent,   5, true, Operator.Mod}
+        {Tok.Percent,   5, true, Operator.Mod},
+
+        {Tok.LeftShift,             8, true, Operator.LeftShift},
+        {Tok.RightShift,            8, true, Operator.RightShift},
+        {Tok.UnsignedRightShift,    8, true, Operator.UnsignedRightShift}
     ];
     BinOp* binary(Tok t)
     {
@@ -705,27 +1137,53 @@
 
     Token require(Tok t)
     {
-        if (lexer.peek().type != t)
-            messages.report(UnexpectedTokSingle, lexer.peek.location)
-                .arg(lexer.peek.getType)
-                .arg(t);
-        return lexer.next();
+        if (!isa(t))
+            if(isa(Tok.EOF))
+                messages.report(UnexpectedEOF,
+                    [lexer.last.asRange][], [])
+                    .arg(lexer.last.get(sm))
+                    .fatal(ExitLevel.Parser);
+            else
+                messages.report(UnexpectedTokSingle, peek.location)
+                    .arg(peek.get(sm))
+                    .arg(typeToString[t]);
+        return next();
     }
 
     bool skip(Tok t)
     {
-        if (lexer.peek().type != t)
+        if (peek().type != t)
             return false;
-        lexer.next();
+        next();
         return true;
     }
 
-    bool on_a(Tok t)
+    bool isa(Tok t, int i = 0)
     {
-        return lexer.peek.type == t;
+        return peek(i).type == t;
     }
 
-    Lexer lexer;
+    bool isEOF(Token t)
+    {
+        if (isa(Tok.EOF))
+            messages.report(UnexpectedEOF, 
+                    [t.asRange][], [])
+                .arg(t.get(sm))
+                .fatal(ExitLevel.Parser);
+        return false;
+    }
+
+    Token next()
+    {
+        return lexer.next;
+    }
+
+    Token peek(int i = 0)
+    {
+        return lexer.peek(i);
+    }
+
+    Lexer lexer;   
     SourceManager sm;
 }
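
On the expression changes above (the extra assignment, shift and unary '&' operators): parseExpression is a precedence climber driven by the prec and leftAssoc fields of the _binary table. The toy below is a hypothetical, self-contained sketch in present-day D of that same loop; the single-character tokens and the names input, P and binary are simplified stand-ins for the lexer and the parser's real types. It shows why the minimum precedence is bumped only for left-associative operators: "b+c+e" then groups as "(b+c)+e" while "a=b=c" nests to the right.

// A toy precedence-climbing parser in the shape of Parser.parseExpression.
module precsketch;

import std.stdio;

struct BinOp { char op; int prec; bool leftAssoc; }

// Miniature _binary table: '=' is right-associative; '+' and '*' are left-associative.
immutable BinOp[] ops = [
    BinOp('=', 1, false),
    BinOp('+', 4, true),
    BinOp('*', 5, true)
];

immutable(BinOp)* binary(char c)
{
    foreach (ref o; ops)
        if (o.op == c)
            return &o;
    return null;
}

string input;   // extremely simplified "lexer": single-character tokens
size_t pos;

char peek() { return pos < input.length ? input[pos] : '\0'; }
char next() { return input[pos++]; }

// Primary expression: a single letter, standing in for P().
string P() { return [next()].idup; }

// Same loop as parseExpression: keep consuming operators that bind at least
// as tightly as the minimum precedence p.
string parseExpression(int p = 0)
{
    string exp = P();
    for (auto op = binary(peek()); op !is null && op.prec >= p; op = binary(peek()))
    {
        char c = next();
        // +1 for left-associative operators so the recursion refuses an
        // operator of the same precedence and grouping stays to the left;
        // right-associative '=' keeps the same minimum and nests rightwards.
        int q = op.leftAssoc ? op.prec + 1 : op.prec;
        exp = "(" ~ exp ~ c ~ parseExpression(q) ~ ")";
    }
    return exp;
}

void main()
{
    input = "a=b+c*d+e";
    pos = 0;
    writeln(parseExpression());   // prints (a=((b+(c*d))+e))
}
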