# HG changeset patch
# User aziz
# Date 1183889640 0
# Node ID 511a1aa2589639b8969ee5cd83c30c5a5f2b841e
# Parent 6e8b67ae15b7536f5ee231924ad635902208b03a
- Added reportErrors member to Lexer. Moved peek() down and rewrote it a bit making use of reportErrors. error() uses reportErrors too.
- Started implementation of parseDeclaration(), parseAttributeSpecifier() and parseImportDeclaration().

diff -r 6e8b67ae15b7 -r 511a1aa25896 trunk/src/Lexer.d
--- a/trunk/src/Lexer.d	Sat Jul 07 21:34:02 2007 +0000
+++ b/trunk/src/Lexer.d	Sun Jul 08 10:14:00 2007 +0000
@@ -35,6 +35,8 @@
 
   Information[] errors;
 
+  bool reportErrors;
+
   Identifier[string] idtable;
 
   this(string text, string fileName)
@@ -50,7 +52,7 @@
 
     this.p = this.text.ptr;
     this.end = this.p + this.text.length;
-
+    this.reportErrors = true;
     loadKeywords();
   }
 
@@ -531,16 +533,6 @@
     }
   }
 
-  void peek(ref Token t)
-  {
-    char* tmp = p;
-    uint len = errors.length;
-    scan(t);
-    p = tmp;
-    if (errors.length != len)
-      errors = errors[0..len];
-  }
-
   void scanNormalStringLiteral(ref Token t)
   {
     assert(*p == '"');
@@ -1434,9 +1426,27 @@
       idtable[k.str] = k;
   }
 
+  void peek(ref Token t)
+  {
+    // Because peeked tokens are not stored in a linked
+    // list we need to switch off error reporting
+    // so as to avoid getting the same error more than once.
+    reportErrors = false;
+    char* save = p;
+    if (t.end) // For successive peeks.
+    {
+      p = t.end;
+      assert(text.ptr <= p && p < end);
+    }
+    scan(t);
+    p = save;
+    reportErrors = true;
+  }
+
   void error(MID id, ...)
   {
-    errors ~= new Information(InfoType.Lexer, id, loc, arguments(_arguments, _argptr));
+    if (reportErrors)
+      errors ~= new Information(InfoType.Lexer, id, loc, arguments(_arguments, _argptr));
   }
 
   public TOK nextToken()
diff -r 6e8b67ae15b7 -r 511a1aa25896 trunk/src/Parser.d
--- a/trunk/src/Parser.d	Sat Jul 07 21:34:02 2007 +0000
+++ b/trunk/src/Parser.d	Sun Jul 08 10:14:00 2007 +0000
@@ -65,7 +65,22 @@
     {}
     nT();
   }
-
+/*
+  ReturnType try_(ReturnType)(lazy ReturnType parseMethod, out failed)
+  {
+    auto len = errors.length;
+    // lx.saveCheckPoint();
+    auto result = parseMethod();
+    if (errors.length != len)
+    {
+      // lx.revertCheckPoint();
+      errors = errors[0..len];
+      failed = true;
+    }
+    // lx.removeCheckPoint();
+    return result;
+  }
+*/
   /++++++++++++++++++++++++++++++
   + Declaration parsing methods +
   ++++++++++++++++++++++++++++++/
@@ -111,9 +126,64 @@
 
   Declaration parseDeclaration()
   {
+    Declaration decl;
+    switch (token.type)
+    {
+    case T.Extern,
+         T.Align,
+         T.Pragma,
+         T.Deprecated,
+         T.Private,
+         T.Package,
+         T.Protected,
+         T.Public,
+         T.Export,
+         //T.Static,
+         T.Final,
+         T.Override,
+         T.Abstract,
+         T.Const,
+         T.Auto,
+         T.Scope:
+    case_AttributeSpecifier:
+      decl = parseAttributeSpecifier();
+      break;
+    case T.Static:
+      Token t;
+      lx.peek(t);
+      if (t.type != T.Import)
+        goto case_AttributeSpecifier;
+      //goto case T.Import;
+    case T.Import:
+      parseImportDeclaration();
+    case T.Module:
+      // Error: module is optional and can only appear once at the top of the source file.
+    default:
+    }
     return null;
   }
 
+  Declaration parseAttributeSpecifier()
+  {
+    // Attribute :
+    // Attribute DeclarationBlock
+    return null;
+  }
+
+  Declaration parseImportDeclaration()
+  {
+    Declaration decl;
+    bool isStatic;
+
+    if (token.type == T.Static)
+    {
+      isStatic = true;
+      nT();
+    }
+
+    return decl;
+  }
+
   /+++++++++++++++++++++++++++++
   + Expression parsing methods +
   +++++++++++++++++++++++++++++/
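
The rewritten peek() switches reportErrors off while it scans ahead, so a malformed token is diagnosed only once, by the real scan, and not again by every lookahead over the same input. The following standalone D sketch illustrates that pattern; ToyLexer and its members are simplified, hypothetical stand-ins rather than dil's actual Lexer API.

import std.stdio;

class ToyLexer
{
  string text;
  size_t p;                 // current scan position
  string[] errors;          // collected diagnostics
  bool reportErrors = true;

  this(string text) { this.text = text; }

  // Scans one character as a "token" and complains about '?'.
  char scan()
  {
    char c = p < text.length ? text[p++] : '\0';
    if (c == '?')
      error("invalid character '?'");
    return c;
  }

  // Looks ahead without consuming input. Error reporting is switched
  // off so the peek does not duplicate the diagnostic that the real
  // scan of the same token will produce later.
  char peek()
  {
    reportErrors = false;
    auto save = p;
    auto c = scan();
    p = save;
    reportErrors = true;
    return c;
  }

  void error(string msg)
  {
    if (reportErrors)
      errors ~= msg;
  }
}

void main()
{
  auto lx = new ToyLexer("a?b");
  lx.scan();            // consumes 'a'
  lx.peek();            // sees '?' but reports nothing
  lx.scan();            // consumes '?' and reports it exactly once
  writeln(lx.errors);   // ["invalid character '?'"]
}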
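
The commented-out try_() template sketches speculative parsing: run a parse attempt, and if it appended any new diagnostics, discard them and tell the caller the attempt failed (the lexer checkpoint calls are still placeholders). Below is a compilable approximation of that idea; the free-standing errors array, the added bool type on the out parameter, and the dummy parseNumber() routine are assumptions made for illustration.

import std.stdio;

string[] errors;  // stand-in for the parser's diagnostic list

// Evaluates a parse attempt lazily; if it produced new errors,
// rolls the diagnostic list back and reports failure via 'failed'.
T try_(T)(lazy T parseMethod, out bool failed)
{
  auto len = errors.length;
  auto result = parseMethod();   // the attempt runs here
  if (errors.length != len)
  {
    errors = errors[0 .. len];   // drop errors from the failed attempt
    failed = true;
  }
  return result;
}

// Dummy parse routine that always fails, for demonstration only.
int parseNumber()
{
  errors ~= "expected a number";
  return 0;
}

void main()
{
  bool failed;
  try_(parseNumber(), failed);
  writeln(failed, " ", errors.length);  // prints: true 0
}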