Mercurial > projects > dang
comparison src/parser/Parser.d @ 207:e0551773a005
Added the correct version.
author | Anders Johnsen <skabet@gmail.com> |
---|---|
date | Tue, 12 Aug 2008 18:19:34 +0200 |
parents | d3c148ca429b |
children | 42e663451371 |
comparison
equal
deleted
inserted
replaced
206:d3c148ca429b | 207:e0551773a005 |
---|---|
3 import lexer.Lexer, | 3 import lexer.Lexer, |
4 lexer.Token; | 4 lexer.Token; |
5 | 5 |
6 import parser.Action; | 6 import parser.Action; |
7 | 7 |
8 import basic.Message; | 8 import basic.Message, |
9 basic.Attribute; | |
9 | 10 |
10 import basic.SmallArray, | 11 import basic.SmallArray, |
11 basic.SourceManager; | 12 basic.SourceManager; |
12 | 13 |
13 import tango.io.Stdout, | 14 import tango.io.Stdout, |
32 this.sm = sm; | 33 this.sm = sm; |
33 this.lexer = lexer; | 34 this.lexer = lexer; |
34 this.action = act; | 35 this.action = act; |
35 | 36 |
36 Module m; | 37 Module m; |
37 if (lexer.peek.type == Tok.Module) | 38 if (peek.type == Tok.Module) |
38 { | 39 { |
39 Token _module = lexer.next; | 40 Token _module = next(); |
40 ModuleName name = parseModuleName(); | 41 ModuleName name = parseModuleName(); |
41 m = action.actOnModule(_module, sm.getText(name.asRange())); | 42 m = action.actOnModule(_module, sm.getText(name.asRange())); |
42 require(Tok.Seperator); | 43 require(Tok.Seperator); |
43 } | 44 } |
44 else | 45 else |
45 { | 46 { |
46 SLoc loc = lexer.peek.location; | 47 SLoc loc = peek.location; |
47 m = action.actOnImplicitModule(loc, sm.getFile(loc)); | 48 m = action.actOnImplicitModule(loc, sm.getFile(loc)); |
48 } | 49 } |
49 | 50 |
50 while (lexer.peek.type != Tok.EOF) | 51 auto nes = parseAttributeInit; |
51 foreach (d; parseDeclDef()) | 52 while( !isa(Tok.EOF) ) |
53 { | |
54 while ( peek.isAttribute ) | |
55 nes ~= parseAttribute(nes[$-1]); | |
56 | |
57 foreach (d; parseDeclDef(nes[$-1].a)) | |
52 action.actOnModuleDecl(m, d); | 58 action.actOnModuleDecl(m, d); |
53 | 59 |
60 nes = parseAttributeScope(nes); | |
61 } | |
62 | |
54 return m; | 63 return m; |
55 } | 64 } |
56 | 65 |
57 private: | 66 private: |
58 Decl[] parseDeclDef() | 67 Decl[] parseDeclDef(Attribute a) |
59 { | 68 { |
60 Token t = lexer.peek; | 69 if ( isa (Tok.Import) ) |
61 if (t.type == Tok.Import) | |
62 return parseImports(); | 70 return parseImports(); |
63 else | 71 |
64 return [parseDecl()]; | 72 return [parseDecl(a)]; |
65 } | 73 } |
66 | 74 |
67 Decl parseDecl() | 75 Decl parseDecl(Attribute att) |
68 { | 76 { |
69 Token t = lexer.peek; | 77 switch(peek.type) |
70 | 78 { |
71 if (t.isBasicType || t.isIdentifier) | 79 case Tok.Struct: |
72 { | 80 Id type = Id(next()); |
73 Id type; | 81 Id iden = Id(require(Tok.Identifier)); |
74 Id iden; | 82 return parseStruct(type, iden, att); |
75 int len = peekParseType; | 83 |
76 if(lexer.peek(len).type == Tok.Identifier && len != 0) | 84 case Tok.Class: |
77 { | 85 Id type = Id(next()); |
78 type = parseType; | 86 Id iden = Id(require(Tok.Identifier)); |
79 parseDeclAfterInvalidType: | 87 return parseClass(type, iden, att); |
80 iden = Id(require(Tok.Identifier)); | 88 |
81 Token next = lexer.peek(); | 89 case Tok.Interface: |
82 if (next.type == Tok.Seperator) | 90 Id type = Id(next()); |
91 Id iden = Id(require(Tok.Identifier)); | |
92 return parseInterface(type, iden, att); | |
93 | |
94 case Tok.Alias: | |
95 next(); | |
96 auto decl = parseDecl(Attribute()); | |
97 return action.actOnAliasDecl(decl, att); | |
98 | |
99 case Tok.Identifier: | |
100 Id type = parseType; | |
101 Id iden = Id(require(Tok.Identifier)); | |
102 | |
103 switch(peek.type) | |
83 { | 104 { |
84 Token sep = lexer.next(); | 105 case Tok.Seperator: |
85 return action.actOnDeclarator(type, iden, null); | 106 Token sep = next(); |
107 return action.actOnDeclarator(type, iden, null, att); | |
108 | |
109 case Tok.Assign: | |
110 Token assign = next(); | |
111 Exp exp = parseExpression(); | |
112 require(Tok.Seperator); | |
113 return action.actOnDeclarator(type, iden, exp, att); | |
114 | |
115 case Tok.OpenParentheses: | |
116 return parseFunc(type, iden, att); | |
117 | |
118 default: | |
119 auto n1 = next(); | |
120 isEOF(type.tok); | |
121 messages.report(UnexpectedTok, n1.location).arg(n1.get(sm)); | |
122 return action.actOnDeclarator(type, iden, null, att); | |
86 } | 123 } |
87 else if (next.type == Tok.Assign) | 124 messages.report(InvalidDeclType, peek.location) |
88 { | 125 .arg(sm.getText(peek.asRange)); |
89 Token assign = lexer.next(); | 126 |
90 Exp exp = parseExpression(); | 127 default: |
91 require(Tok.Seperator); | 128 if (peek.isBasicType) |
92 return action.actOnDeclarator(type, iden, exp); | 129 goto case Tok.Identifier; |
93 } | 130 |
94 else if (next.type == Tok.OpenParentheses) | 131 messages.report(UnexpectedTok, peek.location) |
95 return parseFunc(type, iden); | 132 .arg(sm.getText(peek.asRange)); |
96 else | 133 |
97 messages.report(UnexpectedTok, next.location).arg(next.getType); | 134 next(); |
98 } | 135 return null; |
99 t = lexer.peek(len); | 136 } |
100 messages.report(InvalidDeclType, t.location) | 137 messages.report(UnexpectedTok, peek.location) |
101 .arg(sm.getText(t.asRange)); | 138 .arg(peek.get(sm)) |
102 while(len--) | |
103 lexer.next; | |
104 while(lexer.peek.type != Tok.Identifier) | |
105 lexer.next; | |
106 type = Id(lexer.peek); | |
107 goto parseDeclAfterInvalidType; | |
108 } | |
109 else if (t.type == Tok.Struct) | |
110 { | |
111 Id type = Id(lexer.next); | |
112 Id iden = Id(require(Tok.Identifier)); | |
113 | |
114 return parseStruct(type, iden); | |
115 } | |
116 messages.report(UnexpectedTok, t.location) | |
117 .arg(t.getType) | |
118 .arg(Tok.Identifier) | 139 .arg(Tok.Identifier) |
119 .fatal(ExitLevel.Parser); | 140 .fatal(ExitLevel.Parser); |
141 } | |
142 | |
143 Extern parseLinkageType() | |
144 { | |
145 Extern e = Extern.D; | |
146 if(peek(1).type != Tok.OpenParentheses) | |
147 return e; | |
148 | |
149 next(); next(); | |
150 | |
151 Token t = require(Tok.Identifier); | |
152 | |
153 switch(sm.getText(t.asRange)) | |
154 { | |
155 case "C": | |
156 if (peek(0).type == Tok.Plus && | |
157 peek(1).type == Tok.Plus) | |
158 e = Extern.CPlusPlus; | |
159 else | |
160 e = Extern.C; | |
161 break; | |
162 case "D": | |
163 break; | |
164 case "Windows": | |
165 e = Extern.Windows; | |
166 break; | |
167 case "Pascal": | |
168 e = Extern.Pascal; | |
169 break; | |
170 case "System": | |
171 e = Extern.System; | |
172 break; | |
173 default: | |
174 messages.report(UnexpectedLinkType, t.location); | |
175 } | |
176 | |
177 if (!isa(Tok.CloseParentheses)) | |
178 messages.report(UnexpectedTokSingle, peek.location); | |
179 | |
180 return e; | |
120 } | 181 } |
121 | 182 |
122 /** | 183 /** |
123 Parse a series of imports belonging to a single import token. | 184 Parse a series of imports belonging to a single import token. |
124 */ | 185 */ |
127 Token _import = require(Tok.Import); | 188 Token _import = require(Tok.Import); |
128 SmallArray!(Decl) res; | 189 SmallArray!(Decl) res; |
129 void addToRes(Decl d) { res ~= d; } | 190 void addToRes(Decl d) { res ~= d; } |
130 | 191 |
131 bool done = false; | 192 bool done = false; |
132 while (!done && !on_a(Tok.Seperator)) | 193 while (!done && !isa(Tok.Seperator)) |
133 { | 194 { |
134 ModuleName mod = parseModuleName(); | 195 ModuleName mod = parseModuleName(); |
135 Token tok = lexer.peek; | 196 Token tok = peek; |
136 switch (tok.type) | 197 switch (tok.type) |
137 { | 198 { |
138 case Tok.Comma: | 199 case Tok.Comma: |
139 // import A, B.C; | 200 // import A, B.C; |
140 // parse another module-name | 201 // parse another module-name |
141 lexer.next(); | 202 next(); |
142 res ~= action.actOnImport(_import, mod, null); | 203 res ~= action.actOnImport(_import, mod, null); |
143 break; | 204 break; |
144 case Tok.Assign: | 205 case Tok.Assign: |
145 // import B = A.A; | 206 // import B = A.A; |
146 // ^- must be a single identifier | 207 // ^- must be a single identifier |
150 SLoc loc = mod.packages[0].tok.location; | 211 SLoc loc = mod.packages[0].tok.location; |
151 messages.report(RenameMustBeSingleIdent, loc); | 212 messages.report(RenameMustBeSingleIdent, loc); |
152 } | 213 } |
153 //if (isStatic) | 214 //if (isStatic) |
154 // error("Static imports cannot be renamed"); | 215 // error("Static imports cannot be renamed"); |
155 lexer.next(); | 216 next(); |
156 Id name = mod.id; | 217 Id name = mod.id; |
157 mod = parseModuleName(); | 218 mod = parseModuleName(); |
158 // create from mod and rename to `name` | 219 // create from mod and rename to `name` |
159 res ~= action.actOnImport(_import, mod, &name); | 220 res ~= action.actOnImport(_import, mod, &name); |
160 break; | 221 break; |
161 case Tok.Colon: | 222 case Tok.Colon: |
162 // import A : a; | 223 // import A : a; |
163 // selective imports, potentially import A : print = a | 224 // selective imports, potentially import A : print = a |
164 lexer.next(); | 225 next(); |
165 Decl d = action.actOnImport(_import, mod, null); | 226 Decl d = action.actOnImport(_import, mod, null); |
166 // do-while on a comma: | 227 // do-while on a comma: |
167 // add explicit symbol | 228 // add explicit symbol |
168 do | 229 do |
169 { | 230 { |
192 } | 253 } |
193 | 254 |
194 require(Tok.Seperator); | 255 require(Tok.Seperator); |
195 return res.safe(); | 256 return res.safe(); |
196 Lerror: | 257 Lerror: |
197 while (!on_a (Tok.Seperator)) | 258 while (!isa (Tok.Seperator)) |
198 lexer.next(); | 259 next(); |
199 return res.safe(); | 260 return res.safe(); |
261 } | |
262 | |
263 /** | |
264 Parse interface | |
265 */ | |
266 Decl parseInterface(Id type, Id iden, Attribute att) | |
267 { | |
268 auto decl = action.actOnDeclarator(type, iden, null, att); | |
269 | |
270 if (peek.type == Tok.Colon) | |
271 // SuperInterfaces | |
272 { | |
273 next(); // Remove colon. | |
274 | |
275 Id identifier; | |
276 | |
277 // The identifier | |
278 identifier = Id(require(Tok.Identifier)); | |
279 | |
280 action.actOnInterfaceBaseClass(decl, identifier); | |
281 | |
282 // We should now have an optional list of items, each starting ',' | |
283 while (peek.type == Tok.Comma) | |
284 { | |
285 next(); // Remove comma | |
286 | |
287 // The identifier | |
288 identifier = Id(require(Tok.Identifier)); | |
289 | |
290 action.actOnInterfaceBaseClass(decl, identifier); | |
291 } | |
292 } | |
293 | |
294 require(Tok.OpenBrace); | |
295 | |
296 auto nes = parseAttributeInit; | |
297 while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) ) | |
298 { | |
299 while ( peek.isAttribute ) | |
300 nes ~= parseAttribute(nes[$-1]); | |
301 | |
302 auto m_decl = parseDecl(nes[$-1].a); | |
303 action.actOnInterfaceMember(decl, m_decl); | |
304 | |
305 nes = parseAttributeScope(nes); | |
306 } | |
307 | |
308 require(Tok.CloseBrace); | |
309 | |
310 return decl; | |
311 } | |
312 | |
313 /** | |
314 Parse class | |
315 */ | |
316 Decl parseClass(Id type, Id iden, Attribute att) | |
317 { | |
318 auto decl = action.actOnDeclarator(type, iden, null, att); | |
319 | |
320 if (peek.type == Tok.Colon) | |
321 // BaseClassList - Super class and interfaces(in that order) | |
322 { | |
323 next(); // Remove colon. | |
324 | |
325 Token protection; | |
326 Id identifier; | |
327 | |
328 // First we expect an optional protection level. | |
329 if (peek.isBaseClassProtection) | |
330 protection = next(); | |
331 // Then the identifier | |
332 identifier = Id(require(Tok.Identifier)); | |
333 | |
334 action.actOnClassBaseClass(decl, identifier); | |
335 | |
336 // We should now have an optional list of items, each starting ',' | |
337 while (peek.type == Tok.Comma) | |
338 { | |
339 next(); // Remove comma | |
340 | |
341 // First we expect an optional protection level. | |
342 if (peek.isBaseClassProtection) | |
343 protection = next(); | |
344 // Then the identifier | |
345 identifier = Id(require(Tok.Identifier)); | |
346 | |
347 action.actOnClassBaseClass(decl, identifier); | |
348 } | |
349 } | |
350 | |
351 require(Tok.OpenBrace); | |
352 | |
353 auto nes = parseAttributeInit; | |
354 while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) ) | |
355 { | |
356 while ( peek.isAttribute ) | |
357 nes ~= parseAttribute(nes[$-1]); | |
358 | |
359 switch(peek.type) | |
360 { | |
361 case Tok.This: | |
362 auto id = Id(next); | |
363 auto m_decl = parseFunc(iden, id, nes[$-1].a); | |
364 action.actOnClassMember(decl, m_decl); | |
365 break; | |
366 | |
367 default: | |
368 auto m_decl = parseDecl(nes[$-1].a); | |
369 action.actOnClassMember(decl, m_decl); | |
370 } | |
371 | |
372 nes = parseAttributeScope(nes); | |
373 } | |
374 | |
375 require(Tok.CloseBrace); | |
376 | |
377 return decl; | |
200 } | 378 } |
201 | 379 |
202 /** | 380 /** |
203 Parse struct | 381 Parse struct |
204 */ | 382 */ |
205 Decl parseStruct(Id type, Id iden) | 383 Decl parseStruct(Id type, Id iden, Attribute att) |
206 { | 384 { |
207 auto decl = action.actOnDeclarator(type, iden, null); | 385 auto decl = action.actOnDeclarator(type, iden, null, att); |
208 | 386 |
209 require(Tok.OpenBrace); | 387 require(Tok.OpenBrace); |
210 | 388 |
211 while(lexer.peek.isBasicType || lexer.peek.isIdentifier) | 389 auto nes = parseAttributeInit; |
212 { | 390 while( !isa(Tok.EOF) && !isa(Tok.CloseBrace) ) |
213 auto m_decl = parseDecl(); | 391 { |
392 while ( peek.isAttribute ) | |
393 nes ~= parseAttribute(nes[$-1]); | |
394 | |
395 auto m_decl = parseDecl(nes[$-1].a); | |
214 action.actOnStructMember(decl, m_decl); | 396 action.actOnStructMember(decl, m_decl); |
215 /* Id var_type = Id(lexer.next); | 397 |
216 Id var_iden = Id(require(Tok.Identifier)); | 398 nes = parseAttributeScope(nes); |
217 Token next = lexer.peek(); | |
218 if (next.type == Tok.Seperator) | |
219 { | |
220 Token sep = lexer.next(); | |
221 action.actOnStructMember(decl, var_type, var_iden, null); | |
222 continue; | |
223 } | |
224 else if (next.type == Tok.Assign) | |
225 { | |
226 Token assign = lexer.next(); | |
227 Exp exp = parseExpression(); | |
228 require(Tok.Seperator); | |
229 action.actOnStructMember(decl, var_type, var_iden, exp); | |
230 continue; | |
231 } | |
232 messages.report(UnexpectedTok, next.location).arg(next.getType);*/ | |
233 } | 399 } |
234 | 400 |
235 require(Tok.CloseBrace); | 401 require(Tok.CloseBrace); |
236 | 402 |
237 return decl; | 403 return decl; |
238 } | 404 } |
239 | 405 |
406 Att[] parseAttributeInit() | |
407 { | |
408 Att[] nes; | |
409 nes ~= Att(); | |
410 nes[0].nested = Scope; | |
411 return nes; | |
412 } | |
413 | |
414 Att[] parseAttributeScope(Att[] nes) | |
415 { | |
416 while ( nes[$-1].nested == Single ) | |
417 nes.length = nes.length - 1; | |
418 | |
419 while ( isa(Tok.CloseBrace) && nes.length > 1) | |
420 { | |
421 while ( nes.length > 1 ) | |
422 { | |
423 if( nes[$-1].nested == Scope ) | |
424 { | |
425 nes.length = nes.length - 1; | |
426 next(); | |
427 break; | |
428 } | |
429 nes.length = nes.length - 1; | |
430 } | |
431 } | |
432 | |
433 return nes; | |
434 } | |
435 | |
436 Att parseAttribute(Att last) | |
437 { | |
438 Att _parseAttribute(Att last) | |
439 { | |
440 Att a = last; | |
441 a.nested = Single; | |
442 | |
443 switch(peek.type) | |
444 { | |
445 case Tok.Public: | |
446 a.a.setProtection(Protection.Public); | |
447 break; | |
448 case Tok.Private: | |
449 a.a.setProtection(Protection.Private); | |
450 break; | |
451 case Tok.Package: | |
452 a.a.setProtection(Protection.Package); | |
453 break; | |
454 case Tok.Protected: | |
455 a.a.setProtection(Protection.Protected); | |
456 break; | |
457 case Tok.Export: | |
458 a.a.setProtection(Protection.Export); | |
459 break; | |
460 case Tok.Static: | |
461 a.a.setStatic; | |
462 break; | |
463 case Tok.Final: | |
464 a.a.setFinal; | |
465 break; | |
466 case Tok.Const: | |
467 a.a.setConst; | |
468 break; | |
469 case Tok.Abstract: | |
470 a.a.setAbstract; | |
471 break; | |
472 case Tok.Override: | |
473 a.a.setOverride; | |
474 break; | |
475 case Tok.Deprecated: | |
476 a.a.setDeprecated; | |
477 break; | |
478 case Tok.Auto: | |
479 a.a.setAuto; | |
480 break; | |
481 case Tok.Extern: | |
482 Extern e = parseLinkageType; | |
483 a.a.setExtern(e); | |
484 break; | |
485 } | |
486 next(); | |
487 | |
488 return a; | |
489 } | |
490 | |
491 Att a = _parseAttribute(last); | |
492 | |
493 while (peek.isAttribute) | |
494 { | |
495 a = parseAttribute(a); | |
496 } | |
497 | |
498 if (peek.type == Tok.Colon) | |
499 { | |
500 a.nested = All; | |
501 next(); | |
502 } | |
503 else if (peek.type == Tok.OpenBrace) | |
504 { | |
505 a.nested = Scope; | |
506 next(); | |
507 } | |
508 | |
509 return a; | |
510 } | |
511 | |
512 enum : uint | |
513 { | |
514 Single, | |
515 Scope, | |
516 All | |
517 } | |
518 | |
519 struct Att | |
520 { | |
521 Attribute a; | |
522 uint nested; | |
523 } | |
524 | |
240 /** | 525 /** |
241 Parse statements. | 526 Parse statements. |
242 | 527 |
243 This is the place to attack! | 528 This is the place to attack! |
244 */ | 529 */ |
245 Stmt parseStatement() | 530 Stmt parseStatement() |
246 { | 531 { |
247 Token t = lexer.peek; | 532 switch (peek.type) |
248 | |
249 switch(t.type) | |
250 { | 533 { |
251 case Tok.Return: | 534 case Tok.Return: |
252 Token ret = lexer.next; | 535 Token ret = next(); |
253 Exp exp; | 536 Exp exp; |
254 if (lexer.peek.type != Tok.Seperator) | 537 if (peek.type != Tok.Seperator) |
255 exp = parseExpression(); | 538 exp = parseExpression(); |
256 require(Tok.Seperator); | 539 require(Tok.Seperator); |
257 return action.actOnReturnStmt(ret, exp); | 540 return action.actOnReturnStmt(ret, exp); |
258 | 541 |
259 /* | |
260 if (cond) | |
261 single statement | compound statement | |
262 [else | |
263 single statement | compound statement] | |
264 */ | |
265 case Tok.If: | 542 case Tok.If: |
266 Token _if = lexer.next(); | 543 Token _if = next(); |
267 | 544 |
268 require(Tok.OpenParentheses); | 545 require(Tok.OpenParentheses); |
269 Exp cond = parseExpression(); | 546 Exp cond = parseExpression(); |
270 require(Tok.CloseParentheses); | 547 require(Tok.CloseParentheses); |
271 | 548 |
273 | 550 |
274 // if there is no else part we use the if as token, to have | 551 // if there is no else part we use the if as token, to have |
275 // something than can be passed along | 552 // something than can be passed along |
276 Token _else = _if; | 553 Token _else = _if; |
277 Stmt elseB; | 554 Stmt elseB; |
278 if (lexer.peek.type == Tok.Else) | 555 if (peek.type == Tok.Else) |
279 { | 556 { |
280 _else = lexer.next; | 557 _else = next(); |
281 elseB = parseSingleOrCompoundStatement(); | 558 elseB = parseSingleOrCompoundStatement(); |
282 } | 559 } |
283 | |
284 return action.actOnIfStmt(_if, cond, thenB, _else, elseB); | 560 return action.actOnIfStmt(_if, cond, thenB, _else, elseB); |
285 | 561 |
286 /* | |
287 while (cond) | |
288 single statement | compound statement | |
289 */ | |
290 case Tok.While: | 562 case Tok.While: |
291 Token _while = lexer.next; | 563 Token _while = next(); |
292 require(Tok.OpenParentheses); | 564 require(Tok.OpenParentheses); |
293 Exp cond = parseExpression(); | 565 Exp cond = parseExpression(); |
294 require(Tok.CloseParentheses); | 566 require(Tok.CloseParentheses); |
295 Stmt bodyStmt = parseSingleOrCompoundStatement(); | 567 Stmt bodyStmt = parseSingleOrCompoundStatement(); |
296 return action.actOnWhileStmt(_while, cond, bodyStmt); | 568 return action.actOnWhileStmt(_while, cond, bodyStmt); |
297 | 569 |
298 /* | 570 case Tok.For: |
299 One of four things: | 571 Token _for = next(); |
300 A declaration of a function/variable `type id ...` | 572 require(Tok.OpenParentheses); |
301 A direct assignment `id = exp;` | 573 Stmt init; |
302 An indirect assignment `id.id = exp` | 574 if ( isa(Tok.Seperator)) |
303 Some sort of free standing expression | 575 require(Tok.Seperator); |
304 | 576 else |
305 The assignments should be handled as binary expressions? | 577 init = parseStatement(); |
306 */ | 578 |
579 Exp cond; | |
580 if ( !isa(Tok.Seperator)) | |
581 cond = parseExpression(); | |
582 require(Tok.Seperator); | |
583 | |
584 Exp incre; | |
585 if ( !isa(Tok.CloseParentheses)) | |
586 incre = parseExpression(); | |
587 require(Tok.CloseParentheses); | |
588 | |
589 Stmt bodyStmt = parseSingleOrCompoundStatement(); | |
590 return action.actOnForStmt(_for, init, cond, incre, bodyStmt); | |
591 | |
592 case Tok.Switch: | |
593 auto t = next(); | |
594 require(Tok.OpenParentheses); | |
595 auto target = parseExpression(); | |
596 auto res = action.actOnStartOfSwitchStmt(t, target); | |
597 require(Tok.CloseParentheses); | |
598 require(Tok.OpenBrace); | |
599 while (true) | |
600 { | |
601 Stmt[] statements; | |
602 if (isa(Tok.Default)) | |
603 { | |
604 Token _default = next(); | |
605 require(Tok.Colon); | |
606 statements.length = 0; | |
607 while (peek.type != Tok.Case | |
608 && peek.type != Tok.Default | |
609 && peek.type != Tok.CloseBrace) | |
610 statements ~= parseStatement(); | |
611 action.actOnDefaultStmt(res, _default, statements); | |
612 continue; | |
613 } | |
614 | |
615 Token _case = peek; | |
616 if (_case.type != Tok.Case) | |
617 break; | |
618 next(); | |
619 | |
620 Exp[] literals; | |
621 do | |
622 { | |
623 Exp e = parseExpression(); | |
624 literals ~= e; | |
625 } | |
626 while (skip(Tok.Comma)); | |
627 require(Tok.Colon); | |
628 | |
629 while (peek.type != Tok.Case | |
630 && peek.type != Tok.Default | |
631 && peek.type != Tok.CloseBrace) | |
632 statements ~= parseStatement(); | |
633 | |
634 action.actOnCaseStmt(res, _case, literals, statements); | |
635 | |
636 if (peek.type == Tok.CloseBrace) | |
637 break; | |
638 } | |
639 require(Tok.CloseBrace); | |
640 return res; | |
641 | |
642 case Tok.Star: | |
643 auto exp = parseExpression(); | |
644 require(Tok.Seperator); | |
645 return action.actOnExprStmt(exp); | |
646 | |
307 case Tok.Identifier: | 647 case Tok.Identifier: |
308 Token iden = lexer.peek; | 648 // If it's a '*' it must be a method. Otherwise it won't give |
309 Token n = lexer.peek(1); | 649 // any sense. |
310 // Must be an decl, if we start with a basic type, or two | 650 |
311 // identifiers in a row | 651 if (isa(Tok.Function, 1) || |
312 if (iden.isBasicType() || iden.isIdentifier()) | 652 isa(Tok.Identifier, 1) || |
653 isa(Tok.Star, 1)) | |
313 { | 654 { |
314 if ( n.type == Tok.Star || n.type == Tok.OpenBracket) | 655 Attribute a; |
656 return action.actOnDeclStmt(parseDecl(a)); | |
657 } | |
658 | |
659 if (isa(Tok.OpenBracket, 1)) | |
660 { | |
661 int i = 1; | |
662 while (isa(Tok.OpenBracket, i) || | |
663 isa(Tok.Star, i) || | |
664 isa(Tok.Identifier, i)) | |
315 { | 665 { |
316 int len = peekParseType; | 666 if (isa(Tok.Identifier, i)) |
317 if(lexer.peek(len).type == Tok.Identifier && len != 0) | 667 return action.actOnDeclStmt(parseDecl(Attribute())); |
318 return action.actOnDeclStmt(parseVarDecl()); | 668 |
319 | 669 i++; |
320 Exp exp = parseExpression(); | 670 if (isa(Tok.Star,i-1)) |
321 require(Tok.Seperator); | 671 continue; |
322 return action.actOnExprStmt(exp); | 672 // Must be OpenBracket here.. |
673 | |
674 if (isa(Tok.Integer, i)) | |
675 i++; | |
676 else | |
677 if (isa(Tok.CloseBracket, i)) | |
678 return action.actOnDeclStmt(parseDecl(Attribute())); | |
679 else | |
680 i++; | |
681 | |
682 if (!isa(Tok.CloseBracket, i)) | |
683 break; | |
684 i++; | |
323 } | 685 } |
324 | 686 if (isa(Tok.Function, i)) |
325 if (n.isIdentifier()) | 687 return action.actOnDeclStmt(parseDecl(Attribute())); |
326 return action.actOnDeclStmt(parseVarDecl()); | |
327 | |
328 // Expression: a.b, a = b, a(b) etc. | |
329 Exp exp = parseExpression(); | |
330 require(Tok.Seperator); | |
331 return action.actOnExprStmt(exp); | |
332 } | 688 } |
333 | 689 |
334 case Tok.Switch: | 690 // Expression: a.b, a = b, a(b) etc. |
335 messages.report(UnexpectedTok, lexer.peek.location).arg(lexer.next.getType); | 691 Exp exp = parseExpression(); |
692 require(Tok.Seperator); | |
693 return action.actOnExprStmt(exp); | |
694 | |
695 case Tok.Void: // And all basic types | |
696 return action.actOnDeclStmt(parseVarDecl()); | |
697 | |
698 default: | |
699 if (peek.isBasicType) | |
700 goto case Tok.Void; | |
701 | |
702 messages.report(UnexpectedBeginStmt, peek.location).arg(peek.get(sm)); | |
703 require(Tok.Seperator); | |
336 return null; | 704 return null; |
337 | 705 } |
338 default: | |
339 if (t.isBasicType()) | |
340 goto case Tok.Identifier; | |
341 if (t.type == Tok.Star) | |
342 { | |
343 auto exp = parseExpression(); | |
344 require(Tok.Seperator); | |
345 return action.actOnExprStmt(exp); | |
346 } | |
347 messages.report(UnexpectedBeginStmt, lexer.peek.location).arg(lexer.next.getType); | |
348 return null; | |
349 } | |
350 messages.report(UnexpectedTok, t.location); | |
351 return null; | |
352 } | 706 } |
353 | 707 |
354 Decl parseVarDecl() | 708 Decl parseVarDecl() |
355 { | 709 { |
356 // manually hardcoded to only support "type id [= exp];" | 710 // manually hardcoded to only support "type id [= exp];" |
357 // as that is the only thing the codegen understands | 711 // as that is the only thing the codegen understands |
358 Id type = parseType; | 712 Id type = parseType(); |
359 Id id = Id(lexer.next); | 713 Id id = Id(next()); |
360 Exp init; | 714 Exp init; |
361 if (skip(Tok.Assign)) | 715 if (skip(Tok.Assign)) |
362 init = parseExpression(); | 716 init = parseExpression(); |
363 require(Tok.Seperator); | 717 require(Tok.Seperator); |
364 Decl d = action.actOnDeclarator(type, id, init); | 718 Attribute att; |
719 Decl d = action.actOnDeclarator(type, id, init, att); | |
365 return d; | 720 return d; |
366 } | 721 } |
367 | 722 |
368 /** | 723 /** |
369 Parses a function/method given the already parsed return type and name | 724 Parses a function/method given the already parsed return type and name |
370 */ | 725 */ |
371 Decl parseFunc(ref Id type, ref Id name) | 726 Decl parseFunc(ref Id type, ref Id name, Attribute att) |
372 { | 727 { |
373 Decl func = action.actOnStartOfFunctionDef(type, name); | 728 Decl func = action.actOnStartOfFunctionDef(type, name, att); |
374 parseFuncArgs(func); | 729 parseFuncArgs(func); |
375 | 730 |
376 if(lexer.peek.type == Tok.Seperator) | 731 if(peek.type == Tok.Seperator) |
377 { | 732 { |
378 lexer.next; | 733 next(); |
379 return func; | 734 return func; |
380 } | 735 } |
381 Stmt stmt = parseCompoundStatement(); | 736 Stmt stmt = parseCompoundStatement(); |
382 | 737 |
383 return action.actOnEndOfFunction(func, stmt); | 738 return action.actOnEndOfFunction(func, stmt); |
390 */ | 745 */ |
391 void parseFuncArgs(Decl func) | 746 void parseFuncArgs(Decl func) |
392 { | 747 { |
393 require(Tok.OpenParentheses); // Remove the "(" token. | 748 require(Tok.OpenParentheses); // Remove the "(" token. |
394 | 749 |
395 while(lexer.peek.type != Tok.CloseParentheses) | 750 while(peek.type != Tok.CloseParentheses) |
396 { | 751 { |
397 auto t = parseType(); | 752 auto t = parseType(); |
398 Id i; | 753 Id i; |
399 if(lexer.peek.type == Tok.Identifier) | 754 if(peek.type == Tok.Identifier) |
400 i = parseIdentifier(); | 755 i = parseIdentifier(); |
401 action.addFuncArg(func, t, i); | 756 action.addFuncArg(func, t, i); |
402 | 757 |
403 if(lexer.peek.type == Tok.Comma) | 758 if(peek.type == Tok.Comma) |
404 lexer.next; | 759 next(); |
405 } | 760 } |
406 | 761 |
407 require(Tok.CloseParentheses); // Remove the ")" | 762 require(Tok.CloseParentheses); // Remove the ")" |
408 } | 763 } |
409 | 764 |
411 Parse either a block, or a single statement as allowed after if, while | 766 Parse either a block, or a single statement as allowed after if, while |
412 and for. | 767 and for. |
413 */ | 768 */ |
414 Stmt parseSingleOrCompoundStatement() | 769 Stmt parseSingleOrCompoundStatement() |
415 { | 770 { |
416 if (lexer.peek.type == Tok.OpenBrace) | 771 if (peek.type == Tok.OpenBrace) |
417 return parseCompoundStatement(); | 772 return parseCompoundStatement(); |
418 return parseStatement(); | 773 return parseStatement(); |
419 } | 774 } |
420 | 775 |
421 /** | 776 /** |
422 Parses a function-body or similar, expects an opening brace to be the | 777 Parses a function-body or similar, expects an opening brace to be the |
423 current token. | 778 current token. |
424 | 779 |
425 Will consume both the starting { and ending } | 780 Will consume both the starting { and ending } |
426 */ | 781 */ |
427 Stmt parseCompoundStatement() | 782 Stmt parseCompoundStatement() |
428 { | 783 { |
429 Token lbrace = require(Tok.OpenBrace); | 784 Token lbrace = require(Tok.OpenBrace); |
430 SmallArray!(Stmt, 32) stmts; // Try to use the stack only | 785 SmallArray!(Stmt, 32) stmts; // Try to use the stack only |
431 while (lexer.peek.type != Tok.CloseBrace) | 786 while ( !isa(Tok.CloseBrace) && !isa(Tok.EOF) ) |
432 stmts ~= parseStatement(); | 787 stmts ~= parseStatement(); |
433 Token rbrace = require(Tok.CloseBrace); | 788 Token rbrace = require(Tok.CloseBrace); |
434 return action.actOnCompoundStmt(lbrace, rbrace, stmts.unsafe()); | 789 return action.actOnCompoundStmt(lbrace, rbrace, stmts.unsafe()); |
435 } | 790 } |
436 | 791 |
437 Id parseIdentifier() | 792 Id parseIdentifier() |
438 { | 793 { |
439 Token tok = lexer.next; | 794 Token tok = next(); |
440 | 795 |
441 if (tok.type is Tok.Identifier) | 796 if (tok.type is Tok.Identifier) |
442 return Id(tok); | 797 return Id(tok); |
443 | 798 |
444 messages.report(UnexpectedTokSingle, tok.location) | 799 messages.report(UnexpectedTokSingle, tok.location) |
445 .arg(tok.getType) | 800 .arg(tok.get(sm)) |
446 .arg(Tok.Identifier); | 801 .arg(Tok.Identifier); |
447 } | 802 } |
448 | 803 |
449 ModuleName parseModuleName() | 804 ModuleName parseModuleName() |
450 { | 805 { |
451 auto id = parseIdentifier(); | 806 auto id = parseIdentifier(); |
452 ModuleName mod; | 807 ModuleName mod; |
453 while (skip(Tok.Dot)) | 808 while (skip(Tok.Dot)) |
454 { | 809 { |
455 mod.packages ~= id; | 810 mod.packages ~= id; |
456 if (lexer.peek.type != Tok.Identifier) { | 811 if (peek.type != Tok.Identifier) { |
457 messages.report(ExpectedIdAfterPackage, lexer.peek.location); | 812 messages.report(ExpectedIdAfterPackage, peek.location); |
458 goto Lerror; | 813 goto Lerror; |
459 } | 814 } |
460 id = parseIdentifier(); | 815 id = parseIdentifier(); |
461 } | 816 } |
462 mod.id = id; | 817 mod.id = id; |
463 return mod; | 818 return mod; |
464 Lerror: | 819 Lerror: |
465 while (!skip(Tok.Seperator)) | 820 while (!skip(Tok.Seperator)) |
466 lexer.next(); | 821 next(); |
467 return mod; | 822 return mod; |
468 } | 823 } |
469 | 824 |
470 | 825 |
    /**
      Parse a type - this includes pointer and array(at some point) types.

      The leading token must be a basic type or an identifier (a named
      type); otherwise InvalidType is reported. Type suffixes are then
      consumed greedily, left to right:
        '*'            -> pointer type
        '[ Integer ]'  -> static array type
        'function(..)' -> function type with a parameter list
      Returns the resulting Id describing the full type.
    */
    Id parseType()
    {
        Token type = next();

        Id currentType;

        if ( !(type.isBasicType || type.type == Tok.Identifier) )
            messages.report(InvalidType, type.location);

        currentType = Id(type);

        while(true)
        {
            switch(peek.type)
            {
                case Tok.Star:
                    currentType = PointerTypeId(currentType);
                    next();
                    break;
                case Tok.OpenBracket:
                    next();
                    // Only '[<integer>]' produces a new type node; a bare
                    // '[]' leaves currentType unchanged.
                    // NOTE(review): dynamic arrays appear unimplemented
                    // here — confirm against the header comment's
                    // "array(at some point)".
                    if (isa(Tok.Integer))
                        currentType = StaticArrayTypeId(
                                currentType,
                                action.actOnNumericConstant(
                                    require(Tok.Integer)));
                    require(Tok.CloseBracket);
                    break;
                case Tok.Function:
                    next();

                    require(Tok.OpenParentheses); // Remove the "(" token.

                    DeclT[] decls;

                    while(peek.type != Tok.CloseParentheses)
                    {
                        // Each parameter is a type with an optional name.
                        auto t = parseType();
                        Id i;
                        if(peek.type == Tok.Identifier)
                            i = parseIdentifier();

                        // Act on function type param
                        decls ~= action.actOnDeclarator(t, i, null, Attribute());

                        if(peek.type == Tok.Comma)
                            next();
                    }

                    currentType = FunctionTypeId(currentType, decls);

                    require(Tok.CloseParentheses); // Remove the ")"
                    break;
                default:
                    // No more type suffixes — done.
                    goto end;
            }
        }
    end:
        return currentType;
    }
546 | 890 |
547 private: | 891 private: |
548 // -- Expression parsing -- // | 892 // -- Expression parsing -- // |
549 Exp parsePostfixExp(Exp target) | 893 Exp parsePostfixExp(Exp target) |
550 { | 894 { |
551 switch(lexer.peek.type) | 895 switch(peek.type) |
552 { | 896 { |
553 case Tok.Dot: | 897 case Tok.Dot: |
554 switch(lexer.peek(1).type) | 898 switch(peek(1).type) |
555 { | 899 { |
556 case Tok.Identifier: | 900 case Tok.Identifier: |
557 Token op = lexer.next; | 901 Token op = next(); |
558 Id member = Id(lexer.next); | 902 Id member = Id(next()); |
559 Exp exp = action.actOnMemberReference(target, op.location, member); | 903 Exp exp = action.actOnMemberReference(target, op.location, member); |
560 return parsePostfixExp(exp); | 904 return parsePostfixExp(exp); |
561 default: | 905 default: |
562 Token t = lexer.peek(1); | 906 Token t = peek(1); |
563 messages.report(ExpectedIdAfterDot, t.location); | 907 messages.report(ExpectedIdAfterDot, t.location); |
564 } | 908 } |
565 case Tok.OpenBracket: | 909 case Tok.OpenBracket: |
566 Token open = lexer.next; | 910 Token open = next(); |
567 Exp index = parseExpression(); | 911 Exp index = parseExpression(); |
568 Token close = require(Tok.CloseBracket); | 912 Token close = require(Tok.CloseBracket); |
569 return action.actOnIndexEpr(target, open, index, close); | 913 return action.actOnIndexExpr(target, open, index, close); |
570 default: | 914 default: |
571 return target; | 915 return target; |
572 } | 916 } |
573 } | 917 } |
574 | 918 |
575 Exp parseExpression(int p = 0) | 919 Exp parseExpression(int p = 0) |
576 { | 920 { |
577 auto exp = P(); | 921 auto exp = P(); |
578 Token next = lexer.peek(); | 922 Token n = peek(); |
579 BinOp* op = null; | 923 BinOp* op = null; |
580 while ((op = binary(next.type)) != null && op.prec >= p) | 924 while ((op = binary(n.type)) != null && op.prec >= p) |
581 { | 925 { |
582 lexer.next(); | 926 next(); |
583 int q = op.leftAssoc? 1 + op.prec : op.prec; | 927 int q = op.leftAssoc? 1 + op.prec : op.prec; |
584 auto exp2 = parseExpression(q); | 928 auto exp2 = parseExpression(q); |
585 exp = action.actOnBinaryOp(next.location, op.operator, exp, exp2); | 929 exp = action.actOnBinaryOp(n.location, op.operator, exp, exp2); |
586 next = lexer.peek(); | 930 n = peek(); |
587 } | 931 } |
588 | 932 |
589 return exp; | 933 return exp; |
590 } | 934 } |
591 | 935 |
    /**
      Parse a primary/unary expression: prefix operators, parenthesised
      expressions, identifiers (with postfix/call handling), null, cast,
      integer and string literals, array literals, and 'new' expressions.
      Reports a fatal ExpectedExp error if nothing matches.
    */
    Exp P()
    {
        Token n = next();
        if (auto op = unary(n.type))
            // Prefix operator: operand binds at the operator's precedence.
            return action.actOnUnaryOp(n, parseExpression(op.prec));
        else if (n.type == Tok.OpenParentheses)
        {
            auto e = parseExpression(0);
            require(Tok.CloseParentheses);
            return e;
        }
        else if (n.type == Tok.Identifier)
        {
            Exp value = action.actOnIdentifierExp(Id(n));
            // Fold trailing '.member' / '[index]' before checking for a call.
            Exp iden = parsePostfixExp(value);
            switch(peek.type)
            {
                case Tok.OpenParentheses:
                    Token lp = next();
                    SmallArray!(Exp, 8) args;
                    while(peek.type != Tok.CloseParentheses)
                    {
                        if(peek.type == Tok.Comma)
                            next();
                        args ~= parseExpression();
                    }

                    Token rp = next();
                    return action.actOnCallExpr(iden, lp, args.unsafe(), rp);

                default:
                    return iden;
            }
        }
        else if (n.type == Tok.Null)
            return action.actOnNullExpr(n.location);
        else if (n.type == Tok.Cast)
            return parseCast(n);
        else if (n.type == Tok.Integer)
            return action.actOnNumericConstant(n);
        else if (n.type == Tok.String)
            return action.actOnStringExp(n);
        else if (n.type == Tok.OpenBracket)
        {
            // Array literals
            // NOTE(review): an empty literal '[]' is not handled — the
            // first element is parsed unconditionally. Confirm intended.
            Exp[] exps;
            exps ~= parseExpression();

            while (isa(Tok.Comma))
            {
                next();
                // Allow a trailing comma before the closing bracket.
                if (isa(Tok.CloseBracket))
                    break;
                exps ~= parseExpression();
            }
            scope e = require(Tok.CloseBracket);
            return action.actOnArrayLiteralExpr(exps, n.location, e.location);
        }
        else if (n.type == Tok.New)
        {
            // new (allocator_args) Type (constructor_args)
            // Both argument lists are optional.
            Exp[] allocator_args;
            Exp[] constructor_args;

            if ( isa(Tok.OpenParentheses))
            {
                next(); // Remove OpenParentheses

                if ( !isa(Tok.CloseParentheses ) )
                {
                    allocator_args ~= parseExpression;

                    while ( isa(Tok.Comma) )
                    {
                        next(); // Remove Comma

                        allocator_args ~= parseExpression;
                    }
                }
                require(Tok.CloseParentheses);
            }

            auto type = parseType;

            if ( isa(Tok.OpenParentheses))
            {
                next(); // Remove OpenParentheses

                if ( !isa(Tok.CloseParentheses ) )
                {
                    constructor_args ~= parseExpression;

                    while ( isa(Tok.Comma) )
                    {
                        next(); // Remove Comma

                        constructor_args ~= parseExpression;
                    }
                }
                require(Tok.CloseParentheses);
            }
            return action.actOnNewExpr(type, allocator_args, constructor_args);
        }

        // Nothing matched — unrecoverable at this point.
        messages.report(ExpectedExp, n.location)
            .fatal(ExitLevel.Parser);
        return null;
    }
637 | 1044 |
638 Exp parseCast(ref Token _cast) | 1045 Exp parseCast(ref Token _cast) |
639 { | 1046 { |
640 require(Tok.OpenParentheses); | 1047 require(Tok.OpenParentheses); |
641 auto next = lexer.next; | 1048 auto n = next(); |
642 if(!next.isBasicType && !next.isIdentifier) | 1049 if(!n.isBasicType && !n.isIdentifier) |
643 messages.report(ExpectedCastType, next.location); | 1050 messages.report(ExpectedCastType, n.location); |
644 | 1051 |
645 require(Tok.CloseParentheses); | 1052 require(Tok.CloseParentheses); |
646 auto exp = P(); | 1053 auto exp = P(); |
647 return action.actOnCastExpr(_cast, Id(next), exp); | 1054 return action.actOnCastExpr(_cast, Id(n), exp); |
648 } | 1055 } |
649 | 1056 |
    // Table-driven description of a prefix operator: the token kind and
    // the precedence used as the minimum level when parsing its operand.
    struct UnOp
    {
        Tok tokenType;
        int prec;
    }

    // Recognised prefix operators: negation, dereference and address-of.
    static const UnOp[] _unary =
    [
        {Tok.Minus, 4},
        {Tok.Star, 4},
        {Tok.And, 4}
    ];
661 UnOp* unary(Tok t) | 1069 UnOp* unary(Tok t) |
662 { | 1070 { |
663 foreach (ref op; _unary) | 1071 foreach (ref op; _unary) |
664 if (op.tokenType == t) | 1072 if (op.tokenType == t) |
673 bool leftAssoc; | 1081 bool leftAssoc; |
674 Operator operator; | 1082 Operator operator; |
675 } | 1083 } |
676 | 1084 |
677 static const BinOp[] _binary = | 1085 static const BinOp[] _binary = |
678 [ | 1086 [ |
679 {Tok.Assign, 1, false, Operator.Assign}, | 1087 {Tok.Assign, 1, false, Operator.Assign}, |
1088 {Tok.PlusAssign, 1, false, Operator.AddAssign}, | |
1089 {Tok.MinusAssign, 1, false, Operator.SubAssign}, | |
1090 {Tok.StarAssign, 1, false, Operator.MulAssign}, | |
1091 {Tok.SlashAssign, 1, false, Operator.DivAssign}, | |
1092 {Tok.PercentAssign, 1, false, Operator.ModAssign}, | |
1093 | |
1094 // =, += etc. 1 | |
1095 // (need special-case for the ternary operator at this level) | |
1096 // ||, 2 | |
1097 // &&, 3 | |
1098 // |, 4 | |
1099 // &, 5 | |
1100 // ^, 6 | |
1101 // ==, !=, is, !is, 7 | |
1102 // <, <= etc, 7 | |
1103 // in, 7 | |
1104 // <<, >>, >>>, 8 | |
1105 // +, -, ~, 9 | |
1106 // *, /, %, 10 | |
1107 // unary operators here | |
680 | 1108 |
681 {Tok.Eq, 2, true, Operator.Eq}, | 1109 {Tok.Eq, 2, true, Operator.Eq}, |
682 {Tok.Ne, 2, true, Operator.Ne}, | 1110 {Tok.Ne, 2, true, Operator.Ne}, |
683 | 1111 |
684 {Tok.Lt, 2, true, Operator.Lt}, | 1112 {Tok.Lt, 2, true, Operator.Lt}, |
689 {Tok.Plus, 3, true, Operator.Add}, | 1117 {Tok.Plus, 3, true, Operator.Add}, |
690 {Tok.Minus, 3, true, Operator.Sub}, | 1118 {Tok.Minus, 3, true, Operator.Sub}, |
691 | 1119 |
692 {Tok.Star, 5, true, Operator.Mul}, | 1120 {Tok.Star, 5, true, Operator.Mul}, |
693 {Tok.Slash, 5, true, Operator.Div}, | 1121 {Tok.Slash, 5, true, Operator.Div}, |
694 {Tok.Percent, 5, true, Operator.Mod} | 1122 {Tok.Percent, 5, true, Operator.Mod}, |
1123 | |
1124 {Tok.LeftShift, 8, true, Operator.LeftShift}, | |
1125 {Tok.RightShift, 8, true, Operator.RightShift}, | |
1126 {Tok.UnsignedRightShift, 8, true, Operator.UnsignedRightShift} | |
695 ]; | 1127 ]; |
696 BinOp* binary(Tok t) | 1128 BinOp* binary(Tok t) |
697 { | 1129 { |
698 foreach (ref op; _binary) | 1130 foreach (ref op; _binary) |
699 if (op.tokenType == t) | 1131 if (op.tokenType == t) |
703 | 1135 |
704 private: | 1136 private: |
705 | 1137 |
706 Token require(Tok t) | 1138 Token require(Tok t) |
707 { | 1139 { |
708 if (lexer.peek().type != t) | 1140 if (!isa(t)) |
709 messages.report(UnexpectedTokSingle, lexer.peek.location) | 1141 if(isa(Tok.EOF)) |
710 .arg(lexer.peek.getType) | 1142 messages.report(UnexpectedEOF, |
711 .arg(t); | 1143 [lexer.last.asRange][], []) |
712 return lexer.next(); | 1144 .arg(lexer.last.get(sm)) |
1145 .fatal(ExitLevel.Parser); | |
1146 else | |
1147 messages.report(UnexpectedTokSingle, peek.location) | |
1148 .arg(peek.get(sm)) | |
1149 .arg(typeToString[t]); | |
1150 return next(); | |
713 } | 1151 } |
714 | 1152 |
715 bool skip(Tok t) | 1153 bool skip(Tok t) |
716 { | 1154 { |
717 if (lexer.peek().type != t) | 1155 if (peek().type != t) |
718 return false; | 1156 return false; |
719 lexer.next(); | 1157 next(); |
720 return true; | 1158 return true; |
721 } | 1159 } |
722 | 1160 |
723 bool on_a(Tok t) | 1161 bool isa(Tok t, int i = 0) |
724 { | 1162 { |
725 return lexer.peek.type == t; | 1163 return peek(i).type == t; |
726 } | 1164 } |
727 | 1165 |
    // If the *current* lookahead is EOF, report a fatal UnexpectedEOF
    // anchored at t (the token whose construct was left unterminated);
    // otherwise return false.
    // NOTE(review): the parameter t is used only for the error message's
    // range/text — the EOF check is always against the current peek.
    // Confirm this asymmetry is intended at the call sites.
    bool isEOF(Token t)
    {
        if (isa(Tok.EOF))
            messages.report(UnexpectedEOF,
                    [t.asRange][], [])
                .arg(t.get(sm))
                .fatal(ExitLevel.Parser);
        return false;
    }
1175 | |
1176 Token next() | |
1177 { | |
1178 return lexer.next; | |
1179 } | |
1180 | |
1181 Token peek(int i = 0) | |
1182 { | |
1183 return lexer.peek(i); | |
1184 } | |
1185 | |
1186 Lexer lexer; | |
729 SourceManager sm; | 1187 SourceManager sm; |
730 } | 1188 } |
731 | 1189 |