Mercurial > projects > dil
comparison trunk/src/cmd/Generate.d @ 752:51e9dfe27f20
Revised module cmd.Generate.
author | Aziz Köksal <aziz.koeksal@gmail.com> |
---|---|
date | Wed, 13 Feb 2008 15:45:32 +0100 |
parents | 7e7c85235673 |
children | 90668b83ae5e |
comparison
equal
deleted
inserted
replaced
751:8caf18892c1b | 752:51e9dfe27f20 |
---|---|
2 Author: Aziz Köksal | 2 Author: Aziz Köksal |
3 License: GPL3 | 3 License: GPL3 |
4 +/ | 4 +/ |
5 module cmd.Generate; | 5 module cmd.Generate; |
6 | 6 |
7 import dil.ast.Node; | 7 import dil.ast.DefaultVisitor; |
8 import dil.ast.Node, | |
9 dil.ast.Declaration, | |
10 dil.ast.Statement, | |
11 dil.ast.Expression, | |
12 dil.ast.Types; | |
8 import dil.lexer.Lexer; | 13 import dil.lexer.Lexer; |
9 import dil.parser.Parser; | 14 import dil.parser.Parser; |
10 import dil.File; | 15 import dil.File; |
11 import tango.io.Print; | 16 import tango.io.Print; |
12 import common; | 17 import common; |
41 case '<': result ~= "&lt;"; break; | 46 case '<': result ~= "&lt;"; break; |
42 case '>': result ~= "&gt;"; break; | 47 case '>': result ~= "&gt;"; break; |
43 case '&': result ~= "&amp;"; break; | 48 case '&': result ~= "&amp;"; break; |
44 default: result ~= c; | 49 default: result ~= c; |
45 } | 50 } |
46 return result; | 51 if (result.length != text.length) |
52 return result; | |
53 // Nothing escaped. Return original text. | |
54 delete result; | |
55 return text; | |
47 } | 56 } |
48 | 57 |
49 | 58 |
50 /// Find object in subject and return position. | 59 /// Find object in subject and return position. |
51 /// Returns -1 if no match was found. | 60 /// Returns -1 if no match was found. |
76 if (c == object) | 85 if (c == object) |
77 return i; | 86 return i; |
78 return -1; | 87 return -1; |
79 } | 88 } |
80 | 89 |
81 /// Returns the short class name of an instance descending from Node. | 90 /// Returns: the short class name of an instance descending from Node. |
82 char[] getShortClassName(Node node) | 91 char[] getShortClassName(Node node) |
83 { | 92 { |
84 static char[][] name_table; | 93 static char[][] name_table; |
85 if (name_table is null) | 94 if (name_table is null) |
86 name_table = new char[][NodeKind.max+1]; // Create a new table. | 95 name_table = new char[][NodeKind.max+1]; // Create a new table. |
286 | 295 |
287 // The size of the arrays must equal the number of members in enum DocPart. | 296 // The size of the arrays must equal the number of members in enum DocPart. |
288 static assert(html_tags.length == DocPart.max+1); | 297 static assert(html_tags.length == DocPart.max+1); |
289 static assert(xml_tags.length == DocPart.max+1); | 298 static assert(xml_tags.length == DocPart.max+1); |
290 | 299 |
291 /// Prints the syntax tree of a source file using the buffer print. | 300 /// Extended token structure. |
301 struct TokenEx | |
302 { | |
303 Token* token; /// The lexer token. | |
304 Node[] beginNodes; /// beginNodes[n].begin == token | |
305 Node[] endNodes; /// endNodes[n].end == token | |
306 } | |
307 | |
308 /// Builds an array of TokenEx items. | |
309 class TokenExBuilder : DefaultVisitor | |
310 { | |
311 private TokenEx*[Token*] tokenTable; | |
312 | |
313 TokenEx[] build(Node root, Token* first) | |
314 { | |
315 Token* token = first; | |
316 | |
317 uint count; | |
318 while (token) | |
319 { | |
320 count++; | |
321 token = token.next; | |
322 } | |
323 | |
324 auto toks = new TokenEx[count]; | |
325 token = first; | |
326 foreach (ref tokEx; toks) | |
327 { | |
328 tokEx.token = token; | |
329 if (!token.isWhitespace) | |
330 tokenTable[token] = &tokEx; | |
331 token = token.next; | |
332 } | |
333 | |
334 super.visitN(root); | |
335 tokenTable = null; | |
336 return toks; | |
337 } | |
338 | |
339 TokenEx* getTokenEx()(Token* t) | |
340 { | |
341 auto p = t in tokenTable; | |
342 assert(p, t.srcText~" is not in tokenTable"); | |
343 return *p; | |
344 } | |
345 | |
346 void push()(Node n) | |
347 { | |
348 auto begin = n.begin; | |
349 if (begin) | |
350 { assert(n.end); | |
351 auto txbegin = getTokenEx(begin); | |
352 auto txend = getTokenEx(n.end); | |
353 txbegin.beginNodes ~= n; | |
354 txend.endNodes ~= n; | |
355 } | |
356 } | |
357 | |
358 // Override dispatch functions. | |
359 override: | |
360 Declaration visitD(Declaration n) | |
361 { return push(n), super.visitD(n); } | |
362 Statement visitS(Statement n) | |
363 { return push(n), super.visitS(n); } | |
364 Expression visitE(Expression n) | |
365 { return push(n), super.visitE(n); } | |
366 TypeNode visitT(TypeNode n) | |
367 { return push(n), super.visitT(n); } | |
368 Node visitN(Node n) | |
369 { return push(n), super.visitN(n); } | |
370 } | |
371 | |
372 char getTag(NodeCategory nc) | |
373 { | |
374 char tag; | |
375 switch (nc) | |
376 { | |
377 alias NodeCategory NC; | |
378 case NC.Declaration: tag = 'd'; break; | |
379 case NC.Statement: tag = 's'; break; | |
380 case NC.Expression: tag = 'e'; break; | |
381 case NC.Type: tag = 't'; break; | |
382 case NC.Other: tag = 'o'; break; | |
383 default: | |
384 assert(0); | |
385 } | |
386 return tag; | |
387 } | |
388 | |
389 void printErrors(Lexer lx, string[] tags, Print!(char) print) | |
390 { | |
391 foreach (error; lx.errors) | |
392 { | |
393 print.formatln(tags[DocPart.Error], "L", error.filePath, Format("{0},{1}", error.loc, error.col), "L", xml_escape(error.getMsg)); | |
394 } | |
395 } | |
396 | |
397 void printErrors(Parser parser, string[] tags, Print!(char) print) | |
398 { | |
399 foreach (error; parser.errors) | |
400 { | |
401 print.formatln(tags[DocPart.Error], "P", error.filePath, Format("{0},{1}", error.loc, error.col), "P", xml_escape(error.getMsg)); | |
402 } | |
403 } | |
404 | |
292 void syntaxToDoc(string filePath, Print!(char) print, DocOption options) | 405 void syntaxToDoc(string filePath, Print!(char) print, DocOption options) |
293 { | 406 { |
294 auto tags = options & DocOption.HTML ? html_tags : xml_tags; | 407 auto tags = options & DocOption.HTML ? html_tags : xml_tags; |
295 auto sourceText = loadFile(filePath); | 408 auto sourceText = loadFile(filePath); |
296 auto parser = new Parser(sourceText, filePath); | 409 auto parser = new Parser(sourceText, filePath); |
297 auto root = parser.start(); | 410 auto root = parser.start(); |
298 auto lx = parser.lexer; | 411 auto lx = parser.lexer; |
299 | 412 |
300 auto token = lx.head; | 413 auto builder = new TokenExBuilder(); |
414 auto tokenExList = builder.build(root, lx.firstToken()); | |
301 | 415 |
302 print(tags[DocPart.Head]~\n); | 416 print(tags[DocPart.Head]~\n); |
303 // Output error messages. | |
304 if (lx.errors.length || parser.errors.length) | 417 if (lx.errors.length || parser.errors.length) |
305 { | 418 { // Output error messages. |
306 print(tags[DocPart.CompBegin]~\n); | 419 print(tags[DocPart.CompBegin]~\n); |
307 foreach (error; lx.errors) | 420 printErrors(lx, tags, print); |
308 { | 421 printErrors(parser, tags, print); |
309 print.formatln(tags[DocPart.Error], "L", error.filePath, Format("{0},{1}", error.loc, error.col), "L", xml_escape(error.getMsg)); | |
310 } | |
311 foreach (error; parser.errors) | |
312 { | |
313 print.formatln(tags[DocPart.Error], "P", error.filePath, Format("{0},{1}", error.loc, error.col), "P", xml_escape(error.getMsg)); | |
314 } | |
315 print(tags[DocPart.CompEnd]~\n); | 422 print(tags[DocPart.CompEnd]~\n); |
316 } | 423 } |
317 print(tags[DocPart.SrcBegin]); | 424 print(tags[DocPart.SrcBegin]); |
318 | 425 |
319 Node[][Token*] beginNodes, endNodes; | 426 // Iterate over list of tokens. |
320 | 427 foreach (ref tokenEx; tokenExList) |
321 void populateAAs(Node[] nodes) | 428 { |
322 { | 429 auto token = tokenEx.token; |
323 foreach (node; nodes) | |
324 { | |
325 assert(delegate bool(){ | |
326 foreach (child; node.children) | |
327 if (child is null) | |
328 return false; | |
329 return true; | |
330 }() == true, Format("Node '{0}' has a null child", node.classinfo.name) | |
331 ); | |
332 auto begin = node.begin; | |
333 if (begin) | |
334 { | |
335 auto end = node.end; | |
336 assert(end); | |
337 beginNodes[begin] ~= node; | |
338 endNodes[end] ~= node; | |
339 } | |
340 | |
341 if (node.children.length) | |
342 populateAAs(node.children); | |
343 } | |
344 } | |
345 assert(delegate bool(){ | |
346 foreach (child; root.children) | |
347 if (child is null) | |
348 return false; | |
349 return true; | |
350 }() == true, Format("Root node has a null child") | |
351 ); | |
352 populateAAs(root.children); | |
353 | |
354 char[] getTag(NodeCategory nc) | |
355 { | |
356 char[] tag; | |
357 switch (nc) | |
358 { | |
359 alias NodeCategory NC; | |
360 case NC.Declaration: tag = "d"; break; | |
361 case NC.Statement: tag = "s"; break; | |
362 case NC.Expression: tag = "e"; break; | |
363 case NC.Type: tag = "t"; break; | |
364 case NC.Other: tag = "o"; break; | |
365 default: | |
366 } | |
367 return tag; | |
368 } | |
369 | |
370 // Traverse linked list and print tokens. | |
371 while (token.kind != TOK.EOF) | |
372 { | |
373 token = token.next; | |
374 | |
375 // Print whitespace. | 430 // Print whitespace. |
376 if (token.ws) | 431 if (token.ws) |
377 print(token.ws[0..token.start - token.ws]); | 432 print(token.wsChars); |
378 | 433 |
379 Node[]* nodes = token in beginNodes; | 434 foreach (node; tokenEx.beginNodes) |
380 | 435 print.format(tags[DocPart.SyntaxBegin], getTag(node.category), getShortClassName(node)); |
381 if (nodes) | |
382 { | |
383 foreach (node; *nodes) | |
384 print.format(tags[DocPart.SyntaxBegin], getTag(node.category), getShortClassName(node)); | |
385 } | |
386 | 436 |
387 printToken(token, tags, print); | 437 printToken(token, tags, print); |
388 | 438 |
389 nodes = token in endNodes; | 439 if (options & DocOption.HTML) |
390 | 440 foreach_reverse (node; tokenEx.endNodes) |
391 if (nodes) | 441 print(tags[DocPart.SyntaxEnd]); |
392 { | 442 else |
393 foreach_reverse (node; *nodes) | 443 foreach_reverse (node; tokenEx.endNodes) |
394 if (options & DocOption.HTML) | 444 print.format(tags[DocPart.SyntaxEnd], getTag(node.category)); |
395 print(tags[DocPart.SyntaxEnd]); | |
396 else | |
397 print.format(tags[DocPart.SyntaxEnd], getTag(node.category)); | |
398 } | |
399 } | 445 } |
400 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); | 446 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); |
401 } | 447 } |
402 | 448 |
403 /// Prints all tokens of a source file using the buffer print. | 449 /// Prints all tokens of a source file using the buffer print. |
404 void tokensToDoc(string filePath, Print!(char) print, DocOption options) | 450 void tokensToDoc(string filePath, Print!(char) print, DocOption options) |
405 { | 451 { |
406 auto tags = options & DocOption.HTML ? html_tags : xml_tags; | 452 auto tags = options & DocOption.HTML ? html_tags : xml_tags; |
407 auto sourceText = loadFile(filePath); | 453 auto sourceText = loadFile(filePath); |
408 auto lx = new Lexer(sourceText, filePath); | 454 auto lx = new Lexer(sourceText, filePath); |
409 | 455 lx.scanAll(); |
410 auto token = lx.getTokens(); | |
411 | 456 |
412 print(tags[DocPart.Head]~\n); | 457 print(tags[DocPart.Head]~\n); |
413 | |
414 if (lx.errors.length) | 458 if (lx.errors.length) |
415 { | 459 { |
416 print(tags[DocPart.CompBegin]~\n); | 460 print(tags[DocPart.CompBegin]~\n); |
417 foreach (error; lx.errors) | 461 printErrors(lx, tags, print); |
418 { | |
419 print.formatln(tags[DocPart.Error], "L", error.filePath, Format("{0},{1}", error.loc, error.col), "L", xml_escape(error.getMsg)); | |
420 } | |
421 print(tags[DocPart.CompEnd]~\n); | 462 print(tags[DocPart.CompEnd]~\n); |
422 } | 463 } |
423 print(tags[DocPart.SrcBegin]); | 464 print(tags[DocPart.SrcBegin]); |
424 | 465 |
425 // Traverse linked list and print tokens. | 466 // Traverse linked list and print tokens. |
467 auto token = lx.firstToken(); | |
426 while (token.kind != TOK.EOF) | 468 while (token.kind != TOK.EOF) |
427 { | 469 { |
428 token = token.next; | |
429 // Print whitespace. | 470 // Print whitespace. |
430 if (token.ws) | 471 if (token.ws) |
431 print(token.ws[0..token.start - token.ws]); | 472 print(token.wsChars); |
432 printToken(token, tags, print); | 473 printToken(token, tags, print); |
474 token = token.next; | |
433 } | 475 } |
434 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); | 476 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); |
435 } | 477 } |
436 | 478 |
437 /// Prints a token with tags using the buffer print. | 479 /// Prints a token with tags using the buffer print. |