363
|
1 /++
|
|
2 Author: Aziz Köksal
|
|
3 License: GPL3
|
|
4 +/
|
|
5 module cmd.Generate;
|
|
6 import dil.SyntaxTree;
|
|
7 import dil.Token;
|
|
8 import dil.Parser, dil.Lexer;
|
|
9 import dil.File;
|
|
10 import std.stdio;
|
|
11
|
|
/// Output options for the documentation generator.
/// Used as a bit field: HTML and XML select the markup dialect,
/// Syntax additionally enables syntax-tree markup.
enum DocOption
{
  Tokens,        // NOTE(review): value is 0, so `options & DocOption.Tokens`
                 // is always false — it acts as the default mode, selected
                 // by the *absence* of the Syntax flag (see execute()).
  Syntax = 1<<1, // Emit syntax-tree (node) markup in addition to tokens.
  HTML = 1<<2,   // Produce HTML output.
  XML = 1<<3     // Produce XML output.
}
|
|
19
|
364
|
/// Entry point of the "generate" command.
/// Emits a highlighted document for the given source file: with
/// syntax-tree markup when DocOption.Syntax is set, otherwise
/// plain token markup.
void execute(string fileName, DocOption options)
{
  if ((options & DocOption.Syntax) != 0)
  {
    syntaxToDoc(fileName, options);
    return;
  }
  tokensToDoc(fileName, options);
}
|
|
27
|
363
|
/// Escapes the characters '&lt;', '&gt;' and '&amp;' with named
/// character entities so that arbitrary source text can be embedded
/// safely in the generated XML/HTML documents.
/// Params:
///   text = the raw source text.
/// Returns: a copy of text with the special characters replaced.
char[] xml_escape(char[] text)
{
  char[] result;
  foreach (c; text)
    switch (c)
    {
    // Bug fix: the previous version appended the raw character
    // ("<", ">", "&") instead of its entity, making the function
    // a no-op and the emitted markup invalid.
    case '<': result ~= "&lt;";  break;
    case '>': result ~= "&gt;";  break;
    case '&': result ~= "&amp;"; break;
    default:  result ~= c;
    }
  return result;
}
|
|
41
|
|
/// Returns a shortened class name for a syntax-tree node, e.g.
/// "WhileStatement" -> "While". The fully qualified classinfo name
/// is stripped of its package and module prefix, and the common
/// category suffix ("Declaration", "Statement", "Expression",
/// "Type") is removed. Results are cached per node kind.
char[] getShortClassName(Node n)
{
  // One cache slot per node kind; lazily allocated on first call.
  static char[][] cache;
  if (cache is null)
    cache = new char[][NodeKind.max+1];

  auto shortName = cache[n.kind];
  if (shortName !is null)
    return shortName;

  alias std.string.find find;
  shortName = n.classinfo.name;
  shortName = shortName[find(shortName, ".")+1 .. $]; // Strip package name.
  shortName = shortName[find(shortName, ".")+1 .. $]; // Strip module name.

  // Pick the suffix to strip based on the node's category.
  char[] suffix;
  if (n.category == NodeCategory.Declaration)
    suffix = "Declaration";
  else if (n.category == NodeCategory.Statement)
  {
    if (n.kind == NodeKind.Statements)
      return shortName; // "Statements" is kept verbatim (not cached, as before).
    suffix = "Statement";
  }
  else if (n.category == NodeCategory.Expression)
    suffix = "Expression";
  else if (n.category == NodeCategory.Type)
    suffix = "Type";
  else if (n.category == NodeCategory.Other)
    return shortName; // No suffix to strip; returned uncached, as before.

  // Chop off the common suffix, if present.
  auto pos = find(shortName, suffix);
  if (pos != -1)
    shortName = shortName[0 .. pos];

  // Memoize and return.
  cache[n.kind] = shortName;
  return shortName;
}
|
|
78
|
|
/// Indices into the html_tags and xml_tags format-string tables.
/// The first group selects document-structure pieces; the second
/// group (from Identifier onward) selects per-token markup.
enum DocPart
{
  Head,        // Document preamble.
  CompBegin,   // Begin of the compiler-messages section.
  CompEnd,     // End of the compiler-messages section.
  Error,       // A single lexer/parser error message.
  SyntaxBegin, // Opening tag of a syntax-tree node span.
  SyntaxEnd,   // Closing tag of a syntax-tree node span.
  SrcBegin,    // Begin of the source-code section.
  SrcEnd,      // End of the source-code section.
  Tail,        // Document postamble.
  // Tokens:
  Identifier,
  Comment,
  StringLiteral,
  CharLiteral,
  Operator,      // Generic operator (no dedicated markup).
  LorG,          // The <> operator.
  LessEqual,     // The <= operator.
  GreaterEqual,  // The >= operator.
  AndLogical,    // The && operator.
  OrLogical,     // The || operator.
  NotEqual,      // The != operator.
  Not,           // The ! operator.
  Number,
  Bracket,       // Parens, brackets and braces.
  SpecialToken,  // E.g. __FILE__, __LINE__.
  Shebang,
  Keyword,
  HLineBegin,    // Begin of a #line token.
  HLineEnd,      // End of a #line token.
  Filespec,      // The filespec of a #line token.
}
|
|
112
|
|
/// Format strings for HTML output, indexed by DocPart.
/// Literal '<', '>' and '&' in *displayed* operator text must be
/// written as character entities; raw characters would produce
/// invalid HTML (this was broken for LorG/LessEqual/GreaterEqual/
/// AndLogical and is fixed here).
auto html_tags = [
  // Head
  `<html>`\n
  `<head>`\n
  `<meta http-equiv="Content-Type" content="text/html; charset=utf-8">`\n
  `<link href="dil_html.css" rel="stylesheet" type="text/css">`\n
  `</head>`\n
  `<body>`[],
  // CompBegin
  `<div class="compilerinfo">`,
  // CompEnd
  `</div>`,
  // Error
  `<p class="error %s">%s(%d)%s: %s</p>`,
  // SyntaxBegin
  `<span class="%s %s">`,
  // SyntaxEnd
  `</span>`,
  // SrcBegin
  `<pre class="sourcecode">`,
  // SrcEnd
  `</pre>`,
  // Tail
  `</html>`,
  // Identifier
  `<span class="i">%s</span>`,
  // Comment
  `<span class="c%s">%s</span>`,
  // StringLiteral
  `<span class="sl">%s</span>`,
  // CharLiteral
  `<span class="cl">%s</span>`,
  // Operator
  `<span class="op">%s</span>`,
  // LorG
  `<span class="oplg">&lt;&gt;</span>`,
  // LessEqual
  `<span class="ople">&lt;=</span>`,
  // GreaterEqual
  `<span class="opge">&gt;=</span>`,
  // AndLogical
  `<span class="opaa">&amp;&amp;</span>`,
  // OrLogical
  `<span class="opoo">||</span>`,
  // NotEqual
  `<span class="opne">!=</span>`,
  // Not
  `<span class="opn">!</span>`,
  // Number
  `<span class="n">%s</span>`,
  // Bracket
  `<span class="br">%s</span>`,
  // SpecialToken
  `<span class="st">%s</span>`,
  // Shebang
  `<span class="shebang">%s</span>`,
  // Keyword
  `<span class="k">%s</span>`,
  // HLineBegin
  `<span class="hl">#line`,
  // HLineEnd
  "</span>",
  // Filespec
  `<span class="fs">%s</span>`,
];
|
|
178
|
|
/// Format strings for XML output, indexed by DocPart.
/// Literal '<', '>' and '&' in element *content* must be written as
/// character entities, otherwise the document is not well-formed XML
/// (this was broken for the lg/le/ge/aa operator entries and is
/// fixed here).
auto xml_tags = [
  // Head
  `<?xml version="1.0"?>`\n
  `<?xml-stylesheet href="dil_xml.css" type="text/css"?>`\n
  `<root>`[],
  // CompBegin
  `<compilerinfo>`,
  // CompEnd
  `</compilerinfo>`,
  // Error
  `<error t="%s">%s(%d)%s: %s</error>`,
  // SyntaxBegin
  `<%s t="%s">`,
  // SyntaxEnd
  `</%s>`,
  // SrcBegin
  `<sourcecode>`,
  // SrcEnd
  `</sourcecode>`,
  // Tail
  `</root>`,
  // Identifier
  "<i>%s</i>",
  // Comment
  `<c t="%s">%s</c>`,
  // StringLiteral
  "<sl>%s</sl>",
  // CharLiteral
  "<cl>%s</cl>",
  // Operator
  "<op>%s</op>",
  // LorG
  `<op t="lg">&lt;&gt;</op>`,
  // LessEqual
  `<op t="le">&lt;=</op>`,
  // GreaterEqual
  `<op t="ge">&gt;=</op>`,
  // AndLogical
  `<op t="aa">&amp;&amp;</op>`,
  // OrLogical
  `<op t="oo">||</op>`,
  // NotEqual
  `<op t="ne">!=</op>`,
  // Not
  `<op t="n">!</op>`,
  // Number
  "<n>%s</n>",
  // Bracket
  "<br>%s</br>",
  // SpecialToken
  "<st>%s</st>",
  // Shebang
  "<shebang>%s</shebang>",
  // Keyword
  "<k>%s</k>",
  // HLineBegin
  "<hl>#line",
  // HLineEnd
  "</hl>",
  // Filespec
  "<fs>%s</fs>",
];
|
|
241
|
|
242 static assert(html_tags.length == DocPart.max+1);
|
|
243 static assert(xml_tags.length == DocPart.max+1);
|
|
244
|
|
/// Parses the given file and emits a document in which every token
/// is wrapped in markup, and syntax-tree nodes open/close extra
/// spans around the token range they cover.
/// Params:
///   fileName = path of the D source file to document.
///   options  = DocOption.HTML selects the HTML table, otherwise XML.
void syntaxToDoc(string fileName, DocOption options)
{
  auto tags = options & DocOption.HTML ? html_tags : xml_tags;
  auto sourceText = loadFile(fileName);
  auto parser = new Parser(sourceText, fileName);
  auto root = parser.start();
  auto lx = parser.lx;

  auto token = lx.head;
  // 'end' tracks the end of the previously printed token, so the
  // whitespace between tokens can be reproduced verbatim.
  char* end = lx.text.ptr;

  writefln(tags[DocPart.Head]);
  // Output error messages.
  // "L"/"P" distinguish lexer from parser errors in the markup.
  if (lx.errors.length || parser.errors.length)
  {
    writefln(tags[DocPart.CompBegin]);
    foreach (error; lx.errors)
    {
      writefln(tags[DocPart.Error], "L", lx.fileName, error.loc, "L", xml_escape(error.getMsg));
    }
    foreach (error; parser.errors)
    {
      writefln(tags[DocPart.Error], "P", lx.fileName, error.loc, "P", xml_escape(error.getMsg));
    }
    writefln(tags[DocPart.CompEnd]);
  }
  writef(tags[DocPart.SrcBegin]);

  // Maps from a token to the nodes that begin/end at that token.
  Node[][Token*] beginNodes, endNodes;

  // Recursively fills beginNodes/endNodes for the whole tree.
  void populateAAs(Node[] nodes)
  {
    foreach (node; nodes)
    {
      auto begin = node.begin;
      if (begin)
      {
        auto end = node.end;
        assert(end);
        beginNodes[begin] ~= node;
        endNodes[end] ~= node;
      }
      if (node.children.length)
        populateAAs(node.children);
    }
  }
  populateAAs(root.children);

  // Maps a node category to its one-letter CSS class / XML tag name.
  char[] getTag(NodeCategory nc)
  {
    char[] tag;
    switch (nc)
    {
    alias NodeCategory NC;
    case NC.Declaration: tag = "d"; break;
    case NC.Statement: tag = "s"; break;
    case NC.Expression: tag = "e"; break;
    case NC.Type: tag = "t"; break;
    case NC.Other: tag = "o"; break;
    default:
    }
    return tag;
  }

  // Traverse linked list and print tokens.
  // NOTE(review): the loop advances to token.next *before* printing,
  // so the sentinel head token is skipped and the EOF token is never
  // printed — presumably intentional; confirm against Lexer.head.
  while (token.type != TOK.EOF)
  {
    token = token.next;

    // Print whitespace between previous and current token.
    if (end != token.start)
      writef("%s", end[0 .. token.start - end]);

    Node[]* nodes = token in beginNodes;

    // Open a span for every node starting at this token.
    if (nodes)
    {
      foreach (node; *nodes)
        writef(tags[DocPart.SyntaxBegin], getTag(node.category), getShortClassName(node));
    }

    printToken(token, tags);

    nodes = token in endNodes;

    // Close spans in reverse order so nesting stays balanced.
    // The XML closing tag needs the category letter; HTML does not.
    if (nodes)
    {
      foreach_reverse (node; *nodes)
        if (options & DocOption.HTML)
          writef(tags[DocPart.SyntaxEnd]);
        else
          writef(tags[DocPart.SyntaxEnd], getTag(node.category));
    }

    end = token.end;
  }
  writef(tags[DocPart.SrcEnd], tags[DocPart.Tail]);
}
|
|
343
|
|
/// Lexes the given file and emits a document with per-token markup
/// only (no syntax-tree spans).
/// Params:
///   fileName = path of the D source file to document.
///   options  = DocOption.HTML selects the HTML table, otherwise XML.
void tokensToDoc(string fileName, DocOption options)
{
  auto tags = options & DocOption.HTML ? html_tags : xml_tags;
  auto sourceText = loadFile(fileName);
  auto lx = new Lexer(sourceText, fileName);

  auto token = lx.getTokens();
  // 'end' tracks the end of the previously printed token, so the
  // whitespace between tokens can be reproduced verbatim.
  char* end = lx.text.ptr;

  writefln(tags[DocPart.Head]);

  // Output lexer error messages; "L" marks them as lexer errors.
  if (lx.errors.length)
  {
    writefln(tags[DocPart.CompBegin]);
    foreach (error; lx.errors)
    {
      writefln(tags[DocPart.Error], "L", lx.fileName, error.loc, "L", xml_escape(error.getMsg));
    }
    writefln(tags[DocPart.CompEnd]);
  }
  writef(tags[DocPart.SrcBegin]);

  // Traverse linked list and print tokens.
  // NOTE(review): as in syntaxToDoc, the head token is skipped and
  // the EOF token is never printed.
  while (token.type != TOK.EOF)
  {
    token = token.next;

    // Print whitespace between previous and current token.
    if (end != token.start)
      writef("%s", end[0 .. token.start - end]);
    printToken(token, tags);
    end = token.end;
  }
  // \n here is a D1 escape-sequence string literal, i.e. this is
  // writef("\n", SrcEnd, "\n", Tail).
  writef(\n, tags[DocPart.SrcEnd], \n, tags[DocPart.Tail]);
}
|
|
379
|
|
/// Prints a single token wrapped in the markup selected by 'tags'
/// (html_tags or xml_tags, indexed by DocPart). The token's source
/// text is XML-escaped before being substituted into the format
/// string.
/// Params:
///   token = the token to print.
///   tags  = format-string table to use.
void printToken(Token* token, string[] tags)
{
  alias DocPart DP;
  string srcText = xml_escape(token.srcText);

  switch(token.type)
  {
  case TOK.Identifier:
    writef(tags[DP.Identifier], srcText);
    break;
  case TOK.Comment:
    // Classify the comment by its second character:
    // '/' = line ("l"), '*' = block ("b"), '+' = nested ("n").
    // A comment token always has at least two characters, so
    // token.start[1] is safe.
    string t;
    switch (token.start[1])
    {
    case '/': t = "l"; break;
    case '*': t = "b"; break;
    case '+': t = "n"; break;
    default:
      assert(0);
    }
    writef(tags[DP.Comment], t, srcText);
    break;
  case TOK.String:
    writef(tags[DP.StringLiteral], srcText);
    break;
  case TOK.CharLiteral, TOK.WCharLiteral, TOK.DCharLiteral:
    writef(tags[DP.CharLiteral], srcText);
    break;
  // All operators below share the generic Operator markup; the ones
  // with dedicated entries (<>, <=, >=, &&, ||, !=, !) follow after.
  case TOK.Assign, TOK.Equal,
       TOK.Less, TOK.Greater,
       TOK.LShiftAssign, TOK.LShift,
       TOK.RShiftAssign, TOK.RShift,
       TOK.URShiftAssign, TOK.URShift,
       TOK.OrAssign, TOK.OrBinary,
       TOK.AndAssign, TOK.AndBinary,
       TOK.PlusAssign, TOK.PlusPlus, TOK.Plus,
       TOK.MinusAssign, TOK.MinusMinus, TOK.Minus,
       TOK.DivAssign, TOK.Div,
       TOK.MulAssign, TOK.Mul,
       TOK.ModAssign, TOK.Mod,
       TOK.XorAssign, TOK.Xor,
       TOK.CatAssign,
       TOK.Tilde,
       TOK.Unordered,
       TOK.UorE,
       TOK.UorG,
       TOK.UorGorE,
       TOK.UorL,
       TOK.UorLorE,
       TOK.LorEorG:
    writef(tags[DP.Operator], srcText);
    break;
  case TOK.LorG:
    writef(tags[DP.LorG]);
    break;
  case TOK.LessEqual:
    writef(tags[DP.LessEqual]);
    break;
  case TOK.GreaterEqual:
    writef(tags[DP.GreaterEqual]);
    break;
  case TOK.AndLogical:
    writef(tags[DP.AndLogical]);
    break;
  case TOK.OrLogical:
    writef(tags[DP.OrLogical]);
    break;
  case TOK.NotEqual:
    writef(tags[DP.NotEqual]);
    break;
  case TOK.Not:
    // Check if this is part of a template instantiation.
    // TODO: comments aren't skipped.
    if (token.prev.type == TOK.Identifier && token.next.type == TOK.LParen)
      goto default;
    writef(tags[DP.Not]);
    break;
  case TOK.Int32, TOK.Int64, TOK.Uint32, TOK.Uint64,
       TOK.Float32, TOK.Float64, TOK.Float80,
       TOK.Imaginary32, TOK.Imaginary64, TOK.Imaginary80:
    writef(tags[DP.Number], srcText);
    break;
  case TOK.LParen, TOK.RParen, TOK.LBracket,
       TOK.RBracket, TOK.LBrace, TOK.RBrace:
    writef(tags[DP.Bracket], srcText);
    break;
  case TOK.Shebang:
    writef(tags[DP.Shebang], srcText);
    break;
  case TOK.HashLine:
    // #line tokens are printed piecewise so the inner whitespace,
    // line number and filespec each get their own markup.
    void printWS(char* start, char* end)
    {
      if (start != end)
        writef(start[0 .. end - start]);
    }
    writef(tags[DP.HLineBegin]);
    auto num = token.line_num;
    // Print whitespace between #line and number
    auto ptr = token.start + "#line".length;
    printWS(ptr, num.start);
    printToken(num, tags); // Recurse for the number token.
    if (token.line_filespec)
    {
      auto filespec = token.line_filespec;
      // Print whitespace between number and filespec
      printWS(num.end, filespec.start);
      writef(tags[DP.Filespec], xml_escape(filespec.srcText));

      ptr = filespec.end;
    }
    else
      ptr = num.end;
    // Print remaining whitespace
    printWS(ptr, token.end);
    writef(tags[DP.HLineEnd]);
    break;
  default:
    // Keywords and special tokens get their own markup; anything
    // else is printed as escaped plain text.
    if (token.isKeyword())
      writef(tags[DP.Keyword], srcText);
    else if (token.isSpecialToken)
      writef(tags[DP.SpecialToken], srcText);
    else
      writef("%s", srcText);
  }
}
|