363
|
1 /++
|
|
2 Author: Aziz Köksal
|
|
3 License: GPL3
|
|
4 +/
|
|
5 module cmd.Generate;
|
|
6 import dil.SyntaxTree;
|
|
7 import dil.Token;
|
|
8 import dil.Parser, dil.Lexer;
|
|
9 import dil.File;
|
|
10 import std.stdio;
|
|
11
|
|
/// Output options for the generate command.
/// Members are bit flags and may be combined with `|` and tested with `&`.
///
/// Fix: `Tokens` previously had the implicit value 0, so
/// `options & DocOption.Tokens` could never be true. It is now `1<<0`;
/// the values of the other members are unchanged.
enum DocOption
{
  Tokens = 1<<0, /// Highlight the token stream only (no parse tree).
  Syntax = 1<<1, /// Highlight syntax-tree nodes as well.
  HTML   = 1<<2, /// Emit HTML markup.
  XML    = 1<<3  /// Emit XML markup.
}
|
|
19
|
|
/// Replaces the markup metacharacters '<', '>' and '&' in text with their
/// predefined character references (&lt;, &gt;, &amp;) so the result can be
/// embedded verbatim in HTML/XML text content.
///
/// Fix: the previous switch appended the metacharacter itself ("<", ">",
/// "&"), making the function an identity transform and the generated
/// documents ill-formed.
///
/// Params:  text = raw source text to escape.
/// Returns: a freshly built copy; the input is never modified.
///          An empty input yields an empty result.
char[] xml_escape(char[] text)
{
  char[] result;
  foreach(c; text)
    switch(c)
    {
    case '<': result ~= "&lt;";  break;
    case '>': result ~= "&gt;";  break;
    case '&': result ~= "&amp;"; break;
    default:  result ~= c;
    }
  return result;
}
|
|
33
|
|
/// Returns an abbreviated class name for node n, e.g. "If" for a class
/// named like "pkg.mod.IfStatement": the package and module prefixes are
/// sliced off, then the conventional category suffix ("Declaration",
/// "Statement", "Expression", "Type") is removed.
/// Results are memoized per NodeKind in a lazily allocated static table.
char[] getShortClassName(Node n)
{
  static char[][] name_table; // Memo: NodeKind -> short name.
  if (name_table is null)
    name_table = new char[][NodeKind.max+1]; // One slot per node kind.
  char[] name = name_table[n.kind];
  if (name !is null)
    return name; // Cache hit.

  // NOTE(review): uses std.string.find, but this module only imports
  // std.stdio directly — presumably std.string is pulled in (publicly)
  // by one of the dil imports; confirm.
  alias std.string.find find;
  name = n.classinfo.name;
  name = name[find(name, ".")+1 .. $]; // Remove package name
  name = name[find(name, ".")+1 .. $]; // Remove module name
  char[] remove; // Suffix to strip, chosen by the node's category.
  switch (n.category)
  {
  alias NodeCategory NC;
  case NC.Declaration: remove = "Declaration"; break;
  case NC.Statement:
    if (n.kind == NodeKind.Statements)
      return name; // Keep "Statements" intact (stripping would empty it).
    remove = "Statement";
    break;
  case NC.Expression: remove = "Expression"; break;
  case NC.Type: remove = "Type"; break;
  case NC.Other: return name; // No conventional suffix; return as-is.
  default: // Other categories: nothing to strip.
  }
  // Remove common suffix.
  auto idx = find(name, remove);
  if (idx != -1)
    name = name[0 .. idx];
  // Store the name.
  name_table[n.kind] = name;
  return name;
}
|
|
70
|
|
/// Indexes into the html_tags/xml_tags tables below: every structural
/// piece and token category a generated document is assembled from.
/// The two tables must have exactly one entry per member, in this order
/// (checked by the static asserts following them).
enum DocPart
{
  Head,        // Document prologue.
  CompBegin,   // Start of the compiler-messages section.
  CompEnd,     // End of the compiler-messages section.
  Error,       // One lexer/parser error message.
  SyntaxBegin, // Opening tag of a syntax-tree node span.
  SyntaxEnd,   // Closing tag of a syntax-tree node span.
  SrcBegin,    // Start of the highlighted source listing.
  SrcEnd,      // End of the highlighted source listing.
  Tail,        // Document epilogue.
  // Tokens:
  Identifier,
  Comment,
  StringLiteral,
  CharLiteral,
  Operator,
  LorG,
  LessEqual,
  GreaterEqual,
  AndLogical,
  OrLogical,
  NotEqual,
  Not,
  Number,
  Bracket,
  SpecialToken,
  Shebang,
  Keyword,
  HLineBegin,  // Opening of a #line directive.
  HLineEnd,    // Closing of a #line directive.
  Filespec,    // The file specification of a #line directive.
}
|
|
104
|
|
/// Markup fragments for HTML output, indexed by DocPart.
/// Fix: literal '<' and '&' in the rendered operator text are now written
/// as character references (&lt; &gt; &amp;) — the raw characters would be
/// parsed as markup and corrupt the document. The Tail entry now also
/// closes the <body> element opened in Head.
auto html_tags = [
  // Head
  `<html>`\n
  `<head>`\n
  `<meta http-equiv="Content-Type" content="text/html; charset=utf-8">`\n
  `<link href="dil_html.css" rel="stylesheet" type="text/css">`\n
  `</head>`\n
  `<body>`[],
  // CompBegin
  `<div class="compilerinfo">`,
  // CompEnd
  `</div>`,
  // Error
  `<p class="error %s">%s(%d)%s: %s</p>`,
  // SyntaxBegin
  `<span class="%s %s">`,
  // SyntaxEnd
  `</span>`,
  // SrcBegin
  `<pre class="sourcecode">`,
  // SrcEnd
  `</pre>`,
  // Tail
  `</body>`\n
  `</html>`,
  // Identifier
  `<span class="i">%s</span>`,
  // Comment
  `<span class="c%s">%s</span>`,
  // StringLiteral
  `<span class="sl">%s</span>`,
  // CharLiteral
  `<span class="cl">%s</span>`,
  // Operator
  `<span class="op">%s</span>`,
  // LorG
  `<span class="oplg">&lt;&gt;</span>`,
  // LessEqual
  `<span class="ople">&lt;=</span>`,
  // GreaterEqual
  `<span class="opge">&gt;=</span>`,
  // AndLogical
  `<span class="opaa">&amp;&amp;</span>`,
  // OrLogical
  `<span class="opoo">||</span>`,
  // NotEqual
  `<span class="opne">!=</span>`,
  // Not
  `<span class="opn">!</span>`,
  // Number
  `<span class="n">%s</span>`,
  // Bracket
  `<span class="br">%s</span>`,
  // SpecialToken
  `<span class="st">%s</span>`,
  // Shebang
  `<span class="shebang">%s</span>`,
  // Keyword
  `<span class="k">%s</span>`,
  // HLineBegin
  `<span class="hl">#line`,
  // HLineEnd
  `</span>`,
  // Filespec
  `<span class="fs">%s</span>`,
];
|
|
170
|
|
/// Markup fragments for XML output, indexed by DocPart.
/// Fix: literal '<' and '&' in the rendered operator text are now written
/// as the predefined entities (&lt; &gt; &amp;) — raw '<'/'&' in XML text
/// content makes the document ill-formed.
auto xml_tags = [
  // Head
  `<?xml version="1.0"?>`\n
  `<?xml-stylesheet href="dil_xml.css" type="text/css"?>`\n
  `<root>`[],
  // CompBegin
  `<compilerinfo>`,
  // CompEnd
  `</compilerinfo>`,
  // Error
  `<error t="%s">%s(%d)%s: %s</error>`,
  // SyntaxBegin
  `<%s t="%s">`,
  // SyntaxEnd
  `</%s>`,
  // SrcBegin
  `<sourcecode>`,
  // SrcEnd
  `</sourcecode>`,
  // Tail
  `</root>`,
  // Identifier
  "<i>%s</i>",
  // Comment
  `<c t="%s">%s</c>`,
  // StringLiteral
  "<sl>%s</sl>",
  // CharLiteral
  "<cl>%s</cl>",
  // Operator
  "<op>%s</op>",
  // LorG
  `<op t="lg">&lt;&gt;</op>`,
  // LessEqual
  `<op t="le">&lt;=</op>`,
  // GreaterEqual
  `<op t="ge">&gt;=</op>`,
  // AndLogical
  `<op t="aa">&amp;&amp;</op>`,
  // OrLogical
  `<op t="oo">||</op>`,
  // NotEqual
  `<op t="ne">!=</op>`,
  // Not
  `<op t="n">!</op>`,
  // Number
  "<n>%s</n>",
  // Bracket
  "<br>%s</br>",
  // SpecialToken
  "<st>%s</st>",
  // Shebang
  "<shebang>%s</shebang>",
  // Keyword
  "<k>%s</k>",
  // HLineBegin
  "<hl>#line",
  // HLineEnd
  "</hl>",
  // Filespec
  "<fs>%s</fs>",
];
|
|
233
|
|
// Both tag tables must provide exactly one entry per DocPart member,
// in DocPart declaration order.
static assert(html_tags.length == DocPart.max+1);
static assert(xml_tags.length == DocPart.max+1);
|
|
236
|
|
/// Parses the source file fileName and writes it to stdout as a
/// highlighted document (HTML or XML, chosen by options), wrapping every
/// token in a kind-specific tag and every syntax-tree node in
/// SyntaxBegin/SyntaxEnd markers. Lexer and parser errors are emitted in
/// a compiler-messages section before the source listing.
void syntaxToDoc(string fileName, DocOption options)
{
  auto tags = options & DocOption.HTML ? html_tags : xml_tags;
  auto sourceText = loadFile(fileName);
  auto parser = new Parser(sourceText, fileName);
  parser.start();
  auto root = parser.parseModule();
  auto lx = parser.lx;

  auto token = lx.head;
  char* end = lx.text.ptr; // End of the previously printed token's text.

  writefln(tags[DocPart.Head]);
  // Output error messages.
  if (lx.errors.length || parser.errors.length)
  {
    writefln(tags[DocPart.CompBegin]);
    foreach (error; lx.errors)
    { // "L" marks lexer errors.
      writefln(tags[DocPart.Error], "L", lx.fileName, error.loc, "L", xml_escape(error.getMsg));
    }
    foreach (error; parser.errors)
    { // "P" marks parser errors.
      writefln(tags[DocPart.Error], "P", lx.fileName, error.loc, "P", xml_escape(error.getMsg));
    }
    writefln(tags[DocPart.CompEnd]);
  }
  writef(tags[DocPart.SrcBegin]);

  // For each token: the syntax nodes that begin/end exactly at it.
  Node[][Token*] beginNodes, endNodes;

  // Recursively records, for every node of the tree, which token starts
  // it and which token ends it. Nodes without a begin token are skipped.
  void populateAAs(Node[] nodes)
  {
    foreach (node; nodes)
    {
      auto begin = node.begin;
      if (begin)
      {
        auto end = node.end;
        assert(end); // A node with a begin token must have an end token.
        beginNodes[begin] ~= node;
        endNodes[end] ~= node;
      }
      if (node.children.length)
        populateAAs(node.children);
    }
  }
  populateAAs(root.children);

  // Single-letter tag/class name for a node category.
  char[] getTag(NodeCategory nc)
  {
    char[] tag;
    switch (nc)
    {
    alias NodeCategory NC;
    case NC.Declaration: tag = "d"; break;
    case NC.Statement: tag = "s"; break;
    case NC.Expression: tag = "e"; break;
    case NC.Type: tag = "t"; break;
    case NC.Other: tag = "o"; break;
    default: // Unreached categories: empty tag.
    }
    return tag;
  }

  // Traverse linked list and print tokens.
  while (token.type != TOK.EOF)
  {
    token = token.next; // Advance first: lx.head itself is never printed.

    // Print whitespace between previous and current token.
    if (end != token.start)
      writef("%s", end[0 .. token.start - end]);

    Node[]* nodes = token in beginNodes;

    if (nodes)
    { // Open a marker for every node that starts at this token.
      foreach (node; *nodes)
        writef(tags[DocPart.SyntaxBegin], getTag(node.category), getShortClassName(node));
    }

    printToken(token, tags);

    nodes = token in endNodes;

    if (nodes)
    { // Close in reverse order so the markers nest properly.
      foreach_reverse (node; *nodes)
        if (options & DocOption.HTML)
          writef(tags[DocPart.SyntaxEnd]); // HTML close tag takes no argument.
        else
          writef(tags[DocPart.SyntaxEnd], getTag(node.category));
    }

    end = token.end;
  }
  writef(tags[DocPart.SrcEnd], tags[DocPart.Tail]);
}
|
|
336
|
|
/// Lexes the source file fileName and writes its token stream to stdout
/// as a highlighted document (HTML or XML, chosen by options). Unlike
/// syntaxToDoc, no parsing is done, so only token-level markup is
/// emitted; lexer errors are listed in a compiler-messages section.
void tokensToDoc(string fileName, DocOption options)
{
  auto tags = options & DocOption.HTML ? html_tags : xml_tags;
  auto sourceText = loadFile(fileName);
  auto lx = new Lexer(sourceText, fileName);

  auto token = lx.getTokens();
  char* end = lx.text.ptr; // End of the previously printed token's text.

  writefln(tags[DocPart.Head]);

  // Output lexer error messages, if any ("L" marks lexer errors).
  if (lx.errors.length)
  {
    writefln(tags[DocPart.CompBegin]);
    foreach (error; lx.errors)
    {
      writefln(tags[DocPart.Error], "L", lx.fileName, error.loc, "L", xml_escape(error.getMsg));
    }
    writefln(tags[DocPart.CompEnd]);
  }
  writef(tags[DocPart.SrcBegin]);

  // Traverse linked list and print tokens.
  while (token.type != TOK.EOF)
  {
    token = token.next; // Advance first: the initial token is never printed.

    // Print whitespace between previous and current token.
    if (end != token.start)
      writef("%s", end[0 .. token.start - end]);
    printToken(token, tags);
    end = token.end;
  }
  // Quoted "\n" replaces the deprecated standalone escape-sequence
  // string literal \n (removed in later versions of the language).
  writef("\n", tags[DocPart.SrcEnd], "\n", tags[DocPart.Tail]);
}
|
|
372
|
|
/// Writes a single token to stdout wrapped in the markup fragment that
/// matches its kind.
/// Params:
///   token = the token to print; its source text is XML-escaped first.
///   tags  = one of html_tags/xml_tags, indexed by DocPart.
void printToken(Token* token, string[] tags)
{
  alias DocPart DP;
  string srcText = xml_escape(token.srcText); // Escape markup chars once for all cases.

  switch(token.type)
  {
  case TOK.Identifier:
    writef(tags[DP.Identifier], srcText);
    break;
  case TOK.Comment:
    string t; // Comment subtype: l = line (//), b = block (/*), n = nested (/+).
    switch (token.start[1]) // Second character distinguishes the three forms.
    {
    case '/': t = "l"; break;
    case '*': t = "b"; break;
    case '+': t = "n"; break;
    default:
      assert(0); // The lexer only produces the three forms above.
    }
    writef(tags[DP.Comment], t, srcText);
    break;
  case TOK.String:
    writef(tags[DP.StringLiteral], srcText);
    break;
  case TOK.CharLiteral, TOK.WCharLiteral, TOK.DCharLiteral:
    writef(tags[DP.CharLiteral], srcText);
    break;
  // Operators rendered with the generic Operator fragment; operators
  // containing markup metacharacters get dedicated cases below.
  case TOK.Assign, TOK.Equal,
       TOK.Less, TOK.Greater,
       TOK.LShiftAssign, TOK.LShift,
       TOK.RShiftAssign, TOK.RShift,
       TOK.URShiftAssign, TOK.URShift,
       TOK.OrAssign, TOK.OrBinary,
       TOK.AndAssign, TOK.AndBinary,
       TOK.PlusAssign, TOK.PlusPlus, TOK.Plus,
       TOK.MinusAssign, TOK.MinusMinus, TOK.Minus,
       TOK.DivAssign, TOK.Div,
       TOK.MulAssign, TOK.Mul,
       TOK.ModAssign, TOK.Mod,
       TOK.XorAssign, TOK.Xor,
       TOK.CatAssign,
       TOK.Tilde,
       TOK.Unordered,
       TOK.UorE,
       TOK.UorG,
       TOK.UorGorE,
       TOK.UorL,
       TOK.UorLorE,
       TOK.LorEorG:
    writef(tags[DP.Operator], srcText);
    break;
  case TOK.LorG:
    writef(tags[DP.LorG]);
    break;
  case TOK.LessEqual:
    writef(tags[DP.LessEqual]);
    break;
  case TOK.GreaterEqual:
    writef(tags[DP.GreaterEqual]);
    break;
  case TOK.AndLogical:
    writef(tags[DP.AndLogical]);
    break;
  case TOK.OrLogical:
    writef(tags[DP.OrLogical]);
    break;
  case TOK.NotEqual:
    writef(tags[DP.NotEqual]);
    break;
  case TOK.Not:
    // Check if this is part of a template instantiation.
    // TODO: comments aren't skipped.
    if (token.prev.type == TOK.Identifier && token.next.type == TOK.LParen)
      goto default; // Render "!(" of Ident!(… as a plain token.
    writef(tags[DP.Not]);
    break;
  case TOK.Int32, TOK.Int64, TOK.Uint32, TOK.Uint64,
       TOK.Float32, TOK.Float64, TOK.Float80,
       TOK.Imaginary32, TOK.Imaginary64, TOK.Imaginary80:
    writef(tags[DP.Number], srcText);
    break;
  case TOK.LParen, TOK.RParen, TOK.LBracket,
       TOK.RBracket, TOK.LBrace, TOK.RBrace:
    writef(tags[DP.Bracket], srcText);
    break;
  case TOK.Shebang:
    writef(tags[DP.Shebang], srcText);
    break;
  case TOK.HashLine:
    // A #line directive is printed piecewise: keyword, number and
    // optional filespec, preserving the exact whitespace between them.
    void printWS(char* start, char* end)
    {
      if (start != end)
        writef(start[0 .. end - start]);
    }
    writef(tags[DP.HLineBegin]);
    auto num = token.line_num;
    // Print whitespace between #line and number
    auto ptr = token.start + "#line".length;
    printWS(ptr, num.start);
    printToken(num, tags);
    if (token.line_filespec)
    {
      auto filespec = token.line_filespec;
      // Print whitespace between number and filespec
      printWS(num.end, filespec.start);
      writef(tags[DP.Filespec], xml_escape(filespec.srcText));

      ptr = filespec.end;
    }
    else
      ptr = num.end;
    // Print remaining whitespace
    printWS(ptr, token.end);
    writef(tags[DP.HLineEnd]);
    break;
  default:
    if (token.isKeyword())
      writef(tags[DP.Keyword], srcText);
    else if (token.isSpecialToken)
      writef(tags[DP.SpecialToken], srcText);
    else
      writef("%s", srcText); // Any other token: escaped text, no markup.
  }
}
|