comparison trunk/src/cmd/Generate.d @ 759:9c47f377ca0b

Revised module cmd.Generate. Added class TagMapLoader. Fixed StringExpression.getString() and related code in the Parser. Added options 'xml_map' and 'html_map' to config.d
author Aziz Köksal <aziz.koeksal@gmail.com>
date Fri, 15 Feb 2008 02:07:53 +0100
parents e4b60543c5e8
children 307905dadf5d
comparing 758:f4b9680c0e16 with 759:9c47f377ca0b
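
Before the line-by-line comparison: this changeset drops the hard-coded html_tags/xml_tags arrays and instead loads tag strings from the map files named by GlobalSettings.htmlMapFile and GlobalSettings.xmlMapFile, wrapping the result in the new TagMap class. The following is a minimal usage sketch based only on the new-side code in this diff; the map entries are made-up placeholders and the map is built by hand rather than through TagMapLoader.

    // Hedged sketch (not part of the changeset): hand-built map with
    // placeholder tag strings, standing in for TagMapLoader(infoMan).load(...).
    string[string] map;
    map["Identifier"] = `<span class="i">{0}</span>`;
    map["Keyword"]    = `<span class="k">{0}</span>`;
    auto tags = new TagMap(map);
    Stdout.format(tags.Keyword, "int"); // writes: <span class="k">int</span>
    auto head = tags["DocHead"];        // opIndex returns "" for missing keys
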
11 dil.ast.Expression, 11 dil.ast.Expression,
12 dil.ast.Types; 12 dil.ast.Types;
13 import dil.lexer.Lexer; 13 import dil.lexer.Lexer;
14 import dil.parser.Parser; 14 import dil.parser.Parser;
15 import dil.SourceText; 15 import dil.SourceText;
16 import dil.Information;
17 import dil.SettingsLoader;
18 import dil.Settings;
16 import common; 19 import common;
17 20
21 import tango.io.GrowBuffer;
18 import tango.io.Print; 22 import tango.io.Print;
19 23
20 /// Options for the generate command. 24 /// Options for the generate command.
21 enum DocOption 25 enum DocOption
22 { 26 {
26 HTML = 1<<2, 30 HTML = 1<<2,
27 XML = 1<<3 31 XML = 1<<3
28 } 32 }
29 33
30 /// Executes the command. 34 /// Executes the command.
31 void execute(string filePath, DocOption options) 35 void execute(string filePath, DocOption options, InfoManager infoMan)
32 { 36 {
33 assert(options != DocOption.Empty); 37 assert(options != DocOption.Empty);
38 auto mapFilePath = options & DocOption.HTML ? GlobalSettings.htmlMapFile
39 : GlobalSettings.xmlMapFile;
40 auto map = TagMapLoader(infoMan).load(mapFilePath);
41 auto tags = new TagMap(map);
42
43 if (infoMan.hasInfo)
44 return;
45
34 if (options & DocOption.Syntax) 46 if (options & DocOption.Syntax)
35 syntaxToDoc(filePath, Stdout, options); 47 syntaxToDoc(filePath, tags, Stdout, options);
36 else 48 else
37 tokensToDoc(filePath, Stdout, options); 49 tokensToDoc(filePath, tags, Stdout, options);
38 } 50 }
39 51
40 /// Escapes the characters '<', '>' and '&' with named character entities. 52 /// Escapes the characters '<', '>' and '&' with named character entities.
41 char[] xml_escape(char[] text) 53 char[] xml_escape(char[] text)
42 { 54 {
54 // Nothing escaped. Return original text. 66 // Nothing escaped. Return original text.
55 delete result; 67 delete result;
56 return text; 68 return text;
57 } 69 }
58 70
deleted (old lines 59-82):
59
60 /// Find object in subject and return position.
61 /// Returns -1 if no match was found.
62 /+int find(char[] subject, char[] object)
63 {
64 if (object.length > subject.length)
65 return -1;
66 foreach (i, c; subject)
67 {
68 if (c == object[0])
69 {
70 if (object.length > (subject.length - i))
71 return -1;
72 if (object == subject[i..i+object.length])
73 return i;
74 }
75 }
76 return -1;
77 }+/
78
79 /++
80 Find the last occurrence of object in subject.
81 Returns the index if found, or -1 if not.
82 +/
inserted (new lines 71-130):
71 class TagMap
72 {
73 string[string] table;
74
75 this(string[string] table)
76 {
77 this.table = table;
78 Identifier = this["Identifier", "{0}"];
79 String = this["String", "{0}"];
80 Char = this["Char", "{0}"];
81 Number = this["Number", "{0}"];
82 Keyword = this["Keyword", "{0}"];
83 LineC = this["LineC", "{0}"];
84 BlockC = this["BlockC", "{0}"];
85 NestedC = this["NestedC", "{0}"];
86 Shebang = this["Shebang", "{0}"];
87 HLine = this["HLine", "{0}"];
88 Filespec = this["Filespec", "{0}"];
89 Illegal = this["Illegal", "{0}"];
90 Newline = this["Newline", "{0}"];
91 SpecialToken = this["SpecialToken", "{0}"];
92 Declaration = this["Declaration", "d"];
93 Statement = this["Statement", "s"];
94 Expression = this["Expression", "e"];
95 Type = this["Type", "t"];
96 Other = this["Other", "o"];
97 EOF = this["EOF", ""];
98 }
99
100 string opIndex(string str, string fallback = "")
101 {
102 auto p = str in table;
103 if (p)
104 return *p;
105 return fallback;
106 }
107
108 string Identifier, String, Char, Number, Keyword, LineC, BlockC,
109 NestedC, Shebang, HLine, Filespec, Illegal, Newline, SpecialToken,
110 Declaration, Statement, Expression, Type, Other, EOF;
111
112 /// Returns the tag for the category 'nc'.
113 string getTag(NodeCategory nc)
114 {
115 string tag;
116 switch (nc)
117 { alias NodeCategory NC;
118 case NC.Declaration: tag = Declaration; break;
119 case NC.Statement: tag = Statement; break;
120 case NC.Expression: tag = Expression; break;
121 case NC.Type: tag = Type; break;
122 case NC.Other: tag = Other; break;
123 default: assert(0);
124 }
125 return tag;
126 }
127 }
128
129 /// Find the last occurrence of object in subject.
130 /// Returns: the index if found, or -1 if not.
83 int rfind(char[] subject, char object) 131 int rfind(char[] subject, char object)
84 { 132 {
85 foreach_reverse(i, c; subject) 133 foreach_reverse(i, c; subject)
86 if (c == object) 134 if (c == object)
87 return i; 135 return i;
127 name = name[0 .. $ - suffixLength]; 175 name = name[0 .. $ - suffixLength];
128 // Store the name in the table. 176 // Store the name in the table.
129 name_table[node.kind] = name; 177 name_table[node.kind] = name;
130 return name; 178 return name;
131 } 179 }
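
A hedged illustration of the name caching just above, assuming (as the surrounding category handling suggests) that "Declaration" is one of the stripped suffixes; the node variable is hypothetical:

    // Hypothetical node whose class is named EnumDeclaration:
    auto name = getShortClassName(someEnumDeclarationNode); // yields "Enum"
    // A later call for the same node kind is served from name_table.
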
132
133 /// Indices into the XML and HTML tag arrays.
134 enum DocPart
135 {
136 Head,
137 CompBegin,
138 CompEnd,
139 Error,
140 SyntaxBegin,
141 SyntaxEnd,
142 SrcBegin,
143 SrcEnd,
144 Tail,
145 // Tokens:
146 Identifier,
147 Comment,
148 StringLiteral,
149 CharLiteral,
150 Operator,
151 LorG,
152 LessEqual,
153 GreaterEqual,
154 AndLogical,
155 OrLogical,
156 NotEqual,
157 Not,
158 Number,
159 Bracket,
160 SpecialToken,
161 Shebang,
162 Keyword,
163 HLineBegin,
164 HLineEnd,
165 Filespec,
166 }
167
168 auto html_tags = [
169 // Head
170 `<html>`\n
171 `<head>`\n
172 `<meta http-equiv="Content-Type" content="text/html; charset=utf-8">`\n
173 `<link href="html.css" rel="stylesheet" type="text/css">`\n
174 `</head>`\n
175 `<body>`[],
176 // CompBegin
177 `<div class="compilerinfo">`,
178 // CompEnd
179 `</div>`,
180 // Error
181 `<p class="error {0}">{1}({2}){3}: {4}</p>`,
182 // SyntaxBegin
183 `<span class="{0} {1}">`,
184 // SyntaxEnd
185 `</span>`,
186 // SrcBegin
187 `<pre class="sourcecode">`,
188 // SrcEnd
189 `</pre>`,
190 // Tail
191 `</html>`,
192 // Identifier
193 `<span class="i">{0}</span>`,
194 // Comment
195 `<span class="c{0}">{1}</span>`,
196 // StringLiteral
197 `<span class="sl">{0}</span>`,
198 // CharLiteral
199 `<span class="chl">{0}</span>`,
200 // Operator
201 `<span class="op">{0}</span>`,
202 // LorG
203 `<span class="oplg">&lt;&gt;</span>`,
204 // LessEqual
205 `<span class="ople">&lt;=</span>`,
206 // GreaterEqual
207 `<span class="opge">&gt;=</span>`,
208 // AndLogical
209 `<span class="opaa">&amp;&amp;</span>`,
210 // OrLogical
211 `<span class="opoo">||</span>`,
212 // NotEqual
213 `<span class="opne">!=</span>`,
214 // Not
215 `<span class="opn">!</span>`,
216 // Number
217 `<span class="n">{0}</span>`,
218 // Bracket
219 `<span class="br">{0}</span>`,
220 // SpecialToken
221 `<span class="st">{0}</span>`,
222 // Shebang
223 `<span class="shebang">{0}</span>`,
224 // Keyword
225 `<span class="k">{0}</span>`,
226 // HLineBegin
227 `<span class="hl">`,
228 // HLineEnd
229 "</span>",
230 // Filespec
231 `<span class="fs">{0}</span>`,
232 ];
233
234 auto xml_tags = [
235 // Head
236 `<?xml version="1.0"?>`\n
237 `<?xml-stylesheet href="xml.css" type="text/css"?>`\n
238 `<root>`[],
239 // CompBegin
240 `<compilerinfo>`,
241 // CompEnd
242 `</compilerinfo>`,
243 // Error
244 `<error t="{0}">{1}({2}){3}: {4}</error>`,
245 // SyntaxBegin
246 `<{0} t="{1}">`,
247 // SyntaxEnd
248 `</{0}>`,
249 // SrcBegin
250 `<sourcecode>`,
251 // SrcEnd
252 `</sourcecode>`,
253 // Tail
254 `</root>`,
255 // Identifier
256 "<i>{0}</i>",
257 // Comment
258 `<c t="{0}">{1}</c>`,
259 // StringLiteral
260 "<sl>{0}</sl>",
261 // CharLiteral
262 "<cl>{0}</cl>",
263 // Operator
264 "<op>{0}</op>",
265 // LorG
266 `<op t="lg">&lt;&gt;</op>`,
267 // LessEqual
268 `<op t="le">&lt;=</op>`,
269 // GreaterEqual
270 `<op t="ge">&gt;=</op>`,
271 // AndLogical
272 `<op t="aa">&amp;&amp;</op>`,
273 // OrLogical
274 `<op t="oo">||</op>`,
275 // NotEqual
276 `<op t="ne">!=</op>`,
277 // Not
278 `<op t="n">!</op>`,
279 // Number
280 "<n>{0}</n>",
281 // Bracket
282 "<br>{0}</br>",
283 // SpecialToken
284 "<st>{0}</st>",
285 // Shebang
286 "<shebang>{0}</shebang>",
287 // Keyword
288 "<k>{0}</k>",
289 // HLineBegin
290 "<hl>",
291 // HLineEnd
292 "</hl>",
293 // Filespec
294 "<fs>{0}</fs>",
295 ];
296
297 // The size of the arrays must equal the number of members in enum DocPart.
298 static assert(html_tags.length == DocPart.max+1);
299 static assert(xml_tags.length == DocPart.max+1);
300 180
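
The DocPart enum and the html_tags/xml_tags arrays removed above are superseded by name-based lookups into the loaded TagMap. A hedged before/after comparison, using only calls that appear elsewhere in this diff (the new side binds tags["NodeBegin"] to a local first; it is inlined here for brevity):

    // Before (old side): format strings indexed by the DocPart enum.
    print(tags[DocPart.Head]~\n);
    print.format(tags[DocPart.SyntaxBegin],
                 getTag(node.category), getShortClassName(node));

    // After (new side): format strings looked up by name in the TagMap,
    // with members such as tags.Keyword covering the common token kinds.
    print(tags["DocHead"]);
    print.format(tags["NodeBegin"],
                 tags.getTag(node.category), getShortClassName(node));
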
301 /// Extended token structure. 181 /// Extended token structure.
302 struct TokenEx 182 struct TokenEx
303 { 183 {
304 Token* token; /// The lexer token. 184 Token* token; /// The lexer token.
362 push(n); 242 push(n);
363 return super.dispatch(n); 243 return super.dispatch(n);
364 } 244 }
365 } 245 }
366 246
deleted (old lines 367-402):
367 char getTag(NodeCategory nc)
368 {
369 char tag;
370 switch (nc)
371 {
372 alias NodeCategory NC;
373 case NC.Declaration: tag = 'd'; break;
374 case NC.Statement: tag = 's'; break;
375 case NC.Expression: tag = 'e'; break;
376 case NC.Type: tag = 't'; break;
377 case NC.Other: tag = 'o'; break;
378 default:
379 assert(0);
380 }
381 return tag;
382 }
383
384 void printErrors(Lexer lx, string[] tags, Print!(char) print)
385 {
386 foreach (error; lx.errors)
387 {
388 print.formatln(tags[DocPart.Error], "L", error.filePath, Format("{0},{1}", error.loc, error.col), "L", xml_escape(error.getMsg));
389 }
390 }
391
392 void printErrors(Parser parser, string[] tags, Print!(char) print)
393 {
394 foreach (error; parser.errors)
395 {
396 print.formatln(tags[DocPart.Error], "P", error.filePath, Format("{0},{1}", error.loc, error.col), "P", xml_escape(error.getMsg));
397 }
398 }
399
400 void syntaxToDoc(string filePath, Print!(char) print, DocOption options)
401 {
402 auto tags = options & DocOption.HTML ? html_tags : xml_tags;
inserted (new lines 247-260):
247 void printErrors(Lexer lx, TagMap tags, Print!(char) print)
248 {
249 foreach (e; lx.errors)
250 print.format(tags["LexerError"], e.filePath, e.loc, e.col, xml_escape(e.getMsg));
251 }
252
253 void printErrors(Parser parser, TagMap tags, Print!(char) print)
254 {
255 foreach (e; parser.errors)
256 print.format(tags["ParserError"], e.filePath, e.loc, e.col, xml_escape(e.getMsg));
257 }
258
259 void syntaxToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
260 {
403 auto parser = new Parser(new SourceText(filePath, true)); 261 auto parser = new Parser(new SourceText(filePath, true));
404 auto root = parser.start(); 262 auto root = parser.start();
405 auto lx = parser.lexer; 263 auto lx = parser.lexer;
406 264
407 auto builder = new TokenExBuilder(); 265 auto builder = new TokenExBuilder();
408 auto tokenExList = builder.build(root, lx.firstToken()); 266 auto tokenExList = builder.build(root, lx.firstToken());
409 267
410 print(tags[DocPart.Head]~\n); 268 print(tags["DocHead"]);
411 if (lx.errors.length || parser.errors.length) 269 if (lx.errors.length || parser.errors.length)
412 { // Output error messages. 270 { // Output error messages.
413 print(tags[DocPart.CompBegin]~\n); 271 print(tags["CompBegin"]);
414 printErrors(lx, tags, print); 272 printErrors(lx, tags, print);
415 printErrors(parser, tags, print); 273 printErrors(parser, tags, print);
416 print(tags[DocPart.CompEnd]~\n); 274 print(tags["CompEnd"]);
417 } 275 }
418 print(tags[DocPart.SrcBegin]); 276 print(tags["SourceBegin"]);
277
278 auto tagNodeBegin = tags["NodeBegin"];
279 auto tagNodeEnd = tags["NodeEnd"];
419 280
420 // Iterate over list of tokens. 281 // Iterate over list of tokens.
421 foreach (ref tokenEx; tokenExList) 282 foreach (ref tokenEx; tokenExList)
422 { 283 {
423 auto token = tokenEx.token; 284 auto token = tokenEx.token;
424 // Print whitespace. 285
425 if (token.ws) 286 token.ws && print(token.wsChars); // Print preceding whitespace.
426 print(token.wsChars); 287 // <node>
427
428 foreach (node; tokenEx.beginNodes) 288 foreach (node; tokenEx.beginNodes)
429 print.format(tags[DocPart.SyntaxBegin], getTag(node.category), getShortClassName(node)); 289 print.format(tagNodeBegin, tags.getTag(node.category), getShortClassName(node));
430 290 // Token text.
431 printToken(token, tags, print); 291 printToken(token, tags, print);
432 292 // </node>
433 if (options & DocOption.HTML) 293 if (options & DocOption.HTML)
434 foreach_reverse (node; tokenEx.endNodes) 294 foreach_reverse (node; tokenEx.endNodes)
435 print(tags[DocPart.SyntaxEnd]); 295 print(tagNodeEnd);
436 else 296 else
437 foreach_reverse (node; tokenEx.endNodes) 297 foreach_reverse (node; tokenEx.endNodes)
438 print.format(tags[DocPart.SyntaxEnd], getTag(node.category)); 298 print.format(tagNodeEnd, tags.getTag(node.category));
439 } 299 }
440 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); 300 print(tags["SourceEnd"]);
301 print(tags["DocEnd"]);
441 } 302 }
442 303
443 /// Prints all tokens of a source file using the buffer print. 304 /// Prints all tokens of a source file using the buffer print.
444 void tokensToDoc(string filePath, Print!(char) print, DocOption options) 305 void tokensToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
445 { 306 {
446 auto tags = options & DocOption.HTML ? html_tags : xml_tags;
447 auto lx = new Lexer(new SourceText(filePath, true)); 307 auto lx = new Lexer(new SourceText(filePath, true));
448 lx.scanAll(); 308 lx.scanAll();
449 309
450 print(tags[DocPart.Head]~\n); 310 print(tags["DocHead"]);
451 if (lx.errors.length) 311 if (lx.errors.length)
452 { 312 {
453 print(tags[DocPart.CompBegin]~\n); 313 print(tags["CompBegin"]);
454 printErrors(lx, tags, print); 314 printErrors(lx, tags, print);
455 print(tags[DocPart.CompEnd]~\n); 315 print(tags["CompEnd"]);
456 } 316 }
457 print(tags[DocPart.SrcBegin]); 317 print(tags["SourceBegin"]);
458 318
459 // Traverse linked list and print tokens. 319 // Traverse linked list and print tokens.
460 auto token = lx.firstToken(); 320 auto token = lx.firstToken();
461 while (token.kind != TOK.EOF) 321 while (token)
462 { 322 {
463 // Print whitespace. 323 token.ws && print(token.wsChars); // Print preceding whitespace.
464 if (token.ws)
465 print(token.wsChars);
466 printToken(token, tags, print); 324 printToken(token, tags, print);
467 token = token.next; 325 token = token.next;
468 } 326 }
469 print(\n~tags[DocPart.SrcEnd])(\n~tags[DocPart.Tail]); 327 print(tags["SourceEnd"]);
470 } 328 print(tags["DocEnd"]);
329 }
330
331 void printToken(Token* token, string[] tags, Print!(char) print)
332 {}
471 333
472 /// Prints a token with tags using the buffer print. 334 /// Prints a token with tags using the buffer print.
473 void printToken(Token* token, string[] tags, Print!(char) print) 335 void printToken(Token* token, TagMap tags, Print!(char) print)
474 { 336 {
475 alias DocPart DP;
476 string srcText = xml_escape(token.srcText);
477
478 switch(token.kind) 337 switch(token.kind)
479 { 338 {
480 case TOK.Identifier: 339 case TOK.Identifier:
481 print.format(tags[DP.Identifier], srcText); 340 print.format(tags.Identifier, token.srcText);
482 break; 341 break;
483 case TOK.Comment: 342 case TOK.Comment:
484 string t; 343 string formatStr;
485 switch (token.start[1]) 344 switch (token.start[1])
486 { 345 {
487 case '/': t = "l"; break; 346 case '/': formatStr = tags.LineC; break;
488 case '*': t = "b"; break; 347 case '*': formatStr = tags.BlockC; break;
489 case '+': t = "n"; break; 348 case '+': formatStr = tags.NestedC; break;
490 default: 349 default: assert(0);
491 assert(0); 350 }
492 } 351 print.format(formatStr, xml_escape(token.srcText));
493 print.format(tags[DP.Comment], t, srcText);
494 break; 352 break;
495 case TOK.String: 353 case TOK.String:
496 print.format(tags[DP.StringLiteral], srcText); 354 print.format(tags.String, xml_escape(token.srcText));
497 break; 355 break;
498 case TOK.CharLiteral: 356 case TOK.CharLiteral:
499 print.format(tags[DP.CharLiteral], srcText); 357 print.format(tags.Char, xml_escape(token.srcText));
500 break;
501 case TOK.Assign, TOK.Equal,
502 TOK.Less, TOK.Greater,
503 TOK.LShiftAssign, TOK.LShift,
504 TOK.RShiftAssign, TOK.RShift,
505 TOK.URShiftAssign, TOK.URShift,
506 TOK.OrAssign, TOK.OrBinary,
507 TOK.AndAssign, TOK.AndBinary,
508 TOK.PlusAssign, TOK.PlusPlus, TOK.Plus,
509 TOK.MinusAssign, TOK.MinusMinus, TOK.Minus,
510 TOK.DivAssign, TOK.Div,
511 TOK.MulAssign, TOK.Mul,
512 TOK.ModAssign, TOK.Mod,
513 TOK.XorAssign, TOK.Xor,
514 TOK.CatAssign,
515 TOK.Tilde,
516 TOK.Unordered,
517 TOK.UorE,
518 TOK.UorG,
519 TOK.UorGorE,
520 TOK.UorL,
521 TOK.UorLorE,
522 TOK.LorEorG:
523 print.format(tags[DP.Operator], srcText);
524 break;
525 case TOK.LorG:
526 print(tags[DP.LorG]);
527 break;
528 case TOK.LessEqual:
529 print(tags[DP.LessEqual]);
530 break;
531 case TOK.GreaterEqual:
532 print(tags[DP.GreaterEqual]);
533 break;
534 case TOK.AndLogical:
535 print(tags[DP.AndLogical]);
536 break;
537 case TOK.OrLogical:
538 print(tags[DP.OrLogical]);
539 break;
540 case TOK.NotEqual:
541 print(tags[DP.NotEqual]);
542 break;
543 case TOK.Not:
544 // Check if this is part of a template instantiation.
545 if (token.prevNWS.kind == TOK.Identifier && token.nextNWS.kind == TOK.LParen)
546 goto default;
547 print(tags[DP.Not]);
548 break; 358 break;
549 case TOK.Int32, TOK.Int64, TOK.Uint32, TOK.Uint64, 359 case TOK.Int32, TOK.Int64, TOK.Uint32, TOK.Uint64,
550 TOK.Float32, TOK.Float64, TOK.Float80, 360 TOK.Float32, TOK.Float64, TOK.Float80,
551 TOK.Imaginary32, TOK.Imaginary64, TOK.Imaginary80: 361 TOK.Imaginary32, TOK.Imaginary64, TOK.Imaginary80:
552 print.format(tags[DP.Number], srcText); 362 print.format(tags.Number, token.srcText);
553 break;
554 case TOK.LParen, TOK.RParen, TOK.LBracket,
555 TOK.RBracket, TOK.LBrace, TOK.RBrace:
556 print.format(tags[DP.Bracket], srcText);
557 break; 363 break;
558 case TOK.Shebang: 364 case TOK.Shebang:
559 print.format(tags[DP.Shebang], srcText); 365 print.format(tags.Shebang, xml_escape(token.srcText));
560 break; 366 break;
561 case TOK.HashLine: 367 case TOK.HashLine:
368 auto formatStr = tags.HLine;
369 // The text to be inserted into formatStr.
370 auto buffer = new GrowBuffer;
371 auto print2 = new Print!(char)(Format, buffer);
372
562 void printWS(char* start, char* end) 373 void printWS(char* start, char* end)
563 { 374 {
564 if (start != end) 375 start != end && print2(start[0 .. end - start]);
565 print(start[0 .. end - start]); 376 }
566 } 377
567 print(tags[DP.HLineBegin]);
568 auto num = token.tokLineNum; 378 auto num = token.tokLineNum;
569 if (num is null) 379 if (num is null)
570 { 380 { // Malformed #line
571 print(token.srcText); 381 print.format(formatStr, token.srcText);
572 print(tags[DP.HLineEnd]);
573 break; 382 break;
574 } 383 }
575 // Print whitespace between #line and number 384
385 // Print whitespace between #line and number.
576 auto ptr = token.start; 386 auto ptr = token.start;
577 printWS(ptr, num.start); // prints "#line" as well 387 printWS(ptr, num.start); // Prints "#line" as well.
578 printToken(num, tags, print); 388 printToken(num, tags, print2);
579 if (token.tokLineFilespec) 389
580 { 390 if (auto filespec = token.tokLineFilespec)
581 auto filespec = token.tokLineFilespec; 391 { // Print whitespace between number and filespec.
582 // Print whitespace between number and filespec
583 printWS(num.end, filespec.start); 392 printWS(num.end, filespec.start);
584 print.format(tags[DP.Filespec], xml_escape(filespec.srcText)); 393 print2.format(tags.Filespec, xml_escape(filespec.srcText));
585
586 ptr = filespec.end; 394 ptr = filespec.end;
587 } 395 }
588 else 396 else
589 ptr = num.end; 397 ptr = num.end;
590 // Print remaining whitespace 398 // Print remaining whitespace
591 printWS(ptr, token.end); 399 printWS(ptr, token.end);
592 print(tags[DP.HLineEnd]); 400 // Finally print the whole token.
401 print.format(formatStr, cast(char[])buffer.slice());
402 break;
403 case TOK.Illegal:
404 print.format(tags.Illegal, token.srcText());
405 break;
406 case TOK.Newline:
407 print.format(tags.Newline, token.srcText());
408 break;
409 case TOK.EOF:
410 print(tags.EOF);
593 break; 411 break;
594 default: 412 default:
595 if (token.isKeyword()) 413 if (token.isKeyword())
596 print.format(tags[DP.Keyword], srcText); 414 print.format(tags.Keyword, token.srcText);
597 else if (token.isSpecialToken) 415 else if (token.isSpecialToken)
598 print.format(tags[DP.SpecialToken], srcText); 416 print.format(tags.SpecialToken, token.srcText);
599 else 417 else
600 print(srcText); 418 print(tags[token.srcText]);
601 } 419 }
602 } 420 }
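
Finally, a hedged sketch of driving the revised entry point. Only the signatures visible in this changeset are relied on; the default construction of InfoManager, the file path, and the option combination are assumptions.

    // Hedged sketch: run the generate command with the new signature.
    // The cast is needed because OR-ing enum members yields the base type.
    auto infoMan = new InfoManager();
    auto options = cast(DocOption)(DocOption.Syntax | DocOption.HTML);
    execute("src/main.d", options, infoMan); // hypothetical input file
    // execute() returns early if loading the tag map reported problems.
    if (infoMan.hasInfo)
      Stdout("errors were reported while generating the document.").newline;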