# HG changeset patch
# User Aziz Köksal
# Date 1203117133 -3600
# Node ID 307905dadf5d9795cd5d795c1ec5ee2c3be81cce
# Parent ea9e8b141742cffdd8b3faa7c142d2a26579c77d
DDoc code sections are highlighted now.

diff -r ea9e8b141742 -r 307905dadf5d trunk/src/cmd/DDoc.d
--- a/trunk/src/cmd/DDoc.d	Fri Feb 15 03:19:01 2008 +0100
+++ b/trunk/src/cmd/DDoc.d	Sat Feb 16 00:12:13 2008 +0100
@@ -4,6 +4,7 @@
 +/
 module cmd.DDoc;
 
+import cmd.Generate;
 import dil.doc.Parser;
 import dil.doc.Macro;
 import dil.doc.Doc;
@@ -47,6 +48,9 @@
 //     foreach (k, v; mtable.table)
 //       Stdout(k)("=")(v.text);
 
+  auto tokenHL = new TokenHighlighter(infoMan); // For DDoc code sections.
+
+  // Process D files.
   foreach (filePath; filePaths)
   {
     auto mod = new Module(filePath, infoMan);
@@ -70,7 +74,7 @@
       infoMan2 = new InfoManager();
     }
 
-    writeDocFile(dest.toString(), mod, mtable, incUndoc, infoMan2);
+    writeDocFile(dest.toString(), mod, mtable, incUndoc, tokenHL, infoMan2);
 
     if (infoMan2)
       infoMan ~= infoMan2.info;
@@ -78,7 +82,7 @@
 }
 
 void writeDocFile(string dest, Module mod, MacroTable mtable, bool incUndoc,
-                  InfoManager infoMan)
+                  TokenHighlighter tokenHL, InfoManager infoMan)
 {
   // Create a macro environment for this module.
   mtable = new MacroTable(mtable);
@@ -93,8 +97,8 @@
   mtable.insert("DATETIME", time_str.dup);
   mtable.insert("YEAR", time_str[20..24].dup);
 
-  auto doc = new DDocEmitter(mtable, incUndoc);
-  doc.emit(mod);
+  auto doc = new DDocEmitter(mtable, incUndoc, mod, tokenHL);
+  doc.emit();
   // Set BODY macro to the text produced by the DDocEmitter.
   mtable.insert("BODY", doc.text);
   // Do the macro expansion pass.
@@ -116,17 +120,22 @@
   char[] text;
   bool includeUndocumented;
   MacroTable mtable;
+  Module modul;
+  TokenHighlighter tokenHL;
 
-  this(MacroTable mtable, bool includeUndocumented)
+  this(MacroTable mtable, bool includeUndocumented, Module modul,
+       TokenHighlighter tokenHL)
   {
     this.mtable = mtable;
     this.includeUndocumented = includeUndocumented;
+    this.modul = modul;
+    this.tokenHL = tokenHL;
   }
 
   /// Entry method.
-  char[] emit(Module mod)
+  char[] emit()
   {
-    if (auto d = mod.moduleDecl)
+    if (auto d = modul.moduleDecl)
     {
       if (ddoc(d))
       {
@@ -135,7 +144,7 @@
         DESC({ writeComment(); });
       }
     }
-    MEMBERS("MODULE", { visitD(mod.root); });
+    MEMBERS("MODULE", { visitD(modul.root); });
     write("\n");
     return text;
   }
@@ -336,7 +345,8 @@
         while (++p < end)
           if (p+2 < end && *p == '-' && p[1] == '-' && p[2] == '-')
             break;
-        result ~= "$(D_CODE " ~ scanCodeSection(makeString(codeBegin, p)) ~ ")";
+        auto codeText = makeString(codeBegin, p);
+        result ~= tokenHL.highlight(codeText, modul.filePath);
        while (p < end && *p == '-')
           p++;
         continue;
@@ -350,11 +360,6 @@
     return result;
   }
 
-  char[] scanCodeSection(char[] text)
-  {
-    return text;
-  }
-
   /// Escapes '<', '>' and '&' with named HTML entities.
   char[] escape(char[] text)
   {
@@ -698,10 +703,8 @@
     if (d.typeNode)
       type = textSpan(d.typeNode.baseType.begin, d.typeNode.end);
     foreach (name; d.names)
-    {
       DECL({ write(escape(type), " "); SYMBOL(name.str); });
-      DESC({ writeComment(); });
-    }
+    DESC({ writeComment(); });
     return d;
   }
 
diff -r ea9e8b141742 -r 307905dadf5d trunk/src/cmd/Generate.d
--- a/trunk/src/cmd/Generate.d	Fri Feb 15 03:19:01 2008 +0100
+++ b/trunk/src/cmd/Generate.d	Sat Feb 16 00:12:13 2008 +0100
@@ -12,6 +12,7 @@
        dil.ast.Types;
 import dil.lexer.Lexer;
 import dil.parser.Parser;
+import dil.semantic.Module;
 import dil.SourceText;
 import dil.Information;
 import dil.SettingsLoader;
@@ -44,9 +45,9 @@
     return;
 
   if (options & DocOption.Syntax)
-    syntaxToDoc(filePath, tags, Stdout, options);
+    highlightSyntax(filePath, tags, Stdout, options);
   else
-    tokensToDoc(filePath, tags, Stdout, options);
+    highlightTokens(filePath, tags, Stdout);
 }
 
 /// Escapes the characters '<', '>' and '&' with named character entities.
@@ -193,15 +194,12 @@
 
   TokenEx[] build(Node root, Token* first)
   {
-    Token* token = first;
+    auto token = first;
 
-    uint count;
-    while (token)
-    {
+    uint count; // Count tokens.
+    for (; token; token = token.next)
       count++;
-      token = token.next;
-    }
-
+    // Create the exact number of TokenEx instances.
     auto toks = new TokenEx[count];
     token = first;
     foreach (ref tokEx; toks)
@@ -224,7 +222,8 @@
     return *p;
   }
 
-  void push()(Node n)
+  // Override dispatch function.
+  override Node dispatch(Node n)
   {
     auto begin = n.begin;
     if (begin)
@@ -234,12 +233,6 @@
       txbegin.beginNodes ~= n;
       txend.endNodes ~= n;
     }
-  }
-
-  // Override dispatch function.
-  override Node dispatch(Node n)
-  {
-    push(n);
     return super.dispatch(n);
   }
 }
@@ -256,7 +249,7 @@
     print.format(tags["ParserError"], e.filePath, e.loc, e.col, xml_escape(e.getMsg));
 }
 
-void syntaxToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
+void highlightSyntax(string filePath, TagMap tags, Print!(char) print, DocOption options)
 {
   auto parser = new Parser(new SourceText(filePath, true));
   auto root = parser.start();
@@ -284,6 +277,10 @@
     auto token = tokenEx.token;
 
     token.ws && print(token.wsChars); // Print preceding whitespace.
+    if (token.isWhitespace) {
+      printToken(token, tags, print);
+      continue;
+    }
     //
     foreach (node; tokenEx.beginNodes)
       print.format(tagNodeBegin, tags.getTag(node.category), getShortClassName(node));
@@ -302,7 +299,7 @@
 }
 
 /// Prints all tokens of a source file using the buffer print.
-void tokensToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
+void highlightTokens(string filePath, TagMap tags, Print!(char) print)
 {
   auto lx = new Lexer(new SourceText(filePath, true));
   lx.scanAll();
@@ -315,21 +312,51 @@
 
     print(tags["CompEnd"]);
   }
   print(tags["SourceBegin"]);
-  // Traverse linked list and print tokens.
-  auto token = lx.firstToken();
-  while (token)
-  {
+  for (auto token = lx.firstToken(); token; token = token.next) {
     token.ws && print(token.wsChars); // Print preceding whitespace.
     printToken(token, tags, print);
-    token = token.next;
   }
   print(tags["SourceEnd"]);
   print(tags["DocEnd"]);
 }
 
-void printToken(Token* token, string[] tags, Print!(char) print)
-{}
+class TokenHighlighter
+{
+  TagMap tags;
+  this(InfoManager infoMan, bool useHTML = true)
+  {
+    auto map = TagMapLoader(infoMan).load(GlobalSettings.htmlMapFile);
+    tags = new TagMap(map);
+  }
+
+  /// Highlights tokens in a DDoc code section.
+  /// Returns: a string with the highlighted tokens (in HTML tags).
+  string highlight(string text, string filePath)
+  {
+    auto buffer = new GrowBuffer(text.length);
+    auto print = new Print!(char)(Format, buffer);
+
+    auto lx = new Lexer(new SourceText(filePath, text));
+    lx.scanAll();
+
+    // Start the D_CODE macro.
+    print("$(D_CODE\n");
+    if (lx.errors.length)
+    { // Output error messages.
+      print(tags["CompBegin"]);
+      printErrors(lx, tags, print);
+      print(tags["CompEnd"]);
+    }
+    // Traverse linked list and print tokens.
+    for (auto token = lx.firstToken(); token; token = token.next) {
+      token.ws && print(token.wsChars); // Print preceding whitespace.
+      printToken(token, tags, print);
+    }
+    print("\n)");
+    return cast(char[])buffer.slice();
+  }
+}
 
 /// Prints a token with tags using the buffer print.
 void printToken(Token* token, TagMap tags, Print!(char) print)
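
To illustrate the effect of this change: given a module containing a DDoc code
section (the lines between the "---" delimiters of a doc comment, which the
scanner above detects by looking for three consecutive '-' characters), e.g.

    /// Returns the sum of a and b.
    /// ---
    /// int sum = add(1, 2);   // a DDoc code section
    /// ---
    int add(int a, int b) { return a + b; }

the emitter previously passed the section text through the now-removed
scanCodeSection() verbatim and merely wrapped it in $(D_CODE ...). With this
patch, TokenHighlighter.highlight() lexes the section and wraps every token in
a tag loaded from the html map file (GlobalSettings.htmlMapFile), so the
emitted macro text looks roughly like

    $(D_CODE
    <span class="k">int</span> <span class="i">sum</span> = <span class="i">add</span>(<span class="n">1</span>, <span class="n">2</span>);
    )

where the concrete markup depends entirely on the configured TagMap; the
<span> tags shown here are illustrative only, not the actual map entries.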