diff trunk/src/cmd/Generate.d @ 761:307905dadf5d

DDoc code sections are highlighted now.
author Aziz Köksal <aziz.koeksal@gmail.com>
date Sat, 16 Feb 2008 00:12:13 +0100
parents 9c47f377ca0b
children f26f13b5a3a3
line wrap: on
line diff
--- a/trunk/src/cmd/Generate.d	Fri Feb 15 03:19:01 2008 +0100
+++ b/trunk/src/cmd/Generate.d	Sat Feb 16 00:12:13 2008 +0100
@@ -12,6 +12,7 @@
        dil.ast.Types;
 import dil.lexer.Lexer;
 import dil.parser.Parser;
+import dil.semantic.Module;
 import dil.SourceText;
 import dil.Information;
 import dil.SettingsLoader;
@@ -44,9 +45,9 @@
     return;
 
   if (options & DocOption.Syntax)
-    syntaxToDoc(filePath, tags, Stdout, options);
+    highlightSyntax(filePath, tags, Stdout, options);
   else
-    tokensToDoc(filePath, tags, Stdout, options);
+    highlightTokens(filePath, tags, Stdout);
 }
 
 /// Escapes the characters '<', '>' and '&' with named character entities.
@@ -193,15 +194,12 @@
 
   TokenEx[] build(Node root, Token* first)
   {
-    Token* token = first;
+    auto token = first;
 
-    uint count;
-    while (token)
-    {
+    uint count; // Count tokens.
+    for (; token; token = token.next)
       count++;
-      token = token.next;
-    }
-
+    // Create the exact number of TokenEx instances.
     auto toks = new TokenEx[count];
     token = first;
     foreach (ref tokEx; toks)
@@ -224,7 +222,8 @@
     return *p;
   }
 
-  void push()(Node n)
+  // Override dispatch function.
+  override Node dispatch(Node n)
   {
     auto begin = n.begin;
     if (begin)
@@ -234,12 +233,6 @@
       txbegin.beginNodes ~= n;
       txend.endNodes ~= n;
     }
-  }
-
-  // Override dispatch function.
-  override Node dispatch(Node n)
-  {
-    push(n);
     return super.dispatch(n);
   }
 }
@@ -256,7 +249,7 @@
     print.format(tags["ParserError"], e.filePath, e.loc, e.col, xml_escape(e.getMsg));
 }
 
-void syntaxToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
+void highlightSyntax(string filePath, TagMap tags, Print!(char) print, DocOption options)
 {
   auto parser = new Parser(new SourceText(filePath, true));
   auto root = parser.start();
@@ -284,6 +277,10 @@
     auto token = tokenEx.token;
 
     token.ws && print(token.wsChars); // Print preceding whitespace.
+    if (token.isWhitespace) {
+      printToken(token, tags, print);
+      continue;
+    }
     // <node>
     foreach (node; tokenEx.beginNodes)
       print.format(tagNodeBegin, tags.getTag(node.category), getShortClassName(node));
@@ -302,7 +299,7 @@
 }
 
 /// Prints all tokens of a source file using the buffer print.
-void tokensToDoc(string filePath, TagMap tags, Print!(char) print, DocOption options)
+void highlightTokens(string filePath, TagMap tags, Print!(char) print)
 {
   auto lx = new Lexer(new SourceText(filePath, true));
   lx.scanAll();
@@ -315,21 +312,51 @@
     print(tags["CompEnd"]);
   }
   print(tags["SourceBegin"]);
-
   // Traverse linked list and print tokens.
-  auto token = lx.firstToken();
-  while (token)
-  {
+  for (auto token = lx.firstToken(); token; token = token.next) {
     token.ws && print(token.wsChars); // Print preceding whitespace.
     printToken(token, tags, print);
-    token = token.next;
   }
   print(tags["SourceEnd"]);
   print(tags["DocEnd"]);
 }
 
-void printToken(Token* token, string[] tags, Print!(char) print)
-{}
+class TokenHighlighter
+{
+  TagMap tags;
+  this(InfoManager infoMan, bool useHTML = true)
+  {
+    auto map = TagMapLoader(infoMan).load(GlobalSettings.htmlMapFile);
+    tags = new TagMap(map);
+  }
+
+  /// Highlights tokens in a DDoc code section.
+  /// Returns: a string with the highlighted tokens (in HTML tags).
+  string highlight(string text, string filePath)
+  {
+    auto buffer = new GrowBuffer(text.length);
+    auto print = new Print!(char)(Format, buffer);
+
+    auto lx = new Lexer(new SourceText(filePath, text));
+    lx.scanAll();
+
+    // Wrap the output in a DDoc D_CODE section.
+    print("$(D_CODE\n");
+    if (lx.errors.length)
+    { // Output error messages.
+      print(tags["CompBegin"]);
+      printErrors(lx, tags, print);
+      print(tags["CompEnd"]);
+    }
+    // Traverse linked list and print tokens.
+    for (auto token = lx.firstToken(); token; token = token.next) {
+      token.ws && print(token.wsChars); // Print preceding whitespace.
+      printToken(token, tags, print);
+    }
+    print("\n)");
+    return cast(char[])buffer.slice();
+  }
+}
 
 /// Prints a token with tags using the buffer print.
 void printToken(Token* token, TagMap tags, Print!(char) print)
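
For orientation, a minimal sketch of how the new TokenHighlighter might be invoked when a DDoc "---" code section is encountered. This glue code is not part of the changeset: the wrapper function and its parameters are hypothetical, while TokenHighlighter, highlight() and the $(D_CODE ...) wrapping come from the diff above.

/// Sketch: possible call site for the new highlighter.
/// The function name and parameters are hypothetical.
string highlightCodeSection(InfoManager infoMan, string sectionText, string filePath)
{
  // Loads the HTML tag map (GlobalSettings.htmlMapFile) in its constructor.
  auto th = new TokenHighlighter(infoMan);
  // Lexes the section's text and returns it wrapped in $(D_CODE ...).
  return th.highlight(sectionText, filePath);
}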