changeset 410:4d9ee8e60712

Added destructors for two particular tokens. Fixed some asserts in Lexer.scanNumber().
author Aziz Köksal <aziz.koeksal@gmail.com>
date Tue, 25 Sep 2007 14:28:35 +0200
parents 38fccd2640eb
children cca83c0c00fd
files trunk/src/dil/Lexer.d trunk/src/dil/Token.d
diffstat 2 files changed, 50 insertions(+), 4 deletions(-) [+]
line wrap: on
line diff
--- a/trunk/src/dil/Lexer.d	Tue Sep 25 12:15:49 2007 +0200
+++ b/trunk/src/dil/Lexer.d	Tue Sep 25 14:28:35 2007 +0200
@@ -1410,7 +1410,7 @@
 
   LscanHex:
     assert(digits == 0);
-    assert(*p == 'x');
+    assert(*p == 'x' || *p == 'X');
     while (1)
     {
       if (*++p == '_')
@@ -1427,7 +1427,7 @@
         ulong_ += *p - 'a' + 10;
     }
 
-    assert(ishexad(p[-1]) || p[-1] == '_' || p[-1] == 'x');
+    assert(ishexad(p[-1]) || p[-1] == '_' || p[-1] == 'x' || p[-1] == 'X');
     assert(!ishexad(*p) && *p != '_');
 
     switch (*p)
@@ -1449,7 +1449,7 @@
 
   LscanBinary:
     assert(digits == 0);
-    assert(*p == 'b');
+    assert(*p == 'b' || *p == 'B');
     while (1)
     {
       if (*++p == '0')
@@ -1474,7 +1474,7 @@
     else if (digits > 64)
       error(MID.OverflowBinaryNumber);
 
-    assert(p[-1] == '0' || p[-1] == '1' || p[-1] == '_', p[-1] ~ "");
+    assert(p[-1] == '0' || p[-1] == '1' || p[-1] == '_' || p[-1] == 'b' || p[-1] == 'B', p[-1] ~ "");
     assert( !(*p == '0' || *p == '1' || *p == '_') );
     goto Lfinalize;
 
--- a/trunk/src/dil/Token.d	Tue Sep 25 12:15:49 2007 +0200
+++ b/trunk/src/dil/Token.d	Tue Sep 25 14:28:35 2007 +0200
@@ -213,6 +213,14 @@
     return *start == '_' && type != TOK.Identifier;
   }
 
+version(D2)
+{
+  bool isTokenStringLiteral()
+  {
+    return type == TOK.String && tok_str !is null;
+  }
+}
+
   int opEquals(TOK type2)
   {
     return type == type2;
@@ -229,8 +237,46 @@
 
   delete(void* p)
   {
+    auto token = cast(Token*)p;
+    if (token)
+    {
+      if(token.type == TOK.HashLine)
+        token.destructHashLineToken();
+      else
+      {
+      version(D2)
+        if (token.isTokenStringLiteral)
+          token.destructTokenStringLiteral();
+      }
+    }
     free(p);
   }
+
+  void destructHashLineToken()
+  {
+    assert(type == TOK.HashLine);
+    delete line_num;
+    delete line_filespec;
+  }
+
+version(D2)
+{
+  void destructTokenStringLiteral()
+  {
+    assert(type == TOK.String);
+    assert(start && *start == 'q' && start[1] == '{');
+    assert(tok_str !is null);
+    auto tok_it = tok_str;
+    auto tok_del = tok_str;
+    while (tok_it && tok_it.type != TOK.EOF)
+    {
+      tok_it = tok_it.next;
+      assert(tok_del && tok_del.type != TOK.EOF);
+      delete tok_del;
+      tok_del = tok_it;
+    }
+  }
+}
 }
 
 const string[] tokToString = [