# HG changeset patch # User Aziz Köksal # Date 1198092893 -3600 # Node ID 0bac0bb506ca99ef4f2af09a280f4e480fc8e7da # Parent d7050f2a4814f69afc3b3581815d6f78c1153809 Added '--table' option to 'stats' command. diff -r d7050f2a4814 -r 0bac0bb506ca trunk/src/cmd/Statistics.d --- a/trunk/src/cmd/Statistics.d Wed Dec 19 18:48:12 2007 +0100 +++ b/trunk/src/cmd/Statistics.d Wed Dec 19 20:34:53 2007 +0100 @@ -18,6 +18,15 @@ uint commentCount; /// Counter for comments. uint tokenCount; /// Counter for all tokens produced by the Lexer. uint linesOfCode; /// Number of lines. + uint[] tokensTable; /// Table of counters for all token types. + + static Statistics opCall(bool allocateTokensTable) + { + Statistics s; + if (allocateTokensTable) + s.tokensTable = new uint[TOK.MAX]; + return s; + } void opAddAssign(Statistics s) { @@ -29,16 +38,18 @@ this.commentCount += s.commentCount; this.tokenCount += s.tokenCount; this.linesOfCode += s.linesOfCode; + foreach (i, count; s.tokensTable) + this.tokensTable[i] += count; } } -void execute(string[] filePaths) +void execute(string[] filePaths, bool printTokensTable) { Statistics[] stats; foreach (filePath; filePaths) - stats ~= getStatistics(filePath); + stats ~= getStatistics(filePath, printTokensTable); - Statistics total; + auto total = Statistics(printTokensTable); foreach (i, ref stat; stats) { @@ -67,6 +78,7 @@ } if (filePaths.length > 1) + { Stdout.formatln( "--------------------------------------------------------------------------------\n" "Total of {} files:\n" @@ -88,24 +100,49 @@ total.tokenCount, total.linesOfCode ); + } + + if (printTokensTable) + { + Stdout("Table of tokens:").newline; + Stdout.formatln(" {,10} | {}", "Count", "Token type"); + Stdout("-----------------------------").newline; + foreach (i, count; total.tokensTable) + Stdout.formatln(" {,10} | {}", count, Token.toString(cast(TOK)i)); + Stdout("// End of table.").newline; + } } -Statistics getStatistics(string filePath) +Statistics getStatistics(string 
filePath, bool printTokensTable) { auto sourceText = loadFile(filePath); auto lx = new Lexer(sourceText, filePath); lx.scanAll(); auto token = lx.firstToken(); - Statistics stats; + auto stats = Statistics(printTokensTable); // Lexer creates HEAD + Newline, which are not in the source text. // No token left behind! stats.tokenCount = 2; stats.linesOfCode = lx.lineNum; + if (printTokensTable) + { + stats.tokensTable[TOK.HEAD] = 1; + stats.tokensTable[TOK.Newline & ~TOK.Whitespace] = 1; + } // Traverse linked list. while (1) { stats.tokenCount += 1; + + if (printTokensTable) + { + if (token.isWhitespace) + stats.tokensTable[token.type & ~TOK.Whitespace] += 1; + else + stats.tokensTable[token.type] += 1; + } + // Count whitespace characters if (token.ws !is null) stats.whitespaceCount += token.start - token.ws; diff -r d7050f2a4814 -r 0bac0bb506ca trunk/src/main.d --- a/trunk/src/main.d Wed Dec 19 18:48:12 2007 +0100 +++ b/trunk/src/main.d Wed Dec 19 20:34:53 2007 +0100 @@ -125,7 +125,14 @@ cmd.ImportGraph.execute(filePath, includePaths, regexps, levels, options); break; case "stats", "statistics": - cmd.Statistics.execute(args[2..$]); + char[][] filePaths; + bool printTokensTable; + foreach (arg; args[2..$]) + if (arg == "--table") + printTokensTable = true; + else + filePaths ~= arg; + cmd.Statistics.execute(filePaths, printTokensTable); break; case "tok", "tokenize": char[] filePath; @@ -260,7 +267,10 @@ case "stats", "statistics": msg = "Gather statistics about D source files. Usage: - dil stat file.d [file2.d, ...] + dil stat file.d [file2.d, ...] [Options] + +Options: + --table : print the count of all types of tokens in a table. Example: dil stat src/dil/Parser.d src/dil/Lexer.d";