diff options
| author | gingerBill <bill@gingerbill.org> | 2020-05-27 18:23:37 +0100 |
|---|---|---|
| committer | gingerBill <bill@gingerbill.org> | 2020-05-27 18:23:37 +0100 |
| commit | 1a0614b0d7f4b6010d79ac0a402d3c4c1f389529 (patch) | |
| tree | aeee6f81470fe8f9ed0d918008272ed081a099bd /src/main.cpp | |
| parent | 876820789e9dedaa6198c4cd145702485e3bd21c (diff) | |
Improve performance of tokenization and parsing
Diffstat (limited to 'src/main.cpp')
| -rw-r--r-- | src/main.cpp | 30 |
1 file changed, 30 insertions, 0 deletions
diff --git a/src/main.cpp b/src/main.cpp index 0e3d5836d..30000961c 100644 --- a/src/main.cpp +++ b/src/main.cpp @@ -1167,10 +1167,14 @@ void show_timings(Checker *c, Timings *t) { isize files = 0; isize packages = p->packages.count; isize total_file_size = 0; + f64 total_tokenizing_time = 0; + f64 total_parsing_time = 0; for_array(i, p->packages) { files += p->packages[i]->files.count; for_array(j, p->packages[i]->files) { AstFile *file = p->packages[i]->files[j]; + total_tokenizing_time += file->time_to_tokenize; + total_parsing_time += file->time_to_parse; total_file_size += file->tokenizer.end - file->tokenizer.start; } } @@ -1187,6 +1191,32 @@ void show_timings(Checker *c, Timings *t) { gb_printf("\n"); } { + f64 time = total_tokenizing_time; + gb_printf("Tokenization Only\n"); + gb_printf("LOC/s - %.3f\n", cast(f64)lines/time); + gb_printf("us/LOC - %.3f\n", 1.0e6*time/cast(f64)lines); + gb_printf("Tokens/s - %.3f\n", cast(f64)tokens/time); + gb_printf("us/Token - %.3f\n", 1.0e6*time/cast(f64)tokens); + gb_printf("bytes/s - %.3f\n", cast(f64)total_file_size/time); + gb_printf("MiB/s - %.3f\n", cast(f64)(total_file_size/time)/(1024*1024)); + gb_printf("us/bytes - %.3f\n", 1.0e6*time/cast(f64)total_file_size); + + gb_printf("\n"); + } + { + f64 time = total_parsing_time; + gb_printf("Parsing Only\n"); + gb_printf("LOC/s - %.3f\n", cast(f64)lines/time); + gb_printf("us/LOC - %.3f\n", 1.0e6*time/cast(f64)lines); + gb_printf("Tokens/s - %.3f\n", cast(f64)tokens/time); + gb_printf("us/Token - %.3f\n", 1.0e6*time/cast(f64)tokens); + gb_printf("bytes/s - %.3f\n", cast(f64)total_file_size/time); + gb_printf("MiB/s - %.3f\n", cast(f64)(total_file_size/time)/(1024*1024)); + gb_printf("us/bytes - %.3f\n", 1.0e6*time/cast(f64)total_file_size); + + gb_printf("\n"); + } + { TimeStamp ts = {}; for_array(i, t->sections) { TimeStamp s = t->sections[i]; |