aboutsummaryrefslogtreecommitdiff
path: root/src/parser.cpp
diff options
context:
space:
mode:
authorgingerBill <gingerBill@users.noreply.github.com>2021-09-18 12:55:13 +0100
committerGitHub <noreply@github.com>2021-09-18 12:55:13 +0100
commit6855538729ea4f859f692715c4c231e4a59ac604 (patch)
treef9bd2df2289ab97032170151da38b076df00e0f6 /src/parser.cpp
parent15921d032cfff6994289d1f299bef168f0a49a4e (diff)
parent05ac2002e0296c3acccca1d8cffaafb002e43120 (diff)
Merge branch 'master' into llvm-12.0.1-windows
Diffstat (limited to 'src/parser.cpp')
-rw-r--r--src/parser.cpp16
1 file changed, 6 insertions, 10 deletions
diff --git a/src/parser.cpp b/src/parser.cpp
index e1f21f459..f72d8a73c 100644
--- a/src/parser.cpp
+++ b/src/parser.cpp
@@ -1196,7 +1196,7 @@ CommentGroup *consume_comment_group(AstFile *f, isize n, isize *end_line_) {
CommentGroup *comments = nullptr;
if (list.count > 0) {
- comments = gb_alloc_item(heap_allocator(), CommentGroup);
+ comments = gb_alloc_item(permanent_allocator(), CommentGroup);
comments->list = slice_from_array(list);
array_add(&f->comments, comments);
}
@@ -4645,7 +4645,8 @@ ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
zero_item(&f->tokenizer);
f->tokenizer.curr_file_id = f->id;
- TokenizerInitError err = init_tokenizer_from_fullpath(&f->tokenizer, f->fullpath);
+ bool copy_file_contents = build_context.command_kind == Command_strip_semicolon;
+ TokenizerInitError err = init_tokenizer_from_fullpath(&f->tokenizer, f->fullpath, copy_file_contents);
if (err != TokenizerInit_None) {
switch (err) {
case TokenizerInit_Empty:
@@ -4710,9 +4711,6 @@ ParseFileError init_ast_file(AstFile *f, String fullpath, TokenPos *err_pos) {
block_size = ((block_size + page_size-1)/page_size) * page_size;
block_size = gb_clamp(block_size, page_size, DEFAULT_MINIMUM_BLOCK_SIZE);
f->arena.minimum_block_size = block_size;
- #if 0
- arena_init_local_mutex(&f->arena);
- #endif
array_init(&f->comments, heap_allocator(), 0, 0);
array_init(&f->imports, heap_allocator(), 0, 0);
@@ -4727,8 +4725,6 @@ void destroy_ast_file(AstFile *f) {
array_free(&f->tokens);
array_free(&f->comments);
array_free(&f->imports);
- gb_free(heap_allocator(), f->tokenizer.fullpath.text);
- destroy_tokenizer(&f->tokenizer);
}
bool init_parser(Parser *p) {
@@ -4795,7 +4791,7 @@ WORKER_TASK_PROC(parser_worker_proc) {
void parser_add_file_to_process(Parser *p, AstPackage *pkg, FileInfo fi, TokenPos pos) {
// TODO(bill): Use a better allocator
ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
- auto wd = gb_alloc_item(heap_allocator(), ParserWorkerData);
+ auto wd = gb_alloc_item(permanent_allocator(), ParserWorkerData);
wd->parser = p;
wd->imported_file = f;
global_thread_pool_add_task(parser_worker_proc, wd);
@@ -4833,7 +4829,7 @@ WORKER_TASK_PROC(foreign_file_worker_proc) {
void parser_add_foreign_file_to_process(Parser *p, AstPackage *pkg, AstForeignFileKind kind, FileInfo fi, TokenPos pos) {
// TODO(bill): Use a better allocator
ImportedFile f = {pkg, fi, pos, p->file_to_process_count++};
- auto wd = gb_alloc_item(heap_allocator(), ForeignFileWorkerData);
+ auto wd = gb_alloc_item(permanent_allocator(), ForeignFileWorkerData);
wd->parser = p;
wd->imported_file = f;
wd->foreign_kind = kind;
@@ -4854,7 +4850,7 @@ AstPackage *try_add_import_path(Parser *p, String const &path, String const &rel
string_set_add(&p->imported_files, path);
- AstPackage *pkg = gb_alloc_item(heap_allocator(), AstPackage);
+ AstPackage *pkg = gb_alloc_item(permanent_allocator(), AstPackage);
pkg->kind = kind;
pkg->fullpath = path;
array_init(&pkg->files, heap_allocator());