diff options
| author | DanielGavin <danielgavin5@hotmail.com> | 2023-04-03 22:25:55 +0200 |
|---|---|---|
| committer | DanielGavin <danielgavin5@hotmail.com> | 2023-04-03 22:25:55 +0200 |
| commit | bd0461b45a29f2b638252a8ff27ecdb764f1c4ec (patch) | |
| tree | c9072e1baf3b1b6c59a0a03a5dea987f96dc9d0d /src | |
| parent | 58606d4394eac344516513d805902367030eaf79 (diff) | |
File cache was using the wrong allocator - might fix the issues with semantic tokens crashing once in a while.
Diffstat (limited to 'src')
| -rw-r--r-- | src/main.odin | 2 | ||||
| -rw-r--r-- | src/server/documents.odin | 4 | ||||
| -rw-r--r-- | src/server/requests.odin | 4 |
3 files changed, 9 insertions, 1 deletion
diff --git a/src/main.odin b/src/main.odin index 5ce8254..cc21cb9 100644 --- a/src/main.odin +++ b/src/main.odin @@ -113,7 +113,7 @@ main :: proc() { */ when ODIN_DEBUG && ODIN_OS == .Windows { - set_stacktrace() + //set_stacktrace() } init_global_temporary_allocator(mem.Megabyte * 100) diff --git a/src/server/documents.odin b/src/server/documents.odin index 1c281c0..bbde1ca 100644 --- a/src/server/documents.odin +++ b/src/server/documents.odin @@ -307,6 +307,10 @@ document_close :: proc(uri_string: string) -> common.Error { return .InvalidRequest } + if document.uri.uri in file_resolve_cache.files { + delete_key(&file_resolve_cache.files, document.uri.uri) + } + document_free_allocator(document.allocator) document.allocator = nil diff --git a/src/server/requests.odin b/src/server/requests.odin index 7de995a..555ffd3 100644 --- a/src/server/requests.odin +++ b/src/server/requests.odin @@ -666,6 +666,8 @@ request_initialize :: proc( append(&indexer.builtin_packages, path.join({core, "runtime"})) } + file_resolve_cache.files = make(map[string]FileResolve, 200) + setup_index() for pkg in indexer.builtin_packages { @@ -1125,6 +1127,8 @@ request_semantic_token_range :: proc( symbols: SemanticTokens if config.enable_semantic_tokens { + resolve_entire_file_cached(document) + if file, ok := file_resolve_cache.files[document.uri.uri]; ok { symbols = get_semantic_tokens( document, |