| author | DanielGavin <danielgavin5@hotmail.com> | 2020-12-05 00:39:50 +0100 |
|---|---|---|
| committer | DanielGavin <danielgavin5@hotmail.com> | 2020-12-05 00:39:50 +0100 |
| commit | 498e8a3895cd5b1db756b7f61eb48d1fd4211460 (patch) | |
| tree | 71f35915bc9c449f762c1b7e25014fc6f7685e39 /src | |
| parent | efd2930b74943a4dbe463810f0c7b3e9ede0ab84 (diff) | |
added allocator - no more parsing the file every request
Diffstat (limited to 'src')
| -rw-r--r-- | src/common/allocator.odin | 128 |
| -rw-r--r-- | src/server/analysis.odin | 1 |
| -rw-r--r-- | src/server/documents.odin | 55 |
| -rw-r--r-- | src/server/requests.odin | 330 |
4 files changed, 384 insertions, 130 deletions
diff --git a/src/common/allocator.odin b/src/common/allocator.odin
new file mode 100644
index 0000000..cee54c4
--- /dev/null
+++ b/src/common/allocator.odin
@@ -0,0 +1,128 @@
+package common
+
+import "core:mem"
+
+Scratch_Allocator :: struct {
+    data: []byte,
+    curr_offset: int,
+    prev_allocation: rawptr,
+    backup_allocator: mem.Allocator,
+    leaked_allocations: [dynamic]rawptr,
+}
+
+scratch_allocator_init :: proc(s: ^Scratch_Allocator, size: int, backup_allocator := context.allocator) {
+    s.data = mem.make_aligned([]byte, size, 2*align_of(rawptr), backup_allocator);
+    s.curr_offset = 0;
+    s.prev_allocation = nil;
+    s.backup_allocator = backup_allocator;
+    s.leaked_allocations.allocator = backup_allocator;
+}
+
+scratch_allocator_destroy :: proc(s: ^Scratch_Allocator) {
+    if s == nil {
+        return;
+    }
+    for ptr in s.leaked_allocations {
+        free(ptr, s.backup_allocator);
+    }
+    delete(s.leaked_allocations);
+    delete(s.data, s.backup_allocator);
+    s^ = {};
+}
+
+scratch_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
+                               size, alignment: int,
+                               old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+
+    s := (^Scratch_Allocator)(allocator_data);
+
+    if s.data == nil {
+        DEFAULT_BACKING_SIZE :: 1<<22;
+        if !(context.allocator.procedure != scratch_allocator_proc &&
+             context.allocator.data != allocator_data) {
+            panic("cyclic initialization of the scratch allocator with itself");
+        }
+        scratch_allocator_init(s, DEFAULT_BACKING_SIZE);
+    }
+
+    size := size;
+
+    switch mode {
+    case .Alloc:
+        size = mem.align_forward_int(size, alignment);
+
+        switch {
+        case s.curr_offset+size <= len(s.data):
+            start := uintptr(raw_data(s.data));
+            ptr := start + uintptr(s.curr_offset);
+            ptr = mem.align_forward_uintptr(ptr, uintptr(alignment));
+            mem.zero(rawptr(ptr), size);
+
+            s.prev_allocation = rawptr(ptr);
+            offset := int(ptr - start);
+            s.curr_offset = offset + size;
+            return rawptr(ptr);
+        }
+        a := s.backup_allocator;
+        if a.procedure == nil {
+            a = context.allocator;
+            s.backup_allocator = a;
+        }
+
+        ptr := mem.alloc(size, alignment, a, loc);
+        if s.leaked_allocations == nil {
+            s.leaked_allocations = make([dynamic]rawptr, a);
+        }
+        append(&s.leaked_allocations, ptr);
+
+        if logger := context.logger; logger.lowest_level <= .Warning {
+            if logger.procedure != nil {
+                logger.procedure(logger.data, .Warning, "mem.Scratch_Allocator resorted to backup_allocator", logger.options, loc);
+            }
+        }
+
+        return ptr;
+
+    case .Free:
+    case .Free_All:
+        s.curr_offset = 0;
+        s.prev_allocation = nil;
+        for ptr in s.leaked_allocations {
+            free(ptr, s.backup_allocator);
+        }
+        clear(&s.leaked_allocations);
+
+    case .Resize:
+        begin := uintptr(raw_data(s.data));
+        end := begin + uintptr(len(s.data));
+        old_ptr := uintptr(old_memory);
+        //if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
+        //    s.curr_offset = int(old_ptr-begin)+size;
+        //    return old_memory;
+        //}
+        ptr := scratch_allocator_proc(allocator_data, .Alloc, size, alignment, old_memory, old_size, flags, loc);
+        mem.copy(ptr, old_memory, old_size);
+        scratch_allocator_proc(allocator_data, .Free, 0, alignment, old_memory, old_size, flags, loc);
+        return ptr;
+
+    case .Query_Features:
+        set := (^mem.Allocator_Mode_Set)(old_memory);
+        if set != nil {
+            set^ = {.Alloc, .Free, .Free_All, .Resize, .Query_Features};
+        }
+        return set;
+
+    case .Query_Info:
+        return nil;
+    }
+
+
+    return nil;
+}
+
+scratch_allocator :: proc(allocator: ^Scratch_Allocator) -> mem.Allocator {
+    return mem.Allocator{
+        procedure = scratch_allocator_proc,
+        data = allocator,
+    };
+}
\ No newline at end of file
diff --git a/src/server/analysis.odin b/src/server/analysis.odin
index 245ca33..b84379f 100644
--- a/src/server/analysis.odin
+++ b/src/server/analysis.odin
@@ -67,7 +67,6 @@ make_ast_context :: proc(file: ast.File, imports: [] Package, package_name: stri
         document_package = package_name,
         current_package = package_name,
     };
-
     return ast_context;
 }
 
diff --git a/src/server/documents.odin b/src/server/documents.odin
index 7f14252..5a0261c 100644
--- a/src/server/documents.odin
+++ b/src/server/documents.odin
@@ -35,10 +35,12 @@ Document :: struct {
     ast: ast.File,
     imports: [] Package,
     package_name: string,
+    allocator: ^common.Scratch_Allocator, //because does not support freeing I use arena allocators for each document
 };
 
 DocumentStorage :: struct {
     documents: map [string] Document,
+    free_allocators: [dynamic] ^common.Scratch_Allocator,
 };
 
 document_storage: DocumentStorage;
@@ -47,6 +49,24 @@ document_storage_shutdown :: proc() {
     delete(document_storage.documents);
 }
 
+document_get_allocator :: proc() -> ^common.Scratch_Allocator {
+
+    if len(document_storage.free_allocators) > 0 {
+        return pop(&document_storage.free_allocators);
+    }
+
+    else {
+        allocator := new(common.Scratch_Allocator);
+        common.scratch_allocator_init(allocator, mem.megabytes(1));
+        return allocator;
+    }
+
+}
+
+document_free_allocator :: proc(allocator: ^common.Scratch_Allocator) {
+    append(&document_storage.free_allocators, allocator);
+}
+
 document_get :: proc(uri_string: string) -> ^Document {
 
     uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
@@ -84,6 +104,7 @@ document_open :: proc(uri_string: string, text: string, config: ^common.Config,
         document.client_owned = true;
         document.text = transmute([] u8)text;
         document.used_text = len(document.text);
+        document.allocator = document_get_allocator();
 
         if err := document_refresh(document, config, writer); err != .None {
             return err;
@@ -98,6 +119,7 @@ document_open :: proc(uri_string: string, text: string, config: ^common.Config,
             text = transmute([] u8)text,
             client_owned = true,
             used_text = len(text),
+            allocator = document_get_allocator(),
         };
 
         if err := document_refresh(&document, config, writer); err != .None {
@@ -221,9 +243,9 @@ document_close :: proc(uri_string: string) -> common.Error {
         return .InvalidRequest;
     }
 
-    //free_imports(document);
-
-    //common.free_ast_file(document.ast);
+    free_all(common.scratch_allocator(document.allocator));
+    document_free_allocator(document.allocator);
+    document.allocator = nil;
 
     document.client_owned = false;
 
@@ -246,7 +268,6 @@ document_refresh :: proc(document: ^Document, config: ^common.Config, writer: ^W
         return .ParseError;
     }
 
-    //right now we don't allow to writer errors out from files read from the file directory, core files, etc.
     if writer != nil && len(errors) > 0 {
 
         document.diagnosed_errors = true;
@@ -322,24 +343,8 @@ parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
 
 }
 
-free_imports :: proc(document: ^Document) {
-    if document.imports != nil {
-
-        for imp in document.imports {
-            delete(imp.name);
-        }
-
-        delete(document.imports);
-        delete(document.package_name);
-
-        document.imports = nil;
-    }
-}
-
 parse_document :: proc(document: ^Document, config: ^common.Config) -> ([] ParserError, bool) {
 
-    context.allocator = context.temp_allocator;
-
     p := parser.Parser {
         err = parser_error_handler,
         warn = parser_warning_handler,
@@ -347,7 +352,9 @@ parse_document :: proc(document: ^Document, config: ^common.Config) -> ([] Parse
 
     current_errors = make([dynamic] ParserError, context.temp_allocator);
 
-    //common.free_ast_file(document.ast);
+    free_all(common.scratch_allocator(document.allocator));
+
+    context.allocator = common.scratch_allocator(document.allocator);
 
     document.ast = ast.File {
         fullpath = document.uri.path,
@@ -356,10 +363,8 @@ parse_document :: proc(document: ^Document, config: ^common.Config) -> ([] Parse
 
     parser.parse_file(&p, &document.ast);
 
-    //free_imports(document);
-
-    document.imports = make([]Package, len(document.ast.imports), context.temp_allocator);
-    document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator), context.temp_allocator);
+    document.imports = make([]Package, len(document.ast.imports));
+    document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator));
 
     for imp, index in document.ast.imports {
 
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 3347544..96146a1 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -20,9 +20,39 @@ Header :: struct {
     content_type: string,
 };
 
+RequestType :: enum {
+    Initialize,
+    Initialized,
+    Shutdown,
+    Exit,
+    DidOpen,
+    DidChange,
+    DidClose,
+    DidSave,
+    Definition,
+    Completion,
+    SignatureHelp,
+    DocumentSymbol,
+    SemanticTokensFull,
+    SemanticTokensRange,
+};
+
+RequestInfo :: struct {
+    params: json.Value,
+    id: RequestId,
+    config: ^common.Config,
+    writer: ^Writer,
+    result: common.Error,
+};
+
+
 pool: thread.Pool;
 
+get_request_info :: proc(task: ^thread.Task) -> ^RequestInfo {
+    return cast(^RequestInfo)task.data;
+}
+
 make_response_message :: proc(id: RequestId, params: ResponseParams) -> ResponseMessage {
 
     return ResponseMessage {
@@ -136,22 +166,35 @@ read_and_parse_body :: proc(reader: ^Reader, header: Header) -> (json.Value, boo
     return value, true;
 }
 
+request_map : map [string] RequestType =
+    {"initialize" = .Initialize,
+     "initialized" = .Initialized,
+     "shutdown" = .Shutdown,
+     "exit" = .Exit,
+     "textDocument/didOpen" = .DidOpen,
+     "textDocument/didChange" = .DidChange,
+     "textDocument/didClose" = .DidClose,
+     "textDocument/didSave" = .DidSave,
+     "textDocument/definition" = .Definition,
+     "textDocument/completion" = .Completion,
+     "textDocument/signatureHelp" = .SignatureHelp,
+     "textDocument/documentSymbol" = .DocumentSymbol,
+     "textDocument/semanticTokens/full" = .SemanticTokensFull,
+     "textDocument/semanticTokens/range" = .SemanticTokensRange};
+
+handle_error :: proc(err: common.Error, id: RequestId, writer: ^Writer) {
+
+    if err != .None {
-call_map : map [string] proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error =
-    {"initialize" = request_initialize,
-     "initialized" = request_initialized,
-     "shutdown" = request_shutdown,
-     "exit" = notification_exit,
-     "textDocument/didOpen" = notification_did_open,
-     "textDocument/didChange" = notification_did_change,
-     "textDocument/didClose" = notification_did_close,
-     "textDocument/didSave" = notification_did_save,
-     "textDocument/definition" = request_definition,
-     "textDocument/completion" = request_completion,
-     "textDocument/signatureHelp" = request_signature_help,
-     "textDocument/documentSymbol" = request_document_symbols,
-     "textDocument/semanticTokens/full" = request_semantic_token_full,
-     "textDocument/semanticTokens/range" = request_semantic_token_range};
+        response := make_response_message_error(
+            id = id,
+            error = ResponseError {code = err, message = ""}
+        );
+
+        send_error(response, writer);
+    }
+
+}
 
 handle_request :: proc(request: json.Value, config: ^common.Config, writer: ^Writer) -> bool {
 
@@ -180,8 +223,8 @@ handle_request :: proc(request: json.Value, config: ^common.Config, writer: ^Wri
 
     method := root["method"].value.(json.String);
 
-    fn: proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error;
-    fn, ok = call_map[method];
+    request_type: RequestType;
+    request_type, ok = request_map[method];
 
     if !ok {
 
@@ -194,35 +237,78 @@ handle_request :: proc(request: json.Value, config: ^common.Config, writer: ^Wri
     }
 
     else {
 
-        err := fn(root["params"], id, config, writer);
-        if err != .None {
+        info := new(RequestInfo);
+
+        info.params = root["params"];
+        info.id = id;
+        info.config = config;
+        info.writer = writer;
+
+        task_proc: thread.Task_Proc;
+
+        switch request_type {
+        case .Initialize:
+            task_proc = request_initialize;
+        case .Initialized:
+            task_proc = request_initialized;
+        case .Shutdown:
+            task_proc = request_shutdown;
+        case .Exit:
+            task_proc = notification_exit;
+        case .DidOpen:
+            task_proc = notification_did_open;
+        case .DidChange:
+            task_proc = notification_did_change;
+        case .DidClose:
+            task_proc = notification_did_close;
+        case .DidSave:
+            task_proc = notification_did_save;
+        case .Definition:
+            task_proc = request_definition;
+        case .Completion:
+            task_proc = request_completion;
+        case .SignatureHelp:
+            task_proc = request_signature_help;
+        case .DocumentSymbol:
+            task_proc = request_document_symbols;
+        case .SemanticTokensFull:
+            task_proc = request_semantic_token_full;
+        case .SemanticTokensRange:
+            task_proc = request_semantic_token_range;
+        }
 
-            response := make_response_message_error(
-                id = id,
-                error = ResponseError {code = err, message = ""}
-            );
+        task := thread.Task {
+            data = info,
+            procedure = task_proc,
+        };
+
+        task_proc(&task);
 
-            send_error(response, writer);
-        }
     }
 
     return true;
 }
 
-request_initialize :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_initialize :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     initialize_params: RequestInitializeParams;
 
     if unmarshal(params, initialize_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     config.workspace_folders = make([dynamic]common.WorkspaceFolder);
 
@@ -298,7 +384,7 @@ request_initialize :: proc(params: json.Value, id: RequestId, config: ^common.Co
         },
         semanticTokensProvider = SemanticTokensOptions {
             range = true,
-            full = false,
+            full = true,
             legend = SemanticTokensLegend {
                 tokenTypes = token_types,
                 tokenModifiers = token_modifiers,
@@ -319,15 +405,17 @@
     index.build_static_index(context.allocator, config);
     log.info("Finished indexing");
-
-    return .None;
 }
 
-request_initialized :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
-    return .None;
+request_initialized :: proc(task: ^thread.Task) {
+
 }
 
-request_shutdown :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_shutdown :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     response := make_response_message(
         params = nil,
@@ -335,32 +423,35 @@ request_shutdown :: proc(params: json.Value, id: RequestId, config: ^common.Conf
     );
 
     send_response(response, writer);
-
-    return .None;
 }
 
-request_definition :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_definition :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     definition_params: TextDocumentPositionParams;
 
     if unmarshal(params, definition_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(definition_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     location, ok2 := get_definition_location(document, definition_params.position);
 
     if !ok2 {
@@ -373,18 +464,20 @@ request_definition :: proc(params: json.Value, id: RequestId, config: ^common.Co
     );
 
     send_response(response, writer);
+}
 
-    return .None;
-}
+request_completion :: proc(task: ^thread.Task) {
+    info := get_request_info(task);
 
-request_completion :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     completition_params: CompletionParams;
 
@@ -392,22 +485,23 @@ request_completion :: proc(params: json.Value, id: RequestId, config: ^common.Co
     if unmarshal(params, completition_params, context.temp_allocator) != .None {
         log.error("Failed to unmarshal completion request");
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(completition_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     list: CompletionList;
     list, ok = get_completion_list(document, completition_params.position);
 
     if !ok {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
     response := make_response_message(
@@ -416,32 +510,35 @@ request_completion :: proc(params: json.Value, id: RequestId, config: ^common.Co
     );
 
     send_response(response, writer);
-
-    return .None;
 }
 
-request_signature_help :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_signature_help :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     signature_params: SignatureHelpParams;
 
     if unmarshal(params, signature_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(signature_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     help: SignatureHelp;
     help, ok = get_signature_information(document, signature_params.position);
 
@@ -451,100 +548,123 @@ request_signature_help :: proc(params: json.Value, id: RequestId, config: ^commo
     );
 
     send_response(response, writer);
-
-    return .None;
 }
 
-notification_exit :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+notification_exit :: proc(task: ^thread.Task) {
+    info := get_request_info(task);
+    using info;
 
     config.running = false;
 
-    return .None;
 }
 
-notification_did_open :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+notification_did_open :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
         log.error("Failed to parse open document notification");
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     open_params: DidOpenTextDocumentParams;
 
     if unmarshal(params, open_params, context.allocator) != .None {
         log.error("Failed to parse open document notification");
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
-    return document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer);
+    if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None {
+        handle_error(n, id, writer);
+    }
 }
 
-notification_did_change :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+notification_did_change :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
    }
 
     change_params: DidChangeTextDocumentParams;
 
     if unmarshal(params, change_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer);
-
-    return .None;
 }
 
-notification_did_close :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+notification_did_close :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     close_params: DidCloseTextDocumentParams;
 
     if unmarshal(params, close_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
-    return document_close(close_params.textDocument.uri);
+    if n := document_close(close_params.textDocument.uri); n != .None {
+        handle_error(n, id, writer);
+        return;
+    }
 }
 
-notification_did_save :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
-
+notification_did_save :: proc(task: ^thread.Task) {
-    return .None;
 
 }
 
-request_semantic_token_full :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_semantic_token_full :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     semantic_params: SemanticTokensParams;
 
     if unmarshal(params, semantic_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(semantic_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     range := common.Range {
         start = common.Position {
             line = 0,
@@ -564,32 +684,35 @@ request_semantic_token_full :: proc(params: json.Value, id: RequestId, config: ^
     );
 
     send_response(response, writer);
-
-    return .None;
 }
 
-request_semantic_token_range :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_semantic_token_range :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     semantic_params: SemanticTokensRangeParams;
 
     if unmarshal(params, semantic_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(semantic_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     //symbols: SemanticTokens;
 
     symbols := get_semantic_tokens(document, semantic_params.range);
 
@@ -599,42 +722,41 @@ request_semantic_token_range :: proc(params: json.Value, id: RequestId, config:
     );
 
     send_response(response, writer);
-
-    return .None;
 }
 
-request_document_symbols :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_document_symbols :: proc(task: ^thread.Task) {
+
+    info := get_request_info(task);
+    using info;
 
     params_object, ok := params.value.(json.Object);
 
     if !ok {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     symbol_params: DocumentSymbolParams;
 
     if unmarshal(params, symbol_params, context.temp_allocator) != .None {
-        return .ParseError;
+        handle_error(.ParseError, id, writer);
+        return;
     }
 
     document := document_get(symbol_params.textDocument.uri);
 
     if document == nil {
-        return .InternalError;
+        handle_error(.InternalError, id, writer);
+        return;
     }
 
-    document_refresh(document, config, nil);
-
     symbols := get_document_symbols(document);
-
     response := make_response_message(
         params = symbols,
         id = id,
     );
 
     send_response(response, writer);
-
-    return .None;
 }
\ No newline at end of file