Diffstat (limited to 'src/server')
-rw-r--r--  src/server/check.odin             | 118
-rw-r--r--  src/server/completion.odin        | 652
-rw-r--r--  src/server/definition.odin        |  91
-rw-r--r--  src/server/document_links.odin    |  20
-rw-r--r--  src/server/document_symbols.odin  |  56
-rw-r--r--  src/server/documents.odin         | 252
-rw-r--r--  src/server/format.odin            |  40
-rw-r--r--  src/server/hover.odin             | 126
-rw-r--r--  src/server/inlay_hints.odin       |  36
-rw-r--r--  src/server/lens.odin              |  14
-rw-r--r--  src/server/log.odin               |  18
-rw-r--r--  src/server/reader.odin            |  42
-rw-r--r--  src/server/requests.odin          | 648
-rw-r--r--  src/server/response.odin          |  36
-rw-r--r--  src/server/semantic_tokens.odin   | 468
-rw-r--r--  src/server/signature.odin         | 110
-rw-r--r--  src/server/unmarshal.odin         |  96
-rw-r--r--  src/server/writer.odin            |  18
18 files changed, 1421 insertions, 1420 deletions
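
The changes below are largely mechanical: the commit drops Odin's now-optional trailing semicolons, replaces the string comparison 'when ODIN_OS == "windows"' with the enum constant .Windows, and switches AST derived-type assertions from value types such as ast.Ident to pointer types such as ^ast.Ident. A minimal before/after sketch of those three patterns (illustrative only; "node" and "field" are hypothetical names, not taken from the commit):

    // Before this commit (old Odin style)
    when ODIN_OS == "windows" {
        if ident, ok := node.derived.(ast.Ident); ok {
            field = ident.name;
        }
    }

    // After this commit: semicolons dropped, enum comparison, pointer-based derived assertion
    when ODIN_OS == .Windows {
        if ident, ok := node.derived.(^ast.Ident); ok {
            field = ident.name
        }
    }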
diff --git a/src/server/check.odin b/src/server/check.odin index ab6dfb2..8273084 100644 --- a/src/server/check.odin +++ b/src/server/check.odin @@ -18,49 +18,49 @@ import "core:text/scanner" import "shared:common" -when ODIN_OS == "windows" { +when ODIN_OS == .Windows { is_package :: proc(file: string, pkg: string) { } check :: proc(uri: common.Uri, writer: ^Writer, config: ^common.Config) { - data := make([]byte, mem.kilobytes(10), context.temp_allocator); + data := make([]byte, mem.kilobytes(10), context.temp_allocator) - buffer: []byte; - code: u32; - ok: bool; + buffer: []byte + code: u32 + ok: bool - collection_builder := strings.make_builder(context.temp_allocator); + collection_builder := strings.make_builder(context.temp_allocator) for k, v in common.config.collections { if k == "" || k == "core" || k == "vendor" { - continue; + continue } - strings.write_string(&collection_builder, fmt.aprintf("-collection:%v=%v ", k, v)); + strings.write_string(&collection_builder, fmt.aprintf("-collection:%v=%v ", k, v)) } - command: string; + command: string if config.odin_command != "" { - command = config.odin_command; + command = config.odin_command } else { - command = "odin"; + command = "odin" } if code, ok, buffer = common.run_executable(fmt.tprintf("%v check %s %s -no-entry-point %s", command, path.dir(uri.path, context.temp_allocator), strings.to_string(collection_builder), config.checker_args), &data); !ok { - log.errorf("Odin check failed with code %v for file %v", code, uri.path); - return; + log.errorf("Odin check failed with code %v for file %v", code, uri.path) + return } - s: scanner.Scanner; + s: scanner.Scanner - scanner.init(&s, string(buffer)); + scanner.init(&s, string(buffer)) - s.whitespace = {'\t', ' '}; + s.whitespace = {'\t', ' '} - current: rune; + current: rune ErrorSeperator :: struct { message: string, @@ -69,101 +69,101 @@ when ODIN_OS == "windows" { uri: string, } - error_seperators := make([dynamic]ErrorSeperator, context.temp_allocator); + error_seperators := make([dynamic]ErrorSeperator, context.temp_allocator) //find all the signatures string(digit:digit) loop: for scanner.peek(&s) != scanner.EOF { - error: ErrorSeperator; + error: ErrorSeperator - source_pos := s.src_pos; + source_pos := s.src_pos if source_pos == 1 { - source_pos = 0; + source_pos = 0 } for scanner.peek(&s) != '(' { - n := scanner.scan(&s); + n := scanner.scan(&s) if n == scanner.EOF { - break loop; + break loop } } - error.uri = string(buffer[source_pos:s.src_pos-1]); + error.uri = string(buffer[source_pos:s.src_pos-1]) - left_paren := scanner.scan(&s); + left_paren := scanner.scan(&s) if left_paren != '(' { - break loop; + break loop } - lhs_digit := scanner.scan(&s); + lhs_digit := scanner.scan(&s) if lhs_digit != scanner.Int { - break loop; + break loop } - line, column: int; - ok: bool; + line, column: int + ok: bool - line, ok = strconv.parse_int(scanner.token_text(&s)); + line, ok = strconv.parse_int(scanner.token_text(&s)) if !ok { - break loop; + break loop } - seperator := scanner.scan(&s); + seperator := scanner.scan(&s) if seperator != ':' { - break loop; + break loop } rhs_digit := scanner.scan(&s) if rhs_digit != scanner.Int { - break loop; + break loop } - column, ok = strconv.parse_int(scanner.token_text(&s)); + column, ok = strconv.parse_int(scanner.token_text(&s)) if !ok { - break loop; + break loop } - right_paren := scanner.scan(&s); + right_paren := scanner.scan(&s) if right_paren != ')' { - break loop; + break loop } - source_pos = s.src_pos; + source_pos = s.src_pos 
for scanner.peek(&s) != '\n' { - n := scanner.scan(&s); + n := scanner.scan(&s) if n == scanner.EOF { - break; + break } } if source_pos == s.src_pos { - continue; + continue } - error.message = string(buffer[source_pos:s.src_pos-1]); - error.column = column; - error.line = line; + error.message = string(buffer[source_pos:s.src_pos-1]) + error.column = column + error.line = line - append(&error_seperators, error); + append(&error_seperators, error) } - errors := make(map[string][dynamic]Diagnostic, 0, context.temp_allocator); + errors := make(map[string][dynamic]Diagnostic, 0, context.temp_allocator) for error in error_seperators { if error.uri not_in errors { - errors[error.uri] = make([dynamic]Diagnostic, context.temp_allocator); + errors[error.uri] = make([dynamic]Diagnostic, context.temp_allocator) } append(&errors[error.uri], Diagnostic { @@ -180,48 +180,48 @@ when ODIN_OS == "windows" { }, }, message = error.message, - }); + }) } - matches, err := filepath.glob(fmt.tprintf("%v/*.odin", path.dir(uri.path, context.temp_allocator))); + matches, err := filepath.glob(fmt.tprintf("%v/*.odin", path.dir(uri.path, context.temp_allocator))) if err == .None { for match in matches { - uri := common.create_uri(match, context.temp_allocator); + uri := common.create_uri(match, context.temp_allocator) params := NotificationPublishDiagnosticsParams { uri = uri.uri, diagnostics = {}, - }; + } notifaction := Notification { jsonrpc = "2.0", method = "textDocument/publishDiagnostics", params = params, - }; + } if writer != nil { - send_notification(notifaction, writer); + send_notification(notifaction, writer) } } } for k, v in errors { - uri := common.create_uri(k, context.temp_allocator); + uri := common.create_uri(k, context.temp_allocator) params := NotificationPublishDiagnosticsParams { uri = uri.uri, diagnostics = v[:], - }; + } notifaction := Notification { jsonrpc = "2.0", method = "textDocument/publishDiagnostics", params = params, - }; + } if writer != nil { - send_notification(notifaction, writer); + send_notification(notifaction, writer) } } } diff --git a/src/server/completion.odin b/src/server/completion.odin index 59df7a3..ae9d4e3 100644 --- a/src/server/completion.odin +++ b/src/server/completion.odin @@ -35,64 +35,64 @@ Completion_Type :: enum { } get_completion_list :: proc(document: ^common.Document, position: common.Position, completion_context: CompletionContext) -> (CompletionList, bool) { - using analysis; + using analysis - list: CompletionList; + list: CompletionList - position_context, ok := get_document_position_context(document, position, .Completion); + position_context, ok := get_document_position_context(document, position, .Completion) if !ok || position_context.abort_completion { - return list, true; + return list, true } if position_context.import_stmt == nil && strings.contains_any(completion_context.triggerCharacter, "/:\"") { - return list, true; + return list, true } - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) - ast_context.current_package = ast_context.document_package; - ast_context.value_decl = position_context.value_decl; + ast_context.current_package = ast_context.document_package + ast_context.value_decl = position_context.value_decl if position_context.function != nil { - get_locals(document.ast, 
position_context.function, &ast_context, &position_context); + get_locals(document.ast, position_context.function, &ast_context, &position_context) } - completion_type: Completion_Type = .Identifier; + completion_type: Completion_Type = .Identifier if position_context.comp_lit != nil && is_lhs_comp_lit(&position_context) { - completion_type = .Comp_Lit; + completion_type = .Comp_Lit } if position_context.selector != nil { - completion_type = .Selector; + completion_type = .Selector } if position_context.tag != nil { - completion_type = .Directive; + completion_type = .Directive } if position_context.implicit { - completion_type = .Implicit; + completion_type = .Implicit } if position_context.import_stmt != nil { - completion_type = .Package; + completion_type = .Package } if position_context.switch_type_stmt != nil && position_context.case_clause != nil { - if assign, ok := position_context.switch_type_stmt.tag.derived.(ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 { + if assign, ok := position_context.switch_type_stmt.tag.derived.(^ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 { - ast_context.use_globals = true; - ast_context.use_locals = true; + ast_context.use_globals = true + ast_context.use_locals = true if symbol, ok := resolve_type_expression(&ast_context, assign.rhs[0]); ok { if union_value, ok := symbol.value.(index.SymbolUnionValue); ok { - completion_type = .Switch_Type; + completion_type = .Switch_Type } } } @@ -100,22 +100,22 @@ get_completion_list :: proc(document: ^common.Document, position: common.Positio switch completion_type { case .Comp_Lit: - get_comp_lit_completion(&ast_context, &position_context, &list); + get_comp_lit_completion(&ast_context, &position_context, &list) case .Identifier: - get_identifier_completion(&ast_context, &position_context, &list); + get_identifier_completion(&ast_context, &position_context, &list) case .Implicit: - get_implicit_completion(&ast_context, &position_context, &list); + get_implicit_completion(&ast_context, &position_context, &list) case .Selector: - get_selector_completion(&ast_context, &position_context, &list); + get_selector_completion(&ast_context, &position_context, &list) case .Switch_Type: - get_type_switch_completion(&ast_context, &position_context, &list); + get_type_switch_completion(&ast_context, &position_context, &list) case .Directive: - get_directive_completion(&ast_context, &position_context, &list); + get_directive_completion(&ast_context, &position_context, &list) case .Package: - get_package_completion(&ast_context, &position_context, &list); + get_package_completion(&ast_context, &position_context, &list) } - return list, true; + return list, true } get_attribute_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { @@ -124,9 +124,9 @@ get_attribute_completion :: proc(ast_context: ^analysis.AstContext, position_con get_directive_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - list.isIncomplete = false; + list.isIncomplete = false - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) /* Right now just return all the possible completions, but later on I should give the context specific ones @@ -151,28 +151,28 @@ get_directive_completion :: proc(ast_context: ^analysis.AstContext, position_con "procedure", "load", "partial", - }; + } for 
elem in directive_list { item := CompletionItem { detail = elem, label = elem, kind = .Constant, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; + list.items = items[:] } get_comp_lit_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - using analysis; + using analysis - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) if position_context.parent_comp_lit.type == nil { - return; + return } if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok { @@ -180,11 +180,11 @@ get_comp_lit_completion :: proc(ast_context: ^analysis.AstContext, position_cont #partial switch v in comp_symbol.value { case index.SymbolStructValue: for name, i in v.names { - ast_context.current_package = comp_symbol.pkg; + ast_context.current_package = comp_symbol.pkg if resolved, ok := resolve_type_expression(ast_context, v.types[i]); ok { if field_exists_in_comp_lit(position_context.comp_lit, name) { - continue; + continue } item := CompletionItem { @@ -192,126 +192,126 @@ get_comp_lit_completion :: proc(ast_context: ^analysis.AstContext, position_cont kind = .Field, detail = fmt.tprintf("%v.%v: %v", comp_symbol.name, name, common.node_to_string(v.types[i])), documentation = resolved.doc, - }; + } - append(&items, item); + append(&items, item) } } } } } - list.items = items[:]; + list.items = items[:] } get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - using analysis; + using analysis - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) - ast_context.current_package = ast_context.document_package; + ast_context.current_package = ast_context.document_package - selector: index.Symbol; - ok: bool; + selector: index.Symbol + ok: bool - ast_context.use_locals = true; - ast_context.use_globals = true; + ast_context.use_locals = true + ast_context.use_globals = true - selector, ok = resolve_type_expression(ast_context, position_context.selector); + selector, ok = resolve_type_expression(ast_context, position_context.selector) if !ok { - return; + return } //if (selector.type != .Variable && selector.type != .Package && selector.type != .Enum && selector.name != "") || (selector.type == .Variable && selector.type == .Enum) { if selector.type != .Variable && selector.type != .Package { - return; + return } if selector.pkg != "" { - ast_context.current_package = selector.pkg; + ast_context.current_package = selector.pkg } else { - ast_context.current_package = ast_context.document_package; + ast_context.current_package = ast_context.document_package } - field: string; + field: string if position_context.field != nil { - switch v in position_context.field.derived { - case ast.Ident: - field = v.name; + #partial switch v in position_context.field.derived { + case ^ast.Ident: + field = v.name } } if s, ok := selector.value.(index.SymbolProcedureValue); ok { if len(s.return_types) == 1 { if selector, ok = resolve_type_expression(ast_context, s.return_types[0].type); !ok { - return; + return } } } #partial switch v in selector.value { case index.SymbolFixedArrayValue: - list.isIncomplete = true; + list.isIncomplete = true - containsColor := 1; - containsCoord := 1; + containsColor := 1 + containsCoord := 1 - expr_len := 0; + 
expr_len := 0 - if basic, ok := v.len.derived.(ast.Basic_Lit); ok { + if basic, ok := v.len.derived.(^ast.Basic_Lit); ok { if expr_len, ok = strconv.parse_int(basic.tok.text); !ok { - expr_len = 0; + expr_len = 0 } } if field != "" { for i := 0; i < len(field); i += 1 { - c := field[i]; + c := field[i] if _, ok := swizzle_color_components[c]; ok { - containsColor += 1; + containsColor += 1 } else if _, ok := swizzle_coord_components[c]; ok { - containsCoord += 1; + containsCoord += 1 } } } if containsColor == 1 && containsCoord == 1 { - save := expr_len; + save := expr_len for k in swizzle_color_components { if expr_len <= 0 { - break; + break } - expr_len -= 1; + expr_len -= 1 item := CompletionItem { label = fmt.tprintf("%v%c", field, k), kind = .Property, detail = fmt.tprintf("%v%c: %v", field, k, common.node_to_string(v.expr)), - }; - append(&items, item); + } + append(&items, item) } - expr_len = save; + expr_len = save for k in swizzle_coord_components { if expr_len <= 0 { - break; + break } - expr_len -= 1; + expr_len -= 1 item := CompletionItem { label = fmt.tprintf("%v%c", field, k), kind = .Property, detail = fmt.tprintf("%v%c: %v", field, k, common.node_to_string(v.expr)), - }; - append(&items, item); + } + append(&items, item) } } @@ -319,90 +319,90 @@ get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_cont for k in swizzle_color_components { if expr_len <= 0 { - break; + break } - expr_len -= 1; + expr_len -= 1 item := CompletionItem { label = fmt.tprintf("%v%c", field, k), kind = .Property, detail = fmt.tprintf("%v%c: [%v]%v", field, k, containsColor, common.node_to_string(v.expr)), - }; - append(&items, item); + } + append(&items, item) } } else if containsCoord > 1 { for k in swizzle_coord_components { if expr_len <= 0 { - break; + break } - expr_len -= 1; + expr_len -= 1 item := CompletionItem { label = fmt.tprintf("%v%c", field, k), kind = .Property, detail = fmt.tprintf("%v%c: [%v]%v", field, k, containsCoord, common.node_to_string(v.expr)), - }; - append(&items, item); + } + append(&items, item) } } case index.SymbolUnionValue: - list.isIncomplete = false; + list.isIncomplete = false for type in v.types { if symbol, ok := resolve_type_expression(ast_context, type); ok { - base := path.base(symbol.pkg, false, context.temp_allocator); + base := path.base(symbol.pkg, false, context.temp_allocator) item := CompletionItem { kind = .EnumMember, detail = fmt.tprintf("%v", selector.name), documentation = symbol.doc, - }; + } if symbol.pkg == ast_context.document_package || base == "runtime" { - item.label = fmt.aprintf("(%v)", common.node_to_string(type)); + item.label = fmt.aprintf("(%v)", common.node_to_string(type)) } else { - item.label = fmt.aprintf("(%v.%v)", path.base(symbol.pkg, false, context.temp_allocator), common.node_to_string(type)); + item.label = fmt.aprintf("(%v.%v)", path.base(symbol.pkg, false, context.temp_allocator), common.node_to_string(type)) } - append(&items, item); + append(&items, item) } } case index.SymbolEnumValue: - list.isIncomplete = false; + list.isIncomplete = false for name in v.names { item := CompletionItem { label = name, kind = .EnumMember, detail = fmt.tprintf("%v.%v", selector.name, name), - }; + } - append(&items, item); + append(&items, item) } case index.SymbolStructValue: - list.isIncomplete = false; + list.isIncomplete = false for name, i in v.names { if selector.pkg != "" { - ast_context.current_package = selector.pkg; + ast_context.current_package = selector.pkg } else { - ast_context.current_package = 
ast_context.document_package; + ast_context.current_package = ast_context.document_package } if symbol, ok := resolve_type_expression(ast_context, v.types[i]); ok { - if expr, ok := position_context.selector.derived.(ast.Selector_Expr); ok { + if expr, ok := position_context.selector.derived.(^ast.Selector_Expr); ok { if expr.op.text == "->" && symbol.type != .Function { - continue; + continue } } if position_context.arrow && symbol.type != .Function { - continue; + continue } item := CompletionItem { @@ -410,9 +410,9 @@ get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_cont kind = .Field, detail = fmt.tprintf("%v.%v: %v", selector.name, name, type_to_string(ast_context, v.types[i])), documentation = symbol.doc, - }; + } - append(&items, item); + append(&items, item) } else { //just give some generic symbol with name. item := CompletionItem { @@ -420,75 +420,75 @@ get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_cont kind = .Field, detail = fmt.tprintf("%v: %v", name, common.node_to_string(v.types[i])), documentation = symbol.doc, - }; + } - append(&items, item); + append(&items, item) } } case index.SymbolPackageValue: - list.isIncomplete = true; + list.isIncomplete = true if searched, ok := index.fuzzy_search(field, {selector.pkg}); ok { for search in searched { - symbol := search.symbol; + symbol := search.symbol - resolve_unresolved_symbol(ast_context, &symbol); - build_procedure_symbol_signature(&symbol); + resolve_unresolved_symbol(ast_context, &symbol) + build_procedure_symbol_signature(&symbol) item := CompletionItem { label = symbol.name, kind = cast(CompletionItemKind)symbol.type, detail = concatenate_symbol_information(ast_context, symbol, true), documentation = symbol.doc, - }; + } if symbol.type == .Function { - item.insertText = fmt.tprintf("%v($0)", item.label); - item.insertTextFormat = .Snippet; - item.command.command = "editor.action.triggerParameterHints"; - item.deprecated = .Deprecated in symbol.flags; + item.insertText = fmt.tprintf("%v($0)", item.label) + item.insertTextFormat = .Snippet + item.command.command = "editor.action.triggerParameterHints" + item.deprecated = .Deprecated in symbol.flags } - append(&items, item); + append(&items, item) } } else { - log.errorf("Failed to fuzzy search, field: %v, package: %v", field, selector.pkg); - return; + log.errorf("Failed to fuzzy search, field: %v, package: %v", field, selector.pkg) + return } } - list.items = items[:]; + list.items = items[:] } get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - using analysis; + using analysis - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) - list.isIncomplete = false; + list.isIncomplete = false - selector: index.Symbol; + selector: index.Symbol - ast_context.use_locals = true; - ast_context.use_globals = true; + ast_context.use_locals = true + ast_context.use_globals = true if selector.pkg != "" { - ast_context.current_package = selector.pkg; + ast_context.current_package = selector.pkg } else { - ast_context.current_package = ast_context.document_package; + ast_context.current_package = ast_context.document_package } //enum switch infer if position_context.switch_stmt != nil && position_context.case_clause != nil && position_context.switch_stmt.cond != nil { - used_enums := make(map[string]bool, 5, context.temp_allocator); + used_enums := 
make(map[string]bool, 5, context.temp_allocator) - if block, ok := position_context.switch_stmt.body.derived.(ast.Block_Stmt); ok { + if block, ok := position_context.switch_stmt.body.derived.(^ast.Block_Stmt); ok { for stmt in block.stmts { - if case_clause, ok := stmt.derived.(ast.Case_Clause); ok { + if case_clause, ok := stmt.derived.(^ast.Case_Clause); ok { for name in case_clause.list { - if implicit, ok := name.derived.(ast.Implicit_Selector_Expr); ok { - used_enums[implicit.field.name] = true; + if implicit, ok := name.derived.(^ast.Implicit_Selector_Expr); ok { + used_enums[implicit.field.name] = true } } } @@ -498,20 +498,20 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont if enum_value, ok := unwrap_enum(ast_context, position_context.switch_stmt.cond); ok { for name in enum_value.names { if name in used_enums { - continue; + continue } item := CompletionItem { label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } @@ -527,13 +527,13 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } @@ -549,13 +549,13 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } @@ -563,13 +563,17 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont //infer bitset and enums based on the identifier comp_lit, i.e. a := My_Struct { my_ident = . 
} if position_context.comp_lit != nil { if position_context.parent_comp_lit.type == nil { - return; + return } - field_name: string; + field_name: string if position_context.field_value != nil { - field_name = position_context.field_value.field.derived.(ast.Ident).name; + if field, ok := position_context.field_value.field.derived.(^ast.Ident); ok { + field_name = field.name + } else { + return + } } if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok { @@ -577,27 +581,27 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont if s, ok := comp_symbol.value.(index.SymbolStructValue); ok { //We can either have the final - elem_index := -1; + elem_index := -1 for elem, i in comp_lit.elems { if position_in_node(elem, position_context.position) { - elem_index = i; + elem_index = i } } - type: ^ast.Expr; + type: ^ast.Expr for name, i in s.names { if name != field_name { - continue; + continue } - type = s.types[i]; - break; + type = s.types[i] + break } if type == nil && len(s.types) > elem_index { - type = s.types[elem_index]; + type = s.types[elem_index] } if enum_value, ok := unwrap_enum(ast_context, type); ok { @@ -606,13 +610,13 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = enum_name, kind = .EnumMember, detail = enum_name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } else if bitset_symbol, ok := resolve_type_expression(ast_context, type); ok { if value, ok := unwrap_bitset(ast_context, bitset_symbol); ok { for name in value.names { @@ -621,12 +625,12 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } @@ -635,15 +639,15 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont } if position_context.binary != nil && (position_context.binary.op.text == "==" || position_context.binary.op.text == "!=") { - context_node: ^ast.Expr; - enum_node: ^ast.Expr; + context_node: ^ast.Expr + enum_node: ^ast.Expr if position_in_node(position_context.binary.right, position_context.position) { - context_node = position_context.binary.right; - enum_node = position_context.binary.left; + context_node = position_context.binary.right + enum_node = position_context.binary.left } else if position_in_node(position_context.binary.left, position_context.position) { - context_node = position_context.binary.left; - enum_node = position_context.binary.right; + context_node = position_context.binary.left + enum_node = position_context.binary.right } if context_node != nil && enum_node != nil { @@ -653,34 +657,34 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } if position_context.assign != nil && position_context.assign.rhs != nil && position_context.assign.lhs != nil { - rhs_index: int; + rhs_index: int for elem in position_context.assign.rhs { if position_in_node(elem, position_context.position) { - break; + break } else { //procedures are the only types that can return more than one value if symbol, ok := resolve_type_expression(ast_context, elem); ok { if procedure, ok := 
symbol.value.(index.SymbolProcedureValue); ok { if procedure.return_types == nil { - return; + return } - rhs_index += len(procedure.return_types); + rhs_index += len(procedure.return_types) } else { - rhs_index += 1; + rhs_index += 1 } } } @@ -693,37 +697,37 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } if position_context.returns != nil && position_context.function != nil { - return_index: int; + return_index: int if position_context.returns.results == nil { - return; + return } for result, i in position_context.returns.results { if position_in_node(result, position_context.position) { - return_index = i; - break; + return_index = i + break } } if position_context.function.type == nil { - return; + return } if position_context.function.type.results == nil { - return; + return } if len(position_context.function.type.results.list) > return_index { @@ -733,24 +737,24 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } if position_context.call != nil { - if call, ok := position_context.call.derived.(ast.Call_Expr); ok { - parameter_index, parameter_ok := find_position_in_call_param(ast_context, call); + if call, ok := position_context.call.derived.(^ast.Call_Expr); ok { + parameter_index, parameter_ok := find_position_in_call_param(ast_context, call^) if symbol, ok := resolve_type_expression(ast_context, call.expr); ok && parameter_ok { if proc_value, ok := symbol.value.(index.SymbolProcedureValue); ok { if len(proc_value.arg_types) <= parameter_index { - return; + return } if enum_value, ok := unwrap_enum(ast_context, proc_value.arg_types[parameter_index].type); ok { @@ -759,13 +763,13 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont label = name, kind = .EnumMember, detail = name, - }; + } - append(&items, item); + append(&items, item) } - list.items = items[:]; - return; + list.items = items[:] + return } } } @@ -774,11 +778,11 @@ get_implicit_completion :: proc(ast_context: ^analysis.AstContext, position_cont } get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - using analysis; + using analysis - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) - list.isIncomplete = true; + list.isIncomplete = true CombinedResult :: struct { score: f32, @@ -789,51 +793,51 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co pkg: string, signature: string, flags: index.SymbolFlags, - }; + } combined_sort_interface :: proc(s: ^[dynamic]CombinedResult) -> sort.Interface { return sort.Interface { collection = rawptr(s), len = proc(it: sort.Interface) -> int { - s := (^[dynamic]CombinedResult)(it.collection); - return len(s^); + s := (^[dynamic]CombinedResult)(it.collection) + return len(s^) }, less = proc(it: sort.Interface, i, j: int) -> bool { - s := (^[dynamic]CombinedResult)(it.collection); - return s[i].score > s[j].score; + s := (^[dynamic]CombinedResult)(it.collection) + return s[i].score > s[j].score }, swap = proc(it: sort.Interface, i, 
j: int) { - s := (^[dynamic]CombinedResult)(it.collection); - s[i], s[j] = s[j], s[i]; + s := (^[dynamic]CombinedResult)(it.collection) + s[i], s[j] = s[j], s[i] }, - }; - }; + } + } - combined := make([dynamic]CombinedResult); + combined := make([dynamic]CombinedResult) - lookup := ""; + lookup := "" if position_context.identifier != nil { - if ident, ok := position_context.identifier.derived.(ast.Ident); ok { - lookup = ident.name; + if ident, ok := position_context.identifier.derived.(^ast.Ident); ok { + lookup = ident.name } } - pkgs := make([dynamic]string, context.temp_allocator); + pkgs := make([dynamic]string, context.temp_allocator) - usings := get_using_packages(ast_context); + usings := get_using_packages(ast_context) for u in usings { - append(&pkgs, u); + append(&pkgs, u) } - append(&pkgs, ast_context.document_package); + append(&pkgs, ast_context.document_package) if results, ok := index.fuzzy_search(lookup, pkgs[:]); ok { for r in results { - r := r; - resolve_unresolved_symbol(ast_context, &r.symbol); - build_procedure_symbol_signature(&r.symbol); + r := r + resolve_unresolved_symbol(ast_context, &r.symbol) + build_procedure_symbol_signature(&r.symbol) if r.symbol.uri != ast_context.uri { append(&combined, CombinedResult { score = r.score, @@ -843,36 +847,36 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = r.symbol.flags, signature = r.symbol.signature, pkg = r.symbol.pkg, - }); + }) } } } - matcher := common.make_fuzzy_matcher(lookup); + matcher := common.make_fuzzy_matcher(lookup) global: for k, v in ast_context.globals { if position_context.global_lhs_stmt { - break; + break } //combined is sorted and should do binary search instead. for result in combined { if result.name == k { - continue global; + continue global } } - ast_context.use_locals = true; - ast_context.use_globals = true; - ast_context.current_package = ast_context.document_package; + ast_context.use_locals = true + ast_context.use_globals = true + ast_context.current_package = ast_context.document_package - ident := index.new_type(ast.Ident, v.expr.pos, v.expr.end, context.temp_allocator); - ident.name = k; + ident := index.new_type(ast.Ident, v.expr.pos, v.expr.end, context.temp_allocator) + ident.name = k if symbol, ok := resolve_type_identifier(ast_context, ident^); ok { - symbol.signature = get_signature(ast_context, ident^, symbol); + symbol.signature = get_signature(ast_context, ident^, symbol) - build_procedure_symbol_signature(&symbol); + build_procedure_symbol_signature(&symbol) if score, ok := common.fuzzy_match(matcher, ident.name); ok == 1 { append(&combined, CombinedResult { @@ -883,7 +887,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = symbol.flags, pkg = symbol.pkg, signature = symbol.signature, - }); + }) } } } @@ -891,22 +895,22 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co for _, local in ast_context.locals { for k, v in local { if position_context.global_lhs_stmt { - break; + break } - local_offset := get_local_offset(ast_context, position_context.position, k); + local_offset := get_local_offset(ast_context, position_context.position, k) - ast_context.use_locals = true; - ast_context.use_globals = true; - ast_context.current_package = ast_context.document_package; + ast_context.use_locals = true + ast_context.use_globals = true + ast_context.current_package = ast_context.document_package - ident := index.new_type(ast.Ident, {offset = local_offset}, {offset = 
local_offset}, context.temp_allocator); - ident.name = k; + ident := index.new_type(ast.Ident, {offset = local_offset}, {offset = local_offset}, context.temp_allocator) + ident.name = k if symbol, ok := resolve_type_identifier(ast_context, ident^); ok { - symbol.signature = get_signature(ast_context, ident^, symbol); + symbol.signature = get_signature(ast_context, ident^, symbol) - build_procedure_symbol_signature(&symbol); + build_procedure_symbol_signature(&symbol) if score, ok := common.fuzzy_match(matcher, ident.name); ok == 1 { append(&combined, CombinedResult { @@ -917,7 +921,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = symbol.flags, pkg = symbol.pkg, signature = symbol.signature, - }); + }) } } } @@ -925,13 +929,13 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co for pkg in ast_context.imports { if position_context.global_lhs_stmt { - break; + break } symbol := index.Symbol { name = pkg.base, type = .Package, - }; + } if score, ok := common.fuzzy_match(matcher, symbol.name); ok == 1 { append(&combined, CombinedResult { @@ -942,7 +946,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = symbol.flags, signature = symbol.signature, pkg = symbol.pkg, - }); + }) } } @@ -950,7 +954,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co symbol := index.Symbol { name = keyword, type = .Keyword, - }; + } if score, ok := common.fuzzy_match(matcher, keyword); ok == 1 { append(&combined, CombinedResult { @@ -961,7 +965,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = symbol.flags, signature = symbol.signature, pkg = symbol.pkg, - }); + }) } } @@ -969,7 +973,7 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co symbol := index.Symbol { name = keyword, type = .Keyword, - }; + } if score, ok := common.fuzzy_match(matcher, keyword); ok == 1 { append(&combined, CombinedResult { @@ -980,29 +984,29 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co flags = symbol.flags, signature = symbol.signature, pkg = symbol.pkg, - }); + }) } } if common.config.enable_snippets { for k, v in snippets { if score, ok := common.fuzzy_match(matcher, k); ok == 1 { - append(&combined, CombinedResult {score = score * 1.1, snippet = v, name = k}); + append(&combined, CombinedResult {score = score * 1.1, snippet = v, name = k}) } } } - sort.sort(combined_sort_interface(&combined)); + sort.sort(combined_sort_interface(&combined)) //hard code for now - top_results := combined[0:(min(50, len(combined)))]; + top_results := combined[0:(min(50, len(combined)))] for result in top_results { - result := result; + result := result //Skip procedures when the position is in proc decl if position_in_proc_decl(position_context) && result.type == .Function && common.config.enable_procedure_context { - continue; + continue } if result.snippet.insert != "" { @@ -1012,72 +1016,72 @@ get_identifier_completion :: proc(ast_context: ^analysis.AstContext, position_co kind = .Snippet, detail = result.snippet.detail, insertTextFormat = .Snippet, - }; + } - edits := make([dynamic]TextEdit, context.temp_allocator); + edits := make([dynamic]TextEdit, context.temp_allocator) for pkg in result.snippet.packages { - edit, ok := get_core_insert_package_if_non_existent(ast_context, pkg); + edit, ok := get_core_insert_package_if_non_existent(ast_context, pkg) if ok { - append(&edits, edit); + 
append(&edits, edit) } } - item.additionalTextEdits = edits[:]; + item.additionalTextEdits = edits[:] - append(&items, item); + append(&items, item) } else { item := CompletionItem { label = result.name, documentation = result.doc, - }; + } - item.kind = cast(CompletionItemKind)result.type; + item.kind = cast(CompletionItemKind)result.type if result.type == .Function { - item.insertText = fmt.tprintf("%v($0)", item.label); - item.insertTextFormat = .Snippet; - item.deprecated = .Deprecated in result.flags; - item.command.command = "editor.action.triggerParameterHints"; + item.insertText = fmt.tprintf("%v($0)", item.label) + item.insertTextFormat = .Snippet + item.deprecated = .Deprecated in result.flags + item.command.command = "editor.action.triggerParameterHints" } - item.detail = concatenate_symbol_information(ast_context, result.pkg, result.name, result.signature, result.type, true); + item.detail = concatenate_symbol_information(ast_context, result.pkg, result.name, result.signature, result.type, true) - append(&items, item); + append(&items, item) } } - list.items = items[:]; + list.items = items[:] } get_package_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - items := make([dynamic]CompletionItem, context.temp_allocator); + items := make([dynamic]CompletionItem, context.temp_allocator) - list.isIncomplete = false; + list.isIncomplete = false - fullpath_length := len(position_context.import_stmt.fullpath); + fullpath_length := len(position_context.import_stmt.fullpath) if fullpath_length <= 1 { - return; + return } - without_quotes := position_context.import_stmt.fullpath[1:fullpath_length-1]; - absolute_path := without_quotes; - colon_index := strings.index(without_quotes, ":"); + without_quotes := position_context.import_stmt.fullpath[1:fullpath_length-1] + absolute_path := without_quotes + colon_index := strings.index(without_quotes, ":") if colon_index >= 0 { - c := without_quotes[0:colon_index]; + c := without_quotes[0:colon_index] if colon_index+1 < len(without_quotes) { - absolute_path = filepath.join(elems = {common.config.collections[c], filepath.dir(without_quotes[colon_index+1:], context.temp_allocator)}, allocator = context.temp_allocator); + absolute_path = filepath.join(elems = {common.config.collections[c], filepath.dir(without_quotes[colon_index+1:], context.temp_allocator)}, allocator = context.temp_allocator) } else { - absolute_path = common.config.collections[c]; + absolute_path = common.config.collections[c] } } else { - import_file_dir := filepath.dir(position_context.import_stmt.pos.file, context.temp_allocator); - import_dir := filepath.dir(without_quotes, context.temp_allocator); - absolute_path = filepath.join(elems = {import_file_dir, import_dir}, allocator = context.temp_allocator); + import_file_dir := filepath.dir(position_context.import_stmt.pos.file, context.temp_allocator) + import_dir := filepath.dir(without_quotes, context.temp_allocator) + absolute_path = filepath.join(elems = {import_file_dir, import_dir}, allocator = context.temp_allocator) } if !strings.contains(position_context.import_stmt.fullpath, "/") && !strings.contains(position_context.import_stmt.fullpath, ":") { @@ -1087,9 +1091,9 @@ get_package_completion :: proc(ast_context: ^analysis.AstContext, position_conte detail = "collection", label = key, kind = .Module, - }; + } - append(&items, item); + append(&items, item) } } @@ -1099,103 +1103,103 @@ get_package_completion :: proc(ast_context: 
^analysis.AstContext, position_conte detail = pkg, label = filepath.base(pkg), kind = .Folder, - }; + } if item.label[0] == '.' { - continue; + continue } - append(&items, item); + append(&items, item) } - list.items = items[:]; + list.items = items[:] } search_for_packages :: proc(fullpath: string) -> [] string { - packages := make([dynamic]string, context.temp_allocator); + packages := make([dynamic]string, context.temp_allocator) - fh, err := os.open(fullpath); + fh, err := os.open(fullpath) if err != 0 { - return {}; + return {} } if files, err := os.read_dir(fh, 0, context.temp_allocator); err == 0 { for file in files { if file.is_dir { - append(&packages, file.fullpath); + append(&packages, file.fullpath) } } } - return packages[:]; + return packages[:] } get_type_switch_completion :: proc(ast_context: ^analysis.AstContext, position_context: ^analysis.DocumentPositionContext, list: ^CompletionList) { - using analysis; + using analysis - items := make([dynamic]CompletionItem, context.temp_allocator); - list.isIncomplete = false; + items := make([dynamic]CompletionItem, context.temp_allocator) + list.isIncomplete = false - used_unions := make(map[string]bool, 5, context.temp_allocator); + used_unions := make(map[string]bool, 5, context.temp_allocator) - if block, ok := position_context.switch_type_stmt.body.derived.(ast.Block_Stmt); ok { + if block, ok := position_context.switch_type_stmt.body.derived.(^ast.Block_Stmt); ok { for stmt in block.stmts { - if case_clause, ok := stmt.derived.(ast.Case_Clause); ok { + if case_clause, ok := stmt.derived.(^ast.Case_Clause); ok { for name in case_clause.list { - if ident, ok := name.derived.(ast.Ident); ok { - used_unions[ident.name] = true; + if ident, ok := name.derived.(^ast.Ident); ok { + used_unions[ident.name] = true } } } } } - ast_context.use_locals = true; - ast_context.use_globals = true; + ast_context.use_locals = true + ast_context.use_globals = true - if assign, ok := position_context.switch_type_stmt.tag.derived.(ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 { + if assign, ok := position_context.switch_type_stmt.tag.derived.(^ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 { if union_value, ok := unwrap_union(ast_context, assign.rhs[0]); ok { for type, i in union_value.types { - name := common.node_to_string(type); + name := common.node_to_string(type) if name in used_unions { - continue; + continue } if symbol, ok := resolve_type_expression(ast_context, union_value.types[i]); ok { item := CompletionItem { kind = .EnumMember, - }; + } if symbol.pkg == ast_context.document_package { - item.label = fmt.aprintf("%v", common.node_to_string(union_value.types[i])); - item.detail = item.label; + item.label = fmt.aprintf("%v", common.node_to_string(union_value.types[i])) + item.detail = item.label } else { - item.label = fmt.aprintf("%v.%v", path.base(symbol.pkg, false, context.temp_allocator), name); - item.detail = item.label; + item.label = fmt.aprintf("%v.%v", path.base(symbol.pkg, false, context.temp_allocator), name) + item.detail = item.label } - append(&items, item); + append(&items, item) } } } } - list.items = items[:]; + list.items = items[:] } get_core_insert_package_if_non_existent :: proc(ast_context: ^analysis.AstContext, pkg: string) -> (TextEdit, bool) { - builder := strings.make_builder(context.temp_allocator); + builder := strings.make_builder(context.temp_allocator) for imp in ast_context.imports { if imp.base == pkg { - return {}, false; + return {}, false } } - 
strings.write_string(&builder, fmt.tprintf("import \"core:%v\"", pkg)); + strings.write_string(&builder, fmt.tprintf("import \"core:%v\"", pkg)) return { newText = strings.to_string(builder), @@ -1209,7 +1213,7 @@ get_core_insert_package_if_non_existent :: proc(ast_context: ^analysis.AstContex character = 0, }, }, - }, true; + }, true } bitset_operators: map[string]bool = { @@ -1219,7 +1223,7 @@ bitset_operators: map[string]bool = { "<" = true, ">" = true, "==" = true, -}; +} bitset_assignment_operators: map[string]bool = { "|=" = true, @@ -1228,14 +1232,14 @@ bitset_assignment_operators: map[string]bool = { "<=" = true, ">=" = true, "=" = true, -}; +} is_bitset_binary_operator :: proc(op: string) -> bool { - return op in bitset_operators; + return op in bitset_operators } is_bitset_assignment_operator :: proc(op: string) -> bool { - return op in bitset_assignment_operators; + return op in bitset_assignment_operators } language_keywords: []string = { @@ -1280,14 +1284,14 @@ language_keywords: []string = { "using", "or_return", "or_else", -}; +} swizzle_color_components: map[u8]bool = { 'r' = true, 'g' = true, 'b' = true, 'a' = true, -}; +} swizzle_coord_components: map[u8]bool = { 'x' = true, diff --git a/src/server/definition.odin b/src/server/definition.odin index 116fbd9..cfa2c49 100644 --- a/src/server/definition.odin +++ b/src/server/definition.odin @@ -20,123 +20,120 @@ import "shared:index" import "shared:analysis" get_definition_location :: proc(document: ^common.Document, position: common.Position) -> ([]common.Location, bool) { - using analysis; + using analysis - locations := make([dynamic]common.Location, context.temp_allocator); + locations := make([dynamic]common.Location, context.temp_allocator) - location: common.Location; + location: common.Location - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - uri: string; + uri: string - position_context, ok := get_document_position_context(document, position, .Definition); + position_context, ok := get_document_position_context(document, position, .Definition) if !ok { - log.warn("Failed to get position context"); - return {}, false; + log.warn("Failed to get position context") + return {}, false } - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) if position_context.function != nil { - get_locals(document.ast, position_context.function, &ast_context, &position_context); + get_locals(document.ast, position_context.function, &ast_context, &position_context) } if position_context.selector != nil { - //if the base selector is the client wants to go to. 
- if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil { - - ident := position_context.identifier.derived.(ast.Ident); + if base, ok := position_context.selector.derived.(^ast.Ident); ok && position_context.identifier != nil { + ident := position_context.identifier.derived.(^ast.Ident) if ident.name == base.name { - - if resolved, ok := resolve_location_identifier(&ast_context, ident); ok { - location.range = resolved.range; + if resolved, ok := resolve_location_identifier(&ast_context, ident^); ok { + location.range = resolved.range if resolved.uri == "" { - location.uri = document.uri.uri; + location.uri = document.uri.uri } else { - location.uri = resolved.uri; + location.uri = resolved.uri } - append(&locations, location); + append(&locations, location) - return locations[:], true; + return locations[:], true } else { - return {}, false; + return {}, false } } } //otherwise it's the field the client wants to go to. - selector: index.Symbol; + selector: index.Symbol - ast_context.use_locals = true; - ast_context.use_globals = true; - ast_context.current_package = ast_context.document_package; + ast_context.use_locals = true + ast_context.use_globals = true + ast_context.current_package = ast_context.document_package - selector, ok = resolve_type_expression(&ast_context, position_context.selector); + selector, ok = resolve_type_expression(&ast_context, position_context.selector) if !ok { - return {}, false; + return {}, false } - field: string; + field: string if position_context.field != nil { - switch v in position_context.field.derived { - case ast.Ident: - field = v.name; + #partial switch v in position_context.field.derived { + case ^ast.Ident: + field = v.name } } - uri = selector.uri; + uri = selector.uri #partial switch v in selector.value { case index.SymbolEnumValue: - location.range = selector.range; + location.range = selector.range case index.SymbolStructValue: for name, i in v.names { if strings.compare(name, field) == 0 { - location.range = common.get_token_range(v.types[i]^, document.ast.src); + location.range = common.get_token_range(v.types[i]^, document.ast.src) } } case index.SymbolPackageValue: if symbol, ok := index.lookup(field, selector.pkg); ok { - location.range = symbol.range; - uri = symbol.uri; + location.range = symbol.range + uri = symbol.uri } else { - return {}, false; + return {}, false } } if !ok { - return {}, false; + return {}, false } } else if position_context.identifier != nil { - if resolved, ok := resolve_location_identifier(&ast_context, position_context.identifier.derived.(ast.Ident)); ok { - location.range = resolved.range; - uri = resolved.uri; + if resolved, ok := resolve_location_identifier(&ast_context, position_context.identifier.derived.(^ast.Ident)^); ok { + location.range = resolved.range + uri = resolved.uri } else { - return {}, false; + return {}, false } } else { - return {}, false; + return {}, false } //if the symbol is generated by the ast we don't set the uri. if uri == "" { - location.uri = document.uri.uri; + location.uri = document.uri.uri } else { - location.uri = uri; + location.uri = uri } append(&locations, location) - return locations[:], true; + return locations[:], true }
\ No newline at end of file diff --git a/src/server/document_links.odin b/src/server/document_links.odin index 9295c33..ec1cecb 100644 --- a/src/server/document_links.odin +++ b/src/server/document_links.odin @@ -20,23 +20,23 @@ import "shared:index" import "shared:analysis" get_document_links :: proc(document: ^common.Document) -> ([]DocumentLink, bool) { - using analysis; + using analysis - links := make([dynamic]DocumentLink, 0, context.temp_allocator); + links := make([dynamic]DocumentLink, 0, context.temp_allocator) for imp in document.ast.imports { if len(imp.relpath.text) <= 1 { - continue; + continue } - e := strings.split(imp.relpath.text[1:len(imp.relpath.text)-1], ":", context.temp_allocator); + e := strings.split(imp.relpath.text[1:len(imp.relpath.text)-1], ":", context.temp_allocator) if len(e) != 2 { - continue; + continue } if e[0] != "core" { - continue; + continue } //Temporarly assuming non unicode @@ -53,16 +53,16 @@ get_document_links :: proc(document: ^common.Document) -> ([]DocumentLink, bool) }, } - range := common.get_token_range(node, string(document.text)); + range := common.get_token_range(node, string(document.text)) link := DocumentLink { range = range, target = fmt.tprintf("https://pkg.odin-lang.org/%v/%v", e[0], e[1]), tooltip = "Documentation", - }; + } - append(&links, link); + append(&links, link) } - return links[:], true; + return links[:], true } diff --git a/src/server/document_symbols.odin b/src/server/document_symbols.odin index 7105465..667b94a 100644 --- a/src/server/document_symbols.odin +++ b/src/server/document_symbols.odin @@ -21,22 +21,22 @@ import "shared:analysis" get_document_symbols :: proc(document: ^common.Document) -> []DocumentSymbol { - using analysis; + using analysis - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) - symbols := make([dynamic]DocumentSymbol, context.temp_allocator); + symbols := make([dynamic]DocumentSymbol, context.temp_allocator) - package_symbol: DocumentSymbol; + package_symbol: DocumentSymbol if len(document.ast.decls) == 0 { - return {}; + return {} } - package_symbol.kind = .Package; - package_symbol.name = path.base(document.package_name, false, context.temp_allocator); + package_symbol.kind = .Package + package_symbol.name = path.base(document.package_name, false, context.temp_allocator) package_symbol.range = { start = { line = document.ast.decls[0].pos.line, @@ -44,35 +44,35 @@ get_document_symbols :: proc(document: ^common.Document) -> []DocumentSymbol { end = { line = document.ast.decls[len(document.ast.decls) - 1].end.line, }, - }; - package_symbol.selectionRange = package_symbol.range; + } + package_symbol.selectionRange = package_symbol.range - children_symbols := make([dynamic]DocumentSymbol, context.temp_allocator); + children_symbols := make([dynamic]DocumentSymbol, context.temp_allocator) for k, global in ast_context.globals { - symbol: DocumentSymbol; - symbol.range = common.get_token_range(global.expr, ast_context.file.src); - symbol.selectionRange = symbol.range; - symbol.name = k; - - switch v in global.expr.derived { - case ast.Struct_Type: - symbol.kind = .Struct; - case ast.Proc_Lit, ast.Proc_Group: - symbol.kind = .Function; - case ast.Enum_Type, ast.Union_Type: - symbol.kind = .Enum; + symbol: DocumentSymbol + symbol.range = 
common.get_token_range(global.expr, ast_context.file.src) + symbol.selectionRange = symbol.range + symbol.name = k + + #partial switch v in global.expr.derived { + case ^ast.Struct_Type: + symbol.kind = .Struct + case ^ast.Proc_Lit, ^ast.Proc_Group: + symbol.kind = .Function + case ^ast.Enum_Type, ^ast.Union_Type: + symbol.kind = .Enum case: - symbol.kind = .Variable; + symbol.kind = .Variable } - append(&children_symbols, symbol); + append(&children_symbols, symbol) } - package_symbol.children = children_symbols[:]; + package_symbol.children = children_symbols[:] - append(&symbols, package_symbol); + append(&symbols, package_symbol) - return symbols[:]; + return symbols[:] } diff --git a/src/server/documents.odin b/src/server/documents.odin index 07800e0..3bb4a01 100644 --- a/src/server/documents.odin +++ b/src/server/documents.odin @@ -27,58 +27,58 @@ DocumentStorage :: struct { free_allocators: [dynamic]^common.Scratch_Allocator, } -document_storage: DocumentStorage; +document_storage: DocumentStorage document_storage_shutdown :: proc() { for k, v in document_storage.documents { - delete(k); + delete(k) } for alloc in document_storage.free_allocators { - common.scratch_allocator_destroy(alloc); - free(alloc); + common.scratch_allocator_destroy(alloc) + free(alloc) } - delete(document_storage.free_allocators); - delete(document_storage.documents); + delete(document_storage.free_allocators) + delete(document_storage.documents) } document_get_allocator :: proc() -> ^common.Scratch_Allocator { if len(document_storage.free_allocators) > 0 { - return pop(&document_storage.free_allocators); + return pop(&document_storage.free_allocators) } else { - allocator := new(common.Scratch_Allocator); - common.scratch_allocator_init(allocator, mem.megabytes(1)); - return allocator; + allocator := new(common.Scratch_Allocator) + common.scratch_allocator_init(allocator, mem.megabytes(1)) + return allocator } } document_free_allocator :: proc(allocator: ^common.Scratch_Allocator) { - append(&document_storage.free_allocators, allocator); + append(&document_storage.free_allocators, allocator) } document_get :: proc(uri_string: string) -> ^common.Document { - uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator); + uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator) if !parsed_ok { - return nil; + return nil } - document := &document_storage.documents[uri.path]; + document := &document_storage.documents[uri.path] if document == nil { - log.errorf("Failed to get document %v", uri.path); - return nil; + log.errorf("Failed to get document %v", uri.path) + return nil } - intrinsics.atomic_add(&document.operating_on, 1); + intrinsics.atomic_add(&document.operating_on, 1) - return document; + return document } document_release :: proc(document: ^common.Document) { if document != nil { - intrinsics.atomic_sub(&document.operating_on, 1); + intrinsics.atomic_sub(&document.operating_on, 1) } } @@ -87,27 +87,27 @@ document_release :: proc(document: ^common.Document) { */ document_open :: proc(uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error { - uri, parsed_ok := common.parse_uri(uri_string, context.allocator); + uri, parsed_ok := common.parse_uri(uri_string, context.allocator) if !parsed_ok { - log.error("Failed to parse uri"); - return .ParseError; + log.error("Failed to parse uri") + return .ParseError } if document := &document_storage.documents[uri.path]; document != nil { if document.client_owned { - log.errorf("Client called open on an 
already open document: %v ", document.uri.path); - return .InvalidRequest; + log.errorf("Client called open on an already open document: %v ", document.uri.path) + return .InvalidRequest } - document.uri = uri; - document.client_owned = true; - document.text = transmute([]u8)text; - document.used_text = len(document.text); - document.allocator = document_get_allocator(); + document.uri = uri + document.client_owned = true + document.text = transmute([]u8)text + document.used_text = len(document.text) + document.allocator = document_get_allocator() if err := document_refresh(document, config, writer); err != .None { - return err; + return err } } else { document := common.Document { @@ -116,142 +116,142 @@ document_open :: proc(uri_string: string, text: string, config: ^common.Config, client_owned = true, used_text = len(text), allocator = document_get_allocator(), - }; + } if err := document_refresh(&document, config, writer); err != .None { - return err; + return err } - document_storage.documents[strings.clone(uri.path)] = document; + document_storage.documents[strings.clone(uri.path)] = document } - delete(uri_string); + delete(uri_string) - return .None; + return .None } /* Function that applies changes to the given document through incremental syncronization */ document_apply_changes :: proc(uri_string: string, changes: [dynamic]TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error { - uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator); + uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator) if !parsed_ok { - return .ParseError; + return .ParseError } - document := &document_storage.documents[uri.path]; + document := &document_storage.documents[uri.path] if !document.client_owned { - log.errorf("Client called change on an document not opened: %v ", document.uri.path); - return .InvalidRequest; + log.errorf("Client called change on an document not opened: %v ", document.uri.path) + return .InvalidRequest } for change in changes { //for some reason sublime doesn't seem to care even if i tell it to do incremental sync if range, ok := change.range.(common.Range); ok { - absolute_range, ok := common.get_absolute_range(range, document.text[:document.used_text]); + absolute_range, ok := common.get_absolute_range(range, document.text[:document.used_text]) if !ok { - return .ParseError; + return .ParseError } //lower bound is before the change - lower := document.text[:absolute_range.start]; + lower := document.text[:absolute_range.start] //new change between lower and upper - middle := change.text; + middle := change.text //upper bound is after the change - upper := document.text[absolute_range.end:document.used_text]; + upper := document.text[absolute_range.end:document.used_text] //total new size needed - document.used_text = len(lower) + len(change.text) + len(upper); + document.used_text = len(lower) + len(change.text) + len(upper) //Reduce the amount of allocation by allocating more memory than needed if document.used_text > len(document.text) { - new_text := make([]u8, document.used_text * 2); + new_text := make([]u8, document.used_text * 2) //join the 3 splices into the text - copy(new_text, lower); - copy(new_text[len(lower):], middle); - copy(new_text[len(lower) + len(middle):], upper); + copy(new_text, lower) + copy(new_text[len(lower):], middle) + copy(new_text[len(lower) + len(middle):], upper) - delete(document.text); + delete(document.text) - document.text = new_text; + document.text = new_text } else { 
//order matters here, we need to make sure we swap the data already in the text before the middle - copy(document.text, lower); - copy(document.text[len(lower) + len(middle):], upper); - copy(document.text[len(lower):], middle); + copy(document.text, lower) + copy(document.text[len(lower) + len(middle):], upper) + copy(document.text[len(lower):], middle) } } else { - document.used_text = len(change.text); + document.used_text = len(change.text) if document.used_text > len(document.text) { - new_text := make([]u8, document.used_text * 2); - copy(new_text, change.text); - delete(document.text); - document.text = new_text; + new_text := make([]u8, document.used_text * 2) + copy(new_text, change.text) + delete(document.text) + document.text = new_text } else { - copy(document.text, change.text); + copy(document.text, change.text) } } } //log.info(string(document.text[:document.used_text])); - return document_refresh(document, config, writer); + return document_refresh(document, config, writer) } document_close :: proc(uri_string: string) -> common.Error { - log.infof("document_close: %v", uri_string); + log.infof("document_close: %v", uri_string) - uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator); + uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator) if !parsed_ok { - return .ParseError; + return .ParseError } - document := &document_storage.documents[uri.path]; + document := &document_storage.documents[uri.path] if document == nil || !document.client_owned { - log.errorf("Client called close on a document that was never opened: %v ", document.uri.path); - return .InvalidRequest; + log.errorf("Client called close on a document that was never opened: %v ", document.uri.path) + return .InvalidRequest } - free_all(common.scratch_allocator(document.allocator)); - document_free_allocator(document.allocator); + free_all(common.scratch_allocator(document.allocator)) + document_free_allocator(document.allocator) - document.allocator = nil; - document.client_owned = false; + document.allocator = nil + document.client_owned = false - common.delete_uri(document.uri); + common.delete_uri(document.uri) - delete(document.text); + delete(document.text) - document.used_text = 0; + document.used_text = 0 - return .None; + return .None } document_refresh :: proc(document: ^common.Document, config: ^common.Config, writer: ^Writer) -> common.Error { - errors, ok := parse_document(document, config); + errors, ok := parse_document(document, config) if !ok { - return .ParseError; + return .ParseError } if writer != nil && len(errors) > 0 { - document.diagnosed_errors = true; + document.diagnosed_errors = true params := NotificationPublishDiagnosticsParams { uri = document.uri.uri, diagnostics = make([]Diagnostic, len(errors), context.temp_allocator), - }; + } for error, i in errors { params.diagnostics[i] = Diagnostic { @@ -268,16 +268,16 @@ document_refresh :: proc(document: ^common.Document, config: ^common.Config, wri severity = DiagnosticSeverity.Error, code = "test", message = error.message, - }; + } } notifaction := Notification { jsonrpc = "2.0", method = "textDocument/publishDiagnostics", params = params, - }; + } - send_notification(notifaction, writer); + send_notification(notifaction, writer) } if writer != nil && len(errors) == 0 { @@ -291,31 +291,31 @@ document_refresh :: proc(document: ^common.Document, config: ^common.Config, wri uri = document.uri.uri, diagnostics = make([]Diagnostic, len(errors), context.temp_allocator), }, - }; + } - 
document.diagnosed_errors = false; + document.diagnosed_errors = false - send_notification(notifaction, writer); + send_notification(notifaction, writer) } } //We only resolve the entire file, if we are dealing with the heavy features that require the entire file resolved. //This gives the user a choice to use "fast mode" with only completion and gotos. if config.enable_semantic_tokens || config.enable_inlay_hints { - resolve_entire_file(document); + resolve_entire_file(document) } - return .None; + return .None } -current_errors: [dynamic]ParserError; +current_errors: [dynamic]ParserError parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) { error := ParserError { line = pos.line,column = pos.column,file = pos.file, offset = pos.offset,message = fmt.tprintf(msg, ..args), - }; - append(¤t_errors, error); + } + append(¤t_errors, error) } parse_document :: proc(document: ^common.Document, config: ^common.Config) -> ([]ParserError, bool) { @@ -323,92 +323,92 @@ parse_document :: proc(document: ^common.Document, config: ^common.Config) -> ([ err = parser_error_handler, warn = common.parser_warning_handler, flags = {.Optional_Semicolons}, - }; + } - current_errors = make([dynamic]ParserError, context.temp_allocator); + current_errors = make([dynamic]ParserError, context.temp_allocator) - free_all(common.scratch_allocator(document.allocator)); + free_all(common.scratch_allocator(document.allocator)) - context.allocator = common.scratch_allocator(document.allocator); + context.allocator = common.scratch_allocator(document.allocator) //have to cheat the parser since it really wants to parse an entire package with the new changes... - pkg := new(ast.Package); - pkg.kind = .Normal; - pkg.fullpath = document.uri.path; + pkg := new(ast.Package) + pkg.kind = .Normal + pkg.fullpath = document.uri.path document.ast = ast.File { fullpath = document.uri.path, src = string(document.text[:document.used_text]), pkg = pkg, - }; + } - parser.parse_file(&p, &document.ast); + parser.parse_file(&p, &document.ast) - imports := make([dynamic]common.Package); + imports := make([dynamic]common.Package) - when ODIN_OS == "windows" { - document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator)); + when ODIN_OS == .Windows { + document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator)) } else { - document.package_name = path.dir(document.uri.path); + document.package_name = path.dir(document.uri.path) } for imp, index in document.ast.imports { if i := strings.index(imp.fullpath, "\""); i == -1 { - continue; + continue } //collection specified if i := strings.index(imp.fullpath, ":"); i != -1 && i > 1 && i < len(imp.fullpath) - 1 { if len(imp.fullpath) < 2 { - continue; + continue } - collection := imp.fullpath[1:i]; - p := imp.fullpath[i + 1:len(imp.fullpath) - 1]; + collection := imp.fullpath[1:i] + p := imp.fullpath[i + 1:len(imp.fullpath) - 1] - dir, ok := config.collections[collection]; + dir, ok := config.collections[collection] if !ok { - continue; + continue } - import_: common.Package; + import_: common.Package - when ODIN_OS == "windows" { - import_.name = strings.clone(path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator)); + when ODIN_OS == .Windows { + import_.name = strings.clone(path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator)) } else { - import_.name = strings.clone(path.join(elems = {dir, p}, allocator = 
context.temp_allocator)); + import_.name = strings.clone(path.join(elems = {dir, p}, allocator = context.temp_allocator)) } if imp.name.text != "" { - import_.base = imp.name.text; + import_.base = imp.name.text } else { - import_.base = path.base(import_.name, false); + import_.base = path.base(import_.name, false) } - append(&imports, import_); + append(&imports, import_) } else { //relative if len(imp.fullpath) < 2 { - continue; + continue } - import_: common.Package; - import_.name = path.join(elems = {document.package_name, imp.fullpath[1:len(imp.fullpath) - 1]}, allocator = context.temp_allocator); - import_.name = path.clean(import_.name); + import_: common.Package + import_.name = path.join(elems = {document.package_name, imp.fullpath[1:len(imp.fullpath) - 1]}, allocator = context.temp_allocator) + import_.name = path.clean(import_.name) if imp.name.text != "" { - import_.base = imp.name.text; + import_.base = imp.name.text } else { - import_.base = path.base(import_.name, false); + import_.base = path.base(import_.name, false) } - append(&imports, import_); + append(&imports, import_) } } - document.imports = imports[:]; + document.imports = imports[:] - return current_errors[:], true; + return current_errors[:], true } diff --git a/src/server/format.odin b/src/server/format.odin index 1bf4892..5829793 100644 --- a/src/server/format.odin +++ b/src/server/format.odin @@ -19,39 +19,39 @@ DocumentFormattingParams :: struct { get_complete_format :: proc(document: ^common.Document, config: ^common.Config) -> ([]TextEdit, bool) { - style := printer.default_style; - style.max_characters = config.formatter.characters; - style.tabs = config.formatter.tabs; + style := printer.default_style + style.max_characters = config.formatter.characters + style.tabs = config.formatter.tabs - prnt := printer.make_printer(style, context.temp_allocator); + prnt := printer.make_printer(style, context.temp_allocator) if document.ast.syntax_error_count > 0 { - return {}, true; + return {}, true } if len(document.text) == 0 { - return {}, true; + return {}, true } - src := printer.print(&prnt, &document.ast); + src := printer.print(&prnt, &document.ast) - end_line := 0; - end_charcter := 0; + end_line := 0 + end_charcter := 0 - last := document.text[0]; - line := 0; + last := document.text[0] + line := 0 for current_index := 0; current_index < len(document.text); current_index += 1 { - current := document.text[current_index]; + current := document.text[current_index] if last == '\r' && current == '\n' { - line += 1; - current_index += 1; + line += 1 + current_index += 1 } else if current == '\n' { - line += 1; + line += 1 } - last = current; + last = current } edit := TextEdit { @@ -66,11 +66,11 @@ get_complete_format :: proc(document: ^common.Document, config: ^common.Config) line = line+1, }, }, - }; + } - edits := make([dynamic]TextEdit, context.temp_allocator); + edits := make([dynamic]TextEdit, context.temp_allocator) - append(&edits, edit); + append(&edits, edit) - return edits[:], true; + return edits[:], true } diff --git a/src/server/hover.odin b/src/server/hover.odin index e7c5dba..5dd6acf 100644 --- a/src/server/hover.odin +++ b/src/server/hover.odin @@ -18,158 +18,158 @@ import "shared:index" import "shared:analysis" write_hover_content :: proc(ast_context: ^analysis.AstContext, symbol: index.Symbol) -> MarkupContent { - using analysis; + using analysis - content: MarkupContent; + content: MarkupContent - symbol := symbol; + symbol := symbol if untyped, ok := 
symbol.value.(index.SymbolUntypedValue); ok { switch untyped.type { - case .String: symbol.signature = "string"; - case .Bool: symbol.signature = "bool"; - case .Float: symbol.signature = "float"; - case .Integer: symbol.signature = "int"; + case .String: symbol.signature = "string" + case .Bool: symbol.signature = "bool" + case .Float: symbol.signature = "float" + case .Integer: symbol.signature = "int" } } - build_procedure_symbol_signature(&symbol); + build_procedure_symbol_signature(&symbol) - cat := concatenate_symbol_information(ast_context, symbol, false); + cat := concatenate_symbol_information(ast_context, symbol, false) if cat != "" { - content.kind = "markdown"; - content.value = fmt.tprintf("```odin\n %v\n```\n%v", cat, symbol.doc); + content.kind = "markdown" + content.value = fmt.tprintf("```odin\n %v\n```\n%v", cat, symbol.doc) } else { - content.kind = "plaintext"; + content.kind = "plaintext" } - return content; + return content } get_hover_information :: proc(document: ^common.Document, position: common.Position) -> (Hover, bool) { - using analysis; + using analysis hover := Hover { contents = { kind = "plaintext", }, - }; + } - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - position_context, ok := get_document_position_context(document, position, .Hover); + position_context, ok := get_document_position_context(document, position, .Hover) - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) if position_context.function != nil { - get_locals(document.ast, position_context.function, &ast_context, &position_context); + get_locals(document.ast, position_context.function, &ast_context, &position_context) } if position_context.identifier != nil { - if ident, ok := position_context.identifier.derived.(ast.Ident); ok { + if ident, ok := position_context.identifier.derived.(^ast.Ident); ok { if _, ok := common.keyword_map[ident.name]; ok { - hover.contents.kind = "plaintext"; - hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src); - return hover, true; + hover.contents.kind = "plaintext" + hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src) + return hover, true } } } if position_context.selector != nil && position_context.identifier != nil { - hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src); + hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src) - ast_context.use_locals = true; - ast_context.use_globals = true; - ast_context.current_package = ast_context.document_package; + ast_context.use_locals = true + ast_context.use_globals = true + ast_context.current_package = ast_context.document_package //if the base selector is the client wants to go to. 
- if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil { + if base, ok := position_context.selector.derived.(^ast.Ident); ok && position_context.identifier != nil { - ident := position_context.identifier.derived.(ast.Ident); + ident := position_context.identifier.derived.(^ast.Ident)^ if ident.name == base.name { if resolved, ok := resolve_type_identifier(&ast_context, ident); ok { - resolved.signature = get_signature(&ast_context, ident, resolved); - resolved.name = ident.name; + resolved.signature = get_signature(&ast_context, ident, resolved) + resolved.name = ident.name if resolved.type == .Variable { - resolved.pkg = ast_context.document_package; + resolved.pkg = ast_context.document_package } - hover.contents = write_hover_content(&ast_context, resolved); - return hover, true; + hover.contents = write_hover_content(&ast_context, resolved) + return hover, true } } } - selector: index.Symbol; - selector, ok = resolve_type_expression(&ast_context, position_context.selector); + selector: index.Symbol + selector, ok = resolve_type_expression(&ast_context, position_context.selector) if !ok { - return hover, true; + return hover, true } - field: string; + field: string if position_context.field != nil { - switch v in position_context.field.derived { - case ast.Ident: - field = v.name; + #partial switch v in position_context.field.derived { + case ^ast.Ident: + field = v.name } } - hover.range = common.get_token_range(position_context.identifier^, document.ast.src); + hover.range = common.get_token_range(position_context.identifier^, document.ast.src) #partial switch v in selector.value { case index.SymbolStructValue: for name, i in v.names { if strings.compare(name, field) == 0 { if symbol, ok := resolve_type_expression(&ast_context, v.types[i]); ok { - symbol.name = name; //TODO refractor - never set symbol name after creation - change writer_hover_content - symbol.pkg = selector.name; - symbol.signature = common.node_to_string(v.types[i]); - hover.contents = write_hover_content(&ast_context, symbol); - return hover, true; + symbol.name = name //TODO refractor - never set symbol name after creation - change writer_hover_content + symbol.pkg = selector.name + symbol.signature = common.node_to_string(v.types[i]) + hover.contents = write_hover_content(&ast_context, symbol) + return hover, true } } } case index.SymbolPackageValue: if position_context.field != nil { - if ident, ok := position_context.field.derived.(ast.Ident); ok { - ast_context.current_package = selector.pkg; - if symbol, ok := resolve_type_identifier(&ast_context, ident); ok { - hover.contents = write_hover_content(&ast_context, symbol); - return hover, true; + if ident, ok := position_context.field.derived.(^ast.Ident); ok { + ast_context.current_package = selector.pkg + if symbol, ok := resolve_type_identifier(&ast_context, ident^); ok { + hover.contents = write_hover_content(&ast_context, symbol) + return hover, true } } } } } else if position_context.identifier != nil { - ast_context.use_locals = true; - ast_context.use_globals = true; - ast_context.current_package = ast_context.document_package; + ast_context.use_locals = true + ast_context.use_globals = true + ast_context.current_package = ast_context.document_package - ident := position_context.identifier.derived.(ast.Ident); + ident := position_context.identifier.derived.(^ast.Ident)^ - hover.range = common.get_token_range(position_context.identifier^, document.ast.src); + hover.range = 
common.get_token_range(position_context.identifier^, document.ast.src) if resolved, ok := resolve_type_identifier(&ast_context, ident); ok { - resolved.signature = get_signature(&ast_context, ident, resolved); - resolved.name = ident.name; + resolved.signature = get_signature(&ast_context, ident, resolved) + resolved.name = ident.name if resolved.type == .Variable { - resolved.pkg = ast_context.document_package; + resolved.pkg = ast_context.document_package } - hover.contents = write_hover_content(&ast_context, resolved); - return hover, true; + hover.contents = write_hover_content(&ast_context, resolved) + return hover, true } } - return hover, true; + return hover, true } diff --git a/src/server/inlay_hints.odin b/src/server/inlay_hints.odin index 1e7d2c3..ae3d9a0 100644 --- a/src/server/inlay_hints.odin +++ b/src/server/inlay_hints.odin @@ -9,11 +9,11 @@ import "shared:index" //document get_inlay_hints :: proc(document: ^common.Document, symbols: map[uintptr]index.Symbol) -> ([]InlayHint, bool) { - using analysis; + using analysis - hints := make([dynamic]InlayHint, context.temp_allocator); + hints := make([dynamic]InlayHint, context.temp_allocator) - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) Visit_Data :: struct { calls: [dynamic]^ast.Node, @@ -21,20 +21,20 @@ get_inlay_hints :: proc(document: ^common.Document, symbols: map[uintptr]index.S data := Visit_Data { calls = make([dynamic]^ast.Node, context.temp_allocator), - }; + } visit :: proc(visitor: ^ast.Visitor, node: ^ast.Node) -> ^ast.Visitor { if node == nil || visitor == nil { - return nil; + return nil } - data := cast(^Visit_Data)visitor.data; + data := cast(^Visit_Data)visitor.data - if call, ok := node.derived.(ast.Call_Expr); ok { - append(&data.calls, node); + if call, ok := node.derived.(^ast.Call_Expr); ok { + append(&data.calls, node) } - return visitor; + return visitor } visitor := ast.Visitor { @@ -43,17 +43,17 @@ get_inlay_hints :: proc(document: ^common.Document, symbols: map[uintptr]index.S } for decl in document.ast.decls { - ast.walk(&visitor, decl); + ast.walk(&visitor, decl) } loop: for node_call in &data.calls { symbol_arg_count := 0 - call := node_call.derived.(ast.Call_Expr); + call := node_call.derived.(^ast.Call_Expr) for arg in call.args { - if _, ok := arg.derived.(ast.Field); ok { - continue loop; + if _, ok := arg.derived.(^ast.Field); ok { + continue loop } } @@ -62,23 +62,23 @@ get_inlay_hints :: proc(document: ^common.Document, symbols: map[uintptr]index.S for arg in symbol_call.arg_types { for name in arg.names { if symbol_arg_count >= len(call.args) { - continue loop; + continue loop } - if ident, ok := name.derived.(ast.Ident); ok { + if ident, ok := name.derived.(^ast.Ident); ok { hint := InlayHint { kind = "parameter", label = fmt.tprintf("%v = ", ident.name), range = common.get_token_range(call.args[symbol_arg_count], string(document.text)), } - append(&hints, hint); + append(&hints, hint) } - symbol_arg_count += 1; + symbol_arg_count += 1 } } } } } - return hints[:], true; + return hints[:], true }
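
For readers skimming the inlay-hint changes above, a minimal standalone sketch of the ast.Visitor pattern they rely on may be easier to digest than the hunk itself. The snippet below is illustrative only (the names count_calls and the demo source string are assumptions, and it assumes the current core:odin/parser entry point parser.default_parser); it parses a small piece of Odin and counts call expressions the same way get_inlay_hints collects ^ast.Call_Expr nodes.

package visitor_sketch

import "core:fmt"
import "core:odin/ast"
import "core:odin/parser"

// Walk every declaration in a parsed file and count the call expressions,
// mirroring how get_inlay_hints gathers ^ast.Call_Expr nodes above.
count_calls :: proc(file: ^ast.File) -> int {
	count := 0

	visit :: proc(visitor: ^ast.Visitor, node: ^ast.Node) -> ^ast.Visitor {
		if visitor == nil || node == nil {
			return nil
		}
		total := cast(^int)visitor.data
		if _, is_call := node.derived.(^ast.Call_Expr); is_call {
			total^ += 1
		}
		return visitor
	}

	visitor := ast.Visitor {
		visit = visit,
		data  = &count,
	}

	for decl in file.decls {
		ast.walk(&visitor, decl)
	}

	return count
}

main :: proc() {
	src := `package demo
main :: proc() { fmt.println(add(1, 2)) }
add :: proc(a, b: int) -> int { return a + b }
`
	// Cheat the parser the same way parse_document does: hand it a synthetic
	// package so a single file can be parsed on its own.
	pkg := new(ast.Package)
	pkg.kind = .Normal

	file := ast.File {
		fullpath = "demo.odin",
		src      = src,
		pkg      = pkg,
	}

	p := parser.default_parser()

	if parser.parse_file(&p, &file) {
		fmt.println("call expressions:", count_calls(&file))
	}
}

Returning the visitor keeps the walk descending, and returning nil stops it, which is why the inlay-hint walker above bails out with nil when it is handed a nil node.
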
\ No newline at end of file diff --git a/src/server/lens.odin b/src/server/lens.odin index 9d80d0c..93827f5 100644 --- a/src/server/lens.odin +++ b/src/server/lens.odin @@ -22,22 +22,22 @@ CodeLens :: struct { get_code_lenses :: proc(document: ^common.Document, position: common.Position) -> ([]CodeLens, bool) { - using analysis; + using analysis - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) - symbols := make([dynamic]CodeLens, context.temp_allocator); + symbols := make([dynamic]CodeLens, context.temp_allocator) if len(document.ast.decls) == 0 { - return {}, true; + return {}, true } for name, global in ast_context.globals { - if proc_lit, ok := global.expr.derived.(ast.Proc_Lit); ok { + if proc_lit, ok := global.expr.derived.(^ast.Proc_Lit); ok { @@ -47,7 +47,7 @@ get_code_lenses :: proc(document: ^common.Document, position: common.Position) - } - return {}, false; + return {}, false } diff --git a/src/server/log.odin b/src/server/log.odin index cf83979..7ed3da9 100644 --- a/src/server/log.odin +++ b/src/server/log.odin @@ -12,27 +12,27 @@ Default_Console_Logger_Opts :: log.Options { .Short_File_Path, .Line, .Procedure, -} | log.Full_Timestamp_Opts; +} | log.Full_Timestamp_Opts Lsp_Logger_Data :: struct { writer: ^Writer, } create_lsp_logger :: proc(writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger { - data := new(Lsp_Logger_Data); - data.writer = writer; - return log.Logger {lsp_logger_proc, data, lowest, opt}; + data := new(Lsp_Logger_Data) + data.writer = writer + return log.Logger {lsp_logger_proc, data, lowest, opt} } destroy_lsp_logger :: proc(log: ^log.Logger) { - free(log.data); + free(log.data) } lsp_logger_proc :: proc(logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) { - data := cast(^Lsp_Logger_Data)logger_data; + data := cast(^Lsp_Logger_Data)logger_data - message := fmt.tprintf("%s", text); + message := fmt.tprintf("%s", text) notification := Notification { jsonrpc = "2.0", @@ -41,7 +41,7 @@ lsp_logger_proc :: proc(logger_data: rawptr, level: log.Level, text: string, opt type = 1, message = message, }, - }; + } - send_notification(notification, data.writer); + send_notification(notification, data.writer) } diff --git a/src/server/reader.odin b/src/server/reader.odin index a05e147..be0d4ae 100644 --- a/src/server/reader.odin +++ b/src/server/reader.odin @@ -4,7 +4,7 @@ import "core:os" import "core:mem" import "core:strings" -ReaderFn :: proc(_: rawptr, _: []byte) -> (int, int); +ReaderFn :: proc(_: rawptr, _: []byte) -> (int, int) Reader :: struct { reader_fn: ReaderFn, @@ -12,61 +12,61 @@ Reader :: struct { } make_reader :: proc(reader_fn: ReaderFn, reader_context: rawptr) -> Reader { - return Reader {reader_context = reader_context, reader_fn = reader_fn}; + return Reader {reader_context = reader_context, reader_fn = reader_fn} } read_u8 :: proc(reader: ^Reader) -> (u8, bool) { - value: [1]byte; + value: [1]byte - read, err := reader.reader_fn(reader.reader_context, value[:]); + read, err := reader.reader_fn(reader.reader_context, value[:]) if (err != 0 || read != 1) { - return 0, false; + return 0, false } - return value[0], true; + return value[0], true } read_until_delimiter :: proc(reader: ^Reader, delimiter: u8, builder: 
^strings.Builder) -> bool { for true { - value, success := read_u8(reader); + value, success := read_u8(reader) if (!success) { - return false; + return false } - strings.write_byte(builder, value); + strings.write_byte(builder, value) if (value == delimiter) { - break; + break } } - return true; + return true } read_sized :: proc(reader: ^Reader, data: []u8) -> (ok: bool) { - ok = true; - size := len(data); - n := 0; + ok = true + size := len(data) + n := 0 for n < size && ok { - read: int; - err_code: int; + read: int + err_code: int - read, err_code = reader.reader_fn(reader.reader_context, data[n:]); + read, err_code = reader.reader_fn(reader.reader_context, data[n:]) - ok = err_code == 0; + ok = err_code == 0 - n += read; + n += read } if n >= size { - ok = true; + ok = true } - return; + return } diff --git a/src/server/requests.odin b/src/server/requests.odin index b06f4cd..00af132 100644 --- a/src/server/requests.odin +++ b/src/server/requests.odin @@ -63,7 +63,7 @@ make_response_message :: proc (id: RequestId, params: ResponseParams) -> Respons jsonrpc = "2.0", id = id, result = params, - }; + } } make_response_message_error :: proc (id: RequestId, error: ResponseError) -> ResponseMessageError { @@ -72,7 +72,7 @@ make_response_message_error :: proc (id: RequestId, error: ResponseError) -> Res jsonrpc = "2.0", id = id, error = error, - }; + } } RequestThreadData :: struct { @@ -87,157 +87,157 @@ Request :: struct { } -requests_sempahore: sync.Semaphore; -requests_mutex: sync.Mutex; +requests_sempahore: sync.Semaphore +requests_mutex: sync.Mutex -requests: [dynamic]Request; -deletings: [dynamic]Request; +requests: [dynamic]Request +deletings: [dynamic]Request thread_request_main :: proc(data: rawptr) { - request_data := cast(^RequestThreadData)data; + request_data := cast(^RequestThreadData)data for common.config.running { - header, success := read_and_parse_header(request_data.reader); + header, success := read_and_parse_header(request_data.reader) if (!success) { - log.error("Failed to read and parse header"); - return; + log.error("Failed to read and parse header") + return } - value: json.Value; - value, success = read_and_parse_body(request_data.reader, header); + value: json.Value + value, success = read_and_parse_body(request_data.reader, header) if (!success) { - log.error("Failed to read and parse body"); - return; + log.error("Failed to read and parse body") + return } - root, ok := value.(json.Object); + root, ok := value.(json.Object) if !ok { - log.error("No root object"); - return; + log.error("No root object") + return } - id: RequestId; - id_value: json.Value; - id_value, ok = root["id"]; + id: RequestId + id_value: json.Value + id_value, ok = root["id"] if ok { #partial switch v in id_value { case json.String: - id = v; + id = v case json.Integer: - id = v; + id = v case: - id = 0; + id = 0 } } - sync.mutex_lock(&requests_mutex); + sync.mutex_lock(&requests_mutex) - method := root["method"].(json.String); + method := root["method"].(json.String) if method == "$/cancelRequest" { - append(&deletings, Request { id = id }); + append(&deletings, Request { id = id }) } else if method in notification_map { - append(&requests, Request { value = root, is_notification = true}); - sync.semaphore_post(&requests_sempahore); + append(&requests, Request { value = root, is_notification = true}) + sync.semaphore_post(&requests_sempahore) } else { - append(&requests, Request { id = id, value = root}); - sync.semaphore_post(&requests_sempahore); + append(&requests, Request { id = id, 
value = root}) + sync.semaphore_post(&requests_sempahore) } - sync.mutex_unlock(&requests_mutex); + sync.mutex_unlock(&requests_mutex) - free_all(context.temp_allocator); + free_all(context.temp_allocator) } } read_and_parse_header :: proc (reader: ^Reader) -> (Header, bool) { - header: Header; + header: Header - builder := strings.make_builder(context.temp_allocator); + builder := strings.make_builder(context.temp_allocator) - found_content_length := false; + found_content_length := false for true { - strings.reset_builder(&builder); + strings.reset_builder(&builder) if !read_until_delimiter(reader, '\n', &builder) { - log.error("Failed to read with delimiter"); - return header, false; + log.error("Failed to read with delimiter") + return header, false } - message := strings.to_string(builder); + message := strings.to_string(builder) if len(message) == 0 || message[len(message) - 2] != '\r' { - log.error("No carriage return"); - return header, false; + log.error("No carriage return") + return header, false } if len(message) == 2 { - break; + break } - index := strings.last_index_byte(message, ':'); + index := strings.last_index_byte(message, ':') if index == -1 { - log.error("Failed to find semicolon"); - return header, false; + log.error("Failed to find semicolon") + return header, false } - header_name := message[0:index]; - header_value := message[len(header_name) + 2:len(message) - 2]; + header_name := message[0:index] + header_value := message[len(header_name) + 2:len(message) - 2] if strings.compare(header_name, "Content-Length") == 0 { if len(header_value) == 0 { - log.error("Header value has no length"); - return header, false; + log.error("Header value has no length") + return header, false } - value, ok := strconv.parse_int(header_value); + value, ok := strconv.parse_int(header_value) if !ok { - log.error("Failed to parse content length value"); - return header, false; + log.error("Failed to parse content length value") + return header, false } - header.content_length = value; + header.content_length = value - found_content_length = true; + found_content_length = true } else if strings.compare(header_name, "Content-Type") == 0 { if len(header_value) == 0 { - log.error("Header value has no length"); - return header, false; + log.error("Header value has no length") + return header, false } } } - return header, found_content_length; + return header, found_content_length } read_and_parse_body :: proc (reader: ^Reader, header: Header) -> (json.Value, bool) { - value: json.Value; + value: json.Value - data := make([]u8, header.content_length, context.temp_allocator); + data := make([]u8, header.content_length, context.temp_allocator) if !read_sized(reader, data) { - log.error("Failed to read body"); - return value, false; + log.error("Failed to read body") + return value, false } - err: json.Error; + err: json.Error - value, err = json.parse(data = data, allocator = context.allocator, parse_integers = true); + value, err = json.parse(data = data, allocator = context.allocator, parse_integers = true) if (err != json.Error.None) { - log.error("Failed to parse body"); - return value, false; + log.error("Failed to parse body") + return value, false } - return value, true; + return value, true } call_map : map [string] proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error = @@ -260,7 +260,7 @@ call_map : map [string] proc(json.Value, RequestId, ^common.Config, ^Writer) -> "textDocument/formatting" = request_format_document, "odin/inlayHints" = request_inlay_hint, 
"textDocument/documentLink" = request_document_links, -}; +} notification_map: map [string] bool = { "textDocument/didOpen" = true, @@ -271,52 +271,52 @@ notification_map: map [string] bool = { } consume_requests :: proc (config: ^common.Config, writer: ^Writer) -> bool { - temp_requests := make([dynamic]Request, 0, context.temp_allocator); + temp_requests := make([dynamic]Request, 0, context.temp_allocator) - sync.mutex_lock(&requests_mutex); + sync.mutex_lock(&requests_mutex) for d in deletings { - delete_index := -1; + delete_index := -1 for request, i in requests { if request.id == d.id { - delete_index := i; - break; + delete_index := i + break } } if delete_index != -1 { - cancel(requests[delete_index].value, requests[delete_index].id, writer, config); - ordered_remove(&requests, delete_index); + cancel(requests[delete_index].value, requests[delete_index].id, writer, config) + ordered_remove(&requests, delete_index) } } for request in requests { - append(&temp_requests, request); + append(&temp_requests, request) } - sync.mutex_unlock(&requests_mutex); + sync.mutex_unlock(&requests_mutex) - request_index := 0; + request_index := 0 for ; request_index < len(temp_requests); request_index += 1 { - request := temp_requests[request_index]; - call(request.value, request.id, writer, config); + request := temp_requests[request_index] + call(request.value, request.id, writer, config) } - sync.mutex_lock(&requests_mutex); + sync.mutex_lock(&requests_mutex) for i := 0; i < request_index; i += 1 { - pop_front(&requests); + pop_front(&requests) } - sync.mutex_unlock(&requests_mutex); + sync.mutex_unlock(&requests_mutex) if request_index != len(temp_requests) { - sync.semaphore_post(&requests_sempahore); + sync.semaphore_post(&requests_sempahore) } - sync.semaphore_wait_for(&requests_sempahore); + sync.semaphore_wait_for(&requests_sempahore) - return true; + return true } @@ -324,47 +324,47 @@ cancel :: proc(value: json.Value, id: RequestId, writer: ^Writer, config: ^commo response := make_response_message( id = id, params = ResponseParams {}, - ); + ) - send_response(response, writer); + send_response(response, writer) } call :: proc(value: json.Value, id: RequestId, writer: ^Writer, config: ^common.Config) { - root := value.(json.Object); - method := root["method"].(json.String); + root := value.(json.Object) + method := root["method"].(json.String) if fn, ok := call_map[method]; !ok { - response := make_response_message_error(id = id, error = ResponseError {code = .MethodNotFound, message = ""}); - send_error(response, writer); + response := make_response_message_error(id = id, error = ResponseError {code = .MethodNotFound, message = ""}) + send_error(response, writer) } else { - err := fn(root["params"], id, config, writer); + err := fn(root["params"], id, config, writer) if err != .None { response := make_response_message_error( id = id, error = ResponseError {code = err, message = ""}, - ); - send_error(response, writer); + ) + send_error(response, writer) } } } request_initialize :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - initialize_params: RequestInitializeParams; + initialize_params: RequestInitializeParams if unmarshal(params, initialize_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - config.workspace_folders = 
make([dynamic]common.WorkspaceFolder); + config.workspace_folders = make([dynamic]common.WorkspaceFolder) for s in initialize_params.workspaceFolders { - append(&config.workspace_folders, s); + append(&config.workspace_folders, s) } read_ols_config :: proc(file: string, config: ^common.Config, uri: common.Uri) { @@ -381,111 +381,111 @@ request_initialize :: proc (params: json.Value, id: RequestId, config: ^common.C if unmarshal(value, ols_config, context.temp_allocator) == .None { - config.thread_count = ols_config.thread_pool_count; - config.enable_document_symbols = ols_config.enable_document_symbols; - config.enable_hover = ols_config.enable_hover; + config.thread_count = ols_config.thread_pool_count + config.enable_document_symbols = ols_config.enable_document_symbols + config.enable_hover = ols_config.enable_hover config.enable_format = true // ols_config.enable_format; - config.enable_semantic_tokens = ols_config.enable_semantic_tokens; - config.enable_procedure_context = ols_config.enable_procedure_context; - config.enable_snippets = ols_config.enable_snippets; - config.verbose = ols_config.verbose; - config.file_log = ols_config.file_log; - config.formatter = ols_config.formatter; - config.odin_command = strings.clone(ols_config.odin_command, context.allocator); - config.checker_args = ols_config.checker_args; - config.enable_inlay_hints = ols_config.enable_inlay_hints; + config.enable_semantic_tokens = ols_config.enable_semantic_tokens + config.enable_procedure_context = ols_config.enable_procedure_context + config.enable_snippets = ols_config.enable_snippets + config.verbose = ols_config.verbose + config.file_log = ols_config.file_log + config.formatter = ols_config.formatter + config.odin_command = strings.clone(ols_config.odin_command, context.allocator) + config.checker_args = ols_config.checker_args + config.enable_inlay_hints = ols_config.enable_inlay_hints for p in ols_config.collections { - forward_path, _ := filepath.to_slash(p.path, context.temp_allocator); + forward_path, _ := filepath.to_slash(p.path, context.temp_allocator) if filepath.is_abs(p.path) { - config.collections[strings.clone(p.name)] = strings.clone(forward_path); + config.collections[strings.clone(p.name)] = strings.clone(forward_path) } else { - config.collections[strings.clone(p.name)] = path.join(elems = {uri.path, forward_path}, allocator = context.allocator); + config.collections[strings.clone(p.name)] = path.join(elems = {uri.path, forward_path}, allocator = context.allocator) } } if ok := "" in config.collections; !ok { - config.collections[""] = strings.clone(uri.path); + config.collections[""] = strings.clone(uri.path) } } else { - log.errorf("Failed to unmarshal %v", file); + log.errorf("Failed to unmarshal %v", file) } } else { - log.errorf("Failed to parse json %v", file); + log.errorf("Failed to parse json %v", file) } } else { - log.errorf("Failed to read/find %v", file); + log.errorf("Failed to read/find %v", file) } } - project_uri := ""; + project_uri := "" if len(config.workspace_folders) > 0 { - project_uri = config.workspace_folders[0].uri; + project_uri = config.workspace_folders[0].uri } else if initialize_params.rootUri != "" { - project_uri = initialize_params.rootUri; + project_uri = initialize_params.rootUri } if uri, ok := common.parse_uri(project_uri, context.temp_allocator); ok { - ols_config_path := path.join(elems = {uri.path, "ols.json"}, allocator = context.temp_allocator); - read_ols_config(ols_config_path, config, uri); + ols_config_path := path.join(elems = {uri.path, 
"ols.json"}, allocator = context.temp_allocator) + read_ols_config(ols_config_path, config, uri) } - when ODIN_OS == "windows" { - odin_core_env := os.get_env("ODIN_ROOT", context.temp_allocator); + when ODIN_OS == .Windows { + odin_core_env := os.get_env("ODIN_ROOT", context.temp_allocator) } else { - odin_core_env, _ := os.getenv("ODIN_ROOT"); + odin_core_env, _ := os.getenv("ODIN_ROOT") } if "core" not_in config.collections && odin_core_env != "" { - forward_path, _ := filepath.to_slash(odin_core_env, context.temp_allocator); - config.collections["core"] = path.join(elems = {forward_path, "core"}, allocator = context.allocator); + forward_path, _ := filepath.to_slash(odin_core_env, context.temp_allocator) + config.collections["core"] = path.join(elems = {forward_path, "core"}, allocator = context.allocator) } if "vendor" not_in config.collections && odin_core_env != "" { - forward_path, _ := filepath.to_slash(odin_core_env, context.temp_allocator); - config.collections["vendor"] = path.join(elems = {forward_path, "vendor"}, allocator = context.allocator); + forward_path, _ := filepath.to_slash(odin_core_env, context.temp_allocator) + config.collections["vendor"] = path.join(elems = {forward_path, "vendor"}, allocator = context.allocator) } for format in initialize_params.capabilities.textDocument.hover.contentFormat { if format == "markdown" { - config.hover_support_md = true; + config.hover_support_md = true } } for format in initialize_params.capabilities.textDocument.completion.documentationFormat { if format == "markdown" { - config.completion_support_md = true; + config.completion_support_md = true } } - config.enable_snippets &= initialize_params.capabilities.textDocument.completion.completionItem.snippetSupport; - config.signature_offset_support = initialize_params.capabilities.textDocument.signatureHelp.signatureInformation.parameterInformation.labelOffsetSupport; + config.enable_snippets &= initialize_params.capabilities.textDocument.completion.completionItem.snippetSupport + config.signature_offset_support = initialize_params.capabilities.textDocument.signatureHelp.signatureInformation.parameterInformation.labelOffsetSupport - completionTriggerCharacters := []string {".", ">", "#", "\"", "/", ":"}; - signatureTriggerCharacters := []string {"(", ","}; - signatureRetriggerCharacters := []string {","}; + completionTriggerCharacters := []string {".", ">", "#", "\"", "/", ":"} + signatureTriggerCharacters := []string {"(", ","} + signatureRetriggerCharacters := []string {","} - token_type := type_info_of(SemanticTokenTypes).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum); - token_modifier := type_info_of(SemanticTokenModifiers).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum); + token_type := type_info_of(SemanticTokenTypes).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum) + token_modifier := type_info_of(SemanticTokenModifiers).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum) - token_types := make([]string, len(token_type.names), context.temp_allocator); - token_modifiers := make([]string, len(token_modifier.names), context.temp_allocator); + token_types := make([]string, len(token_type.names), context.temp_allocator) + token_modifiers := make([]string, len(token_modifier.names), context.temp_allocator) for name, i in token_type.names { if name == "EnumMember" { - token_types[i] = "enumMember"; + token_types[i] = "enumMember" } else { - token_types[i] = strings.to_lower(name, 
context.temp_allocator); + token_types[i] = strings.to_lower(name, context.temp_allocator) } } for name, i in token_modifier.names { - token_modifiers[i] = strings.to_lower(name, context.temp_allocator); + token_modifiers[i] = strings.to_lower(name, context.temp_allocator) } response := make_response_message( @@ -523,335 +523,335 @@ request_initialize :: proc (params: json.Value, id: RequestId, config: ^common.C resolveProvider = false, }, }, - }, id = id); + }, id = id) - send_response(response, writer); + send_response(response, writer) /* Temp index here, but should be some background thread that starts the indexing */ - index.indexer.dynamic_index = index.make_memory_index(index.make_symbol_collection(context.allocator, config)); + index.indexer.dynamic_index = index.make_memory_index(index.make_symbol_collection(context.allocator, config)) - index.build_static_index(context.allocator, config); + index.build_static_index(context.allocator, config) /* Add runtime package */ if core, ok := config.collections["core"]; ok { - when ODIN_OS == "windows" { - append(&index.indexer.builtin_packages, path.join(strings.to_lower(core, context.temp_allocator), "runtime")); + when ODIN_OS == .Windows { + append(&index.indexer.builtin_packages, path.join(strings.to_lower(core, context.temp_allocator), "runtime")) } else { - append(&index.indexer.builtin_packages, path.join(core, "runtime")); + append(&index.indexer.builtin_packages, path.join(core, "runtime")) } } - log.info("Finished indexing"); + log.info("Finished indexing") - return .None; + return .None } request_initialized :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - return .None; + return .None } request_shutdown :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - response := make_response_message(params = nil, id = id); + response := make_response_message(params = nil, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_definition :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - definition_params: TextDocumentPositionParams; + definition_params: TextDocumentPositionParams if unmarshal(params, definition_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(definition_params.textDocument.uri); + document := document_get(definition_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - locations, ok2 := get_definition_location(document, definition_params.position); + locations, ok2 := get_definition_location(document, definition_params.position) if !ok2 { - log.warn("Failed to get definition location"); + log.warn("Failed to get definition location") } if len(locations) == 1 { - response := make_response_message(params = locations[0], id = id); - send_response(response, writer); + response := make_response_message(params = locations[0], id = id) + send_response(response, writer) } else { - response := make_response_message(params = locations, id = id); - send_response(response, writer); + response := make_response_message(params = locations, id = id) + send_response(response, writer) } - return .None; + return .None } request_completion :: proc (params: json.Value, id: 
RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - completition_params: CompletionParams; + completition_params: CompletionParams if unmarshal(params, completition_params, context.temp_allocator) != .None { - log.error("Failed to unmarshal completion request"); - return .ParseError; + log.error("Failed to unmarshal completion request") + return .ParseError } - document := document_get(completition_params.textDocument.uri); + document := document_get(completition_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - list: CompletionList; - list, ok = get_completion_list(document, completition_params.position, completition_params.context_); + list: CompletionList + list, ok = get_completion_list(document, completition_params.position, completition_params.context_) if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = list, id = id); + response := make_response_message(params = list, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_signature_help :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - signature_params: SignatureHelpParams; + signature_params: SignatureHelpParams if unmarshal(params, signature_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(signature_params.textDocument.uri); + document := document_get(signature_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - help: SignatureHelp; - help, ok = get_signature_information(document, signature_params.position); + help: SignatureHelp + help, ok = get_signature_information(document, signature_params.position) if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = help, id = id); + response := make_response_message(params = help, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_format_document :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - format_params: DocumentFormattingParams; + format_params: DocumentFormattingParams if unmarshal(params, format_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(format_params.textDocument.uri); + document := document_get(format_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - edit: []TextEdit; - edit, ok = get_complete_format(document, config); + edit: []TextEdit + edit, ok = get_complete_format(document, config) if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = edit, id = id); + response := make_response_message(params = edit, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } notification_exit :: proc (params: json.Value, id: 
RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - config.running = false; - return .None; + config.running = false + return .None } notification_did_open :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - log.error("Failed to parse open document notification"); - return .ParseError; + log.error("Failed to parse open document notification") + return .ParseError } - open_params: DidOpenTextDocumentParams; + open_params: DidOpenTextDocumentParams if unmarshal(params, open_params, context.allocator) != .None { - log.error("Failed to parse open document notification"); - return .ParseError; + log.error("Failed to parse open document notification") + return .ParseError } if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None { - return .InternalError; + return .InternalError } - return .None; + return .None } notification_did_change :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - change_params: DidChangeTextDocumentParams; + change_params: DidChangeTextDocumentParams if unmarshal(params, change_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer); + document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer) - return .None; + return .None } notification_did_close :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - close_params: DidCloseTextDocumentParams; + close_params: DidCloseTextDocumentParams if unmarshal(params, close_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } if n := document_close(close_params.textDocument.uri); n != .None { - return .InternalError; + return .InternalError } - return .None; + return .None } notification_did_save :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - save_params: DidSaveTextDocumentParams; + save_params: DidSaveTextDocumentParams if unmarshal(params, save_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - uri: common.Uri; + uri: common.Uri if uri, ok = common.parse_uri(save_params.textDocument.uri, context.temp_allocator); !ok { - return .ParseError; + return .ParseError } - fullpath := uri.path; + fullpath := uri.path p := parser.Parser { err = index.log_error_handler, warn = index.log_warning_handler, - }; + } - dir := filepath.base(filepath.dir(fullpath, context.temp_allocator)); + dir := filepath.base(filepath.dir(fullpath, context.temp_allocator)) - pkg := new(ast.Package); - pkg.kind = .Normal; - pkg.fullpath = fullpath; - pkg.name = dir; + pkg := new(ast.Package) + pkg.kind = .Normal + pkg.fullpath = fullpath + pkg.name = dir if dir == "runtime" { - pkg.kind = .Runtime; + pkg.kind = 
.Runtime } file := ast.File { fullpath = fullpath, src = save_params.text, pkg = pkg, - }; + } - ok = parser.parse_file(&p, &file); + ok = parser.parse_file(&p, &file) if !ok { - log.errorf("error in parse file for indexing %v", fullpath); + log.errorf("error in parse file for indexing %v", fullpath) } for key, value in index.indexer.dynamic_index.collection.symbols { - when ODIN_OS == "windows"{ - uri := strings.to_lower(save_params.textDocument.uri, context.temp_allocator); + when ODIN_OS == .Windows { + uri := strings.to_lower(save_params.textDocument.uri, context.temp_allocator) } else { - uri := save_params.textDocument.uri; + uri := save_params.textDocument.uri } if value.uri == uri { - index.free_symbol(value, context.allocator); - index.indexer.dynamic_index.collection.symbols[key] = {}; + index.free_symbol(value, context.allocator) + index.indexer.dynamic_index.collection.symbols[key] = {} } } if ret := index.collect_symbols(&index.indexer.dynamic_index.collection, file, uri.uri); ret != .None { - log.errorf("failed to collect symbols on save %v", ret); + log.errorf("failed to collect symbols on save %v", ret) } - check(uri, writer, config); + check(uri, writer, config) - return .None; + return .None } request_semantic_token_full :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - semantic_params: SemanticTokensParams; + semantic_params: SemanticTokensParams if unmarshal(params, semantic_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(semantic_params.textDocument.uri); + document := document_get(semantic_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } range := common.Range { @@ -861,184 +861,184 @@ request_semantic_token_full :: proc (params: json.Value, id: RequestId, config: end = common.Position { line = 9000000, //should be enough }, - }; + } - symbols: SemanticTokens; + symbols: SemanticTokens if config.enable_semantic_tokens { if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok { - symbols = get_semantic_tokens(document, range, cache_symbols); + symbols = get_semantic_tokens(document, range, cache_symbols) } } - response := make_response_message(params = symbols, id = id); + response := make_response_message(params = symbols, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_semantic_token_range :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .None; + return .None } - semantic_params: SemanticTokensRangeParams; + semantic_params: SemanticTokensRangeParams if unmarshal(params, semantic_params, context.temp_allocator) != .None { - return .None; + return .None } - document := document_get(semantic_params.textDocument.uri); + document := document_get(semantic_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - symbols: SemanticTokens; + symbols: SemanticTokens if config.enable_semantic_tokens { if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok { - symbols = get_semantic_tokens(document, semantic_params.range, cache_symbols); + symbols = 
get_semantic_tokens(document, semantic_params.range, cache_symbols) } } - response := make_response_message(params = symbols, id = id); + response := make_response_message(params = symbols, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_document_symbols :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - symbol_params: DocumentSymbolParams; + symbol_params: DocumentSymbolParams if unmarshal(params, symbol_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(symbol_params.textDocument.uri); + document := document_get(symbol_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - symbols := get_document_symbols(document); + symbols := get_document_symbols(document) - response := make_response_message(params = symbols, id = id); + response := make_response_message(params = symbols, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_hover :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - hover_params: HoverParams; + hover_params: HoverParams if unmarshal(params, hover_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(hover_params.textDocument.uri); + document := document_get(hover_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - hover: Hover; - hover, ok = get_hover_information(document, hover_params.position); + hover: Hover + hover, ok = get_hover_information(document, hover_params.position) if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = hover, id = id); + response := make_response_message(params = hover, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_inlay_hint :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - inlay_params: InlayParams; + inlay_params: InlayParams if unmarshal(params, inlay_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(inlay_params.textDocument.uri); + document := document_get(inlay_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - hints: []InlayHint; + hints: []InlayHint if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok { - hints, ok = get_inlay_hints(document, cache_symbols); + hints, ok = get_inlay_hints(document, cache_symbols) } if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = hints, id = id); + response := make_response_message(params = hints, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None } request_document_links :: proc (params: json.Value, id: 
RequestId, config: ^common.Config, writer: ^Writer) -> common.Error { - params_object, ok := params.(json.Object); + params_object, ok := params.(json.Object) if !ok { - return .ParseError; + return .ParseError } - link_params: DocumentLinkParams; + link_params: DocumentLinkParams if unmarshal(params, link_params, context.temp_allocator) != .None { - return .ParseError; + return .ParseError } - document := document_get(link_params.textDocument.uri); + document := document_get(link_params.textDocument.uri) if document == nil { - return .InternalError; + return .InternalError } - links: []DocumentLink; + links: []DocumentLink - links, ok = get_document_links(document); + links, ok = get_document_links(document) if !ok { - return .InternalError; + return .InternalError } - response := make_response_message(params = links, id = id); + response := make_response_message(params = links, id = id) - send_response(response, writer); + send_response(response, writer) - return .None; + return .None }
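Alongside the semicolon removal, the save handler above moves its platform check from `when ODIN_OS == "windows"` to `when ODIN_OS == .Windows`: in newer Odin cores ODIN_OS is an enum constant rather than a string. A minimal sketch of the new form, outside this patch and with a made-up constant name:

	// ODIN_OS is now an enumeration, so platform checks compare enum values.
	when ODIN_OS == .Windows {
		EXE_SUFFIX :: ".exe" // hypothetical constant, purely for illustration
	} else {
		EXE_SUFFIX :: ""
	}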
\ No newline at end of file diff --git a/src/server/response.odin b/src/server/response.odin index c132532..7c07f41 100644 --- a/src/server/response.odin +++ b/src/server/response.odin @@ -5,63 +5,63 @@ import "core:encoding/json" send_notification :: proc (notification: Notification, writer: ^Writer) -> bool { - data, error := json.marshal(notification, context.temp_allocator); + data, error := json.marshal(notification, context.temp_allocator) - header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)); + header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)) if error != .None { - return false; + return false } if !write_sized(writer, transmute([]u8)header) { - return false; + return false } if !write_sized(writer, data) { - return false; + return false } - return true; + return true } send_response :: proc (response: ResponseMessage, writer: ^Writer) -> bool { - data, error := json.marshal(response, context.temp_allocator); + data, error := json.marshal(response, context.temp_allocator) - header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)); + header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)) if error != .None { - return false; + return false } if !write_sized(writer, transmute([]u8)header) { - return false; + return false } if !write_sized(writer, data) { - return false; + return false } - return true; + return true } send_error :: proc (response: ResponseMessageError, writer: ^Writer) -> bool { - data, error := json.marshal(response, context.temp_allocator); + data, error := json.marshal(response, context.temp_allocator) - header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)); + header := fmt.tprintf("Content-Length: %v\r\n\r\n", len(data)) if error != .None { - return false; + return false } if !write_sized(writer, transmute([]u8)header) { - return false; + return false } if !write_sized(writer, data) { - return false; + return false } - return true; + return true } diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin index c0a117f..bd89bb3 100644 --- a/src/server/semantic_tokens.odin +++ b/src/server/semantic_tokens.odin @@ -85,56 +85,56 @@ SemanticTokenBuilder :: struct { make_token_builder :: proc(allocator := context.temp_allocator) -> SemanticTokenBuilder { return { tokens = make([dynamic]u32, 1000, context.temp_allocator), - }; + } } get_tokens :: proc(builder: SemanticTokenBuilder) -> SemanticTokens { return { data = builder.tokens[:], - }; + } } get_semantic_tokens :: proc(document: ^common.Document, range: common.Range, symbols: map[uintptr]index.Symbol) -> SemanticTokens { - using analysis; + using analysis - builder := make_token_builder(); + builder := make_token_builder() if document.ast.pkg_decl != nil { - write_semantic_token(&builder, document.ast.pkg_token, document.ast.src, .Keyword, .None); + write_semantic_token(&builder, document.ast.pkg_token, document.ast.src, .Keyword, .None) } - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - builder.symbols = symbols; + builder.symbols = symbols - ast_context.current_package = ast_context.document_package; + ast_context.current_package = ast_context.document_package for decl in document.ast.decls { if range.start.line <= decl.pos.line && decl.end.line <= range.end.line { - visit(decl, &builder, &ast_context); + visit(decl, &builder, &ast_context) } } - return get_tokens(builder); + return 
get_tokens(builder) } write_semantic_node :: proc(builder: ^SemanticTokenBuilder, node: ^ast.Node, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) { - position := common.get_relative_token_position(node.pos.offset, transmute([]u8)src, builder.current_start); - name := common.get_ast_node_string(node, src); - append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier); - builder.current_start = node.pos.offset; + position := common.get_relative_token_position(node.pos.offset, transmute([]u8)src, builder.current_start) + name := common.get_ast_node_string(node, src) + append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier) + builder.current_start = node.pos.offset } write_semantic_token :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) { - position := common.get_relative_token_position(token.pos.offset, transmute([]u8)src, builder.current_start); - append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, cast(u32)modifier); - builder.current_start = token.pos.offset; + position := common.get_relative_token_position(token.pos.offset, transmute([]u8)src, builder.current_start) + append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, cast(u32)modifier) + builder.current_start = token.pos.offset } write_semantic_string :: proc(builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) { - position := common.get_relative_token_position(pos.offset, transmute([]u8)src, builder.current_start); - append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier); - builder.current_start = pos.offset; + position := common.get_relative_token_position(pos.offset, transmute([]u8)src, builder.current_start) + append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier) + builder.current_start = pos.offset } visit :: proc { @@ -142,207 +142,207 @@ visit :: proc { visit_dynamic_array, visit_array, visit_stmt, -}; +} visit_array :: proc(array: $A/[]^$T, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { for elem, i in array { - visit(elem, builder, ast_context); + visit(elem, builder, ast_context) } } visit_dynamic_array :: proc(array: $A/[dynamic]^$T, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { for elem, i in array { - visit(elem, builder, ast_context); + visit(elem, builder, ast_context) } } visit_stmt :: proc(node: ^ast.Stmt, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - visit_node(node, builder, ast_context); + visit_node(node, builder, ast_context) } visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using ast; + using ast if node == nil { - return; + return } - switch n in node.derived { - case Ellipsis: - write_semantic_string(builder, node.pos, "..", ast_context.file.src, .Operator, .None); - visit(n.expr, builder, ast_context); - case Ident: + #partial switch n in node.derived { + case ^Ellipsis: + write_semantic_string(builder, node.pos, "..", ast_context.file.src, .Operator, .None) + 
visit(n.expr, builder, ast_context) + case ^Ident: if symbol, ok := builder.symbols[cast(uintptr)node]; ok { if symbol.type == .Variable { - write_semantic_node(builder, node, ast_context.file.src, .Variable, .None); - return; + write_semantic_node(builder, node, ast_context.file.src, .Variable, .None) + return } #partial switch v in symbol.value { case index.SymbolPackageValue: - write_semantic_node(builder, node, ast_context.file.src, .Namespace, .None); + write_semantic_node(builder, node, ast_context.file.src, .Namespace, .None) case index.SymbolStructValue: - write_semantic_node(builder, node, ast_context.file.src, .Struct, .None); + write_semantic_node(builder, node, ast_context.file.src, .Struct, .None) case index.SymbolEnumValue: - write_semantic_node(builder, node, ast_context.file.src, .Enum, .None); + write_semantic_node(builder, node, ast_context.file.src, .Enum, .None) case index.SymbolUnionValue: - write_semantic_node(builder, node, ast_context.file.src, .Enum, .None); + write_semantic_node(builder, node, ast_context.file.src, .Enum, .None) case index.SymbolProcedureValue: - write_semantic_node(builder, node, ast_context.file.src, .Function, .None); + write_semantic_node(builder, node, ast_context.file.src, .Function, .None) case index.SymbolProcedureGroupValue: - write_semantic_node(builder, node, ast_context.file.src, .Function, .None); + write_semantic_node(builder, node, ast_context.file.src, .Function, .None) case index.SymbolUntypedValue: - write_semantic_node(builder, node, ast_context.file.src, .Type, .None); + write_semantic_node(builder, node, ast_context.file.src, .Type, .None) case index.SymbolBasicValue: - write_semantic_node(builder, node, ast_context.file.src, .Type, .None); + write_semantic_node(builder, node, ast_context.file.src, .Type, .None) case: //log.errorf("Unexpected symbol value: %v", symbol.value); //panic(fmt.tprintf("Unexpected symbol value: %v", symbol.value)); } } - case Selector_Expr: - visit_selector(cast(^Selector_Expr)node, builder, ast_context); - builder.selector = false; - case When_Stmt: - write_semantic_string(builder, n.when_pos, "when", ast_context.file.src, .Keyword, .None); - visit(n.cond, builder, ast_context); - visit(n.body, builder, ast_context); - visit(n.else_stmt, builder, ast_context); - case Pointer_Type: - write_semantic_string(builder, node.pos, "^", ast_context.file.src, .Operator, .None); - visit(n.elem, builder, ast_context); - case Value_Decl: - visit_value_decl(n, builder, ast_context); - case Block_Stmt: - visit(n.stmts, builder, ast_context); - case Expr_Stmt: - visit(n.expr, builder, ast_context); - case Branch_Stmt: - write_semantic_token(builder, n.tok, ast_context.file.src, .Type, .None); - case Poly_Type: - write_semantic_string(builder, n.dollar, "$", ast_context.file.src, .Operator, .None); - visit(n.type, builder, ast_context); - visit(n.specialization, builder, ast_context); - case Range_Stmt: - write_semantic_string(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None); + case ^Selector_Expr: + visit_selector(cast(^Selector_Expr)node, builder, ast_context) + builder.selector = false + case ^When_Stmt: + write_semantic_string(builder, n.when_pos, "when", ast_context.file.src, .Keyword, .None) + visit(n.cond, builder, ast_context) + visit(n.body, builder, ast_context) + visit(n.else_stmt, builder, ast_context) + case ^Pointer_Type: + write_semantic_string(builder, node.pos, "^", ast_context.file.src, .Operator, .None) + visit(n.elem, builder, ast_context) + case ^Value_Decl: + 
visit_value_decl(n^, builder, ast_context) + case ^Block_Stmt: + visit(n.stmts, builder, ast_context) + case ^Expr_Stmt: + visit(n.expr, builder, ast_context) + case ^Branch_Stmt: + write_semantic_token(builder, n.tok, ast_context.file.src, .Type, .None) + case ^Poly_Type: + write_semantic_string(builder, n.dollar, "$", ast_context.file.src, .Operator, .None) + visit(n.type, builder, ast_context) + visit(n.specialization, builder, ast_context) + case ^Range_Stmt: + write_semantic_string(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None) for val in n.vals { - if ident, ok := val.derived.(Ident); ok { - write_semantic_node(builder, val, ast_context.file.src, .Variable, .None); + if ident, ok := val.derived.(^Ident); ok { + write_semantic_node(builder, val, ast_context.file.src, .Variable, .None) } } - write_semantic_string(builder, n.in_pos, "in", ast_context.file.src, .Keyword, .None); - visit(n.expr, builder, ast_context); - visit(n.body, builder, ast_context); - case If_Stmt: - write_semantic_string(builder, n.if_pos, "if", ast_context.file.src, .Keyword, .None); - visit(n.init, builder, ast_context); - visit(n.cond, builder, ast_context); - visit(n.body, builder, ast_context); + write_semantic_string(builder, n.in_pos, "in", ast_context.file.src, .Keyword, .None) + visit(n.expr, builder, ast_context) + visit(n.body, builder, ast_context) + case ^If_Stmt: + write_semantic_string(builder, n.if_pos, "if", ast_context.file.src, .Keyword, .None) + visit(n.init, builder, ast_context) + visit(n.cond, builder, ast_context) + visit(n.body, builder, ast_context) if n.else_stmt != nil { - write_semantic_string(builder, n.else_pos, "else", ast_context.file.src, .Keyword, .None); - visit(n.else_stmt, builder, ast_context); + write_semantic_string(builder, n.else_pos, "else", ast_context.file.src, .Keyword, .None) + visit(n.else_stmt, builder, ast_context) } - case For_Stmt: - write_semantic_string(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None); - visit(n.init, builder, ast_context); - visit(n.cond, builder, ast_context); - visit(n.post, builder, ast_context); - visit(n.body, builder, ast_context); - case Switch_Stmt: - write_semantic_string(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None); - visit(n.init, builder, ast_context); - visit(n.cond, builder, ast_context); - visit(n.body, builder, ast_context); - case Type_Switch_Stmt: - write_semantic_string(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None); - visit(n.tag, builder, ast_context); - visit(n.expr, builder, ast_context); - visit(n.body, builder, ast_context); - case Assign_Stmt: + case ^For_Stmt: + write_semantic_string(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None) + visit(n.init, builder, ast_context) + visit(n.cond, builder, ast_context) + visit(n.post, builder, ast_context) + visit(n.body, builder, ast_context) + case ^Switch_Stmt: + write_semantic_string(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None) + visit(n.init, builder, ast_context) + visit(n.cond, builder, ast_context) + visit(n.body, builder, ast_context) + case ^Type_Switch_Stmt: + write_semantic_string(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None) + visit(n.tag, builder, ast_context) + visit(n.expr, builder, ast_context) + visit(n.body, builder, ast_context) + case ^Assign_Stmt: for l in n.lhs { - if ident, ok := l.derived.(Ident); ok { - write_semantic_node(builder, l, ast_context.file.src, .Variable, .None); + if ident, ok := 
l.derived.(^Ident); ok { + write_semantic_node(builder, l, ast_context.file.src, .Variable, .None) } else { - visit(l, builder, ast_context); + visit(l, builder, ast_context) } } - visit_token_op(builder, n.op, ast_context.file.src); - visit(n.rhs, builder, ast_context); - case Case_Clause: - write_semantic_string(builder, n.case_pos, "case", ast_context.file.src, .Keyword, .None); - visit(n.list, builder, ast_context); - visit(n.body, builder, ast_context); - case Call_Expr: - visit(n.expr, builder, ast_context); - visit(n.args, builder, ast_context); - case Implicit_Selector_Expr: - write_semantic_node(builder, n.field, ast_context.file.src, .Enum, .None); - case Array_Type: - visit(n.elem, builder, ast_context); - case Binary_Expr: - visit(n.left, builder, ast_context); - visit_token_op(builder, n.op, ast_context.file.src); - visit(n.right, builder, ast_context); - case Comp_Lit: - visit(n.type, builder, ast_context); - visit(n.elems, builder, ast_context); - case Struct_Type: - write_semantic_string(builder, n.pos, "struct", ast_context.file.src, .Keyword, .None); - visit_struct_fields(n, builder, ast_context); - case Type_Assertion: - visit(n.expr, builder, ast_context); - visit(n.type, builder, ast_context); - case Type_Cast: - write_semantic_string(builder, n.pos, "cast", ast_context.file.src, .Keyword, .None); - visit(n.type, builder, ast_context); - visit(n.expr, builder, ast_context); - case Paren_Expr: - visit(n.expr, builder, ast_context); - case Deref_Expr: - visit(n.expr, builder, ast_context); - case Return_Stmt: - write_semantic_string(builder, n.pos, "return", ast_context.file.src, .Keyword, .None); - visit(n.results, builder, ast_context); - case Dynamic_Array_Type: - write_semantic_string(builder, n.dynamic_pos, "dynamic", ast_context.file.src, .Keyword, .None); - visit(n.elem, builder, ast_context); - case Field_Value: - if ident, ok := n.field.derived.(Ident); ok { - write_semantic_node(builder, n.field, ast_context.file.src, .Property, .None); + visit_token_op(builder, n.op, ast_context.file.src) + visit(n.rhs, builder, ast_context) + case ^Case_Clause: + write_semantic_string(builder, n.case_pos, "case", ast_context.file.src, .Keyword, .None) + visit(n.list, builder, ast_context) + visit(n.body, builder, ast_context) + case ^Call_Expr: + visit(n.expr, builder, ast_context) + visit(n.args, builder, ast_context) + case ^Implicit_Selector_Expr: + write_semantic_node(builder, n.field, ast_context.file.src, .Enum, .None) + case ^Array_Type: + visit(n.elem, builder, ast_context) + case ^Binary_Expr: + visit(n.left, builder, ast_context) + visit_token_op(builder, n.op, ast_context.file.src) + visit(n.right, builder, ast_context) + case ^Comp_Lit: + visit(n.type, builder, ast_context) + visit(n.elems, builder, ast_context) + case ^Struct_Type: + write_semantic_string(builder, n.pos, "struct", ast_context.file.src, .Keyword, .None) + visit_struct_fields(n^, builder, ast_context) + case ^Type_Assertion: + visit(n.expr, builder, ast_context) + visit(n.type, builder, ast_context) + case ^Type_Cast: + write_semantic_string(builder, n.pos, "cast", ast_context.file.src, .Keyword, .None) + visit(n.type, builder, ast_context) + visit(n.expr, builder, ast_context) + case ^Paren_Expr: + visit(n.expr, builder, ast_context) + case ^Deref_Expr: + visit(n.expr, builder, ast_context) + case ^Return_Stmt: + write_semantic_string(builder, n.pos, "return", ast_context.file.src, .Keyword, .None) + visit(n.results, builder, ast_context) + case ^Dynamic_Array_Type: + 
write_semantic_string(builder, n.dynamic_pos, "dynamic", ast_context.file.src, .Keyword, .None) + visit(n.elem, builder, ast_context) + case ^Field_Value: + if ident, ok := n.field.derived.(^Ident); ok { + write_semantic_node(builder, n.field, ast_context.file.src, .Property, .None) } - visit(n.value, builder, ast_context); - case Index_Expr: - visit(n.expr, builder, ast_context); - visit(n.index, builder, ast_context); - case Basic_Lit: - visit_basic_lit(n, builder, ast_context); - case Unary_Expr: - visit(n.expr, builder, ast_context); - case Implicit: - case Slice_Expr: - visit(n.expr, builder, ast_context); - case Using_Stmt: - write_semantic_string(builder, n.pos, "using", ast_context.file.src, .Keyword, .None); - visit(n.list, builder, ast_context); - case Map_Type: - write_semantic_string(builder, n.tok_pos, "map", ast_context.file.src, .Keyword, .None); - visit(n.key, builder, ast_context); - visit(n.value, builder, ast_context); - case Defer_Stmt: - write_semantic_string(builder, n.pos, "defer", ast_context.file.src, .Keyword, .None); - visit(n.stmt, builder, ast_context); - case Import_Decl: - write_semantic_token(builder, n.import_tok, ast_context.file.src, .Keyword, .None); + visit(n.value, builder, ast_context) + case ^Index_Expr: + visit(n.expr, builder, ast_context) + visit(n.index, builder, ast_context) + case ^Basic_Lit: + visit_basic_lit(n^, builder, ast_context) + case ^Unary_Expr: + visit(n.expr, builder, ast_context) + case ^Implicit: + case ^Slice_Expr: + visit(n.expr, builder, ast_context) + case ^Using_Stmt: + write_semantic_string(builder, n.pos, "using", ast_context.file.src, .Keyword, .None) + visit(n.list, builder, ast_context) + case ^Map_Type: + write_semantic_string(builder, n.tok_pos, "map", ast_context.file.src, .Keyword, .None) + visit(n.key, builder, ast_context) + visit(n.value, builder, ast_context) + case ^Defer_Stmt: + write_semantic_string(builder, n.pos, "defer", ast_context.file.src, .Keyword, .None) + visit(n.stmt, builder, ast_context) + case ^Import_Decl: + write_semantic_token(builder, n.import_tok, ast_context.file.src, .Keyword, .None) if n.name.text != "" { - write_semantic_token(builder, n.name, ast_context.file.src, .Namespace, .None); + write_semantic_token(builder, n.name, ast_context.file.src, .Namespace, .None) } - write_semantic_token(builder, n.relpath, ast_context.file.src, .String, .None); + write_semantic_token(builder, n.relpath, ast_context.file.src, .String, .None) case: //log.errorf("unhandled semantic token node %v", n); //panic(fmt.tprintf("Missed semantic token handling %v", n)); @@ -350,179 +350,179 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: } visit_basic_lit :: proc(basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using analysis; + using analysis if symbol, ok := resolve_basic_lit(ast_context, basic_lit); ok { if untyped, ok := symbol.value.(index.SymbolUntypedValue); ok { switch untyped.type { case .Bool: - write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Keyword, .None); + write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Keyword, .None) case .Float, .Integer: - write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Number, .None); + write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Number, .None) case .String: - write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .String, .None); + write_semantic_token(builder, basic_lit.tok, ast_context.file.src, 
.String, .None) } } } } visit_value_decl :: proc(value_decl: ast.Value_Decl, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using ast; + using ast if value_decl.type != nil { for name in value_decl.names { - write_semantic_node(builder, name, ast_context.file.src, .Variable, .None); + write_semantic_node(builder, name, ast_context.file.src, .Variable, .None) } - visit(value_decl.type, builder, ast_context); + visit(value_decl.type, builder, ast_context) - return; + return } if len(value_decl.values) == 1 { - switch v in value_decl.values[0].derived { - case Union_Type: - write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None); - write_semantic_string(builder, v.pos, "union", ast_context.file.src, .Keyword, .None); - visit(v.variants, builder, ast_context); - case Struct_Type: - write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Struct, .None); - write_semantic_string(builder, v.pos, "struct", ast_context.file.src, .Keyword, .None); - visit_struct_fields(v, builder, ast_context); - case Enum_Type: - write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None); - write_semantic_string(builder, v.pos, "enum", ast_context.file.src, .Keyword, .None); - visit_enum_fields(v, builder, ast_context); - case Proc_Group: - write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None); - write_semantic_string(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None); + #partial switch v in value_decl.values[0].derived { + case ^Union_Type: + write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None) + write_semantic_string(builder, v.pos, "union", ast_context.file.src, .Keyword, .None) + visit(v.variants, builder, ast_context) + case ^Struct_Type: + write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Struct, .None) + write_semantic_string(builder, v.pos, "struct", ast_context.file.src, .Keyword, .None) + visit_struct_fields(v^, builder, ast_context) + case ^Enum_Type: + write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None) + write_semantic_string(builder, v.pos, "enum", ast_context.file.src, .Keyword, .None) + visit_enum_fields(v^, builder, ast_context) + case ^Proc_Group: + write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None) + write_semantic_string(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None) for arg in v.args { - if ident, ok := arg.derived.(Ident); ok { - write_semantic_node(builder, arg, ast_context.file.src, .Function, .None); + if ident, ok := arg.derived.(^Ident); ok { + write_semantic_node(builder, arg, ast_context.file.src, .Function, .None) } } - case Proc_Lit: - write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None); - write_semantic_string(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None); - visit_proc_type(v.type, builder, ast_context); + case ^Proc_Lit: + write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None) + write_semantic_string(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None) + visit_proc_type(v.type, builder, ast_context) - visit(v.body, builder, ast_context); + visit(v.body, builder, ast_context) case: for name in value_decl.names { - write_semantic_node(builder, name, ast_context.file.src, .Variable, .None); + write_semantic_node(builder, name, ast_context.file.src, .Variable, .None) } - visit(value_decl.values[0], 
builder, ast_context); + visit(value_decl.values[0], builder, ast_context) } } else { for name in value_decl.names { - write_semantic_node(builder, name, ast_context.file.src, .Variable, .None); + write_semantic_node(builder, name, ast_context.file.src, .Variable, .None) } for value in value_decl.values { - visit(value, builder, ast_context); + visit(value, builder, ast_context) } } } visit_token_op :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: string) { if token.text == "in" { - write_semantic_string(builder, token.pos, token.text, src, .Keyword, .None); + write_semantic_string(builder, token.pos, token.text, src, .Keyword, .None) } else { - write_semantic_string(builder, token.pos, token.text, src, .Operator, .None); + write_semantic_string(builder, token.pos, token.text, src, .Operator, .None) } } visit_proc_type :: proc(node: ^ast.Proc_Type, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using ast; + using ast if node == nil { - return; + return } if node.params != nil { for param in node.params.list { for name in param.names { - if ident, ok := name.derived.(Ident); ok { - write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None); + if ident, ok := name.derived.(^Ident); ok { + write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None) } } - visit(param.type, builder, ast_context); + visit(param.type, builder, ast_context) } } if node.results != nil { for result in node.results.list { - visit(result.names, builder, ast_context); - visit(result.type, builder, ast_context); + visit(result.names, builder, ast_context) + visit(result.type, builder, ast_context) } } } visit_enum_fields :: proc(node: ast.Enum_Type, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using ast; + using ast if node.fields == nil { - return; + return } for field in node.fields { - if ident, ok := field.derived.(Ident); ok { - write_semantic_node(builder, field, ast_context.file.src, .EnumMember, .None); + if ident, ok := field.derived.(^Ident); ok { + write_semantic_node(builder, field, ast_context.file.src, .EnumMember, .None) } - else if f, ok := field.derived.(Field_Value); ok { - if _, ok := f.field.derived.(Ident); ok { - write_semantic_node(builder, f.field, ast_context.file.src, .EnumMember, .None); + else if f, ok := field.derived.(^Field_Value); ok { + if _, ok := f.field.derived.(^Ident); ok { + write_semantic_node(builder, f.field, ast_context.file.src, .EnumMember, .None) } - visit(f.value, builder, ast_context); + visit(f.value, builder, ast_context) } } } visit_struct_fields :: proc(node: ast.Struct_Type, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - using ast; + using ast if node.fields == nil { - return; + return } for field in node.fields.list { for name in field.names { - if ident, ok := name.derived.(Ident); ok { - write_semantic_node(builder, name, ast_context.file.src, .Property, .None); + if ident, ok := name.derived.(^Ident); ok { + write_semantic_node(builder, name, ast_context.file.src, .Property, .None) } } - visit(field.type, builder, ast_context); + visit(field.type, builder, ast_context) } } visit_selector :: proc(selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) { - if _, ok := selector.expr.derived.(ast.Selector_Expr); ok { - visit_selector(cast(^ast.Selector_Expr)selector.expr, builder, ast_context); + if _, ok := selector.expr.derived.(^ast.Selector_Expr); ok { + 
visit_selector(cast(^ast.Selector_Expr)selector.expr, builder, ast_context) } else { - visit(selector.expr, builder, ast_context); - builder.selector = true; + visit(selector.expr, builder, ast_context) + builder.selector = true } if symbol, ok := builder.symbols[cast(uintptr)selector]; ok { if symbol.type == .Variable { - write_semantic_node(builder, selector.field, ast_context.file.src, .Method, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Method, .None) } #partial switch v in symbol.value { case index.SymbolPackageValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Namespace, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Namespace, .None) case index.SymbolStructValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Struct, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Struct, .None) case index.SymbolEnumValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None) case index.SymbolUnionValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None) case index.SymbolProcedureValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None) case index.SymbolProcedureGroupValue: - write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None); + write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None) } } }
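Most of the churn in semantic_tokens.odin is the migration from value variants to pointer variants in ast.Node.derived (derived.(Ident) becomes derived.(^Ident)), together with marking the large switches #partial since not every variant of the union is handled. A minimal sketch of the new assertion style, assuming the core:odin/ast union-of-pointers layout and a hypothetical helper name:

	import "core:odin/ast"

	// Returns true when the node is an identifier under the pointer-variant layout.
	node_is_ident :: proc(node: ^ast.Node) -> bool {
		if node == nil {
			return false
		}

		// The value form `node.derived.(ast.Ident)` no longer matches;
		// the variant must be asserted as a pointer.
		if _, ok := node.derived.(^ast.Ident); ok {
			return true
		}

		return false
	}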
\ No newline at end of file diff --git a/src/server/signature.odin b/src/server/signature.odin index d187336..e8b3ae1 100644 --- a/src/server/signature.odin +++ b/src/server/signature.odin @@ -54,165 +54,165 @@ ParameterInformation :: struct { */ build_procedure_symbol_signature :: proc(symbol: ^index.Symbol) { if value, ok := symbol.value.(index.SymbolProcedureValue); ok { - builder := strings.make_builder(context.temp_allocator); + builder := strings.make_builder(context.temp_allocator) - strings.write_string(&builder, "proc"); - strings.write_string(&builder, "("); + strings.write_string(&builder, "proc") + strings.write_string(&builder, "(") for arg, i in value.arg_types { - strings.write_string(&builder, common.node_to_string(arg)); + strings.write_string(&builder, common.node_to_string(arg)) if i != len(value.arg_types) - 1 { - strings.write_string(&builder, ", "); + strings.write_string(&builder, ", ") } } - strings.write_string(&builder, ")"); + strings.write_string(&builder, ")") if len(value.return_types) != 0 { - strings.write_string(&builder, " -> "); + strings.write_string(&builder, " -> ") if len(value.return_types) > 1 { - strings.write_string(&builder, "("); + strings.write_string(&builder, "(") } for arg, i in value.return_types { - strings.write_string(&builder, common.node_to_string(arg)); + strings.write_string(&builder, common.node_to_string(arg)) if i != len(value.return_types) - 1 { - strings.write_string(&builder, ", "); + strings.write_string(&builder, ", ") } } if len(value.return_types) > 1 { - strings.write_string(&builder, ")"); + strings.write_string(&builder, ")") } } - symbol.signature = strings.to_string(builder); + symbol.signature = strings.to_string(builder) } else if value, ok := symbol.value.(index.SymbolAggregateValue); ok { - symbol.signature = "proc"; + symbol.signature = "proc" } } seperate_proc_field_arguments :: proc(procedure: ^index.Symbol) { if value, ok := &procedure.value.(index.SymbolProcedureValue); ok { - types := make([dynamic]^ast.Field, context.temp_allocator); + types := make([dynamic]^ast.Field, context.temp_allocator) for arg, i in value.arg_types { if len(arg.names) == 1 { - append(&types, arg); - continue; + append(&types, arg) + continue } for name in arg.names { - field : ^ast.Field = index.new_type(ast.Field, {}, {}, context.temp_allocator); - field.names = make([]^ast.Expr, 1, context.temp_allocator); - field.names[0] = name; - field.type = arg.type; - append(&types, field); + field : ^ast.Field = index.new_type(ast.Field, {}, {}, context.temp_allocator) + field.names = make([]^ast.Expr, 1, context.temp_allocator) + field.names[0] = name + field.type = arg.type + append(&types, field) } } - value.arg_types = types[:]; + value.arg_types = types[:] } } get_signature_information :: proc(document: ^common.Document, position: common.Position) -> (SignatureHelp, bool) { - using analysis; + using analysis - signature_help: SignatureHelp; + signature_help: SignatureHelp - ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri); + ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri) - position_context, ok := get_document_position_context(document, position, .SignatureHelp); + position_context, ok := get_document_position_context(document, position, .SignatureHelp) if !ok { - return signature_help, true; + return signature_help, true } //TODO(should probably not be an ast.Expr, but ast.Call_Expr) if position_context.call == nil { - 
return signature_help, true; + return signature_help, true } - get_globals(document.ast, &ast_context); + get_globals(document.ast, &ast_context) if position_context.function != nil { - get_locals(document.ast, position_context.function, &ast_context, &position_context); + get_locals(document.ast, position_context.function, &ast_context, &position_context) } for comma, i in position_context.call_commas { if position_context.position > comma { - signature_help.activeParameter = i+1; + signature_help.activeParameter = i+1 } else if position_context.position == comma { - signature_help.activeParameter = i; + signature_help.activeParameter = i } } - call: index.Symbol; - call, ok = resolve_type_expression(&ast_context, position_context.call); + call: index.Symbol + call, ok = resolve_type_expression(&ast_context, position_context.call) if !ok { - return signature_help, true; + return signature_help, true } - seperate_proc_field_arguments(&call); + seperate_proc_field_arguments(&call) - signature_information := make([dynamic]SignatureInformation, context.temp_allocator); + signature_information := make([dynamic]SignatureInformation, context.temp_allocator) if value, ok := call.value.(index.SymbolProcedureValue); ok { - parameters := make([]ParameterInformation, len(value.arg_types), context.temp_allocator); + parameters := make([]ParameterInformation, len(value.arg_types), context.temp_allocator) for arg, i in value.arg_types { if arg.type != nil { - if _, is_ellipsis := arg.type.derived.(ast.Ellipsis); is_ellipsis { - signature_help.activeParameter = min(i, signature_help.activeParameter); + if _, is_ellipsis := arg.type.derived.(^ast.Ellipsis); is_ellipsis { + signature_help.activeParameter = min(i, signature_help.activeParameter) } } - parameters[i].label = common.node_to_string(arg); + parameters[i].label = common.node_to_string(arg) } - build_procedure_symbol_signature(&call); + build_procedure_symbol_signature(&call) info := SignatureInformation { label = concatenate_symbol_information(&ast_context, call, false), documentation = call.doc, parameters = parameters, - }; - append(&signature_information, info); + } + append(&signature_information, info) } else if value, ok := call.value.(index.SymbolAggregateValue); ok { //function overloaded procedures for symbol in value.symbols { - symbol := symbol; + symbol := symbol if value, ok := symbol.value.(index.SymbolProcedureValue); ok { - parameters := make([]ParameterInformation, len(value.arg_types), context.temp_allocator); + parameters := make([]ParameterInformation, len(value.arg_types), context.temp_allocator) for arg, i in value.arg_types { if arg.type != nil { - if _, is_ellipsis := arg.type.derived.(ast.Ellipsis); is_ellipsis { - signature_help.activeParameter = min(i, signature_help.activeParameter); + if _, is_ellipsis := arg.type.derived.(^ast.Ellipsis); is_ellipsis { + signature_help.activeParameter = min(i, signature_help.activeParameter) } } - parameters[i].label = common.node_to_string(arg); - parameters[i].activeParameter = i; + parameters[i].label = common.node_to_string(arg) + parameters[i].activeParameter = i } - build_procedure_symbol_signature(&symbol); + build_procedure_symbol_signature(&symbol) info := SignatureInformation { label = concatenate_symbol_information(&ast_context, symbol, false), documentation = symbol.doc, parameters = parameters, - }; + } - append(&signature_information, info); + append(&signature_information, info) } } } - signature_help.signatures = signature_information[:]; + signature_help.signatures = 
signature_information[:] - return signature_help, true; + return signature_help, true }
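The activeParameter logic above is driven purely by the comma offsets collected for the call expression: once the cursor is past the i-th comma, parameter i+1 is active, and sitting exactly on a comma keeps parameter i active (with a later clamp for variadic ast.Ellipsis parameters). A standalone sketch of that rule, with hypothetical names and plain int byte offsets standing in for the position types:

	// Which parameter the cursor is on, given the byte offsets of the commas
	// inside the call expression. Illustrative helper only, not part of the patch.
	active_parameter_from_commas :: proc(cursor: int, commas: []int) -> int {
		active := 0

		for comma, i in commas {
			if cursor > comma {
				active = i + 1 // cursor is past this comma, so the next argument is active
			} else if cursor == comma {
				active = i     // cursor sits exactly on the comma
			}
		}

		return active
	}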
\ No newline at end of file diff --git a/src/server/unmarshal.odin b/src/server/unmarshal.odin index 0de3d53..c8a8071 100644 --- a/src/server/unmarshal.odin +++ b/src/server/unmarshal.odin @@ -12,33 +12,33 @@ import "core:fmt" unmarshal :: proc(json_value: json.Value, v: any, allocator: mem.Allocator) -> json.Marshal_Error { - using runtime; + using runtime if v == nil { - return .None; + return .None } if json_value == nil { - return .None; + return .None } - type_info := type_info_base(type_info_of(v.id)); + type_info := type_info_base(type_info_of(v.id)) #partial switch j in json_value { case json.Object: #partial switch variant in type_info.variant { case Type_Info_Struct: for field, i in variant.names { - a := any {rawptr(uintptr(v.data) + uintptr(variant.offsets[i])), variant.types[i].id}; + a := any {rawptr(uintptr(v.data) + uintptr(variant.offsets[i])), variant.types[i].id} //TEMP most likely have to rewrite the entire unmarshal using tags instead, because i sometimes have to support names like 'context', which can't be written like that if field[len(field)-1] == '_' { if ret := unmarshal(j[field[:len(field)-1]], a, allocator); ret != .None { - return ret; + return ret } } else { if ret := unmarshal(j[field], a, allocator); ret != .None { - return ret; + return ret } } @@ -50,59 +50,59 @@ unmarshal :: proc(json_value: json.Value, v: any, allocator: mem.Allocator) -> j //Note(Daniel, THIS IS REALLY SCUFFED. Need to talk to gingerbill about unmarshalling unions) //This only works for unions with one object - made to handle optionals - tag_ptr := uintptr(v.data) + variant.tag_offset; - tag_any := any {rawptr(tag_ptr), variant.tag_type.id}; + tag_ptr := uintptr(v.data) + variant.tag_offset + tag_any := any {rawptr(tag_ptr), variant.tag_type.id} - not_optional := 1; + not_optional := 1 - mem.copy(cast(rawptr)tag_ptr, ¬_optional, size_of(variant.tag_type)); + mem.copy(cast(rawptr)tag_ptr, ¬_optional, size_of(variant.tag_type)) - id := variant.variants[0].id; + id := variant.variants[0].id - unmarshal(json_value, any {v.data, id}, allocator); + unmarshal(json_value, any {v.data, id}, allocator) } case json.Array: #partial switch variant in type_info.variant { case Type_Info_Dynamic_Array: - array := (^mem.Raw_Dynamic_Array)(v.data); + array := (^mem.Raw_Dynamic_Array)(v.data) if array.data == nil { - array.data = mem.alloc(len(j) * variant.elem_size, variant.elem.align, allocator); - array.len = len(j); - array.cap = len(j); - array.allocator = allocator; + array.data = mem.alloc(len(j) * variant.elem_size, variant.elem.align, allocator) + array.len = len(j) + array.cap = len(j) + array.allocator = allocator } else { - return .Unsupported_Type; + return .Unsupported_Type } for i in 0..<array.len { - a := any {rawptr(uintptr(array.data) + uintptr(variant.elem_size * i)), variant.elem.id}; + a := any {rawptr(uintptr(array.data) + uintptr(variant.elem_size * i)), variant.elem.id} if ret := unmarshal(j[i], a, allocator); ret != .None { - return ret; + return ret } } case: - return .Unsupported_Type; + return .Unsupported_Type } case json.String: #partial switch variant in type_info.variant { case Type_Info_String: - str := (^string)(v.data); - str^ = strings.clone(j, allocator); + str := (^string)(v.data) + str^ = strings.clone(j, allocator) case Type_Info_Enum: for name, i in variant.names { - lower_name := strings.to_lower(name, allocator); - lower_j := strings.to_lower(string(j), allocator); + lower_name := strings.to_lower(name, allocator) + lower_j := strings.to_lower(string(j), 
allocator) if lower_name == lower_j { - mem.copy(v.data, &variant.values[i], size_of(variant.base)); + mem.copy(v.data, &variant.values[i], size_of(variant.base)) } - delete(lower_name, allocator); - delete(lower_j, allocator); + delete(lower_name, allocator) + delete(lower_j, allocator) } } case json.Integer: @@ -110,48 +110,48 @@ unmarshal :: proc(json_value: json.Value, v: any, allocator: mem.Allocator) -> j case Type_Info_Integer: switch type_info.size { case 8: - tmp := i64(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := i64(j) + mem.copy(v.data, &tmp, type_info.size) case 4: - tmp := i32(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := i32(j) + mem.copy(v.data, &tmp, type_info.size) case 2: - tmp := i16(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := i16(j) + mem.copy(v.data, &tmp, type_info.size) case 1: - tmp := i8(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := i8(j) + mem.copy(v.data, &tmp, type_info.size) case: - return .Unsupported_Type; + return .Unsupported_Type } case Type_Info_Union: - tag_ptr := uintptr(v.data) + variant.tag_offset; + tag_ptr := uintptr(v.data) + variant.tag_offset } case json.Float: if _, ok := type_info.variant.(Type_Info_Float); ok { switch type_info.size { case 8: - tmp := f64(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := f64(j) + mem.copy(v.data, &tmp, type_info.size) case 4: - tmp := f32(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := f32(j) + mem.copy(v.data, &tmp, type_info.size) case: - return .Unsupported_Type; + return .Unsupported_Type } } case json.Null: case json.Boolean: if _, ok := type_info.variant.(Type_Info_Boolean); ok { - tmp := bool(j); - mem.copy(v.data, &tmp, type_info.size); + tmp := bool(j) + mem.copy(v.data, &tmp, type_info.size) } case: - return .Unsupported_Type; + return .Unsupported_Type } - return .None; + return .None } diff --git a/src/server/writer.odin b/src/server/writer.odin index 46a340e..4918f81 100644 --- a/src/server/writer.odin +++ b/src/server/writer.odin @@ -6,7 +6,7 @@ import "core:fmt" import "core:strings" import "core:sync" -WriterFn :: proc(_: rawptr, _: []byte) -> (int, int); +WriterFn :: proc(_: rawptr, _: []byte) -> (int, int) Writer :: struct { writer_fn: WriterFn, @@ -15,21 +15,21 @@ Writer :: struct { } make_writer :: proc(writer_fn: WriterFn, writer_context: rawptr) -> Writer { - writer := Writer {writer_context = writer_context, writer_fn = writer_fn}; - sync.mutex_init(&writer.writer_mutex); - return writer; + writer := Writer {writer_context = writer_context, writer_fn = writer_fn} + sync.mutex_init(&writer.writer_mutex) + return writer } write_sized :: proc(writer: ^Writer, data: []byte) -> bool { - sync.mutex_lock(&writer.writer_mutex); - defer sync.mutex_unlock(&writer.writer_mutex); + sync.mutex_lock(&writer.writer_mutex) + defer sync.mutex_unlock(&writer.writer_mutex) - written, err := writer.writer_fn(writer.writer_context, data); + written, err := writer.writer_fn(writer.writer_context, data) if (err != 0) { - return false; + return false } - return true; + return true } |
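response.odin and writer.odin together implement the LSP wire format: every outgoing message is a "Content-Length: N\r\n\r\n" header followed by the JSON body, both pushed through write_sized under the writer mutex. A rough usage sketch, assuming a made-up stdout-backed callback that matches the WriterFn signature above:

	import "core:os"

	// Hypothetical callback: forward the bytes to stdout and report the error code.
	stdout_writer :: proc(_: rawptr, data: []byte) -> (int, int) {
		n, err := os.write(os.stdout, data)
		return n, int(err)
	}

	// Illustrative only: wire the callback into a Writer and send one prepared response.
	example_send :: proc(response: ResponseMessage) {
		writer := make_writer(stdout_writer, nil)

		// send_response marshals the message, writes the Content-Length header,
		// then the JSON body, each via write_sized under the writer mutex.
		send_response(response, &writer)
	}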