author    Daniel Gavin <danielgavin5@hotmail.com>  2022-01-19 23:44:50 +0100
committer Daniel Gavin <danielgavin5@hotmail.com>  2022-01-19 23:44:50 +0100
commit    989345d0438429ae719287f9c7343f87b558f7e3 (patch)
tree      fa789c8e61dcc53d7cffc7f2485770a5026abdc8 /src
parent    329180066047e6dd802872104af5be2955f092cf (diff)
Remove the failed idea of making request tasks
Diffstat (limited to 'src')
-rw-r--r--  src/analysis/analysis.odin        | 218
-rw-r--r--  src/common/config.odin            |   2
-rw-r--r--  src/common/pool.odin              | 156
-rw-r--r--  src/common/types.odin             |   1
-rw-r--r--  src/main.odin                     |  20
-rw-r--r--  src/server/caches.odin            |  19
-rw-r--r--  src/server/completion.odin        |   2
-rw-r--r--  src/server/definition.odin        |   2
-rw-r--r--  src/server/document_symbols.odin  |   2
-rw-r--r--  src/server/documents.odin         |  12
-rw-r--r--  src/server/hover.odin             |   2
-rw-r--r--  src/server/inlay_hints.odin       |  20
-rw-r--r--  src/server/lens.odin              |   2
-rw-r--r--  src/server/requests.odin          | 635
-rw-r--r--  src/server/semantic_tokens.odin   |  98
-rw-r--r--  src/server/signature.odin         |   2
-rw-r--r--  src/server/types.odin             |   6
17 files changed, 402 insertions(+), 797 deletions(-)
diff --git a/src/analysis/analysis.odin b/src/analysis/analysis.odin
index 801d4cd..68d5fc1 100644
--- a/src/analysis/analysis.odin
+++ b/src/analysis/analysis.odin
@@ -19,9 +19,7 @@ import "shared:common"
import "shared:index"
/*
- TODO(replace all of the possible ast walking with the new odin visitor function)
TODO(improve the current_package logic, kinda confusing switching between different packages with selectors)
- TODO(try to flatten some of the nested branches if possible)
*/
DocumentPositionContextHint :: enum {
@@ -87,10 +85,10 @@ AstContext :: struct {
value_decl: ^ast.Value_Decl,
field_name: string,
uri: string,
- symbol_cache: ^map[int]rawptr, //symbol_cache from the current document
+ recursion_counter: int, //Sometimes the ast is so malformed that it causes infinite recursion.
}
-make_ast_context :: proc(file: ast.File, imports: []common.Package, package_name: string, uri: string, symbol_cache: ^map[int]rawptr, allocator := context.temp_allocator) -> AstContext {
+make_ast_context :: proc(file: ast.File, imports: []common.Package, package_name: string, uri: string, allocator := context.temp_allocator) -> AstContext {
ast_context := AstContext {
locals = make(map[int]map[string][dynamic]DocumentLocal, 0, allocator),
globals = make(map[string]common.GlobalExpr, 0, allocator),
@@ -105,7 +103,6 @@ make_ast_context :: proc(file: ast.File, imports: []common.Package, package_name
document_package = package_name,
current_package = package_name,
uri = uri,
- symbol_cache = symbol_cache,
allocator = allocator,
};
@@ -434,8 +431,8 @@ resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast
name = function_name,
};
- return_types := make([dynamic]^ast.Field, context.temp_allocator);
- argument_types := make([dynamic]^ast.Field, context.temp_allocator);
+ return_types := make([dynamic]^ast.Field, ast_context.allocator);
+ argument_types := make([dynamic]^ast.Field, ast_context.allocator);
for result in results {
if result.type == nil {
@@ -446,7 +443,7 @@ resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast
if ok {
if m, ok := poly_map[ident.name]; ok {
- field := cast(^Field)index.clone_node(result, context.temp_allocator, nil);
+ field := cast(^Field)index.clone_node(result, ast_context.allocator, nil);
field.type = m;
append(&return_types, field);
} else {
@@ -465,7 +462,7 @@ resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast
//check the name for poly
if poly_type, ok := param.names[0].derived.(ast.Poly_Type); ok && param.type != nil {
if m, ok := poly_map[poly_type.type.name]; ok {
- field := cast(^Field)index.clone_node(param, context.temp_allocator, nil);
+ field := cast(^Field)index.clone_node(param, ast_context.allocator, nil);
field.type = m;
append(&argument_types, field);
}
@@ -820,7 +817,7 @@ resolve_basic_lit :: proc(ast_context: ^AstContext, basic_lit: ast.Basic_Lit) ->
resolve_basic_directive :: proc(ast_context: ^AstContext, directive: ast.Basic_Directive, a := #caller_location) -> (index.Symbol, bool) {
switch directive.name {
case "caller_location":
- ident := index.new_type(ast.Ident, directive.pos, directive.end, context.temp_allocator);
+ ident := index.new_type(ast.Ident, directive.pos, directive.end, ast_context.allocator);
ident.name = "Source_Code_Location";
ast_context.current_package = ast_context.document_package;
return resolve_type_identifier(ast_context, ident^)
@@ -829,48 +826,21 @@ resolve_basic_directive :: proc(ast_context: ^AstContext, directive: ast.Basic_D
return {}, false;
}
-//Experiment with caching the results of the current file, this might just make it slower,
-//but it will help with multiple requests like semantic tokens.
-//If this doesn't provide good results, just handle caching explicitly on semantic tokens only.
-lookup_symbol_cache :: proc(ast_context: ^AstContext, node: ast.Node) -> (index.Symbol, bool) {
- if ast_context.document_package != ast_context.current_package {
- return {}, false;
- }
-
- if cached := &ast_context.symbol_cache[node.end.offset]; cached != nil {
- symbol := cast(^index.Symbol)cached^;
- return symbol^, true;
- }
- return {}, false;
-}
-
-store_symbol_cache :: proc(ast_context: ^AstContext, data: rawptr, offset: int) {
- if ast_context.document_package != ast_context.current_package {
- return;
- }
- ast_context.symbol_cache[offset] = cast(rawptr)data;
-}
resolve_type_expression :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
- if symbol, ok := lookup_symbol_cache(ast_context, node^); ok {
- return symbol, true;
+ if node == nil {
+ return {}, false;
}
- context.temp_allocator = context.allocator;
-
- if symbol, ok := internal_resolve_type_expression(ast_context, node); ok {
- cached_symbol := index.new_clone_symbol(symbol);
- store_symbol_cache(ast_context, cached_symbol, node.end.offset);
- return symbol, true;
- }
-
- return {}, false;
-}
+ if ast_context.recursion_counter > 15 {
+ log.error("Recursion passed 15 attempts - giving up");
+ return {}, false;
+ }
+ ast_context.recursion_counter += 1;
-internal_resolve_type_expression :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
- if node == nil {
- return {}, false;
+ defer {
+ ast_context.recursion_counter -= 1;
}
using ast;
@@ -1021,7 +991,7 @@ internal_resolve_type_expression :: proc(ast_context: ^AstContext, node: ^ast.Ex
} else {
value := index.SymbolFixedArrayValue {
expr = s.expr,
- len = make_int_basic_value(components_count),
+ len = make_int_basic_value(ast_context, components_count),
};
selector.value = value;
selector.type = .Variable;
@@ -1084,7 +1054,7 @@ store_local :: proc(ast_context: ^AstContext, expr: ^ast.Expr, offset: int, name
}
add_local_group :: proc(ast_context: ^AstContext, id: int) {
- ast_context.locals[id] = make(map[string][dynamic]DocumentLocal, 0, ast_context.allocator);
+ ast_context.locals[id] = make(map[string][dynamic]DocumentLocal, 100, ast_context.allocator);
}
clear_local_group :: proc(ast_context: ^AstContext, id: int) {
@@ -1142,23 +1112,18 @@ get_local_offset :: proc(ast_context: ^AstContext, offset: int, name: string) ->
}
resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
- if symbol, ok := lookup_symbol_cache(ast_context, node); ok {
- return symbol, true;
- }
-
- context.temp_allocator = context.allocator;
+ using ast;
- if symbol, ok := internal_resolve_type_identifier(ast_context, node); ok {
- cached_symbol := index.new_clone_symbol(symbol);
- store_symbol_cache(ast_context, cached_symbol, node.end.offset);
- return symbol, true;
+ if ast_context.recursion_counter > 15 {
+ log.error("Recursion passed 15 attempts - giving up");
+ return {}, false;
}
- return {}, false;
-}
+ ast_context.recursion_counter += 1;
-internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
- using ast;
+ defer {
+ ast_context.recursion_counter -= 1;
+ }
if pkg, ok := ast_context.in_package[node.name]; ok {
ast_context.current_package = pkg;
@@ -1301,7 +1266,7 @@ internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ide
return_symbol.type = .Variable;
}
- return_symbol.doc = common.get_doc(global.docs, context.temp_allocator);
+ return_symbol.doc = common.get_doc(global.docs, ast_context.allocator);
return return_symbol, ok;
} else if node.name == "context" {
@@ -1313,7 +1278,7 @@ internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ide
}
} else if v, ok := common.keyword_map[node.name]; ok {
//keywords
- ident := index.new_type(Ident, node.pos, node.end, context.temp_allocator);
+ ident := index.new_type(Ident, node.pos, node.end, ast_context.allocator);
ident.name = node.name;
symbol: index.Symbol;
@@ -1411,8 +1376,8 @@ resolve_ident_is_package :: proc(ast_context: ^AstContext, node: ast.Ident) -> b
}
expand_struct_usings :: proc(ast_context: ^AstContext, symbol: index.Symbol, value: index.SymbolStructValue) -> index.SymbolStructValue {
- names := slice.to_dynamic(value.names, context.temp_allocator);
- types := slice.to_dynamic(value.types, context.temp_allocator);
+ names := slice.to_dynamic(value.names, ast_context.allocator);
+ types := slice.to_dynamic(value.types, ast_context.allocator);
for k, v in value.usings {
ast_context.current_package = symbol.pkg;
@@ -1481,7 +1446,7 @@ resolve_symbol_return :: proc(ast_context: ^AstContext, symbol: index.Symbol, ok
if v.poly != nil {
//Todo(daniel): Maybe change the function to return a new symbol instead of referencing it.
//resolving the poly union means changing the type, so we do a copy of it.
- types := make([dynamic]^ast.Expr, context.temp_allocator);
+ types := make([dynamic]^ast.Expr, ast_context.allocator);
append_elems(&types, ..v.types);
v.types = types[:];
resolve_poly_union(ast_context, v.poly, &symbol);
@@ -1491,7 +1456,7 @@ resolve_symbol_return :: proc(ast_context: ^AstContext, symbol: index.Symbol, ok
if v.poly != nil {
//Todo(daniel): Maybe change the function to return a new symbol instead of referencing it.
//resolving the struct union means changing the type, so we do a copy of it.
- types := make([dynamic]^ast.Expr, context.temp_allocator);
+ types := make([dynamic]^ast.Expr, ast_context.allocator);
append_elems(&types, ..v.types);
v.types = types[:];
resolve_poly_struct(ast_context, v.poly, &symbol);
@@ -1613,26 +1578,26 @@ find_position_in_call_param :: proc(ast_context: ^AstContext, call: ast.Call_Exp
return len(call.args) - 1, true;
}
-make_pointer_ast :: proc(elem: ^ast.Expr) -> ^ast.Pointer_Type {
- pointer := index.new_type(ast.Pointer_Type, elem.pos, elem.end, context.temp_allocator);
+make_pointer_ast :: proc(ast_context: ^AstContext, elem: ^ast.Expr) -> ^ast.Pointer_Type {
+ pointer := index.new_type(ast.Pointer_Type, elem.pos, elem.end, ast_context.allocator);
pointer.elem = elem;
return pointer;
}
-make_bool_ast :: proc() -> ^ast.Ident {
- ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
+make_bool_ast :: proc(ast_context: ^AstContext) -> ^ast.Ident {
+ ident := index.new_type(ast.Ident, {}, {}, ast_context.allocator);
ident.name = "bool";
return ident;
}
-make_int_ast :: proc() -> ^ast.Ident {
- ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
+make_int_ast :: proc(ast_context: ^AstContext) -> ^ast.Ident {
+ ident := index.new_type(ast.Ident, {}, {}, ast_context.allocator);
ident.name = "int";
return ident;
}
-make_int_basic_value :: proc(n: int) -> ^ast.Basic_Lit {
- basic := index.new_type(ast.Basic_Lit, {}, {}, context.temp_allocator);
+make_int_basic_value :: proc(ast_context: ^AstContext, n: int) -> ^ast.Basic_Lit {
+ basic := index.new_type(ast.Basic_Lit, {}, {}, ast_context.allocator);
basic.tok.text = fmt.tprintf("%v", n);
return basic;
}
@@ -1678,8 +1643,8 @@ make_symbol_procedure_from_ast :: proc(ast_context: ^AstContext, n: ^ast.Node, v
name = name,
};
- return_types := make([dynamic]^ast.Field, context.temp_allocator);
- arg_types := make([dynamic]^ast.Field, context.temp_allocator);
+ return_types := make([dynamic]^ast.Field, ast_context.allocator);
+ arg_types := make([dynamic]^ast.Field, ast_context.allocator);
if v.results != nil {
for ret in v.results.list {
@@ -1784,20 +1749,6 @@ make_symbol_union_from_ast :: proc(ast_context: ^AstContext, v: ast.Union_Type,
symbol.name = "union";
}
- names := make([dynamic]string, context.temp_allocator);
-
- for variant in v.variants {
-
- if ident, ok := variant.derived.(ast.Ident); ok {
- append(&names, ident.name);
- } else if selector, ok := variant.derived.(ast.Selector_Expr); ok {
-
- if ident, ok := selector.field.derived.(ast.Ident); ok {
- append(&names, ident.name);
- }
- }
- }
-
symbol.value = index.SymbolUnionValue {
types = v.variants,
};
@@ -1823,7 +1774,7 @@ make_symbol_enum_from_ast :: proc(ast_context: ^AstContext, v: ast.Enum_Type, id
}
- names := make([dynamic]string, context.temp_allocator);
+ names := make([dynamic]string, ast_context.allocator);
for n in v.fields {
if ident, ok := n.derived.(ast.Ident); ok {
@@ -1877,15 +1828,15 @@ make_symbol_struct_from_ast :: proc(ast_context: ^AstContext, v: ast.Struct_Type
symbol.name = "struct";
}
- names := make([dynamic]string, context.temp_allocator);
- types := make([dynamic]^ast.Expr, context.temp_allocator);
- usings := make(map[string]bool, 0, context.temp_allocator);
+ names := make([dynamic]string, ast_context.allocator);
+ types := make([dynamic]^ast.Expr, ast_context.allocator);
+ usings := make(map[string]bool, 0, ast_context.allocator);
for field in v.fields.list {
for n in field.names {
if identifier, ok := n.derived.(ast.Ident); ok {
append(&names, identifier.name);
- append(&types, index.clone_type(field.type, context.temp_allocator, nil));
+ append(&types, index.clone_type(field.type, ast_context.allocator, nil));
if .Using in field.flags {
usings[identifier.name] = true;
@@ -2052,7 +2003,7 @@ get_generic_assignment :: proc(file: ast.File, value: ^ast.Expr, ast_context: ^A
switch v in &value.derived {
case Call_Expr:
ast_context.call = cast(^ast.Call_Expr)value;
-
+
if symbol, ok := resolve_type_expression(ast_context, v.expr); ok {
if procedure, ok := symbol.value.(index.SymbolProcedureValue); ok {
for ret in procedure.return_types {
@@ -2085,7 +2036,7 @@ get_generic_assignment :: proc(file: ast.File, value: ^ast.Expr, ast_context: ^A
append(results, v.type);
}
- b := make_bool_ast();
+ b := make_bool_ast(ast_context);
b.pos.file = v.type.pos.file;
append(results, b);
}
@@ -2131,7 +2082,6 @@ get_locals_value_decl :: proc(file: ast.File, value_decl: ast.Value_Decl, ast_co
}
get_locals_stmt :: proc(file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext, save_assign := false) {
-
ast_context.use_locals = true;
ast_context.use_globals = true;
ast_context.current_package = ast_context.document_package;
@@ -2184,7 +2134,6 @@ get_locals_stmt :: proc(file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContex
}
get_locals_block_stmt :: proc(file: ast.File, block: ast.Block_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
if !(block.pos.offset <= document_position.position && document_position.position <= block.end.offset) {
return;
}
@@ -2195,11 +2144,8 @@ get_locals_block_stmt :: proc(file: ast.File, block: ast.Block_Stmt, ast_context
}
get_locals_using_stmt :: proc(stmt: ast.Using_Stmt, ast_context: ^AstContext) {
-
for u in stmt.list {
-
if symbol, ok := resolve_type_expression(ast_context, u); ok {
-
#partial switch v in symbol.value {
case index.SymbolPackageValue:
if ident, ok := u.derived.(ast.Ident); ok {
@@ -2220,7 +2166,6 @@ get_locals_using_stmt :: proc(stmt: ast.Using_Stmt, ast_context: ^AstContext) {
}
get_locals_assign_stmt :: proc(file: ast.File, stmt: ast.Assign_Stmt, ast_context: ^AstContext) {
-
using ast;
if stmt.lhs == nil || stmt.rhs == nil {
@@ -2246,7 +2191,6 @@ get_locals_assign_stmt :: proc(file: ast.File, stmt: ast.Assign_Stmt, ast_contex
}
get_locals_if_stmt :: proc(file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
}
@@ -2257,7 +2201,6 @@ get_locals_if_stmt :: proc(file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstC
}
get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
using ast;
if !(stmt.body.pos.offset <= document_position.position && document_position.position <= stmt.body.end.offset) {
@@ -2297,7 +2240,7 @@ get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_cont
}
if len(stmt.vals) >= 2 {
if ident, ok := stmt.vals[1].derived.(Ident); ok {
- store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name, ast_context.local_id);
+ store_local(ast_context, make_int_ast(ast_context), ident.pos.offset, ident.name, ast_context.local_id);
ast_context.variables[ident.name] = true;
ast_context.in_package[ident.name] = symbol.pkg;
}
@@ -2313,7 +2256,7 @@ get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_cont
if len(stmt.vals) >= 2 {
if ident, ok := stmt.vals[1].derived.(Ident); ok {
- store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name, ast_context.local_id);
+ store_local(ast_context, make_int_ast(ast_context), ident.pos.offset, ident.name, ast_context.local_id);
ast_context.variables[ident.name] = true;
ast_context.in_package[ident.name] = symbol.pkg;
}
@@ -2328,19 +2271,18 @@ get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_cont
}
if len(stmt.vals) >= 2 {
if ident, ok := stmt.vals[1].derived.(Ident); ok {
- store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name, ast_context.local_id);
+ store_local(ast_context, make_int_ast(ast_context), ident.pos.offset, ident.name, ast_context.local_id);
ast_context.variables[ident.name] = true;
ast_context.in_package[ident.name] = symbol.pkg;
}
}
}
}
-
+
get_locals_stmt(file, stmt.body, ast_context, document_position);
}
get_locals_for_stmt :: proc(file: ast.File, stmt: ast.For_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
}
@@ -2350,7 +2292,6 @@ get_locals_for_stmt :: proc(file: ast.File, stmt: ast.For_Stmt, ast_context: ^As
}
get_locals_switch_stmt :: proc(file: ast.File, stmt: ast.Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
}
@@ -2359,7 +2300,6 @@ get_locals_switch_stmt :: proc(file: ast.File, stmt: ast.Switch_Stmt, ast_contex
}
get_locals_type_switch_stmt :: proc(file: ast.File, stmt: ast.Type_Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
using ast;
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
@@ -2371,11 +2311,8 @@ get_locals_type_switch_stmt :: proc(file: ast.File, stmt: ast.Type_Switch_Stmt,
}
if block, ok := stmt.body.derived.(Block_Stmt); ok {
-
for block_stmt in block.stmts {
-
if cause, ok := block_stmt.derived.(Case_Clause); ok && cause.pos.offset <= document_position.position && document_position.position <= cause.end.offset {
-
for b in cause.body {
get_locals_stmt(file, b, ast_context, document_position);
}
@@ -2400,9 +2337,7 @@ get_locals_proc_param_and_results :: proc(file: ast.File, function: ast.Proc_Lit
}
if proc_lit.type != nil && proc_lit.type.params != nil {
-
for arg in proc_lit.type.params.list {
-
for name in arg.names {
if arg.type != nil {
str := common.get_ast_node_string(name, file.src);
@@ -2427,9 +2362,7 @@ get_locals_proc_param_and_results :: proc(file: ast.File, function: ast.Proc_Lit
}
if proc_lit.type != nil && proc_lit.type.results != nil {
-
for result in proc_lit.type.results.list {
-
for name in result.names {
if result.type != nil {
str := common.get_ast_node_string(name, file.src);
@@ -2443,7 +2376,6 @@ get_locals_proc_param_and_results :: proc(file: ast.File, function: ast.Proc_Lit
}
get_locals :: proc(file: ast.File, function: ^ast.Node, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
proc_lit, ok := function.derived.(ast.Proc_Lit);
if !ok || proc_lit.body == nil {
@@ -2472,26 +2404,28 @@ clear_locals :: proc(ast_context: ^AstContext) {
clear(&ast_context.usings);
}
-resolve_entire_file :: proc(document: ^common.Document, ast_context: ^AstContext, allocator := context.allocator) -> []index.Symbol {
- get_globals(document.ast, ast_context);
+resolve_entire_file :: proc(document: ^common.Document, allocator := context.allocator) -> map[uintptr]index.Symbol {
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, allocator);
+
+ get_globals(document.ast, &ast_context);
ast_context.current_package = ast_context.document_package;
- symbols := make([dynamic]index.Symbol, allocator);
+ symbols := make(map[uintptr]index.Symbol, 100, allocator);
for k, v in ast_context.globals {
switch n in v.expr.derived {
case ast.Proc_Lit:
- resolve_entire_procedure(ast_context, n, &symbols, allocator);
- clear_local_group(ast_context, 0);
- add_local_group(ast_context, 0);
+ resolve_entire_procedure(&ast_context, n, &symbols, allocator);
+ clear_local_group(&ast_context, 0);
+ add_local_group(&ast_context, 0);
}
}
-
- return symbols[:];
+
+ return symbols;
}
-resolve_entire_procedure :: proc(ast_context: ^AstContext, procedure: ast.Proc_Lit, symbols: ^[dynamic]index.Symbol, allocator := context.allocator) {
+resolve_entire_procedure :: proc(ast_context: ^AstContext, procedure: ast.Proc_Lit, symbols: ^map[uintptr]index.Symbol, allocator := context.allocator) {
Scope :: struct {
offset: int,
id: int,
@@ -2499,7 +2433,7 @@ resolve_entire_procedure :: proc(ast_context: ^AstContext, procedure: ast.Proc_L
Visit_Data :: struct {
ast_context: ^AstContext,
- symbols: ^[dynamic]index.Symbol,
+ symbols: ^map[uintptr]index.Symbol,
scopes: [dynamic]Scope,
id_counter: int,
}
@@ -2538,7 +2472,7 @@ resolve_entire_procedure :: proc(ast_context: ^AstContext, procedure: ast.Proc_L
}
switch v in &node.derived {
- case ast.If_Stmt, ast.For_Stmt:
+ case ast.If_Stmt, ast.For_Stmt, ast.Range_Stmt, ast.Inline_Range_Stmt:
scope: Scope;
scope.id = data.id_counter;
scope.offset = node.end.offset;
@@ -2553,18 +2487,18 @@ resolve_entire_procedure :: proc(ast_context: ^AstContext, procedure: ast.Proc_L
get_locals_stmt(ast_context.file, cast(^ast.Stmt)node, ast_context, &position_context);
}
-
switch v in &node.derived {
- case ast.If_Stmt:
- case ast.For_Stmt:
- //get_locals_stmt(ast_context.file, v, ast_context, )
case ast.Ident:
if symbol, ok := resolve_type_identifier(ast_context, v); ok {
- append(data.symbols, symbol);
+ data.symbols[cast(uintptr)node] = symbol;
}
case ast.Selector_Expr:
if symbol, ok := resolve_type_expression(ast_context, &v.node); ok {
- append(data.symbols, symbol);
+ data.symbols[cast(uintptr)node] = symbol;
+ }
+ case ast.Call_Expr:
+ if symbol, ok := resolve_type_expression(ast_context, &v.node); ok {
+ data.symbols[cast(uintptr)node] = symbol;
}
}
@@ -2680,7 +2614,7 @@ get_signature :: proc(ast_context: ^AstContext, ident: ast.Ident, symbol: index.
return "enum";
}
case SymbolMapValue:
- return strings.concatenate(a = {"map[", common.node_to_string(v.key), "]", common.node_to_string(v.value)}, allocator = context.temp_allocator);
+ return strings.concatenate(a = {"map[", common.node_to_string(v.key), "]", common.node_to_string(v.value)}, allocator = ast_context.allocator);
case SymbolProcedureValue:
return "proc";
case SymbolStructValue:
@@ -2698,11 +2632,11 @@ get_signature :: proc(ast_context: ^AstContext, ident: ast.Ident, symbol: index.
return "union";
}
case SymbolDynamicArrayValue:
- return strings.concatenate(a = {"[dynamic]", common.node_to_string(v.expr)}, allocator = context.temp_allocator);
+ return strings.concatenate(a = {"[dynamic]", common.node_to_string(v.expr)}, allocator = ast_context.allocator);
case SymbolSliceValue:
- return strings.concatenate(a = {"[]", common.node_to_string(v.expr)}, allocator = context.temp_allocator);
+ return strings.concatenate(a = {"[]", common.node_to_string(v.expr)}, allocator = ast_context.allocator);
case SymbolFixedArrayValue:
- return strings.concatenate(a = {"[", common.node_to_string(v.len), "]", common.node_to_string(v.expr)}, allocator = context.temp_allocator);
+ return strings.concatenate(a = {"[", common.node_to_string(v.len), "]", common.node_to_string(v.expr)}, allocator = ast_context.allocator);
case SymbolPackageValue:
return "package";
case SymbolUntypedValue:
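
The analysis.odin changes above drop the per-offset symbol cache in favor of a simple recursion guard plus allocations routed through ast_context.allocator. A minimal sketch of that guard pattern; guarded_resolve and MAX_RECURSION_DEPTH are illustrative names only, the diff hard-codes the depth of 15 directly inside resolve_type_expression and resolve_type_identifier:

MAX_RECURSION_DEPTH :: 15;

guarded_resolve :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
	if node == nil {
		return {}, false;
	}

	//Bail out once the counter passes the fixed depth, as the malformed-AST comment describes.
	if ast_context.recursion_counter > MAX_RECURSION_DEPTH {
		log.error("Recursion passed 15 attempts - giving up");
		return {}, false;
	}

	ast_context.recursion_counter += 1;
	defer ast_context.recursion_counter -= 1; //always undo the increment on every exit path

	//...recurse into the node's sub-expressions here, exactly as resolve_type_expression does above.
	return {}, false;
}
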
diff --git a/src/common/config.odin b/src/common/config.odin
index c7bf2da..afdcdeb 100644
--- a/src/common/config.odin
+++ b/src/common/config.odin
@@ -8,11 +8,11 @@ Config :: struct {
collections: map[string]string,
running: bool,
verbose: bool,
- debug_single_thread: bool,
enable_format: bool,
enable_hover: bool,
enable_document_symbols: bool,
enable_semantic_tokens: bool,
+ enable_inlay_hints: bool,
enable_procedure_context: bool,
enable_snippets: bool,
thread_count: int,
diff --git a/src/common/pool.odin b/src/common/pool.odin
deleted file mode 100644
index d1e5641..0000000
--- a/src/common/pool.odin
+++ /dev/null
@@ -1,156 +0,0 @@
-package common
-
-import "core:intrinsics"
-import "core:sync"
-import "core:mem"
-import "core:thread"
-
-Task_Status :: enum i32 {
- Ready,
- Busy,
- Waiting,
- Term,
-}
-
-Task_Proc :: proc(task: ^Task);
-
-Task :: struct {
- procedure: Task_Proc,
- data: rawptr,
- user_index: int,
-}
-
-Task_Id :: distinct i32;
-INVALID_TASK_ID :: Task_Id(-1);
-
-Pool :: struct {
- allocator: mem.Allocator,
- mutex: sync.Mutex,
- sem_available: sync.Semaphore,
- processing_task_count: int, // atomic
- is_running: bool,
- threads: []^thread.Thread,
- tasks: [dynamic]Task,
-}
-
-pool_init :: proc(pool: ^Pool, thread_count: int, allocator := context.allocator) {
- worker_thread_internal :: proc(t: ^thread.Thread) {
- pool := (^Pool)(t.data);
-
- temp_allocator: Scratch_Allocator;
-
- scratch_allocator_init(&temp_allocator, mem.megabytes(6));
-
- context.temp_allocator = scratch_allocator(&temp_allocator);
-
- for pool.is_running {
- sync.semaphore_wait_for(&pool.sem_available);
-
- if task, ok := pool_try_and_pop_task(pool); ok {
- pool_do_work(pool, &task);
- }
-
- free_all(context.temp_allocator);
- }
-
- scratch_allocator_destroy(&temp_allocator);
-
- sync.semaphore_post(&pool.sem_available, 1);
- };
-
- context.allocator = allocator;
- pool.allocator = allocator;
- pool.tasks = make([dynamic]Task);
- pool.threads = make([]^thread.Thread, thread_count);
-
- sync.mutex_init(&pool.mutex);
- sync.semaphore_init(&pool.sem_available);
- pool.is_running = true;
-
- for _, i in pool.threads {
- t := thread.create(worker_thread_internal);
- t.user_index = i;
- t.data = pool;
- pool.threads[i] = t;
- }
-}
-
-pool_destroy :: proc(pool: ^Pool) {
- delete(pool.tasks);
-
- for t in &pool.threads {
- thread.destroy(t);
- }
-
- delete(pool.threads, pool.allocator);
-
- sync.mutex_destroy(&pool.mutex);
- sync.semaphore_destroy(&pool.sem_available);
-}
-
-pool_start :: proc(pool: ^Pool) {
- for t in pool.threads {
- thread.start(t);
- }
-}
-
-pool_join :: proc(pool: ^Pool) {
- pool.is_running = false;
-
- sync.semaphore_post(&pool.sem_available, len(pool.threads));
-
- thread.yield();
-
- for t in pool.threads {
- thread.join(t);
- }
-}
-
-pool_add_task :: proc(pool: ^Pool, procedure: Task_Proc, data: rawptr, user_index: int = 0) {
- sync.mutex_lock(&pool.mutex);
- defer sync.mutex_unlock(&pool.mutex);
-
- task: Task;
- task.procedure = procedure;
- task.data = data;
- task.user_index = user_index;
-
- append(&pool.tasks, task);
- sync.semaphore_post(&pool.sem_available, 1);
-}
-
-pool_try_and_pop_task :: proc(pool: ^Pool) -> (task: Task, got_task: bool = false) {
- if sync.mutex_try_lock(&pool.mutex) {
- if len(pool.tasks) != 0 {
- intrinsics.atomic_add(&pool.processing_task_count, 1);
- task = pop_front(&pool.tasks);
- got_task = true;
- }
- sync.mutex_unlock(&pool.mutex);
- }
- return;
-}
-
-pool_do_work :: proc(pool: ^Pool, task: ^Task) {
- task.procedure(task);
- intrinsics.atomic_sub(&pool.processing_task_count, 1);
-}
-
-pool_wait_and_process :: proc(pool: ^Pool) {
- for len(pool.tasks) != 0 || intrinsics.atomic_load(&pool.processing_task_count) != 0 {
- if task, ok := pool_try_and_pop_task(pool); ok {
- pool_do_work(pool, &task);
- }
-
- // Safety kick
- if len(pool.tasks) != 0 && intrinsics.atomic_load(&pool.processing_task_count) == 0 {
- sync.mutex_lock(&pool.mutex);
- sync.semaphore_post(&pool.sem_available, len(pool.tasks));
- sync.mutex_unlock(&pool.mutex);
- }
-
- thread.yield();
- }
-
- pool_join(pool);
-}
diff --git a/src/common/types.odin b/src/common/types.odin
index 0956e89..c99bc3c 100644
--- a/src/common/types.odin
+++ b/src/common/types.odin
@@ -39,7 +39,6 @@ Document :: struct {
package_name: string,
allocator: ^Scratch_Allocator, //because parser does not support freeing I use arena allocators for each document
operating_on: int, //atomic
- symbol_cache: map[int]rawptr, //Stores all the symbol data for this current iteration of the file. Gets cleared every change.
}
parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
diff --git a/src/main.odin b/src/main.odin
index fd39745..6ac8f2e 100644
--- a/src/main.odin
+++ b/src/main.odin
@@ -17,6 +17,8 @@ import "shared:index"
import "shared:server"
import "shared:common"
+
+
os_read :: proc(handle: rawptr, data: []byte) -> (int, int) {
ptr := cast(^os.Handle)handle;
a, b := os.read(ptr^, data);
@@ -32,10 +34,10 @@ os_write :: proc(handle: rawptr, data: []byte) -> (int, int) {
//Note(Daniel, Should look into handling errors without crashing from parsing)
verbose_logger: log.Logger;
+file_logger: log.Logger;
+file_logger_init: bool;
run :: proc(reader: ^server.Reader, writer: ^server.Writer) {
-
- common.config.debug_single_thread = true;
common.config.collections = make(map[string]string);
log.info("Starting Odin Language Server");
@@ -44,11 +46,20 @@ run :: proc(reader: ^server.Reader, writer: ^server.Writer) {
for common.config.running {
- if common.config.verbose {
+ if common.config.file_log {
+ if !file_logger_init {
+ if fh, err := os.open("log.txt"); err == 0 {
+ file_logger = log.create_file_logger(fh, log.Level.Info);
+ }
+ }
+ context.logger = file_logger;
+ } else if common.config.verbose {
context.logger = verbose_logger;
} else {
context.logger = log.Logger {nil, nil, log.Level.Debug, nil};
}
+ a: int;
+ b: int;
header, success := server.read_and_parse_header(reader);
@@ -86,9 +97,6 @@ run :: proc(reader: ^server.Reader, writer: ^server.Writer) {
server.document_storage_shutdown();
index.free_static_index();
-
- common.pool_wait_and_process(&server.pool);
- common.pool_destroy(&server.pool);
}
end :: proc() {
diff --git a/src/server/caches.odin b/src/server/caches.odin
new file mode 100644
index 0000000..640677f
--- /dev/null
+++ b/src/server/caches.odin
@@ -0,0 +1,19 @@
+package server
+
+import "shared:index"
+import "shared:analysis"
+import "shared:common"
+
+//Used in semantic tokens and inlay hints to handle the entire file being resolved.
+FileResolveCache :: struct {
+ files: map[string]map[uintptr]index.Symbol,
+}
+
+file_resolve_cache: FileResolveCache
+
+resolve_entire_file :: proc(document: ^common.Document) {
+ file_resolve_cache.files[document.uri.uri] = analysis.resolve_entire_file(
+ document,
+ common.scratch_allocator(document.allocator),
+ )
+}
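
The new caches.odin stores one symbol map per open file, keyed by the AST node's pointer value. A hedged sketch of how a consumer could read it back; the helper name lookup_resolved_symbol is illustrative (the consumers in this commit index the map inline), and it assumes the server package's existing imports of shared:index, shared:common and core:odin/ast:

lookup_resolved_symbol :: proc(document: ^common.Document, node: ^ast.Node) -> (index.Symbol, bool) {
	//First find the per-file map produced by resolve_entire_file.
	if symbols, ok := file_resolve_cache.files[document.uri.uri]; ok {
		//Then use the node pointer as the key, matching resolve_entire_procedure's writes.
		if symbol, node_ok := symbols[cast(uintptr)node]; node_ok {
			return symbol, true;
		}
	}
	return {}, false;
}
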
diff --git a/src/server/completion.odin b/src/server/completion.odin
index e869006..6b24d9d 100644
--- a/src/server/completion.odin
+++ b/src/server/completion.odin
@@ -49,7 +49,7 @@ get_completion_list :: proc(document: ^common.Document, position: common.Positio
return list, true;
}
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
get_globals(document.ast, &ast_context);
diff --git a/src/server/definition.odin b/src/server/definition.odin
index 52e09dd..116fbd9 100644
--- a/src/server/definition.odin
+++ b/src/server/definition.odin
@@ -26,7 +26,7 @@ get_definition_location :: proc(document: ^common.Document, position: common.Pos
location: common.Location;
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
uri: string;
diff --git a/src/server/document_symbols.odin b/src/server/document_symbols.odin
index 3d3a296..7105465 100644
--- a/src/server/document_symbols.odin
+++ b/src/server/document_symbols.odin
@@ -23,7 +23,7 @@ import "shared:analysis"
get_document_symbols :: proc(document: ^common.Document) -> []DocumentSymbol {
using analysis;
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
get_globals(document.ast, &ast_context);
diff --git a/src/server/documents.odin b/src/server/documents.odin
index 94c0cd6..4f5610a 100644
--- a/src/server/documents.odin
+++ b/src/server/documents.odin
@@ -23,7 +23,7 @@ ParserError :: struct {
}
DocumentStorage :: struct {
- documents: map[string] common.Document,
+ documents: map[string]common.Document,
free_allocators: [dynamic]^common.Scratch_Allocator,
}
@@ -224,8 +224,8 @@ document_close :: proc(uri_string: string) -> common.Error {
free_all(common.scratch_allocator(document.allocator));
document_free_allocator(document.allocator);
- document.allocator = nil;
+ document.allocator = nil;
document.client_owned = false;
common.delete_uri(document.uri);
@@ -298,6 +298,12 @@ document_refresh :: proc(document: ^common.Document, config: ^common.Config, wri
}
}
+ //We only resolve the entire file, if we are dealing with the heavy features that require the entire file resolved.
+ //This gives the user a choice to use "fast mode" with only completion and gotos.
+ if config.enable_semantic_tokens || config.enable_inlay_hints {
+ resolve_entire_file(document);
+ }
+
return .None;
}
@@ -324,8 +330,6 @@ parse_document :: proc(document: ^common.Document, config: ^common.Config) -> ([
context.allocator = common.scratch_allocator(document.allocator);
- document.symbol_cache = make(map[int]rawptr, 10, common.scratch_allocator(document.allocator));
-
//have to cheat the parser since it really wants to parse an entire package with the new changes...
pkg := new(ast.Package);
pkg.kind = .Normal;
diff --git a/src/server/hover.odin b/src/server/hover.odin
index 1a45b8c..e7c5dba 100644
--- a/src/server/hover.odin
+++ b/src/server/hover.odin
@@ -57,7 +57,7 @@ get_hover_information :: proc(document: ^common.Document, position: common.Posit
},
};
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
position_context, ok := get_document_position_context(document, position, .Hover);
diff --git a/src/server/inlay_hints.odin b/src/server/inlay_hints.odin
index e67aac9..1e7d2c3 100644
--- a/src/server/inlay_hints.odin
+++ b/src/server/inlay_hints.odin
@@ -8,20 +8,19 @@ import "shared:analysis"
import "shared:index"
//document
-get_inlay_hints :: proc(document: ^common.Document) -> ([]InlayHint, bool) {
-
+get_inlay_hints :: proc(document: ^common.Document, symbols: map[uintptr]index.Symbol) -> ([]InlayHint, bool) {
using analysis;
hints := make([dynamic]InlayHint, context.temp_allocator);
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
Visit_Data :: struct {
- calls: [dynamic]ast.Call_Expr,
+ calls: [dynamic]^ast.Node,
}
data := Visit_Data {
- calls = make([dynamic]ast.Call_Expr, context.temp_allocator),
+ calls = make([dynamic]^ast.Node, context.temp_allocator),
};
visit :: proc(visitor: ^ast.Visitor, node: ^ast.Node) -> ^ast.Visitor {
@@ -32,7 +31,7 @@ get_inlay_hints :: proc(document: ^common.Document) -> ([]InlayHint, bool) {
data := cast(^Visit_Data)visitor.data;
if call, ok := node.derived.(ast.Call_Expr); ok {
- append(&data.calls, call);
+ append(&data.calls, node);
}
return visitor;
@@ -47,15 +46,18 @@ get_inlay_hints :: proc(document: ^common.Document) -> ([]InlayHint, bool) {
ast.walk(&visitor, decl);
}
- loop: for call in &data.calls {
+ loop: for node_call in &data.calls {
symbol_arg_count := 0
+
+ call := node_call.derived.(ast.Call_Expr);
+
for arg in call.args {
if _, ok := arg.derived.(ast.Field); ok {
continue loop;
}
}
- if symbol, ok := resolve_type_expression(&ast_context, &call); ok {
+ if symbol, ok := symbols[cast(uintptr)node_call]; ok {
if symbol_call, ok := symbol.value.(index.SymbolProcedureValue); ok {
for arg in symbol_call.arg_types {
for name in arg.names {
@@ -66,7 +68,7 @@ get_inlay_hints :: proc(document: ^common.Document) -> ([]InlayHint, bool) {
if ident, ok := name.derived.(ast.Ident); ok {
hint := InlayHint {
kind = "parameter",
- label = fmt.tprintf("%v:", ident.name),
+ label = fmt.tprintf("%v = ", ident.name),
range = common.get_token_range(call.args[symbol_arg_count], string(document.text)),
}
append(&hints, hint);
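
get_inlay_hints now expects the pre-resolved symbol map instead of resolving each call expression itself. The request handler that feeds it is cut off at the end of this excerpt, so the following is only a hedged sketch of how a caller might wire the two together, assuming it already holds document:

if symbols, ok := file_resolve_cache.files[document.uri.uri]; ok {
	//Hand the cached per-node symbols to the hint builder instead of an AstContext resolve.
	if hints, hints_ok := get_inlay_hints(document, symbols); hints_ok {
		//...package hints into the response here.
	}
}
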
diff --git a/src/server/lens.odin b/src/server/lens.odin
index 822a115..9d80d0c 100644
--- a/src/server/lens.odin
+++ b/src/server/lens.odin
@@ -24,7 +24,7 @@ get_code_lenses :: proc(document: ^common.Document, position: common.Position) -
using analysis;
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
get_globals(document.ast, &ast_context);
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 4fc4b48..5ab8dde 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -56,11 +56,6 @@ RequestInfo :: struct {
result: common.Error,
}
-pool: common.Pool;
-
-get_request_info :: proc (task: ^common.Task) -> ^RequestInfo {
- return cast(^RequestInfo)task.data;
-}
make_response_message :: proc (id: RequestId, params: ResponseParams) -> ResponseMessage {
@@ -159,7 +154,7 @@ read_and_parse_body :: proc (reader: ^Reader, header: Header) -> (json.Value, bo
err: json.Error;
- value, err = json.parse(data = data, allocator = context.allocator, parse_integers = true);
+ value, err = json.parse(data = data, allocator = context.temp_allocator, parse_integers = true);
if (err != json.Error.None) {
log.error("Failed to parse body");
@@ -169,41 +164,27 @@ read_and_parse_body :: proc (reader: ^Reader, header: Header) -> (json.Value, bo
return value, true;
}
-request_map: map[string]RequestType = {
- "initialize" = .Initialize,
- "initialized" = .Initialized,
- "shutdown" = .Shutdown,
- "exit" = .Exit,
- "textDocument/didOpen" = .DidOpen,
- "textDocument/didChange" = .DidChange,
- "textDocument/didClose" = .DidClose,
- "textDocument/didSave" = .DidSave,
- "textDocument/definition" = .Definition,
- "textDocument/completion" = .Completion,
- "textDocument/signatureHelp" = .SignatureHelp,
- "textDocument/documentSymbol" = .DocumentSymbol,
- "textDocument/semanticTokens/full" = .SemanticTokensFull,
- "textDocument/semanticTokens/range" = .SemanticTokensRange,
- "textDocument/hover" = .Hover,
- "$/cancelRequest" = .CancelRequest,
- "textDocument/formatting" = .FormatDocument,
- "odin/inlayHints" = .InlayHint,
+call_map : map [string] proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error =
+{
+ "initialize" = request_initialize,
+ "initialized" = request_initialized,
+ "shutdown" = request_shutdown,
+ "exit" = notification_exit,
+ "textDocument/didOpen" = notification_did_open,
+ "textDocument/didChange" = notification_did_change,
+ "textDocument/didClose" = notification_did_close,
+ "textDocument/didSave" = notification_did_save,
+ "textDocument/definition" = request_definition,
+ "textDocument/completion" = request_completion,
+ "textDocument/signatureHelp" = request_signature_help,
+ "textDocument/documentSymbol" = request_document_symbols,
+ "textDocument/semanticTokens/full" = request_semantic_token_full,
+ "textDocument/semanticTokens/range" = request_semantic_token_range,
+ "textDocument/hover" = request_hover,
+ "textDocument/formatting" = request_format_document,
};
-handle_error :: proc (err: common.Error, id: RequestId, writer: ^Writer) {
-
- if err != .None {
-
- response := make_response_message_error(
- id = id,
- error = ResponseError {code = err, message = ""});
-
- send_error(response, writer);
- }
-}
-
handle_request :: proc (request: json.Value, config: ^common.Config, writer: ^Writer) -> bool {
-
root, ok := request.(json.Object);
if !ok {
@@ -229,8 +210,8 @@ handle_request :: proc (request: json.Value, config: ^common.Config, writer: ^Wr
method := root["method"].(json.String);
- request_type: RequestType;
- request_type, ok = request_map[method];
+ fn: proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error;
+ fn, ok = call_map[method];
if !ok {
response := make_response_message_error(
@@ -239,157 +220,31 @@ handle_request :: proc (request: json.Value, config: ^common.Config, writer: ^Wr
send_error(response, writer);
} else {
-
- info := new(RequestInfo);
-
- info.root = request;
- info.params = root["params"];
- info.id = id;
- info.config = config;
- info.writer = writer;
-
- task_proc: common.Task_Proc;
-
- switch request_type {
- case .Initialize:
- task_proc = request_initialize;
- case .Initialized:
- task_proc = request_initialized;
- case .Shutdown:
- task_proc = request_shutdown;
- case .Exit:
- task_proc = notification_exit;
- case .DidOpen:
- task_proc = notification_did_open;
- case .DidChange:
- task_proc = notification_did_change;
- case .DidClose:
- task_proc = notification_did_close;
- case .DidSave:
- task_proc = notification_did_save;
- case .Definition:
- task_proc = request_definition;
- case .Completion:
- task_proc = request_completion;
- case .SignatureHelp:
- task_proc = request_signature_help;
- case .DocumentSymbol:
- task_proc = request_document_symbols;
- case .SemanticTokensFull:
- task_proc = request_semantic_token_full;
- case .SemanticTokensRange:
- task_proc = request_semantic_token_range;
- case .Hover:
- task_proc = request_hover;
- case .CancelRequest:
- case .FormatDocument:
- task_proc = request_format_document;
- case .InlayHint:
- task_proc = request_inlay_hint;
- }
-
- task := common.Task {
- data = info,
- procedure = task_proc,
- };
-
- #partial switch request_type {
- case .CancelRequest:
- for {
- if task, ok := common.pool_try_and_pop_task(&pool); ok {
- common.pool_do_work(&pool, &task);
- } else {
- break;
- }
- }
- case .Initialize, .Initialized:
- task_proc(&task);
- case .Completion, .Definition, .Hover, .FormatDocument:
-
- uri := root["params"].(json.Object)["textDocument"].(json.Object)["uri"].(json.String);
-
- document := document_get(uri);
-
- if document == nil {
- handle_error(.InternalError, id, writer);
- return false;
- }
-
- info.document = document;
-
- task_proc(&task);
-
- case .DidClose, .DidChange, .DidOpen, .DidSave:
-
- uri := root["params"].(json.Object)["textDocument"].(json.Object)["uri"].(json.String);
-
- document := document_get(uri);
-
- if document != nil {
-
- for intrinsics.atomic_load(&document.operating_on) > 1 {
- if task, ok := common.pool_try_and_pop_task(&pool); ok {
- common.pool_do_work(&pool, &task);
- }
- }
- }
-
- task_proc(&task);
-
- document_release(document);
- case .Shutdown,.Exit:
- task_proc(&task);
- case .SignatureHelp, .SemanticTokensFull, .SemanticTokensRange, .DocumentSymbol, .InlayHint:
-
- uri := root["params"].(json.Object)["textDocument"].(json.Object)["uri"].(json.String);
-
- document := document_get(uri);
-
- if document == nil {
- handle_error(.InternalError, id, writer);
- return false;
- }
-
- info.document = document;
-
- if !config.debug_single_thread {
- common.pool_add_task(&pool, task_proc, info);
- } else {
- task_proc(&task);
- }
- case:
-
- if !config.debug_single_thread {
- common.pool_add_task(&pool, task_proc, info);
- } else {
- task_proc(&task);
- }
- }
+ err := fn(root["params"], id, config, writer);
+ if err != .None {
+ response := make_response_message_error(
+ id = id,
+ error = ResponseError {code = err, message = ""},
+ );
+ send_error(response, writer);
+ }
}
return true;
}
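
The rewritten handle_request above replaces the task-pool indirection with direct, synchronous dispatch: the JSON-RPC method string indexes call_map, the handler runs on the calling thread, and any non-.None common.Error is turned into an error response. A condensed sketch of that flow; dispatch is an illustrative name, not a procedure added by the diff:

dispatch :: proc(method: string, params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> bool {
	fn, ok := call_map[method];

	if !ok {
		return false; //handle_request answers unknown methods with a MethodNotFound-style error response instead
	}

	if err := fn(params, id, config, writer); err != .None {
		response := make_response_message_error(
			id = id,
			error = ResponseError{code = err, message = ""},
		);
		send_error(response, writer);
	}

	return true;
}
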
-request_initialize :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
+request_initialize :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
- defer free(info);
- defer json.destroy_value(info.root);
-
params_object, ok := params.(json.Object);
-
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+
+ if !ok {
+ return .ParseError;
+ }
initialize_params: RequestInitializeParams;
if unmarshal(params, initialize_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
config.workspace_folders = make([dynamic]common.WorkspaceFolder);
@@ -424,6 +279,7 @@ request_initialize :: proc (task: ^common.Task) {
config.formatter = ols_config.formatter;
config.odin_command = strings.clone(ols_config.odin_command, context.allocator);
config.checker_args = ols_config.checker_args;
+ config.enable_inlay_hints = ols_config.enable_inlay_hints;
for p in ols_config.collections {
@@ -479,8 +335,6 @@ request_initialize :: proc (task: ^common.Task) {
config.collections["vendor"] = path.join(elems = {forward_path, "vendor"}, allocator = context.allocator);
}
- common.pool_init(&pool, config.thread_count);
- common.pool_start(&pool);
for format in initialize_params.capabilities.textDocument.hover.contentFormat {
if format == "markdown" {
@@ -578,58 +432,40 @@ request_initialize :: proc (task: ^common.Task) {
}
log.info("Finished indexing");
-}
-
-request_initialized :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
- json.destroy_value(root);
- free(info);
+ return .None;
}
-request_shutdown :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
-
- defer {
- json.destroy_value(root);
- free(info);
- }
-
- response := make_response_message(
- params = nil,
- id = id);
-
- send_response(response, writer);
+request_initialized :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+ return .None;
}
-request_definition :: proc (task: ^common.Task) {
- info := get_request_info(task);
+request_shutdown :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+ response := make_response_message(params = nil, id = id);
- using info;
+ send_response(response, writer);
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+request_definition :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
definition_params: TextDocumentPositionParams;
if unmarshal(params, definition_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+
+ document := document_get(definition_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
locations, ok2 := get_definition_location(document, definition_params.position);
@@ -644,260 +480,192 @@ request_definition :: proc (task: ^common.Task) {
response := make_response_message(params = locations, id = id);
send_response(response, writer);
}
-}
-
-request_completion :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+request_completion :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
completition_params: CompletionParams;
if unmarshal(params, completition_params, context.temp_allocator) != .None {
log.error("Failed to unmarshal completion request");
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
- //context.allocator = common.scratch_allocator(document.allocator);
+ document := document_get(completition_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
list: CompletionList;
list, ok = get_completion_list(document, completition_params.position, completition_params.context_);
if !ok {
- handle_error(.InternalError, id, writer);
- return;
+ return .InternalError;
}
- response := make_response_message(
- params = list,
- id = id);
+ response := make_response_message(params = list, id = id);
send_response(response, writer);
-}
-
-request_signature_help :: proc (task: ^common.Task) {
- info := get_request_info(task);
- using info;
-
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+request_signature_help :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
signature_params: SignatureHelpParams;
if unmarshal(params, signature_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(signature_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
help: SignatureHelp;
help, ok = get_signature_information(document, signature_params.position);
if !ok {
- handle_error(.InternalError, id, writer);
- return;
+ return .InternalError;
}
- response := make_response_message(
- params = help,
- id = id);
+ response := make_response_message(params = help, id = id);
send_response(response, writer);
-}
-request_format_document :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
-
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+request_format_document :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
format_params: DocumentFormattingParams;
if unmarshal(params, format_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(format_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
edit: []TextEdit;
edit, ok = get_complete_format(document, config);
if !ok {
- handle_error(.InternalError, id, writer);
- return;
+ return .InternalError;
}
- response := make_response_message(
- params = edit,
- id = id);
+ response := make_response_message(params = edit, id = id);
send_response(response, writer);
-}
-
-notification_exit :: proc (task: ^common.Task) {
- info := get_request_info(task);
- using info;
- defer {
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+notification_exit :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
config.running = false;
+ return .None;
}
-notification_did_open :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
-
- defer {
- json.destroy_value(root);
- free(info);
- }
-
+notification_did_open :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
log.error("Failed to parse open document notification");
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
open_params: DidOpenTextDocumentParams;
if unmarshal(params, open_params, context.allocator) != .None {
log.error("Failed to parse open document notification");
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None {
- handle_error(n, id, writer);
+ return .InternalError;
}
-}
-
-notification_did_change :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
- defer {
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+notification_did_change :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
change_params: DidChangeTextDocumentParams;
if unmarshal(params, change_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer);
-}
-notification_did_close :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
-
- defer {
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+notification_did_close :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
close_params: DidCloseTextDocumentParams;
if unmarshal(params, close_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
if n := document_close(close_params.textDocument.uri); n != .None {
- handle_error(n, id, writer);
- return;
+ return .InternalError;
}
-}
-
-notification_did_save :: proc (task: ^common.Task) {
- info := get_request_info(task);
- using info;
-
- defer {
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+notification_did_save :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
save_params: DidSaveTextDocumentParams;
if unmarshal(params, save_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
uri: common.Uri;
if uri, ok = common.parse_uri(save_params.textDocument.uri, context.temp_allocator); !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
fullpath := uri.path;
@@ -942,34 +710,29 @@ notification_did_save :: proc (task: ^common.Task) {
}
check(uri, writer, config);
-}
-
-request_semantic_token_full :: proc (task: ^common.Task) {
- info := get_request_info(task);
- using info;
-
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
-
- }
+ return .None;
+}
+request_semantic_token_full :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
semantic_params: SemanticTokensParams;
if unmarshal(params, semantic_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(semantic_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
range := common.Range {
start = common.Position {
line = 0,
@@ -982,155 +745,145 @@ request_semantic_token_full :: proc (task: ^common.Task) {
symbols: SemanticTokens;
if config.enable_semantic_tokens {
- symbols = get_semantic_tokens(document, range);
+ if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok {
+ symbols = get_semantic_tokens(document, range, cache_symbols);
+ }
}
- response := make_response_message(
- params = symbols,
- id = id);
+ response := make_response_message(params = symbols, id = id);
send_response(response, writer);
-}
-
-request_semantic_token_range :: proc (task: ^common.Task) {
- info := get_request_info(task);
- using info;
+ return .None;
+}
+request_semantic_token_range :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
-
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
semantic_params: SemanticTokensRangeParams;
if unmarshal(params, semantic_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(semantic_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
symbols: SemanticTokens;
if config.enable_semantic_tokens {
- symbols = get_semantic_tokens(document, semantic_params.range);
+ if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok {
+ symbols = get_semantic_tokens(document, semantic_params.range, cache_symbols);
+ }
}
- response := make_response_message(
- params = symbols,
- id = id);
+ response := make_response_message(params = symbols, id = id);
send_response(response, writer);
-}
-
-request_document_symbols :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
+ return .None;
+}
+request_document_symbols :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
symbol_params: DocumentSymbolParams;
if unmarshal(params, symbol_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(symbol_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
symbols := get_document_symbols(document);
- response := make_response_message(
- params = symbols,
- id = id);
+ response := make_response_message(params = symbols, id = id);
send_response(response, writer);
-}
-request_hover :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
+ return .None;
+}
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
-
+request_hover :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
hover_params: HoverParams;
if unmarshal(params, hover_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
}
+ document := document_get(hover_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
hover: Hover;
hover, ok = get_hover_information(document, hover_params.position);
if !ok {
- handle_error(.InternalError, id, writer);
- return;
+ return .InternalError;
}
- response := make_response_message(
- params = hover,
- id = id);
+ response := make_response_message(params = hover, id = id);
send_response(response, writer);
-}
-request_inlay_hint :: proc (task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
+ return .None;
+}
- defer {
- document_release(document);
- json.destroy_value(root);
- free(info);
- }
-
+request_inlay_hint :: proc (params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
params_object, ok := params.(json.Object);
if !ok {
- handle_error(.ParseError, id, writer);
- return;
+ return .ParseError;
+ }
+
+ inlay_params: InlayParams;
+
+ if unmarshal(params, inlay_params, context.temp_allocator) != .None {
+ return .ParseError;
}
+ document := document_get(inlay_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
hints: []InlayHint;
- hints, ok = get_inlay_hints(document);
+
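+ // Note: 'ok' inside the if statement is a new, if-scoped variable from the cache lookup, so a
+ // failed get_inlay_hints call never reaches the '!ok' check below; when the cache entry or the
+ // config flag is missing, the hints simply stay empty.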
+ if cache_symbols, ok := file_resolve_cache.files[document.uri.uri]; ok && config.enable_inlay_hints {
+ hints, ok = get_inlay_hints(document, cache_symbols);
+ }
if !ok {
- handle_error(.InternalError, id, writer);
- return;
+ return .InternalError;
}
response := make_response_message(params = hints, id = id);
send_response(response, writer);
+
+ return .None;
}
\ No newline at end of file
diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin
index 095bb91..2977ddc 100644
--- a/src/server/semantic_tokens.odin
+++ b/src/server/semantic_tokens.odin
@@ -3,6 +3,7 @@ package server
import "core:odin/tokenizer"
import "core:odin/ast"
import "core:log"
+import "core:fmt"
import "shared:common"
import "shared:index"
@@ -32,11 +33,11 @@ SemanticTokenTypes :: enum {
Method,
}
-SemanticTokenModifiers :: enum {
- None,
- Declaration,
- Definition,
- Deprecated,
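+// LSP encodes token modifiers as a bit mask in the fifth integer of each token tuple; each value
+// below is presumably meant to correspond to the bit for its index in the modifier legend.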
+SemanticTokenModifiers :: enum u32 {
+ None = 0,
+ Declaration = 2,
+ Definition = 4,
+ Deprecated = 8,
}
SemanticTokensClientCapabilities :: struct {
@@ -77,6 +78,8 @@ SemanticTokens :: struct {
SemanticTokenBuilder :: struct {
current_start: int,
tokens: [dynamic]u32,
+ symbols: map[uintptr]index.Symbol,
+ selector: bool,
}
make_token_builder :: proc(allocator := context.temp_allocator) -> SemanticTokenBuilder {
@@ -91,7 +94,7 @@ get_tokens :: proc(builder: SemanticTokenBuilder) -> SemanticTokens {
};
}
-get_semantic_tokens :: proc(document: ^common.Document, range: common.Range) -> SemanticTokens {
+get_semantic_tokens :: proc(document: ^common.Document, range: common.Range, symbols: map[uintptr]index.Symbol) -> SemanticTokens {
using analysis;
builder := make_token_builder();
@@ -100,9 +103,9 @@ get_semantic_tokens :: proc(document: ^common.Document, range: common.Range) ->
write_semantic_token(&builder, document.ast.pkg_token, document.ast.src, .Keyword, .None);
}
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
- //resolve_entire_file(document, &ast_context, context.temp_allocator);
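+ // The builder now carries the pre-resolved symbol map; visit_node and visit_selector look
+ // nodes up in it by their pointer value instead of resolving types on the fly.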
+ builder.symbols = symbols;
ast_context.current_package = ast_context.document_package;
@@ -118,19 +121,19 @@ get_semantic_tokens :: proc(document: ^common.Document, range: common.Range) ->
write_semantic_node :: proc(builder: ^SemanticTokenBuilder, node: ^ast.Node, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(node.pos.offset, transmute([]u8)src, builder.current_start);
name := common.get_ast_node_string(node, src);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier);
builder.current_start = node.pos.offset;
}
write_semantic_token :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(token.pos.offset, transmute([]u8)src, builder.current_start);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, cast(u32)modifier);
builder.current_start = token.pos.offset;
}
write_semantic_string :: proc(builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: string, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(pos.offset, transmute([]u8)src, builder.current_start);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, cast(u32)modifier);
builder.current_start = pos.offset;
}
@@ -169,11 +172,7 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context:
write_semantic_string(builder, node.pos, "..", ast_context.file.src, .Operator, .None);
visit(n.expr, builder, ast_context);
case Ident:
- if true {
- write_semantic_node(builder, node, ast_context.file.src, .Variable, .None);
- return;
- }
- if symbol, ok := analysis.lookup_symbol_cache(ast_context, n); ok {
+ if symbol, ok := builder.symbols[cast(uintptr)node]; ok {
if symbol.type == .Variable {
write_semantic_node(builder, node, ast_context.file.src, .Variable, .None);
}
@@ -195,6 +194,12 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context:
}
case Selector_Expr:
visit_selector(cast(^Selector_Expr)node, builder, ast_context);
+ builder.selector = false;
+ case When_Stmt:
+ write_semantic_string(builder, n.when_pos, "when", ast_context.file.src, .Keyword, .None);
+ visit(n.cond, builder, ast_context);
+ visit(n.body, builder, ast_context);
+ visit(n.else_stmt, builder, ast_context);
case Pointer_Type:
write_semantic_string(builder, node.pos, "^", ast_context.file.src, .Operator, .None);
visit(n.elem, builder, ast_context);
@@ -204,6 +209,12 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context:
visit(n.stmts, builder, ast_context);
case Expr_Stmt:
visit(n.expr, builder, ast_context);
+ case Branch_Stmt:
+ write_semantic_token(builder, n.tok, ast_context.file.src, .Type, .None);
+ case Poly_Type:
+ write_semantic_string(builder, n.dollar, "$", ast_context.file.src, .Operator, .None);
+ visit(n.type, builder, ast_context);
+ visit(n.specialization, builder, ast_context);
case Range_Stmt:
write_semantic_string(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None);
@@ -329,7 +340,8 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context:
write_semantic_token(builder, n.relpath, ast_context.file.src, .String, .None);
case:
- log.warnf("unhandled write node %v", n);
+ log.errorf("unhandled semantic token node %v", n);
+ //panic(fmt.tprintf("Missed semantic token handling %v", n));
}
}
@@ -337,16 +349,14 @@ visit_basic_lit :: proc(basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder
using analysis;
if symbol, ok := resolve_basic_lit(ast_context, basic_lit); ok {
-
- if generic, ok := symbol.value.(index.SymbolGenericValue); ok {
-
- ident := generic.expr.derived.(ast.Ident);
-
- if ident.name == "string" {
- write_semantic_node(builder, generic.expr, ast_context.file.src, .String, .None);
- } else if ident.name == "int" {
- write_semantic_node(builder, generic.expr, ast_context.file.src, .Number, .None);
- } else {
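+ // Literal colouring now keys off the resolved untyped value kind instead of comparing
+ // the identifier name of a generic expression.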
+ if untyped, ok := symbol.value.(index.SymbolUntypedValue); ok {
+ switch untyped.type {
+ case .Bool:
+ write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Keyword, .None);
+ case .Float, .Integer:
+ write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .Number, .None);
+ case .String:
+ write_semantic_token(builder, basic_lit.tok, ast_context.file.src, .String, .None);
}
}
}
@@ -455,7 +465,6 @@ visit_enum_fields :: proc(node: ast.Enum_Type, builder: ^SemanticTokenBuilder, a
}
for field in node.fields {
-
if ident, ok := field.derived.(Ident); ok {
write_semantic_node(builder, field, ast_context.file.src, .EnumMember, .None);
}
@@ -476,7 +485,6 @@ visit_struct_fields :: proc(node: ast.Struct_Type, builder: ^SemanticTokenBuilde
}
for field in node.fields.list {
-
for name in field.names {
if ident, ok := name.derived.(Ident); ok {
write_semantic_node(builder, name, ast_context.file.src, .Property, .None);
@@ -488,6 +496,34 @@ visit_struct_fields :: proc(node: ast.Struct_Type, builder: ^SemanticTokenBuilde
}
visit_selector :: proc(selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^analysis.AstContext) {
- using analysis;
- using ast;
+
+ if _, ok := selector.expr.derived.(ast.Selector_Expr); ok {
+ visit_selector(cast(^ast.Selector_Expr)selector.expr, builder, ast_context);
+ } else {
+ visit(selector.expr, builder, ast_context);
+ builder.selector = true;
+ }
+
+ if symbol, ok := builder.symbols[cast(uintptr)selector]; ok {
+ if symbol.type == .Variable {
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Method, .None);
+ }
+ #partial switch v in symbol.value {
+ case index.SymbolPackageValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Namespace, .None);
+ case index.SymbolStructValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Struct, .None);
+ case index.SymbolEnumValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
+ case index.SymbolUnionValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
+ case index.SymbolProcedureValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None);
+ case index.SymbolProcedureGroupValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None);
+ }
+ }
+
+
+ // Nested selector chains such as ((a.d).b).c are handled by the recursion on selector.expr above.
}
\ No newline at end of file
diff --git a/src/server/signature.odin b/src/server/signature.odin
index 03d9ce7..d187336 100644
--- a/src/server/signature.odin
+++ b/src/server/signature.odin
@@ -118,7 +118,7 @@ get_signature_information :: proc(document: ^common.Document, position: common.P
signature_help: SignatureHelp;
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri, &document.symbol_cache);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, document.uri.uri);
position_context, ok := get_document_position_context(document, position, .SignatureHelp);
diff --git a/src/server/types.odin b/src/server/types.odin
index 1dbcbe1..bf0cff4 100644
--- a/src/server/types.odin
+++ b/src/server/types.odin
@@ -298,6 +298,7 @@ OlsConfig :: struct {
enable_format: bool,
enable_procedure_context: bool,
enable_snippets: bool,
+ enable_inlay_hints: bool,
verbose: bool,
file_log: bool,
formatter: common.Format_Config,
@@ -352,6 +353,11 @@ HoverParams :: struct {
position: common.Position,
}
+
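+// Presumably a trimmed-down counterpart of the LSP inlay hint request parameters: only the
+// document identifier is carried for now, and no range is read by the handler.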
+InlayParams :: struct {
+ textDocument: TextDocumentIdentifier,
+}
+
Hover :: struct {
contents: MarkupContent,
range: common.Range,