aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorDanielGavin <danielgavin5@hotmail.com>2021-03-12 16:31:09 +0100
committerDanielGavin <danielgavin5@hotmail.com>2021-03-12 16:31:09 +0100
commit00ccd7e03e17dac40efb9b34a048d968dd77c218 (patch)
tree24e9e8d9743bc3e98b63183d1f976b11ab6d93d2
parentbaf86e02a2c45170d58ab828a13f52361129b255 (diff)
ran odinfmt on project
-rw-r--r--src/common/allocator.odin37
-rw-r--r--src/common/ast.odin1065
-rw-r--r--src/common/config.odin20
-rw-r--r--src/common/fuzzy.odin601
-rw-r--r--src/common/pool.odin66
-rw-r--r--src/common/position.odin383
-rw-r--r--src/common/pretty.odin454
-rw-r--r--src/common/sha1.odin454
-rw-r--r--src/common/track_allocator.odin248
-rw-r--r--src/common/types.odin39
-rw-r--r--src/common/uri.odin201
-rw-r--r--src/index/build.odin174
-rw-r--r--src/index/clone.odin375
-rw-r--r--src/index/collector.odin849
-rw-r--r--src/index/file_index.odin8
-rw-r--r--src/index/indexer.odin142
-rw-r--r--src/index/memory_index.odin79
-rw-r--r--src/index/symbol.odin194
-rw-r--r--src/index/util.odin303
-rw-r--r--src/main.odin121
-rw-r--r--src/server/action.odin16
-rw-r--r--src/server/analysis.odin4238
-rw-r--r--src/server/background.odin8
-rw-r--r--src/server/completion.odin1253
-rw-r--r--src/server/documents.odin638
-rw-r--r--src/server/format.odin46
-rw-r--r--src/server/hover.odin242
-rw-r--r--src/server/log.odin66
-rw-r--r--src/server/reader.odin68
-rw-r--r--src/server/requests.odin1599
-rw-r--r--src/server/response.odin81
-rw-r--r--src/server/semantic_tokens.odin980
-rw-r--r--src/server/types.odin470
-rw-r--r--src/server/unmarshal.odin281
-rw-r--r--src/server/workspace.odin2
-rw-r--r--src/server/writer.odin38
-rw-r--r--src/session/capture.odin3
-rw-r--r--src/session/replay.odin1
38 files changed, 7573 insertions, 8270 deletions
diff --git a/src/common/allocator.odin b/src/common/allocator.odin
index ce96487..0adbedf 100644
--- a/src/common/allocator.odin
+++ b/src/common/allocator.odin
@@ -5,20 +5,20 @@ import "core:mem"
Scratch_Allocator :: struct {
data: []byte,
curr_offset: int,
- prev_allocation: rawptr,
+ prev_allocation: rawptr,
backup_allocator: mem.Allocator,
leaked_allocations: [dynamic]rawptr,
}
-scratch_allocator_init :: proc(s: ^Scratch_Allocator, size: int, backup_allocator := context.allocator) {
- s.data = mem.make_aligned([]byte, size, 2*align_of(rawptr), backup_allocator);
- s.curr_offset = 0;
- s.prev_allocation = nil;
- s.backup_allocator = backup_allocator;
+scratch_allocator_init :: proc (s: ^Scratch_Allocator, size: int, backup_allocator := context.allocator) {
+ s.data = mem.make_aligned([]byte, size, 2 * align_of(rawptr), backup_allocator);
+ s.curr_offset = 0;
+ s.prev_allocation = nil;
+ s.backup_allocator = backup_allocator;
s.leaked_allocations.allocator = backup_allocator;
}
-scratch_allocator_destroy :: proc(s: ^Scratch_Allocator) {
+scratch_allocator_destroy :: proc (s: ^Scratch_Allocator) {
if s == nil {
return;
}
@@ -30,16 +30,16 @@ scratch_allocator_destroy :: proc(s: ^Scratch_Allocator) {
s^ = {};
}
-scratch_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
- size, alignment: int,
- old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+scratch_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode,
+size, alignment: int,
+old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
s := (^Scratch_Allocator)(allocator_data);
if s.data == nil {
- DEFAULT_BACKING_SIZE :: 1<<22;
- if !(context.allocator.procedure != scratch_allocator_proc &&
- context.allocator.data != allocator_data) {
+ DEFAULT_BACKING_SIZE :: 1 << 22;
+ if !(context.allocator.procedure != scratch_allocator_proc &&
+ context.allocator.data != allocator_data) {
panic("cyclic initialization of the scratch allocator with itself");
}
scratch_allocator_init(s, DEFAULT_BACKING_SIZE);
@@ -51,8 +51,8 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
case .Alloc:
size = mem.align_forward_int(size, alignment);
- switch {
- case s.curr_offset+size <= len(s.data):
+ switch {
+ case s.curr_offset + size <= len(s.data):
start := uintptr(raw_data(s.data));
ptr := start + uintptr(s.curr_offset);
ptr = mem.align_forward_uintptr(ptr, uintptr(alignment));
@@ -65,7 +65,7 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
}
a := s.backup_allocator;
if a.procedure == nil {
- a = context.allocator;
+ a = context.allocator;
s.backup_allocator = a;
}
@@ -110,12 +110,11 @@ scratch_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode,
return nil;
}
-
return nil;
}
-scratch_allocator :: proc(allocator: ^Scratch_Allocator) -> mem.Allocator {
- return mem.Allocator{
+scratch_allocator :: proc (allocator: ^Scratch_Allocator) -> mem.Allocator {
+ return mem.Allocator {
procedure = scratch_allocator_proc,
data = allocator,
};
diff --git a/src/common/ast.odin b/src/common/ast.odin
index d7d4890..98f2d84 100644
--- a/src/common/ast.odin
+++ b/src/common/ast.odin
@@ -5,633 +5,610 @@ import "core:log"
import "core:mem"
import "core:fmt"
-keyword_map : map [string] bool =
- {"int" = true,
- "uint" = true,
- "string" = true,
- "u64" = true,
- "f32" = true,
- "f64" = true,
- "i64" = true,
- "i32" = true,
- "bool" = true,
- "rawptr" = true,
- "any" = true,
- "u32" = true,
- "true" = true,
- "false" = true,
- "nil" = true,
- "byte" = true,
- "u8" = true,
- "i8" = true};
-
+keyword_map: map[string]bool = {
+ "int" = true,
+ "uint" = true,
+ "string" = true,
+ "u64" = true,
+ "f32" = true,
+ "f64" = true,
+ "i64" = true,
+ "i32" = true,
+ "bool" = true,
+ "rawptr" = true,
+ "any" = true,
+ "u32" = true,
+ "true" = true,
+ "false" = true,
+ "nil" = true,
+ "byte" = true,
+ "u8" = true,
+ "i8" = true,
+};
GlobalExpr :: struct {
- name: string,
- expr: ^ast.Expr,
- mutable: bool,
- docs: ^ast.Comment_Group,
-};
+ name: string,
+ expr: ^ast.Expr,
+ mutable: bool,
+ docs: ^ast.Comment_Group,
+}
//TODO(add a sub procedure to avoid repeating the value decl work)
-collect_globals :: proc(file: ast.File) -> [] GlobalExpr {
-
- exprs := make([dynamic] GlobalExpr, context.temp_allocator);
-
- for decl in file.decls {
-
- if value_decl, ok := decl.derived.(ast.Value_Decl); ok {
-
- for name, i in value_decl.names {
-
- str := get_ast_node_string(name, file.src);
+collect_globals :: proc (file: ast.File) -> []GlobalExpr {
- if value_decl.type != nil {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs });
- }
+ exprs := make([dynamic]GlobalExpr, context.temp_allocator);
- else {
- if len(value_decl.values) > i {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.values[i], docs = value_decl.docs });
- }
- }
+ for decl in file.decls {
- }
+ if value_decl, ok := decl.derived.(ast.Value_Decl); ok {
- }
+ for name, i in value_decl.names {
- else if when_decl, ok := decl.derived.(ast.When_Stmt); ok {
+ str := get_ast_node_string(name, file.src);
- if when_decl.cond == nil {
- continue;
- }
+ if value_decl.type != nil {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs});
+ } else {
+ if len(value_decl.values) > i {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.values[i], docs = value_decl.docs});
+ }
+ }
+ }
+ } else if when_decl, ok := decl.derived.(ast.When_Stmt); ok {
- if when_decl.body == nil {
- continue;
- }
+ if when_decl.cond == nil {
+ continue;
+ }
- if binary, ok := when_decl.cond.derived.(ast.Binary_Expr); ok {
+ if when_decl.body == nil {
+ continue;
+ }
- if binary.left == nil || binary.right == nil {
- continue;
- }
+ if binary, ok := when_decl.cond.derived.(ast.Binary_Expr); ok {
- ident: ^ast.Ident;
- basic_lit: ^ast.Basic_Lit;
+ if binary.left == nil || binary.right == nil {
+ continue;
+ }
- if t, ok := binary.left.derived.(ast.Ident); ok {
- ident = cast(^ast.Ident)binary.left;
- }
+ ident: ^ast.Ident;
+ basic_lit: ^ast.Basic_Lit;
- else if t, ok := binary.left.derived.(ast.Basic_Lit); ok {
- basic_lit = cast(^ast.Basic_Lit)binary.left;
- }
+ if t, ok := binary.left.derived.(ast.Ident); ok {
+ ident = cast(^ast.Ident)binary.left;
+ } else if t, ok := binary.left.derived.(ast.Basic_Lit); ok {
+ basic_lit = cast(^ast.Basic_Lit)binary.left;
+ }
- if t, ok := binary.right.derived.(ast.Ident); ok {
- ident = cast(^ast.Ident)binary.right;
- }
+ if t, ok := binary.right.derived.(ast.Ident); ok {
+ ident = cast(^ast.Ident)binary.right;
+ } else if t, ok := binary.right.derived.(ast.Basic_Lit); ok {
+ basic_lit = cast(^ast.Basic_Lit)binary.right;
+ }
- else if t, ok := binary.right.derived.(ast.Basic_Lit); ok {
- basic_lit = cast(^ast.Basic_Lit)binary.right;
- }
+ if ident != nil && basic_lit != nil {
- if ident != nil && basic_lit != nil {
+ //hardcode for windows for now
+ if ident.name == "ODIN_OS" && basic_lit.tok.text == "\"windows\"" {
- //hardcode for windows for now
- if ident.name == "ODIN_OS" && basic_lit.tok.text == "\"windows\"" {
+ log.errorf("when %v %v", ident, basic_lit);
- log.errorf("when %v %v", ident, basic_lit);
+ if block, ok := when_decl.body.derived.(ast.Block_Stmt); ok {
- if block, ok := when_decl.body.derived.(ast.Block_Stmt); ok {
+ for stmt in block.stmts {
- for stmt in block.stmts {
+ if value_decl, ok := stmt.derived.(ast.Value_Decl); ok {
- if value_decl, ok := stmt.derived.(ast.Value_Decl); ok {
+ for name, i in value_decl.names {
- for name, i in value_decl.names {
+ str := get_ast_node_string(name, file.src);
- str := get_ast_node_string(name, file.src);
+ if value_decl.type != nil {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs});
+ } else {
+ if len(value_decl.values) > i {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.values[i], docs = value_decl.docs});
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ //YUPPI - what a fun slide
- if value_decl.type != nil {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs });
- }
+ } else if foreign_decl, ok := decl.derived.(ast.Foreign_Block_Decl); ok {
- else {
- if len(value_decl.values) > i {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.values[i], docs = value_decl.docs });
- }
- }
- }
- }
- }
- }
- }
- }
- }
- //YUPPI - what a fun slide
+ if foreign_decl.body == nil {
+ continue;
+ }
- }
+ if block, ok := foreign_decl.body.derived.(ast.Block_Stmt); ok {
- else if foreign_decl, ok := decl.derived.(ast.Foreign_Block_Decl); ok {
+ for stmt in block.stmts {
- if foreign_decl.body == nil {
- continue;
- }
+ if value_decl, ok := stmt.derived.(ast.Value_Decl); ok {
- if block, ok := foreign_decl.body.derived.(ast.Block_Stmt); ok {
+ for name, i in value_decl.names {
- for stmt in block.stmts {
+ str := get_ast_node_string(name, file.src);
- if value_decl, ok := stmt.derived.(ast.Value_Decl); ok {
-
- for name, i in value_decl.names {
-
- str := get_ast_node_string(name, file.src);
-
- if value_decl.type != nil {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs });
- }
-
- else {
- if len(value_decl.values) > i {
- append(&exprs, GlobalExpr { name = str, expr = value_decl.values[i], docs = value_decl.docs });
- }
- }
- }
- }
- }
- }
- }
- }
+ if value_decl.type != nil {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.type, mutable = value_decl.is_mutable, docs = value_decl.docs});
+ } else {
+ if len(value_decl.values) > i {
+ append(&exprs, GlobalExpr {name = str, expr = value_decl.values[i], docs = value_decl.docs});
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
- return exprs[:];
+ return exprs[:];
}
-
-get_ast_node_string :: proc(node: ^ast.Node, src: [] byte) -> string {
- return string(src[node.pos.offset:node.end.offset]);
+get_ast_node_string :: proc (node: ^ast.Node, src: []byte) -> string {
+ return string(src[node.pos.offset:node.end.offset]);
}
-free_ast :: proc{
- free_ast_node,
- free_ast_array,
- free_ast_dynamic_array,
- free_ast_comment,
-};
+free_ast :: proc {
+free_ast_node,
+free_ast_array,
+free_ast_dynamic_array,
+free_ast_comment};
-free_ast_comment :: proc(a: ^ast.Comment_Group, allocator: mem.Allocator) {
- if a == nil {
- return;
- }
+free_ast_comment :: proc (a: ^ast.Comment_Group, allocator: mem.Allocator) {
+ if a == nil {
+ return;
+ }
- if len(a.list) > 0 {
- delete(a.list, allocator);
- }
+ if len(a.list) > 0 {
+ delete(a.list, allocator);
+ }
- free(a, allocator);
+ free(a, allocator);
}
-free_ast_array :: proc(array: $A/[]^$T, allocator: mem.Allocator) {
+free_ast_array :: proc (array: $A/[]^$T, allocator: mem.Allocator) {
for elem, i in array {
free_ast(elem, allocator);
}
- delete(array, allocator);
+ delete(array, allocator);
}
-free_ast_dynamic_array :: proc(array: $A/[dynamic]^$T, allocator: mem.Allocator) {
+free_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator) {
for elem, i in array {
free_ast(elem, allocator);
}
- delete(array);
+ delete(array);
}
-free_ast_node :: proc(node: ^ast.Node, allocator: mem.Allocator) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- case Implicit:
- case Undef:
- case Basic_Directive:
- case Basic_Lit:
- case Ellipsis:
- free_ast(n.expr, allocator);
- case Proc_Lit:
- free_ast(n.type, allocator);
- free_ast(n.body, allocator);
- free_ast(n.where_clauses, allocator);
- case Comp_Lit:
- free_ast(n.type, allocator);
- free_ast(n.elems, allocator);
- case Tag_Expr:
- free_ast(n.expr, allocator);
- case Unary_Expr:
- free_ast(n.expr, allocator);
- case Binary_Expr:
- free_ast(n.left, allocator);
- free_ast(n.right, allocator);
- case Paren_Expr:
- free_ast(n.expr, allocator);
- case Call_Expr:
- free_ast(n.expr, allocator);
- free_ast(n.args, allocator);
- case Selector_Expr:
- free_ast(n.expr, allocator);
- free_ast(n.field, allocator);
- case Implicit_Selector_Expr:
- free_ast(n.field, allocator);
- case Index_Expr:
- free_ast(n.expr, allocator);
- free_ast(n.index, allocator);
- case Deref_Expr:
- free_ast(n.expr, allocator);
- case Slice_Expr:
- free_ast(n.expr, allocator);
- free_ast(n.low, allocator);
- free_ast(n.high, allocator);
- case Field_Value:
- free_ast(n.field, allocator);
- free_ast(n.value, allocator);
- case Ternary_Expr:
- free_ast(n.cond, allocator);
- free_ast(n.x, allocator);
- free_ast(n.y, allocator);
- case Ternary_If_Expr:
- free_ast(n.x, allocator);
- free_ast(n.cond, allocator);
- free_ast(n.y, allocator);
- case Ternary_When_Expr:
- free_ast(n.x, allocator);
- free_ast(n.cond, allocator);
- free_ast(n.y, allocator);
- case Type_Assertion:
- free_ast(n.expr, allocator);
- free_ast(n.type, allocator);
- case Type_Cast:
- free_ast(n.type, allocator);
- free_ast(n.expr, allocator);
- case Auto_Cast:
- free_ast(n.expr, allocator);
- case Bad_Stmt:
- case Empty_Stmt:
- case Expr_Stmt:
- free_ast(n.expr, allocator);
- case Tag_Stmt:
- r := cast(^Expr_Stmt)node;
- free_ast(r.expr, allocator);
- case Assign_Stmt:
- free_ast(n.lhs, allocator);
- free_ast(n.rhs, allocator);
- case Block_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.stmts, allocator);
- case If_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.init, allocator);
- free_ast(n.cond, allocator);
- free_ast(n.body, allocator);
- free_ast(n.else_stmt, allocator);
- case When_Stmt:
- free_ast(n.cond, allocator);
- free_ast(n.body, allocator);
- free_ast(n.else_stmt, allocator);
- case Return_Stmt:
- free_ast(n.results, allocator);
- case Defer_Stmt:
- free_ast(n.stmt, allocator);
- case For_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.init, allocator);
- free_ast(n.cond, allocator);
- free_ast(n.post, allocator);
- free_ast(n.body, allocator);
- case Range_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.val0, allocator);
- free_ast(n.val1, allocator);
- free_ast(n.expr, allocator);
- free_ast(n.body, allocator);
- case Case_Clause:
- free_ast(n.list, allocator);
- free_ast(n.body, allocator);
- case Switch_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.init, allocator);
- free_ast(n.cond, allocator);
- free_ast(n.body, allocator);
- case Type_Switch_Stmt:
- free_ast(n.label, allocator);
- free_ast(n.tag, allocator);
- free_ast(n.expr, allocator);
- free_ast(n.body, allocator);
- case Branch_Stmt:
- free_ast(n.label, allocator);
- case Using_Stmt:
- free_ast(n.list, allocator);
- case Bad_Decl:
- case Value_Decl:
- free_ast(n.attributes, allocator);
- free_ast(n.names, allocator);
- free_ast(n.type, allocator);
- free_ast(n.values, allocator);
- //free_ast(n.docs);
- //free_ast(n.comment);
- case Package_Decl:
- //free_ast(n.docs);
- //free_ast(n.comment);
- case Import_Decl:
- //free_ast(n.docs);
- //free_ast(n.comment);
- case Foreign_Block_Decl:
- free_ast(n.attributes, allocator);
- free_ast(n.foreign_library, allocator);
- free_ast(n.body, allocator);
- case Foreign_Import_Decl:
- free_ast(n.name, allocator);
- free_ast(n.attributes, allocator);
- case Proc_Group:
- free_ast(n.args, allocator);
- case Attribute:
- free_ast(n.elems, allocator);
- case Field:
- free_ast(n.names, allocator);
- free_ast(n.type, allocator);
- free_ast(n.default_value, allocator);
- //free_ast(n.docs);
- //free_ast(n.comment);
- case Field_List:
- free_ast(n.list, allocator);
- case Typeid_Type:
- free_ast(n.specialization, allocator);
- case Helper_Type:
- free_ast(n.type, allocator);
- case Distinct_Type:
- free_ast(n.type, allocator);
- case Poly_Type:
- free_ast(n.type, allocator);
- free_ast(n.specialization, allocator);
- case Proc_Type:
- free_ast(n.params, allocator);
- free_ast(n.results, allocator);
- case Pointer_Type:
- free_ast(n.elem, allocator);
- case Array_Type:
- free_ast(n.len, allocator);
- free_ast(n.elem, allocator);
- free_ast(n.tag, allocator);
- case Dynamic_Array_Type:
- free_ast(n.elem, allocator);
- free_ast(n.tag, allocator);
- case Struct_Type:
- free_ast(n.poly_params, allocator);
- free_ast(n.align, allocator);
- free_ast(n.fields, allocator);
- free_ast(n.where_clauses, allocator);
- case Union_Type:
- free_ast(n.poly_params, allocator);
- free_ast(n.align, allocator);
- free_ast(n.variants, allocator);
- free_ast(n.where_clauses, allocator);
- case Enum_Type:
- free_ast(n.base_type, allocator);
- free_ast(n.fields, allocator);
- case Bit_Set_Type:
- free_ast(n.elem, allocator);
- free_ast(n.underlying, allocator);
- case Map_Type:
- free_ast(n.key, allocator);
- free_ast(n.value, allocator);
- case:
- log.warnf("free Unhandled node kind: %T", n);
- }
-
- mem.free(node, allocator);
-}
+free_ast_node :: proc (node: ^ast.Node, allocator: mem.Allocator) {
+ using ast;
+ if node == nil {
+ return;
+ }
-free_ast_file :: proc(file: ast.File, allocator := context.allocator) {
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ case Implicit:
+ case Undef:
+ case Basic_Directive:
+ case Basic_Lit:
+ case Ellipsis:
+ free_ast(n.expr, allocator);
+ case Proc_Lit:
+ free_ast(n.type, allocator);
+ free_ast(n.body, allocator);
+ free_ast(n.where_clauses, allocator);
+ case Comp_Lit:
+ free_ast(n.type, allocator);
+ free_ast(n.elems, allocator);
+ case Tag_Expr:
+ free_ast(n.expr, allocator);
+ case Unary_Expr:
+ free_ast(n.expr, allocator);
+ case Binary_Expr:
+ free_ast(n.left, allocator);
+ free_ast(n.right, allocator);
+ case Paren_Expr:
+ free_ast(n.expr, allocator);
+ case Call_Expr:
+ free_ast(n.expr, allocator);
+ free_ast(n.args, allocator);
+ case Selector_Expr:
+ free_ast(n.expr, allocator);
+ free_ast(n.field, allocator);
+ case Implicit_Selector_Expr:
+ free_ast(n.field, allocator);
+ case Index_Expr:
+ free_ast(n.expr, allocator);
+ free_ast(n.index, allocator);
+ case Deref_Expr:
+ free_ast(n.expr, allocator);
+ case Slice_Expr:
+ free_ast(n.expr, allocator);
+ free_ast(n.low, allocator);
+ free_ast(n.high, allocator);
+ case Field_Value:
+ free_ast(n.field, allocator);
+ free_ast(n.value, allocator);
+ case Ternary_Expr:
+ free_ast(n.cond, allocator);
+ free_ast(n.x, allocator);
+ free_ast(n.y, allocator);
+ case Ternary_If_Expr:
+ free_ast(n.x, allocator);
+ free_ast(n.cond, allocator);
+ free_ast(n.y, allocator);
+ case Ternary_When_Expr:
+ free_ast(n.x, allocator);
+ free_ast(n.cond, allocator);
+ free_ast(n.y, allocator);
+ case Type_Assertion:
+ free_ast(n.expr, allocator);
+ free_ast(n.type, allocator);
+ case Type_Cast:
+ free_ast(n.type, allocator);
+ free_ast(n.expr, allocator);
+ case Auto_Cast:
+ free_ast(n.expr, allocator);
+ case Bad_Stmt:
+ case Empty_Stmt:
+ case Expr_Stmt:
+ free_ast(n.expr, allocator);
+ case Tag_Stmt:
+ r := cast(^Expr_Stmt)node;
+ free_ast(r.expr, allocator);
+ case Assign_Stmt:
+ free_ast(n.lhs, allocator);
+ free_ast(n.rhs, allocator);
+ case Block_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.stmts, allocator);
+ case If_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.init, allocator);
+ free_ast(n.cond, allocator);
+ free_ast(n.body, allocator);
+ free_ast(n.else_stmt, allocator);
+ case When_Stmt:
+ free_ast(n.cond, allocator);
+ free_ast(n.body, allocator);
+ free_ast(n.else_stmt, allocator);
+ case Return_Stmt:
+ free_ast(n.results, allocator);
+ case Defer_Stmt:
+ free_ast(n.stmt, allocator);
+ case For_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.init, allocator);
+ free_ast(n.cond, allocator);
+ free_ast(n.post, allocator);
+ free_ast(n.body, allocator);
+ case Range_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.val0, allocator);
+ free_ast(n.val1, allocator);
+ free_ast(n.expr, allocator);
+ free_ast(n.body, allocator);
+ case Case_Clause:
+ free_ast(n.list, allocator);
+ free_ast(n.body, allocator);
+ case Switch_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.init, allocator);
+ free_ast(n.cond, allocator);
+ free_ast(n.body, allocator);
+ case Type_Switch_Stmt:
+ free_ast(n.label, allocator);
+ free_ast(n.tag, allocator);
+ free_ast(n.expr, allocator);
+ free_ast(n.body, allocator);
+ case Branch_Stmt:
+ free_ast(n.label, allocator);
+ case Using_Stmt:
+ free_ast(n.list, allocator);
+ case Bad_Decl:
+ case Value_Decl:
+ free_ast(n.attributes, allocator);
+ free_ast(n.names, allocator);
+ free_ast(n.type, allocator);
+ free_ast(n.values, allocator);
+ //free_ast(n.docs);
+ //free_ast(n.comment);
+ case Package_Decl:
+ //free_ast(n.docs);
+ //free_ast(n.comment);
+ case Import_Decl:
+ //free_ast(n.docs);
+ //free_ast(n.comment);
+ case Foreign_Block_Decl:
+ free_ast(n.attributes, allocator);
+ free_ast(n.foreign_library, allocator);
+ free_ast(n.body, allocator);
+ case Foreign_Import_Decl:
+ free_ast(n.name, allocator);
+ free_ast(n.attributes, allocator);
+ case Proc_Group:
+ free_ast(n.args, allocator);
+ case Attribute:
+ free_ast(n.elems, allocator);
+ case Field:
+ free_ast(n.names, allocator);
+ free_ast(n.type, allocator);
+ free_ast(n.default_value, allocator);
+ //free_ast(n.docs);
+ //free_ast(n.comment);
+ case Field_List:
+ free_ast(n.list, allocator);
+ case Typeid_Type:
+ free_ast(n.specialization, allocator);
+ case Helper_Type:
+ free_ast(n.type, allocator);
+ case Distinct_Type:
+ free_ast(n.type, allocator);
+ case Poly_Type:
+ free_ast(n.type, allocator);
+ free_ast(n.specialization, allocator);
+ case Proc_Type:
+ free_ast(n.params, allocator);
+ free_ast(n.results, allocator);
+ case Pointer_Type:
+ free_ast(n.elem, allocator);
+ case Array_Type:
+ free_ast(n.len, allocator);
+ free_ast(n.elem, allocator);
+ free_ast(n.tag, allocator);
+ case Dynamic_Array_Type:
+ free_ast(n.elem, allocator);
+ free_ast(n.tag, allocator);
+ case Struct_Type:
+ free_ast(n.poly_params, allocator);
+ free_ast(n.align, allocator);
+ free_ast(n.fields, allocator);
+ free_ast(n.where_clauses, allocator);
+ case Union_Type:
+ free_ast(n.poly_params, allocator);
+ free_ast(n.align, allocator);
+ free_ast(n.variants, allocator);
+ free_ast(n.where_clauses, allocator);
+ case Enum_Type:
+ free_ast(n.base_type, allocator);
+ free_ast(n.fields, allocator);
+ case Bit_Set_Type:
+ free_ast(n.elem, allocator);
+ free_ast(n.underlying, allocator);
+ case Map_Type:
+ free_ast(n.key, allocator);
+ free_ast(n.value, allocator);
+ case:
+ log.warnf("free Unhandled node kind: %T", n);
+ }
- for decl in file.decls {
- free_ast(decl, allocator);
- }
+ mem.free(node, allocator);
+}
- free_ast(file.pkg_decl, allocator);
+free_ast_file :: proc (file: ast.File, allocator := context.allocator) {
- for comment in file.comments {
- free_ast(comment, allocator);
- }
+ for decl in file.decls {
+ free_ast(decl, allocator);
+ }
- delete(file.comments);
- delete(file.imports);
- delete(file.decls);
-}
+ free_ast(file.pkg_decl, allocator);
+ for comment in file.comments {
+ free_ast(comment, allocator);
+ }
-node_equal :: proc{
- node_equal_node,
- node_equal_array,
- node_equal_dynamic_array
-};
+ delete(file.comments);
+ delete(file.imports);
+ delete(file.decls);
+}
+
+node_equal :: proc {
+node_equal_node,
+node_equal_array,
+node_equal_dynamic_array};
-node_equal_array :: proc(a, b: $A/[]^$T) -> bool {
+node_equal_array :: proc (a, b: $A/[]^$T) -> bool {
- ret := true;
+ ret := true;
- if len(a) != len(b) {
- return false;
- }
+ if len(a) != len(b) {
+ return false;
+ }
for elem, i in a {
ret &= node_equal(elem, b[i]);
}
- return ret;
+ return ret;
}
-node_equal_dynamic_array :: proc(a, b: $A/[dynamic]^$T) -> bool {
+node_equal_dynamic_array :: proc (a, b: $A/[dynamic]^$T) -> bool {
- ret := true;
+ ret := true;
- if len(a) != len(b) {
- return false;
- }
+ if len(a) != len(b) {
+ return false;
+ }
for elem, i in a {
ret &= node_equal(elem, b[i]);
}
- return ret;
+ return ret;
}
+node_equal_node :: proc (a, b: ^ast.Node) -> bool {
+
+ using ast;
-node_equal_node :: proc(a, b: ^ast.Node) -> bool {
-
- using ast;
-
- if a == nil || b == nil {
- return false;
- }
-
- switch m in b.derived {
- case Bad_Expr:
- if n, ok := a.derived.(Bad_Expr); ok {
- return true;
- }
- case Ident:
- if n, ok := a.derived.(Ident); ok {
- return true;
- //return n.name == m.name;
- }
- case Implicit:
- if n, ok := a.derived.(Implicit); ok {
- return true;
- }
- case Undef:
- if n, ok := a.derived.(Undef); ok {
- return true;
- }
- case Basic_Lit:
- if n, ok := a.derived.(Basic_Lit); ok {
- return true;
- }
- case Poly_Type:
- return true;
- //return node_equal(n.sp)
- //if n, ok := a.derived.(Poly_Type); ok {
- // ret := node_equal(n.type, m.type);
- // ret &= node_equal(n.specialization, m.specialization);
- // return ret;
- //}
- case Ellipsis:
- if n, ok := a.derived.(Ellipsis); ok {
- return node_equal(n.expr, m.expr);
- }
- case Tag_Expr:
- if n, ok := a.derived.(Tag_Expr); ok {
- return node_equal(n.expr, m.expr);
- }
- case Unary_Expr:
- if n, ok := a.derived.(Unary_Expr); ok {
- return node_equal(n.expr, m.expr);
- }
- case Binary_Expr:
- if n, ok := a.derived.(Binary_Expr); ok {
- ret := node_equal(n.left, m.left);
- ret &= node_equal(n.right, m.right);
- return ret;
- }
- case Paren_Expr:
- if n, ok := a.derived.(Paren_Expr); ok {
- return node_equal(n.expr, m.expr);
- }
- case Selector_Expr:
- if n, ok := a.derived.(Selector_Expr); ok {
- ret := node_equal(n.expr, m.expr);
- ret &= node_equal(n.field, m.field);
- return ret;
- }
- case Slice_Expr:
- if n, ok := a.derived.(Slice_Expr); ok {
- ret := node_equal(n.expr, m.expr);
- ret &= node_equal(n.low, m.low);
- ret &= node_equal(n.high, m.high);
- return ret;
- }
- case Distinct_Type:
- if n, ok := a.derived.(Distinct_Type); ok {
- return node_equal(n.type, m.type);
- }
- case Proc_Type:
- if n, ok := a.derived.(Proc_Type); ok {
- ret := node_equal(n.params, m.params);
- ret &= node_equal(n.results, m.results);
- return ret;
- }
- case Pointer_Type:
- if n, ok := a.derived.(Pointer_Type); ok {
- return node_equal(n.elem, m.elem);
- }
- case Array_Type:
- if n, ok := a.derived.(Array_Type); ok {
- ret := node_equal(n.len, m.len);
- ret &= node_equal(n.elem, m.elem);
- return ret;
- }
- case Dynamic_Array_Type:
- if n, ok := a.derived.(Dynamic_Array_Type); ok {
- return node_equal(n.elem, m.elem);
- }
- case Struct_Type:
- if n, ok := a.derived.(Struct_Type); ok {
- ret := node_equal(n.poly_params, m.poly_params);
- ret &= node_equal(n.align, m.align);
- ret &= node_equal(n.fields, m.fields);
- return ret;
- }
- case Field:
- if n, ok := a.derived.(Field); ok {
- ret := node_equal(n.names, m.names);
- ret &= node_equal(n.type, m.type);
- ret &= node_equal(n.default_value, m.default_value);
- return ret;
- }
+ if a == nil || b == nil {
+ return false;
+ }
+
+ switch m in b.derived {
+ case Bad_Expr:
+ if n, ok := a.derived.(Bad_Expr); ok {
+ return true;
+ }
+ case Ident:
+ if n, ok := a.derived.(Ident); ok {
+ return true;
+ //return n.name == m.name;
+ }
+ case Implicit:
+ if n, ok := a.derived.(Implicit); ok {
+ return true;
+ }
+ case Undef:
+ if n, ok := a.derived.(Undef); ok {
+ return true;
+ }
+ case Basic_Lit:
+ if n, ok := a.derived.(Basic_Lit); ok {
+ return true;
+ }
+ case Poly_Type:
+ return true;
+ //return node_equal(n.sp)
+ //if n, ok := a.derived.(Poly_Type); ok {
+ // ret := node_equal(n.type, m.type);
+ // ret &= node_equal(n.specialization, m.specialization);
+ // return ret;
+ //}
+ case Ellipsis:
+ if n, ok := a.derived.(Ellipsis); ok {
+ return node_equal(n.expr, m.expr);
+ }
+ case Tag_Expr:
+ if n, ok := a.derived.(Tag_Expr); ok {
+ return node_equal(n.expr, m.expr);
+ }
+ case Unary_Expr:
+ if n, ok := a.derived.(Unary_Expr); ok {
+ return node_equal(n.expr, m.expr);
+ }
+ case Binary_Expr:
+ if n, ok := a.derived.(Binary_Expr); ok {
+ ret := node_equal(n.left, m.left);
+ ret &= node_equal(n.right, m.right);
+ return ret;
+ }
+ case Paren_Expr:
+ if n, ok := a.derived.(Paren_Expr); ok {
+ return node_equal(n.expr, m.expr);
+ }
+ case Selector_Expr:
+ if n, ok := a.derived.(Selector_Expr); ok {
+ ret := node_equal(n.expr, m.expr);
+ ret &= node_equal(n.field, m.field);
+ return ret;
+ }
+ case Slice_Expr:
+ if n, ok := a.derived.(Slice_Expr); ok {
+ ret := node_equal(n.expr, m.expr);
+ ret &= node_equal(n.low, m.low);
+ ret &= node_equal(n.high, m.high);
+ return ret;
+ }
+ case Distinct_Type:
+ if n, ok := a.derived.(Distinct_Type); ok {
+ return node_equal(n.type, m.type);
+ }
+ case Proc_Type:
+ if n, ok := a.derived.(Proc_Type); ok {
+ ret := node_equal(n.params, m.params);
+ ret &= node_equal(n.results, m.results);
+ return ret;
+ }
+ case Pointer_Type:
+ if n, ok := a.derived.(Pointer_Type); ok {
+ return node_equal(n.elem, m.elem);
+ }
+ case Array_Type:
+ if n, ok := a.derived.(Array_Type); ok {
+ ret := node_equal(n.len, m.len);
+ ret &= node_equal(n.elem, m.elem);
+ return ret;
+ }
+ case Dynamic_Array_Type:
+ if n, ok := a.derived.(Dynamic_Array_Type); ok {
+ return node_equal(n.elem, m.elem);
+ }
+ case Struct_Type:
+ if n, ok := a.derived.(Struct_Type); ok {
+ ret := node_equal(n.poly_params, m.poly_params);
+ ret &= node_equal(n.align, m.align);
+ ret &= node_equal(n.fields, m.fields);
+ return ret;
+ }
+ case Field:
+ if n, ok := a.derived.(Field); ok {
+ ret := node_equal(n.names, m.names);
+ ret &= node_equal(n.type, m.type);
+ ret &= node_equal(n.default_value, m.default_value);
+ return ret;
+ }
case Field_List:
- if n, ok := a.derived.(Field_List); ok {
- return node_equal(n.list, m.list);
- }
- case Field_Value:
- if n, ok := a.derived.(Field_Value); ok {
- ret := node_equal(n.field, m.field);
- ret &= node_equal(n.value, m.value);
- return ret;
- }
- case Union_Type:
- if n, ok := a.derived.(Union_Type); ok {
- ret := node_equal(n.poly_params, m.poly_params);
- ret &= node_equal(n.align, m.align);
- ret &= node_equal(n.variants, m.variants);
- return ret;
- }
- case Enum_Type:
- if n, ok := a.derived.(Enum_Type); ok {
- ret := node_equal(n.base_type, m.base_type);
- ret &= node_equal(n.fields, m.fields);
- return ret;
- }
- case Bit_Set_Type:
- if n, ok := a.derived.(Bit_Set_Type); ok {
- ret := node_equal(n.elem, m.elem);
- ret &= node_equal(n.underlying, m.underlying);
- return ret;
- }
- case Map_Type:
- if n, ok := a.derived.(Map_Type); ok {
- ret := node_equal(n.key, m.key);
- ret &= node_equal(n.value, m.value);
- return ret;
- }
- case Call_Expr:
- if n, ok := a.derived.(Call_Expr); ok {
- ret := node_equal(n.expr, m.expr);
- ret &= node_equal(n.args, m.args);
- return ret;
- }
- case Typeid_Type:
- return true;
- //if n, ok := a.derived.(Typeid_Type); ok {
- // return node_equal(n.specialization, m.specialization);
- //}
- case:
- log.warn("Unhandled poly node kind: %T", m);
- }
-
- return false;
+ if n, ok := a.derived.(Field_List); ok {
+ return node_equal(n.list, m.list);
+ }
+ case Field_Value:
+ if n, ok := a.derived.(Field_Value); ok {
+ ret := node_equal(n.field, m.field);
+ ret &= node_equal(n.value, m.value);
+ return ret;
+ }
+ case Union_Type:
+ if n, ok := a.derived.(Union_Type); ok {
+ ret := node_equal(n.poly_params, m.poly_params);
+ ret &= node_equal(n.align, m.align);
+ ret &= node_equal(n.variants, m.variants);
+ return ret;
+ }
+ case Enum_Type:
+ if n, ok := a.derived.(Enum_Type); ok {
+ ret := node_equal(n.base_type, m.base_type);
+ ret &= node_equal(n.fields, m.fields);
+ return ret;
+ }
+ case Bit_Set_Type:
+ if n, ok := a.derived.(Bit_Set_Type); ok {
+ ret := node_equal(n.elem, m.elem);
+ ret &= node_equal(n.underlying, m.underlying);
+ return ret;
+ }
+ case Map_Type:
+ if n, ok := a.derived.(Map_Type); ok {
+ ret := node_equal(n.key, m.key);
+ ret &= node_equal(n.value, m.value);
+ return ret;
+ }
+ case Call_Expr:
+ if n, ok := a.derived.(Call_Expr); ok {
+ ret := node_equal(n.expr, m.expr);
+ ret &= node_equal(n.args, m.args);
+ return ret;
+ }
+ case Typeid_Type:
+ return true;
+ //if n, ok := a.derived.(Typeid_Type); ok {
+ // return node_equal(n.specialization, m.specialization);
+ //}
+ case:
+ log.warn("Unhandled poly node kind: %T", m);
+ }
+
+ return false;
} \ No newline at end of file
diff --git a/src/common/config.odin b/src/common/config.odin
index 12ae171..ddf4d2a 100644
--- a/src/common/config.odin
+++ b/src/common/config.odin
@@ -1,13 +1,13 @@
package common
Config :: struct {
- workspace_folders: [dynamic] WorkspaceFolder,
- completion_support_md: bool,
- hover_support_md: bool,
- signature_offset_support: bool,
- collections: map [string] string,
- running: bool,
- verbose: bool,
- debug_single_thread: bool,
- enable_semantic_tokens: bool, //This will be removed when vscode client stops sending me semantic tokens after disabling it in requests initialize.
-}; \ No newline at end of file
+ workspace_folders: [dynamic]WorkspaceFolder,
+ completion_support_md: bool,
+ hover_support_md: bool,
+ signature_offset_support: bool,
+ collections: map[string]string,
+ running: bool,
+ verbose: bool,
+ debug_single_thread: bool,
+ enable_semantic_tokens: bool, //This will be removed when vscode client stops sending me semantic tokens after disabling it in requests initialize.
+} \ No newline at end of file
diff --git a/src/common/fuzzy.odin b/src/common/fuzzy.odin
index c582421..f0b4a82 100644
--- a/src/common/fuzzy.odin
+++ b/src/common/fuzzy.odin
@@ -4,430 +4,413 @@ import "core:strings"
import "core:fmt"
/*
- Ported from https://github.com/llvm/llvm-project/blob/master/clang-tools-extra/clangd/FuzzyMatch.cpp
+ Ported from https://github.com/llvm/llvm-project/blob/master/clang-tools-extra/clangd/FuzzyMatch.cpp
*/
max_pattern :: 63;
-max_word :: 127;
+max_word :: 127;
awful_score: int = -(1 << 13);
perfect_bonus :: 4;
-miss :: 0;
-match :: 1;
+miss :: 0;
+match :: 1;
FuzzyCharTypeSet :: u8;
-
-
//do bitfield instead
FuzzyScoreInfo :: struct {
- score: int,
- prev: int,
-};
-
-
+ score: int,
+ prev: int,
+}
-FuzzyCharRole :: enum(u8) {
- Unknown = 0, // Stray control characters or impossible states.
- Tail = 1, // Part of a word segment, but not the first character.
- Head = 2, // The first character of a word segment.
- Separator = 3, // Punctuation characters that separate word segments.
-};
+FuzzyCharRole :: enum (u8)
+// Stray control characters or impossible states.
+// Part of a word segment, but not the first character.
+// The first character of a word segment.
+{
+ Unknown = 0,
+ Tail = 1,
+ Head = 2,
+ Separator = 3, // Punctuation characters that separate word segments.
+}
-FuzzyCharType :: enum(u8) {
- Empty = 0, // Before-the-start and after-the-end (and control chars).
- Lower = 1, // Lowercase letters, digits, and non-ASCII bytes.
- Upper = 2, // Uppercase letters.
- Punctuation = 3, // ASCII punctuation (including Space)
-};
+FuzzyCharType :: enum (u8)
+// Before-the-start and after-the-end (and control chars).
+// Lowercase letters, digits, and non-ASCII bytes.
+// Uppercase letters.
+{
+ Empty = 0,
+ Lower = 1,
+ Upper = 2,
+ Punctuation = 3, // ASCII punctuation (including Space)
+}
FuzzyMatcher :: struct {
- pattern: string,
- word: string,
- lower_pattern: string,
- lower_word: string,
- scores: [max_pattern + 1][max_word + 1][2] FuzzyScoreInfo,
- pattern_count: int,
- pattern_type_set: FuzzyCharTypeSet,
- word_type_set: FuzzyCharTypeSet,
- pattern_role: [max_pattern] FuzzyCharRole,
- word_count: int,
- score_scale: f32,
- word_role: [max_word] FuzzyCharRole,
-};
-
+ pattern: string,
+ word: string,
+ lower_pattern: string,
+ lower_word: string,
+ scores: [max_pattern + 1][max_word + 1][2]FuzzyScoreInfo,
+ pattern_count: int,
+ pattern_type_set: FuzzyCharTypeSet,
+ word_type_set: FuzzyCharTypeSet,
+ pattern_role: [max_pattern]FuzzyCharRole,
+ word_count: int,
+ score_scale: f32,
+ word_role: [max_word]FuzzyCharRole,
+}
-char_roles : [] u8 = {
- // clang-format off
- // Curr= Empty Lower Upper Separ
- /* Prev=Empty */ 0x00, 0xaa, 0xaa, 0xff, // At start, Lower|Upper->Head
- /* Prev=Lower */ 0x00, 0x55, 0xaa, 0xff, // In word, Upper->Head;Lower->Tail
- /* Prev=Upper */ 0x00, 0x55, 0x59, 0xff, // Ditto, but U(U)U->Tail
- /* Prev=Separ */ 0x00, 0xaa, 0xaa, 0xff, // After separator, like at start
- // clang-format on
+char_roles: []u8 =
+// clang-format off
+// Curr= Empty Lower Upper Separ
+/*Prev=Empty */{
+ 0x00,0xaa,0xaa,0xff, // At start, Lower|Upper->Head
+ /*Prev=Lower */0x00,0x55,0xaa,0xff, // In word, Upper->Head;Lower->Tail
+ /*Prev=Upper */0x00,0x55,0x59,0xff, // Ditto, but U(U)U->Tail
+ /*Prev=Separ */0x00,0xaa,0xaa,0xff, // After separator, like at start
+ // clang-format on
};
-char_types : [] u8 = {
- 0x00, 0x00, 0x00, 0x00, // Control characters
- 0x00, 0x00, 0x00, 0x00, // Control characters
- 0xff, 0xff, 0xff, 0xff, // Punctuation
- 0x55, 0x55, 0xf5, 0xff, // Numbers->Lower, more Punctuation.
- 0xab, 0xaa, 0xaa, 0xaa, // @ and A-O
- 0xaa, 0xaa, 0xea, 0xff, // P-Z, more Punctuation.
- 0x57, 0x55, 0x55, 0x55, // ` and a-o
- 0x55, 0x55, 0xd5, 0x3f, // p-z, Punctuation, DEL.
- 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, // Bytes over 127 -> Lower.
- 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, // (probably UTF-8).
- 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
- 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
+char_types: []u8 = {
+ 0x00,0x00,0x00,0x00, // Control characters
+ 0x00,0x00,0x00,0x00, // Control characters
+ 0xff,0xff,0xff,0xff, // Punctuation
+ 0x55,0x55,0xf5,0xff, // Numbers->Lower, more Punctuation.
+ 0xab,0xaa,0xaa,0xaa, // @ and A-O
+ 0xaa,0xaa,0xea,0xff, // P-Z, more Punctuation.
+ 0x57,0x55,0x55,0x55, // ` and a-o
+ 0x55,0x55,0xd5,0x3f, // p-z, Punctuation, DEL.
+ 0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55, // Bytes over 127 -> Lower.
+ 0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55, // (probably UTF-8).
+ 0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55,
+ 0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55,
};
-
make_fuzzy_matcher :: proc (pattern: string, allocator := context.temp_allocator) -> ^FuzzyMatcher {
- matcher := new(FuzzyMatcher, allocator);
-
- matcher.pattern_count = min(len(pattern), max_pattern);
- matcher.score_scale = matcher.pattern_count > 0 ? 1 / cast(f32)(perfect_bonus * matcher.pattern_count) : 0;
- matcher.pattern = pattern[0:matcher.pattern_count];
- matcher.lower_pattern = strings.to_lower(matcher.pattern, context.temp_allocator);
+ matcher := new(FuzzyMatcher, allocator);
- score_info_miss: FuzzyScoreInfo;
- score_info_miss.score = 0;
- score_info_miss.prev = miss;
+ matcher.pattern_count = min(len(pattern), max_pattern);
+ matcher.score_scale = matcher.pattern_count > 0 ? 1 / cast(f32)(perfect_bonus * matcher.pattern_count) : 0;
+ matcher.pattern = pattern[0:matcher.pattern_count];
+ matcher.lower_pattern = strings.to_lower(matcher.pattern, context.temp_allocator);
- matcher.scores[0][0][miss] = score_info_miss;
+ score_info_miss: FuzzyScoreInfo;
+ score_info_miss.score = 0;
+ score_info_miss.prev = miss;
- score_info_match: FuzzyScoreInfo;
- score_info_match.score = awful_score;
- score_info_match.prev = match;
+ matcher.scores[0][0][miss] = score_info_miss;
- matcher.scores[0][0][match] = score_info_match;
+ score_info_match: FuzzyScoreInfo;
+ score_info_match.score = awful_score;
+ score_info_match.prev = match;
- for p := 0; p < matcher.pattern_count; p += 1 {
+ matcher.scores[0][0][match] = score_info_match;
- for w := 0; w < p; w+= 1 {
+ for p := 0; p < matcher.pattern_count; p += 1 {
- for a := 0; a < 2; a += 1 {
- score_info: FuzzyScoreInfo;
- score_info.score = awful_score;
- score_info.prev = miss;
- matcher.scores[p][w][a] = score_info;
- ref := matcher.pattern_role[:matcher.pattern_count];
- matcher.pattern_type_set = fuzzy_calculate_roles(matcher.pattern, &ref);
- }
+ for w := 0; w < p; w += 1 {
- }
+ for a := 0; a < 2; a += 1 {
+ score_info: FuzzyScoreInfo;
+ score_info.score = awful_score;
+ score_info.prev = miss;
+ matcher.scores[p][w][a] = score_info;
+ ref := matcher.pattern_role[:matcher.pattern_count];
+ matcher.pattern_type_set = fuzzy_calculate_roles(matcher.pattern, &ref);
+ }
+ }
+ }
-
- }
-
- return matcher;
+ return matcher;
}
-fuzzy_to_acronym :: proc(word: string) -> (string, bool) {
+fuzzy_to_acronym :: proc (word: string) -> (string, bool) {
- builder := strings.make_builder(context.temp_allocator);
+ builder := strings.make_builder(context.temp_allocator);
- if len(word) <= 1 {
- return "", false;
- }
+ if len(word) <= 1 {
+ return "", false;
+ }
- i := 1;
- last_char := word[0];
+ i := 1;
+ last_char := word[0];
- strings.write_byte(&builder, last_char);
+ strings.write_byte(&builder, last_char);
- for i < len(word) {
+ for i < len(word) {
- if last_char == '_' {
- strings.write_byte(&builder, word[i]);
- }
+ if last_char == '_' {
+ strings.write_byte(&builder, word[i]);
+ }
- last_char = word[i];
+ last_char = word[i];
- i += 1;
- }
+ i += 1;
+ }
- str := strings.to_string(builder);
+ str := strings.to_string(builder);
- if len(str) <= 1 {
- return "", false;
- }
+ if len(str) <= 1 {
+ return "", false;
+ }
- return str, true;
+ return str, true;
}
fuzzy_match :: proc (matcher: ^FuzzyMatcher, word: string) -> (f32, bool) {
- if !fuzzy_init(matcher, word) {
- return 0, false;
- }
+ if !fuzzy_init(matcher, word) {
+ return 0, false;
+ }
- if matcher.pattern_count <= 0 {
- return 1, true;
- }
+ if matcher.pattern_count <= 0 {
+ return 1, true;
+ }
- if acronym, ok := fuzzy_to_acronym(word); ok {
- if acronym == matcher.pattern {
- return 20, true;
- }
- }
+ if acronym, ok := fuzzy_to_acronym(word); ok {
+ if acronym == matcher.pattern {
+ return 20, true;
+ }
+ }
- fuzzy_build_graph(matcher);
+ fuzzy_build_graph(matcher);
- best := max(cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][miss].score,
- cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][match].score);
+ best := max(cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][miss].score,
+ cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][match].score);
- if fuzzy_is_awful(best) {
- return 0.0, false;
- }
+ if fuzzy_is_awful(best) {
+ return 0.0, false;
+ }
- score := matcher.score_scale * min(perfect_bonus * cast(f32)matcher.pattern_count, cast(f32)max(0, best));
+ score := matcher.score_scale * min(perfect_bonus * cast(f32)matcher.pattern_count, cast(f32)max(0, best));
- if matcher.word_count == matcher.pattern_count {
- score *= 2;
- }
+ if matcher.word_count == matcher.pattern_count {
+ score *= 2;
+ }
- return score, true;
+ return score, true;
}
-fuzzy_is_awful :: proc(s: int) -> bool {
- return s < awful_score / 2;
+fuzzy_is_awful :: proc (s: int) -> bool {
+ return s < awful_score / 2;
}
-fuzzy_calculate_roles :: proc(text: string, roles: ^[] FuzzyCharRole) -> FuzzyCharTypeSet {
+fuzzy_calculate_roles :: proc (text: string, roles: ^[]FuzzyCharRole) -> FuzzyCharTypeSet {
- assert(len(text) == len(roles));
+ assert(len(text) == len(roles));
- if len(text) == 0 {
- return 0;
- }
+ if len(text) == 0 {
+ return 0;
+ }
- type: FuzzyCharType = cast(FuzzyCharType)fuzzy_packed_lookup(char_types, cast(uint)text[0]);
+ type: FuzzyCharType = cast(FuzzyCharType)fuzzy_packed_lookup(char_types, cast(uint)text[0]);
- type_set: FuzzyCharTypeSet = cast(u8)(1 << cast(uint)type);
+ type_set: FuzzyCharTypeSet = cast(u8)(1 << cast(uint)type);
- types := type;
+ types := type;
- for i := 0; i < len(text) - 1; i += 1 {
- type = cast(FuzzyCharType)fuzzy_packed_lookup(char_types, cast(uint)text[i+1]);
- type_set |= 1 << cast(uint)type;
+ for i := 0; i < len(text) - 1; i += 1 {
+ type = cast(FuzzyCharType)fuzzy_packed_lookup(char_types, cast(uint)text[i + 1]);
+ type_set |= 1 << cast(uint)type;
- fuzzy_rotate(type, &types);
+ fuzzy_rotate(type, &types);
- roles[i] = cast(FuzzyCharRole)fuzzy_packed_lookup(char_roles, cast(uint)types);
- }
+ roles[i] = cast(FuzzyCharRole)fuzzy_packed_lookup(char_roles, cast(uint)types);
+ }
- fuzzy_rotate(.Empty, &types);
+ fuzzy_rotate(.Empty, &types);
- roles[len(text) - 1] = cast(FuzzyCharRole) fuzzy_packed_lookup(char_roles, cast(uint)types);
+ roles[len(text) - 1] = cast(FuzzyCharRole)fuzzy_packed_lookup(char_roles, cast(uint)types);
- return type_set;
+ return type_set;
}
-fuzzy_rotate :: proc(t: FuzzyCharType, types: ^FuzzyCharType) {
- types^ = cast(FuzzyCharType)(((cast(uint)types^ << 2) | cast(uint)t) & 0x3f);
+fuzzy_rotate :: proc (t: FuzzyCharType, types: ^FuzzyCharType) {
+ types^ = cast(FuzzyCharType)(((cast(uint)types^ << 2) | cast(uint)t) & 0x3f);
}
-fuzzy_packed_lookup :: proc(data: $A/[]$T, i: uint) -> T {
- return (data[i >> 2] >> ((i & 3) * 2)) & 3;
+fuzzy_packed_lookup :: proc (data: $A/[]$T, i: uint) -> T {
+ return (data[i >> 2] >> ((i & 3) * 2)) & 3;
}
-fuzzy_init :: proc(matcher: ^FuzzyMatcher, word: string) -> bool {
-
- matcher.word = word;
- matcher.word_count = min(max_word, len(matcher.word));
+fuzzy_init :: proc (matcher: ^FuzzyMatcher, word: string) -> bool {
- if matcher.pattern_count > matcher.word_count {
- return false;
- }
+ matcher.word = word;
+ matcher.word_count = min(max_word, len(matcher.word));
- if matcher.pattern_count == 0 {
- return true;
- }
+ if matcher.pattern_count > matcher.word_count {
+ return false;
+ }
- matcher.lower_word = strings.to_lower(word, context.temp_allocator);
+ if matcher.pattern_count == 0 {
+ return true;
+ }
- w, p := 0, 0;
+ matcher.lower_word = strings.to_lower(word, context.temp_allocator);
- for ; p != matcher.pattern_count; w += 1{
- if w == matcher.word_count {
- return false;
- }
+ w, p := 0, 0;
- if matcher.lower_word[w] == matcher.lower_pattern[p] {
- p += 1;
- }
- }
+ for ; p != matcher.pattern_count; w += 1 {
+ if w == matcher.word_count {
+ return false;
+ }
+ if matcher.lower_word[w] == matcher.lower_pattern[p] {
+ p += 1;
+ }
+ }
- ref := matcher.word_role[:matcher.word_count];
+ ref := matcher.word_role[:matcher.word_count];
- matcher.word_type_set = fuzzy_calculate_roles(word, &ref);
+ matcher.word_type_set = fuzzy_calculate_roles(word, &ref);
- return true;
+ return true;
}
-fuzzy_skip_penalty :: proc(matcher: ^FuzzyMatcher, w: int) -> int {
+fuzzy_skip_penalty :: proc (matcher: ^FuzzyMatcher, w: int) -> int {
- if w == 0 { // Skipping the first character.
- return 3;
- }
+ if w == 0 { // Skipping the first character.
+ return 3;
+ }
- if matcher.word_role[w] == .Head { // Skipping a segment.
- return 1;
- }
+ if matcher.word_role[w] == .Head { // Skipping a segment.
+ return 1;
+ }
- return 0;
+ return 0;
}
-fuzzy_build_graph :: proc(matcher: ^FuzzyMatcher) {
-
- for w := 0; w < matcher.word_count; w += 1 {
-
- s: FuzzyScoreInfo;
+fuzzy_build_graph :: proc (matcher: ^FuzzyMatcher) {
- score := cast(int)matcher.scores[0][w][miss].score;
- penalty := fuzzy_skip_penalty(matcher, w);
- sum := score - penalty;
+ for w := 0; w < matcher.word_count; w += 1 {
- s.score = sum;
- s.prev = miss;
+ s: FuzzyScoreInfo;
- matcher.scores[0][w + 1][miss] = s;
+ score := cast(int)matcher.scores[0][w][miss].score;
+ penalty := fuzzy_skip_penalty(matcher, w);
+ sum := score - penalty;
- s.score = awful_score;
- s.prev = miss;
+ s.score = sum;
+ s.prev = miss;
- matcher.scores[0][w + 1][match] = s;
+ matcher.scores[0][w + 1][miss] = s;
- }
+ s.score = awful_score;
+ s.prev = miss;
- for p := 0; p < matcher.pattern_count; p += 1 {
+ matcher.scores[0][w + 1][match] = s;
+ }
- for w := p; w < matcher.word_count; w += 1 {
- score := &matcher.scores[p + 1][w + 1];
- pre_miss := &matcher.scores[p + 1][w];
+ for p := 0; p < matcher.pattern_count; p += 1 {
- match_miss_score := pre_miss[match].score;
- miss_miss_score := pre_miss[miss].score;
+ for w := p; w < matcher.word_count; w += 1 {
+ score := &matcher.scores[p + 1][w + 1];
+ pre_miss := &matcher.scores[p + 1][w];
- if p < matcher.pattern_count - 1 {
- match_miss_score -= fuzzy_skip_penalty(matcher, w);
- miss_miss_score -= fuzzy_skip_penalty(matcher, w);
- }
+ match_miss_score := pre_miss[match].score;
+ miss_miss_score := pre_miss[miss].score;
- if match_miss_score > miss_miss_score {
- s: FuzzyScoreInfo;
- s.score = match_miss_score;
- s.prev = match;
- score[miss] = s;
- }
+ if p < matcher.pattern_count - 1 {
+ match_miss_score -= fuzzy_skip_penalty(matcher, w);
+ miss_miss_score -= fuzzy_skip_penalty(matcher, w);
+ }
- else {
- s: FuzzyScoreInfo;
- s.score = miss_miss_score;
- s.prev = miss;
- score[miss] = s;
- }
+ if match_miss_score > miss_miss_score {
+ s: FuzzyScoreInfo;
+ s.score = match_miss_score;
+ s.prev = match;
+ score[miss] = s;
+ } else {
+ s: FuzzyScoreInfo;
+ s.score = miss_miss_score;
+ s.prev = miss;
+ score[miss] = s;
+ }
- pre_match := &matcher.scores[p][w];
+ pre_match := &matcher.scores[p][w];
- match_match_score := fuzzy_allow_match(matcher, p, w, match) ?
- cast(int)pre_match[match].score + fuzzy_match_bonus(matcher, p, w, match)
- : awful_score;
-
- miss_match_score := fuzzy_allow_match(matcher, p, w, miss) ?
- cast(int)pre_match[miss].score + fuzzy_match_bonus(matcher, p, w, miss)
- : awful_score;
-
- if match_match_score > miss_match_score {
- s: FuzzyScoreInfo;
- s.score = match_match_score;
- s.prev = match;
- score[match] = s;
- }
-
- else {
- s: FuzzyScoreInfo;
- s.score = miss_match_score;
- s.prev = miss;
- score[match] = s;
- }
-
- }
-
- }
+ match_match_score := fuzzy_allow_match(matcher, p, w, match) ? cast(int)pre_match[match].score + fuzzy_match_bonus(matcher, p, w, match) : awful_score;
+ miss_match_score := fuzzy_allow_match(matcher, p, w, miss) ? cast(int)pre_match[miss].score + fuzzy_match_bonus(matcher, p, w, miss) : awful_score;
+ if match_match_score > miss_match_score {
+ s: FuzzyScoreInfo;
+ s.score = match_match_score;
+ s.prev = match;
+ score[match] = s;
+ } else {
+ s: FuzzyScoreInfo;
+ s.score = miss_match_score;
+ s.prev = miss;
+ score[match] = s;
+ }
+ }
+ }
}
-
-fuzzy_match_bonus :: proc(matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> int {
-
- assert(matcher.lower_pattern[p] == matcher.lower_word[w]);
-
- s := 1;
-
- is_pattern_single_case := (cast(uint)matcher.pattern_type_set == 1 << cast(uint)FuzzyCharType.Lower);
- is_pattern_single_case |= (cast(uint)matcher.pattern_type_set == 1 << cast(uint)FuzzyCharType.Upper);
-
- // Bonus: case matches, or a Head in the pattern aligns with one in the word.
- // Single-case patterns lack segmentation signals and we assume any character
- // can be a head of a segment.
- if matcher.pattern[p] == matcher.word[w] ||
- (matcher.word_role[w] == FuzzyCharRole.Head &&
- (is_pattern_single_case || matcher.pattern_role[p] == FuzzyCharRole.Head)) {
- s += 1;
- //fmt.println("match 1");
- }
-
- // Bonus: a consecutive match. First character match also gets a bonus to
- // ensure prefix final match score normalizes to 1.0.
- if w == 0 || last == match {
- s += 2;
- //fmt.println("match 2");
- }
-
- // Penalty: matching inside a segment (and previous char wasn't matched).
- if matcher.word_role[w] == FuzzyCharRole.Tail && p > 0 && last == miss {
- s -= 3;
- //fmt.println("match 3");
- }
-
- // Penalty: a Head in the pattern matches in the middle of a word segment.
- if matcher.pattern_role[p] == FuzzyCharRole.Head && matcher.word_role[w] == FuzzyCharRole.Tail {
- s -= 1;
- //fmt.println("match 4");
- }
-
- // Penalty: matching the first pattern character in the middle of a segment.
- if p == 0 && matcher.word_role[w] == FuzzyCharRole.Tail {
- s -= 4;
- //fmt.println("match 5");
- }
-
- assert(s <= perfect_bonus);
-
- return s;
+fuzzy_match_bonus :: proc (matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> int {
+
+ assert(matcher.lower_pattern[p] == matcher.lower_word[w]);
+
+ s := 1;
+
+ is_pattern_single_case := (cast(uint)matcher.pattern_type_set == 1 << cast(uint)FuzzyCharType.Lower);
+ is_pattern_single_case |= (cast(uint)matcher.pattern_type_set == 1 << cast(uint)FuzzyCharType.Upper);
+
+ // Bonus: case matches, or a Head in the pattern aligns with one in the word.
+ // Single-case patterns lack segmentation signals and we assume any character
+ // can be a head of a segment.
+ if matcher.pattern[p] == matcher.word[w] ||
+ (matcher.word_role[w] == FuzzyCharRole.Head &&
+ (is_pattern_single_case || matcher.pattern_role[p] == FuzzyCharRole.Head)) {
+ s += 1;
+ //fmt.println("match 1");
+ }
+
+ // Bonus: a consecutive match. First character match also gets a bonus to
+ // ensure prefix final match score normalizes to 1.0.
+ if w == 0 || last == match {
+ s += 2;
+ //fmt.println("match 2");
+ }
+
+ // Penalty: matching inside a segment (and previous char wasn't matched).
+ if matcher.word_role[w] == FuzzyCharRole.Tail && p > 0 && last == miss {
+ s -= 3;
+ //fmt.println("match 3");
+ }
+
+ // Penalty: a Head in the pattern matches in the middle of a word segment.
+ if matcher.pattern_role[p] == FuzzyCharRole.Head && matcher.word_role[w] == FuzzyCharRole.Tail {
+ s -= 1;
+ //fmt.println("match 4");
+ }
+
+ // Penalty: matching the first pattern character in the middle of a segment.
+ if p == 0 && matcher.word_role[w] == FuzzyCharRole.Tail {
+ s -= 4;
+ //fmt.println("match 5");
+ }
+
+ assert(s <= perfect_bonus);
+
+ return s;
}
-fuzzy_allow_match :: proc(matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> bool {
-
- if matcher.lower_pattern[p] != matcher.lower_word[w] {
- return false;
- }
+fuzzy_allow_match :: proc (matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> bool {
- if last == miss {
+ if matcher.lower_pattern[p] != matcher.lower_word[w] {
+ return false;
+ }
- if matcher.word_role[w] == FuzzyCharRole.Tail && (matcher.word[w] == matcher.lower_word[w] ||
- 0 >= (cast(uint)matcher.word_type_set & 1 << cast(uint)FuzzyCharType.Lower)) {
- return false;
- }
+ if last == miss {
- }
-
- return true;
-}
+ if matcher.word_role[w] == FuzzyCharRole.Tail && (matcher.word[w] == matcher.lower_word[w] ||
+ 0 >= (cast(uint)matcher.word_type_set & 1 << cast(uint)FuzzyCharType.Lower)) {
+ return false;
+ }
+ }
+ return true;
+} \ No newline at end of file
diff --git a/src/common/pool.odin b/src/common/pool.odin
index e82bc38..6789b63 100644
--- a/src/common/pool.odin
+++ b/src/common/pool.odin
@@ -12,39 +12,36 @@ Task_Status :: enum i32 {
Term,
}
-Task_Proc :: #type proc(task: ^Task);
+Task_Proc :: proc (task: ^Task);
Task :: struct {
- procedure: Task_Proc,
- data: rawptr,
+ procedure: Task_Proc,
+ data: rawptr,
user_index: int,
}
-Task_Id :: distinct i32;
+Task_Id :: distinct i32;
INVALID_TASK_ID :: Task_Id(-1);
-
Pool :: struct {
allocator: mem.Allocator,
mutex: sync.Mutex,
sem_available: sync.Semaphore,
processing_task_count: int, // atomic
is_running: bool,
-
- threads: []^thread.Thread,
-
- tasks: [dynamic]Task,
+ threads: []^thread.Thread,
+ tasks: [dynamic]Task,
}
-pool_init :: proc(pool: ^Pool, thread_count: int, allocator := context.allocator) {
- worker_thread_internal :: proc(t: ^thread.Thread) {
+pool_init :: proc (pool: ^Pool, thread_count: int, allocator := context.allocator) {
+ worker_thread_internal :: proc (t: ^thread.Thread) {
pool := (^Pool)(t.data);
- temp_allocator: Scratch_Allocator;
+ temp_allocator: Scratch_Allocator;
- scratch_allocator_init(&temp_allocator, mem.megabytes(6));
+ scratch_allocator_init(&temp_allocator, mem.megabytes(6));
- context.temp_allocator = scratch_allocator(&temp_allocator);
+ context.temp_allocator = scratch_allocator(&temp_allocator);
for pool.is_running {
sync.semaphore_wait_for(&pool.sem_available);
@@ -53,19 +50,18 @@ pool_init :: proc(pool: ^Pool, thread_count: int, allocator := context.allocator
pool_do_work(pool, &task);
}
- free_all(context.temp_allocator);
+ free_all(context.temp_allocator);
}
- scratch_allocator_destroy(&temp_allocator);
+ scratch_allocator_destroy(&temp_allocator);
sync.semaphore_post(&pool.sem_available, 1);
- }
-
+ };
context.allocator = allocator;
- pool.allocator = allocator;
- pool.tasks = make([dynamic]Task);
- pool.threads = make([]^thread.Thread, thread_count);
+ pool.allocator = allocator;
+ pool.tasks = make([dynamic]Task);
+ pool.threads = make([]^thread.Thread, thread_count);
sync.mutex_init(&pool.mutex);
sync.semaphore_init(&pool.sem_available);
@@ -73,13 +69,13 @@ pool_init :: proc(pool: ^Pool, thread_count: int, allocator := context.allocator
for _, i in pool.threads {
t := thread.create(worker_thread_internal);
- t.user_index = i;
- t.data = pool;
+ t.user_index = i;
+ t.data = pool;
pool.threads[i] = t;
}
}
-pool_destroy :: proc(pool: ^Pool) {
+pool_destroy :: proc (pool: ^Pool) {
delete(pool.tasks);
for t in &pool.threads {
@@ -92,13 +88,13 @@ pool_destroy :: proc(pool: ^Pool) {
sync.semaphore_destroy(&pool.sem_available);
}
-pool_start :: proc(pool: ^Pool) {
+pool_start :: proc (pool: ^Pool) {
for t in pool.threads {
thread.start(t);
}
}
-pool_join :: proc(pool: ^Pool) {
+pool_join :: proc (pool: ^Pool) {
pool.is_running = false;
sync.semaphore_post(&pool.sem_available, len(pool.threads));
@@ -110,24 +106,24 @@ pool_join :: proc(pool: ^Pool) {
}
}
-pool_add_task :: proc(pool: ^Pool, procedure: Task_Proc, data: rawptr, user_index: int = 0) {
+pool_add_task :: proc (pool: ^Pool, procedure: Task_Proc, data: rawptr, user_index: int = 0) {
sync.mutex_lock(&pool.mutex);
defer sync.mutex_unlock(&pool.mutex);
task: Task;
- task.procedure = procedure;
- task.data = data;
+ task.procedure = procedure;
+ task.data = data;
task.user_index = user_index;
append(&pool.tasks, task);
sync.semaphore_post(&pool.sem_available, 1);
}
-pool_try_and_pop_task :: proc(pool: ^Pool) -> (task: Task, got_task: bool = false) {
+pool_try_and_pop_task :: proc (pool: ^Pool) -> (task: Task, got_task: bool = false) {
if sync.mutex_try_lock(&pool.mutex) {
if len(pool.tasks) != 0 {
intrinsics.atomic_add(&pool.processing_task_count, 1);
- task = pop_front(&pool.tasks);
+ task = pop_front(&pool.tasks);
got_task = true;
}
sync.mutex_unlock(&pool.mutex);
@@ -135,14 +131,12 @@ pool_try_and_pop_task :: proc(pool: ^Pool) -> (task: Task, got_task: bool = fals
return;
}
-
-pool_do_work :: proc(pool: ^Pool, task: ^Task) {
+pool_do_work :: proc (pool: ^Pool, task: ^Task) {
task.procedure(task);
intrinsics.atomic_sub(&pool.processing_task_count, 1);
}
-
-pool_wait_and_process :: proc(pool: ^Pool) {
+pool_wait_and_process :: proc (pool: ^Pool) {
for len(pool.tasks) != 0 || intrinsics.atomic_load(&pool.processing_task_count) != 0 {
if task, ok := pool_try_and_pop_task(pool); ok {
pool_do_work(pool, &task);
@@ -159,4 +153,4 @@ pool_wait_and_process :: proc(pool: ^Pool) {
}
pool_join(pool);
-}
+} \ No newline at end of file
diff --git a/src/common/position.odin b/src/common/position.odin
index 62a4615..9145091 100644
--- a/src/common/position.odin
+++ b/src/common/position.odin
@@ -6,306 +6,269 @@ import "core:fmt"
import "core:odin/ast"
/*
- This file handles the conversion between utf-16 and utf-8 offsets in the text document
+ This file handles the conversion between utf-16 and utf-8 offsets in the text document
*/
//TODO(Optimize by calculating all the newlines at parse instead of calculating them)
Position :: struct {
- line: int,
+ line: int,
character: int,
-};
+}
Range :: struct {
start: Position,
- end: Position,
-};
+ end: Position,
+}
Location :: struct {
- uri: string,
+ uri: string,
range: Range,
-};
-
+}
AbsoluteRange :: struct {
- start: int,
- end: int,
-};
+ start: int,
+ end: int,
+}
AbsolutePosition :: int;
-get_absolute_position :: proc(position: Position, document_text: [] u8) -> (AbsolutePosition, bool) {
- absolute: AbsolutePosition;
+get_absolute_position :: proc (position: Position, document_text: []u8) -> (AbsolutePosition, bool) {
+ absolute: AbsolutePosition;
- if len(document_text) == 0 {
- absolute = 0;
- return absolute, true;
- }
+ if len(document_text) == 0 {
+ absolute = 0;
+ return absolute, true;
+ }
- line_count := 0;
- index := 1;
- last := document_text[0];
+ line_count := 0;
+ index := 1;
+ last := document_text[0];
- if !get_index_at_line(&index, &line_count, &last, document_text, position.line) {
- return absolute, false;
- }
+ if !get_index_at_line(&index, &line_count, &last, document_text, position.line) {
+ return absolute, false;
+ }
- absolute = index + get_character_offset_u16_to_u8(position.character, document_text[index:]);
+ absolute = index + get_character_offset_u16_to_u8(position.character, document_text[index:]);
- return absolute, true;
+ return absolute, true;
}
-get_relative_token_position :: proc(offset: int, document_text: [] u8, current_start: int) -> Position {
-
- start_index := current_start;
-
- data := document_text[start_index:];
-
- i: int;
+get_relative_token_position :: proc (offset: int, document_text: []u8, current_start: int) -> Position {
- position: Position;
+ start_index := current_start;
- for i + start_index < offset {
+ data := document_text[start_index:];
- r, w := utf8.decode_rune(data[i:]);
+ i: int;
- if r == '\n' { //\r?
- position.character = 0;
- position.line += 1;
- i += 1;
- }
+ position: Position;
- else if w == 0 {
- return position;
- }
+ for i + start_index < offset {
- else {
- if r < 0x10000 {
- position.character += 1;
- }
+ r, w := utf8.decode_rune(data[i:]);
- else {
- position.character += 2;
- }
+ if r == '\n' { //\r?
+ position.character = 0;
+ position.line += 1;
+ i += 1;
+ } else if w == 0 {
+ return position;
+ } else {
+ if r < 0x10000 {
+ position.character += 1;
+ } else {
+ position.character += 2;
+ }
- i += w;
- }
- }
+ i += w;
+ }
+ }
- return position;
+ return position;
}
/*
- Get the range of a token in utf16 space
- */
-get_token_range :: proc(node: ast.Node, document_text: [] u8) -> Range {
- range: Range;
-
+ Get the range of a token in utf16 space
+*/
+get_token_range :: proc (node: ast.Node, document_text: []u8) -> Range {
+ range: Range;
- go_backwards_to_endline :: proc(offset: int, document_text: [] u8) -> int {
+ go_backwards_to_endline :: proc (offset: int, document_text: []u8) -> int {
- index := offset;
+ index := offset;
- for index > 0 && document_text[index] != '\n' && document_text[index] != '\r' {
- index -= 1;
- }
+ for index > 0 && document_text[index] != '\n' && document_text[index] != '\r' {
+ index -= 1;
+ }
- if index == 0 {
- return 0;
- }
+ if index == 0 {
+ return 0;
+ }
- return index+1;
- }
+ return index + 1;
+ };
- pos_offset := min(len(document_text)-1, node.pos.offset);
- end_offset := min(len(document_text)-1, node.end.offset);
+ pos_offset := min(len(document_text) - 1, node.pos.offset);
+ end_offset := min(len(document_text) - 1, node.end.offset);
- offset := go_backwards_to_endline(pos_offset, document_text);
+ offset := go_backwards_to_endline(pos_offset, document_text);
- range.start.line = node.pos.line-1;
- range.start.character = get_character_offset_u8_to_u16(node.pos.column-1, document_text[offset:]);
+ range.start.line = node.pos.line - 1;
+ range.start.character = get_character_offset_u8_to_u16(node.pos.column - 1, document_text[offset:]);
- offset = go_backwards_to_endline(end_offset, document_text);
+ offset = go_backwards_to_endline(end_offset, document_text);
- range.end.line = node.end.line-1;
- range.end.character = get_character_offset_u8_to_u16(node.end.column-1, document_text[offset:]);
+ range.end.line = node.end.line - 1;
+ range.end.character = get_character_offset_u8_to_u16(node.end.column - 1, document_text[offset:]);
- return range;
+ return range;
}
-get_absolute_range :: proc(range: Range, document_text: [] u8) -> (AbsoluteRange, bool) {
+get_absolute_range :: proc (range: Range, document_text: []u8) -> (AbsoluteRange, bool) {
- absolute: AbsoluteRange;
+ absolute: AbsoluteRange;
- if len(document_text) == 0 {
- absolute.start = 0;
- absolute.end = 0;
- return absolute, true;
- }
+ if len(document_text) == 0 {
+ absolute.start = 0;
+ absolute.end = 0;
+ return absolute, true;
+ }
- line_count := 0;
- index := 1;
- last := document_text[0];
+ line_count := 0;
+ index := 1;
+ last := document_text[0];
- if !get_index_at_line(&index, &line_count, &last, document_text, range.start.line) {
- return absolute, false;
- }
+ if !get_index_at_line(&index, &line_count, &last, document_text, range.start.line) {
+ return absolute, false;
+ }
- absolute.start = index + get_character_offset_u16_to_u8(range.start.character, document_text[index:]);
+ absolute.start = index + get_character_offset_u16_to_u8(range.start.character, document_text[index:]);
- //if the last line was indexed at zero we have to move it back to index 1.
- //This happens when line = 0
- if index == 0 {
- index = 1;
- }
+ //if the last line was indexed at zero we have to move it back to index 1.
+ //This happens when line = 0
+ if index == 0 {
+ index = 1;
+ }
- if !get_index_at_line(&index, &line_count, &last, document_text, range.end.line) {
- return absolute, false;
- }
+ if !get_index_at_line(&index, &line_count, &last, document_text, range.end.line) {
+ return absolute, false;
+ }
- absolute.end = index + get_character_offset_u16_to_u8(range.end.character, document_text[index:]);
+ absolute.end = index + get_character_offset_u16_to_u8(range.end.character, document_text[index:]);
- return absolute, true;
+ return absolute, true;
}
+get_index_at_line :: proc (current_index: ^int, current_line: ^int, last: ^u8, document_text: []u8, end_line: int) -> bool {
-get_index_at_line :: proc(current_index: ^int, current_line: ^int, last: ^u8, document_text: []u8, end_line: int) -> bool {
-
- if end_line == 0 {
- current_index^ = 0;
- return true;
- }
-
- if current_line^ == end_line {
- return true;
- }
-
-
- for ; current_index^ < len(document_text); current_index^ += 1 {
-
- current := document_text[current_index^];
+ if end_line == 0 {
+ current_index^ = 0;
+ return true;
+ }
- if last^ == '\r' {
- current_line^ += 1;
+ if current_line^ == end_line {
+ return true;
+ }
- if current_line^ == end_line {
- last^ = current;
- current_index^ += 1;
- return true;
- }
+ for ; current_index^ < len(document_text); current_index^ += 1 {
- }
+ current := document_text[current_index^];
- else if current == '\n' {
- current_line^ += 1;
+ if last^ == '\r' {
+ current_line^ += 1;
- if current_line^ == end_line {
- last^ = current;
- current_index^ += 1;
- return true;
- }
+ if current_line^ == end_line {
+ last^ = current;
+ current_index^ += 1;
+ return true;
+ }
+ } else if current == '\n' {
+ current_line^ += 1;
- }
+ if current_line^ == end_line {
+ last^ = current;
+ current_index^ += 1;
+ return true;
+ }
+ }
- last^ = document_text[current_index^];
- }
-
- return false;
+ last^ = document_text[current_index^];
+ }
+ return false;
}
-get_character_offset_u16_to_u8 :: proc(character_offset: int, document_text: [] u8) -> int {
-
- utf8_idx := 0;
- utf16_idx := 0;
-
- for utf16_idx < character_offset {
-
- r, w := utf8.decode_rune(document_text[utf8_idx:]);
+get_character_offset_u16_to_u8 :: proc (character_offset: int, document_text: []u8) -> int {
- if r == '\n' {
- return utf8_idx;
- }
+ utf8_idx := 0;
+ utf16_idx := 0;
- else if w == 0 {
- return utf8_idx;
- }
+ for utf16_idx < character_offset {
- else if r < 0x10000 {
- utf16_idx += 1;
- }
+ r, w := utf8.decode_rune(document_text[utf8_idx:]);
- else {
- utf16_idx += 2;
- }
+ if r == '\n' {
+ return utf8_idx;
+ } else if w == 0 {
+ return utf8_idx;
+ } else if r < 0x10000 {
+ utf16_idx += 1;
+ } else {
+ utf16_idx += 2;
+ }
- utf8_idx += w;
+ utf8_idx += w;
+ }
- }
-
- return utf8_idx;
+ return utf8_idx;
}
-get_character_offset_u8_to_u16 :: proc(character_offset: int, document_text: [] u8) -> int {
-
- utf8_idx := 0;
- utf16_idx := 0;
-
- for utf8_idx < character_offset {
+get_character_offset_u8_to_u16 :: proc (character_offset: int, document_text: []u8) -> int {
- r, w := utf8.decode_rune(document_text[utf8_idx:]);
+ utf8_idx := 0;
+ utf16_idx := 0;
- if r == '\n' {
- return utf16_idx;
- }
+ for utf8_idx < character_offset {
- else if w == 0 {
- return utf16_idx;
- }
+ r, w := utf8.decode_rune(document_text[utf8_idx:]);
- else if r < 0x10000 {
- utf16_idx += 1;
- }
+ if r == '\n' {
+ return utf16_idx;
+ } else if w == 0 {
+ return utf16_idx;
+ } else if r < 0x10000 {
+ utf16_idx += 1;
+ } else {
+ utf16_idx += 2;
+ }
- else {
- utf16_idx += 2;
- }
-
- utf8_idx += w;
-
- }
-
- return utf16_idx;
+ utf8_idx += w;
+ }
+ return utf16_idx;
}
-get_end_line_u16 :: proc(document_text: [] u8) -> int {
-
- utf8_idx := 0;
- utf16_idx := 0;
-
- for utf8_idx < len(document_text) {
- r, w := utf8.decode_rune(document_text[utf8_idx:]);
-
- if r == '\n' {
- return utf16_idx;
- }
-
- else if w == 0 {
- return utf16_idx;
- }
+get_end_line_u16 :: proc (document_text: []u8) -> int {
- else if r < 0x10000 {
- utf16_idx += 1;
- }
+ utf8_idx := 0;
+ utf16_idx := 0;
- else {
- utf16_idx += 2;
- }
+ for utf8_idx < len(document_text) {
+ r, w := utf8.decode_rune(document_text[utf8_idx:]);
- utf8_idx += w;
+ if r == '\n' {
+ return utf16_idx;
+ } else if w == 0 {
+ return utf16_idx;
+ } else if r < 0x10000 {
+ utf16_idx += 1;
+ } else {
+ utf16_idx += 2;
+ }
- }
+ utf8_idx += w;
+ }
- return utf16_idx;
+ return utf16_idx;
} \ No newline at end of file
diff --git a/src/common/pretty.odin b/src/common/pretty.odin
index 2255621..ff0d5ff 100644
--- a/src/common/pretty.odin
+++ b/src/common/pretty.odin
@@ -3,244 +3,238 @@ package common
import "core:odin/ast"
import "core:fmt"
-
/*
- Ast visualization to help in debugging and development
- */
-
-print_ast :: proc{
- print_ast_array,
- print_ast_dynamic_array,
- print_ast_node,
-};
+ Ast visualization to help in debugging and development
+*/
-print_ast_array :: proc(array: $A/[]^$T, depth: int, src: []byte, newline := false) {
+print_ast :: proc {
+print_ast_array,
+print_ast_dynamic_array,
+print_ast_node};
- for elem, i in array {
- print_ast(elem, depth, src);
- }
+print_ast_array :: proc (array: $A/[]^$T, depth: int, src: []byte, newline := false) {
+ for elem, i in array {
+ print_ast(elem, depth, src);
+ }
}
-print_ast_dynamic_array :: proc(array: $A/[dynamic]^$T, depth: int, src: []byte, newline := false) {
-
- for elem, i in array {
- print_ast(elem, depth, src);
- }
+print_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, depth: int, src: []byte, newline := false) {
+ for elem, i in array {
+ print_ast(elem, depth, src);
+ }
}
/*
- Not fully printed out, filling it in as needed.
- */
-
-print_ast_node :: proc(node: ^ast.Node, depth: int, src: []byte, newline := false) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- if newline {
- fmt.println();
-
- for i := 0; i < depth; i += 1 {
- fmt.printf(" ", );
- }
-
- }
-
- name := string(src[node.pos.offset:node.end.offset]);
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- fmt.printf(" %v ", n.name);
- case Implicit:
- case Undef:
- case Basic_Lit:
- case Ellipsis:
- print_ast(n.expr, depth + 1, src);
- case Proc_Lit:
- fmt.printf("function");
- print_ast(n.type, depth + 1, src);
- print_ast(n.body, depth + 1, src, true);
- case Comp_Lit:
- print_ast(n.type, depth + 1, src);
- print_ast(n.elems, depth + 1, src);
- case Tag_Expr:
- print_ast(n.expr, depth + 1, src);
- case Unary_Expr:
- print_ast(n.expr, depth + 1, src);
- case Binary_Expr:
- print_ast(n.left, depth + 1, src);
- fmt.printf("%v", n.op.text);
- print_ast(n.right, depth + 1, src);
- case Paren_Expr:
- print_ast(n.expr, depth + 1, src);
- case Call_Expr:
- fmt.printf("call");
- print_ast(n.expr, depth + 1, src);
- fmt.printf("(");
- print_ast(n.args, depth + 1, src);
- fmt.printf(")");
- case Selector_Expr:
- print_ast(n.expr, depth + 1, src);
- fmt.printf(".");
- print_ast(n.field, depth + 1, src);
- case Index_Expr:
- print_ast(n.expr, depth + 1, src);
- print_ast(n.index, depth + 1, src);
- case Deref_Expr:
- print_ast(n.expr, depth + 1, src);
- case Slice_Expr:
- print_ast(n.expr, depth + 1, src);
- print_ast(n.low, depth + 1, src);
- print_ast(n.high, depth + 1, src);
- case Field_Value:
- print_ast(n.field, depth + 1, src);
- print_ast(n.value, depth + 1, src);
- case Ternary_Expr:
- print_ast(n.cond, depth + 1, src);
- print_ast(n.x, depth + 1, src);
- print_ast(n.y, depth + 1, src);
- case Ternary_If_Expr:
- print_ast(n.x, depth + 1, src);
- print_ast(n.cond, depth + 1, src);
- print_ast(n.y, depth + 1, src);
- case Ternary_When_Expr:
- print_ast(n.x, depth + 1, src);
- print_ast(n.cond, depth + 1, src);
- print_ast(n.y, depth + 1, src);
- case Type_Assertion:
- print_ast(n.expr, depth + 1, src);
- print_ast(n.type, depth + 1, src);
- case Type_Cast:
- print_ast(n.type, depth + 1, src);
- print_ast(n.expr, depth + 1, src);
- case Auto_Cast:
- print_ast(n.expr, depth + 1, src);
- case Bad_Stmt:
- case Empty_Stmt:
- case Expr_Stmt:
- print_ast(n.expr, depth + 1, src);
- case Tag_Stmt:
- r := cast(^Expr_Stmt)node;
- print_ast(r.expr, depth + 1, src);
- case Assign_Stmt:
- print_ast(n.lhs, depth + 1, src);
- print_ast(n.rhs, depth + 1, src);
- case Block_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.stmts, depth + 1, src);
- case If_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.init, depth + 1, src);
- print_ast(n.cond, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- print_ast(n.else_stmt, depth + 1, src);
- case When_Stmt:
- print_ast(n.cond, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- print_ast(n.else_stmt, depth + 1, src);
- case Return_Stmt:
- print_ast(n.results, depth + 1, src);
- case Defer_Stmt:
- print_ast(n.stmt, depth + 1, src);
- case For_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.init, depth + 1, src);
- print_ast(n.cond, depth + 1, src);
- print_ast(n.post, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Range_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.val0, depth + 1, src);
- print_ast(n.val1, depth + 1, src);
- print_ast(n.expr, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Case_Clause:
- print_ast(n.list, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Switch_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.init, depth + 1, src);
- print_ast(n.cond, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Type_Switch_Stmt:
- print_ast(n.label, depth + 1, src);
- print_ast(n.tag, depth + 1, src);
- print_ast(n.expr, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Branch_Stmt:
- print_ast(n.label, depth + 1, src);
- case Using_Stmt:
- print_ast(n.list, depth + 1, src);
- case Bad_Decl:
- case Value_Decl:
- print_ast(n.attributes, depth + 1, src);
- print_ast(n.names, depth + 1, src);
- print_ast(n.type, depth + 1, src);
- print_ast(n.values, depth + 1, src);
- fmt.println();
- case Package_Decl:
- case Import_Decl:
- case Foreign_Block_Decl:
- print_ast(n.attributes, depth + 1, src);
- print_ast(n.foreign_library, depth + 1, src);
- print_ast(n.body, depth + 1, src);
- case Foreign_Import_Decl:
- print_ast(n.name, depth + 1, src);
- case Proc_Group:
- print_ast(n.args, depth + 1, src);
- case Attribute:
- print_ast(n.elems, depth + 1, src);
- case Field:
- print_ast(n.names, depth + 1, src);
- print_ast(n.type, depth + 1, src);
- print_ast(n.default_value, depth + 1, src);
- case Field_List:
- print_ast(n.list, depth + 1, src);
- case Typeid_Type:
- print_ast(n.specialization, depth + 1, src);
- case Helper_Type:
- print_ast(n.type, depth + 1, src);
- case Distinct_Type:
- print_ast(n.type, depth + 1, src);
- case Poly_Type:
- print_ast(n.type, depth + 1, src);
- print_ast(n.specialization, depth + 1, src);
- case Proc_Type:
- print_ast(n.params, depth + 1, src);
- print_ast(n.results, depth + 1, src);
- case Pointer_Type:
- print_ast(n.elem, depth + 1, src);
- case Array_Type:
- print_ast(n.len, depth + 1, src);
- print_ast(n.elem, depth + 1, src);
- case Dynamic_Array_Type:
- print_ast(n.elem, depth + 1, src);
- case Struct_Type:
- fmt.printf("struct");
- print_ast(n.poly_params, depth + 1, src);
- print_ast(n.align, depth + 1, src);
- print_ast(n.fields, depth + 1, src);
- case Union_Type:
- print_ast(n.poly_params, depth + 1, src);
- print_ast(n.align, depth + 1, src);
- print_ast(n.variants, depth + 1, src);
- case Enum_Type:
- print_ast(n.base_type, depth + 1, src);
- print_ast(n.fields, depth + 1, src);
- case Bit_Set_Type:
- print_ast(n.elem, depth + 1, src);
- print_ast(n.underlying, depth + 1, src);
- case Map_Type:
- print_ast(n.key, depth + 1, src);
- print_ast(n.value, depth + 1, src);
- case:
- fmt.panicf("Unhandled node kind: %T", n);
- }
-
+ Not fully printed out, filling it in as needed.
+*/
+
+print_ast_node :: proc (node: ^ast.Node, depth: int, src: []byte, newline := false) {
+
+ using ast;
+
+ if node == nil {
+ return;
+ }
+
+ if newline {
+ fmt.println();
+
+ for i := 0; i < depth; i += 1 {
+ fmt.printf(" ");
+ }
+ }
+
+ name := string(src[node.pos.offset:node.end.offset]);
+
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ fmt.printf(" %v ", n.name);
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ case Ellipsis:
+ print_ast(n.expr, depth + 1, src);
+ case Proc_Lit:
+ fmt.printf("function");
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.body, depth + 1, src, true);
+ case Comp_Lit:
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.elems, depth + 1, src);
+ case Tag_Expr:
+ print_ast(n.expr, depth + 1, src);
+ case Unary_Expr:
+ print_ast(n.expr, depth + 1, src);
+ case Binary_Expr:
+ print_ast(n.left, depth + 1, src);
+ fmt.printf("%v", n.op.text);
+ print_ast(n.right, depth + 1, src);
+ case Paren_Expr:
+ print_ast(n.expr, depth + 1, src);
+ case Call_Expr:
+ fmt.printf("call");
+ print_ast(n.expr, depth + 1, src);
+ fmt.printf("(");
+ print_ast(n.args, depth + 1, src);
+ fmt.printf(")");
+ case Selector_Expr:
+ print_ast(n.expr, depth + 1, src);
+ fmt.printf(".");
+ print_ast(n.field, depth + 1, src);
+ case Index_Expr:
+ print_ast(n.expr, depth + 1, src);
+ print_ast(n.index, depth + 1, src);
+ case Deref_Expr:
+ print_ast(n.expr, depth + 1, src);
+ case Slice_Expr:
+ print_ast(n.expr, depth + 1, src);
+ print_ast(n.low, depth + 1, src);
+ print_ast(n.high, depth + 1, src);
+ case Field_Value:
+ print_ast(n.field, depth + 1, src);
+ print_ast(n.value, depth + 1, src);
+ case Ternary_Expr:
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.x, depth + 1, src);
+ print_ast(n.y, depth + 1, src);
+ case Ternary_If_Expr:
+ print_ast(n.x, depth + 1, src);
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.y, depth + 1, src);
+ case Ternary_When_Expr:
+ print_ast(n.x, depth + 1, src);
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.y, depth + 1, src);
+ case Type_Assertion:
+ print_ast(n.expr, depth + 1, src);
+ print_ast(n.type, depth + 1, src);
+ case Type_Cast:
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.expr, depth + 1, src);
+ case Auto_Cast:
+ print_ast(n.expr, depth + 1, src);
+ case Bad_Stmt:
+ case Empty_Stmt:
+ case Expr_Stmt:
+ print_ast(n.expr, depth + 1, src);
+ case Tag_Stmt:
+ r := cast(^Expr_Stmt)node;
+ print_ast(r.expr, depth + 1, src);
+ case Assign_Stmt:
+ print_ast(n.lhs, depth + 1, src);
+ print_ast(n.rhs, depth + 1, src);
+ case Block_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.stmts, depth + 1, src);
+ case If_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.init, depth + 1, src);
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ print_ast(n.else_stmt, depth + 1, src);
+ case When_Stmt:
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ print_ast(n.else_stmt, depth + 1, src);
+ case Return_Stmt:
+ print_ast(n.results, depth + 1, src);
+ case Defer_Stmt:
+ print_ast(n.stmt, depth + 1, src);
+ case For_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.init, depth + 1, src);
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.post, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Range_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.val0, depth + 1, src);
+ print_ast(n.val1, depth + 1, src);
+ print_ast(n.expr, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Case_Clause:
+ print_ast(n.list, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Switch_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.init, depth + 1, src);
+ print_ast(n.cond, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Type_Switch_Stmt:
+ print_ast(n.label, depth + 1, src);
+ print_ast(n.tag, depth + 1, src);
+ print_ast(n.expr, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Branch_Stmt:
+ print_ast(n.label, depth + 1, src);
+ case Using_Stmt:
+ print_ast(n.list, depth + 1, src);
+ case Bad_Decl:
+ case Value_Decl:
+ print_ast(n.attributes, depth + 1, src);
+ print_ast(n.names, depth + 1, src);
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.values, depth + 1, src);
+ fmt.println();
+ case Package_Decl:
+ case Import_Decl:
+ case Foreign_Block_Decl:
+ print_ast(n.attributes, depth + 1, src);
+ print_ast(n.foreign_library, depth + 1, src);
+ print_ast(n.body, depth + 1, src);
+ case Foreign_Import_Decl:
+ print_ast(n.name, depth + 1, src);
+ case Proc_Group:
+ print_ast(n.args, depth + 1, src);
+ case Attribute:
+ print_ast(n.elems, depth + 1, src);
+ case Field:
+ print_ast(n.names, depth + 1, src);
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.default_value, depth + 1, src);
+ case Field_List:
+ print_ast(n.list, depth + 1, src);
+ case Typeid_Type:
+ print_ast(n.specialization, depth + 1, src);
+ case Helper_Type:
+ print_ast(n.type, depth + 1, src);
+ case Distinct_Type:
+ print_ast(n.type, depth + 1, src);
+ case Poly_Type:
+ print_ast(n.type, depth + 1, src);
+ print_ast(n.specialization, depth + 1, src);
+ case Proc_Type:
+ print_ast(n.params, depth + 1, src);
+ print_ast(n.results, depth + 1, src);
+ case Pointer_Type:
+ print_ast(n.elem, depth + 1, src);
+ case Array_Type:
+ print_ast(n.len, depth + 1, src);
+ print_ast(n.elem, depth + 1, src);
+ case Dynamic_Array_Type:
+ print_ast(n.elem, depth + 1, src);
+ case Struct_Type:
+ fmt.printf("struct");
+ print_ast(n.poly_params, depth + 1, src);
+ print_ast(n.align, depth + 1, src);
+ print_ast(n.fields, depth + 1, src);
+ case Union_Type:
+ print_ast(n.poly_params, depth + 1, src);
+ print_ast(n.align, depth + 1, src);
+ print_ast(n.variants, depth + 1, src);
+ case Enum_Type:
+ print_ast(n.base_type, depth + 1, src);
+ print_ast(n.fields, depth + 1, src);
+ case Bit_Set_Type:
+ print_ast(n.elem, depth + 1, src);
+ print_ast(n.underlying, depth + 1, src);
+ case Map_Type:
+ print_ast(n.key, depth + 1, src);
+ print_ast(n.value, depth + 1, src);
+ case:
+ fmt.panicf("Unhandled node kind: %T", n);
+ }
} \ No newline at end of file
diff --git a/src/common/sha1.odin b/src/common/sha1.odin
index d3d2119..80a8765 100644
--- a/src/common/sha1.odin
+++ b/src/common/sha1.odin
@@ -4,47 +4,47 @@ import "core:fmt"
//ported version of https://llvm.org/doxygen/SHa1_8cpp_source.html
-rol :: proc(number: u32, bits: u32) -> u32 {
- return number << bits | number >> (32 - bits);
+rol :: proc (number: u32, bits: u32) -> u32 {
+ return number << bits | number >> (32 - bits);
}
-blk0 :: proc(buf: [] u32, i: int) -> u32 {
- return buf[i];
+blk0 :: proc (buf: []u32, i: int) -> u32 {
+ return buf[i];
}
-blk :: proc(buf: [] u32, i: int) -> u32 {
- buf[i & 15] = rol(buf[(i + 13) & 15] ~ buf[(i + 8) & 15] ~ buf[(i + 2) & 15]
- ~ buf[i & 15], 1);
+blk :: proc (buf: []u32, i: int) -> u32 {
+ buf[i & 15] = rol(buf[(i + 13) & 15] ~ buf[(i + 8) & 15] ~ buf[(i + 2) & 15] ~
+ buf[i & 15], 1);
- return buf[i & 15];
+ return buf[i & 15];
}
-r0 :: proc(a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: [] u32) {
- e^ += ((b^ & (c^ ~ d^)) ~ d^) + blk0(buf, i) + 0x5a827999 + rol(a^, 5);
- b^ = rol(b^, 30);
+r0 :: proc (a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: []u32) {
+ e^ += ((b^ & (c^ ~ d^)) ~ d^) + blk0(buf, i) + 0x5a827999 + rol(a^, 5);
+ b^ = rol(b^, 30);
}
-r1 :: proc(a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: [] u32) {
- e^ += ((b^ & (c^ ~ d^)) ~ d^) + blk(buf, i) + 0x5a827999 + rol(a^, 5);
- b^ += rol(b^, 30);
+r1 :: proc (a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: []u32) {
+ e^ += ((b^ & (c^ ~ d^)) ~ d^) + blk(buf, i) + 0x5a827999 + rol(a^, 5);
+ b^ += rol(b^, 30);
}
-r2 :: proc(a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: [] u32) {
- e^ += (b^ ~ c^ ~ d^) + blk(buf, i) + 0x6ed9eba1 + rol(a^, 5);
- b^ += rol(b^, 30);
+r2 :: proc (a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: []u32) {
+ e^ += (b^ ~ c^ ~ d^) + blk(buf, i) + 0x6ed9eba1 + rol(a^, 5);
+ b^ += rol(b^, 30);
}
-r3 :: proc(a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: [] u32) {
- e^ += (((b^ | c^) & d^) | (b^ & c^)) + blk(buf, i) + 0x8F1bbcdc + rol(a^, 5);
- b^ += rol(b^, 30);
+r3 :: proc (a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: []u32) {
+ e^ += (((b^ | c^) & d^) | (b^ & c^)) + blk(buf, i) + 0x8F1bbcdc + rol(a^, 5);
+ b^ += rol(b^, 30);
}
-r4 :: proc(a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: [] u32) {
- e^ += (b^ ~ c^ ~ d^) + blk(buf, i) + 0xca62c1d6 + rol(a^, 5);
- b^ += rol(b^, 30);
+r4 :: proc (a: ^u32, b: ^u32, c: ^u32, d: ^u32, e: ^u32, i: int, buf: []u32) {
+ e^ += (b^ ~ c^ ~ d^) + blk(buf, i) + 0xca62c1d6 + rol(a^, 5);
+ b^ += rol(b^, 30);
}
-SHA1_K0 :: 0x5a827999;
+SHA1_K0 :: 0x5a827999;
SHA1_K20 :: 0x6ed9eba1;
SHA1_K40 :: 0x8f1bbcdc;
SHA1_K60 :: 0xca62c1d6;
@@ -56,254 +56,242 @@ SEED_3 :: 0x10325476;
SEED_4 :: 0xc3d2e1f0;
BLOCK_LENGTH :: 64;
-HASH_LENGTH :: 20;
+HASH_LENGTH :: 20;
Sha1context :: struct {
- buf: struct #raw_union {
- c: [BLOCK_LENGTH] byte,
- l: [BLOCK_LENGTH/4] u32,
- },
- state: [HASH_LENGTH / 4] u32,
- byte_count: u32,
- buf_offset: u8,
-};
-
-sha1_init :: proc(state_context: ^Sha1context) {
- state_context.state[0] = SEED_0;
- state_context.state[1] = SEED_1;
- state_context.state[2] = SEED_2;
- state_context.state[3] = SEED_3;
- state_context.state[4] = SEED_4;
- state_context.byte_count = 0;
- state_context.buf_offset = 0;
+ buf: struct #raw_union {
+ c: [BLOCK_LENGTH]byte,
+ l: [BLOCK_LENGTH / 4]u32,
+ },
+ state: [HASH_LENGTH / 4]u32,
+ byte_count: u32,
+ buf_offset: u8,
}
-sha1_hash_block :: proc(state_context: ^Sha1context) {
- a := state_context.state[0];
- b := state_context.state[1];
- c := state_context.state[2];
- d := state_context.state[3];
- e := state_context.state[4];
-
- // 4 rounds of 20 operations each. loop unrolled.
- r0(&a, &b, &c, &d, &e, 0, state_context.buf.l[:]);
- r0(&e, &a, &b, &c, &d, 1, state_context.buf.l[:]);
- r0(&d, &e, &a, &b, &c, 2, state_context.buf.l[:]);
- r0(&c, &d, &e, &a, &b, 3, state_context.buf.l[:]);
- r0(&b, &c, &d, &e, &a, 4, state_context.buf.l[:]);
- r0(&a, &b, &c, &d, &e, 5, state_context.buf.l[:]);
- r0(&e, &a, &b, &c, &d, 6, state_context.buf.l[:]);
- r0(&d, &e, &a, &b, &c, 7, state_context.buf.l[:]);
- r0(&c, &d, &e, &a, &b, 8, state_context.buf.l[:]);
- r0(&b, &c, &d, &e, &a, 9, state_context.buf.l[:]);
- r0(&a, &b, &c, &d, &e, 10, state_context.buf.l[:]);
- r0(&e, &a, &b, &c, &d, 11, state_context.buf.l[:]);
- r0(&d, &e, &a, &b, &c, 12, state_context.buf.l[:]);
- r0(&c, &d, &e, &a, &b, 13, state_context.buf.l[:]);
- r0(&b, &c, &d, &e, &a, 14, state_context.buf.l[:]);
- r0(&a, &b, &c, &d, &e, 15, state_context.buf.l[:]);
- r1(&e, &a, &b, &c, &d, 16, state_context.buf.l[:]);
- r1(&d, &e, &a, &b, &c, 17, state_context.buf.l[:]);
- r1(&c, &d, &e, &a, &b, 18, state_context.buf.l[:]);
- r1(&b, &c, &d, &e, &a, 19, state_context.buf.l[:]);
-
- r2(&a, &b, &c, &d, &e, 20, state_context.buf.l[:]);
- r2(&e, &a, &b, &c, &d, 21, state_context.buf.l[:]);
- r2(&d, &e, &a, &b, &c, 22, state_context.buf.l[:]);
- r2(&c, &d, &e, &a, &b, 23, state_context.buf.l[:]);
- r2(&b, &c, &d, &e, &a, 24, state_context.buf.l[:]);
- r2(&a, &b, &c, &d, &e, 25, state_context.buf.l[:]);
- r2(&e, &a, &b, &c, &d, 26, state_context.buf.l[:]);
- r2(&d, &e, &a, &b, &c, 27, state_context.buf.l[:]);
- r2(&c, &d, &e, &a, &b, 28, state_context.buf.l[:]);
- r2(&b, &c, &d, &e, &a, 29, state_context.buf.l[:]);
- r2(&a, &b, &c, &d, &e, 30, state_context.buf.l[:]);
- r2(&e, &a, &b, &c, &d, 31, state_context.buf.l[:]);
- r2(&d, &e, &a, &b, &c, 32, state_context.buf.l[:]);
- r2(&c, &d, &e, &a, &b, 33, state_context.buf.l[:]);
- r2(&b, &c, &d, &e, &a, 34, state_context.buf.l[:]);
- r2(&a, &b, &c, &d, &e, 35, state_context.buf.l[:]);
- r2(&e, &a, &b, &c, &d, 36, state_context.buf.l[:]);
- r2(&d, &e, &a, &b, &c, 37, state_context.buf.l[:]);
- r2(&c, &d, &e, &a, &b, 38, state_context.buf.l[:]);
- r2(&b, &c, &d, &e, &a, 39, state_context.buf.l[:]);
-
- r3(&a, &b, &c, &d, &e, 40, state_context.buf.l[:]);
- r3(&e, &a, &b, &c, &d, 41, state_context.buf.l[:]);
- r3(&d, &e, &a, &b, &c, 42, state_context.buf.l[:]);
- r3(&c, &d, &e, &a, &b, 43, state_context.buf.l[:]);
- r3(&b, &c, &d, &e, &a, 44, state_context.buf.l[:]);
- r3(&a, &b, &c, &d, &e, 45, state_context.buf.l[:]);
- r3(&e, &a, &b, &c, &d, 46, state_context.buf.l[:]);
- r3(&d, &e, &a, &b, &c, 47, state_context.buf.l[:]);
- r3(&c, &d, &e, &a, &b, 48, state_context.buf.l[:]);
- r3(&b, &c, &d, &e, &a, 49, state_context.buf.l[:]);
- r3(&a, &b, &c, &d, &e, 50, state_context.buf.l[:]);
- r3(&e, &a, &b, &c, &d, 51, state_context.buf.l[:]);
- r3(&d, &e, &a, &b, &c, 52, state_context.buf.l[:]);
- r3(&c, &d, &e, &a, &b, 53, state_context.buf.l[:]);
- r3(&b, &c, &d, &e, &a, 54, state_context.buf.l[:]);
- r3(&a, &b, &c, &d, &e, 55, state_context.buf.l[:]);
- r3(&e, &a, &b, &c, &d, 56, state_context.buf.l[:]);
- r3(&d, &e, &a, &b, &c, 57, state_context.buf.l[:]);
- r3(&c, &d, &e, &a, &b, 58, state_context.buf.l[:]);
- r3(&b, &c, &d, &e, &a, 59, state_context.buf.l[:]);
-
- r4(&a, &b, &c, &d, &e, 60, state_context.buf.l[:]);
- r4(&e, &a, &b, &c, &d, 61, state_context.buf.l[:]);
- r4(&d, &e, &a, &b, &c, 62, state_context.buf.l[:]);
- r4(&c, &d, &e, &a, &b, 63, state_context.buf.l[:]);
- r4(&b, &c, &d, &e, &a, 64, state_context.buf.l[:]);
- r4(&a, &b, &c, &d, &e, 65, state_context.buf.l[:]);
- r4(&e, &a, &b, &c, &d, 66, state_context.buf.l[:]);
- r4(&d, &e, &a, &b, &c, 67, state_context.buf.l[:]);
- r4(&c, &d, &e, &a, &b, 68, state_context.buf.l[:]);
- r4(&b, &c, &d, &e, &a, 69, state_context.buf.l[:]);
- r4(&a, &b, &c, &d, &e, 70, state_context.buf.l[:]);
- r4(&e, &a, &b, &c, &d, 71, state_context.buf.l[:]);
- r4(&d, &e, &a, &b, &c, 72, state_context.buf.l[:]);
- r4(&c, &d, &e, &a, &b, 73, state_context.buf.l[:]);
- r4(&b, &c, &d, &e, &a, 74, state_context.buf.l[:]);
- r4(&a, &b, &c, &d, &e, 75, state_context.buf.l[:]);
- r4(&e, &a, &b, &c, &d, 76, state_context.buf.l[:]);
- r4(&d, &e, &a, &b, &c, 77, state_context.buf.l[:]);
- r4(&c, &d, &e, &a, &b, 78, state_context.buf.l[:]);
- r4(&b, &c, &d, &e, &a, 79, state_context.buf.l[:]);
-
- state_context.state[0] += a;
- state_context.state[1] += b;
- state_context.state[2] += c;
- state_context.state[3] += d;
- state_context.state[4] += e;
- }
-
-
-sha1_add_uncounted :: proc(state_context: ^Sha1context, data: byte) {
-
-
- when ODIN_ENDIAN == "big" {
- state_context.buf.c[state_context.buf_offset] = data;
- }
-
- else {
- state_context.buf.c[state_context.buf_offset ~ 3] = data;
- }
-
- state_context.buf_offset += 1;
-
- if state_context.buf_offset == BLOCK_LENGTH {
- sha1_hash_block(state_context);
- state_context.buf_offset = 0;
- }
-
+sha1_init :: proc (state_context: ^Sha1context) {
+ state_context.state[0] = SEED_0;
+ state_context.state[1] = SEED_1;
+ state_context.state[2] = SEED_2;
+ state_context.state[3] = SEED_3;
+ state_context.state[4] = SEED_4;
+ state_context.byte_count = 0;
+ state_context.buf_offset = 0;
}
-sha1_write_byte :: proc(state_context: ^Sha1context, data: byte) {
- state_context.byte_count += 1;
- sha1_add_uncounted(state_context, data);
+sha1_hash_block :: proc (state_context: ^Sha1context) {
+ a := state_context.state[0];
+ b := state_context.state[1];
+ c := state_context.state[2];
+ d := state_context.state[3];
+ e := state_context.state[4];
+
+ // 4 rounds of 20 operations each. loop unrolled.
+ r0(&a, &b, &c, &d, &e, 0, state_context.buf.l[:]);
+ r0(&e, &a, &b, &c, &d, 1, state_context.buf.l[:]);
+ r0(&d, &e, &a, &b, &c, 2, state_context.buf.l[:]);
+ r0(&c, &d, &e, &a, &b, 3, state_context.buf.l[:]);
+ r0(&b, &c, &d, &e, &a, 4, state_context.buf.l[:]);
+ r0(&a, &b, &c, &d, &e, 5, state_context.buf.l[:]);
+ r0(&e, &a, &b, &c, &d, 6, state_context.buf.l[:]);
+ r0(&d, &e, &a, &b, &c, 7, state_context.buf.l[:]);
+ r0(&c, &d, &e, &a, &b, 8, state_context.buf.l[:]);
+ r0(&b, &c, &d, &e, &a, 9, state_context.buf.l[:]);
+ r0(&a, &b, &c, &d, &e, 10, state_context.buf.l[:]);
+ r0(&e, &a, &b, &c, &d, 11, state_context.buf.l[:]);
+ r0(&d, &e, &a, &b, &c, 12, state_context.buf.l[:]);
+ r0(&c, &d, &e, &a, &b, 13, state_context.buf.l[:]);
+ r0(&b, &c, &d, &e, &a, 14, state_context.buf.l[:]);
+ r0(&a, &b, &c, &d, &e, 15, state_context.buf.l[:]);
+ r1(&e, &a, &b, &c, &d, 16, state_context.buf.l[:]);
+ r1(&d, &e, &a, &b, &c, 17, state_context.buf.l[:]);
+ r1(&c, &d, &e, &a, &b, 18, state_context.buf.l[:]);
+ r1(&b, &c, &d, &e, &a, 19, state_context.buf.l[:]);
+
+ r2(&a, &b, &c, &d, &e, 20, state_context.buf.l[:]);
+ r2(&e, &a, &b, &c, &d, 21, state_context.buf.l[:]);
+ r2(&d, &e, &a, &b, &c, 22, state_context.buf.l[:]);
+ r2(&c, &d, &e, &a, &b, 23, state_context.buf.l[:]);
+ r2(&b, &c, &d, &e, &a, 24, state_context.buf.l[:]);
+ r2(&a, &b, &c, &d, &e, 25, state_context.buf.l[:]);
+ r2(&e, &a, &b, &c, &d, 26, state_context.buf.l[:]);
+ r2(&d, &e, &a, &b, &c, 27, state_context.buf.l[:]);
+ r2(&c, &d, &e, &a, &b, 28, state_context.buf.l[:]);
+ r2(&b, &c, &d, &e, &a, 29, state_context.buf.l[:]);
+ r2(&a, &b, &c, &d, &e, 30, state_context.buf.l[:]);
+ r2(&e, &a, &b, &c, &d, 31, state_context.buf.l[:]);
+ r2(&d, &e, &a, &b, &c, 32, state_context.buf.l[:]);
+ r2(&c, &d, &e, &a, &b, 33, state_context.buf.l[:]);
+ r2(&b, &c, &d, &e, &a, 34, state_context.buf.l[:]);
+ r2(&a, &b, &c, &d, &e, 35, state_context.buf.l[:]);
+ r2(&e, &a, &b, &c, &d, 36, state_context.buf.l[:]);
+ r2(&d, &e, &a, &b, &c, 37, state_context.buf.l[:]);
+ r2(&c, &d, &e, &a, &b, 38, state_context.buf.l[:]);
+ r2(&b, &c, &d, &e, &a, 39, state_context.buf.l[:]);
+
+ r3(&a, &b, &c, &d, &e, 40, state_context.buf.l[:]);
+ r3(&e, &a, &b, &c, &d, 41, state_context.buf.l[:]);
+ r3(&d, &e, &a, &b, &c, 42, state_context.buf.l[:]);
+ r3(&c, &d, &e, &a, &b, 43, state_context.buf.l[:]);
+ r3(&b, &c, &d, &e, &a, 44, state_context.buf.l[:]);
+ r3(&a, &b, &c, &d, &e, 45, state_context.buf.l[:]);
+ r3(&e, &a, &b, &c, &d, 46, state_context.buf.l[:]);
+ r3(&d, &e, &a, &b, &c, 47, state_context.buf.l[:]);
+ r3(&c, &d, &e, &a, &b, 48, state_context.buf.l[:]);
+ r3(&b, &c, &d, &e, &a, 49, state_context.buf.l[:]);
+ r3(&a, &b, &c, &d, &e, 50, state_context.buf.l[:]);
+ r3(&e, &a, &b, &c, &d, 51, state_context.buf.l[:]);
+ r3(&d, &e, &a, &b, &c, 52, state_context.buf.l[:]);
+ r3(&c, &d, &e, &a, &b, 53, state_context.buf.l[:]);
+ r3(&b, &c, &d, &e, &a, 54, state_context.buf.l[:]);
+ r3(&a, &b, &c, &d, &e, 55, state_context.buf.l[:]);
+ r3(&e, &a, &b, &c, &d, 56, state_context.buf.l[:]);
+ r3(&d, &e, &a, &b, &c, 57, state_context.buf.l[:]);
+ r3(&c, &d, &e, &a, &b, 58, state_context.buf.l[:]);
+ r3(&b, &c, &d, &e, &a, 59, state_context.buf.l[:]);
+
+ r4(&a, &b, &c, &d, &e, 60, state_context.buf.l[:]);
+ r4(&e, &a, &b, &c, &d, 61, state_context.buf.l[:]);
+ r4(&d, &e, &a, &b, &c, 62, state_context.buf.l[:]);
+ r4(&c, &d, &e, &a, &b, 63, state_context.buf.l[:]);
+ r4(&b, &c, &d, &e, &a, 64, state_context.buf.l[:]);
+ r4(&a, &b, &c, &d, &e, 65, state_context.buf.l[:]);
+ r4(&e, &a, &b, &c, &d, 66, state_context.buf.l[:]);
+ r4(&d, &e, &a, &b, &c, 67, state_context.buf.l[:]);
+ r4(&c, &d, &e, &a, &b, 68, state_context.buf.l[:]);
+ r4(&b, &c, &d, &e, &a, 69, state_context.buf.l[:]);
+ r4(&a, &b, &c, &d, &e, 70, state_context.buf.l[:]);
+ r4(&e, &a, &b, &c, &d, 71, state_context.buf.l[:]);
+ r4(&d, &e, &a, &b, &c, 72, state_context.buf.l[:]);
+ r4(&c, &d, &e, &a, &b, 73, state_context.buf.l[:]);
+ r4(&b, &c, &d, &e, &a, 74, state_context.buf.l[:]);
+ r4(&a, &b, &c, &d, &e, 75, state_context.buf.l[:]);
+ r4(&e, &a, &b, &c, &d, 76, state_context.buf.l[:]);
+ r4(&d, &e, &a, &b, &c, 77, state_context.buf.l[:]);
+ r4(&c, &d, &e, &a, &b, 78, state_context.buf.l[:]);
+ r4(&b, &c, &d, &e, &a, 79, state_context.buf.l[:]);
+
+ state_context.state[0] += a;
+ state_context.state[1] += b;
+ state_context.state[2] += c;
+ state_context.state[3] += d;
+ state_context.state[4] += e;
}
-sha1_update :: proc(state_context: ^Sha1context, data: [] byte) {
-
- state_context.byte_count += cast(u32)len(data);
-
- current_data := data;
+sha1_add_uncounted :: proc (state_context: ^Sha1context, data: byte) {
- if state_context.buf_offset > 0 {
- remainder := min(len(current_data), BLOCK_LENGTH - cast(int)state_context.buf_offset);
+ when ODIN_ENDIAN == "big" {
+ state_context.buf.c[state_context.buf_offset] = data;
+ } else
- for i := 0; i < remainder; i += 1 {
- sha1_add_uncounted(state_context, current_data[i]);
- }
+ {
+ state_context.buf.c[state_context.buf_offset ~ 3] = data;
+ }
- current_data = current_data[remainder-1:];
- }
+ state_context.buf_offset += 1;
- for len(current_data) >= BLOCK_LENGTH {
- assert(state_context.buf_offset == 0);
- assert(BLOCK_LENGTH % 4 == 0);
+ if state_context.buf_offset == BLOCK_LENGTH {
+ sha1_hash_block(state_context);
+ state_context.buf_offset = 0;
+ }
+}
- BLOCK_LENGTH_32 :: BLOCK_LENGTH / 4;
+sha1_write_byte :: proc (state_context: ^Sha1context, data: byte) {
+ state_context.byte_count += 1;
+ sha1_add_uncounted(state_context, data);
+}
- for i := 0; i < BLOCK_LENGTH_32; i += 1 {
- n := (transmute([] u32)current_data)[i];
+sha1_update :: proc (state_context: ^Sha1context, data: []byte) {
- state_context.buf.l[i] = (((n & 0xFF) << 24) |
- ((n & 0xFF00) << 8) |
- ((n & 0xFF0000) >> 8) |
- ((n & 0xFF000000) >> 24));
- }
+ state_context.byte_count += cast(u32)len(data);
- sha1_hash_block(state_context);
+ current_data := data;
- current_data = current_data[BLOCK_LENGTH-1:];
- }
+ if state_context.buf_offset > 0 {
+ remainder := min(len(current_data), BLOCK_LENGTH - cast(int)state_context.buf_offset);
- for c in current_data {
- sha1_add_uncounted(state_context, c);
- }
+ for i := 0; i < remainder; i += 1 {
+ sha1_add_uncounted(state_context, current_data[i]);
+ }
-}
+ current_data = current_data[remainder - 1:];
+ }
-sha1_pad :: proc(state_context: ^Sha1context) {
+ for len(current_data) >= BLOCK_LENGTH {
+ assert(state_context.buf_offset == 0);
+ assert(BLOCK_LENGTH % 4 == 0);
- sha1_add_uncounted(state_context, 0x80);
+ BLOCK_LENGTH_32 :: BLOCK_LENGTH / 4;
- for state_context.buf_offset != 56 {
- sha1_add_uncounted(state_context, 0x00);
- }
+ for i := 0; i < BLOCK_LENGTH_32; i += 1 {
+ n := (transmute([]u32)current_data)[i];
- sha1_add_uncounted(state_context, 0); // We're only using 32 bit lengths
- sha1_add_uncounted(state_context, 0); // But SHA-1 supports 64 bit lengths
- sha1_add_uncounted(state_context, 0); // So zero pad the top bits
- sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 29)); // Shifting to multiply by 8
- sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 21)); // as SHA-1 supports bitstreams as well as
- sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 13)); // byte.
- sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 5));
- sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count << 3));
+ state_context.buf.l[i] = (((n & 0xFF) << 24) |
+ ((n & 0xFF00) << 8) |
+ ((n & 0xFF0000) >> 8) |
+ ((n & 0xFF000000) >> 24));
+ }
+ sha1_hash_block(state_context);
+ current_data = current_data[BLOCK_LENGTH - 1:];
+ }
+ for c in current_data {
+ sha1_add_uncounted(state_context, c);
+ }
}
-sha1_final :: proc(state_context: ^Sha1context, result: ^[5] u32) {
- sha1_pad(state_context);
+sha1_pad :: proc (state_context: ^Sha1context) {
- when ODIN_ENDIAN == "big" {
+ sha1_add_uncounted(state_context, 0x80);
- for i := 0; i < 5; i += 1 {
- result[i] = state_context.state[i];
- }
-
- }
-
- else {
- for i := 0; i < 5; i += 1 {
- result[i] = (((state_context.state[i]) << 24) & 0xff000000) |
- (((state_context.state[i]) << 8) & 0x00ff0000) |
- (((state_context.state[i]) >> 8) & 0x0000ff00) |
- (((state_context.state[i]) >> 24) & 0x000000ff);
- }
- }
+ for state_context.buf_offset != 56 {
+ sha1_add_uncounted(state_context, 0x00);
+ }
+ sha1_add_uncounted(state_context, 0); // We're only using 32 bit lengths
+ sha1_add_uncounted(state_context, 0); // But SHA-1 supports 64 bit lengths
+ sha1_add_uncounted(state_context, 0); // So zero pad the top bits
+ sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 29)); // Shifting to multiply by 8
+ sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 21)); // as SHA-1 supports bitstreams as well as
+ sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 13)); // byte.
+ sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count >> 5));
+ sha1_add_uncounted(state_context, cast(u8)(state_context.byte_count << 3));
}
+sha1_final :: proc (state_context: ^Sha1context, result: ^[5]u32) {
+ sha1_pad(state_context);
+ when ODIN_ENDIAN == "big" {
-sha1_hash :: proc(data: [] byte) -> [20] byte {
+ for i := 0; i < 5; i += 1 {
+ result[i] = state_context.state[i];
+ }
+ } else
- sha1_context: Sha1context;
- sha1_init(&sha1_context);
- sha1_update(&sha1_context, data);
+ {
+ for i := 0; i < 5; i += 1 {
+ result[i] = (((state_context.state[i]) << 24) & 0xff000000) |
+ (((state_context.state[i]) << 8) & 0x00ff0000) |
+ (((state_context.state[i]) >> 8) & 0x0000ff00) |
+ (((state_context.state[i]) >> 24) & 0x000000ff);
+ }
+ }
+}
- result: [20] byte;
+sha1_hash :: proc (data: []byte) -> [20]byte {
- sha1_final(&sha1_context, cast(^[5] u32)&result);
+ sha1_context: Sha1context;
+ sha1_init(&sha1_context);
+ sha1_update(&sha1_context, data);
- ret: [20] byte;
+ result: [20]byte;
- copy(ret[:], result[:]);
+ sha1_final(&sha1_context, cast(^[5]u32)&result);
- return ret;
-}
+ ret: [20]byte;
+
+ copy(ret[:], result[:]);
+ return ret;
+} \ No newline at end of file
diff --git a/src/common/track_allocator.odin b/src/common/track_allocator.odin
index d2c6d24..69c0e86 100644
--- a/src/common/track_allocator.odin
+++ b/src/common/track_allocator.odin
@@ -1,7 +1,7 @@
package common
/*
- https://gist.github.com/jharler/7ee9a4d5b46e31f7f9399da49cfabe72
+ https://gist.github.com/jharler/7ee9a4d5b46e31f7f9399da49cfabe72
*/
import "core:mem"
@@ -13,184 +13,180 @@ import "core:log"
// ----------------------------------------------------------------------------------------------------
ThreadSafe_Allocator_Data :: struct {
- actual_allocator : mem.Allocator,
- mutex : sync.Mutex,
+ actual_allocator: mem.Allocator,
+ mutex: sync.Mutex,
}
// ----------------------------------------------------------------------------------------------------
threadsafe_allocator :: proc (allocator: mem.Allocator) -> mem.Allocator {
- data := new(ThreadSafe_Allocator_Data);
- data.actual_allocator = allocator;
- sync.mutex_init(&data.mutex);
+ data := new(ThreadSafe_Allocator_Data);
+ data.actual_allocator = allocator;
+ sync.mutex_init(&data.mutex);
- return mem.Allocator { procedure = threadsafe_allocator_proc, data = data};
+ return mem.Allocator {procedure = threadsafe_allocator_proc, data = data};
}
// ----------------------------------------------------------------------------------------------------
-threadsafe_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
- old_memory: rawptr, old_size: int, flags : u64 = 0, loc := #caller_location) -> rawptr {
+threadsafe_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
+old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
- data := cast(^ThreadSafe_Allocator_Data)allocator_data;
+ data := cast(^ThreadSafe_Allocator_Data)allocator_data;
- sync.mutex_lock(&data.mutex);
- defer sync.mutex_unlock(&data.mutex);
+ sync.mutex_lock(&data.mutex);
+ defer sync.mutex_unlock(&data.mutex);
- return data.actual_allocator.procedure(data.actual_allocator.data, mode, size, alignment, old_memory, old_size, flags, loc);
+ return data.actual_allocator.procedure(data.actual_allocator.data, mode, size, alignment, old_memory, old_size, flags, loc);
}
// ----------------------------------------------------------------------------------------------------
Memleak_Allocator_Data :: struct {
- actual_allocator : mem.Allocator,
- allocations : map[rawptr] Memleak_Entry,
- frees : map[rawptr] Memleak_Entry,
- allocation_count : u32,
- unexpected_frees : u32,
- mutex : sync.Mutex,
- track_frees : bool,
+ actual_allocator: mem.Allocator,
+ allocations: map[rawptr]Memleak_Entry,
+ frees: map[rawptr]Memleak_Entry,
+ allocation_count: u32,
+ unexpected_frees: u32,
+ mutex: sync.Mutex,
+ track_frees: bool,
}
// ----------------------------------------------------------------------------------------------------
Memleak_Entry :: struct {
- location : runtime.Source_Code_Location,
- size : int,
- index : u32,
+ location: runtime.Source_Code_Location,
+ size: int,
+ index: u32,
}
// ----------------------------------------------------------------------------------------------------
-memleak_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
- old_memory: rawptr, old_size: int, flags : u64 = 0, loc := #caller_location) -> rawptr {
-
- memleak := cast(^Memleak_Allocator_Data)allocator_data;
-
- sync.mutex_lock(&memleak.mutex);
- defer sync.mutex_unlock(&memleak.mutex);
-
- if mode == .Free {
- if old_memory not_in memleak.allocations {
- if memleak.track_frees {
- if old_memory in memleak.frees {
- fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory already freed by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
- free_loc := memleak.frees[old_memory].location;
- fmt.println(fmt.tprintf("{0}({1}:{2}) {3} <<< freed here", loc.file_path, loc.line, loc.column, loc.procedure));
- }
- else {
- fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory not allocated or previously freed by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
- }
- }
- else {
- fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory not allocated by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
- }
- memleak.unexpected_frees += 1;
- return nil;
- }
- else {
- //entry := &memleak.allocations[old_memory];
- delete_key(&memleak.allocations, old_memory);
-
- if memleak.track_frees {
- memleak.frees[old_memory] = Memleak_Entry {
- location = loc,
- size = size,
- index = 0,
- };
- }
- }
- }
-
- result := memleak.actual_allocator.procedure(memleak.actual_allocator.data, mode, size, alignment, old_memory, old_size, flags, loc);
-
- if mode == .Resize && result != old_memory {
- delete_key(&memleak.allocations, old_memory);
- }
-
- if mode != .Free {
- // using a conditional breakpoint with memleak.allocation_count in the condition
- // can be very useful for inspecting the stack trace of a particular allocation
-
- memleak.allocations[result] = Memleak_Entry {
- location = loc,
- size = size,
- index = memleak.allocation_count,
- };
-
- memleak.allocation_count += 1;
-
- if memleak.track_frees {
- if result in memleak.frees {
- delete_key(&memleak.frees, result);
- }
- }
- }
-
- return result;
+memleak_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
+old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
+
+ memleak := cast(^Memleak_Allocator_Data)allocator_data;
+
+ sync.mutex_lock(&memleak.mutex);
+ defer sync.mutex_unlock(&memleak.mutex);
+
+ if mode == .Free {
+ if old_memory not_in memleak.allocations {
+ if memleak.track_frees {
+ if old_memory in memleak.frees {
+ fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory already freed by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
+ free_loc := memleak.frees[old_memory].location;
+ fmt.println(fmt.tprintf("{0}({1}:{2}) {3} <<< freed here", loc.file_path, loc.line, loc.column, loc.procedure));
+ } else {
+ fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory not allocated or previously freed by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
+ }
+ } else {
+ fmt.println(fmt.tprintf("{0}({1}:{2}) {3} freed memory not allocated by this memleak allocator", loc.file_path, loc.line, loc.column, loc.procedure));
+ }
+ memleak.unexpected_frees += 1;
+ return nil;
+ } else {
+ //entry := &memleak.allocations[old_memory];
+ delete_key(&memleak.allocations, old_memory);
+
+ if memleak.track_frees {
+ memleak.frees[old_memory] = Memleak_Entry {
+ location = loc,
+ size = size,
+ index = 0,
+ };
+ }
+ }
+ }
+
+ result := memleak.actual_allocator.procedure(memleak.actual_allocator.data, mode, size, alignment, old_memory, old_size, flags, loc);
+
+ if mode == .Resize && result != old_memory {
+ delete_key(&memleak.allocations, old_memory);
+ }
+
+ if mode != .Free {
+ // using a conditional breakpoint with memleak.allocation_count in the condition
+ // can be very useful for inspecting the stack trace of a particular allocation
+
+ memleak.allocations[result] = Memleak_Entry {
+ location = loc,
+ size = size,
+ index = memleak.allocation_count,
+ };
+
+ memleak.allocation_count += 1;
+
+ if memleak.track_frees {
+ if result in memleak.frees {
+ delete_key(&memleak.frees, result);
+ }
+ }
+ }
+
+ return result;
}
// ----------------------------------------------------------------------------------------------------
memleak_allocator :: proc (track_frees: bool) -> mem.Allocator {
- make([]byte, 1, context.temp_allocator); // so the temp allocation doesn't clutter our results
+ make([]byte, 1, context.temp_allocator); // so the temp allocation doesn't clutter our results
- data := new(Memleak_Allocator_Data);
- data.actual_allocator = context.allocator;
- data.allocations = make(map[rawptr]Memleak_Entry);
+ data := new(Memleak_Allocator_Data);
+ data.actual_allocator = context.allocator;
+ data.allocations = make(map[rawptr]Memleak_Entry);
- if track_frees {
- data.track_frees = true;
- data.frees = make(map[rawptr]Memleak_Entry);
- }
+ if track_frees {
+ data.track_frees = true;
+ data.frees = make(map[rawptr]Memleak_Entry);
+ }
- sync.mutex_init(&data.mutex);
+ sync.mutex_init(&data.mutex);
- return mem.Allocator { procedure = memleak_allocator_proc, data = data};
+ return mem.Allocator {procedure = memleak_allocator_proc, data = data};
}
// ----------------------------------------------------------------------------------------------------
-memleak_detected_leaks :: proc() -> bool {
- if context.allocator.procedure == memleak_allocator_proc {
- memleak := cast(^Memleak_Allocator_Data)context.allocator.data;
- return len(memleak.allocations) > 0;
- }
+memleak_detected_leaks :: proc () -> bool {
+ if context.allocator.procedure == memleak_allocator_proc {
+ memleak := cast(^Memleak_Allocator_Data)context.allocator.data;
+ return len(memleak.allocations) > 0;
+ }
- return false;
+ return false;
}
// ----------------------------------------------------------------------------------------------------
-memleak_dump :: proc( memleak_alloc : mem.Allocator, dump_proc : proc(message:string, user_data:rawptr), user_data : rawptr) {
- memleak := cast(^Memleak_Allocator_Data)memleak_alloc.data;
+memleak_dump :: proc (memleak_alloc: mem.Allocator, dump_proc: proc (message: string, user_data: rawptr), user_data: rawptr) {
+ memleak := cast(^Memleak_Allocator_Data)memleak_alloc.data;
- context.allocator = memleak.actual_allocator;
+ context.allocator = memleak.actual_allocator;
- // check for an ignore default_temp_allocator_proc allocations
- tmp_check := 0;
- for _, leak in &memleak.allocations {
- if leak.location.procedure == "default_temp_allocator_proc" {
- tmp_check += 1;
- }
- }
+ // check for an ignore default_temp_allocator_proc allocations
+ tmp_check := 0;
+ for _, leak in &memleak.allocations {
+ if leak.location.procedure == "default_temp_allocator_proc" {
+ tmp_check += 1;
+ }
+ }
+ dump_proc(fmt.tprintf("{0} memory leaks detected!", len(memleak.allocations) - tmp_check), user_data);
+ dump_proc(fmt.tprintf("{0} unexpected frees", memleak.unexpected_frees), user_data);
- dump_proc(fmt.tprintf("{0} memory leaks detected!", len(memleak.allocations) - tmp_check), user_data);
- dump_proc(fmt.tprintf("{0} unexpected frees", memleak.unexpected_frees), user_data);
+ for _, leak in &memleak.allocations {
+ if leak.location.procedure != "default_temp_allocator_proc" {
+ dump_proc(fmt.tprintf("{0}({1}:{2}) {3} allocated {4} bytes [{5}]", leak.location.file_path, leak.location.line, leak.location.column, leak.location.procedure, leak.size, leak.index), user_data);
+ }
+ }
- for _, leak in &memleak.allocations {
- if leak.location.procedure != "default_temp_allocator_proc" {
- dump_proc(fmt.tprintf("{0}({1}:{2}) {3} allocated {4} bytes [{5}]", leak.location.file_path, leak.location.line, leak.location.column, leak.location.procedure, leak.size, leak.index), user_data);
- }
- }
-
- context.allocator = mem.Allocator {procedure = memleak_allocator_proc, data = memleak};
+ context.allocator = mem.Allocator {procedure = memleak_allocator_proc, data = memleak};
}
// ----------------------------------------------------------------------------------------------------
-log_dump :: proc(message:string, user_data:rawptr) {
- log.info(message);
+log_dump :: proc (message: string, user_data: rawptr) {
+ log.info(message);
} \ No newline at end of file
diff --git a/src/common/types.odin b/src/common/types.odin
index 73b3f3a..4cdfc24 100644
--- a/src/common/types.odin
+++ b/src/common/types.odin
@@ -1,26 +1,27 @@
package common
+Error :: enum
-Error :: enum {
- None = 0,
+// Defined by JSON RPC
- // Defined by JSON RPC
- ParseError = -32700,
- InvalidRequest = -32600,
- MethodNotFound = -32601,
- InvalidParams = -32602,
- InternalError = -32603,
- serverErrorStart = -32099,
- serverErrorEnd = -32000,
- ServerNotInitialized = -32002,
- UnknownErrorCode = -32001,
+// Defined by the protocol.
- // Defined by the protocol.
- RequestCancelled = -32800,
- ContentModified = -32801,
-};
+{
+ None = 0,
+ ParseError = -32700,
+ InvalidRequest = -32600,
+ MethodNotFound = -32601,
+ InvalidParams = -32602,
+ InternalError = -32603,
+ serverErrorStart = -32099,
+ serverErrorEnd = -32000,
+ ServerNotInitialized = -32002,
+ UnknownErrorCode = -32001,
+ RequestCancelled = -32800,
+ ContentModified = -32801,
+}
WorkspaceFolder :: struct {
- name: string,
- uri: string,
-};
+ name: string,
+ uri: string,
+} \ No newline at end of file
diff --git a/src/common/uri.odin b/src/common/uri.odin
index c1d5bfd..b69ae92 100644
--- a/src/common/uri.odin
+++ b/src/common/uri.odin
@@ -8,164 +8,151 @@ import "core:unicode/utf8"
import "core:path/filepath"
Uri :: struct {
- uri: string,
- decode_full: string,
- path: string,
-};
+ uri: string,
+ decode_full: string,
+ path: string,
+}
//Note(Daniel, This is an extremely incomplete uri parser and for now ignores fragment and query and only handles file schema)
-parse_uri :: proc(value: string, allocator: mem.Allocator) -> (Uri, bool) {
+parse_uri :: proc (value: string, allocator: mem.Allocator) -> (Uri, bool) {
- uri: Uri;
+ uri: Uri;
- decoded, ok := decode_percent(value, allocator);
+ decoded, ok := decode_percent(value, allocator);
- if !ok {
- return uri, false;
- }
+ if !ok {
+ return uri, false;
+ }
- starts := "file:///";
+ starts := "file:///";
- start_index := len(starts);
+ start_index := len(starts);
- if !starts_with(decoded, starts) {
- return uri, false;
- }
+ if !starts_with(decoded, starts) {
+ return uri, false;
+ }
- when ODIN_OS != "windows" {
- start_index -= 1;
- }
+ when ODIN_OS != "windows" {
+ start_index -= 1;
+ }
- uri.uri = strings.clone(value, allocator);
- uri.decode_full = decoded;
- uri.path = decoded[start_index:];
+ uri.uri = strings.clone(value, allocator);
+ uri.decode_full = decoded;
+ uri.path = decoded[start_index:];
- return uri, true;
+ return uri, true;
}
-
//Note(Daniel, Again some really incomplete and scuffed uri writer)
-create_uri :: proc(path: string, allocator: mem.Allocator) -> Uri {
- path_forward, _ := filepath.to_slash(path, context.temp_allocator);
+create_uri :: proc (path: string, allocator: mem.Allocator) -> Uri {
+ path_forward, _ := filepath.to_slash(path, context.temp_allocator);
- builder := strings.make_builder(allocator);
+ builder := strings.make_builder(allocator);
- //bad
- when ODIN_OS == "windows" {
- strings.write_string(&builder, "file:///");
- }
- else {
- strings.write_string(&builder, "file://");
- }
+ //bad
+ when ODIN_OS == "windows" {
+ strings.write_string(&builder, "file:///");
+ } else
+ {
+ strings.write_string(&builder, "file://");
+ }
- strings.write_string(&builder, encode_percent(path_forward, context.temp_allocator));
+ strings.write_string(&builder, encode_percent(path_forward, context.temp_allocator));
- uri: Uri;
+ uri: Uri;
- uri.uri = strings.to_string(builder);
- uri.decode_full = strings.clone(path_forward, allocator);
- uri.path = uri.decode_full;
+ uri.uri = strings.to_string(builder);
+ uri.decode_full = strings.clone(path_forward, allocator);
+ uri.path = uri.decode_full;
- return uri;
+ return uri;
}
-delete_uri :: proc(uri: Uri) {
+delete_uri :: proc (uri: Uri) {
- if uri.uri != "" {
- delete(uri.uri);
- }
+ if uri.uri != "" {
+ delete(uri.uri);
+ }
- if uri.decode_full != "" {
- delete(uri.decode_full);
- }
+ if uri.decode_full != "" {
+ delete(uri.decode_full);
+ }
}
-encode_percent :: proc(value: string, allocator: mem.Allocator) -> string {
-
- builder := strings.make_builder(allocator);
+encode_percent :: proc (value: string, allocator: mem.Allocator) -> string {
- data := transmute([]u8)value;
- index: int;
+ builder := strings.make_builder(allocator);
- for index < len(value) {
+ data := transmute([]u8)value;
+ index: int;
- r, w := utf8.decode_rune(data[index:]);
+ for index < len(value) {
- if r > 127 || r == ':'{
+ r, w := utf8.decode_rune(data[index:]);
- for i := 0; i < w; i += 1 {
- strings.write_string(&builder, strings.concatenate({"%", fmt.tprintf("%X", data[index+i])},
- context.temp_allocator));
- }
+ if r > 127 || r == ':' {
- }
+ for i := 0; i < w; i += 1 {
+ strings.write_string(&builder, strings.concatenate({"%", fmt.tprintf("%X", data[index + i])},
+ context.temp_allocator));
+ }
+ } else {
+ strings.write_byte(&builder, data[index]);
+ }
- else {
- strings.write_byte(&builder, data[index]);
- }
+ index += w;
+ }
- index += w;
- }
-
- return strings.to_string(builder);
+ return strings.to_string(builder);
}
@(private)
-starts_with :: proc(value: string, starts_with: string) -> bool {
-
- if len(value) < len(starts_with) {
- return false;
- }
+starts_with :: proc (value: string, starts_with: string) -> bool {
- for i := 0; i < len(starts_with); i += 1 {
+ if len(value) < len(starts_with) {
+ return false;
+ }
- if value[i] != starts_with[i] {
- return false;
- }
+ for i := 0; i < len(starts_with); i += 1 {
- }
+ if value[i] != starts_with[i] {
+ return false;
+ }
+ }
- return true;
+ return true;
}
-
@(private)
-decode_percent :: proc(value: string, allocator: mem.Allocator) -> (string, bool) {
+decode_percent :: proc (value: string, allocator: mem.Allocator) -> (string, bool) {
- builder := strings.make_builder(allocator);
+ builder := strings.make_builder(allocator);
- for i := 0; i < len(value); i += 1 {
+ for i := 0; i < len(value); i += 1 {
- if value[i] == '%' {
+ if value[i] == '%' {
- if i+2 < len(value) {
+ if i + 2 < len(value) {
- v, ok := strconv.parse_i64_of_base(value[i+1:i+3], 16);
+ v, ok := strconv.parse_i64_of_base(value[i + 1:i + 3], 16);
- if !ok {
- strings.destroy_builder(&builder);
- return "", false;
- }
+ if !ok {
+ strings.destroy_builder(&builder);
+ return "", false;
+ }
- strings.write_byte(&builder, cast(byte)v);
+ strings.write_byte(&builder, cast(byte)v);
- i+= 2;
- }
-
- else {
- strings.destroy_builder(&builder);
- return "", false;
- }
-
- }
-
- else {
- strings.write_byte(&builder, value[i]);
- }
-
- }
-
- return strings.to_string(builder), true;
-}
+ i += 2;
+ } else {
+ strings.destroy_builder(&builder);
+ return "", false;
+ }
+ } else {
+ strings.write_byte(&builder, value[i]);
+ }
+ }
+ return strings.to_string(builder), true;
+} \ No newline at end of file
diff --git a/src/index/build.odin b/src/index/build.odin
index 6412174..87f5bd1 100644
--- a/src/index/build.odin
+++ b/src/index/build.odin
@@ -12,141 +12,131 @@ import "core:strings"
import "shared:common"
/*
- Not fully sure how to handle rebuilding, but one thing is for sure, dynamic indexing has to have a background thread
- rebuilding every minute or less to fight against stale information
- */
-
+ Not fully sure how to handle rebuilding, but one thing is for sure, dynamic indexing has to have a background thread
+ rebuilding every minute or less to fight against stale information
+*/
//test version for static indexing
symbol_collection: SymbolCollection;
-files: [dynamic] string;
+files: [dynamic]string;
-platform_os : map [string] bool = {
- "windows" = true,
- "linux" = true,
- "essence" = true,
- "js" = true,
- "freebsd" = true,
+platform_os: map[string]bool = {
+ "windows" = true,
+ "linux" = true,
+ "essence" = true,
+ "js" = true,
+ "freebsd" = true,
};
-walk_static_index_build :: proc(info: os.File_Info, in_err: os.Errno) -> (err: os.Errno, skip_dir: bool) {
-
- if info.is_dir {
- return 0, false;
- }
-
- if filepath.ext(info.name) != ".odin" {
- return 0, false;
- }
-
+walk_static_index_build :: proc (info: os.File_Info, in_err: os.Errno) -> (err: os.Errno, skip_dir: bool) {
- last_underscore_index := strings.last_index(info.name, "_");
- last_dot_index := strings.last_index(info.name, ".");
+ if info.is_dir {
+ return 0, false;
+ }
- if last_underscore_index+1 < last_dot_index {
+ if filepath.ext(info.name) != ".odin" {
+ return 0, false;
+ }
- name_between := info.name[last_underscore_index+1:last_dot_index];
+ last_underscore_index := strings.last_index(info.name, "_");
+ last_dot_index := strings.last_index(info.name, ".");
- if _, ok := platform_os[name_between]; ok {
+ if last_underscore_index + 1 < last_dot_index {
- if name_between != ODIN_OS {
- return 0, false;
- }
+ name_between := info.name[last_underscore_index + 1:last_dot_index];
- }
+ if _, ok := platform_os[name_between]; ok {
- }
+ if name_between != ODIN_OS {
+ return 0, false;
+ }
+ }
+ }
- forward, _ := filepath.to_slash(info.fullpath, context.temp_allocator);
+ forward, _ := filepath.to_slash(info.fullpath, context.temp_allocator);
- append(&files, strings.clone(forward, context.allocator));
+ append(&files, strings.clone(forward, context.allocator));
- return 0, false;
-};
+ return 0, false;
+}
-build_static_index :: proc(allocator := context.allocator, config: ^common.Config) {
+build_static_index :: proc (allocator := context.allocator, config: ^common.Config) {
- symbol_collection = make_symbol_collection(allocator, config);
+ symbol_collection = make_symbol_collection(allocator, config);
- files = make([dynamic] string, context.allocator);
+ files = make([dynamic]string, context.allocator);
- for k, v in config.collections {
- filepath.walk(v, walk_static_index_build);
- }
+ for k, v in config.collections {
+ filepath.walk(v, walk_static_index_build);
+ }
- context.allocator = context.temp_allocator;
+ context.allocator = context.temp_allocator;
- for fullpath in files {
+ for fullpath in files {
- data, ok := os.read_entire_file(fullpath, context.temp_allocator);
+ data, ok := os.read_entire_file(fullpath, context.temp_allocator);
- if !ok {
- log.errorf("failed to read entire file for indexing %v", fullpath);
- continue;
- }
+ if !ok {
+ log.errorf("failed to read entire file for indexing %v", fullpath);
+ continue;
+ }
- p := parser.Parser {
- err = log_error_handler,
- warn = log_warning_handler,
- };
+ p := parser.Parser {
+ err = log_error_handler,
+ warn = log_warning_handler,
+ };
- //have to cheat the parser since it really wants to parse an entire package with the new changes...
+ //have to cheat the parser since it really wants to parse an entire package with the new changes...
- dir := filepath.base(filepath.dir(fullpath, context.temp_allocator));
+ dir := filepath.base(filepath.dir(fullpath, context.temp_allocator));
- pkg := new(ast.Package);
- pkg.kind = .Normal;
- pkg.fullpath = fullpath;
- pkg.name = dir;
+ pkg := new(ast.Package);
+ pkg.kind = .Normal;
+ pkg.fullpath = fullpath;
+ pkg.name = dir;
- if dir == "runtime" {
- pkg.kind = .Runtime;
- }
+ if dir == "runtime" {
+ pkg.kind = .Runtime;
+ }
- file := ast.File {
- fullpath = fullpath,
- src = data,
- pkg = pkg,
- };
+ file := ast.File {
+ fullpath = fullpath,
+ src = data,
+ pkg = pkg,
+ };
- ok = parser.parse_file(&p, &file);
+ ok = parser.parse_file(&p, &file);
- if !ok {
- log.info(pkg);
- log.errorf("error in parse file for indexing %v", fullpath);
- }
+ if !ok {
+ log.info(pkg);
+ log.errorf("error in parse file for indexing %v", fullpath);
+ }
- uri := common.create_uri(fullpath, context.temp_allocator);
+ uri := common.create_uri(fullpath, context.temp_allocator);
- //ERROR hover on uri does not show string
- collect_symbols(&symbol_collection, file, uri.uri);
+ //ERROR hover on uri does not show string
+ collect_symbols(&symbol_collection, file, uri.uri);
- free_all(context.temp_allocator);
+ free_all(context.temp_allocator);
- delete(fullpath, allocator);
- }
+ delete(fullpath, allocator);
+ }
- delete(files);
+ delete(files);
- indexer.static_index = make_memory_index(symbol_collection);
+ indexer.static_index = make_memory_index(symbol_collection);
}
-free_static_index :: proc() {
- delete_symbol_collection(symbol_collection);
+free_static_index :: proc () {
+ delete_symbol_collection(symbol_collection);
}
-
-log_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- log.warnf("%v %v %v", pos, msg, args);
+log_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+ log.warnf("%v %v %v", pos, msg, args);
}
-log_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- log.warnf("%v %v %v", pos, msg, args);
-}
-
-
-
-
-
+log_warning_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+ log.warnf("%v %v %v", pos, msg, args);
+} \ No newline at end of file
diff --git a/src/index/clone.odin b/src/index/clone.odin
index 915f822..d3abcfe 100644
--- a/src/index/clone.odin
+++ b/src/index/clone.odin
@@ -7,218 +7,209 @@ import "core:odin/ast"
import "core:strings"
import "core:log"
-new_type :: proc($T: typeid, pos, end: tokenizer.Pos, allocator: mem.Allocator) -> ^T {
+new_type :: proc ($T: typeid, pos, end: tokenizer.Pos, allocator: mem.Allocator) -> ^T {
n := mem.new(T, allocator);
- n.pos = pos;
- n.end = end;
+ n.pos = pos;
+ n.end = end;
n.derived = n^;
base: ^ast.Node = n; // dummy check
_ = base; // "Use" type to make -vet happy
return n;
}
-clone_type :: proc{
- clone_node,
- clone_expr,
- clone_array,
- clone_dynamic_array,
-};
-
-clone_array :: proc(array: $A/[]^$T, allocator: mem.Allocator, unique_strings: ^map[string] string) -> A {
- if len(array) == 0 {
- return nil;
- }
- res := make(A, len(array), allocator);
- for elem, i in array {
- res[i] = auto_cast clone_type(elem, allocator, unique_strings);
- }
- return res;
+clone_type :: proc {
+clone_node,
+clone_expr,
+clone_array,
+clone_dynamic_array};
+
+clone_array :: proc (array: $A/[]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
+ if len(array) == 0 {
+ return nil;
+ }
+ res := make(A, len(array), allocator);
+ for elem, i in array {
+ res[i] = auto_cast clone_type(elem, allocator, unique_strings);
+ }
+ return res;
}
-clone_dynamic_array :: proc(array: $A/[dynamic]^$T, allocator: mem.Allocator, unique_strings: ^map[string] string) -> A {
- if len(array) == 0 {
- return nil;
- }
- res := make(A, len(array), allocator);
- for elem, i in array {
- res[i] = auto_cast clone_type(elem, allocator, unique_strings);
- }
- return res;
+clone_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
+ if len(array) == 0 {
+ return nil;
+ }
+ res := make(A, len(array), allocator);
+ for elem, i in array {
+ res[i] = auto_cast clone_type(elem, allocator, unique_strings);
+ }
+ return res;
}
-clone_expr :: proc(node: ^ast.Expr, allocator: mem.Allocator, unique_strings: ^map[string] string) -> ^ast.Expr {
- return cast(^ast.Expr)clone_node(node, allocator, unique_strings);
+clone_expr :: proc (node: ^ast.Expr, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Expr {
+ return cast(^ast.Expr)clone_node(node, allocator, unique_strings);
}
-clone_node :: proc(node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^map[string] string) -> ^ast.Node {
-
- using ast;
-
- if node == nil {
- return nil;
- }
-
- size := size_of(Node);
- align := align_of(Node);
- ti := type_info_of(node.derived.id);
- if ti != nil {
- size = ti.size;
- align = ti.align;
- }
-
- res := cast(^Node)mem.alloc(size, align, allocator);
- src: rawptr = node;
- if node.derived != nil {
- src = node.derived.data;
- }
- mem.copy(res, src, size);
- res.derived.data = rawptr(res);
-
- if unique_strings != nil && node.pos.file != "" {
- res.pos.file = get_index_unique_string(unique_strings, allocator, node.pos.file);
- }
-
- else {
- res.pos.file = node.pos.file;
- }
-
- if unique_strings != nil && node.end.file != "" {
- res.end.file = get_index_unique_string(unique_strings, allocator, node.end.file);
- }
-
- else {
- res.end.file = node.end.file;
- }
-
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- r := cast(^Ident)res;
- if unique_strings == nil {
- r.name = strings.clone(n.name, allocator);
- }
- else {
- r.name = get_index_unique_string(unique_strings, allocator, n.name);
- }
- case Implicit:
- case Undef:
- case Basic_Lit:
- case Basic_Directive:
- case Ellipsis:
- r := cast(^Ellipsis)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- case Tag_Expr:
- r := cast(^Tag_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- case Unary_Expr:
- r := cast(^Unary_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- case Binary_Expr:
- r := cast(^Binary_Expr)res;
- r.left = clone_type(r.left, allocator, unique_strings);
- r.right = clone_type(r.right, allocator, unique_strings);
- case Paren_Expr:
- r := cast(^Paren_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- case Selector_Expr:
- r := cast(^Selector_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- r.field = auto_cast clone_type(r.field, allocator, unique_strings);
- case Implicit_Selector_Expr:
- r := cast(^Implicit_Selector_Expr)res;
- r.field = auto_cast clone_type(r.field, allocator, unique_strings);
- case Slice_Expr:
- r := cast(^Slice_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- r.low = clone_type(r.low, allocator, unique_strings);
- r.high = clone_type(r.high, allocator, unique_strings);
- case Attribute:
- r := cast(^Attribute)res;
- r.elems = clone_type(r.elems, allocator, unique_strings);
- case Distinct_Type:
- r := cast(^Distinct_Type)res;
- r.type = clone_type(r.type, allocator, unique_strings);
- case Proc_Type:
- r := cast(^Proc_Type)res;
- r.params = auto_cast clone_type(r.params, allocator, unique_strings);
- r.results = auto_cast clone_type(r.results, allocator, unique_strings);
- case Pointer_Type:
- r := cast(^Pointer_Type)res;
- r.elem = clone_type(r.elem, allocator, unique_strings);
- case Array_Type:
- r := cast(^Array_Type)res;
- r.len = clone_type(r.len, allocator, unique_strings);
- r.elem = clone_type(r.elem, allocator, unique_strings);
- r.tag = clone_type(r.tag, allocator, unique_strings);
- case Dynamic_Array_Type:
- r := cast(^Dynamic_Array_Type)res;
- r.elem = clone_type(r.elem, allocator, unique_strings);
- r.tag = clone_type(r.tag, allocator, unique_strings);
- case Struct_Type:
- r := cast(^Struct_Type)res;
- r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
- r.align = clone_type(r.align, allocator, unique_strings);
- r.fields = auto_cast clone_type(r.fields, allocator, unique_strings);
- r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
- case Field:
+clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Node {
+
+ using ast;
+
+ if node == nil {
+ return nil;
+ }
+
+ size := size_of(Node);
+ align := align_of(Node);
+ ti := type_info_of(node.derived.id);
+ if ti != nil {
+ size = ti.size;
+ align = ti.align;
+ }
+
+ res := cast(^Node)mem.alloc(size, align, allocator);
+ src: rawptr = node;
+ if node.derived != nil {
+ src = node.derived.data;
+ }
+ mem.copy(res, src, size);
+ res.derived.data = rawptr(res);
+
+ if unique_strings != nil && node.pos.file != "" {
+ res.pos.file = get_index_unique_string(unique_strings, allocator, node.pos.file);
+ } else {
+ res.pos.file = node.pos.file;
+ }
+
+ if unique_strings != nil && node.end.file != "" {
+ res.end.file = get_index_unique_string(unique_strings, allocator, node.end.file);
+ } else {
+ res.end.file = node.end.file;
+ }
+
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ r := cast(^Ident)res;
+ if unique_strings == nil {
+ r.name = strings.clone(n.name, allocator);
+ } else {
+ r.name = get_index_unique_string(unique_strings, allocator, n.name);
+ }
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ case Basic_Directive:
+ case Ellipsis:
+ r := cast(^Ellipsis)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ case Tag_Expr:
+ r := cast(^Tag_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ case Unary_Expr:
+ r := cast(^Unary_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ case Binary_Expr:
+ r := cast(^Binary_Expr)res;
+ r.left = clone_type(r.left, allocator, unique_strings);
+ r.right = clone_type(r.right, allocator, unique_strings);
+ case Paren_Expr:
+ r := cast(^Paren_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ case Selector_Expr:
+ r := cast(^Selector_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ r.field = auto_cast clone_type(r.field, allocator, unique_strings);
+ case Implicit_Selector_Expr:
+ r := cast(^Implicit_Selector_Expr)res;
+ r.field = auto_cast clone_type(r.field, allocator, unique_strings);
+ case Slice_Expr:
+ r := cast(^Slice_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ r.low = clone_type(r.low, allocator, unique_strings);
+ r.high = clone_type(r.high, allocator, unique_strings);
+ case Attribute:
+ r := cast(^Attribute)res;
+ r.elems = clone_type(r.elems, allocator, unique_strings);
+ case Distinct_Type:
+ r := cast(^Distinct_Type)res;
+ r.type = clone_type(r.type, allocator, unique_strings);
+ case Proc_Type:
+ r := cast(^Proc_Type)res;
+ r.params = auto_cast clone_type(r.params, allocator, unique_strings);
+ r.results = auto_cast clone_type(r.results, allocator, unique_strings);
+ case Pointer_Type:
+ r := cast(^Pointer_Type)res;
+ r.elem = clone_type(r.elem, allocator, unique_strings);
+ case Array_Type:
+ r := cast(^Array_Type)res;
+ r.len = clone_type(r.len, allocator, unique_strings);
+ r.elem = clone_type(r.elem, allocator, unique_strings);
+ r.tag = clone_type(r.tag, allocator, unique_strings);
+ case Dynamic_Array_Type:
+ r := cast(^Dynamic_Array_Type)res;
+ r.elem = clone_type(r.elem, allocator, unique_strings);
+ r.tag = clone_type(r.tag, allocator, unique_strings);
+ case Struct_Type:
+ r := cast(^Struct_Type)res;
+ r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
+ r.align = clone_type(r.align, allocator, unique_strings);
+ r.fields = auto_cast clone_type(r.fields, allocator, unique_strings);
+ r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
+ case Field:
r := cast(^Field)res;
- r.names = clone_type(r.names, allocator, unique_strings);
- r.type = clone_type(r.type, allocator, unique_strings);
+ r.names = clone_type(r.names, allocator, unique_strings);
+ r.type = clone_type(r.type, allocator, unique_strings);
r.default_value = clone_type(r.default_value, allocator, unique_strings);
case Field_List:
r := cast(^Field_List)res;
- r.list = clone_type(r.list, allocator, unique_strings);
- case Field_Value:
+ r.list = clone_type(r.list, allocator, unique_strings);
+ case Field_Value:
r := cast(^Field_Value)res;
r.field = clone_type(r.field, allocator, unique_strings);
r.value = clone_type(r.value, allocator, unique_strings);
- case Union_Type:
- r := cast(^Union_Type)res;
- r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
- r.align = clone_type(r.align, allocator, unique_strings);
- r.variants = clone_type(r.variants, allocator, unique_strings);
- r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
- case Enum_Type:
- r := cast(^Enum_Type)res;
- r.base_type = clone_type(r.base_type, allocator, unique_strings);
- r.fields = clone_type(r.fields, allocator, unique_strings);
- case Bit_Set_Type:
- r := cast(^Bit_Set_Type)res;
- r.elem = clone_type(r.elem, allocator, unique_strings);
- r.underlying = clone_type(r.underlying, allocator, unique_strings);
- case Map_Type:
- r := cast(^Map_Type)res;
- r.key = clone_type(r.key, allocator, unique_strings);
- r.value = clone_type(r.value, allocator, unique_strings);
- case Call_Expr:
- r := cast(^Call_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
- r.args = clone_type(r.args, allocator, unique_strings);
- case Typeid_Type:
+ case Union_Type:
+ r := cast(^Union_Type)res;
+ r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
+ r.align = clone_type(r.align, allocator, unique_strings);
+ r.variants = clone_type(r.variants, allocator, unique_strings);
+ r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
+ case Enum_Type:
+ r := cast(^Enum_Type)res;
+ r.base_type = clone_type(r.base_type, allocator, unique_strings);
+ r.fields = clone_type(r.fields, allocator, unique_strings);
+ case Bit_Set_Type:
+ r := cast(^Bit_Set_Type)res;
+ r.elem = clone_type(r.elem, allocator, unique_strings);
+ r.underlying = clone_type(r.underlying, allocator, unique_strings);
+ case Map_Type:
+ r := cast(^Map_Type)res;
+ r.key = clone_type(r.key, allocator, unique_strings);
+ r.value = clone_type(r.value, allocator, unique_strings);
+ case Call_Expr:
+ r := cast(^Call_Expr)res;
+ r.expr = clone_type(r.expr, allocator, unique_strings);
+ r.args = clone_type(r.args, allocator, unique_strings);
+ case Typeid_Type:
r := cast(^Typeid_Type)res;
r.specialization = clone_type(r.specialization, allocator, unique_strings);
- case Ternary_When_Expr:
- r := cast(^Ternary_When_Expr)res;
- r.x = clone_type(r.x, allocator, unique_strings);
+ case Ternary_When_Expr:
+ r := cast(^Ternary_When_Expr)res;
+ r.x = clone_type(r.x, allocator, unique_strings);
r.cond = clone_type(r.cond, allocator, unique_strings);
- r.y = clone_type(r.y, allocator, unique_strings);
- case Poly_Type:
- r := cast(^Poly_Type)res;
- r.type = auto_cast clone_type(r.type, allocator, unique_strings);
- r.specialization = clone_type(r.specialization, allocator, unique_strings);
- case Proc_Group:
- r := cast(^Proc_Group)res;
- r.args = clone_type(r.args, allocator, unique_strings);
- case Comp_Lit:
- r := cast(^Comp_Lit)res;
- r.type = clone_type(r.type, allocator, unique_strings);
- r.elems = clone_type(r.elems, allocator, unique_strings);
- case:
- log.warn("Clone type Unhandled node kind: %T", n);
- }
-
- return res;
-}
-
-
+ r.y = clone_type(r.y, allocator, unique_strings);
+ case Poly_Type:
+ r := cast(^Poly_Type)res;
+ r.type = auto_cast clone_type(r.type, allocator, unique_strings);
+ r.specialization = clone_type(r.specialization, allocator, unique_strings);
+ case Proc_Group:
+ r := cast(^Proc_Group)res;
+ r.args = clone_type(r.args, allocator, unique_strings);
+ case Comp_Lit:
+ r := cast(^Comp_Lit)res;
+ r.type = clone_type(r.type, allocator, unique_strings);
+ r.elems = clone_type(r.elems, allocator, unique_strings);
+ case:
+ log.warn("Clone type Unhandled node kind: %T", n);
+ }
+
+ return res;
+} \ No newline at end of file
diff --git a/src/index/collector.odin b/src/index/collector.odin
index 6cf7e7d..60bef9e 100644
--- a/src/index/collector.odin
+++ b/src/index/collector.odin
@@ -12,544 +12,507 @@ import "core:strconv"
import "shared:common"
-
SymbolCollection :: struct {
- allocator: mem.Allocator,
- config: ^common.Config,
- symbols: map[uint] Symbol,
- unique_strings: map[string] string, //store all our strings as unique strings and reference them to save memory.
-};
+ allocator: mem.Allocator,
+ config: ^common.Config,
+ symbols: map[uint]Symbol,
+ unique_strings: map[string]string, //store all our strings as unique strings and reference them to save memory.
+}
get_index_unique_string :: proc {
- get_index_unique_string_collection,
- get_index_unique_string_collection_raw,
-};
+get_index_unique_string_collection,
+get_index_unique_string_collection_raw};
-get_index_unique_string_collection :: proc(collection: ^SymbolCollection, s: string) -> string {
- return get_index_unique_string_collection_raw(&collection.unique_strings, collection.allocator, s);
+get_index_unique_string_collection :: proc (collection: ^SymbolCollection, s: string) -> string {
+ return get_index_unique_string_collection_raw(&collection.unique_strings, collection.allocator, s);
}
-get_index_unique_string_collection_raw :: proc(unique_strings: ^map[string] string, allocator: mem.Allocator, s: string) -> string {
- //i'm hashing this string way to much
- if _, ok := unique_strings[s]; !ok {
- str := strings.clone(s, allocator);
- unique_strings[str] = str; //yeah maybe I have to use some integer and hash it, tried that before but got name collisions.
- }
+get_index_unique_string_collection_raw :: proc (unique_strings: ^map[string]string, allocator: mem.Allocator, s: string) -> string {
+	//i'm hashing this string way too much
+ if _, ok := unique_strings[s]; !ok {
+ str := strings.clone(s, allocator);
+ unique_strings[str] = str; //yeah maybe I have to use some integer and hash it, tried that before but got name collisions.
+ }
- return unique_strings[s];
+ return unique_strings[s];
}
-make_symbol_collection :: proc(allocator := context.allocator, config: ^common.Config) -> SymbolCollection {
- return SymbolCollection {
- allocator = allocator,
- config = config,
- symbols = make(map[uint] Symbol, 16, allocator),
- unique_strings = make(map[string] string, 16, allocator),
- };
+make_symbol_collection :: proc (allocator := context.allocator, config: ^common.Config) -> SymbolCollection {
+ return SymbolCollection {
+ allocator = allocator,
+ config = config,
+ symbols = make(map[uint]Symbol, 16, allocator),
+ unique_strings = make(map[string]string, 16, allocator),
+ };
}
-delete_symbol_collection :: proc(collection: SymbolCollection) {
+delete_symbol_collection :: proc (collection: SymbolCollection) {
- for k, v in collection.symbols {
- free_symbol(v, collection.allocator);
- }
+ for k, v in collection.symbols {
+ free_symbol(v, collection.allocator);
+ }
- for k, v in collection.unique_strings {
- delete(v, collection.allocator);
- }
+ for k, v in collection.unique_strings {
+ delete(v, collection.allocator);
+ }
-
- delete(collection.symbols);
- delete(collection.unique_strings);
+ delete(collection.symbols);
+ delete(collection.unique_strings);
}
-collect_procedure_fields :: proc(collection: ^SymbolCollection, proc_type: ^ast.Proc_Type, arg_list: ^ast.Field_List, return_list: ^ast.Field_List, package_map: map [string] string) -> SymbolProcedureValue {
-
- returns := make([dynamic] ^ast.Field, 0, collection.allocator);
- args := make([dynamic] ^ast.Field, 0, collection.allocator);
+collect_procedure_fields :: proc (collection: ^SymbolCollection, proc_type: ^ast.Proc_Type, arg_list: ^ast.Field_List, return_list: ^ast.Field_List, package_map: map[string]string) -> SymbolProcedureValue {
- if return_list != nil {
+ returns := make([dynamic]^ast.Field, 0, collection.allocator);
+ args := make([dynamic]^ast.Field, 0, collection.allocator);
- for ret in return_list.list {
- cloned := cast(^ast.Field)clone_type(ret, collection.allocator, &collection.unique_strings);
- replace_package_alias(cloned, package_map, collection);
- append(&returns, cloned);
- }
+ if return_list != nil {
- }
+ for ret in return_list.list {
+ cloned := cast(^ast.Field)clone_type(ret, collection.allocator, &collection.unique_strings);
+ replace_package_alias(cloned, package_map, collection);
+ append(&returns, cloned);
+ }
+ }
- if arg_list != nil {
+ if arg_list != nil {
- for arg in arg_list.list {
- cloned := cast(^ast.Field)clone_type(arg, collection.allocator, &collection.unique_strings);
- replace_package_alias(cloned, package_map, collection);
- append(&args, cloned);
- }
+ for arg in arg_list.list {
+ cloned := cast(^ast.Field)clone_type(arg, collection.allocator, &collection.unique_strings);
+ replace_package_alias(cloned, package_map, collection);
+ append(&args, cloned);
+ }
+ }
- }
+ value := SymbolProcedureValue {
+ return_types = returns[:],
+ arg_types = args[:],
+ generic = proc_type.generic,
+ };
- value := SymbolProcedureValue {
- return_types = returns[:],
- arg_types = args[:],
- generic = proc_type.generic,
- };
-
- return value;
+ return value;
}
-collect_struct_fields :: proc(collection: ^SymbolCollection, struct_type: ast.Struct_Type, package_map: map [string] string) -> SymbolStructValue {
-
- names := make([dynamic] string, 0, collection.allocator);
- types := make([dynamic] ^ast.Expr, 0, collection.allocator);
- usings := make(map [string] bool, 0, collection.allocator);
-
- for field in struct_type.fields.list {
+collect_struct_fields :: proc (collection: ^SymbolCollection, struct_type: ast.Struct_Type, package_map: map[string]string) -> SymbolStructValue {
- for n in field.names {
- ident := n.derived.(ast.Ident);
- append(&names, get_index_unique_string(collection, ident.name));
+ names := make([dynamic]string, 0, collection.allocator);
+ types := make([dynamic]^ast.Expr, 0, collection.allocator);
+ usings := make(map[string]bool, 0, collection.allocator);
- cloned := clone_type(field.type, collection.allocator, &collection.unique_strings);
- replace_package_alias(cloned, package_map, collection);
- append(&types, cloned);
+ for field in struct_type.fields.list {
- if .Using in field.flags {
- usings[names[len(names)-1]] = true;
- }
+ for n in field.names {
+ ident := n.derived.(ast.Ident);
+ append(&names, get_index_unique_string(collection, ident.name));
- }
+ cloned := clone_type(field.type, collection.allocator, &collection.unique_strings);
+ replace_package_alias(cloned, package_map, collection);
+ append(&types, cloned);
- }
+ if .Using in field.flags {
+ usings[names[len(names) - 1]] = true;
+ }
+ }
+ }
- value := SymbolStructValue {
- names = names[:],
- types = types[:],
- usings = usings,
- };
+ value := SymbolStructValue {
+ names = names[:],
+ types = types[:],
+ usings = usings,
+ };
- return value;
+ return value;
}
-collect_enum_fields :: proc(collection: ^SymbolCollection, fields: [] ^ast.Expr, package_map: map [string] string) -> SymbolEnumValue {
+collect_enum_fields :: proc (collection: ^SymbolCollection, fields: []^ast.Expr, package_map: map[string]string) -> SymbolEnumValue {
- names := make([dynamic] string, 0, collection.allocator);
+ names := make([dynamic]string, 0, collection.allocator);
- //ERROR no hover on n in the for, but elsewhere is fine
- for n in fields {
+ //ERROR no hover on n in the for, but elsewhere is fine
+ for n in fields {
- if ident, ok := n.derived.(ast.Ident); ok {
- append(&names, get_index_unique_string(collection, ident.name));
- }
+ if ident, ok := n.derived.(ast.Ident); ok {
+ append(&names, get_index_unique_string(collection, ident.name));
+ } else if field, ok := n.derived.(ast.Field_Value); ok {
+ append(&names, get_index_unique_string(collection, field.field.derived.(ast.Ident).name));
+ }
+ }
- else if field, ok := n.derived.(ast.Field_Value); ok {
- append(&names, get_index_unique_string(collection, field.field.derived.(ast.Ident).name));
- }
+ value := SymbolEnumValue {
+ names = names[:]
+ };
- }
-
- value := SymbolEnumValue {
- names = names[:],
- };
-
-
- return value;
+ return value;
}
-collect_union_fields :: proc(collection: ^SymbolCollection, union_type: ast.Union_Type, package_map: map [string] string) -> SymbolUnionValue {
-
- names := make([dynamic] string, 0, collection.allocator);
+collect_union_fields :: proc (collection: ^SymbolCollection, union_type: ast.Union_Type, package_map: map[string]string) -> SymbolUnionValue {
+ names := make([dynamic]string, 0, collection.allocator);
- for variant in union_type.variants {
+ for variant in union_type.variants {
- if ident, ok := variant.derived.(ast.Ident); ok {
- append(&names, get_index_unique_string(collection, ident.name));
- }
+ if ident, ok := variant.derived.(ast.Ident); ok {
+ append(&names, get_index_unique_string(collection, ident.name));
+ }
+ }
- }
+ value := SymbolUnionValue {
+ names = names[:]
+ };
- value := SymbolUnionValue {
- names = names[:],
- };
-
- return value;
+ return value;
}
-collect_bitset_field :: proc(collection: ^SymbolCollection, bitset_type: ast.Bit_Set_Type, package_map: map [string] string) -> SymbolBitSetValue {
+collect_bitset_field :: proc (collection: ^SymbolCollection, bitset_type: ast.Bit_Set_Type, package_map: map[string]string) -> SymbolBitSetValue {
- value := SymbolBitSetValue {
- expr = clone_type(bitset_type.elem, collection.allocator, &collection.unique_strings),
- };
+ value := SymbolBitSetValue {
+ expr = clone_type(bitset_type.elem, collection.allocator, &collection.unique_strings)
+ };
- return value;
+ return value;
}
-collect_generic :: proc(collection: ^SymbolCollection, expr: ^ast.Expr, package_map: map [string] string) -> SymbolGenericValue {
+collect_generic :: proc (collection: ^SymbolCollection, expr: ^ast.Expr, package_map: map[string]string) -> SymbolGenericValue {
- cloned := clone_type(expr, collection.allocator, &collection.unique_strings);
- replace_package_alias(cloned, package_map, collection);
+ cloned := clone_type(expr, collection.allocator, &collection.unique_strings);
+ replace_package_alias(cloned, package_map, collection);
- value := SymbolGenericValue {
- expr = cloned,
- };
+ value := SymbolGenericValue {
+ expr = cloned
+ };
- return value;
+ return value;
}
-
-collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: string) -> common.Error {
-
- forward, _ := filepath.to_slash(file.fullpath, context.temp_allocator);
- directory := strings.to_lower(path.dir(forward, context.temp_allocator), context.temp_allocator);
- package_map := get_package_mapping(file, collection.config, uri);
-
- exprs := common.collect_globals(file);
-
- for expr in exprs {
-
- symbol: Symbol;
-
- token: ast.Node;
- token_type: SymbolType;
-
- name := expr.name;
-
- col_expr := expr.expr;
-
- if helper, ok := col_expr.derived.(ast.Helper_Type); ok {
- if helper.type != nil {
- col_expr = helper.type;
- }
- }
-
- if dist, ok := col_expr.derived.(ast.Distinct_Type); ok {
- if dist.type != nil {
- col_expr = dist.type;
- }
- }
-
- switch v in col_expr.derived {
- case ast.Proc_Lit:
- token = v;
- token_type = .Function;
-
- if v.type.params != nil {
- symbol.signature = strings.concatenate( {"(", string(file.src[v.type.params.pos.offset:v.type.params.end.offset]), ")"},
- collection.allocator);
- }
-
- if v.type.results != nil {
- symbol.returns = strings.concatenate( {"(", string(file.src[v.type.results.pos.offset:v.type.results.end.offset]), ")"},
- collection.allocator);
- }
-
- if v.type != nil {
- symbol.value = collect_procedure_fields(collection, v.type, v.type.params, v.type.results, package_map);
- }
- case ast.Proc_Type:
- token = v;
- token_type = .Function;
-
- if v.params != nil {
- symbol.signature = strings.concatenate( {"(", string(file.src[v.params.pos.offset:v.params.end.offset]), ")"},
- collection.allocator);
- }
-
- if v.results != nil {
- symbol.returns = strings.concatenate( {"(", string(file.src[v.results.pos.offset:v.results.end.offset]), ")"},
- collection.allocator);
- }
-
- symbol.value = collect_procedure_fields(collection, cast(^ast.Proc_Type)col_expr, v.params, v.results, package_map);
- case ast.Proc_Group:
- token = v;
- token_type = .Function;
- symbol.value = SymbolProcedureGroupValue {
- group = clone_type(col_expr, collection.allocator, &collection.unique_strings),
- };
- case ast.Struct_Type:
- token = v;
- token_type = .Struct;
- symbol.value = collect_struct_fields(collection, v, package_map);
- symbol.signature = "struct";
- case ast.Enum_Type:
- token = v;
- token_type = .Enum;
- symbol.value = collect_enum_fields(collection, v.fields, package_map);
- symbol.signature = "enum";
- case ast.Union_Type:
- token = v;
- token_type = .Enum;
- symbol.value = collect_union_fields(collection, v, package_map);
- symbol.signature = "union";
- case ast.Bit_Set_Type:
- token = v;
- token_type = .Enum;
- symbol.value = collect_bitset_field(collection, v, package_map);
- symbol.signature = "bitset";
- case ast.Basic_Lit:
- token = v;
- symbol.value = collect_generic(collection, col_expr, package_map);
- case ast.Ident:
- token = v;
- token_type = .Variable;
- symbol.value = collect_generic(collection, col_expr, package_map);
- case: // default
- symbol.value = collect_generic(collection, col_expr, package_map);
- token_type = .Variable;
- token = expr.expr;
- break;
- }
-
- symbol.range = common.get_token_range(token, file.src);
- symbol.name = get_index_unique_string(collection, name);
- symbol.pkg = get_index_unique_string(collection, directory);
- symbol.type = token_type;
-
- when ODIN_OS == "windows" {
- symbol.uri = get_index_unique_string(collection, strings.to_lower(uri, context.temp_allocator));
- }
- else {
- symbol.uri = get_index_unique_string(collection, uri);
- }
-
-
- if expr.docs != nil {
-
- tmp: string;
-
- for doc in expr.docs.list {
- tmp = strings.concatenate({tmp, "\n", doc.text}, context.temp_allocator);
- }
-
- if tmp != "" {
- replaced, allocated := strings.replace_all(tmp, "//", "", context.temp_allocator);
- symbol.doc = strings.clone(replaced, collection.allocator);
- }
-
- }
-
- cat := strings.concatenate({symbol.pkg, name}, context.temp_allocator);
-
- id := get_symbol_id(cat);
-
- //right now i'm not checking comments whether is for windows, linux, etc, and some packages do not specify that(os)
- if v, ok := collection.symbols[id]; !ok || v.name == "" {
- collection.symbols[id] = symbol;
- }
-
- else {
- free_symbol(symbol, collection.allocator);
- }
-
- }
-
- return .None;
+collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: string) -> common.Error {
+
+ forward, _ := filepath.to_slash(file.fullpath, context.temp_allocator);
+ directory := strings.to_lower(path.dir(forward, context.temp_allocator), context.temp_allocator);
+ package_map := get_package_mapping(file, collection.config, uri);
+
+ exprs := common.collect_globals(file);
+
+ for expr in exprs {
+
+ symbol: Symbol;
+
+ token: ast.Node;
+ token_type: SymbolType;
+
+ name := expr.name;
+
+ col_expr := expr.expr;
+
+ if helper, ok := col_expr.derived.(ast.Helper_Type); ok {
+ if helper.type != nil {
+ col_expr = helper.type;
+ }
+ }
+
+ if dist, ok := col_expr.derived.(ast.Distinct_Type); ok {
+ if dist.type != nil {
+ col_expr = dist.type;
+ }
+ }
+
+ switch v in col_expr.derived {
+ case ast.Proc_Lit:
+ token = v;
+ token_type = .Function;
+
+ if v.type.params != nil {
+ symbol.signature = strings.concatenate({"(", string(file.src[v.type.params.pos.offset:v.type.params.end.offset]), ")"},
+ collection.allocator);
+ }
+
+ if v.type.results != nil {
+ symbol.returns = strings.concatenate({"(", string(file.src[v.type.results.pos.offset:v.type.results.end.offset]), ")"},
+ collection.allocator);
+ }
+
+ if v.type != nil {
+ symbol.value = collect_procedure_fields(collection, v.type, v.type.params, v.type.results, package_map);
+ }
+ case ast.Proc_Type:
+ token = v;
+ token_type = .Function;
+
+ if v.params != nil {
+ symbol.signature = strings.concatenate({"(", string(file.src[v.params.pos.offset:v.params.end.offset]), ")"},
+ collection.allocator);
+ }
+
+ if v.results != nil {
+ symbol.returns = strings.concatenate({"(", string(file.src[v.results.pos.offset:v.results.end.offset]), ")"},
+ collection.allocator);
+ }
+
+ symbol.value = collect_procedure_fields(collection, cast(^ast.Proc_Type)col_expr, v.params, v.results, package_map);
+ case ast.Proc_Group:
+ token = v;
+ token_type = .Function;
+ symbol.value = SymbolProcedureGroupValue {
+ group = clone_type(col_expr, collection.allocator, &collection.unique_strings)
+ };
+ case ast.Struct_Type:
+ token = v;
+ token_type = .Struct;
+ symbol.value = collect_struct_fields(collection, v, package_map);
+ symbol.signature = "struct";
+ case ast.Enum_Type:
+ token = v;
+ token_type = .Enum;
+ symbol.value = collect_enum_fields(collection, v.fields, package_map);
+ symbol.signature = "enum";
+ case ast.Union_Type:
+ token = v;
+ token_type = .Enum;
+ symbol.value = collect_union_fields(collection, v, package_map);
+ symbol.signature = "union";
+ case ast.Bit_Set_Type:
+ token = v;
+ token_type = .Enum;
+ symbol.value = collect_bitset_field(collection, v, package_map);
+ symbol.signature = "bitset";
+ case ast.Basic_Lit:
+ token = v;
+ symbol.value = collect_generic(collection, col_expr, package_map);
+ case ast.Ident:
+ token = v;
+ token_type = .Variable;
+ symbol.value = collect_generic(collection, col_expr, package_map);
+ case: // default
+ symbol.value = collect_generic(collection, col_expr, package_map);
+ token_type = .Variable;
+ token = expr.expr;
+ break;
+ }
+
+ symbol.range = common.get_token_range(token, file.src);
+ symbol.name = get_index_unique_string(collection, name);
+ symbol.pkg = get_index_unique_string(collection, directory);
+ symbol.type = token_type;
+
+ when ODIN_OS == "windows" {
+ symbol.uri = get_index_unique_string(collection, strings.to_lower(uri, context.temp_allocator));
+ } else
+ {
+ symbol.uri = get_index_unique_string(collection, uri);
+ }
+
+ if expr.docs != nil {
+
+ tmp: string;
+
+ for doc in expr.docs.list {
+ tmp = strings.concatenate({tmp, "\n", doc.text}, context.temp_allocator);
+ }
+
+ if tmp != "" {
+ replaced, allocated := strings.replace_all(tmp, "//", "", context.temp_allocator);
+ symbol.doc = strings.clone(replaced, collection.allocator);
+ }
+ }
+
+ cat := strings.concatenate({symbol.pkg, name}, context.temp_allocator);
+
+ id := get_symbol_id(cat);
+
+	//right now I'm not checking whether a comment is meant for windows, linux, etc., and some packages do not specify that (os)
+ if v, ok := collection.symbols[id]; !ok || v.name == "" {
+ collection.symbols[id] = symbol;
+ } else {
+ free_symbol(symbol, collection.allocator);
+ }
+ }
+
+ return .None;
}
-
/*
- Gets the map from import alias to absolute package directory
+ Gets the map from import alias to absolute package directory
*/
-get_package_mapping :: proc(file: ast.File, config: ^common.Config, uri: string) -> map [string] string {
+get_package_mapping :: proc (file: ast.File, config: ^common.Config, uri: string) -> map[string]string {
- package_map := make(map [string] string, 0, context.temp_allocator);
+ package_map := make(map[string]string, 0, context.temp_allocator);
- for imp, index in file.imports {
+ for imp, index in file.imports {
- //collection specified
- if i := strings.index(imp.fullpath, ":"); i != -1 {
+ //collection specified
+ if i := strings.index(imp.fullpath, ":"); i != -1 {
- //ERROR hover on collection should show string
- collection := imp.fullpath[1:i];
- p := imp.fullpath[i+1:len(imp.fullpath)-1];
+ //ERROR hover on collection should show string
+ collection := imp.fullpath[1:i];
+ p := imp.fullpath[i + 1:len(imp.fullpath) - 1];
- dir, ok := config.collections[collection];
+ dir, ok := config.collections[collection];
- if !ok {
- continue;
- }
+ if !ok {
+ continue;
+ }
- name: string;
+ name: string;
- full := path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator);
+ full := path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator);
- if imp.name.text != "" {
- name = imp.name.text;
- }
+ if imp.name.text != "" {
+ name = imp.name.text;
+ } else {
+ name = path.base(full, false, context.temp_allocator);
+ }
- else {
- name = path.base(full, false, context.temp_allocator);
- }
+ package_map[name] = strings.to_lower(full, context.temp_allocator);
+ } else {
- package_map[name] = strings.to_lower(full, context.temp_allocator);
+ name: string;
- }
+ base := path.base(uri, false, context.temp_allocator);
- else {
+ full := path.join(elems = {base, imp.fullpath[1:len(imp.fullpath) - 1]}, allocator = context.temp_allocator);
- name: string;
+ full = path.clean(full, context.temp_allocator);
- base := path.base(uri, false, context.temp_allocator);
+ if imp.name.text != "" {
+ name = imp.name.text;
+ //ERROR hover is wrong on name
+ } else {
+ name = path.base(full, false, context.temp_allocator);
+ }
- full := path.join(elems = {base, imp.fullpath[1:len(imp.fullpath)-1]}, allocator = context.temp_allocator);
+ package_map[name] = strings.to_lower(full, context.temp_allocator);
+ }
+ }
- full = path.clean(full, context.temp_allocator);
-
- if imp.name.text != "" {
- name = imp.name.text;
- //ERROR hover is wrong on name
- }
-
- else {
- name = path.base(full, false, context.temp_allocator);
- }
-
- package_map[name] = strings.to_lower(full, context.temp_allocator);
- }
-
- }
-
-
- return package_map;
+ return package_map;
}
-
/*
- We can't have the alias names for packages with selector expression since that is specific to every files import, instead just replace it with the absolute
- package name(absolute directory path)
+	We can't have the alias names for packages with selector expression since that is specific to every file's import, instead just replace it with the absolute
+ package name(absolute directory path)
*/
-replace_package_alias :: proc{
- replace_package_alias_node,
- replace_package_alias_expr,
- replace_package_alias_array,
- replace_package_alias_dynamic_array,
-};
+replace_package_alias :: proc {
+replace_package_alias_node,
+replace_package_alias_expr,
+replace_package_alias_array,
+replace_package_alias_dynamic_array};
-replace_package_alias_array :: proc(array: $A/[]^$T, package_map: map [string] string, collection: ^SymbolCollection) {
-
- for elem, i in array {
- replace_package_alias(elem, package_map, collection);
- }
+replace_package_alias_array :: proc (array: $A/[]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
+ for elem, i in array {
+ replace_package_alias(elem, package_map, collection);
+ }
}
-replace_package_alias_dynamic_array :: proc(array: $A/[dynamic]^$T, package_map: map [string] string, collection: ^SymbolCollection) {
-
- for elem, i in array {
- replace_package_alias(elem, package_map, collection);
- }
+replace_package_alias_dynamic_array :: proc (array: $A/[dynamic]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
+ for elem, i in array {
+ replace_package_alias(elem, package_map, collection);
+ }
}
-replace_package_alias_expr :: proc(node: ^ast.Expr, package_map: map [string] string, collection: ^SymbolCollection) {
- replace_package_alias_node(node, package_map, collection);
+replace_package_alias_expr :: proc (node: ^ast.Expr, package_map: map[string]string, collection: ^SymbolCollection) {
+ replace_package_alias_node(node, package_map, collection);
}
-replace_package_alias_node :: proc(node: ^ast.Node, package_map: map [string] string, collection: ^SymbolCollection) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- case Implicit:
- case Undef:
- case Basic_Lit:
- case Basic_Directive:
- case Ellipsis:
- replace_package_alias(n.expr, package_map, collection);
- case Tag_Expr:
- replace_package_alias(n.expr, package_map, collection);
- case Unary_Expr:
- replace_package_alias(n.expr, package_map, collection);
- case Binary_Expr:
- replace_package_alias(n.left, package_map, collection);
- replace_package_alias(n.right, package_map, collection);
- case Paren_Expr:
- replace_package_alias(n.expr, package_map, collection);
- case Selector_Expr:
-
- if _, ok := n.expr.derived.(Ident); ok {
-
- ident := &n.expr.derived.(Ident);
-
- if package_name, ok := package_map[ident.name]; ok {
- ident.name = get_index_unique_string(collection, package_name);
- }
-
- }
-
- else {
- replace_package_alias(n.expr, package_map, collection);
- replace_package_alias(n.field, package_map, collection);
- }
- case Implicit_Selector_Expr:
- replace_package_alias(n.field, package_map, collection);
- case Slice_Expr:
- replace_package_alias(n.expr, package_map, collection);
- replace_package_alias(n.low, package_map, collection);
- replace_package_alias(n.high, package_map, collection);
- case Attribute:
- replace_package_alias(n.elems, package_map, collection);
- case Distinct_Type:
- replace_package_alias(n.type, package_map, collection);
- case Proc_Type:
- replace_package_alias(n.params, package_map, collection);
- replace_package_alias(n.results, package_map, collection);
- case Pointer_Type:
- replace_package_alias(n.elem, package_map, collection);
- case Array_Type:
- replace_package_alias(n.len, package_map, collection);
- replace_package_alias(n.elem, package_map, collection);
- case Dynamic_Array_Type:
- replace_package_alias(n.elem, package_map, collection);
- case Struct_Type:
- replace_package_alias(n.poly_params, package_map, collection);
- replace_package_alias(n.align, package_map, collection);
- replace_package_alias(n.fields, package_map, collection);
- case Field:
+replace_package_alias_node :: proc (node: ^ast.Node, package_map: map[string]string, collection: ^SymbolCollection) {
+
+ using ast;
+
+ if node == nil {
+ return;
+ }
+
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ case Basic_Directive:
+ case Ellipsis:
+ replace_package_alias(n.expr, package_map, collection);
+ case Tag_Expr:
+ replace_package_alias(n.expr, package_map, collection);
+ case Unary_Expr:
+ replace_package_alias(n.expr, package_map, collection);
+ case Binary_Expr:
+ replace_package_alias(n.left, package_map, collection);
+ replace_package_alias(n.right, package_map, collection);
+ case Paren_Expr:
+ replace_package_alias(n.expr, package_map, collection);
+ case Selector_Expr:
+
+ if _, ok := n.expr.derived.(Ident); ok {
+
+ ident := &n.expr.derived.(Ident);
+
+ if package_name, ok := package_map[ident.name]; ok {
+ ident.name = get_index_unique_string(collection, package_name);
+ }
+ } else {
+ replace_package_alias(n.expr, package_map, collection);
+ replace_package_alias(n.field, package_map, collection);
+ }
+ case Implicit_Selector_Expr:
+ replace_package_alias(n.field, package_map, collection);
+ case Slice_Expr:
+ replace_package_alias(n.expr, package_map, collection);
+ replace_package_alias(n.low, package_map, collection);
+ replace_package_alias(n.high, package_map, collection);
+ case Attribute:
+ replace_package_alias(n.elems, package_map, collection);
+ case Distinct_Type:
+ replace_package_alias(n.type, package_map, collection);
+ case Proc_Type:
+ replace_package_alias(n.params, package_map, collection);
+ replace_package_alias(n.results, package_map, collection);
+ case Pointer_Type:
+ replace_package_alias(n.elem, package_map, collection);
+ case Array_Type:
+ replace_package_alias(n.len, package_map, collection);
+ replace_package_alias(n.elem, package_map, collection);
+ case Dynamic_Array_Type:
+ replace_package_alias(n.elem, package_map, collection);
+ case Struct_Type:
+ replace_package_alias(n.poly_params, package_map, collection);
+ replace_package_alias(n.align, package_map, collection);
+ replace_package_alias(n.fields, package_map, collection);
+ case Field:
replace_package_alias(n.names, package_map, collection);
replace_package_alias(n.type, package_map, collection);
replace_package_alias(n.default_value, package_map, collection);
case Field_List:
- replace_package_alias(n.list, package_map, collection);
- case Field_Value:
- replace_package_alias(n.field, package_map, collection);
- replace_package_alias(n.value, package_map, collection);
- case Union_Type:
- replace_package_alias(n.poly_params, package_map, collection);
- replace_package_alias(n.align, package_map, collection);
- replace_package_alias(n.variants, package_map, collection);
- case Enum_Type:
- replace_package_alias(n.base_type, package_map, collection);
- replace_package_alias(n.fields, package_map, collection);
- case Bit_Set_Type:
- replace_package_alias(n.elem, package_map, collection);
- replace_package_alias(n.underlying, package_map, collection);
- case Map_Type:
- replace_package_alias(n.key, package_map, collection);
- replace_package_alias(n.value, package_map, collection);
- case Call_Expr:
- replace_package_alias(n.expr, package_map, collection);
- replace_package_alias(n.args, package_map, collection);
- case Typeid_Type:
+ replace_package_alias(n.list, package_map, collection);
+ case Field_Value:
+ replace_package_alias(n.field, package_map, collection);
+ replace_package_alias(n.value, package_map, collection);
+ case Union_Type:
+ replace_package_alias(n.poly_params, package_map, collection);
+ replace_package_alias(n.align, package_map, collection);
+ replace_package_alias(n.variants, package_map, collection);
+ case Enum_Type:
+ replace_package_alias(n.base_type, package_map, collection);
+ replace_package_alias(n.fields, package_map, collection);
+ case Bit_Set_Type:
+ replace_package_alias(n.elem, package_map, collection);
+ replace_package_alias(n.underlying, package_map, collection);
+ case Map_Type:
+ replace_package_alias(n.key, package_map, collection);
+ replace_package_alias(n.value, package_map, collection);
+ case Call_Expr:
+ replace_package_alias(n.expr, package_map, collection);
+ replace_package_alias(n.args, package_map, collection);
+ case Typeid_Type:
replace_package_alias(n.specialization, package_map, collection);
- case Poly_Type:
- replace_package_alias(n.type, package_map, collection);
- replace_package_alias(n.specialization, package_map, collection);
- case Proc_Group:
- replace_package_alias(n.args, package_map, collection);
- case Comp_Lit:
- replace_package_alias(n.type, package_map, collection);
- replace_package_alias(n.elems, package_map, collection);
- case:
- log.warnf("Replace Unhandled node kind: %T", n);
- }
-
+ case Poly_Type:
+ replace_package_alias(n.type, package_map, collection);
+ replace_package_alias(n.specialization, package_map, collection);
+ case Proc_Group:
+ replace_package_alias(n.args, package_map, collection);
+ case Comp_Lit:
+ replace_package_alias(n.type, package_map, collection);
+ replace_package_alias(n.elems, package_map, collection);
+ case:
+ log.warnf("Replace Unhandled node kind: %T", n);
+ }
} \ No newline at end of file
diff --git a/src/index/file_index.odin b/src/index/file_index.odin
index fe787e5..7b98413 100644
--- a/src/index/file_index.odin
+++ b/src/index/file_index.odin
@@ -1,7 +1 @@
-package index
-
-/*
- This is indexer for static files operating on a file database to index symbols and files.
-
- NOTE(Daniel, Let's be honest probably will not be made any time soon)
- */ \ No newline at end of file
+package index \ No newline at end of file
diff --git a/src/index/indexer.odin b/src/index/indexer.odin
index 451abb6..3d9cb8c 100644
--- a/src/index/indexer.odin
+++ b/src/index/indexer.odin
@@ -6,119 +6,113 @@ import "core:strings"
import "core:log"
import "core:sort"
-
/*
- Concept ideas:
-
- static indexing:
+ Concept ideas:
- is responsible for implementing the indexing of symbols for static files.
+ static indexing:
- This is to solve the scaling problem of large projects with many files and symbols, as most of these files will be static.
+ is responsible for implementing the indexing of symbols for static files.
- Possible scopes for static files:
- global scope (we don't have hiarachy of namespaces and therefore only need to look at the global scope)
+ This is to solve the scaling problem of large projects with many files and symbols, as most of these files will be static.
- Scopes not part of the indexer:
- function scope, file scope, package scope(these are only relevant for dynamic active files in your project, that use the ast instead of indexing)
+ Possible scopes for static files:
+ global scope (we don't have hiarachy of namespaces and therefore only need to look at the global scope)
- Potential features:
- Allow for saving the indexer, instead of recreating it everytime the lsp starts(but you would have to account for stale data).
+ Scopes not part of the indexer:
+ function scope, file scope, package scope(these are only relevant for dynamic active files in your project, that use the ast instead of indexing)
+ Potential features:
+ Allow for saving the indexer, instead of recreating it everytime the lsp starts(but you would have to account for stale data).
- dynamic indexing:
- When the user modifies files we need some smaller index to handle everything the user is using right now. This will allow
- us to rebuild parts of the index without too much of a performance hit.
+ dynamic indexing:
- This index is first searched and if nothing is found look in the static index.
- */
+ When the user modifies files we need some smaller index to handle everything the user is using right now. This will allow
+ us to rebuild parts of the index without too much of a performance hit.
+ This index is first searched and if nothing is found look in the static index.
+*/
Indexer :: struct {
- built_in_packages: [dynamic] string,
- static_index: MemoryIndex,
- dynamic_index: MemoryIndex,
-};
+ built_in_packages: [dynamic]string,
+ static_index: MemoryIndex,
+ dynamic_index: MemoryIndex,
+}
indexer: Indexer;
FuzzyResult :: struct {
- symbol: Symbol,
- score: f32,
-};
-
-
-lookup :: proc(name: string, pkg: string, loc := #caller_location) -> (Symbol, bool) {
+ symbol: Symbol,
+ score: f32,
+}
- if symbol, ok := memory_index_lookup(&indexer.dynamic_index, name, pkg); ok {
- log.infof("lookup dynamic name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
- return symbol, true;
- }
+lookup :: proc (name: string, pkg: string, loc := #caller_location) -> (Symbol, bool) {
- if symbol, ok := memory_index_lookup(&indexer.static_index, name, pkg); ok {
- log.infof("lookup name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
- return symbol, true;
- }
+ if symbol, ok := memory_index_lookup(&indexer.dynamic_index, name, pkg); ok {
+ log.infof("lookup dynamic name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
+ return symbol, true;
+ }
- for built in indexer.built_in_packages {
+ if symbol, ok := memory_index_lookup(&indexer.static_index, name, pkg); ok {
+ log.infof("lookup name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
+ return symbol, true;
+ }
- if symbol, ok := memory_index_lookup(&indexer.static_index, name, built); ok {
- log.infof("lookup name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
- return symbol, true;
- }
+ for built in indexer.built_in_packages {
- }
+ if symbol, ok := memory_index_lookup(&indexer.static_index, name, built); ok {
+ log.infof("lookup name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
+ return symbol, true;
+ }
+ }
- log.infof("lookup failed name: %v pkg: %v location %v", name, pkg, loc);
- return {}, false;
+ log.infof("lookup failed name: %v pkg: %v location %v", name, pkg, loc);
+ return {}, false;
}
+fuzzy_search :: proc (name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
+ dynamic_results, dynamic_ok := memory_index_fuzzy_search(&indexer.dynamic_index, name, pkgs);
+ static_results, static_ok := memory_index_fuzzy_search(&indexer.static_index, name, pkgs);
+ result := make([dynamic]FuzzyResult, context.temp_allocator);
+ files := make(map[string]bool, 0, context.temp_allocator);
-fuzzy_search :: proc(name: string, pkgs: [] string) -> ([] FuzzyResult, bool) {
- dynamic_results, dynamic_ok := memory_index_fuzzy_search(&indexer.dynamic_index, name, pkgs);
- static_results, static_ok := memory_index_fuzzy_search(&indexer.static_index, name, pkgs);
- result := make([dynamic] FuzzyResult, context.temp_allocator);
- files := make(map [string] bool, 0, context.temp_allocator);
-
- if !dynamic_ok || !static_ok {
- return {}, false;
- }
+ if !dynamic_ok || !static_ok {
+ return {}, false;
+ }
- for r in dynamic_results {
- files[r.symbol.uri] = true;
- append(&result, r);
- }
+ for r in dynamic_results {
+ files[r.symbol.uri] = true;
+ append(&result, r);
+ }
- for r in static_results {
+ for r in static_results {
- if r.symbol.uri in files {
- continue;
- }
+ if r.symbol.uri in files {
+ continue;
+ }
- append(&result, r);
- }
+ append(&result, r);
+ }
- sort.sort(fuzzy_sort_interface(&result));
+ sort.sort(fuzzy_sort_interface(&result));
- return result[:], true;
+ return result[:], true;
}
-
-fuzzy_sort_interface :: proc(s: ^[dynamic] FuzzyResult) -> sort.Interface {
- return sort.Interface{
+fuzzy_sort_interface :: proc (s: ^[dynamic]FuzzyResult) -> sort.Interface {
+ return sort.Interface {
collection = rawptr(s),
- len = proc(it: sort.Interface) -> int {
- s := (^[dynamic] FuzzyResult)(it.collection);
+ len = proc (it: sort.Interface) -> int {
+ s := (^[dynamic]FuzzyResult)(it.collection);
return len(s^);
},
- less = proc(it: sort.Interface, i, j: int) -> bool {
- s := (^[dynamic] FuzzyResult)(it.collection);
+ less = proc (it: sort.Interface, i, j: int) -> bool {
+ s := (^[dynamic]FuzzyResult)(it.collection);
return s[i].score > s[j].score;
},
- swap = proc(it: sort.Interface, i, j: int) {
- s := (^[dynamic] FuzzyResult)(it.collection);
+ swap = proc (it: sort.Interface, i, j: int) {
+ s := (^[dynamic]FuzzyResult)(it.collection);
s[i], s[j] = s[j], s[i];
},
};
-}
+} \ No newline at end of file
diff --git a/src/index/memory_index.odin b/src/index/memory_index.odin
index d7880a6..3361ba1 100644
--- a/src/index/memory_index.odin
+++ b/src/index/memory_index.odin
@@ -9,66 +9,63 @@ import "core:sort"
import "shared:common"
/*
- This is a in memory index designed for the dynamic indexing of symbols and files.
- Designed for few files and should be fast at rebuilding.
+ This is a in memory index designed for the dynamic indexing of symbols and files.
+ Designed for few files and should be fast at rebuilding.
- Right now the implementation is extremely naive.
- */
+ Right now the implementation is extremely naive.
+*/
MemoryIndex :: struct {
- collection: SymbolCollection,
-};
-
-
-make_memory_index :: proc(collection: SymbolCollection) -> MemoryIndex {
+ collection: SymbolCollection,
+}
- return MemoryIndex {
- collection = collection,
- };
+make_memory_index :: proc (collection: SymbolCollection) -> MemoryIndex {
+ return MemoryIndex {
+ collection = collection
+ };
}
-memory_index_lookup :: proc(index: ^MemoryIndex, name: string, pkg: string) -> (Symbol, bool) {
- id := get_symbol_id(strings.concatenate({pkg, name}, context.temp_allocator));
- return index.collection.symbols[id];
+memory_index_lookup :: proc (index: ^MemoryIndex, name: string, pkg: string) -> (Symbol, bool) {
+ id := get_symbol_id(strings.concatenate({pkg, name}, context.temp_allocator));
+ return index.collection.symbols[id];
}
-memory_index_fuzzy_search :: proc(index: ^MemoryIndex, name: string, pkgs: [] string) -> ([] FuzzyResult, bool) {
-
- symbols := make([dynamic] FuzzyResult, 0, context.temp_allocator);
+memory_index_fuzzy_search :: proc (index: ^MemoryIndex, name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
- fuzzy_matcher := common.make_fuzzy_matcher(name);
+ symbols := make([dynamic]FuzzyResult, 0, context.temp_allocator);
- top := 20;
+ fuzzy_matcher := common.make_fuzzy_matcher(name);
- for _, symbol in index.collection.symbols {
+ top := 20;
- if !exists_in_scope(symbol.pkg, pkgs) {
- continue;
- }
+ for _, symbol in index.collection.symbols {
- if score, ok := common.fuzzy_match(fuzzy_matcher, symbol.name); ok {
- result := FuzzyResult {
- symbol = symbol,
- score = score,
- };
+ if !exists_in_scope(symbol.pkg, pkgs) {
+ continue;
+ }
- append(&symbols, result);
- }
+ if score, ok := common.fuzzy_match(fuzzy_matcher, symbol.name); ok {
+ result := FuzzyResult {
+ symbol = symbol,
+ score = score,
+ };
- }
+ append(&symbols, result);
+ }
+ }
- sort.sort(fuzzy_sort_interface(&symbols));
+ sort.sort(fuzzy_sort_interface(&symbols));
- return symbols[:min(top, len(symbols))], true;
+ return symbols[:min(top, len(symbols))], true;
}
-exists_in_scope :: proc(symbol_scope: string, scope: [] string) -> bool {
+exists_in_scope :: proc (symbol_scope: string, scope: []string) -> bool {
- for s in scope {
- if strings.compare(symbol_scope, s) == 0 {
- return true;
- }
- }
+ for s in scope {
+ if strings.compare(symbol_scope, s) == 0 {
+ return true;
+ }
+ }
- return false;
+ return false;
} \ No newline at end of file
diff --git a/src/index/symbol.odin b/src/index/symbol.odin
index cb61d11..2e06e47 100644
--- a/src/index/symbol.odin
+++ b/src/index/symbol.odin
@@ -12,125 +12,127 @@ import "core:slice"
import "shared:common"
/*
- Note(Daniel, how concerned should we be about keeping the memory usage low for the symbol. You could hash some of strings.
- Right now I have the unique string map in order to have strings reference the same string match.)
+ Note(Daniel, how concerned should we be about keeping the memory usage low for the symbol. You could hash some of strings.
+ Right now I have the unique string map in order to have strings reference the same string match.)
- */
+*/
SymbolStructValue :: struct {
- names: [] string,
- types: [] ^ast.Expr,
- usings: map [string] bool,
- generic: bool,
-};
-
-
-SymbolPackageValue :: struct {
+ names: []string,
+ types: []^ast.Expr,
+ usings: map[string]bool,
+ generic: bool,
+}
-};
+SymbolPackageValue :: struct {}
SymbolProcedureValue :: struct {
- return_types: [] ^ast.Field,
- arg_types: [] ^ast.Field,
- generic: bool,
-};
+ return_types: []^ast.Field,
+ arg_types: []^ast.Field,
+ generic: bool,
+}
SymbolProcedureGroupValue :: struct {
- group: ^ast.Expr,
-};
+ group: ^ast.Expr,
+}
SymbolEnumValue :: struct {
- names: [] string,
-};
+ names: []string,
+}
SymbolUnionValue :: struct {
- names: [] string,
-};
+ names: []string,
+}
SymbolBitSetValue :: struct {
- expr: ^ast.Expr,
-};
+ expr: ^ast.Expr,
+}
/*
- Generic symbol that is used by the indexer for any variable type(constants, defined global variables, etc),
+ Generic symbol that is used by the indexer for any variable type(constants, defined global variables, etc),
*/
SymbolGenericValue :: struct {
- expr: ^ast.Expr,
-};
+ expr: ^ast.Expr,
+}
SymbolValue :: union {
- SymbolStructValue,
- SymbolPackageValue,
- SymbolProcedureValue,
- SymbolGenericValue,
- SymbolProcedureGroupValue,
- SymbolUnionValue,
- SymbolEnumValue,
- SymbolBitSetValue,
-};
+ SymbolStructValue,
+ SymbolPackageValue,
+ SymbolProcedureValue,
+ SymbolGenericValue,
+ SymbolProcedureGroupValue,
+ SymbolUnionValue,
+ SymbolEnumValue,
+ SymbolBitSetValue,
+}
Symbol :: struct {
- range: common.Range,
- uri: string,
- pkg: string,
- name: string,
- doc: string,
- signature: string,
- returns: string,
- type: SymbolType,
- value: SymbolValue,
-};
-
-SymbolType :: enum {
- Function = 3,
- Field = 5,
- Variable = 6,
- Package = 9, //set by ast symbol
- Enum = 13,
- Keyword = 14, //set by ast symbol
- EnumMember = 20,
- Struct = 22,
-};
-
-free_symbol :: proc(symbol: Symbol, allocator: mem.Allocator) {
-
- if symbol.signature != "" && symbol.signature != "struct" &&
- symbol.signature != "union" && symbol.signature != "enum" &&
- symbol.signature != "bitset" && symbol.signature != "bitfield" {
- delete(symbol.signature, allocator);
- }
-
- if symbol.returns != "" {
- delete(symbol.returns, allocator);
- }
-
- if symbol.doc != "" {
- delete(symbol.doc, allocator);
- }
-
- #partial switch v in symbol.value {
- case SymbolProcedureValue:
- common.free_ast(v.return_types, allocator);
- common.free_ast(v.arg_types, allocator);
- case SymbolStructValue:
- delete(v.names, allocator);
- common.free_ast(v.types, allocator);
- case SymbolGenericValue:
- common.free_ast(v.expr, allocator);
- case SymbolProcedureGroupValue:
- common.free_ast(v.group, allocator);
- case SymbolEnumValue:
- delete(v.names, allocator);
- case SymbolUnionValue:
- delete(v.names, allocator);
- case SymbolBitSetValue:
- common.free_ast(v.expr, allocator);
- }
+ range: common.Range,
+ uri: string,
+ pkg: string,
+ name: string,
+ doc: string,
+ signature: string,
+ returns: string,
+ type: SymbolType,
+ value: SymbolValue,
+}
+
+SymbolType :: enum
+
+//set by ast symbol
+
+//set by ast symbol
+
+{
+ Function = 3,
+ Field = 5,
+ Variable = 6,
+ Package = 9,
+ Enum = 13,
+ Keyword = 14,
+ EnumMember = 20,
+ Struct = 22,
+}
+free_symbol :: proc (symbol: Symbol, allocator: mem.Allocator) {
+
+ if symbol.signature != "" && symbol.signature != "struct" &&
+ symbol.signature != "union" && symbol.signature != "enum" &&
+ symbol.signature != "bitset" && symbol.signature != "bitfield" {
+ delete(symbol.signature, allocator);
+ }
+
+ if symbol.returns != "" {
+ delete(symbol.returns, allocator);
+ }
+
+ if symbol.doc != "" {
+ delete(symbol.doc, allocator);
+ }
+
+ #partial switch v in symbol.value {
+ case SymbolProcedureValue:
+ common.free_ast(v.return_types, allocator);
+ common.free_ast(v.arg_types, allocator);
+ case SymbolStructValue:
+ delete(v.names, allocator);
+ common.free_ast(v.types, allocator);
+ case SymbolGenericValue:
+ common.free_ast(v.expr, allocator);
+ case SymbolProcedureGroupValue:
+ common.free_ast(v.group, allocator);
+ case SymbolEnumValue:
+ delete(v.names, allocator);
+ case SymbolUnionValue:
+ delete(v.names, allocator);
+ case SymbolBitSetValue:
+ common.free_ast(v.expr, allocator);
+ }
}
-get_symbol_id :: proc(str: string) -> uint {
- ret := common.sha1_hash(transmute([]byte)str);
- r := cast(^uint)slice.first_ptr(ret[:]);
- return r^;
+get_symbol_id :: proc (str: string) -> uint {
+ ret := common.sha1_hash(transmute([]byte)str);
+ r := cast(^uint)slice.first_ptr(ret[:]);
+ return r^;
} \ No newline at end of file
diff --git a/src/index/util.odin b/src/index/util.odin
index ea49eb0..b179a01 100644
--- a/src/index/util.odin
+++ b/src/index/util.odin
@@ -5,171 +5,164 @@ import "core:strings"
import "core:path"
/*
- Returns the string representation of a type. This allows us to print the signature without storing it in the indexer as a string(saving memory).
+ Returns the string representation of a type. This allows us to print the signature without storing it in the indexer as a string(saving memory).
*/
-node_to_string :: proc(node: ^ast.Node) -> string {
+node_to_string :: proc (node: ^ast.Node) -> string {
- builder := strings.make_builder(context.temp_allocator);
+ builder := strings.make_builder(context.temp_allocator);
- build_string(node, &builder);
+ build_string(node, &builder);
- return strings.to_string(builder);
+ return strings.to_string(builder);
}
-build_string :: proc{
- build_string_ast_array,
- build_string_dynamic_array,
- build_string_node,
-};
+build_string :: proc {
+build_string_ast_array,
+build_string_dynamic_array,
+build_string_node};
-build_string_dynamic_array :: proc(array: $A/[]^$T, builder: ^strings.Builder) {
-
- for elem, i in array {
- build_string(elem, builder);
- }
+build_string_dynamic_array :: proc (array: $A/[]^$T, builder: ^strings.Builder) {
+ for elem, i in array {
+ build_string(elem, builder);
+ }
}
-build_string_ast_array :: proc(array: $A/[dynamic]^$T, builder: ^strings.Builder) {
-
- for elem, i in array {
- build_string(elem, builder);
- }
+build_string_ast_array :: proc (array: $A/[dynamic]^$T, builder: ^strings.Builder) {
+ for elem, i in array {
+ build_string(elem, builder);
+ }
}
-build_string_node :: proc(node: ^ast.Node, builder: ^strings.Builder) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- if strings.contains(n.name, "/") {
- strings.write_string(builder, path.base(n.name, false, context.temp_allocator));
- }
- else {
- strings.write_string(builder, n.name);
- }
- case Implicit:
- case Undef:
- case Basic_Lit:
- //strings.write_string(builder, n.tok.text);
- case Ellipsis:
- build_string(n.expr, builder);
- case Proc_Lit:
- build_string(n.type, builder);
- build_string(n.body, builder);
- case Comp_Lit:
- build_string(n.type, builder);
- build_string(n.elems, builder);
- case Tag_Expr:
- build_string(n.expr, builder);
- case Unary_Expr:
- build_string(n.expr, builder);
- case Binary_Expr:
- build_string(n.left, builder);
- build_string(n.right, builder);
- case Paren_Expr:
- strings.write_string(builder, "(");
- build_string(n.expr, builder);
- strings.write_string(builder, ")");
- case Call_Expr:
- build_string(n.expr, builder);
- strings.write_string(builder, "(");
- build_string(n.args, builder);
- strings.write_string(builder, ")");
- case Selector_Expr:
- build_string(n.expr, builder);
- strings.write_string(builder, ".");
- build_string(n.field, builder);
- case Index_Expr:
- build_string(n.expr, builder);
- strings.write_string(builder, "[");
- build_string(n.index, builder);
- strings.write_string(builder, "]");
- case Deref_Expr:
- build_string(n.expr, builder);
- case Slice_Expr:
- build_string(n.expr, builder);
- build_string(n.low, builder);
- build_string(n.high, builder);
- case Field_Value:
- build_string(n.field, builder);
- strings.write_string(builder, ": ");
- build_string(n.value, builder);
- case Type_Cast:
- build_string(n.type, builder);
- build_string(n.expr, builder);
- case Bad_Stmt:
- case Bad_Decl:
- case Attribute:
- build_string(n.elems, builder);
- case Field:
- build_string(n.names, builder);
- if len(n.names) > 0 {
- strings.write_string(builder, ": ");
- }
- build_string(n.type, builder);
- build_string(n.default_value, builder);
- case Field_List:
- for field, i in n.list {
- build_string(field, builder);
- if len(n.list) - 1 != i {
- strings.write_string(builder, ",");
- }
- }
- case Typeid_Type:
- build_string(n.specialization, builder);
- case Helper_Type:
- build_string(n.type, builder);
- case Distinct_Type:
- build_string(n.type, builder);
- case Poly_Type:
- build_string(n.type, builder);
- build_string(n.specialization, builder);
- case Proc_Type:
- strings.write_string(builder, "proc(");
- build_string(n.params, builder);
- strings.write_string(builder, ") -> ");
- build_string(n.results, builder);
- case Pointer_Type:
- strings.write_string(builder, "^");
- build_string(n.elem, builder);
- case Array_Type:
- strings.write_string(builder, "[");
- build_string(n.len, builder);
- strings.write_string(builder, "]");
- build_string(n.elem, builder);
- case Dynamic_Array_Type:
- strings.write_string(builder, "[dynamic]");
- build_string(n.elem, builder);
- case Struct_Type:
- build_string(n.poly_params, builder);
- build_string(n.align, builder);
- build_string(n.fields, builder);
- case Union_Type:
- build_string(n.poly_params, builder);
- build_string(n.align, builder);
- build_string(n.variants, builder);
- case Enum_Type:
- build_string(n.base_type, builder);
- build_string(n.fields, builder);
- case Bit_Set_Type:
- build_string(n.elem, builder);
- build_string(n.underlying, builder);
- case Map_Type:
- strings.write_string(builder, "map");
- strings.write_string(builder, "[");
- build_string(n.key, builder);
- strings.write_string(builder, "]");
- build_string(n.value, builder);
- }
-
-
-
-}
+build_string_node :: proc (node: ^ast.Node, builder: ^strings.Builder) {
+
+ using ast;
+
+ if node == nil {
+ return;
+ }
+
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ if strings.contains(n.name, "/") {
+ strings.write_string(builder, path.base(n.name, false, context.temp_allocator));
+ } else {
+ strings.write_string(builder, n.name);
+ }
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ //strings.write_string(builder, n.tok.text);
+ case Ellipsis:
+ build_string(n.expr, builder);
+ case Proc_Lit:
+ build_string(n.type, builder);
+ build_string(n.body, builder);
+ case Comp_Lit:
+ build_string(n.type, builder);
+ build_string(n.elems, builder);
+ case Tag_Expr:
+ build_string(n.expr, builder);
+ case Unary_Expr:
+ build_string(n.expr, builder);
+ case Binary_Expr:
+ build_string(n.left, builder);
+ build_string(n.right, builder);
+ case Paren_Expr:
+ strings.write_string(builder, "(");
+ build_string(n.expr, builder);
+ strings.write_string(builder, ")");
+ case Call_Expr:
+ build_string(n.expr, builder);
+ strings.write_string(builder, "(");
+ build_string(n.args, builder);
+ strings.write_string(builder, ")");
+ case Selector_Expr:
+ build_string(n.expr, builder);
+ strings.write_string(builder, ".");
+ build_string(n.field, builder);
+ case Index_Expr:
+ build_string(n.expr, builder);
+ strings.write_string(builder, "[");
+ build_string(n.index, builder);
+ strings.write_string(builder, "]");
+ case Deref_Expr:
+ build_string(n.expr, builder);
+ case Slice_Expr:
+ build_string(n.expr, builder);
+ build_string(n.low, builder);
+ build_string(n.high, builder);
+ case Field_Value:
+ build_string(n.field, builder);
+ strings.write_string(builder, ": ");
+ build_string(n.value, builder);
+ case Type_Cast:
+ build_string(n.type, builder);
+ build_string(n.expr, builder);
+ case Bad_Stmt:
+ case Bad_Decl:
+ case Attribute:
+ build_string(n.elems, builder);
+ case Field:
+ build_string(n.names, builder);
+ if len(n.names) > 0 {
+ strings.write_string(builder, ": ");
+ }
+ build_string(n.type, builder);
+ build_string(n.default_value, builder);
+ case Field_List:
+ for field, i in n.list {
+ build_string(field, builder);
+ if len(n.list) - 1 != i {
+ strings.write_string(builder, ",");
+ }
+ }
+ case Typeid_Type:
+ build_string(n.specialization, builder);
+ case Helper_Type:
+ build_string(n.type, builder);
+ case Distinct_Type:
+ build_string(n.type, builder);
+ case Poly_Type:
+ build_string(n.type, builder);
+ build_string(n.specialization, builder);
+ case Proc_Type:
+ strings.write_string(builder, "proc(");
+ build_string(n.params, builder);
+ strings.write_string(builder, ") -> ");
+ build_string(n.results, builder);
+ case Pointer_Type:
+ strings.write_string(builder, "^");
+ build_string(n.elem, builder);
+ case Array_Type:
+ strings.write_string(builder, "[");
+ build_string(n.len, builder);
+ strings.write_string(builder, "]");
+ build_string(n.elem, builder);
+ case Dynamic_Array_Type:
+ strings.write_string(builder, "[dynamic]");
+ build_string(n.elem, builder);
+ case Struct_Type:
+ build_string(n.poly_params, builder);
+ build_string(n.align, builder);
+ build_string(n.fields, builder);
+ case Union_Type:
+ build_string(n.poly_params, builder);
+ build_string(n.align, builder);
+ build_string(n.variants, builder);
+ case Enum_Type:
+ build_string(n.base_type, builder);
+ build_string(n.fields, builder);
+ case Bit_Set_Type:
+ build_string(n.elem, builder);
+ build_string(n.underlying, builder);
+ case Map_Type:
+ strings.write_string(builder, "map");
+ strings.write_string(builder, "[");
+ build_string(n.key, builder);
+ strings.write_string(builder, "]");
+ build_string(n.value, builder);
+ }
+} \ No newline at end of file
diff --git a/src/main.odin b/src/main.odin
index e480e35..c8587f2 100644
--- a/src/main.odin
+++ b/src/main.odin
@@ -17,103 +17,96 @@ import "shared:index"
import "shared:server"
import "shared:common"
-os_read :: proc(handle: rawptr, data: [] byte) -> (int, int)
+os_read :: proc (handle: rawptr, data: []byte) -> (int, int)
{
- ptr := cast(^os.Handle)handle;
- a, b := os.read(ptr^, data);
- return a, cast(int)b;
+ ptr := cast(^os.Handle)handle;
+ a, b := os.read(ptr^, data);
+ return a, cast(int)b;
}
-os_write :: proc(handle: rawptr, data: [] byte) -> (int, int)
+os_write :: proc (handle: rawptr, data: []byte) -> (int, int)
{
- ptr := cast(^os.Handle)handle;
- a, b := os.write(ptr^, data);
- return a, cast(int)b;
+ ptr := cast(^os.Handle)handle;
+ a, b := os.write(ptr^, data);
+ return a, cast(int)b;
}
//Note(Daniel, Should look into handling errors without crashing from parsing)
-
verbose_logger: log.Logger;
-run :: proc(reader: ^server.Reader, writer: ^server.Writer) {
-
- config: common.Config;
- config.debug_single_thread = true;
- config.collections = make(map [string] string);
+run :: proc (reader: ^server.Reader, writer: ^server.Writer) {
- log.info("Starting Odin Language Server");
+ config: common.Config;
+ config.debug_single_thread = true;
+ config.collections = make(map[string]string);
+ log.info("Starting Odin Language Server");
- config.running = true;
+ config.running = true;
- for config.running {
+ for config.running {
- if config.verbose {
- context.logger = verbose_logger;
- }
+ if config.verbose {
+ context.logger = verbose_logger;
+ } else {
+ context.logger = log.Logger {nil, nil, log.Level.Debug, nil};
+ }
- else {
- context.logger = log.Logger{nil, nil, log.Level.Debug, nil};
- }
+ header, success := server.read_and_parse_header(reader);
- header, success := server.read_and_parse_header(reader);
+ if (!success) {
+ log.error("Failed to read and parse header");
+ return;
+ }
- if(!success) {
- log.error("Failed to read and parse header");
- return;
- }
+ value: json.Value;
+ value, success = server.read_and_parse_body(reader, header);
- value: json.Value;
- value, success = server.read_and_parse_body(reader, header);
+ if (!success) {
+ log.error("Failed to read and parse body");
+ return;
+ }
- if(!success) {
- log.error("Failed to read and parse body");
- return;
- }
+ success = server.handle_request(value, &config, writer);
- success = server.handle_request(value, &config, writer);
+ if (!success) {
+ log.error("Unrecoverable handle request");
+ return;
+ }
- if(!success) {
- log.error("Unrecoverable handle request");
- return;
- }
+ free_all(context.temp_allocator);
+ }
- free_all(context.temp_allocator);
- }
+ for k, v in config.collections {
+ delete(k);
+ delete(v);
+ }
- for k, v in config.collections {
- delete(k);
- delete(v);
- }
+ delete(config.collections);
+ delete(config.workspace_folders);
- delete(config.collections);
- delete(config.workspace_folders);
+ server.document_storage_shutdown();
- server.document_storage_shutdown();
+ index.free_static_index();
- index.free_static_index();
-
- common.pool_wait_and_process(&server.pool);
- common.pool_destroy(&server.pool);
+ common.pool_wait_and_process(&server.pool);
+ common.pool_destroy(&server.pool);
}
-end :: proc() {
-
+end :: proc () {
}
+main :: proc () {
-main :: proc() {
+ reader := server.make_reader(os_read, cast(rawptr)&os.stdin);
+ writer := server.make_writer(os_write, cast(rawptr)&os.stdout);
- reader := server.make_reader(os_read, cast(rawptr)&os.stdin);
- writer := server.make_writer(os_write, cast(rawptr)&os.stdout);
+ verbose_logger := server.create_lsp_logger(&writer, log.Level.Error);
- verbose_logger := server.create_lsp_logger(&writer, log.Level.Error);
+ context.logger = verbose_logger;
- context.logger = verbose_logger;
-
- init_global_temporary_allocator(mem.megabytes(100));
-
- run(&reader, &writer);
-}
+ init_global_temporary_allocator(mem.megabytes(100));
+ run(&reader, &writer);
+} \ No newline at end of file
diff --git a/src/server/action.odin b/src/server/action.odin
index f0b97ed..44b3458 100644
--- a/src/server/action.odin
+++ b/src/server/action.odin
@@ -1,22 +1,16 @@
package server
-CodeActionKind :: struct {
-
-};
+CodeActionKind :: struct {}
CodeActionClientCapabilities :: struct {
-
codeActionLiteralSupport: struct {
-
codeActionKind: struct {
- valueSet: [] CodeActionKind,
+ valueSet: []CodeActionKind,
},
},
-
-};
+}
CodeActionOptions :: struct {
- codeActionKinds: [] CodeActionKind,
+ codeActionKinds: []CodeActionKind,
resolveProvider: bool,
-};
-
+} \ No newline at end of file
diff --git a/src/server/analysis.odin b/src/server/analysis.odin
index d9b9dc2..8dc4509 100644
--- a/src/server/analysis.odin
+++ b/src/server/analysis.odin
@@ -18,2701 +18,2479 @@ import "shared:common"
import "shared:index"
/*
- TODO(replace all of the possible ast walking with the new odin visitor function)
- TODO(improve the current_package logic, kinda confusing switching between different packages with selectors)
+ TODO(replace all of the possible ast walking with the new odin visitor function)
+ TODO(improve the current_package logic, kinda confusing switching between different packages with selectors)
*/
-bool_lit := "bool";
-int_lit := "int";
+bool_lit := "bool";
+int_lit := "int";
string_lit := "string";
DocumentPositionContextHint :: enum {
- Completion,
- SignatureHelp,
- Definition,
- Hover,
-};
+ Completion,
+ SignatureHelp,
+ Definition,
+ Hover,
+}
DocumentPositionContext :: struct {
- file: ast.File,
- position: common.AbsolutePosition,
- line: int,
- function: ^ast.Proc_Lit, //used to help with type resolving in function scope
- selector: ^ast.Expr, //used for completion
- identifier: ^ast.Node,
- field: ^ast.Expr, //used for completion
- call: ^ast.Expr, //used for signature help
- returns: ^ast.Return_Stmt, //used for completion
- comp_lit: ^ast.Comp_Lit, //used for completion
- parent_comp_lit: ^ast.Comp_Lit, //used for completion
- implicit: bool, //used for completion
- arrow: bool,
- binary: ^ast.Binary_Expr, //used for completion
- assign: ^ast.Assign_Stmt, //used for completion
- switch_stmt: ^ast.Switch_Stmt, //used for completion
- switch_type_stmt: ^ast.Type_Switch_Stmt, //used for completion
- case_clause: ^ast.Case_Clause, //used for completion
- value_decl: ^ast.Value_Decl, //used for completion
- hint: DocumentPositionContextHint,
-};
+ file: ast.File,
+ position: common.AbsolutePosition,
+ line: int,
+ function: ^ast.Proc_Lit, //used to help with type resolving in function scope
+ selector: ^ast.Expr, //used for completion
+ identifier: ^ast.Node,
+ field: ^ast.Expr, //used for completion
+ call: ^ast.Expr, //used for signature help
+ returns: ^ast.Return_Stmt, //used for completion
+ comp_lit: ^ast.Comp_Lit, //used for completion
+ parent_comp_lit: ^ast.Comp_Lit, //used for completion
+ implicit: bool, //used for completion
+ arrow: bool,
+ binary: ^ast.Binary_Expr, //used for completion
+ assign: ^ast.Assign_Stmt, //used for completion
+ switch_stmt: ^ast.Switch_Stmt, //used for completion
+ switch_type_stmt: ^ast.Type_Switch_Stmt, //used for completion
+ case_clause: ^ast.Case_Clause, //used for completion
+ value_decl: ^ast.Value_Decl, //used for completion
+ hint: DocumentPositionContextHint,
+}
DocumentLocal :: struct {
- expr: ^ast.Expr,
- offset: int,
+ expr: ^ast.Expr,
+ offset: int,
}
AstContext :: struct {
- locals: map [string] [dynamic] DocumentLocal, //locals all the way to the document position
- globals: map [string] ^ast.Expr,
- variables: map [string] bool,
- parameters: map [string] bool,
- in_package: map[string] string, //sometimes you have to extract types from arrays/maps and you lose package information
- usings: [dynamic] string,
- file: ast.File,
- allocator: mem.Allocator,
- imports: [] Package, //imports for the current document
- current_package: string,
- document_package: string,
- use_globals: bool,
- use_locals: bool,
- call: ^ast.Call_Expr, //used to determene the types for generics and the correct function for overloaded functions
- position: common.AbsolutePosition,
- value_decl: ^ast.Value_Decl,
- field_name: string,
-};
-
-make_ast_context :: proc(file: ast.File, imports: [] Package, package_name: string, allocator := context.temp_allocator) -> AstContext {
-
- ast_context := AstContext {
- locals = make(map [string] [dynamic] DocumentLocal, 0, allocator),
- globals = make(map [string] ^ast.Expr, 0, allocator),
- variables = make(map [string] bool, 0, allocator),
- usings = make([dynamic] string, allocator),
- parameters = make(map [string] bool, 0, allocator),
- in_package = make(map[string] string, 0, allocator),
- file = file,
- imports = imports,
- use_locals = true,
- use_globals = true,
- document_package = package_name,
- current_package = package_name,
- };
- return ast_context;
+ locals: map[string][dynamic]DocumentLocal, //locals all the way to the document position
+ globals: map[string]^ast.Expr,
+ variables: map[string]bool,
+ parameters: map[string]bool,
+ in_package: map[string]string, //sometimes you have to extract types from arrays/maps and you lose package information
+ usings: [dynamic]string,
+ file: ast.File,
+ allocator: mem.Allocator,
+ imports: []Package, //imports for the current document
+ current_package: string,
+ document_package: string,
+ use_globals: bool,
+ use_locals: bool,
+ call: ^ast.Call_Expr, //used to determene the types for generics and the correct function for overloaded functions
+ position: common.AbsolutePosition,
+ value_decl: ^ast.Value_Decl,
+ field_name: string,
}
-tokenizer_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+make_ast_context :: proc (file: ast.File, imports: []Package, package_name: string, allocator := context.temp_allocator) -> AstContext {
+
+ ast_context := AstContext {
+ locals = make(map[string][dynamic]DocumentLocal, 0, allocator),
+ globals = make(map[string]^ast.Expr, 0, allocator),
+ variables = make(map[string]bool, 0, allocator),
+ usings = make([dynamic]string, allocator),
+ parameters = make(map[string]bool, 0, allocator),
+ in_package = make(map[string]string, 0, allocator),
+ file = file,
+ imports = imports,
+ use_locals = true,
+ use_globals = true,
+ document_package = package_name,
+ current_package = package_name,
+ };
+ return ast_context;
+}
+tokenizer_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
}
/*
- Walk through the type expression while both the call expression and specialization type are the same
- */
+ Walk through the type expression while both the call expression and specialization type are the same
+*/
resolve_poly_spec :: proc {
- resolve_poly_spec_node,
- resolve_poly_spec_array,
- resolve_poly_spec_dynamic_array,
-};
+resolve_poly_spec_node,
+resolve_poly_spec_array,
+resolve_poly_spec_dynamic_array};
-resolve_poly_spec_array :: proc(ast_context: ^AstContext, call_array: $A/[]^$T, spec_array: $D/[]^$K, poly_map: ^map[string]^ast.Expr) {
+resolve_poly_spec_array :: proc (ast_context: ^AstContext, call_array: $A/[]^$T, spec_array: $D/[]^$K, poly_map: ^map[string]^ast.Expr) {
- if len(call_array) != len(spec_array) {
- return;
- }
-
- for elem, i in call_array {
- resolve_poly_spec(ast_context, elem, spec_array[i], poly_map);
- }
+ if len(call_array) != len(spec_array) {
+ return;
+ }
+ for elem, i in call_array {
+ resolve_poly_spec(ast_context, elem, spec_array[i], poly_map);
+ }
}
-resolve_poly_spec_dynamic_array :: proc(ast_context: ^AstContext, call_array: $A/[dynamic]^$T, spec_array: $D/[dynamic]^$K, poly_map: ^map[string]^ast.Expr) {
+resolve_poly_spec_dynamic_array :: proc (ast_context: ^AstContext, call_array: $A/[dynamic]^$T, spec_array: $D/[dynamic]^$K, poly_map: ^map[string]^ast.Expr) {
- if len(call_array) != len(spec_array) {
- return;
- }
-
- for elem, i in call_array {
- resolve_poly_spec(ast_context, elem, spec_array[i], poly_map);
- }
+ if len(call_array) != len(spec_array) {
+ return;
+ }
+ for elem, i in call_array {
+ resolve_poly_spec(ast_context, elem, spec_array[i], poly_map);
+ }
}
-get_poly_node_to_expr :: proc(node: ^ast.Node) -> ^ast.Expr {
-
- using ast;
+get_poly_node_to_expr :: proc (node: ^ast.Node) -> ^ast.Expr {
- switch v in node.derived {
- case Ident:
- return cast(^Expr)node;
- case:
- log.warnf("Unhandled poly to node kind %v", v);
- }
+ using ast;
- return nil;
-}
-
-resolve_poly_spec_node :: proc(ast_context: ^AstContext, call_node: ^ast.Node, spec_node: ^ast.Node, poly_map: ^map[string]^ast.Expr) {
-
- /*
- Note(Daniel, uncertain about the switch cases being enough or too little)
- */
-
- using ast;
-
- if call_node == nil || spec_node == nil {
- return;
- }
-
- switch m in spec_node.derived {
- case Bad_Expr:
- case Ident:
- case Implicit:
- case Undef:
- case Basic_Lit:
- case Poly_Type:
- if expr := get_poly_node_to_expr(call_node); expr != nil {
- poly_map[m.type.name] = expr;
- }
- case Ellipsis:
- if n, ok := call_node.derived.(Ellipsis); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- }
- case Tag_Expr:
- if n, ok := call_node.derived.(Tag_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- }
- case Unary_Expr:
- if n, ok := call_node.derived.(Unary_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- }
- case Binary_Expr:
- if n, ok := call_node.derived.(Binary_Expr); ok {
- resolve_poly_spec(ast_context, n.left, m.left, poly_map);
- resolve_poly_spec(ast_context, n.right, m.right, poly_map);
- }
- case Paren_Expr:
- if n, ok := call_node.derived.(Paren_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- }
- case Selector_Expr:
- if n, ok := call_node.derived.(Selector_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- resolve_poly_spec(ast_context, n.field, m.field, poly_map);
- }
- case Slice_Expr:
- if n, ok := call_node.derived.(Slice_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- resolve_poly_spec(ast_context, n.low, m.low, poly_map);
- resolve_poly_spec(ast_context, n.high, m.high, poly_map);
- }
- case Distinct_Type:
- if n, ok := call_node.derived.(Distinct_Type); ok {
- resolve_poly_spec(ast_context, n.type, m.type, poly_map);
- }
- case Proc_Type:
- if n, ok := call_node.derived.(Proc_Type); ok {
- resolve_poly_spec(ast_context, n.params, m.params, poly_map);
- resolve_poly_spec(ast_context, n.results, m.results, poly_map);
- }
- case Pointer_Type:
- if n, ok := call_node.derived.(Pointer_Type); ok {
- resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
- }
- case Array_Type:
- if n, ok := call_node.derived.(Array_Type); ok {
- resolve_poly_spec(ast_context, n.len, m.len, poly_map);
- resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
- }
- case Dynamic_Array_Type:
- if n, ok := call_node.derived.(Dynamic_Array_Type); ok {
- resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
- }
- case Struct_Type:
- if n, ok := call_node.derived.(Struct_Type); ok {
- resolve_poly_spec(ast_context, n.poly_params, m.poly_params, poly_map);
- resolve_poly_spec(ast_context, n.align, m.align, poly_map);
- resolve_poly_spec(ast_context, n.fields, m.fields, poly_map);
- }
- case Field:
- if n, ok := call_node.derived.(Field); ok {
- resolve_poly_spec(ast_context, n.names, m.names, poly_map);
- resolve_poly_spec(ast_context, n.type, m.type, poly_map);
- resolve_poly_spec(ast_context, n.default_value, m.default_value, poly_map);
- }
- case Field_List:
- if n, ok := call_node.derived.(Field_List); ok {
- resolve_poly_spec(ast_context, n.list, m.list, poly_map);
- }
- case Field_Value:
- if n, ok := call_node.derived.(Field_Value); ok {
- resolve_poly_spec(ast_context, n.field, m.field, poly_map);
- resolve_poly_spec(ast_context, n.value, m.value, poly_map);
- }
- case Union_Type:
- if n, ok := call_node.derived.(Union_Type); ok {
- resolve_poly_spec(ast_context, n.poly_params, m.poly_params, poly_map);
- resolve_poly_spec(ast_context, n.align, m.align, poly_map);
- resolve_poly_spec(ast_context, n.variants, m.variants, poly_map);
- }
- case Enum_Type:
- if n, ok := call_node.derived.(Enum_Type); ok {
- resolve_poly_spec(ast_context, n.base_type, m.base_type, poly_map);
- resolve_poly_spec(ast_context, n.fields, m.fields, poly_map);
- }
- case Bit_Set_Type:
- if n, ok := call_node.derived.(Bit_Set_Type); ok {
- resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
- resolve_poly_spec(ast_context, n.underlying, m.underlying, poly_map);
- }
- case Map_Type:
- if n, ok := call_node.derived.(Map_Type); ok {
- resolve_poly_spec(ast_context, n.key, m.key, poly_map);
- resolve_poly_spec(ast_context, n.value, m.value, poly_map);
- }
- case Call_Expr:
- if n, ok := call_node.derived.(Call_Expr); ok {
- resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
- resolve_poly_spec(ast_context, n.args, m.args, poly_map);
- }
- case Typeid_Type:
- if n, ok := call_node.derived.(Typeid_Type); ok {
- resolve_poly_spec(ast_context, n.specialization, m.specialization, poly_map);
- }
- case:
- log.error("Unhandled poly node kind: %T", m);
- }
+ switch v in node.derived {
+ case Ident:
+ return cast(^Expr)node;
+ case:
+ log.warnf("Unhandled poly to node kind %v", v);
+ }
+ return nil;
}
-resolve_type_comp_literal :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, current_symbol: index.Symbol, current_comp_lit: ^ast.Comp_Lit) -> (index.Symbol, bool) {
-
- if position_context.comp_lit == current_comp_lit {
- return current_symbol, true;
- }
+resolve_poly_spec_node :: proc (ast_context: ^AstContext, call_node: ^ast.Node, spec_node: ^ast.Node, poly_map: ^map[string]^ast.Expr) {
- for elem in current_comp_lit.elems {
+ /*
+ Note(Daniel, uncertain about the switch cases being enough or too little)
+ */
- if !position_in_node(elem, position_context.position) {
- continue;
- }
+ using ast;
- if field_value, ok := elem.derived.(ast.Field_Value); ok {
+ if call_node == nil || spec_node == nil {
+ return;
+ }
- if comp_lit, ok := field_value.value.derived.(ast.Comp_Lit); ok {
+ switch m in spec_node.derived {
+ case Bad_Expr:
+ case Ident:
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ case Poly_Type:
+ if expr := get_poly_node_to_expr(call_node); expr != nil {
+ poly_map[m.type.name] = expr;
+ }
+ case Ellipsis:
+ if n, ok := call_node.derived.(Ellipsis); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ }
+ case Tag_Expr:
+ if n, ok := call_node.derived.(Tag_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ }
+ case Unary_Expr:
+ if n, ok := call_node.derived.(Unary_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ }
+ case Binary_Expr:
+ if n, ok := call_node.derived.(Binary_Expr); ok {
+ resolve_poly_spec(ast_context, n.left, m.left, poly_map);
+ resolve_poly_spec(ast_context, n.right, m.right, poly_map);
+ }
+ case Paren_Expr:
+ if n, ok := call_node.derived.(Paren_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ }
+ case Selector_Expr:
+ if n, ok := call_node.derived.(Selector_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ resolve_poly_spec(ast_context, n.field, m.field, poly_map);
+ }
+ case Slice_Expr:
+ if n, ok := call_node.derived.(Slice_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ resolve_poly_spec(ast_context, n.low, m.low, poly_map);
+ resolve_poly_spec(ast_context, n.high, m.high, poly_map);
+ }
+ case Distinct_Type:
+ if n, ok := call_node.derived.(Distinct_Type); ok {
+ resolve_poly_spec(ast_context, n.type, m.type, poly_map);
+ }
+ case Proc_Type:
+ if n, ok := call_node.derived.(Proc_Type); ok {
+ resolve_poly_spec(ast_context, n.params, m.params, poly_map);
+ resolve_poly_spec(ast_context, n.results, m.results, poly_map);
+ }
+ case Pointer_Type:
+ if n, ok := call_node.derived.(Pointer_Type); ok {
+ resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
+ }
+ case Array_Type:
+ if n, ok := call_node.derived.(Array_Type); ok {
+ resolve_poly_spec(ast_context, n.len, m.len, poly_map);
+ resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
+ }
+ case Dynamic_Array_Type:
+ if n, ok := call_node.derived.(Dynamic_Array_Type); ok {
+ resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
+ }
+ case Struct_Type:
+ if n, ok := call_node.derived.(Struct_Type); ok {
+ resolve_poly_spec(ast_context, n.poly_params, m.poly_params, poly_map);
+ resolve_poly_spec(ast_context, n.align, m.align, poly_map);
+ resolve_poly_spec(ast_context, n.fields, m.fields, poly_map);
+ }
+ case Field:
+ if n, ok := call_node.derived.(Field); ok {
+ resolve_poly_spec(ast_context, n.names, m.names, poly_map);
+ resolve_poly_spec(ast_context, n.type, m.type, poly_map);
+ resolve_poly_spec(ast_context, n.default_value, m.default_value, poly_map);
+ }
+ case Field_List:
+ if n, ok := call_node.derived.(Field_List); ok {
+ resolve_poly_spec(ast_context, n.list, m.list, poly_map);
+ }
+ case Field_Value:
+ if n, ok := call_node.derived.(Field_Value); ok {
+ resolve_poly_spec(ast_context, n.field, m.field, poly_map);
+ resolve_poly_spec(ast_context, n.value, m.value, poly_map);
+ }
+ case Union_Type:
+ if n, ok := call_node.derived.(Union_Type); ok {
+ resolve_poly_spec(ast_context, n.poly_params, m.poly_params, poly_map);
+ resolve_poly_spec(ast_context, n.align, m.align, poly_map);
+ resolve_poly_spec(ast_context, n.variants, m.variants, poly_map);
+ }
+ case Enum_Type:
+ if n, ok := call_node.derived.(Enum_Type); ok {
+ resolve_poly_spec(ast_context, n.base_type, m.base_type, poly_map);
+ resolve_poly_spec(ast_context, n.fields, m.fields, poly_map);
+ }
+ case Bit_Set_Type:
+ if n, ok := call_node.derived.(Bit_Set_Type); ok {
+ resolve_poly_spec(ast_context, n.elem, m.elem, poly_map);
+ resolve_poly_spec(ast_context, n.underlying, m.underlying, poly_map);
+ }
+ case Map_Type:
+ if n, ok := call_node.derived.(Map_Type); ok {
+ resolve_poly_spec(ast_context, n.key, m.key, poly_map);
+ resolve_poly_spec(ast_context, n.value, m.value, poly_map);
+ }
+ case Call_Expr:
+ if n, ok := call_node.derived.(Call_Expr); ok {
+ resolve_poly_spec(ast_context, n.expr, m.expr, poly_map);
+ resolve_poly_spec(ast_context, n.args, m.args, poly_map);
+ }
+ case Typeid_Type:
+ if n, ok := call_node.derived.(Typeid_Type); ok {
+ resolve_poly_spec(ast_context, n.specialization, m.specialization, poly_map);
+ }
+ case:
+ log.error("Unhandled poly node kind: %T", m);
+ }
+}
- if s, ok := current_symbol.value.(index.SymbolStructValue); ok {
+resolve_type_comp_literal :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, current_symbol: index.Symbol, current_comp_lit: ^ast.Comp_Lit) -> (index.Symbol, bool) {
- for name, i in s.names {
+ if position_context.comp_lit == current_comp_lit {
+ return current_symbol, true;
+ }
- if name == field_value.field.derived.(ast.Ident).name {
+ for elem in current_comp_lit.elems {
- if symbol, ok := resolve_type_expression(ast_context, s.types[i]); ok {
- return resolve_type_comp_literal(ast_context, position_context, symbol, cast(^ast.Comp_Lit)field_value.value);
- }
+ if !position_in_node(elem, position_context.position) {
+ continue;
+ }
- }
+ if field_value, ok := elem.derived.(ast.Field_Value); ok {
- }
+ if comp_lit, ok := field_value.value.derived.(ast.Comp_Lit); ok {
- }
+ if s, ok := current_symbol.value.(index.SymbolStructValue); ok {
- }
+ for name, i in s.names {
- }
+ if name == field_value.field.derived.(ast.Ident).name {
- }
+ if symbol, ok := resolve_type_expression(ast_context, s.types[i]); ok {
+ return resolve_type_comp_literal(ast_context, position_context, symbol, cast(^ast.Comp_Lit)field_value.value);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
- return current_symbol, true;
+ return current_symbol, true;
}
resolve_generic_function :: proc {
- resolve_generic_function_ast,
- resolve_generic_function_symbol,
-};
-
-resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast.Field, results: []^ast.Field) -> (index.Symbol, bool) {
- using ast;
-
- if params == nil {
- return index.Symbol {}, false;
- }
-
- if results == nil {
- return index.Symbol {}, false;
- }
-
- if ast_context.call == nil {
- return index.Symbol {}, false;
- }
-
- call_expr := ast_context.call;
- poly_map := make(map[string]^Expr, 0, context.temp_allocator);
- i := 0;
-
+resolve_generic_function_ast,
+resolve_generic_function_symbol};
- for param in params {
+resolve_generic_function_symbol :: proc (ast_context: ^AstContext, params: []^ast.Field, results: []^ast.Field) -> (index.Symbol, bool) {
+ using ast;
- for name in param.names {
-
- if len(call_expr.args) <= i {
- break;
- }
-
- if poly, ok := name.derived.(Poly_Type); ok {
- poly_map[poly.type.name] = call_expr.args[i];
- }
-
- if param.type == nil {
- continue;
- }
-
- if poly, ok := param.type.derived.(Poly_Type); ok {
-
- if arg_eval, ok := resolve_type_expression(ast_context, call_expr.args[i]); ok {
+ if params == nil {
+ return index.Symbol {}, false;
+ }
- if value, ok := arg_eval.value.(index.SymbolGenericValue); ok {
- resolve_poly_spec_node(ast_context, value.expr, poly.specialization, &poly_map);
- }
+ if results == nil {
+ return index.Symbol {}, false;
+ }
- }
- }
+ if ast_context.call == nil {
+ return index.Symbol {}, false;
+ }
- i += 1;
- }
+ call_expr := ast_context.call;
+ poly_map := make(map[string]^Expr, 0, context.temp_allocator);
+ i := 0;
- }
+ for param in params {
- function_name := "";
- function_range: common.Range;
+ for name in param.names {
- if ident, ok := call_expr.expr.derived.(Ident); ok {
- function_name = ident.name;
- function_range = common.get_token_range(ident, ast_context.file.src);
- }
+ if len(call_expr.args) <= i {
+ break;
+ }
- else if selector, ok := call_expr.expr.derived.(Selector_Expr); ok {
- function_name = selector.field.name;
- function_range = common.get_token_range(selector, ast_context.file.src);
- }
+ if poly, ok := name.derived.(Poly_Type); ok {
+ poly_map[poly.type.name] = call_expr.args[i];
+ }
- else {
- log.debug("call expr expr could not be derived correctly");
- return index.Symbol {}, false;
- }
+ if param.type == nil {
+ continue;
+ }
- symbol := index.Symbol {
- range = function_range,
- type = .Function,
- name = function_name,
- };
+ if poly, ok := param.type.derived.(Poly_Type); ok {
- return_types := make([dynamic] ^ast.Field, context.temp_allocator);
+ if arg_eval, ok := resolve_type_expression(ast_context, call_expr.args[i]); ok {
- for result in results {
+ if value, ok := arg_eval.value.(index.SymbolGenericValue); ok {
+ resolve_poly_spec_node(ast_context, value.expr, poly.specialization, &poly_map);
+ }
+ }
+ }
- if result.type == nil {
- continue;
- }
+ i += 1;
+ }
+ }
- if ident, ok := result.type.derived.(Ident); ok {
- field := cast(^Field)index.clone_node(result, context.temp_allocator, nil);
+ function_name := "";
+ function_range: common.Range;
+
+ if ident, ok := call_expr.expr.derived.(Ident); ok {
+ function_name = ident.name;
+ function_range = common.get_token_range(ident, ast_context.file.src);
+ } else if selector, ok := call_expr.expr.derived.(Selector_Expr); ok {
+ function_name = selector.field.name;
+ function_range = common.get_token_range(selector, ast_context.file.src);
+ } else {
+ log.debug("call expr expr could not be derived correctly");
+ return index.Symbol {}, false;
+ }
- if m := &poly_map[ident.name]; m != nil {
- field.type = poly_map[ident.name];
- append(&return_types, field);
- }
+ symbol := index.Symbol {
+ range = function_range,
+ type = .Function,
+ name = function_name,
+ };
- else{
- return index.Symbol {}, false;
- }
+ return_types := make([dynamic]^ast.Field, context.temp_allocator);
- }
+ for result in results {
- }
+ if result.type == nil {
+ continue;
+ }
+ if ident, ok := result.type.derived.(Ident); ok {
+ field := cast(^Field)index.clone_node(result, context.temp_allocator, nil);
- symbol.value = index.SymbolProcedureValue {
- return_types = return_types[:],
- arg_types = params,
- };
+ if m := &poly_map[ident.name]; m != nil {
+ field.type = poly_map[ident.name];
+ append(&return_types, field);
+ } else {
+ return index.Symbol {}, false;
+ }
+ }
+ }
+ symbol.value = index.SymbolProcedureValue {
+ return_types = return_types[:],
+ arg_types = params,
+ };
- //log.infof("return %v", poly_map);
+ //log.infof("return %v", poly_map);
- return symbol, true;
+ return symbol, true;
}
-resolve_generic_function_ast :: proc(ast_context: ^AstContext, proc_lit: ast.Proc_Lit) -> (index.Symbol, bool) {
+resolve_generic_function_ast :: proc (ast_context: ^AstContext, proc_lit: ast.Proc_Lit) -> (index.Symbol, bool) {
- using ast;
+ using ast;
- if proc_lit.type.params == nil {
- return index.Symbol {}, false;
- }
+ if proc_lit.type.params == nil {
+ return index.Symbol {}, false;
+ }
- if proc_lit.type.results == nil {
- return index.Symbol {}, false;
- }
+ if proc_lit.type.results == nil {
+ return index.Symbol {}, false;
+ }
- if ast_context.call == nil {
- return index.Symbol {}, false;
- }
+ if ast_context.call == nil {
+ return index.Symbol {}, false;
+ }
- return resolve_generic_function_symbol(ast_context, proc_lit.type.params.list, proc_lit.type.results.list);
+ return resolve_generic_function_symbol(ast_context, proc_lit.type.params.list, proc_lit.type.results.list);
}
-
/*
- Figure out which function the call expression is using out of the list from proc group
- */
-resolve_function_overload :: proc(ast_context: ^AstContext, group: ast.Proc_Group) -> (index.Symbol, bool) {
-
- using ast;
-
- //log.info("overload");
-
- if ast_context.call == nil {
- //log.info("no call");
- return index.Symbol {}, false;
- }
-
- call_expr := ast_context.call;
-
- for arg_expr in group.args {
+ Figure out which function the call expression is using out of the list from proc group
+*/
+resolve_function_overload :: proc (ast_context: ^AstContext, group: ast.Proc_Group) -> (index.Symbol, bool) {
- next_fn: if f, ok := resolve_type_expression(ast_context, arg_expr); ok {
+ using ast;
- if procedure, ok := f.value.(index.SymbolProcedureValue); ok {
+ //log.info("overload");
- if len(procedure.arg_types) < len(call_expr.args) {
- continue;
- }
+ if ast_context.call == nil {
+ //log.info("no call");
+ return index.Symbol {}, false;
+ }
- for arg, i in call_expr.args {
+ call_expr := ast_context.call;
- if eval_call_expr, ok := resolve_type_expression(ast_context, arg); ok {
+ for arg_expr in group.args {
- #partial switch v in eval_call_expr.value {
- case index.SymbolProcedureValue:
- case index.SymbolGenericValue:
- if !common.node_equal(v.expr, procedure.arg_types[i].type) {
- break next_fn;
- }
- case index.SymbolStructValue:
- }
+ next_fn: if f, ok := resolve_type_expression(ast_context, arg_expr); ok {
- }
+ if procedure, ok := f.value.(index.SymbolProcedureValue); ok {
- else {
- //log.debug("Failed to resolve call expr");
- return index.Symbol {}, false;
- }
- }
+ if len(procedure.arg_types) < len(call_expr.args) {
+ continue;
+ }
- //log.debugf("return overload %v", f);
- return f, true;
- }
+ for arg, i in call_expr.args {
- }
+ if eval_call_expr, ok := resolve_type_expression(ast_context, arg); ok {
- }
+ #partial switch v in eval_call_expr.value {
+ case index.SymbolProcedureValue:
+ case index.SymbolGenericValue:
+ if !common.node_equal(v.expr, procedure.arg_types[i].type) {
+ break next_fn;
+ }
+ case index.SymbolStructValue:
+ }
+ } else {
+ //log.debug("Failed to resolve call expr");
+ return index.Symbol {}, false;
+ }
+ }
+ //log.debugf("return overload %v", f);
+ return f, true;
+ }
+ }
+ }
- return index.Symbol {}, false;
+ return index.Symbol {}, false;
}
-resolve_basic_lit :: proc(ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -> (index.Symbol, bool) {
+resolve_basic_lit :: proc (ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -> (index.Symbol, bool) {
- /*
- This is temporary, since basic lit is untyped, but either way it's going to be an ident representing a keyword.
+ /*
+ This is temporary, since basic lit is untyped, but either way it's going to be an ident representing a keyword.
- Could perhaps name them "$integer", "$float", etc.
- */
+ Could perhaps name them "$integer", "$float", etc.
+ */
- ident := index.new_type(ast.Ident, basic_lit.pos, basic_lit.end, context.temp_allocator);
+ ident := index.new_type(ast.Ident, basic_lit.pos, basic_lit.end, context.temp_allocator);
- symbol := index.Symbol {
- type = .Keyword,
- };
-
- if v, ok := strconv.parse_bool(basic_lit.tok.text); ok {
- ident.name = bool_lit;
- }
-
- else if v, ok := strconv.parse_int(basic_lit.tok.text); ok {
- ident.name = int_lit;
- }
-
- else {
- ident.name = string_lit;
- }
+ symbol := index.Symbol {
+ type = .Keyword
+ };
- symbol.value = index.SymbolGenericValue {
- expr = ident,
- };
+ if v, ok := strconv.parse_bool(basic_lit.tok.text); ok {
+ ident.name = bool_lit;
+ } else if v, ok := strconv.parse_int(basic_lit.tok.text); ok {
+ ident.name = int_lit;
+ } else {
+ ident.name = string_lit;
+ }
- return symbol, true;
-}
+ symbol.value = index.SymbolGenericValue {
+ expr = ident
+ };
-resolve_type_expression :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
-
- if node == nil {
- return {}, false;
- }
-
- using ast;
-
- switch v in node.derived {
- case Proc_Type:
- return make_symbol_procedure_from_ast(ast_context, node, v, ast_context.field_name), true;
- case Ident:
- return resolve_type_identifier(ast_context, v);
- case Basic_Lit:
- return resolve_basic_lit(ast_context, v);
- case Type_Cast:
- return resolve_type_expression(ast_context, v.type);
- case Auto_Cast:
- return resolve_type_expression(ast_context, v.expr);
- case Unary_Expr:
- if v.op.kind == .And {
- return resolve_type_expression(ast_context, make_pointer_ast(v.expr));
- }
-
- else {
- return resolve_type_expression(ast_context, v.expr);
- }
- case Deref_Expr:
- return resolve_type_expression(ast_context, v.expr);
- case Paren_Expr:
- return resolve_type_expression(ast_context, v.expr);
- case Slice_Expr:
- return resolve_type_expression(ast_context, v.expr);
- case Tag_Expr:
- return resolve_type_expression(ast_context, v.expr);
- case Helper_Type:
- return resolve_type_expression(ast_context, v.type);
- case Ellipsis:
- return resolve_type_expression(ast_context, v.expr);
- case Implicit:
- ident := index.new_type(Ident, v.node.pos, v.node.end, context.temp_allocator);
- ident.name = v.tok.text;
- return resolve_type_identifier(ast_context, ident^);
- case Type_Assertion:
- return resolve_type_expression(ast_context, v.type);
- case Proc_Lit:
- if v.type.results != nil {
- if len(v.type.results.list) == 1 {
- return resolve_type_expression(ast_context, v.type.results.list[0].type);
- }
- }
- case Pointer_Type:
-
- /*
- Add flag to not pull out a type from a pointer for function overloading.
- */
-
- if v2, ok := v.elem.derived.(ast.Pointer_Type); !ok {
- return resolve_type_expression(ast_context, v.elem);
- }
-
- else if v2, ok := v.elem.derived.(ast.Type_Assertion); !ok {
- return resolve_type_expression(ast_context, v.elem);
- }
-
- else {
- return make_symbol_generic_from_ast(ast_context, node), true;
- }
-
- case Index_Expr:
- indexed, ok := resolve_type_expression(ast_context, v.expr);
-
- if generic, ok := indexed.value.(index.SymbolGenericValue); ok {
-
- switch c in generic.expr.derived {
- case Array_Type:
- return resolve_type_expression(ast_context, c.elem);
- case Dynamic_Array_Type:
- return resolve_type_expression(ast_context, c.elem);
- case Map_Type:
- return resolve_type_expression(ast_context, c.value);
- }
-
- }
-
- return index.Symbol {}, false;
- case Call_Expr:
- ast_context.call = cast(^Call_Expr)node;
- return resolve_type_expression(ast_context, v.expr);
- case Implicit_Selector_Expr:
- return index.Symbol {}, false;
- case Selector_Call_Expr:
- return resolve_type_expression(ast_context, v.expr);
- case Selector_Expr:
-
- if selector, ok := resolve_type_expression(ast_context, v.expr); ok {
-
- ast_context.use_locals = false;
-
- #partial switch s in selector.value {
- case index.SymbolProcedureValue:
-
- if len(s.return_types) == 1 {
- selector_expr := index.new_type(ast.Selector_Expr, s.return_types[0].node.pos, s.return_types[0].node.end, context.temp_allocator);
- selector_expr.expr = s.return_types[0].type;
- selector_expr.field = v.field;
- return resolve_type_expression(ast_context, selector_expr);
- }
- case index.SymbolStructValue:
- if selector.pkg != "" {
- ast_context.current_package = selector.pkg;
- }
-
- else {
- ast_context.current_package = ast_context.document_package;
- }
-
- for name, i in s.names {
- if v.field != nil && name == v.field.name {
- ast_context.field_name = v.field.name;
- return resolve_type_expression(ast_context, s.types[i]);
- }
- }
- case index.SymbolPackageValue:
-
- ast_context.current_package = selector.pkg;
-
- if v.field != nil {
- return resolve_symbol_return(ast_context, index.lookup(v.field.name, selector.pkg));
- }
-
- else {
- return index.Symbol {}, false;
- }
- }
- }
-
- else {
- return index.Symbol {}, false;
- }
- case:
- log.warnf("default node kind, resolve_type_expression: %T", v);
-
- if v == nil {
- return {}, false;
- }
-
- return make_symbol_generic_from_ast(ast_context, node), true;
- }
-
- return index.Symbol {}, false;
+ return symbol, true;
}
-store_local :: proc(ast_context: ^AstContext, expr: ^ast.Expr, offset: int, name: string) {
+resolve_type_expression :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
- local_stack := &ast_context.locals[name];
+ if node == nil {
+ return {}, false;
+ }
- if local_stack == nil {
- ast_context.locals[name] = make([dynamic] DocumentLocal, context.temp_allocator);
- local_stack = &ast_context.locals[name];
- }
+ using ast;
+
+ switch v in node.derived {
+ case Proc_Type:
+ return make_symbol_procedure_from_ast(ast_context, node, v, ast_context.field_name), true;
+ case Ident:
+ return resolve_type_identifier(ast_context, v);
+ case Basic_Lit:
+ return resolve_basic_lit(ast_context, v);
+ case Type_Cast:
+ return resolve_type_expression(ast_context, v.type);
+ case Auto_Cast:
+ return resolve_type_expression(ast_context, v.expr);
+ case Unary_Expr:
+ if v.op.kind == .And {
+ return resolve_type_expression(ast_context, make_pointer_ast(v.expr));
+ } else {
+ return resolve_type_expression(ast_context, v.expr);
+ }
+ case Deref_Expr:
+ return resolve_type_expression(ast_context, v.expr);
+ case Paren_Expr:
+ return resolve_type_expression(ast_context, v.expr);
+ case Slice_Expr:
+ return resolve_type_expression(ast_context, v.expr);
+ case Tag_Expr:
+ return resolve_type_expression(ast_context, v.expr);
+ case Helper_Type:
+ return resolve_type_expression(ast_context, v.type);
+ case Ellipsis:
+ return resolve_type_expression(ast_context, v.expr);
+ case Implicit:
+ ident := index.new_type(Ident, v.node.pos, v.node.end, context.temp_allocator);
+ ident.name = v.tok.text;
+ return resolve_type_identifier(ast_context, ident^);
+ case Type_Assertion:
+ return resolve_type_expression(ast_context, v.type);
+ case Proc_Lit:
+ if v.type.results != nil {
+ if len(v.type.results.list) == 1 {
+ return resolve_type_expression(ast_context, v.type.results.list[0].type);
+ }
+ }
+ case Pointer_Type:
+
+ /*
+ Add flag to not pull out a type from a pointer for function overloading.
+ */
+
+ if v2, ok := v.elem.derived.(ast.Pointer_Type); !ok {
+ return resolve_type_expression(ast_context, v.elem);
+ } else if v2, ok := v.elem.derived.(ast.Type_Assertion); !ok {
+ return resolve_type_expression(ast_context, v.elem);
+ } else {
+ return make_symbol_generic_from_ast(ast_context, node), true;
+ }
+
+ case Index_Expr:
+ indexed, ok := resolve_type_expression(ast_context, v.expr);
+
+ if generic, ok := indexed.value.(index.SymbolGenericValue); ok {
+
+ switch c in generic.expr.derived {
+ case Array_Type:
+ return resolve_type_expression(ast_context, c.elem);
+ case Dynamic_Array_Type:
+ return resolve_type_expression(ast_context, c.elem);
+ case Map_Type:
+ return resolve_type_expression(ast_context, c.value);
+ }
+ }
+
+ return index.Symbol {}, false;
+ case Call_Expr:
+ ast_context.call = cast(^Call_Expr)node;
+ return resolve_type_expression(ast_context, v.expr);
+ case Implicit_Selector_Expr:
+ return index.Symbol {}, false;
+ case Selector_Call_Expr:
+ return resolve_type_expression(ast_context, v.expr);
+ case Selector_Expr:
+
+ if selector, ok := resolve_type_expression(ast_context, v.expr); ok {
+
+ ast_context.use_locals = false;
+
+ #partial switch s in selector.value {
+ case index.SymbolProcedureValue:
+
+ if len(s.return_types) == 1 {
+ selector_expr := index.new_type(ast.Selector_Expr, s.return_types[0].node.pos, s.return_types[0].node.end, context.temp_allocator);
+ selector_expr.expr = s.return_types[0].type;
+ selector_expr.field = v.field;
+ return resolve_type_expression(ast_context, selector_expr);
+ }
+ case index.SymbolStructValue:
+ if selector.pkg != "" {
+ ast_context.current_package = selector.pkg;
+ } else {
+ ast_context.current_package = ast_context.document_package;
+ }
+
+ for name, i in s.names {
+ if v.field != nil && name == v.field.name {
+ ast_context.field_name = v.field.name;
+ return resolve_type_expression(ast_context, s.types[i]);
+ }
+ }
+ case index.SymbolPackageValue:
+
+ ast_context.current_package = selector.pkg;
+
+ if v.field != nil {
+ return resolve_symbol_return(ast_context, index.lookup(v.field.name, selector.pkg));
+ } else {
+ return index.Symbol {}, false;
+ }
+ }
+ } else {
+ return index.Symbol {}, false;
+ }
+ case:
+ log.warnf("default node kind, resolve_type_expression: %T", v);
+
+ if v == nil {
+ return {}, false;
+ }
+
+ return make_symbol_generic_from_ast(ast_context, node), true;
+ }
- append(local_stack, DocumentLocal { expr = expr, offset = offset });
+ return index.Symbol {}, false;
}
-get_local :: proc(ast_context: ^AstContext, offset: int, name: string) -> ^ast.Expr {
-
- previous := 0;
+store_local :: proc (ast_context: ^AstContext, expr: ^ast.Expr, offset: int, name: string) {
- //is the local we are getting being declared?
- if ast_context.value_decl != nil {
+ local_stack := &ast_context.locals[name];
- for value_decl_name in ast_context.value_decl.names {
+ if local_stack == nil {
+ ast_context.locals[name] = make([dynamic]DocumentLocal, context.temp_allocator);
+ local_stack = &ast_context.locals[name];
+ }
- if ident, ok := value_decl_name.derived.(ast.Ident); ok {
+ append(local_stack, DocumentLocal {expr = expr, offset = offset});
+}
- if ident.name == name {
- previous = 1;
- break;
- }
- }
+get_local :: proc (ast_context: ^AstContext, offset: int, name: string) -> ^ast.Expr {
- }
+ previous := 0;
+ //is the local we are getting being declared?
+ if ast_context.value_decl != nil {
- }
+ for value_decl_name in ast_context.value_decl.names {
- if local_stack, ok := ast_context.locals[name]; ok {
+ if ident, ok := value_decl_name.derived.(ast.Ident); ok {
- for i := len(local_stack)-1; i >= 0; i -= 1 {
+ if ident.name == name {
+ previous = 1;
+ break;
+ }
+ }
+ }
+ }
- if local_stack[i].offset <= offset {
- return local_stack[max(0, i - previous)].expr;
- }
+ if local_stack, ok := ast_context.locals[name]; ok {
- }
+ for i := len(local_stack) - 1; i >= 0; i -= 1 {
- }
+ if local_stack[i].offset <= offset {
+ return local_stack[max(0, i - previous)].expr;
+ }
+ }
+ }
- return nil;
+ return nil;
}
/*
- Function recusively goes through the identifier until it hits a struct, enum, procedure literals, since you can
- have chained variable declarations. ie. a := foo { test = 2}; b := a; c := b;
- */
-resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
-
- using ast;
-
- if pkg, ok := ast_context.in_package[node.name]; ok {
- ast_context.current_package = pkg;
- }
-
- if _, ok := ast_context.parameters[node.name]; ok {
- for imp in ast_context.imports {
-
- if strings.compare(imp.base, node.name) == 0 {
-
- symbol := index.Symbol {
- type = .Package,
- pkg = imp.name,
- value = index.SymbolPackageValue {
- }
- };
-
- return symbol, true;
- }
-
- }
- }
-
- //note(Daniel, if global and local ends up being 100% same just make a function that takes the map)
- if local := get_local(ast_context, node.pos.offset, node.name); local != nil && ast_context.use_locals {
-
- switch v in local.derived {
- case Ident:
-
- if node.name == v.name {
- break;
- }
-
- return resolve_type_identifier(ast_context, v);
- case Union_Type:
- return make_symbol_union_from_ast(ast_context, v, node), true;
- case Enum_Type:
- return make_symbol_enum_from_ast(ast_context, v, node), true;
- case Struct_Type:
- return make_symbol_struct_from_ast(ast_context, v, node), true;
- case Bit_Set_Type:
- return make_symbol_bitset_from_ast(ast_context, v, node), true;
- case Proc_Lit:
- if !v.type.generic {
- return make_symbol_procedure_from_ast(ast_context, local, v.type^, node.name), true;
- }
- else {
- return resolve_generic_function(ast_context, v);
- }
- case Proc_Group:
- return resolve_function_overload(ast_context, v);
- case Array_Type:
- return make_symbol_generic_from_ast(ast_context, local), true;
- case Dynamic_Array_Type:
- return make_symbol_generic_from_ast(ast_context, local), true;
- case Call_Expr:
- return resolve_type_expression(ast_context, local);
- case:
- log.warnf("default type node kind: %T", v);
- return resolve_type_expression(ast_context, local);
- //return make_symbol_generic_from_ast(ast_context, local), true;
- }
- }
-
- else if global, ok := ast_context.globals[node.name]; ast_context.use_globals && ok {
-
- switch v in global.derived {
- case Ident:
-
- if node.name == v.name {
- break;
- }
-
- return resolve_type_identifier(ast_context, v);
- case Struct_Type:
- return make_symbol_struct_from_ast(ast_context, v, node), true;
- case Bit_Set_Type:
- return make_symbol_bitset_from_ast(ast_context, v, node), true;
- case Union_Type:
- return make_symbol_union_from_ast(ast_context, v, node), true;
- case Enum_Type:
- return make_symbol_enum_from_ast(ast_context, v, node), true;
- case Proc_Lit:
- if !v.type.generic {
- return make_symbol_procedure_from_ast(ast_context, global, v.type^, node.name), true;
- }
- else {
- return resolve_generic_function(ast_context, v);
- }
- case Proc_Group:
- return resolve_function_overload(ast_context, v);
- case Array_Type:
- return make_symbol_generic_from_ast(ast_context, global), true;
- case Dynamic_Array_Type:
- return make_symbol_generic_from_ast(ast_context, global), true;
- case Call_Expr:
- return resolve_type_expression(ast_context, global);
- case:
- log.warnf("default type node kind: %T", v);
- return resolve_type_expression(ast_context, global);
- }
-
- }
-
- //if there are more of these variables that hard builtin, move them to the indexer
- else if node.name == "context" {
- return index.lookup("Context", ast_context.current_package);
- }
- //keywords
- else if v, ok := common.keyword_map[node.name]; ok {
-
- ident := index.new_type(Ident, node.pos, node.end, context.temp_allocator);
- ident.name = node.name;
-
- symbol := index.Symbol {
- type = .Keyword,
- signature = node.name,
- pkg = ast_context.current_package,
- value = index.SymbolGenericValue {
- expr = ident,
- },
- };
-
- return symbol, true;
- }
-
- else {
-
- //right now we replace the package ident with the absolute directory name, so it should have '/' which is not a valid ident character
- if strings.contains(node.name, "/") {
-
- symbol := index.Symbol {
- type = .Package,
- pkg = node.name,
- value = index.SymbolPackageValue {
- }
- };
-
- return symbol, true;
-
- }
-
- //part of the ast so we check the imports of the document
- else {
-
- for imp in ast_context.imports {
-
- if strings.compare(imp.base, node.name) == 0 {
-
- symbol := index.Symbol {
- type = .Package,
- pkg = imp.name,
- value = index.SymbolPackageValue {
- }
- };
-
- return symbol, true;
- }
-
- }
-
- }
-
- //last option is to check the index
-
- if symbol, ok := index.lookup(node.name, ast_context.current_package); ok {
- return resolve_symbol_return(ast_context, symbol);
- }
-
- for u in ast_context.usings {
-
- //TODO(Daniel, make into a map, not really required for performance but looks nicer)
- for imp in ast_context.imports {
-
- if strings.compare(imp.base, u) == 0 {
-
- if symbol, ok := index.lookup(node.name, imp.name); ok {
- return resolve_symbol_return(ast_context, symbol);
- }
- }
-
- }
- }
-
- //TODO(daniel, index can be used on identifiers if using is in the function scope)
- }
-
- return index.Symbol {}, false;
-}
+ Function recusively goes through the identifier until it hits a struct, enum, procedure literals, since you can
+ have chained variable declarations. ie. a := foo { test = 2}; b := a; c := b;
+*/
+resolve_type_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
-resolve_ident_is_variable :: proc(ast_context: ^AstContext, node: ast.Ident) -> bool {
+ using ast;
- if v, ok := ast_context.variables[node.name]; ok && v {
- return true;
- }
+ if pkg, ok := ast_context.in_package[node.name]; ok {
+ ast_context.current_package = pkg;
+ }
- if symbol, ok := index.lookup(node.name, ast_context.current_package); ok {
- return symbol.type == .Variable;
- }
+ if _, ok := ast_context.parameters[node.name]; ok {
+ for imp in ast_context.imports {
- return false;
-}
+ if strings.compare(imp.base, node.name) == 0 {
-resolve_ident_is_package :: proc(ast_context: ^AstContext, node: ast.Ident) -> bool {
+ symbol := index.Symbol {
+ type = .Package,
+ pkg = imp.name,
+ value = index.SymbolPackageValue {},
+ };
- if strings.contains(node.name, "/") {
- return true;
- }
+ return symbol, true;
+ }
+ }
+ }
- else {
+ //note(Daniel, if global and local ends up being 100% same just make a function that takes the map)
+ if local := get_local(ast_context, node.pos.offset, node.name); local != nil && ast_context.use_locals {
+
+ switch v in local.derived {
+ case Ident:
+
+ if node.name == v.name {
+ break;
+ }
+
+ return resolve_type_identifier(ast_context, v);
+ case Union_Type:
+ return make_symbol_union_from_ast(ast_context, v, node), true;
+ case Enum_Type:
+ return make_symbol_enum_from_ast(ast_context, v, node), true;
+ case Struct_Type:
+ return make_symbol_struct_from_ast(ast_context, v, node), true;
+ case Bit_Set_Type:
+ return make_symbol_bitset_from_ast(ast_context, v, node), true;
+ case Proc_Lit:
+ if !v.type.generic {
+ return make_symbol_procedure_from_ast(ast_context, local, v.type^, node.name), true;
+ } else {
+ return resolve_generic_function(ast_context, v);
+ }
+ case Proc_Group:
+ return resolve_function_overload(ast_context, v);
+ case Array_Type:
+ return make_symbol_generic_from_ast(ast_context, local), true;
+ case Dynamic_Array_Type:
+ return make_symbol_generic_from_ast(ast_context, local), true;
+ case Call_Expr:
+ return resolve_type_expression(ast_context, local);
+ case:
+ log.warnf("default type node kind: %T", v);
+ return resolve_type_expression(ast_context, local);
+ //return make_symbol_generic_from_ast(ast_context, local), true;
+ }
+ } else if global, ok := ast_context.globals[node.name]; ast_context.use_globals && ok {
+
+ switch v in global.derived {
+ case Ident:
+
+ if node.name == v.name {
+ break;
+ }
+
+ return resolve_type_identifier(ast_context, v);
+ case Struct_Type:
+ return make_symbol_struct_from_ast(ast_context, v, node), true;
+ case Bit_Set_Type:
+ return make_symbol_bitset_from_ast(ast_context, v, node), true;
+ case Union_Type:
+ return make_symbol_union_from_ast(ast_context, v, node), true;
+ case Enum_Type:
+ return make_symbol_enum_from_ast(ast_context, v, node), true;
+ case Proc_Lit:
+ if !v.type.generic {
+ return make_symbol_procedure_from_ast(ast_context, global, v.type^, node.name), true;
+ } else {
+ return resolve_generic_function(ast_context, v);
+ }
+ case Proc_Group:
+ return resolve_function_overload(ast_context, v);
+ case Array_Type:
+ return make_symbol_generic_from_ast(ast_context, global), true;
+ case Dynamic_Array_Type:
+ return make_symbol_generic_from_ast(ast_context, global), true;
+ case Call_Expr:
+ return resolve_type_expression(ast_context, global);
+ case:
+ log.warnf("default type node kind: %T", v);
+ return resolve_type_expression(ast_context, global);
+ }
+ } else
+
+ //if there are more of these variables that hard builtin, move them to the indexer
+ if node.name == "context" {
+ return index.lookup("Context", ast_context.current_package);
+ } else
+ //keywords
+ if v, ok := common.keyword_map[node.name]; ok {
+
+ ident := index.new_type(Ident, node.pos, node.end, context.temp_allocator);
+ ident.name = node.name;
+
+ symbol := index.Symbol {
+ type = .Keyword,
+ signature = node.name,
+ pkg = ast_context.current_package,
+ value = index.SymbolGenericValue {
+ expr = ident
+ },
+ };
+
+ return symbol, true;
+ } else {
+
+ //right now we replace the package ident with the absolute directory name, so it should have '/' which is not a valid ident character
+ if strings.contains(node.name, "/") {
+
+ symbol := index.Symbol {
+ type = .Package,
+ pkg = node.name,
+ value = index.SymbolPackageValue {},
+ };
+
+ return symbol, true;
+ } else
+
+ //part of the ast so we check the imports of the document
+ {
+
+ for imp in ast_context.imports {
+
+ if strings.compare(imp.base, node.name) == 0 {
+
+ symbol := index.Symbol {
+ type = .Package,
+ pkg = imp.name,
+ value = index.SymbolPackageValue {},
+ };
+
+ return symbol, true;
+ }
+ }
+ }
+
+ //last option is to check the index
+
+ if symbol, ok := index.lookup(node.name, ast_context.current_package); ok {
+ return resolve_symbol_return(ast_context, symbol);
+ }
+
+ for u in ast_context.usings {
+
+ //TODO(Daniel, make into a map, not really required for performance but looks nicer)
+ for imp in ast_context.imports {
+
+ if strings.compare(imp.base, u) == 0 {
+
+ if symbol, ok := index.lookup(node.name, imp.name); ok {
+ return resolve_symbol_return(ast_context, symbol);
+ }
+ }
+ }
+ }
+
+ //TODO(daniel, index can be used on identifiers if using is in the function scope)
+ }
- for imp in ast_context.imports {
+ return index.Symbol {}, false;
+}
- if imp.base == node.name {
- return true;
- }
+resolve_ident_is_variable :: proc (ast_context: ^AstContext, node: ast.Ident) -> bool {
- }
+ if v, ok := ast_context.variables[node.name]; ok && v {
+ return true;
+ }
- }
+ if symbol, ok := index.lookup(node.name, ast_context.current_package); ok {
+ return symbol.type == .Variable;
+ }
- return false;
+ return false;
}
-expand_struct_usings :: proc(ast_context: ^AstContext, symbol: index.Symbol, value: index.SymbolStructValue) -> index.SymbolStructValue {
+resolve_ident_is_package :: proc (ast_context: ^AstContext, node: ast.Ident) -> bool {
- //ERROR no completion or over on names and types - generic resolve error
- names := slice.to_dynamic(value.names, context.temp_allocator);
- types := slice.to_dynamic(value.types, context.temp_allocator);
+ if strings.contains(node.name, "/") {
+ return true;
+ } else {
- //ERROR no hover on k and v(completion works)
- for k, v in value.usings {
+ for imp in ast_context.imports {
- ast_context.current_package = symbol.pkg;
+ if imp.base == node.name {
+ return true;
+ }
+ }
+ }
- field_expr: ^ast.Expr;
+ return false;
+}
- for name, i in value.names {
+expand_struct_usings :: proc (ast_context: ^AstContext, symbol: index.Symbol, value: index.SymbolStructValue) -> index.SymbolStructValue {
- if name == k && v {
- field_expr = value.types[i];
- }
+ //ERROR no completion or over on names and types - generic resolve error
+ names := slice.to_dynamic(value.names, context.temp_allocator);
+ types := slice.to_dynamic(value.types, context.temp_allocator);
- }
+ //ERROR no hover on k and v(completion works)
+ for k, v in value.usings {
- if field_expr == nil {
- continue;
- }
+ ast_context.current_package = symbol.pkg;
- if s, ok := resolve_type_expression(ast_context, field_expr); ok {
+ field_expr: ^ast.Expr;
- if struct_value, ok := s.value.(index.SymbolStructValue); ok {
+ for name, i in value.names {
- for name in struct_value.names {
- append(&names, name);
- }
+ if name == k && v {
+ field_expr = value.types[i];
+ }
+ }
- for type in struct_value.types {
- append(&types, type);
- }
+ if field_expr == nil {
+ continue;
+ }
- }
+ if s, ok := resolve_type_expression(ast_context, field_expr); ok {
- }
+ if struct_value, ok := s.value.(index.SymbolStructValue); ok {
- }
+ for name in struct_value.names {
+ append(&names, name);
+ }
- return {
- names = names[:],
- types = types[:],
- };
+ for type in struct_value.types {
+ append(&types, type);
+ }
+ }
+ }
+ }
+ return {
+ names = names[:],
+ types = types[:],
+ };
}
-resolve_symbol_return :: proc(ast_context: ^AstContext, symbol: index.Symbol, ok := true) -> (index.Symbol, bool) {
-
- if !ok {
- return symbol, ok;
- }
-
- #partial switch v in symbol.value {
- case index.SymbolProcedureGroupValue:
- if symbol, ok := resolve_function_overload(ast_context, v.group.derived.(ast.Proc_Group)); ok {
- return symbol, true;
- }
- else {
- return symbol, false;
- }
- case index.SymbolProcedureValue:
- if v.generic {
- return resolve_generic_function_symbol(ast_context, v.arg_types, v.return_types);
- }
- else {
- return symbol, true;
- }
- case index.SymbolStructValue:
-
- //expand the types and names from the using - can't be done while indexing without complicating everything(this also saves memory)
- if len(v.usings) > 0 {
- expanded := symbol;
- expanded.value = expand_struct_usings(ast_context, symbol, v);
- return expanded, true;
- }
- else {
- return symbol, true;
- }
-
- case index.SymbolGenericValue:
- return resolve_type_expression(ast_context, v.expr);
- }
-
- return symbol, true;
-}
+resolve_symbol_return :: proc (ast_context: ^AstContext, symbol: index.Symbol, ok := true) -> (index.Symbol, bool) {
-resolve_location_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
+ if !ok {
+ return symbol, ok;
+ }
- symbol: index.Symbol;
+ #partial switch v in symbol.value {
+ case index.SymbolProcedureGroupValue:
+ if symbol, ok := resolve_function_overload(ast_context, v.group.derived.(ast.Proc_Group)); ok {
+ return symbol, true;
+ } else {
+ return symbol, false;
+ }
+ case index.SymbolProcedureValue:
+ if v.generic {
+ return resolve_generic_function_symbol(ast_context, v.arg_types, v.return_types);
+ } else {
+ return symbol, true;
+ }
+ case index.SymbolStructValue:
+
+ //expand the types and names from the using - can't be done while indexing without complicating everything(this also saves memory)
+ if len(v.usings) > 0 {
+ expanded := symbol;
+ expanded.value = expand_struct_usings(ast_context, symbol, v);
+ return expanded, true;
+ } else {
+ return symbol, true;
+ }
+
+ case index.SymbolGenericValue:
+ return resolve_type_expression(ast_context, v.expr);
+ }
- if local := get_local(ast_context, node.pos.offset, node.name); local != nil {
- symbol.range = common.get_token_range(get_local(ast_context, node.pos.offset, node.name), ast_context.file.src);
- return symbol, true;
- }
+ return symbol, true;
+}
- else if global, ok := ast_context.globals[node.name]; ok {
- symbol.range = common.get_token_range(global, ast_context.file.src);
- return symbol, true;
- }
+resolve_location_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
+ symbol: index.Symbol;
- return index.lookup(node.name, ast_context.document_package);
-}
+ if local := get_local(ast_context, node.pos.offset, node.name); local != nil {
+ symbol.range = common.get_token_range(get_local(ast_context, node.pos.offset, node.name), ast_context.file.src);
+ return symbol, true;
+ } else if global, ok := ast_context.globals[node.name]; ok {
+ symbol.range = common.get_token_range(global, ast_context.file.src);
+ return symbol, true;
+ }
-make_pointer_ast :: proc(elem: ^ast.Expr) -> ^ast.Pointer_Type {
- pointer := index.new_type(ast.Pointer_Type, elem.pos, elem.end, context.temp_allocator);
- pointer.elem = elem;
- return pointer;
+ return index.lookup(node.name, ast_context.document_package);
}
-make_bool_ast :: proc() -> ^ast.Ident {
- ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
- ident.name = bool_lit;
- return ident;
+make_pointer_ast :: proc (elem: ^ast.Expr) -> ^ast.Pointer_Type {
+ pointer := index.new_type(ast.Pointer_Type, elem.pos, elem.end, context.temp_allocator);
+ pointer.elem = elem;
+ return pointer;
}
-make_int_ast :: proc() -> ^ast.Ident {
- ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
- ident.name = int_lit;
- return ident;
+make_bool_ast :: proc () -> ^ast.Ident {
+ ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
+ ident.name = bool_lit;
+ return ident;
}
-get_package_from_node :: proc(node: ast.Node) -> string {
- slashed, _ := filepath.to_slash(node.pos.file, context.temp_allocator);
- ret := strings.to_lower(path.dir(slashed, context.temp_allocator), context.temp_allocator);
- return ret;
+make_int_ast :: proc () -> ^ast.Ident {
+ ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
+ ident.name = int_lit;
+ return ident;
}
-get_using_packages :: proc(ast_context: ^AstContext) -> [] string {
-
- usings := make([] string, len(ast_context.usings), context.temp_allocator);
+get_package_from_node :: proc (node: ast.Node) -> string {
+ slashed, _ := filepath.to_slash(node.pos.file, context.temp_allocator);
+ ret := strings.to_lower(path.dir(slashed, context.temp_allocator), context.temp_allocator);
+ return ret;
+}
- if len(ast_context.usings) == 0 {
- return usings;
- }
+get_using_packages :: proc (ast_context: ^AstContext) -> []string {
- //probably map instead
- for u, i in ast_context.usings {
+ usings := make([]string, len(ast_context.usings), context.temp_allocator);
- for imp in ast_context.imports {
+ if len(ast_context.usings) == 0 {
+ return usings;
+ }
- if strings.compare(imp.base, u) == 0 {
- usings[i] = imp.name;
- }
+ //probably map instead
+ for u, i in ast_context.usings {
- }
+ for imp in ast_context.imports {
- }
+ if strings.compare(imp.base, u) == 0 {
+ usings[i] = imp.name;
+ }
+ }
+ }
- return usings;
+ return usings;
}
-make_symbol_procedure_from_ast :: proc(ast_context: ^AstContext, n: ^ast.Node, v: ast.Proc_Type, name: string) -> index.Symbol {
+make_symbol_procedure_from_ast :: proc (ast_context: ^AstContext, n: ^ast.Node, v: ast.Proc_Type, name: string) -> index.Symbol {
- symbol := index.Symbol {
- range = common.get_token_range(n^, ast_context.file.src),
- type = .Function,
- pkg = get_package_from_node(n^),
- };
-
- symbol.name = name;
+ symbol := index.Symbol {
+ range = common.get_token_range(n^, ast_context.file.src),
+ type = .Function,
+ pkg = get_package_from_node(n^),
+ };
- return_types := make([dynamic] ^ast.Field, context.temp_allocator);
- arg_types := make([dynamic] ^ast.Field, context.temp_allocator);
+ symbol.name = name;
- if v.results != nil {
+ return_types := make([dynamic]^ast.Field, context.temp_allocator);
+ arg_types := make([dynamic]^ast.Field, context.temp_allocator);
- for ret in v.results.list {
- append(&return_types, ret);
- }
+ if v.results != nil {
- symbol.returns = strings.concatenate( {"(", string(ast_context.file.src[v.results.pos.offset:v.results.end.offset]), ")"}, context.temp_allocator);
+ for ret in v.results.list {
+ append(&return_types, ret);
+ }
- }
+ symbol.returns = strings.concatenate({"(", string(ast_context.file.src[v.results.pos.offset:v.results.end.offset]), ")"}, context.temp_allocator);
+ }
- if v.params != nil {
+ if v.params != nil {
- for param in v.params.list {
- append(&arg_types, param);
- }
+ for param in v.params.list {
+ append(&arg_types, param);
+ }
- symbol.signature = strings.concatenate( {"(", string(ast_context.file.src[v.params.pos.offset:v.params.end.offset]), ")"}, context.temp_allocator);
- }
+ symbol.signature = strings.concatenate({"(", string(ast_context.file.src[v.params.pos.offset:v.params.end.offset]), ")"}, context.temp_allocator);
+ }
- symbol.value = index.SymbolProcedureValue {
- return_types = return_types[:],
- arg_types = arg_types[:],
- };
+ symbol.value = index.SymbolProcedureValue {
+ return_types = return_types[:],
+ arg_types = arg_types[:],
+ };
- return symbol;
+ return symbol;
}
-make_symbol_generic_from_ast :: proc(ast_context: ^AstContext, expr: ^ast.Expr) -> index.Symbol {
+make_symbol_generic_from_ast :: proc (ast_context: ^AstContext, expr: ^ast.Expr) -> index.Symbol {
- symbol := index.Symbol {
- range = common.get_token_range(expr, ast_context.file.src),
- type = .Variable,
- signature = index.node_to_string(expr),
- pkg = get_package_from_node(expr^),
- };
+ symbol := index.Symbol {
+ range = common.get_token_range(expr, ast_context.file.src),
+ type = .Variable,
+ signature = index.node_to_string(expr),
+ pkg = get_package_from_node(expr^),
+ };
- symbol.value = index.SymbolGenericValue {
- expr = expr,
- };
+ symbol.value = index.SymbolGenericValue {
+ expr = expr
+ };
- return symbol;
+ return symbol;
}
-make_symbol_union_from_ast :: proc(ast_context: ^AstContext, v: ast.Union_Type, ident: ast.Ident) -> index.Symbol {
-
- symbol := index.Symbol {
- range = common.get_token_range(v, ast_context.file.src),
- type = .Enum,
- name = ident.name,
- pkg = get_package_from_node(v.node),
- };
+make_symbol_union_from_ast :: proc (ast_context: ^AstContext, v: ast.Union_Type, ident: ast.Ident) -> index.Symbol {
- names := make([dynamic] string, context.temp_allocator);
+ symbol := index.Symbol {
+ range = common.get_token_range(v, ast_context.file.src),
+ type = .Enum,
+ name = ident.name,
+ pkg = get_package_from_node(v.node),
+ };
- for variant in v.variants {
+ names := make([dynamic]string, context.temp_allocator);
- if ident, ok := variant.derived.(ast.Ident); ok {
- append(&names, ident.name);
- }
+ for variant in v.variants {
- }
+ if ident, ok := variant.derived.(ast.Ident); ok {
+ append(&names, ident.name);
+ }
+ }
- symbol.value = index.SymbolUnionValue {
- names = names[:],
- };
+ symbol.value = index.SymbolUnionValue {
+ names = names[:]
+ };
- return symbol;
+ return symbol;
}
-make_symbol_enum_from_ast :: proc(ast_context: ^AstContext, v: ast.Enum_Type, ident: ast.Ident) -> index.Symbol {
-
- symbol := index.Symbol {
- range = common.get_token_range(v, ast_context.file.src),
- type = .Enum,
- name = ident.name,
- pkg = get_package_from_node(v.node),
- };
+make_symbol_enum_from_ast :: proc (ast_context: ^AstContext, v: ast.Enum_Type, ident: ast.Ident) -> index.Symbol {
- names := make([dynamic] string, context.temp_allocator);
-
- for n in v.fields {
+ symbol := index.Symbol {
+ range = common.get_token_range(v, ast_context.file.src),
+ type = .Enum,
+ name = ident.name,
+ pkg = get_package_from_node(v.node),
+ };
- if ident, ok := n.derived.(ast.Ident); ok {
- append(&names, ident.name);
- }
+ names := make([dynamic]string, context.temp_allocator);
- else if field, ok := n.derived.(ast.Field_Value); ok {
- append(&names, field.field.derived.(ast.Ident).name);
- }
+ for n in v.fields {
- }
+ if ident, ok := n.derived.(ast.Ident); ok {
+ append(&names, ident.name);
+ } else if field, ok := n.derived.(ast.Field_Value); ok {
+ append(&names, field.field.derived.(ast.Ident).name);
+ }
+ }
- symbol.value = index.SymbolEnumValue {
- names = names[:],
- };
+ symbol.value = index.SymbolEnumValue {
+ names = names[:]
+ };
- return symbol;
+ return symbol;
}
-make_symbol_bitset_from_ast :: proc(ast_context: ^AstContext, v: ast.Bit_Set_Type, ident: ast.Ident) -> index.Symbol {
+make_symbol_bitset_from_ast :: proc (ast_context: ^AstContext, v: ast.Bit_Set_Type, ident: ast.Ident) -> index.Symbol {
- symbol := index.Symbol {
- range = common.get_token_range(v, ast_context.file.src),
- type = .Enum,
- name = ident.name,
- pkg = get_package_from_node(v.node),
- };
+ symbol := index.Symbol {
+ range = common.get_token_range(v, ast_context.file.src),
+ type = .Enum,
+ name = ident.name,
+ pkg = get_package_from_node(v.node),
+ };
- symbol.value = index.SymbolBitSetValue {
- expr = v.elem,
- };
+ symbol.value = index.SymbolBitSetValue {
+ expr = v.elem
+ };
- return symbol;
+ return symbol;
}
-make_symbol_struct_from_ast :: proc(ast_context: ^AstContext, v: ast.Struct_Type, ident: ast.Ident) -> index.Symbol {
-
- symbol := index.Symbol {
- range = common.get_token_range(v, ast_context.file.src),
- type = .Struct,
- name = ident.name,
- pkg = get_package_from_node(v.node),
- };
+make_symbol_struct_from_ast :: proc (ast_context: ^AstContext, v: ast.Struct_Type, ident: ast.Ident) -> index.Symbol {
- names := make([dynamic] string, context.temp_allocator);
- types := make([dynamic] ^ast.Expr, context.temp_allocator);
- usings := make(map [string] bool, 0, context.temp_allocator);
-
- for field in v.fields.list {
+ symbol := index.Symbol {
+ range = common.get_token_range(v, ast_context.file.src),
+ type = .Struct,
+ name = ident.name,
+ pkg = get_package_from_node(v.node),
+ };
- for n in field.names {
- if identifier, ok := n.derived.(ast.Ident); ok {
- append(&names, identifier.name);
- append(&types, index.clone_type(field.type, context.temp_allocator, nil));
+ names := make([dynamic]string, context.temp_allocator);
+ types := make([dynamic]^ast.Expr, context.temp_allocator);
+ usings := make(map[string]bool, 0, context.temp_allocator);
- if .Using in field.flags {
- usings[identifier.name] = true;
- }
+ for field in v.fields.list {
- }
- }
+ for n in field.names {
+ if identifier, ok := n.derived.(ast.Ident); ok {
+ append(&names, identifier.name);
+ append(&types, index.clone_type(field.type, context.temp_allocator, nil));
- }
+ if .Using in field.flags {
+ usings[identifier.name] = true;
+ }
+ }
+ }
+ }
- symbol.value = index.SymbolStructValue {
- names = names[:],
- types = types[:],
- usings = usings,
- };
+ symbol.value = index.SymbolStructValue {
+ names = names[:],
+ types = types[:],
+ usings = usings,
+ };
- if v.poly_params != nil {
- resolve_poly_struct(ast_context, v, &symbol);
- }
+ if v.poly_params != nil {
+ resolve_poly_struct(ast_context, v, &symbol);
+ }
- //TODO change the expand to not double copy the array, but just pass the dynamic arrays
- if len(usings) > 0 {
- symbol.value = expand_struct_usings(ast_context, symbol, symbol.value.(index.SymbolStructValue));
- }
+ //TODO change the expand to not double copy the array, but just pass the dynamic arrays
+ if len(usings) > 0 {
+ symbol.value = expand_struct_usings(ast_context, symbol, symbol.value.(index.SymbolStructValue));
+ }
- return symbol;
+ return symbol;
}
-resolve_poly_struct :: proc(ast_context: ^AstContext, v: ast.Struct_Type, symbol: ^index.Symbol) {
-
- if ast_context.call == nil {
- log.infof("no call");
- return;
- }
-
- symbol_value := &symbol.value.(index.SymbolStructValue);
-
- if symbol_value == nil {
- log.infof("no value");
- return;
- }
-
- i := 0;
-
- poly_map := make(map [string] ^ast.Expr, 0, context.temp_allocator);
-
- for param in v.poly_params.list {
-
- for name in param.names {
-
- if len(ast_context.call.args) <= i {
- break;
- }
-
- if param.type == nil {
- continue;
- }
-
- if poly, ok := param.type.derived.(ast.Typeid_Type); ok {
-
- if ident, ok := name.derived.(ast.Ident); ok {
- poly_map[ident.name] = ast_context.call.args[i];
- }
-
- }
+resolve_poly_struct :: proc (ast_context: ^AstContext, v: ast.Struct_Type, symbol: ^index.Symbol) {
+ if ast_context.call == nil {
+ log.infof("no call");
+ return;
+ }
- i += 1;
- }
+ symbol_value := &symbol.value.(index.SymbolStructValue);
+ if symbol_value == nil {
+ log.infof("no value");
+ return;
+ }
- }
+ i := 0;
- for type, i in symbol_value.types {
+ poly_map := make(map[string]^ast.Expr, 0, context.temp_allocator);
- if ident, ok := type.derived.(ast.Ident); ok {
+ for param in v.poly_params.list {
- if expr, ok := poly_map[ident.name]; ok {
- symbol_value.types[i] = expr;
- }
+ for name in param.names {
- }
+ if len(ast_context.call.args) <= i {
+ break;
+ }
- else if call_expr, ok := type.derived.(ast.Call_Expr); ok {
+ if param.type == nil {
+ continue;
+ }
- if call_expr.args == nil {
- continue;
- }
+ if poly, ok := param.type.derived.(ast.Typeid_Type); ok {
- for arg, i in call_expr.args {
+ if ident, ok := name.derived.(ast.Ident); ok {
+ poly_map[ident.name] = ast_context.call.args[i];
+ }
+ }
- if ident, ok := arg.derived.(ast.Ident); ok {
+ i += 1;
+ }
+ }
- if expr, ok := poly_map[ident.name]; ok {
- call_expr.args[i] = expr;
- }
+ for type, i in symbol_value.types {
- }
+ if ident, ok := type.derived.(ast.Ident); ok {
+ if expr, ok := poly_map[ident.name]; ok {
+ symbol_value.types[i] = expr;
+ }
+ } else if call_expr, ok := type.derived.(ast.Call_Expr); ok {
- }
+ if call_expr.args == nil {
+ continue;
+ }
- }
+ for arg, i in call_expr.args {
- }
+ if ident, ok := arg.derived.(ast.Ident); ok {
+ if expr, ok := poly_map[ident.name]; ok {
+ call_expr.args[i] = expr;
+ }
+ }
+ }
+ }
+ }
}
-get_globals :: proc(file: ast.File, ast_context: ^AstContext) {
+get_globals :: proc (file: ast.File, ast_context: ^AstContext) {
- ast_context.variables["context"] = true;
+ ast_context.variables["context"] = true;
- exprs := common.collect_globals(file);
+ exprs := common.collect_globals(file);
- for expr in exprs {
- ast_context.globals[expr.name] = expr.expr;
- ast_context.variables[expr.name] = expr.mutable;
- }
+ for expr in exprs {
+ ast_context.globals[expr.name] = expr.expr;
+ ast_context.variables[expr.name] = expr.mutable;
+ }
}
-get_generic_assignment :: proc(file: ast.File, value: ^ast.Expr, ast_context: ^AstContext, results: ^[dynamic]^ast.Expr) {
-
- using ast;
-
- ast_context.use_locals = true;
- ast_context.use_globals = true;
-
- switch v in value.derived {
- case Call_Expr:
-
- ast_context.call = cast(^ast.Call_Expr)value;
-
- if symbol, ok := resolve_type_expression(ast_context, v.expr); ok {
-
- if procedure, ok := symbol.value.(index.SymbolProcedureValue); ok {
-
- for ret in procedure.return_types {
- append(results, ret.type);
- }
-
- }
-
- }
-
- case Comp_Lit:
- if v.type != nil {
- append(results, v.type);
- }
- case Array_Type:
- if v.elem != nil {
- append(results, v.elem);
- }
- case Dynamic_Array_Type:
- if v.elem != nil {
- append(results, v.elem);
- }
- case Selector_Expr:
- if v.expr != nil {
- append(results, value);
- }
- case Type_Assertion:
- if v.type != nil {
- append(results, v.type);
- append(results, make_bool_ast());
- }
- case:
- //log.debugf("default node get_generic_assignment %v", v);
- append(results, value);
- }
-
+get_generic_assignment :: proc (file: ast.File, value: ^ast.Expr, ast_context: ^AstContext, results: ^[dynamic]^ast.Expr) {
+
+ using ast;
+
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+
+ switch v in value.derived {
+ case Call_Expr:
+
+ ast_context.call = cast(^ast.Call_Expr)value;
+
+ if symbol, ok := resolve_type_expression(ast_context, v.expr); ok {
+
+ if procedure, ok := symbol.value.(index.SymbolProcedureValue); ok {
+
+ for ret in procedure.return_types {
+ append(results, ret.type);
+ }
+ }
+ }
+
+ case Comp_Lit:
+ if v.type != nil {
+ append(results, v.type);
+ }
+ case Array_Type:
+ if v.elem != nil {
+ append(results, v.elem);
+ }
+ case Dynamic_Array_Type:
+ if v.elem != nil {
+ append(results, v.elem);
+ }
+ case Selector_Expr:
+ if v.expr != nil {
+ append(results, value);
+ }
+ case Type_Assertion:
+ if v.type != nil {
+ append(results, v.type);
+ append(results, make_bool_ast());
+ }
+ case:
+ //log.debugf("default node get_generic_assignment %v", v);
+ append(results, value);
+ }
}
-get_locals_value_decl :: proc(file: ast.File, value_decl: ast.Value_Decl, ast_context: ^AstContext) {
-
- using ast;
-
- if len(value_decl.names) <= 0 {
- return;
- }
-
-
- if value_decl.type != nil {
- str := common.get_ast_node_string(value_decl.names[0], file.src);
- ast_context.variables[str] = value_decl.is_mutable;
- store_local(ast_context, value_decl.type, value_decl.pos.offset, str);
- return;
- }
+get_locals_value_decl :: proc (file: ast.File, value_decl: ast.Value_Decl, ast_context: ^AstContext) {
- results := make([dynamic]^Expr, context.temp_allocator);
+ using ast;
- for value in value_decl.values {
- get_generic_assignment(file, value, ast_context, &results);
- }
-
- for name, i in value_decl.names {
- if i < len(results) {
- str := common.get_ast_node_string(name, file.src);
- ast_context.in_package[str] = get_package_from_node(results[i]);
- store_local(ast_context, results[i], name.pos.offset, str);
- ast_context.variables[str] = value_decl.is_mutable;
- }
- }
-
-}
+ if len(value_decl.names) <= 0 {
+ return;
+ }
-get_locals_stmt :: proc(file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext, save_assign := false) {
-
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
-
- using ast;
-
- if stmt == nil {
- return;
- }
-
- if stmt.pos.offset > document_position.position {
- return;
- }
-
- switch v in stmt.derived {
- case Value_Decl:
- get_locals_value_decl(file, v, ast_context);
- case Type_Switch_Stmt:
- get_locals_type_switch_stmt(file, v, ast_context, document_position);
- case Switch_Stmt:
- get_locals_switch_stmt(file, v, ast_context, document_position);
- case For_Stmt:
- get_locals_for_stmt(file, v, ast_context, document_position);
- case Inline_Range_Stmt:
- get_locals_stmt(file, v.body, ast_context, document_position);
- case Range_Stmt:
- get_locals_for_range_stmt(file, v, ast_context, document_position);
- case If_Stmt:
- get_locals_if_stmt(file, v, ast_context, document_position);
- case Block_Stmt:
- for stmt in v.stmts {
- get_locals_stmt(file, stmt, ast_context, document_position);
- }
- case Proc_Lit:
- get_locals_stmt(file, v.body, ast_context, document_position);
- case Assign_Stmt:
- if save_assign {
- get_locals_assign_stmt(file, v, ast_context);
- }
- case Using_Stmt:
- get_locals_using_stmt(file, v, ast_context);
- case When_Stmt:
- get_locals_stmt(file, v.else_stmt, ast_context, document_position);
- get_locals_stmt(file, v.body, ast_context, document_position);
- case:
- //log.debugf("default node local stmt %v", v);
- }
+ if value_decl.type != nil {
+ str := common.get_ast_node_string(value_decl.names[0], file.src);
+ ast_context.variables[str] = value_decl.is_mutable;
+ store_local(ast_context, value_decl.type, value_decl.pos.offset, str);
+ return;
+ }
+ results := make([dynamic]^Expr, context.temp_allocator);
+ for value in value_decl.values {
+ get_generic_assignment(file, value, ast_context, &results);
+ }
+ for name, i in value_decl.names {
+ if i < len(results) {
+ str := common.get_ast_node_string(name, file.src);
+ ast_context.in_package[str] = get_package_from_node(results[i]);
+ store_local(ast_context, results[i], name.pos.offset, str);
+ ast_context.variables[str] = value_decl.is_mutable;
+ }
+ }
}
-get_locals_using_stmt :: proc(file: ast.File, stmt: ast.Using_Stmt, ast_context: ^AstContext) {
-
- for u in stmt.list {
-
- if symbol, ok := resolve_type_expression(ast_context, u); ok {
+get_locals_stmt :: proc (file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext, save_assign := false) {
- #partial switch v in symbol.value {
- case index.SymbolPackageValue:
- if ident, ok := u.derived.(ast.Ident); ok {
- append(&ast_context.usings, ident.name);
- }
- case index.SymbolStructValue:
- for name, i in v.names {
- selector := index.new_type(ast.Selector_Expr, v.types[i].pos, v.types[i].end, context.temp_allocator);
- selector.expr = u;
- selector.field = index.new_type(ast.Ident, v.types[i].pos, v.types[i].end, context.temp_allocator);
- selector.field.name = name;
- store_local(ast_context, selector, 0, name);
- ast_context.variables[name] = true;
- }
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
- }
-
-
- }
+ using ast;
+ if stmt == nil {
+ return;
+ }
- }
+ if stmt.pos.offset > document_position.position {
+ return;
+ }
+ switch v in stmt.derived {
+ case Value_Decl:
+ get_locals_value_decl(file, v, ast_context);
+ case Type_Switch_Stmt:
+ get_locals_type_switch_stmt(file, v, ast_context, document_position);
+ case Switch_Stmt:
+ get_locals_switch_stmt(file, v, ast_context, document_position);
+ case For_Stmt:
+ get_locals_for_stmt(file, v, ast_context, document_position);
+ case Inline_Range_Stmt:
+ get_locals_stmt(file, v.body, ast_context, document_position);
+ case Range_Stmt:
+ get_locals_for_range_stmt(file, v, ast_context, document_position);
+ case If_Stmt:
+ get_locals_if_stmt(file, v, ast_context, document_position);
+ case Block_Stmt:
+ for stmt in v.stmts {
+ get_locals_stmt(file, stmt, ast_context, document_position);
+ }
+ case Proc_Lit:
+ get_locals_stmt(file, v.body, ast_context, document_position);
+ case Assign_Stmt:
+ if save_assign {
+ get_locals_assign_stmt(file, v, ast_context);
+ }
+ case Using_Stmt:
+ get_locals_using_stmt(file, v, ast_context);
+ case When_Stmt:
+ get_locals_stmt(file, v.else_stmt, ast_context, document_position);
+ get_locals_stmt(file, v.body, ast_context, document_position);
+ case:
+ //log.debugf("default node local stmt %v", v);
+ }
}
-get_locals_assign_stmt :: proc(file: ast.File, stmt: ast.Assign_Stmt, ast_context: ^AstContext) {
-
- using ast;
+get_locals_using_stmt :: proc (file: ast.File, stmt: ast.Using_Stmt, ast_context: ^AstContext) {
+
+ for u in stmt.list {
+
+ if symbol, ok := resolve_type_expression(ast_context, u); ok {
+
+ #partial switch v in symbol.value {
+ case index.SymbolPackageValue:
+ if ident, ok := u.derived.(ast.Ident); ok {
+ append(&ast_context.usings, ident.name);
+ }
+ case index.SymbolStructValue:
+ for name, i in v.names {
+ selector := index.new_type(ast.Selector_Expr, v.types[i].pos, v.types[i].end, context.temp_allocator);
+ selector.expr = u;
+ selector.field = index.new_type(ast.Ident, v.types[i].pos, v.types[i].end, context.temp_allocator);
+ selector.field.name = name;
+ store_local(ast_context, selector, 0, name);
+ ast_context.variables[name] = true;
+ }
+ }
+ }
+ }
+}
- if stmt.lhs == nil || stmt.rhs == nil {
- return;
- }
+get_locals_assign_stmt :: proc (file: ast.File, stmt: ast.Assign_Stmt, ast_context: ^AstContext) {
- results := make([dynamic]^Expr, context.temp_allocator);
+ using ast;
- for rhs in stmt.rhs {
- get_generic_assignment(file, rhs, ast_context, &results);
- }
+ if stmt.lhs == nil || stmt.rhs == nil {
+ return;
+ }
+ results := make([dynamic]^Expr, context.temp_allocator);
- if len(stmt.lhs) != len(results) {
- return;
- }
+ for rhs in stmt.rhs {
+ get_generic_assignment(file, rhs, ast_context, &results);
+ }
- for lhs, i in stmt.lhs {
- if ident, ok := lhs.derived.(ast.Ident); ok {
- store_local(ast_context, results[i], ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- }
- }
+ if len(stmt.lhs) != len(results) {
+ return;
+ }
+ for lhs, i in stmt.lhs {
+ if ident, ok := lhs.derived.(ast.Ident); ok {
+ store_local(ast_context, results[i], ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ }
+ }
}
-get_locals_if_stmt :: proc(file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_if_stmt :: proc (file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
- return;
- }
+ if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
+ return;
+ }
- get_locals_stmt(file, stmt.init, ast_context, document_position, true);
- get_locals_stmt(file, stmt.body, ast_context, document_position);
- get_locals_stmt(file, stmt.else_stmt, ast_context, document_position);
+ get_locals_stmt(file, stmt.init, ast_context, document_position, true);
+ get_locals_stmt(file, stmt.body, ast_context, document_position);
+ get_locals_stmt(file, stmt.else_stmt, ast_context, document_position);
}
-get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
- using ast;
-
- if !(stmt.body.pos.offset <= document_position.position && document_position.position <= stmt.body.end.offset) {
- return;
- }
-
- results := make([dynamic]^Expr, context.temp_allocator);
-
- if stmt.expr == nil {
- return;
- }
-
- if symbol, ok := resolve_type_expression(ast_context, stmt.expr); ok {
-
- if generic, ok := symbol.value.(index.SymbolGenericValue); ok {
+get_locals_for_range_stmt :: proc (file: ast.File, stmt: ast.Range_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- switch v in generic.expr.derived {
- case Map_Type:
- if stmt.val0 != nil {
+ using ast;
- if ident, ok := stmt.val0.derived.(Ident); ok {
- store_local(ast_context, v.key, ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
-
- }
-
- if stmt.val1 != nil {
-
- if ident, ok := stmt.val1.derived.(Ident); ok {
- store_local(ast_context, v.value, ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
-
- }
- case Dynamic_Array_Type:
- if stmt.val0 != nil {
-
- if ident, ok := stmt.val0.derived.(Ident); ok {
- store_local(ast_context, v.elem, ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
-
- }
-
- if stmt.val1 != nil {
-
- if ident, ok := stmt.val1.derived.(Ident); ok {
- store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
-
- }
- case Array_Type:
- if stmt.val0 != nil {
-
- if ident, ok := stmt.val0.derived.(Ident); ok {
- store_local(ast_context, v.elem, ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
-
- }
-
- if stmt.val1 != nil {
-
- if ident, ok := stmt.val1.derived.(Ident); ok {
- store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- ast_context.in_package[ident.name] = symbol.pkg;
- }
+ if !(stmt.body.pos.offset <= document_position.position && document_position.position <= stmt.body.end.offset) {
+ return;
+ }
- }
- }
+ results := make([dynamic]^Expr, context.temp_allocator);
- }
+ if stmt.expr == nil {
+ return;
+ }
- }
+ if symbol, ok := resolve_type_expression(ast_context, stmt.expr); ok {
+
+ if generic, ok := symbol.value.(index.SymbolGenericValue); ok {
+
+ switch v in generic.expr.derived {
+ case Map_Type:
+ if stmt.val0 != nil {
+
+ if ident, ok := stmt.val0.derived.(Ident); ok {
+ store_local(ast_context, v.key, ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+
+ if stmt.val1 != nil {
+
+ if ident, ok := stmt.val1.derived.(Ident); ok {
+ store_local(ast_context, v.value, ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+ case Dynamic_Array_Type:
+ if stmt.val0 != nil {
+
+ if ident, ok := stmt.val0.derived.(Ident); ok {
+ store_local(ast_context, v.elem, ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+
+ if stmt.val1 != nil {
+
+ if ident, ok := stmt.val1.derived.(Ident); ok {
+ store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+ case Array_Type:
+ if stmt.val0 != nil {
+
+ if ident, ok := stmt.val0.derived.(Ident); ok {
+ store_local(ast_context, v.elem, ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+
+ if stmt.val1 != nil {
+
+ if ident, ok := stmt.val1.derived.(Ident); ok {
+ store_local(ast_context, make_int_ast(), ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ ast_context.in_package[ident.name] = symbol.pkg;
+ }
+ }
+ }
+ }
+ }
- get_locals_stmt(file, stmt.body, ast_context, document_position);
+ get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_for_stmt :: proc(file: ast.File, stmt: ast.For_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_for_stmt :: proc (file: ast.File, stmt: ast.For_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
- return;
- }
+ if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
+ return;
+ }
- get_locals_stmt(file, stmt.init, ast_context, document_position, true);
- get_locals_stmt(file, stmt.body, ast_context, document_position);
+ get_locals_stmt(file, stmt.init, ast_context, document_position, true);
+ get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_switch_stmt :: proc(file: ast.File, stmt: ast.Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_switch_stmt :: proc (file: ast.File, stmt: ast.Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
- return;
- }
+ if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
+ return;
+ }
- get_locals_stmt(file, stmt.body, ast_context, document_position);
+ get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_type_switch_stmt :: proc(file: ast.File, stmt: ast.Type_Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
- using ast;
+get_locals_type_switch_stmt :: proc (file: ast.File, stmt: ast.Type_Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
- return;
- }
+ using ast;
- if stmt.body == nil {
- return;
- }
-
- if block, ok := stmt.body.derived.(Block_Stmt); ok {
-
- for block_stmt in block.stmts {
-
- if cause, ok := block_stmt.derived.(Case_Clause); ok && cause.pos.offset <= document_position.position && document_position.position <= cause.end.offset {
+ if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
+ return;
+ }
- for b in cause.body {
- get_locals_stmt(file, b, ast_context, document_position);
- }
+ if stmt.body == nil {
+ return;
+ }
- tag := stmt.tag.derived.(Assign_Stmt);
+ if block, ok := stmt.body.derived.(Block_Stmt); ok {
- if len(tag.lhs) == 1 && len(cause.list) == 1 {
- ident := tag.lhs[0].derived.(Ident);
- store_local(ast_context, cause.list[0], ident.pos.offset, ident.name);
- ast_context.variables[ident.name] = true;
- }
+ for block_stmt in block.stmts {
- }
+ if cause, ok := block_stmt.derived.(Case_Clause); ok && cause.pos.offset <= document_position.position && document_position.position <= cause.end.offset {
- }
+ for b in cause.body {
+ get_locals_stmt(file, b, ast_context, document_position);
+ }
- }
+ tag := stmt.tag.derived.(Assign_Stmt);
+ if len(tag.lhs) == 1 && len(cause.list) == 1 {
+ ident := tag.lhs[0].derived.(Ident);
+ store_local(ast_context, cause.list[0], ident.pos.offset, ident.name);
+ ast_context.variables[ident.name] = true;
+ }
+ }
+ }
+ }
}
-get_locals :: proc(file: ast.File, function: ^ast.Node, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
-
- proc_lit, ok := function.derived.(ast.Proc_Lit);
-
- if !ok || proc_lit.body == nil {
- return;
- }
-
-
- if proc_lit.type != nil && proc_lit.type.params != nil {
+get_locals :: proc (file: ast.File, function: ^ast.Node, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
- for arg in proc_lit.type.params.list {
+ proc_lit, ok := function.derived.(ast.Proc_Lit);
- for name in arg.names {
- if arg.type != nil {
- str := common.get_ast_node_string(name, file.src);
- store_local(ast_context, arg.type, name.pos.offset, str);
- ast_context.variables[str] = true;
- ast_context.parameters[str] = true;
-
- log.info(arg.flags);
+ if !ok || proc_lit.body == nil {
+ return;
+ }
- if .Using in arg.flags {
- using_stmt: ast.Using_Stmt;
- using_stmt.list = make([] ^ ast.Expr, 1, context.temp_allocator);
- using_stmt.list[0] = arg.type;
- get_locals_using_stmt(file, using_stmt, ast_context);
- }
- }
- }
+ if proc_lit.type != nil && proc_lit.type.params != nil {
- }
+ for arg in proc_lit.type.params.list {
+ for name in arg.names {
+ if arg.type != nil {
+ str := common.get_ast_node_string(name, file.src);
+ store_local(ast_context, arg.type, name.pos.offset, str);
+ ast_context.variables[str] = true;
+ ast_context.parameters[str] = true;
- }
+ log.info(arg.flags);
- block: ast.Block_Stmt;
- block, ok = proc_lit.body.derived.(ast.Block_Stmt);
+ if .Using in arg.flags {
+ using_stmt: ast.Using_Stmt;
+ using_stmt.list = make([]^ast.Expr, 1, context.temp_allocator);
+ using_stmt.list[0] = arg.type;
+ get_locals_using_stmt(file, using_stmt, ast_context);
+ }
+ }
+ }
+ }
+ }
- if !ok {
- log.error("Proc_List body not block");
- return;
- }
+ block: ast.Block_Stmt;
+ block, ok = proc_lit.body.derived.(ast.Block_Stmt);
- for stmt in block.stmts {
- get_locals_stmt(file, stmt, ast_context, document_position);
- }
+ if !ok {
+ log.error("Proc_List body not block");
+ return;
+ }
+ for stmt in block.stmts {
+ get_locals_stmt(file, stmt, ast_context, document_position);
+ }
}
-clear_locals :: proc(ast_context: ^AstContext) {
- clear(&ast_context.locals);
- clear(&ast_context.parameters);
- clear(&ast_context.variables);
- clear(&ast_context.usings);
+clear_locals :: proc (ast_context: ^AstContext) {
+ clear(&ast_context.locals);
+ clear(&ast_context.parameters);
+ clear(&ast_context.variables);
+ clear(&ast_context.usings);
}
-concatenate_symbols_information :: proc(ast_context: ^AstContext, symbol: index.Symbol) -> string {
-
- pkg := path.base(symbol.pkg, false, context.temp_allocator);
-
- if symbol.type == .Function {
-
- if symbol.returns != "" {
- return fmt.tprintf("%v.%v: proc %v -> %v", pkg, symbol.name, symbol.signature, symbol.returns);
- }
-
- else {
- return fmt.tprintf("%v.%v: proc%v", pkg, symbol.name, symbol.signature);
- }
+concatenate_symbols_information :: proc (ast_context: ^AstContext, symbol: index.Symbol) -> string {
- }
+ pkg := path.base(symbol.pkg, false, context.temp_allocator);
- else if symbol.type == .Package {
- return symbol.name;
- }
+ if symbol.type == .Function {
- else {
+ if symbol.returns != "" {
+ return fmt.tprintf("%v.%v: proc %v -> %v", pkg, symbol.name, symbol.signature, symbol.returns);
+ } else {
+ return fmt.tprintf("%v.%v: proc%v", pkg, symbol.name, symbol.signature);
+ }
+ } else if symbol.type == .Package {
+ return symbol.name;
+ } else {
- if symbol.signature != "" {
- return fmt.tprintf("%v.%v: %v", pkg, symbol.name, symbol.signature);
- }
-
- else {
- return fmt.tprintf("%v.%v", pkg, symbol.name);
- }
-
- }
+ if symbol.signature != "" {
+ return fmt.tprintf("%v.%v: %v", pkg, symbol.name, symbol.signature);
+ } else {
+ return fmt.tprintf("%v.%v", pkg, symbol.name);
+ }
+ }
- return ""; //weird bug requires this
+ return ""; //weird bug requires this
}
-get_definition_location :: proc(document: ^Document, position: common.Position) -> (common.Location, bool) {
-
- location: common.Location;
-
- ast_context := make_ast_context(document.ast, document.imports, document.package_name);
-
- uri: string;
-
- position_context, ok := get_document_position_context(document, position, .Definition);
-
- if !ok {
- log.warn("Failed to get position context");
- return location, false;
- }
-
- get_globals(document.ast, &ast_context);
-
- if position_context.function != nil {
- get_locals(document.ast, position_context.function, &ast_context, &position_context);
- }
-
- if position_context.selector != nil {
-
- //if the base selector is the client wants to go to.
- if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil {
-
- ident := position_context.identifier.derived.(ast.Ident);
-
- if ident.name == base.name {
-
- if resolved, ok := resolve_location_identifier(&ast_context, ident); ok {
- location.range = resolved.range;
-
- if resolved.uri == "" {
- location.uri = document.uri.uri;
- }
-
- else {
- location.uri = resolved.uri;
- }
-
- return location, true;
- }
-
- else {
- return location, false;
- }
+get_definition_location :: proc (document: ^Document, position: common.Position) -> (common.Location, bool) {
- }
+ location: common.Location;
- }
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name);
- //otherwise it's the field the client wants to go to.
+ uri: string;
- selector: index.Symbol;
+ position_context, ok := get_document_position_context(document, position, .Definition);
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
-
- selector, ok = resolve_type_expression(&ast_context, position_context.selector);
-
- if !ok {
- return location, false;
- }
-
- field: string;
-
- if position_context.field != nil {
-
- switch v in position_context.field.derived {
- case ast.Ident:
- field = v.name;
- }
-
- }
-
- uri = selector.uri;
-
- #partial switch v in selector.value {
- case index.SymbolEnumValue:
- location.range = selector.range;
- case index.SymbolStructValue:
- for name, i in v.names {
- if strings.compare(name, field) == 0 {
- location.range = common.get_token_range(v.types[i]^, document.ast.src);
- }
- }
- case index.SymbolPackageValue:
- if symbol, ok := index.lookup(field, selector.pkg); ok {
- location.range = symbol.range;
- uri = symbol.uri;
- }
- else {
- return location, false;
- }
- }
-
- if !ok {
- return location, false;
- }
-
- }
-
- else if position_context.identifier != nil {
-
- if resolved, ok := resolve_location_identifier(&ast_context, position_context.identifier.derived.(ast.Ident)); ok {
- location.range = resolved.range;
- uri = resolved.uri;
- }
-
- else {
- return location, false;
- }
+ if !ok {
+ log.warn("Failed to get position context");
+ return location, false;
+ }
- }
+ get_globals(document.ast, &ast_context);
- else {
- return location, false;
- }
+ if position_context.function != nil {
+ get_locals(document.ast, position_context.function, &ast_context, &position_context);
+ }
- //if the symbol is generated by the ast we don't set the uri.
- if uri == "" {
- location.uri = document.uri.uri;
- }
+ if position_context.selector != nil {
+
+ //if the base selector is the client wants to go to.
+ if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil {
+
+ ident := position_context.identifier.derived.(ast.Ident);
+
+ if ident.name == base.name {
+
+ if resolved, ok := resolve_location_identifier(&ast_context, ident); ok {
+ location.range = resolved.range;
+
+ if resolved.uri == "" {
+ location.uri = document.uri.uri;
+ } else {
+ location.uri = resolved.uri;
+ }
+
+ return location, true;
+ } else {
+ return location, false;
+ }
+ }
+ }
+
+ //otherwise it's the field the client wants to go to.
+
+ selector: index.Symbol;
+
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
+
+ selector, ok = resolve_type_expression(&ast_context, position_context.selector);
+
+ if !ok {
+ return location, false;
+ }
+
+ field: string;
+
+ if position_context.field != nil {
+
+ switch v in position_context.field.derived {
+ case ast.Ident:
+ field = v.name;
+ }
+ }
+
+ uri = selector.uri;
+
+ #partial switch v in selector.value {
+ case index.SymbolEnumValue:
+ location.range = selector.range;
+ case index.SymbolStructValue:
+ for name, i in v.names {
+ if strings.compare(name, field) == 0 {
+ location.range = common.get_token_range(v.types[i]^, document.ast.src);
+ }
+ }
+ case index.SymbolPackageValue:
+ if symbol, ok := index.lookup(field, selector.pkg); ok {
+ location.range = symbol.range;
+ uri = symbol.uri;
+ } else {
+ return location, false;
+ }
+ }
+
+ if !ok {
+ return location, false;
+ }
+ } else if position_context.identifier != nil {
+
+ if resolved, ok := resolve_location_identifier(&ast_context, position_context.identifier.derived.(ast.Ident)); ok {
+ location.range = resolved.range;
+ uri = resolved.uri;
+ } else {
+ return location, false;
+ }
+ } else {
+ return location, false;
+ }
- else {
- location.uri = uri;
- }
+ //if the symbol is generated by the ast we don't set the uri.
+ if uri == "" {
+ location.uri = document.uri.uri;
+ } else {
+ location.uri = uri;
+ }
- return location, true;
+ return location, true;
}
-write_hover_content :: proc(ast_context: ^AstContext, symbol: index.Symbol) -> MarkupContent {
- content: MarkupContent;
+write_hover_content :: proc (ast_context: ^AstContext, symbol: index.Symbol) -> MarkupContent {
+ content: MarkupContent;
- cat := concatenate_symbols_information(ast_context, symbol);
+ cat := concatenate_symbols_information(ast_context, symbol);
- if cat != "" {
- content.kind = "markdown";
- content.value = fmt.tprintf("```odin\n %v\n```\n%v", cat, symbol.doc);
- }
-
- else {
- content.kind = "plaintext";
- }
+ if cat != "" {
+ content.kind = "markdown";
+ content.value = fmt.tprintf("```odin\n %v\n```\n%v", cat, symbol.doc);
+ } else {
+ content.kind = "plaintext";
+ }
- return content;
+ return content;
}
-get_signature :: proc(ast_context: ^AstContext, ident: ast.Ident, symbol: index.Symbol, was_variable := false) -> string {
+get_signature :: proc (ast_context: ^AstContext, ident: ast.Ident, symbol: index.Symbol, was_variable := false) -> string {
- if symbol.type == .Function {
- return symbol.signature;
- }
-
- if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
-
- if local := get_local(ast_context, ident.pos.offset, ident.name); local != nil {
-
- if i, ok := local.derived.(ast.Ident); ok {
- return get_signature(ast_context, i, symbol, true);
- }
+ if symbol.type == .Function {
+ return symbol.signature;
+ }
- else {
- return index.node_to_string(local);
- }
- }
+ if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
- if global, ok := ast_context.globals[ident.name]; ok {
- if i, ok := global.derived.(ast.Ident); ok {
- return get_signature(ast_context, i, symbol, true);
- }
+ if local := get_local(ast_context, ident.pos.offset, ident.name); local != nil {
- else {
- return index.node_to_string(global);
- }
- }
+ if i, ok := local.derived.(ast.Ident); ok {
+ return get_signature(ast_context, i, symbol, true);
+ } else {
+ return index.node_to_string(local);
+ }
+ }
- }
+ if global, ok := ast_context.globals[ident.name]; ok {
+ if i, ok := global.derived.(ast.Ident); ok {
+ return get_signature(ast_context, i, symbol, true);
+ } else {
+ return index.node_to_string(global);
+ }
+ }
+ }
- if !was_variable {
- #partial switch v in symbol.value {
- case index.SymbolStructValue:
- return "struct";
- case index.SymbolUnionValue:
- return "union";
- case index.SymbolEnumValue:
- return "enum";
- }
- }
+ if !was_variable {
+ #partial switch v in symbol.value {
+ case index.SymbolStructValue:
+ return "struct";
+ case index.SymbolUnionValue:
+ return "union";
+ case index.SymbolEnumValue:
+ return "enum";
+ }
+ }
- return ident.name;
+ return ident.name;
}
-get_signature_information :: proc(document: ^Document, position: common.Position) -> (SignatureHelp, bool) {
+get_signature_information :: proc (document: ^Document, position: common.Position) -> (SignatureHelp, bool) {
- signature_help: SignatureHelp;
+ signature_help: SignatureHelp;
- ast_context := make_ast_context(document.ast, document.imports, document.package_name);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name);
- position_context, ok := get_document_position_context(document, position, .SignatureHelp);
+ position_context, ok := get_document_position_context(document, position, .SignatureHelp);
- if !ok {
- return signature_help, true;
- }
+ if !ok {
+ return signature_help, true;
+ }
- if position_context.call == nil {
- return signature_help, true;
- }
+ if position_context.call == nil {
+ return signature_help, true;
+ }
- get_globals(document.ast, &ast_context);
+ get_globals(document.ast, &ast_context);
- if position_context.function != nil {
- get_locals(document.ast, position_context.function, &ast_context, &position_context);
- }
+ if position_context.function != nil {
+ get_locals(document.ast, position_context.function, &ast_context, &position_context);
+ }
- call: index.Symbol;
- call, ok = resolve_type_expression(&ast_context, position_context.call);
+ call: index.Symbol;
+ call, ok = resolve_type_expression(&ast_context, position_context.call);
- if symbol, ok := call.value.(index.SymbolProcedureValue); !ok {
- return signature_help, true;
- }
+ if symbol, ok := call.value.(index.SymbolProcedureValue); !ok {
+ return signature_help, true;
+ }
- signature_information := make([] SignatureInformation, 1, context.temp_allocator);
+ signature_information := make([]SignatureInformation, 1, context.temp_allocator);
- signature_information[0].label = concatenate_symbols_information(&ast_context, call);
- signature_information[0].documentation = call.doc;
+ signature_information[0].label = concatenate_symbols_information(&ast_context, call);
+ signature_information[0].documentation = call.doc;
- signature_help.signatures = signature_information;
+ signature_help.signatures = signature_information;
- return signature_help, true;
+ return signature_help, true;
}
-get_document_symbols :: proc(document: ^Document) -> [] DocumentSymbol {
+get_document_symbols :: proc (document: ^Document) -> []DocumentSymbol {
- ast_context := make_ast_context(document.ast, document.imports, document.package_name);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name);
- get_globals(document.ast, &ast_context);
+ get_globals(document.ast, &ast_context);
- symbols := make([dynamic] DocumentSymbol, context.temp_allocator);
+ symbols := make([dynamic]DocumentSymbol, context.temp_allocator);
- package_symbol: DocumentSymbol;
+ package_symbol: DocumentSymbol;
- if len(document.ast.decls) == 0 {
- return {};
- }
-
- package_symbol.kind = .Package;
- package_symbol.name = path.base(document.package_name, false, context.temp_allocator);
- package_symbol.range = {
- start = {
- line = document.ast.decls[0].pos.line,
- },
- end = {
- line = document.ast.decls[len(document.ast.decls)-1].end.line,
- },
- };
- package_symbol.selectionRange = package_symbol.range;
-
- children_symbols := make([dynamic] DocumentSymbol, context.temp_allocator);
-
- for k, expr in ast_context.globals {
-
- symbol: DocumentSymbol;
-
- symbol.range = common.get_token_range(expr, ast_context.file.src);
- symbol.selectionRange = symbol.range;
- symbol.name = k;
-
- switch v in expr.derived {
- case ast.Struct_Type:
- symbol.kind = .Struct;
- case ast.Proc_Lit, ast.Proc_Group:
- symbol.kind = .Function;
- case ast.Enum_Type, ast.Union_Type:
- symbol.kind = .Enum;
- case:
- symbol.kind = .Variable;
- }
+ if len(document.ast.decls) == 0 {
+ return {};
+ }
- append(&children_symbols, symbol);
- }
+ package_symbol.kind = .Package;
+ package_symbol.name = path.base(document.package_name, false, context.temp_allocator);
+ package_symbol.range = {
+ start = {
+ line = document.ast.decls[0].pos.line
+ },
+ end = {
+ line = document.ast.decls[len(document.ast.decls) - 1].end.line
+ },
+ };
+ package_symbol.selectionRange = package_symbol.range;
+
+ children_symbols := make([dynamic]DocumentSymbol, context.temp_allocator);
+
+ for k, expr in ast_context.globals {
+
+ symbol: DocumentSymbol;
+
+ symbol.range = common.get_token_range(expr, ast_context.file.src);
+ symbol.selectionRange = symbol.range;
+ symbol.name = k;
+
+ switch v in expr.derived {
+ case ast.Struct_Type:
+ symbol.kind = .Struct;
+ case ast.Proc_Lit,ast.Proc_Group:
+ symbol.kind = .Function;
+ case ast.Enum_Type,ast.Union_Type:
+ symbol.kind = .Enum;
+ case:
+ symbol.kind = .Variable;
+ }
+
+ append(&children_symbols, symbol);
+ }
- package_symbol.children = children_symbols[:];
+ package_symbol.children = children_symbols[:];
- append(&symbols, package_symbol);
+ append(&symbols, package_symbol);
- return symbols[:];
+ return symbols[:];
}
/*
- All these fallback functions are not perfect and should be fixed. A lot of weird use of the odin tokenizer and parser.
+ All these fallback functions are not perfect and should be fixed. A lot of weird use of the odin tokenizer and parser.
*/
/*
- Figure out what exactly is at the given position and whether it is in a function, struct, etc.
+ Figure out what exactly is at the given position and whether it is in a function, struct, etc.
*/
-get_document_position_context :: proc(document: ^Document, position: common.Position, hint: DocumentPositionContextHint) -> (DocumentPositionContext, bool) {
-
- position_context: DocumentPositionContext;
+get_document_position_context :: proc (document: ^Document, position: common.Position, hint: DocumentPositionContextHint) -> (DocumentPositionContext, bool) {
- position_context.hint = hint;
- position_context.file = document.ast;
- position_context.line = position.line;
+ position_context: DocumentPositionContext;
- absolute_position, ok := common.get_absolute_position(position, document.text);
+ position_context.hint = hint;
+ position_context.file = document.ast;
+ position_context.line = position.line;
- if !ok {
- log.error("failed to get absolute position");
- return position_context, false;
- }
+ absolute_position, ok := common.get_absolute_position(position, document.text);
- position_context.position = absolute_position;
+ if !ok {
+ log.error("failed to get absolute position");
+ return position_context, false;
+ }
- for decl in document.ast.decls {
- get_document_position(decl, &position_context);
- }
+ position_context.position = absolute_position;
- if hint == .Completion && position_context.selector == nil && position_context.field == nil {
- fallback_position_context_completion(document, position, &position_context);
- }
+ for decl in document.ast.decls {
+ get_document_position(decl, &position_context);
+ }
- else if hint == .SignatureHelp && position_context.call == nil {
- fallback_position_context_signature(document, position, &position_context);
- }
+ if hint == .Completion && position_context.selector == nil && position_context.field == nil {
+ fallback_position_context_completion(document, position, &position_context);
+ } else if hint == .SignatureHelp && position_context.call == nil {
+ fallback_position_context_signature(document, position, &position_context);
+ }
- return position_context, true;
+ return position_context, true;
}
+fallback_position_context_completion :: proc (document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
+
+ paren_count: int;
+ bracket_count: int;
+ end: int;
+ start: int;
+ empty_dot: bool;
+ empty_arrow: bool;
+ last_dot: bool;
+ last_arrow: bool;
+
+ i := position_context.position - 1;
+
+ end = i;
+
+ for i > 0 {
+
+ c := position_context.file.src[i];
+
+ if c == '(' && paren_count == 0 {
+ start = i + 1;
+ break;
+ } else if c == '[' && bracket_count == 0 {
+ start = i + 1;
+ break;
+ } else if c == ']' && !last_dot {
+ start = i + 1;
+ break;
+ } else if c == ')' && !last_dot {
+ start = i + 1;
+ break;
+ } else if c == ')' {
+ paren_count -= 1;
+ } else if c == '(' {
+ paren_count += 1;
+ } else if c == '[' {
+ bracket_count += 1;
+ } else if c == ']' {
+ bracket_count -= 1;
+ } else if c == '.' {
+ last_dot = true;
+ i -= 1;
+ continue;
+ } else if position_context.file.src[max(0, i - 1)] == '-' && c == '>' {
+ last_arrow = true;
+ i -= 2;
+ continue;
+ }
+
+ //yeah..
+ if c == ' ' || c == '{' || c == ',' ||
+ c == '}' || c == '^' || c == ':' ||
+ c == '\n' || c == '\r' || c == '=' ||
+ c == '<' || c == '>' || c == '-' ||
+ c == '+' || c == '&' {
+ start = i + 1;
+ break;
+ }
+
+ last_dot = false;
+ last_arrow = false;
+
+ i -= 1;
+ }
-fallback_position_context_completion :: proc(document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
-
- paren_count: int;
- bracket_count: int;
- end: int;
- start: int;
- empty_dot: bool;
- empty_arrow: bool;
- last_dot: bool;
- last_arrow: bool;
-
- i := position_context.position-1;
-
- end = i;
-
- for i > 0 {
-
- c := position_context.file.src[i];
-
- if c == '(' && paren_count == 0 {
- start = i+1;
- break;
- }
-
- else if c == '[' && bracket_count == 0 {
- start = i+1;
- break;
- }
-
- else if c == ']' && !last_dot {
- start = i+1;
- break;
- }
-
- else if c == ')' && !last_dot {
- start = i+1;
- break;
- }
-
- else if c == ')' {
- paren_count -= 1;
- }
-
- else if c == '(' {
- paren_count += 1;
- }
-
- else if c == '[' {
- bracket_count += 1;
- }
-
- else if c == ']' {
- bracket_count -= 1;
- }
-
- else if c == '.' {
- last_dot = true;
- i -= 1;
- continue;
- }
-
- else if position_context.file.src[max(0, i-1)] == '-' && c == '>' {
- last_arrow = true;
- i -= 2;
- continue;
- }
-
- //yeah..
- if c == ' ' || c == '{' || c == ',' ||
- c == '}' || c == '^' || c == ':' ||
- c == '\n' || c == '\r' || c == '=' ||
- c == '<' || c == '>' || c == '-' ||
- c == '+' || c == '&' {
- start = i+1;
- break;
- }
-
- last_dot = false;
- last_arrow = false;
-
- i -= 1;
- }
-
- if i >= 0 && position_context.file.src[end] == '.' {
- empty_dot = true;
- end -= 1;
- }
-
- else if i >= 0 && position_context.file.src[max(0, end-1)] == '-' && position_context.file.src[end] == '>' {
- empty_arrow = true;
- end -= 2;
- position_context.arrow = true;
- }
-
- begin_offset := max(0, start);
- end_offset := max(start, end+1);
-
- str := position_context.file.src[0:end_offset];
-
- if empty_dot && end_offset - begin_offset == 0 {
- position_context.implicit = true;
- return;
- }
-
- p := parser.Parser {
- err = parser_warning_handler, //empty
- warn = parser_warning_handler, //empty
- file = &position_context.file,
- };
-
- tokenizer.init(&p.tok, str, position_context.file.fullpath, parser_warning_handler);
+ if i >= 0 && position_context.file.src[end] == '.' {
+ empty_dot = true;
+ end -= 1;
+ } else if i >= 0 && position_context.file.src[max(0, end - 1)] == '-' && position_context.file.src[end] == '>' {
+ empty_arrow = true;
+ end -= 2;
+ position_context.arrow = true;
+ }
- p.tok.ch = ' ';
- p.tok.line_count = position.line;
- p.tok.offset = begin_offset;
- p.tok.read_offset = begin_offset;
+ begin_offset := max(0, start);
+ end_offset := max(start, end + 1);
- tokenizer.advance_rune(&p.tok);
+ str := position_context.file.src[0:end_offset];
- if p.tok.ch == utf8.RUNE_BOM {
- tokenizer.advance_rune(&p.tok);
+ if empty_dot && end_offset - begin_offset == 0 {
+ position_context.implicit = true;
+ return;
}
- parser.advance_token(&p);
-
- context.allocator = context.temp_allocator;
+ p := parser.Parser {
+ err = parser_warning_handler, //empty
+ warn = parser_warning_handler, //empty
+ file = &position_context.file,
+ };
- e := parser.parse_expr(&p, true);
+ tokenizer.init(&p.tok, str, position_context.file.fullpath, parser_warning_handler);
- if empty_dot || empty_arrow {
- position_context.selector = e;
- }
+ p.tok.ch = ' ';
+ p.tok.line_count = position.line;
+ p.tok.offset = begin_offset;
+ p.tok.read_offset = begin_offset;
- else if s, ok := e.derived.(ast.Selector_Expr); ok {
- position_context.selector = s.expr;
- position_context.field = s.field;
- }
+ tokenizer.advance_rune(&p.tok);
- else if s, ok := e.derived.(ast.Implicit_Selector_Expr); ok {
- position_context.implicit = true;
- }
+ if p.tok.ch == utf8.RUNE_BOM {
+ tokenizer.advance_rune(&p.tok);
+ }
- else if bad_expr, ok := e.derived.(ast.Bad_Expr); ok {
- //this is most likely because of use of 'in', 'context', etc.
- //try to go back one dot.
+ parser.advance_token(&p);
- src_with_dot := string(position_context.file.src[0:end_offset+1]);
- last_dot := strings.last_index(src_with_dot, ".");
+ context.allocator = context.temp_allocator;
- if last_dot == -1 {
- return;
- }
+ e := parser.parse_expr(&p, true);
- tokenizer.init(&p.tok, position_context.file.src[0:last_dot], position_context.file.fullpath, parser_warning_handler);
+ if empty_dot || empty_arrow {
+ position_context.selector = e;
+ } else if s, ok := e.derived.(ast.Selector_Expr); ok {
+ position_context.selector = s.expr;
+ position_context.field = s.field;
+ } else if s, ok := e.derived.(ast.Implicit_Selector_Expr); ok {
+ position_context.implicit = true;
+ } else if bad_expr, ok := e.derived.(ast.Bad_Expr); ok {
+ //this is most likely because of use of 'in', 'context', etc.
+ //try to go back one dot.
- p.tok.ch = ' ';
- p.tok.line_count = position.line;
- p.tok.offset = begin_offset;
- p.tok.read_offset = begin_offset;
+ src_with_dot := string(position_context.file.src[0:end_offset + 1]);
+ last_dot := strings.last_index(src_with_dot, ".");
- tokenizer.advance_rune(&p.tok);
+ if last_dot == -1 {
+ return;
+ }
- if p.tok.ch == utf8.RUNE_BOM {
- tokenizer.advance_rune(&p.tok);
- }
+ tokenizer.init(&p.tok, position_context.file.src[0:last_dot], position_context.file.fullpath, parser_warning_handler);
- parser.advance_token(&p);
+ p.tok.ch = ' ';
+ p.tok.line_count = position.line;
+ p.tok.offset = begin_offset;
+ p.tok.read_offset = begin_offset;
- e := parser.parse_expr(&p, true);
+ tokenizer.advance_rune(&p.tok);
- if e == nil {
- return;
- }
+ if p.tok.ch == utf8.RUNE_BOM {
+ tokenizer.advance_rune(&p.tok);
+ }
- position_context.selector = e;
+ parser.advance_token(&p);
- ident := index.new_type(ast.Ident, e.pos, e.end, context.temp_allocator);
- ident.name = string(position_context.file.src[last_dot+1:end_offset]);
+ e := parser.parse_expr(&p, true);
- if ident.name != "" {
- position_context.field = ident;
- }
+ if e == nil {
+ return;
+ }
- }
+ position_context.selector = e;
- else {
- position_context.identifier = e;
- }
+ ident := index.new_type(ast.Ident, e.pos, e.end, context.temp_allocator);
+ ident.name = string(position_context.file.src[last_dot + 1:end_offset]);
+ if ident.name != "" {
+ position_context.field = ident;
+ }
+ } else {
+ position_context.identifier = e;
+ }
}
-fallback_position_context_signature :: proc(document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
-
- paren_count: int;
- end: int;
- start: int;
- first_paren: bool;
- i := position_context.position-1;
-
- for i > 0 {
-
- c := position_context.file.src[i];
-
- if c == '(' && (paren_count == 0 || paren_count == -1) {
- end = i;
- first_paren = true;
- }
-
- else if c == ')' {
- paren_count -= 1;
- }
-
- else if c == '(' {
- paren_count += 1;
- }
-
- else if c == ' ' && end != 0 {
- start = i+1;
- break;
- }
- //not good enough if you want multi function signature help
- else if c == '\n' || c == '\r' {
- start = i+1;
- break;
- }
-
- i -= 1;
- }
+fallback_position_context_signature :: proc (document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
+
+ paren_count: int;
+ end: int;
+ start: int;
+ first_paren: bool;
+ i := position_context.position - 1;
+
+ for i > 0 {
+
+ c := position_context.file.src[i];
+
+ if c == '(' && (paren_count == 0 || paren_count == -1) {
+ end = i;
+ first_paren = true;
+ } else if c == ')' {
+ paren_count -= 1;
+ } else if c == '(' {
+ paren_count += 1;
+ } else if c == ' ' && end != 0 {
+ start = i + 1;
+ break;
+ } else
+ //not good enough if you want multi function signature help
+ if c == '\n' || c == '\r' {
+ start = i + 1;
+ break;
+ }
+
+ i -= 1;
+ }
- if !first_paren {
- return;
- }
+ if !first_paren {
+ return;
+ }
- begin_offset := max(0, start);
- end_offset := max(start, end+1);
+ begin_offset := max(0, start);
+ end_offset := max(start, end + 1);
- str := position_context.file.src[0:end_offset];
+ str := position_context.file.src[0:end_offset];
- p := parser.Parser {
- err = parser_warning_handler, //empty
+ p := parser.Parser {
+ err = parser_warning_handler, //empty
warn = parser_warning_handler, //empty
- file = &position_context.file,
+ file = &position_context.file,
};
- tokenizer.init(&p.tok, str, position_context.file.fullpath, parser_warning_handler);
+ tokenizer.init(&p.tok, str, position_context.file.fullpath, parser_warning_handler);
- p.tok.ch = ' ';
- p.tok.line_count = position.line;
- p.tok.offset = begin_offset;
- p.tok.read_offset = begin_offset;
+ p.tok.ch = ' ';
+ p.tok.line_count = position.line;
+ p.tok.offset = begin_offset;
+ p.tok.read_offset = begin_offset;
- tokenizer.advance_rune(&p.tok);
+ tokenizer.advance_rune(&p.tok);
if p.tok.ch == utf8.RUNE_BOM {
tokenizer.advance_rune(&p.tok);
}
- parser.advance_token(&p);
+ parser.advance_token(&p);
- context.allocator = context.temp_allocator;
+ context.allocator = context.temp_allocator;
- e := parser.parse_expr(&p, true);
+ e := parser.parse_expr(&p, true);
- position_context.call = e;
+ position_context.call = e;
}
+get_document_position :: proc {
+get_document_position_array,
+get_document_position_dynamic_array,
+get_document_position_node};
-get_document_position :: proc{
- get_document_position_array,
- get_document_position_dynamic_array,
- get_document_position_node,
-};
+get_document_position_array :: proc (array: $A/[]^$T, position_context: ^DocumentPositionContext) {
-get_document_position_array :: proc(array: $A/[]^$T, position_context: ^DocumentPositionContext) {
+ for elem, i in array {
+ get_document_position(elem, position_context);
+ }
+}
- for elem, i in array {
- get_document_position(elem, position_context);
- }
+get_document_position_dynamic_array :: proc (array: $A/[dynamic]^$T, position_context: ^DocumentPositionContext) {
+ for elem, i in array {
+ get_document_position(elem, position_context);
+ }
}
-get_document_position_dynamic_array :: proc(array: $A/[dynamic]^$T, position_context: ^DocumentPositionContext) {
+position_in_node :: proc (node: ^ast.Node, position: common.AbsolutePosition) -> bool {
+ return node != nil && node.pos.offset <= position && position <= node.end.offset;
+}
- for elem, i in array {
- get_document_position(elem, position_context);
- }
+get_document_position_node :: proc (node: ^ast.Node, position_context: ^DocumentPositionContext) {
-}
+ using ast;
-position_in_node :: proc(node: ^ast.Node, position: common.AbsolutePosition) -> bool {
- return node != nil && node.pos.offset <= position && position <= node.end.offset;
-}
+ if node == nil {
+ return;
+ }
-get_document_position_node :: proc(node: ^ast.Node, position_context: ^DocumentPositionContext) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- if !position_in_node(node, position_context.position) {
- return;
- }
-
- switch n in node.derived {
- case Bad_Expr:
- case Ident:
- position_context.identifier = node;
- case Implicit:
- case Undef:
- case Basic_Lit:
- case Ellipsis:
- get_document_position(n.expr, position_context);
- case Proc_Lit:
- get_document_position(n.type, position_context);
-
- if position_in_node(n.body, position_context.position) {
- position_context.function = cast(^Proc_Lit)node;
- get_document_position(n.body, position_context);
- }
- case Comp_Lit:
- //only set this for the parent comp literal, since we will need to walk through it to infer types.
- if position_context.parent_comp_lit == nil {
- position_context.parent_comp_lit = cast(^Comp_Lit)node;
- }
-
- position_context.comp_lit = cast(^Comp_Lit)node;
-
- get_document_position(n.type, position_context);
- get_document_position(n.elems, position_context);
- case Tag_Expr:
- get_document_position(n.expr, position_context);
- case Unary_Expr:
- get_document_position(n.expr, position_context);
- case Binary_Expr:
- position_context.binary = cast(^Binary_Expr)node;
- get_document_position(n.left, position_context);
- get_document_position(n.right, position_context);
- case Paren_Expr:
- get_document_position(n.expr, position_context);
- case Call_Expr:
- if position_context.hint == .SignatureHelp {
- position_context.call = cast(^Expr)node;
- }
- get_document_position(n.expr, position_context);
- get_document_position(n.args, position_context);
- case Selector_Expr:
- if position_context.hint == .Completion {
- if n.field != nil && n.field.pos.line == position_context.line {
- position_context.selector = n.expr;
- position_context.field = n.field;
- }
- }
-
- else if (position_context.hint == .Definition || position_context.hint == .Hover) && n.field != nil {
- position_context.selector = n.expr;
- position_context.field = n.field;
- get_document_position(n.expr, position_context);
- get_document_position(n.field, position_context);
- }
-
- else {
- get_document_position(n.expr, position_context);
- get_document_position(n.field, position_context);
- }
- case Index_Expr:
- get_document_position(n.expr, position_context);
- get_document_position(n.index, position_context);
- case Deref_Expr:
- get_document_position(n.expr, position_context);
- case Slice_Expr:
- get_document_position(n.expr, position_context);
- get_document_position(n.low, position_context);
- get_document_position(n.high, position_context);
- case Field_Value:
- get_document_position(n.field, position_context);
- get_document_position(n.value, position_context);
- case Ternary_Expr:
- get_document_position(n.cond, position_context);
- get_document_position(n.x, position_context);
- get_document_position(n.y, position_context);
- case Ternary_If_Expr:
- get_document_position(n.x, position_context);
- get_document_position(n.cond, position_context);
- get_document_position(n.y, position_context);
- case Ternary_When_Expr:
- get_document_position(n.x, position_context);
- get_document_position(n.cond, position_context);
- get_document_position(n.y, position_context);
- case Type_Assertion:
- get_document_position(n.expr, position_context);
- get_document_position(n.type, position_context);
- case Type_Cast:
- get_document_position(n.type, position_context);
- get_document_position(n.expr, position_context);
- case Auto_Cast:
- get_document_position(n.expr, position_context);
- case Bad_Stmt:
- case Empty_Stmt:
- case Expr_Stmt:
- get_document_position(n.expr, position_context);
- case Tag_Stmt:
- r := cast(^Tag_Stmt)node;
- get_document_position(r.stmt, position_context);
- case Assign_Stmt:
- position_context.assign = cast(^Assign_Stmt)node;
- get_document_position(n.lhs, position_context);
- get_document_position(n.rhs, position_context);
- case Block_Stmt:
- get_document_position(n.label, position_context);
- get_document_position(n.stmts, position_context);
- case If_Stmt:
- get_document_position(n.label, position_context);
- get_document_position(n.init, position_context);
- get_document_position(n.cond, position_context);
- get_document_position(n.body, position_context);
- get_document_position(n.else_stmt, position_context);
- case When_Stmt:
- get_document_position(n.cond, position_context);
- get_document_position(n.body, position_context);
- get_document_position(n.else_stmt, position_context);
- case Return_Stmt:
- position_context.returns = cast(^Return_Stmt)node;
- get_document_position(n.results, position_context);
- case Defer_Stmt:
- get_document_position(n.stmt, position_context);
- case For_Stmt:
- get_document_position(n.label, position_context);
- get_document_position(n.init, position_context);
- get_document_position(n.cond, position_context);
- get_document_position(n.post, position_context);
- get_document_position(n.body, position_context);
- case Range_Stmt:
- get_document_position(n.label, position_context);
- get_document_position(n.val0, position_context);
- get_document_position(n.val1, position_context);
- get_document_position(n.expr, position_context);
- get_document_position(n.body, position_context);
- case Case_Clause:
- position_context.case_clause = cast(^Case_Clause)node;
- get_document_position(n.list, position_context);
- get_document_position(n.body, position_context);
- case Switch_Stmt:
- position_context.switch_stmt = cast(^Switch_Stmt)node;
- get_document_position(n.label, position_context);
- get_document_position(n.init, position_context);
- get_document_position(n.cond, position_context);
- get_document_position(n.body, position_context);
- case Type_Switch_Stmt:
- position_context.switch_type_stmt = cast(^Type_Switch_Stmt)node;
- get_document_position(n.label, position_context);
- get_document_position(n.tag, position_context);
- get_document_position(n.expr, position_context);
- get_document_position(n.body, position_context);
- case Branch_Stmt:
- get_document_position(n.label, position_context);
- case Using_Stmt:
- get_document_position(n.list, position_context);
- case Bad_Decl:
- case Value_Decl:
- position_context.value_decl = cast(^Value_Decl)node;
- get_document_position(n.attributes, position_context);
- get_document_position(n.names, position_context);
- get_document_position(n.type, position_context);
- get_document_position(n.values, position_context);
- case Package_Decl:
- case Import_Decl:
- case Foreign_Block_Decl:
- get_document_position(n.attributes, position_context);
- get_document_position(n.foreign_library, position_context);
- get_document_position(n.body, position_context);
- case Foreign_Import_Decl:
- get_document_position(n.name, position_context);
- case Proc_Group:
- get_document_position(n.args, position_context);
- case Attribute:
- get_document_position(n.elems, position_context);
- case Field:
- get_document_position(n.names, position_context);
- get_document_position(n.type, position_context);
- get_document_position(n.default_value, position_context);
- case Field_List:
- get_document_position(n.list, position_context);
- case Typeid_Type:
- get_document_position(n.specialization, position_context);
- case Helper_Type:
- get_document_position(n.type, position_context);
- case Distinct_Type:
- get_document_position(n.type, position_context);
- case Poly_Type:
- get_document_position(n.type, position_context);
- get_document_position(n.specialization, position_context);
- case Proc_Type:
- get_document_position(n.params, position_context);
- get_document_position(n.results, position_context);
- case Pointer_Type:
- get_document_position(n.elem, position_context);
- case Array_Type:
- get_document_position(n.len, position_context);
- get_document_position(n.elem, position_context);
- case Dynamic_Array_Type:
- get_document_position(n.elem, position_context);
- case Struct_Type:
- get_document_position(n.poly_params, position_context);
- get_document_position(n.align, position_context);
- get_document_position(n.fields, position_context);
- case Union_Type:
- get_document_position(n.poly_params, position_context);
- get_document_position(n.align, position_context);
- get_document_position(n.variants, position_context);
- case Enum_Type:
- get_document_position(n.base_type, position_context);
- get_document_position(n.fields, position_context);
- case Bit_Set_Type:
- get_document_position(n.elem, position_context);
- get_document_position(n.underlying, position_context);
- case Map_Type:
- get_document_position(n.key, position_context);
- get_document_position(n.value, position_context);
- case Implicit_Selector_Expr:
- position_context.implicit = true;
- get_document_position(n.field, position_context);
- case:
- log.errorf("Unhandled node kind: %T", n);
- }
+ if !position_in_node(node, position_context.position) {
+ return;
+ }
+ switch n in node.derived {
+ case Bad_Expr:
+ case Ident:
+ position_context.identifier = node;
+ case Implicit:
+ case Undef:
+ case Basic_Lit:
+ case Ellipsis:
+ get_document_position(n.expr, position_context);
+ case Proc_Lit:
+ get_document_position(n.type, position_context);
+
+ if position_in_node(n.body, position_context.position) {
+ position_context.function = cast(^Proc_Lit)node;
+ get_document_position(n.body, position_context);
+ }
+ case Comp_Lit:
+ //only set this for the parent comp literal, since we will need to walk through it to infer types.
+ if position_context.parent_comp_lit == nil {
+ position_context.parent_comp_lit = cast(^Comp_Lit)node;
+ }
+
+ position_context.comp_lit = cast(^Comp_Lit)node;
+
+ get_document_position(n.type, position_context);
+ get_document_position(n.elems, position_context);
+ case Tag_Expr:
+ get_document_position(n.expr, position_context);
+ case Unary_Expr:
+ get_document_position(n.expr, position_context);
+ case Binary_Expr:
+ position_context.binary = cast(^Binary_Expr)node;
+ get_document_position(n.left, position_context);
+ get_document_position(n.right, position_context);
+ case Paren_Expr:
+ get_document_position(n.expr, position_context);
+ case Call_Expr:
+ if position_context.hint == .SignatureHelp {
+ position_context.call = cast(^Expr)node;
+ }
+ get_document_position(n.expr, position_context);
+ get_document_position(n.args, position_context);
+ case Selector_Expr:
+ if position_context.hint == .Completion {
+ if n.field != nil && n.field.pos.line == position_context.line {
+ position_context.selector = n.expr;
+ position_context.field = n.field;
+ }
+ } else if (position_context.hint == .Definition || position_context.hint == .Hover) && n.field != nil {
+ position_context.selector = n.expr;
+ position_context.field = n.field;
+ get_document_position(n.expr, position_context);
+ get_document_position(n.field, position_context);
+ } else {
+ get_document_position(n.expr, position_context);
+ get_document_position(n.field, position_context);
+ }
+ case Index_Expr:
+ get_document_position(n.expr, position_context);
+ get_document_position(n.index, position_context);
+ case Deref_Expr:
+ get_document_position(n.expr, position_context);
+ case Slice_Expr:
+ get_document_position(n.expr, position_context);
+ get_document_position(n.low, position_context);
+ get_document_position(n.high, position_context);
+ case Field_Value:
+ get_document_position(n.field, position_context);
+ get_document_position(n.value, position_context);
+ case Ternary_Expr:
+ get_document_position(n.cond, position_context);
+ get_document_position(n.x, position_context);
+ get_document_position(n.y, position_context);
+ case Ternary_If_Expr:
+ get_document_position(n.x, position_context);
+ get_document_position(n.cond, position_context);
+ get_document_position(n.y, position_context);
+ case Ternary_When_Expr:
+ get_document_position(n.x, position_context);
+ get_document_position(n.cond, position_context);
+ get_document_position(n.y, position_context);
+ case Type_Assertion:
+ get_document_position(n.expr, position_context);
+ get_document_position(n.type, position_context);
+ case Type_Cast:
+ get_document_position(n.type, position_context);
+ get_document_position(n.expr, position_context);
+ case Auto_Cast:
+ get_document_position(n.expr, position_context);
+ case Bad_Stmt:
+ case Empty_Stmt:
+ case Expr_Stmt:
+ get_document_position(n.expr, position_context);
+ case Tag_Stmt:
+ r := cast(^Tag_Stmt)node;
+ get_document_position(r.stmt, position_context);
+ case Assign_Stmt:
+ position_context.assign = cast(^Assign_Stmt)node;
+ get_document_position(n.lhs, position_context);
+ get_document_position(n.rhs, position_context);
+ case Block_Stmt:
+ get_document_position(n.label, position_context);
+ get_document_position(n.stmts, position_context);
+ case If_Stmt:
+ get_document_position(n.label, position_context);
+ get_document_position(n.init, position_context);
+ get_document_position(n.cond, position_context);
+ get_document_position(n.body, position_context);
+ get_document_position(n.else_stmt, position_context);
+ case When_Stmt:
+ get_document_position(n.cond, position_context);
+ get_document_position(n.body, position_context);
+ get_document_position(n.else_stmt, position_context);
+ case Return_Stmt:
+ position_context.returns = cast(^Return_Stmt)node;
+ get_document_position(n.results, position_context);
+ case Defer_Stmt:
+ get_document_position(n.stmt, position_context);
+ case For_Stmt:
+ get_document_position(n.label, position_context);
+ get_document_position(n.init, position_context);
+ get_document_position(n.cond, position_context);
+ get_document_position(n.post, position_context);
+ get_document_position(n.body, position_context);
+ case Range_Stmt:
+ get_document_position(n.label, position_context);
+ get_document_position(n.val0, position_context);
+ get_document_position(n.val1, position_context);
+ get_document_position(n.expr, position_context);
+ get_document_position(n.body, position_context);
+ case Case_Clause:
+ position_context.case_clause = cast(^Case_Clause)node;
+ get_document_position(n.list, position_context);
+ get_document_position(n.body, position_context);
+ case Switch_Stmt:
+ position_context.switch_stmt = cast(^Switch_Stmt)node;
+ get_document_position(n.label, position_context);
+ get_document_position(n.init, position_context);
+ get_document_position(n.cond, position_context);
+ get_document_position(n.body, position_context);
+ case Type_Switch_Stmt:
+ position_context.switch_type_stmt = cast(^Type_Switch_Stmt)node;
+ get_document_position(n.label, position_context);
+ get_document_position(n.tag, position_context);
+ get_document_position(n.expr, position_context);
+ get_document_position(n.body, position_context);
+ case Branch_Stmt:
+ get_document_position(n.label, position_context);
+ case Using_Stmt:
+ get_document_position(n.list, position_context);
+ case Bad_Decl:
+ case Value_Decl:
+ position_context.value_decl = cast(^Value_Decl)node;
+ get_document_position(n.attributes, position_context);
+ get_document_position(n.names, position_context);
+ get_document_position(n.type, position_context);
+ get_document_position(n.values, position_context);
+ case Package_Decl:
+ case Import_Decl:
+ case Foreign_Block_Decl:
+ get_document_position(n.attributes, position_context);
+ get_document_position(n.foreign_library, position_context);
+ get_document_position(n.body, position_context);
+ case Foreign_Import_Decl:
+ get_document_position(n.name, position_context);
+ case Proc_Group:
+ get_document_position(n.args, position_context);
+ case Attribute:
+ get_document_position(n.elems, position_context);
+ case Field:
+ get_document_position(n.names, position_context);
+ get_document_position(n.type, position_context);
+ get_document_position(n.default_value, position_context);
+ case Field_List:
+ get_document_position(n.list, position_context);
+ case Typeid_Type:
+ get_document_position(n.specialization, position_context);
+ case Helper_Type:
+ get_document_position(n.type, position_context);
+ case Distinct_Type:
+ get_document_position(n.type, position_context);
+ case Poly_Type:
+ get_document_position(n.type, position_context);
+ get_document_position(n.specialization, position_context);
+ case Proc_Type:
+ get_document_position(n.params, position_context);
+ get_document_position(n.results, position_context);
+ case Pointer_Type:
+ get_document_position(n.elem, position_context);
+ case Array_Type:
+ get_document_position(n.len, position_context);
+ get_document_position(n.elem, position_context);
+ case Dynamic_Array_Type:
+ get_document_position(n.elem, position_context);
+ case Struct_Type:
+ get_document_position(n.poly_params, position_context);
+ get_document_position(n.align, position_context);
+ get_document_position(n.fields, position_context);
+ case Union_Type:
+ get_document_position(n.poly_params, position_context);
+ get_document_position(n.align, position_context);
+ get_document_position(n.variants, position_context);
+ case Enum_Type:
+ get_document_position(n.base_type, position_context);
+ get_document_position(n.fields, position_context);
+ case Bit_Set_Type:
+ get_document_position(n.elem, position_context);
+ get_document_position(n.underlying, position_context);
+ case Map_Type:
+ get_document_position(n.key, position_context);
+ get_document_position(n.value, position_context);
+ case Implicit_Selector_Expr:
+ position_context.implicit = true;
+ get_document_position(n.field, position_context);
+ case:
+ log.errorf("Unhandled node kind: %T", n);
+ }
} \ No newline at end of file
diff --git a/src/server/background.odin b/src/server/background.odin
index d184a55..359e7d7 100644
--- a/src/server/background.odin
+++ b/src/server/background.odin
@@ -1,14 +1,10 @@
package server
/*
- Background thread that runs and ensures that the dynamic indexer is not stale
+ Background thread that runs and ensures that the dynamic indexer is not stale
*/
import "shared:index"
-background_main :: proc() {
-
-
-
-
+background_main :: proc () {
} \ No newline at end of file
diff --git a/src/server/completion.odin b/src/server/completion.odin
index 85c8c98..54c8b26 100644
--- a/src/server/completion.odin
+++ b/src/server/completion.odin
@@ -16,884 +16,781 @@ import "core:slice"
import "shared:common"
import "shared:index"
-get_completion_list :: proc(document: ^Document, position: common.Position) -> (CompletionList, bool) {
+get_completion_list :: proc (document: ^Document, position: common.Position) -> (CompletionList, bool) {
- list: CompletionList;
+ list: CompletionList;
- position_context, ok := get_document_position_context(document, position, .Completion);
+ position_context, ok := get_document_position_context(document, position, .Completion);
- ast_context := make_ast_context(document.ast, document.imports, document.package_name);
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name);
- get_globals(document.ast, &ast_context);
+ get_globals(document.ast, &ast_context);
- ast_context.current_package = ast_context.document_package;
- ast_context.value_decl = position_context.value_decl;
+ ast_context.current_package = ast_context.document_package;
+ ast_context.value_decl = position_context.value_decl;
- if position_context.function != nil {
- get_locals(document.ast, position_context.function, &ast_context, &position_context);
- }
+ if position_context.function != nil {
+ get_locals(document.ast, position_context.function, &ast_context, &position_context);
+ }
- if position_context.implicit {
- get_implicit_completion(&ast_context, &position_context, &list);
- }
+ if position_context.implicit {
+ get_implicit_completion(&ast_context, &position_context, &list);
+ } else if position_context.switch_type_stmt != nil && position_context.case_clause != nil && position_context.identifier != nil {
+ get_type_switch_Completion(&ast_context, &position_context, &list);
+ } else if position_context.comp_lit != nil && is_lhs_comp_lit(&position_context) {
+ get_comp_lit_completion(&ast_context, &position_context, &list);
+ } else if position_context.selector != nil {
+ get_selector_completion(&ast_context, &position_context, &list);
+ } else if position_context.identifier != nil {
+ get_identifier_completion(&ast_context, &position_context, &list);
+ }
- else if position_context.switch_type_stmt != nil && position_context.case_clause != nil && position_context.identifier != nil {
- get_type_switch_Completion(&ast_context, &position_context, &list);
- }
-
- else if position_context.comp_lit != nil && is_lhs_comp_lit(&position_context) {
- get_comp_lit_completion(&ast_context, &position_context, &list);
- }
-
- else if position_context.selector != nil {
- get_selector_completion(&ast_context, &position_context, &list);
- }
-
- else if position_context.identifier != nil {
- get_identifier_completion(&ast_context, &position_context, &list);
- }
-
- return list, true;
+ return list, true;
}
-is_lhs_comp_lit :: proc(position_context: ^DocumentPositionContext) -> bool {
-
- if len(position_context.comp_lit.elems) == 0 {
- return true;
- }
-
- for elem in position_context.comp_lit.elems {
-
- if position_in_node(elem, position_context.position) {
+is_lhs_comp_lit :: proc (position_context: ^DocumentPositionContext) -> bool {
- log.infof("in %v", elem.derived);
+ if len(position_context.comp_lit.elems) == 0 {
+ return true;
+ }
- if ident, ok := elem.derived.(ast.Ident); ok {
- return true;
- }
+ for elem in position_context.comp_lit.elems {
- else if field, ok := elem.derived.(ast.Field_Value); ok {
+ if position_in_node(elem, position_context.position) {
- if position_in_node(field.value, position_context.position) {
- return false;
- }
+ log.infof("in %v", elem.derived);
- }
+ if ident, ok := elem.derived.(ast.Ident); ok {
+ return true;
+ } else if field, ok := elem.derived.(ast.Field_Value); ok {
- }
+ if position_in_node(field.value, position_context.position) {
+ return false;
+ }
+ }
+ } else {
+ log.infof("not in %v", elem.derived);
+ }
+ }
- else {
- log.infof("not in %v", elem.derived);
- }
-
- }
-
- return true;
+ return true;
}
-field_exists_in_comp_lit :: proc(comp_lit: ^ast.Comp_Lit, name: string) -> bool {
-
- for elem in comp_lit.elems {
-
- if field, ok := elem.derived.(ast.Field_Value); ok {
-
- if field.field != nil {
-
- if ident, ok := field.field.derived.(ast.Ident); ok {
+field_exists_in_comp_lit :: proc (comp_lit: ^ast.Comp_Lit, name: string) -> bool {
- if ident.name == name {
- return true;
- }
+ for elem in comp_lit.elems {
- }
+ if field, ok := elem.derived.(ast.Field_Value); ok {
+ if field.field != nil {
- }
+ if ident, ok := field.field.derived.(ast.Ident); ok {
- }
+ if ident.name == name {
+ return true;
+ }
+ }
+ }
+ }
+ }
- }
-
- return false;
+ return false;
}
-get_directive_completion :: proc(ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
-
- list.isIncomplete = false;
-
- items := make([dynamic] CompletionItem, context.temp_allocator);
-
- /*
- Right now just return all the possible completions, but later on I should give the context specific ones
- */
-
- directive_list := [] string {
- "file",
- "line",
- "packed",
- "raw_union",
- "align",
- "no_nil",
- "complete",
- "no_alias",
- "caller_location",
- "require_results",
- "type",
- "bounds_check",
- "no_bounds_check",
- "assert",
- "defined",
- "procedure",
- "load",
- "partial",
- };
-
- for elem in directive_list {
-
- item := CompletionItem {
- detail = elem,
- label = elem,
- kind = .EnumMember,
- };
-
- append(&items, item);
-
- }
-
-
+get_directive_completion :: proc (ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
+
+ list.isIncomplete = false;
+
+ items := make([dynamic]CompletionItem, context.temp_allocator);
+
+ /*
+ Right now just return all the possible completions, but later on I should give the context specific ones
+ */
+
+ directive_list := []string {
+ "file",
+ "line",
+ "packed",
+ "raw_union",
+ "align",
+ "no_nil",
+ "complete",
+ "no_alias",
+ "caller_location",
+ "require_results",
+ "type",
+ "bounds_check",
+ "no_bounds_check",
+ "assert",
+ "defined",
+ "procedure",
+ "load",
+ "partial",
+ };
+
+ for elem in directive_list {
+
+ item := CompletionItem {
+ detail = elem,
+ label = elem,
+ kind = .EnumMember,
+ };
+
+ append(&items, item);
+ }
}
-get_comp_lit_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
-
- items := make([dynamic] CompletionItem, context.temp_allocator);
+get_comp_lit_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
- if position_context.parent_comp_lit.type == nil {
- return;
- }
+ items := make([dynamic]CompletionItem, context.temp_allocator);
- if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok {
+ if position_context.parent_comp_lit.type == nil {
+ return;
+ }
- if comp_symbol, ok := resolve_type_comp_literal(ast_context, position_context, symbol, position_context.parent_comp_lit); ok {
+ if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok {
- #partial switch v in comp_symbol.value {
- case index.SymbolStructValue:
- for name, i in v.names {
+ if comp_symbol, ok := resolve_type_comp_literal(ast_context, position_context, symbol, position_context.parent_comp_lit); ok {
- //ERROR no completion on name and hover
- if resolved, ok := resolve_type_expression(ast_context, v.types[i]); ok {
+ #partial switch v in comp_symbol.value {
+ case index.SymbolStructValue:
+ for name, i in v.names {
- if field_exists_in_comp_lit(position_context.comp_lit, name) {
- continue;
- }
+ //ERROR no completion on name and hover
+ if resolved, ok := resolve_type_expression(ast_context, v.types[i]); ok {
- resolved.signature = index.node_to_string(v.types[i]);
- resolved.pkg = comp_symbol.name;
- resolved.name = name;
- resolved.type = .Field;
+ if field_exists_in_comp_lit(position_context.comp_lit, name) {
+ continue;
+ }
- item := CompletionItem {
- label = resolved.name,
- kind = cast(CompletionItemKind) resolved.type,
- detail = concatenate_symbols_information(ast_context, resolved),
- documentation = resolved.doc,
- };
+ resolved.signature = index.node_to_string(v.types[i]);
+ resolved.pkg = comp_symbol.name;
+ resolved.name = name;
+ resolved.type = .Field;
- append(&items, item);
- }
- }
- }
- }
- }
+ item := CompletionItem {
+ label = resolved.name,
+ kind = cast(CompletionItemKind)resolved.type,
+ detail = concatenate_symbols_information(ast_context, resolved),
+ documentation = resolved.doc,
+ };
+ append(&items, item);
+ }
+ }
+ }
+ }
+ }
- list.items = items[:];
+ list.items = items[:];
}
-get_selector_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_selector_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
- items := make([dynamic] CompletionItem, context.temp_allocator);
+ items := make([dynamic]CompletionItem, context.temp_allocator);
- ast_context.current_package = ast_context.document_package;
+ ast_context.current_package = ast_context.document_package;
- if ident, ok := position_context.selector.derived.(ast.Ident); ok {
+ if ident, ok := position_context.selector.derived.(ast.Ident); ok {
- if !resolve_ident_is_variable(ast_context, ident) && !resolve_ident_is_package(ast_context, ident) && ident.name != "" {
- return;
- }
+ if !resolve_ident_is_variable(ast_context, ident) && !resolve_ident_is_package(ast_context, ident) && ident.name != "" {
+ return;
+ }
+ }
- }
+ symbols := make([dynamic]index.Symbol, context.temp_allocator);
- symbols := make([dynamic] index.Symbol, context.temp_allocator);
+ selector: index.Symbol;
+ ok: bool;
- selector: index.Symbol;
- ok: bool;
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
- ast_context.use_locals = true;
- ast_context.use_globals = true;
+ selector, ok = resolve_type_expression(ast_context, position_context.selector);
- selector, ok = resolve_type_expression(ast_context, position_context.selector);
+ if !ok {
+ return;
+ }
- if !ok {
- return;
- }
+ if selector.pkg != "" {
+ ast_context.current_package = selector.pkg;
+ } else {
+ ast_context.current_package = ast_context.document_package;
+ }
- if selector.pkg != "" {
- ast_context.current_package = selector.pkg;
- }
+ field: string;
+
+ if position_context.field != nil {
- else {
- ast_context.current_package = ast_context.document_package;
- }
+ switch v in position_context.field.derived {
+ case ast.Ident:
+ field = v.name;
+ }
+ }
- field: string;
+ if s, ok := selector.value.(index.SymbolProcedureValue); ok {
+ if len(s.return_types) == 1 {
+ if selector, ok = resolve_type_expression(ast_context, s.return_types[0].type); !ok {
+ return;
+ }
+ }
+ }
- if position_context.field != nil {
+ #partial switch v in selector.value {
+ case index.SymbolUnionValue:
+ list.isIncomplete = false;
- switch v in position_context.field.derived {
- case ast.Ident:
- field = v.name;
- }
+ for name in v.names {
+ symbol: index.Symbol;
+ symbol.name = fmt.aprintf("(%v)", name);
+ symbol.pkg = selector.name;
+ symbol.type = .EnumMember;
+ append(&symbols, symbol);
+ }
- }
+ case index.SymbolEnumValue:
+ list.isIncomplete = false;
+ for name in v.names {
+ symbol: index.Symbol;
+ symbol.name = name;
+ symbol.pkg = selector.name;
+ symbol.type = .EnumMember;
+ append(&symbols, symbol);
+ }
- if s, ok := selector.value.(index.SymbolProcedureValue); ok {
- if len(s.return_types) == 1 {
- if selector, ok = resolve_type_expression(ast_context, s.return_types[0].type); !ok {
- return;
- }
- }
- }
+ case index.SymbolStructValue:
+ list.isIncomplete = false;
+ for name, i in v.names {
- #partial switch v in selector.value {
- case index.SymbolUnionValue:
- list.isIncomplete = false;
+ if selector.pkg != "" {
+ ast_context.current_package = selector.pkg;
+ } else {
+ ast_context.current_package = ast_context.document_package;
+ }
- for name in v.names {
- symbol: index.Symbol;
- symbol.name = fmt.aprintf("(%v)", name);
- symbol.pkg = selector.name;
- symbol.type = .EnumMember;
- append(&symbols, symbol);
- }
+ if symbol, ok := resolve_type_expression(ast_context, v.types[i]); ok {
- case index.SymbolEnumValue:
- list.isIncomplete = false;
+ if expr, ok := position_context.selector.derived.(ast.Selector_Expr); ok {
- for name in v.names {
- symbol: index.Symbol;
- symbol.name = name;
- symbol.pkg = selector.name;
- symbol.type = .EnumMember;
- append(&symbols, symbol);
- }
+ if expr.op.text == "->" && symbol.type != .Function {
+ continue;
+ }
+ }
- case index.SymbolStructValue:
- list.isIncomplete = false;
+ if position_context.arrow && symbol.type != .Function {
+ continue;
+ }
- for name, i in v.names {
-
- if selector.pkg != "" {
- ast_context.current_package = selector.pkg;
- }
-
- else {
- ast_context.current_package = ast_context.document_package;
- }
-
- if symbol, ok := resolve_type_expression(ast_context, v.types[i]); ok {
-
- if expr, ok := position_context.selector.derived.(ast.Selector_Expr); ok {
-
- if expr.op.text == "->" && symbol.type != .Function {
- continue;
- }
-
- }
-
- if position_context.arrow && symbol.type != .Function {
- continue;
- }
-
- symbol.name = name;
- symbol.type = .Field;
- symbol.pkg = selector.name;
- symbol.signature = index.node_to_string(v.types[i]);
- append(&symbols, symbol);
- }
-
- else {
- //just give some generic symbol with name.
- symbol: index.Symbol;
- symbol.name = name;
- symbol.type = .Field;
- append(&symbols, symbol);
- }
-
- }
-
-
- case index.SymbolPackageValue:
-
- list.isIncomplete = true;
-
- if searched, ok := index.fuzzy_search(field, {selector.pkg}); ok {
-
- for search in searched {
- append(&symbols, search.symbol);
- }
-
- }
-
- else {
- log.errorf("Failed to fuzzy search, field: %v, package: %v", field, selector.pkg);
- return;
- }
-
- }
-
- for symbol, i in symbols {
-
- item := CompletionItem {
- label = symbol.name,
- kind = cast(CompletionItemKind) symbol.type,
- detail = concatenate_symbols_information(ast_context, symbol),
- documentation = symbol.doc,
- };
-
- append(&items, item);
- }
-
- list.items = items[:];
+ symbol.name = name;
+ symbol.type = .Field;
+ symbol.pkg = selector.name;
+ symbol.signature = index.node_to_string(v.types[i]);
+ append(&symbols, symbol);
+ } else {
+ //just give some generic symbol with name.
+ symbol: index.Symbol;
+ symbol.name = name;
+ symbol.type = .Field;
+ append(&symbols, symbol);
+ }
+ }
+
+ case index.SymbolPackageValue:
+
+ list.isIncomplete = true;
+
+ if searched, ok := index.fuzzy_search(field, {selector.pkg}); ok {
+
+ for search in searched {
+ append(&symbols, search.symbol);
+ }
+ } else {
+ log.errorf("Failed to fuzzy search, field: %v, package: %v", field, selector.pkg);
+ return;
+ }
+ }
+
+ for symbol, i in symbols {
+
+ item := CompletionItem {
+ label = symbol.name,
+ kind = cast(CompletionItemKind)symbol.type,
+ detail = concatenate_symbols_information(ast_context, symbol),
+ documentation = symbol.doc,
+ };
+
+ append(&items, item);
+ }
+
+ list.items = items[:];
}
-unwrap_enum :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
-
- if enum_symbol, ok := resolve_type_expression(ast_context, node); ok {
+unwrap_enum :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
- if enum_value, ok := enum_symbol.value.(index.SymbolEnumValue); ok {
- return enum_value, true;
- }
+ if enum_symbol, ok := resolve_type_expression(ast_context, node); ok {
- }
+ if enum_value, ok := enum_symbol.value.(index.SymbolEnumValue); ok {
+ return enum_value, true;
+ }
+ }
- return {}, false;
+ return {}, false;
}
-unwrap_union :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolUnionValue, bool) {
-
- if union_symbol, ok := resolve_type_expression(ast_context, node); ok {
+unwrap_union :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolUnionValue, bool) {
- if union_value, ok := union_symbol.value.(index.SymbolUnionValue); ok {
- return union_value, true;
- }
+ if union_symbol, ok := resolve_type_expression(ast_context, node); ok {
- }
-
- return {}, false;
+ if union_value, ok := union_symbol.value.(index.SymbolUnionValue); ok {
+ return union_value, true;
+ }
+ }
+ return {}, false;
}
-unwrap_bitset :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
-
- if bitset_symbol, ok := resolve_type_expression(ast_context, node); ok {
+unwrap_bitset :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
- if bitset_value, ok := bitset_symbol.value.(index.SymbolBitSetValue); ok {
+ if bitset_symbol, ok := resolve_type_expression(ast_context, node); ok {
- if enum_symbol, ok := resolve_type_expression(ast_context, bitset_value.expr); ok {
+ if bitset_value, ok := bitset_symbol.value.(index.SymbolBitSetValue); ok {
- if enum_value, ok := enum_symbol.value.(index.SymbolEnumValue); ok {
- return enum_value, true;
- }
- }
+ if enum_symbol, ok := resolve_type_expression(ast_context, bitset_value.expr); ok {
- }
+ if enum_value, ok := enum_symbol.value.(index.SymbolEnumValue); ok {
+ return enum_value, true;
+ }
+ }
+ }
+ }
- }
-
- return {}, false;
+ return {}, false;
}
-get_implicit_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
-
- items := make([dynamic] CompletionItem, context.temp_allocator);
-
- list.isIncomplete = false;
-
- selector: index.Symbol;
-
- ast_context.use_locals = true;
- ast_context.use_globals = true;
-
- if selector.pkg != "" {
- ast_context.current_package = selector.pkg;
- }
-
- else {
- ast_context.current_package = ast_context.document_package;
- }
-
- //enum switch infer
- if position_context.switch_stmt != nil && position_context.case_clause != nil && position_context.switch_stmt.cond != nil {
-
- used_enums := make(map [string]bool, 5, context.temp_allocator);
-
- if block, ok := position_context.switch_stmt.body.derived.(ast.Block_Stmt); ok {
-
- for stmt in block.stmts {
-
- if case_clause, ok := stmt.derived.(ast.Case_Clause); ok {
-
- for name in case_clause.list {
-
- if implicit, ok := name.derived.(ast.Implicit_Selector_Expr); ok {
- used_enums[implicit.field.name] = true;
- }
- }
- }
- }
- }
-
- if enum_value, ok := unwrap_enum(ast_context, position_context.switch_stmt.cond); ok {
-
- for name in enum_value.names {
-
- if name in used_enums {
- continue;
- }
-
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
-
- append(&items, item);
-
- }
-
- }
-
- }
-
- else if position_context.comp_lit != nil && position_context.binary != nil && (position_context.binary.op.text == "&" ) {
- //bitsets
- context_node: ^ast.Expr;
- bitset_node: ^ast.Expr;
+get_implicit_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
- if position_in_node(position_context.binary.right, position_context.position) {
- context_node = position_context.binary.right;
- bitset_node = position_context.binary.left;
- }
+ items := make([dynamic]CompletionItem, context.temp_allocator);
- else if position_in_node(position_context.binary.left, position_context.position) {
- context_node = position_context.binary.left;
- bitset_node = position_context.binary.right;
- }
+ list.isIncomplete = false;
- if context_node != nil && bitset_node != nil {
+ selector: index.Symbol;
- if _, ok := context_node.derived.(ast.Comp_Lit); ok {
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
- if value, ok := unwrap_bitset(ast_context, bitset_node); ok {
+ if selector.pkg != "" {
+ ast_context.current_package = selector.pkg;
+ } else {
+ ast_context.current_package = ast_context.document_package;
+ }
- for name in value.names {
+ //enum switch infer
+ if position_context.switch_stmt != nil && position_context.case_clause != nil && position_context.switch_stmt.cond != nil {
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
+ used_enums := make(map[string]bool, 5, context.temp_allocator);
- append(&items, item);
- }
- }
- }
- }
- }
+ if block, ok := position_context.switch_stmt.body.derived.(ast.Block_Stmt); ok {
- else if position_context.comp_lit != nil {
+ for stmt in block.stmts {
- if position_context.parent_comp_lit.type == nil {
- return;
- }
+ if case_clause, ok := stmt.derived.(ast.Case_Clause); ok {
- field_name: string;
+ for name in case_clause.list {
- for elem in position_context.comp_lit.elems {
+ if implicit, ok := name.derived.(ast.Implicit_Selector_Expr); ok {
+ used_enums[implicit.field.name] = true;
+ }
+ }
+ }
+ }
+ }
- if position_in_node(elem, position_context.position) {
+ if enum_value, ok := unwrap_enum(ast_context, position_context.switch_stmt.cond); ok {
- if field, ok := elem.derived.(ast.Field_Value); ok {
- field_name = field.field.derived.(ast.Ident).name;
- }
+ for name in enum_value.names {
- }
+ if name in used_enums {
+ continue;
+ }
- }
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- if field_name == "" {
- return;
- }
+ append(&items, item);
+ }
+ }
+ } else if position_context.comp_lit != nil && position_context.binary != nil && (position_context.binary.op.text == "&") {
+ //bitsets
+ context_node: ^ast.Expr;
+ bitset_node: ^ast.Expr;
- if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok {
+ if position_in_node(position_context.binary.right, position_context.position) {
+ context_node = position_context.binary.right;
+ bitset_node = position_context.binary.left;
+ } else if position_in_node(position_context.binary.left, position_context.position) {
+ context_node = position_context.binary.left;
+ bitset_node = position_context.binary.right;
+ }
- if comp_symbol, ok := resolve_type_comp_literal(ast_context, position_context, symbol, position_context.parent_comp_lit); ok {
+ if context_node != nil && bitset_node != nil {
- if s, ok := comp_symbol.value.(index.SymbolStructValue); ok {
+ if _, ok := context_node.derived.(ast.Comp_Lit); ok {
- for name, i in s.names {
+ if value, ok := unwrap_bitset(ast_context, bitset_node); ok {
- if name != field_name {
- continue;
- }
+ for name in value.names {
- if enum_value, ok := unwrap_enum(ast_context, s.types[i]); ok {
- for enum_name in enum_value.names {
- item := CompletionItem {
- label = enum_name,
- kind = .EnumMember,
- detail = enum_name,
- };
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- append(&items, item);
- }
- }
- }
- }
- }
- }
- }
+ append(&items, item);
+ }
+ }
+ }
+ }
+ } else if position_context.comp_lit != nil {
- else if position_context.binary != nil && (position_context.binary.op.text == "==" || position_context.binary.op.text == "!=") {
+ if position_context.parent_comp_lit.type == nil {
+ return;
+ }
- context_node: ^ast.Expr;
- enum_node: ^ast.Expr;
+ field_name: string;
- if position_in_node(position_context.binary.right, position_context.position) {
- context_node = position_context.binary.right;
- enum_node = position_context.binary.left;
- }
+ for elem in position_context.comp_lit.elems {
- else if position_in_node(position_context.binary.left, position_context.position) {
- context_node = position_context.binary.left;
- enum_node = position_context.binary.right;
- }
+ if position_in_node(elem, position_context.position) {
- if context_node != nil && enum_node != nil {
+ if field, ok := elem.derived.(ast.Field_Value); ok {
+ field_name = field.field.derived.(ast.Ident).name;
+ }
+ }
+ }
- if enum_value, ok := unwrap_enum(ast_context, enum_node); ok {
+ if field_name == "" {
+ return;
+ }
- for name in enum_value.names {
+ if symbol, ok := resolve_type_expression(ast_context, position_context.parent_comp_lit.type); ok {
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
+ if comp_symbol, ok := resolve_type_comp_literal(ast_context, position_context, symbol, position_context.parent_comp_lit); ok {
- append(&items, item);
- }
- }
- }
- }
+ if s, ok := comp_symbol.value.(index.SymbolStructValue); ok {
- else if position_context.assign != nil && position_context.assign.rhs != nil && position_context.assign.lhs != nil {
+ for name, i in s.names {
- rhs_index: int;
+ if name != field_name {
+ continue;
+ }
- for elem in position_context.assign.rhs {
+ if enum_value, ok := unwrap_enum(ast_context, s.types[i]); ok {
+ for enum_name in enum_value.names {
+ item := CompletionItem {
+ label = enum_name,
+ kind = .EnumMember,
+ detail = enum_name,
+ };
- if position_in_node(elem, position_context.position) {
- break;
- }
+ append(&items, item);
+ }
+ }
+ }
+ }
+ }
+ }
+ } else if position_context.binary != nil && (position_context.binary.op.text == "==" || position_context.binary.op.text == "!=") {
- else {
+ context_node: ^ast.Expr;
+ enum_node: ^ast.Expr;
- //procedures are the only types that can return more than one value
- if symbol, ok := resolve_type_expression(ast_context, elem); ok {
+ if position_in_node(position_context.binary.right, position_context.position) {
+ context_node = position_context.binary.right;
+ enum_node = position_context.binary.left;
+ } else if position_in_node(position_context.binary.left, position_context.position) {
+ context_node = position_context.binary.left;
+ enum_node = position_context.binary.right;
+ }
- if procedure, ok := symbol.value.(index.SymbolProcedureValue); ok {
+ if context_node != nil && enum_node != nil {
- if procedure.return_types == nil {
- return;
- }
+ if enum_value, ok := unwrap_enum(ast_context, enum_node); ok {
- rhs_index += len(procedure.return_types);
- }
+ for name in enum_value.names {
- else {
- rhs_index += 1;
- }
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- }
+ append(&items, item);
+ }
+ }
+ }
+ } else if position_context.assign != nil && position_context.assign.rhs != nil && position_context.assign.lhs != nil {
- }
+ rhs_index: int;
- }
+ for elem in position_context.assign.rhs {
- if len(position_context.assign.lhs) > rhs_index {
+ if position_in_node(elem, position_context.position) {
+ break;
+ } else {
- if enum_value, ok := unwrap_enum(ast_context, position_context.assign.lhs[rhs_index]); ok {
+ //procedures are the only types that can return more than one value
+ if symbol, ok := resolve_type_expression(ast_context, elem); ok {
- for name in enum_value.names {
+ if procedure, ok := symbol.value.(index.SymbolProcedureValue); ok {
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
+ if procedure.return_types == nil {
+ return;
+ }
- append(&items, item);
+ rhs_index += len(procedure.return_types);
+ } else {
+ rhs_index += 1;
+ }
+ }
+ }
+ }
- }
+ if len(position_context.assign.lhs) > rhs_index {
- }
+ if enum_value, ok := unwrap_enum(ast_context, position_context.assign.lhs[rhs_index]); ok {
- }
+ for name in enum_value.names {
- }
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- else if position_context.returns != nil && position_context.function != nil {
+ append(&items, item);
+ }
+ }
+ }
+ } else if position_context.returns != nil && position_context.function != nil {
- return_index: int;
+ return_index: int;
- if position_context.returns.results == nil {
- return;
- }
+ if position_context.returns.results == nil {
+ return;
+ }
- for result, i in position_context.returns.results {
+ for result, i in position_context.returns.results {
+
+ if position_in_node(result, position_context.position) {
+ return_index = i;
+ break;
+ }
+ }
- if position_in_node(result, position_context.position) {
- return_index = i;
- break;
- }
+ if position_context.function.type == nil {
+ return;
+ }
- }
+ if position_context.function.type.results == nil {
+ return;
+ }
- if position_context.function.type == nil {
- return;
- }
+ if len(position_context.function.type.results.list) > return_index {
- if position_context.function.type.results == nil {
- return;
- }
+ if return_symbol, ok := resolve_type_expression(ast_context, position_context.function.type.results.list[return_index].type); ok {
- if len(position_context.function.type.results.list) > return_index {
+ #partial switch v in return_symbol.value {
+ case index.SymbolEnumValue:
+ for name in v.names {
- if return_symbol, ok := resolve_type_expression(ast_context, position_context.function.type.results.list[return_index].type); ok {
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- #partial switch v in return_symbol.value {
- case index.SymbolEnumValue:
- for name in v.names {
+ append(&items, item);
+ }
+ }
+ }
+ }
+ }
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
-
- append(&items, item);
-
- }
-
- }
-
- }
-
- }
-
-
-
-
- }
-
-
- list.items = items[:];
+ list.items = items[:];
}
-get_identifier_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
-
- items := make([dynamic] CompletionItem, context.temp_allocator);
+get_identifier_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
- list.isIncomplete = true;
+ items := make([dynamic]CompletionItem, context.temp_allocator);
- CombinedResult :: struct {
- score: f32,
- symbol: index.Symbol,
- variable: ^ast.Ident,
- };
+ list.isIncomplete = true;
- combined_sort_interface :: proc(s: ^[dynamic] CombinedResult) -> sort.Interface {
- return sort.Interface{
- collection = rawptr(s),
- len = proc(it: sort.Interface) -> int {
- s := (^[dynamic] CombinedResult)(it.collection);
- return len(s^);
- },
- less = proc(it: sort.Interface, i, j: int) -> bool {
- s := (^[dynamic] CombinedResult)(it.collection);
- return s[i].score > s[j].score;
- },
- swap = proc(it: sort.Interface, i, j: int) {
- s := (^[dynamic] CombinedResult)(it.collection);
- s[i], s[j] = s[j], s[i];
- },
- };
- }
+ CombinedResult :: struct {
+ score: f32,
+ symbol: index.Symbol,
+ variable: ^ast.Ident,
+ };
- combined := make([dynamic] CombinedResult);
+ combined_sort_interface :: proc (s: ^[dynamic]CombinedResult) -> sort.Interface {
+ return sort.Interface {
+ collection = rawptr(s),
+ len = proc (it: sort.Interface) -> int {
+ s := (^[dynamic]CombinedResult)(it.collection);
+ return len(s^);
+ },
+ less = proc (it: sort.Interface, i, j: int) -> bool {
+ s := (^[dynamic]CombinedResult)(it.collection);
+ return s[i].score > s[j].score;
+ },
+ swap = proc (it: sort.Interface, i, j: int) {
+ s := (^[dynamic]CombinedResult)(it.collection);
+ s[i], s[j] = s[j], s[i];
+ },
+ };
+ };
- lookup := "";
+ combined := make([dynamic]CombinedResult);
- if ident, ok := position_context.identifier.derived.(ast.Ident); ok {
- lookup = ident.name;
- }
+ lookup := "";
- pkgs := make([dynamic] string, context.temp_allocator);
+ if ident, ok := position_context.identifier.derived.(ast.Ident); ok {
+ lookup = ident.name;
+ }
- usings := get_using_packages(ast_context);
+ pkgs := make([dynamic]string, context.temp_allocator);
- for u in usings {
- append(&pkgs, u);
- }
+ usings := get_using_packages(ast_context);
- append(&pkgs, ast_context.document_package);
+ for u in usings {
+ append(&pkgs, u);
+ }
- if results, ok := index.fuzzy_search(lookup, pkgs[:]); ok {
+ append(&pkgs, ast_context.document_package);
- for r in results {
- append(&combined, CombinedResult { score = r.score, symbol = r.symbol});
- }
+ if results, ok := index.fuzzy_search(lookup, pkgs[:]); ok {
- }
+ for r in results {
+ append(&combined, CombinedResult {score = r.score, symbol = r.symbol});
+ }
+ }
- matcher := common.make_fuzzy_matcher(lookup);
+ matcher := common.make_fuzzy_matcher(lookup);
- global: for k, v in ast_context.globals {
+ global: for k, v in ast_context.globals {
- //combined is sorted and should do binary search instead.
- for result in combined {
- if result.symbol.name == k {
- continue global;
- }
- }
+ //combined is sorted and should do binary search instead.
+ for result in combined {
+ if result.symbol.name == k {
+ continue global;
+ }
+ }
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
- ident := index.new_type(ast.Ident, v.pos, v.end, context.temp_allocator);
- ident.name = k;
+ ident := index.new_type(ast.Ident, v.pos, v.end, context.temp_allocator);
+ ident.name = k;
- if symbol, ok := resolve_type_identifier(ast_context, ident^); ok {
- symbol.name = ident.name;
- symbol.signature = get_signature(ast_context, ident^, symbol);
+ if symbol, ok := resolve_type_identifier(ast_context, ident^); ok {
+ symbol.name = ident.name;
+ symbol.signature = get_signature(ast_context, ident^, symbol);
- if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
- append(&combined, CombinedResult { score = score * 1.1, symbol = symbol, variable = ident });
- }
+ if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
+ append(&combined, CombinedResult {score = score * 1.1, symbol = symbol, variable = ident});
+ }
+ }
+ }
- }
- }
+ for k, v in ast_context.locals {
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
+ ident := index.new_type(ast.Ident, {offset = position_context.position}, {offset = position_context.position}, context.temp_allocator);
+ ident.name = k;
- for k, v in ast_context.locals {
+ if symbol, ok := resolve_type_identifier(ast_context, ident^); ok {
+ symbol.name = ident.name;
+ symbol.signature = get_signature(ast_context, ident^, symbol);
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
+ if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
+ append(&combined, CombinedResult {score = score * 1.1, symbol = symbol, variable = ident});
+ }
+ }
+ }
+ for pkg in ast_context.imports {
- ident := index.new_type(ast.Ident, { offset = position_context.position }, { offset = position_context.position }, context.temp_allocator);
- ident.name = k;
+ symbol: index.Symbol;
- if symbol, ok := resolve_type_identifier(ast_context, ident^); ok {
- symbol.name = ident.name;
- symbol.signature = get_signature(ast_context, ident^, symbol);
+ symbol.name = pkg.base;
+ symbol.type = .Package;
- if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
- append(&combined, CombinedResult { score = score * 1.1, symbol = symbol, variable = ident });
- }
+ if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
+ append(&combined, CombinedResult {score = score * 1.1, symbol = symbol});
+ }
+ }
- }
- }
+ sort.sort(combined_sort_interface(&combined));
- for pkg in ast_context.imports {
+ //hard code for now
+ top_results := combined[0:(min(20, len(combined)))];
- symbol: index.Symbol;
+ for result in top_results {
- symbol.name = pkg.base;
- symbol.type = .Package;
+ item := CompletionItem {
+ label = result.symbol.name,
+ detail = concatenate_symbols_information(ast_context, result.symbol),
+ };
- if score, ok := common.fuzzy_match(matcher, symbol.name); ok {
- append(&combined, CombinedResult { score = score * 1.1, symbol = symbol });
- }
- }
+ if result.variable != nil {
+ if ok := resolve_ident_is_variable(ast_context, result.variable^); ok {
+ item.kind = .Variable;
+ } else {
+ item.kind = cast(CompletionItemKind)result.symbol.type;
+ }
+ } else {
+ item.kind = cast(CompletionItemKind)result.symbol.type;
+ }
- sort.sort(combined_sort_interface(&combined));
+ append(&items, item);
+ }
- //hard code for now
- top_results := combined[0:(min(20, len(combined)))];
-
- for result in top_results {
-
- item := CompletionItem {
- label = result.symbol.name,
- detail = concatenate_symbols_information(ast_context, result.symbol),
- };
-
- if result.variable != nil {
- if ok := resolve_ident_is_variable(ast_context, result.variable^); ok {
- item.kind = .Variable;
- }
-
- else {
- item.kind = cast(CompletionItemKind)result.symbol.type;
- }
- }
-
- else {
- item.kind = cast(CompletionItemKind)result.symbol.type;
- }
-
- append(&items, item);
- }
-
- list.items = items[:];
+ list.items = items[:];
}
-get_package_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
-
-
-
-
+get_package_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
}
-get_type_switch_Completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
-
+get_type_switch_Completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
- items := make([dynamic] CompletionItem, context.temp_allocator);
- list.isIncomplete = false;
+ items := make([dynamic]CompletionItem, context.temp_allocator);
+ list.isIncomplete = false;
- if assign, ok := position_context.switch_type_stmt.tag.derived.(ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 {
+ if assign, ok := position_context.switch_type_stmt.tag.derived.(ast.Assign_Stmt); ok && assign.rhs != nil && len(assign.rhs) == 1 {
- if union_value, ok := unwrap_union(ast_context, assign.rhs[0]); ok {
+ if union_value, ok := unwrap_union(ast_context, assign.rhs[0]); ok {
- for name in union_value.names {
+ for name in union_value.names {
- item := CompletionItem {
- label = name,
- kind = .EnumMember,
- detail = name,
- };
+ item := CompletionItem {
+ label = name,
+ kind = .EnumMember,
+ detail = name,
+ };
- append(&items, item);
- }
- }
- }
+ append(&items, item);
+ }
+ }
+ }
- list.items = items[:];
+ list.items = items[:];
} \ No newline at end of file
diff --git a/src/server/documents.odin b/src/server/documents.odin
index dd4ef85..ab8b54d 100644
--- a/src/server/documents.odin
+++ b/src/server/documents.odin
@@ -15,454 +15,424 @@ import "intrinsics"
import "shared:common"
ParserError :: struct {
- message: string,
- line: int,
- column: int,
- file: string,
- offset: int,
-};
-
+ message: string,
+ line: int,
+ column: int,
+ file: string,
+ offset: int,
+}
Package :: struct {
- name: string, //the entire absolute path to the directory
- base: string,
-};
+ name: string, //the entire absolute path to the directory
+ base: string,
+}
Document :: struct {
- uri: common.Uri,
- text: [] u8,
- used_text: int, //allow for the text to be reallocated with more data than needed
- client_owned: bool,
- diagnosed_errors: bool,
- ast: ast.File,
- imports: [] Package,
- package_name: string,
- allocator: ^common.Scratch_Allocator, //because does not support freeing I use arena allocators for each document
- operating_on: int, //atomic
-};
+ uri: common.Uri,
+ text: []u8,
+ used_text: int, //allow for the text to be reallocated with more data than needed
+ client_owned: bool,
+ diagnosed_errors: bool,
+ ast: ast.File,
+ imports: []Package,
+ package_name: string,
+ allocator: ^common.Scratch_Allocator, //because does not support freeing I use arena allocators for each document
+ operating_on: int, //atomic
+}
DocumentStorage :: struct {
- documents: map [string] Document,
- free_allocators: [dynamic] ^common.Scratch_Allocator,
-};
+ documents: map[string]Document,
+ free_allocators: [dynamic]^common.Scratch_Allocator,
+}
document_storage: DocumentStorage;
-document_storage_shutdown :: proc() {
+document_storage_shutdown :: proc () {
- for k, v in document_storage.documents {
- delete(k);
- }
+ for k, v in document_storage.documents {
+ delete(k);
+ }
- for alloc in document_storage.free_allocators {
- common.scratch_allocator_destroy(alloc);
- free(alloc);
- }
+ for alloc in document_storage.free_allocators {
+ common.scratch_allocator_destroy(alloc);
+ free(alloc);
+ }
- delete(document_storage.free_allocators);
- delete(document_storage.documents);
+ delete(document_storage.free_allocators);
+ delete(document_storage.documents);
}
-document_get_allocator :: proc() -> ^common.Scratch_Allocator {
-
- if len(document_storage.free_allocators) > 0 {
- return pop(&document_storage.free_allocators);
- }
-
- else {
- allocator := new(common.Scratch_Allocator);
- common.scratch_allocator_init(allocator, mem.megabytes(1));
- return allocator;
- }
+document_get_allocator :: proc () -> ^common.Scratch_Allocator {
+ if len(document_storage.free_allocators) > 0 {
+ return pop(&document_storage.free_allocators);
+ } else {
+ allocator := new(common.Scratch_Allocator);
+ common.scratch_allocator_init(allocator, mem.megabytes(1));
+ return allocator;
+ }
}
-document_free_allocator :: proc(allocator: ^common.Scratch_Allocator) {
- append(&document_storage.free_allocators, allocator);
+document_free_allocator :: proc (allocator: ^common.Scratch_Allocator) {
+ append(&document_storage.free_allocators, allocator);
}
-document_get :: proc(uri_string: string) -> ^Document {
+document_get :: proc (uri_string: string) -> ^Document {
- uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
+ uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
- if !parsed_ok {
- return nil;
- }
+ if !parsed_ok {
+ return nil;
+ }
- document := &document_storage.documents[uri.path];
+ document := &document_storage.documents[uri.path];
- if document == nil {
- return nil;
- }
+ if document == nil {
+ return nil;
+ }
- intrinsics.atomic_add(&document.operating_on, 1);
+ intrinsics.atomic_add(&document.operating_on, 1);
- return document;
+ return document;
}
-document_release :: proc(document: ^Document) {
-
- if document != nil {
- intrinsics.atomic_sub(&document.operating_on, 1);
- }
+document_release :: proc (document: ^Document) {
+ if document != nil {
+ intrinsics.atomic_sub(&document.operating_on, 1);
+ }
}
/*
- Client opens a document with transferred text
+ Client opens a document with transferred text
*/
-document_open :: proc(uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error {
-
- uri, parsed_ok := common.parse_uri(uri_string, context.allocator);
-
- log.infof("document_open: %v", uri_string);
-
- if !parsed_ok {
- log.error("Failed to parse uri");
- return .ParseError;
- }
-
- if document := &document_storage.documents[uri.path]; document != nil {
+document_open :: proc (uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error {
- if document.client_owned {
- log.errorf("Client called open on an already open document: %v ", document.uri.path);
- return .InvalidRequest;
- }
+ uri, parsed_ok := common.parse_uri(uri_string, context.allocator);
- document.uri = uri;
- document.client_owned = true;
- document.text = transmute([] u8)text;
- document.used_text = len(document.text);
- document.allocator = document_get_allocator();
+ log.infof("document_open: %v", uri_string);
- if err := document_refresh(document, config, writer); err != .None {
- return err;
- }
+ if !parsed_ok {
+ log.error("Failed to parse uri");
+ return .ParseError;
+ }
- }
+ if document := &document_storage.documents[uri.path]; document != nil {
- else {
+ if document.client_owned {
+ log.errorf("Client called open on an already open document: %v ", document.uri.path);
+ return .InvalidRequest;
+ }
- document := Document {
- uri = uri,
- text = transmute([] u8)text,
- client_owned = true,
- used_text = len(text),
- allocator = document_get_allocator(),
- };
+ document.uri = uri;
+ document.client_owned = true;
+ document.text = transmute([]u8)text;
+ document.used_text = len(document.text);
+ document.allocator = document_get_allocator();
- if err := document_refresh(&document, config, writer); err != .None {
- return err;
- }
+ if err := document_refresh(document, config, writer); err != .None {
+ return err;
+ }
+ } else {
- document_storage.documents[strings.clone(uri.path)] = document;
- }
+ document := Document {
+ uri = uri,
+ text = transmute([]u8)text,
+ client_owned = true,
+ used_text = len(text),
+ allocator = document_get_allocator(),
+ };
+ if err := document_refresh(&document, config, writer); err != .None {
+ return err;
+ }
+ document_storage.documents[strings.clone(uri.path)] = document;
+ }
- //hmm feels like odin needs some ownership semantic
- delete(uri_string);
+ //hmm feels like odin needs some ownership semantic
+ delete(uri_string);
- return .None;
+ return .None;
}
/*
- Function that applies changes to the given document through incremental syncronization
- */
-document_apply_changes :: proc(uri_string: string, changes: [dynamic] TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error {
-
- uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
-
- if !parsed_ok {
- return .ParseError;
- }
-
- document := &document_storage.documents[uri.path];
-
- if !document.client_owned {
- log.errorf("Client called change on an document not opened: %v ", document.uri.path);
- return .InvalidRequest;
- }
-
- for change in changes {
-
- //for some reason sublime doesn't seem to care even if i tell it to do incremental sync
- if range, ok := change.range.(common.Range); ok {
-
- absolute_range, ok := common.get_absolute_range(range, document.text[:document.used_text]);
+ Function that applies changes to the given document through incremental syncronization
+*/
+document_apply_changes :: proc (uri_string: string, changes: [dynamic]TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error {
- if !ok {
- return .ParseError;
- }
+ uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
- //lower bound is before the change
- lower := document.text[:absolute_range.start];
+ if !parsed_ok {
+ return .ParseError;
+ }
- //new change between lower and upper
- middle := change.text;
+ document := &document_storage.documents[uri.path];
- //upper bound is after the change
- upper := document.text[absolute_range.end:document.used_text];
+ if !document.client_owned {
+ log.errorf("Client called change on an document not opened: %v ", document.uri.path);
+ return .InvalidRequest;
+ }
- //total new size needed
- document.used_text = len(lower) + len(change.text) + len(upper);
+ for change in changes {
- //Reduce the amount of allocation by allocating more memory than needed
- if document.used_text > len(document.text) {
- new_text := make([]u8, document.used_text * 2);
+ //for some reason sublime doesn't seem to care even if i tell it to do incremental sync
+ if range, ok := change.range.(common.Range); ok {
- //join the 3 splices into the text
- copy(new_text, lower);
- copy(new_text[len(lower):], middle);
- copy(new_text[len(lower)+len(middle):], upper);
+ absolute_range, ok := common.get_absolute_range(range, document.text[:document.used_text]);
- delete(document.text);
+ if !ok {
+ return .ParseError;
+ }
- document.text = new_text;
- }
+ //lower bound is before the change
+ lower := document.text[:absolute_range.start];
- else {
- //order matters here, we need to make sure we swap the data already in the text before the middle
- copy(document.text, lower);
- copy(document.text[len(lower)+len(middle):], upper);
- copy(document.text[len(lower):], middle);
- }
+ //new change between lower and upper
+ middle := change.text;
- }
+ //upper bound is after the change
+ upper := document.text[absolute_range.end:document.used_text];
- else {
+ //total new size needed
+ document.used_text = len(lower) + len(change.text) + len(upper);
- document.used_text = len(change.text);
+ //Reduce the amount of allocation by allocating more memory than needed
+ if document.used_text > len(document.text) {
+ new_text := make([]u8, document.used_text * 2);
- if document.used_text > len(document.text) {
- new_text := make([]u8, document.used_text * 2);
- copy(new_text, change.text);
- delete(document.text);
- document.text = new_text;
- }
+ //join the 3 splices into the text
+ copy(new_text, lower);
+ copy(new_text[len(lower):], middle);
+ copy(new_text[len(lower) + len(middle):], upper);
- else {
- copy(document.text, change.text);
- }
+ delete(document.text);
- }
+ document.text = new_text;
+ } else {
+ //order matters here, we need to make sure we swap the data already in the text before the middle
+ copy(document.text, lower);
+ copy(document.text[len(lower) + len(middle):], upper);
+ copy(document.text[len(lower):], middle);
+ }
+ } else {
+ document.used_text = len(change.text);
- }
+ if document.used_text > len(document.text) {
+ new_text := make([]u8, document.used_text * 2);
+ copy(new_text, change.text);
+ delete(document.text);
+ document.text = new_text;
+ } else {
+ copy(document.text, change.text);
+ }
+ }
+ }
- //log.info(string(document.text[:document.used_text]));
+ //log.info(string(document.text[:document.used_text]));
- return document_refresh(document, config, writer);
+ return document_refresh(document, config, writer);
}
-document_close :: proc(uri_string: string) -> common.Error {
+document_close :: proc (uri_string: string) -> common.Error {
- log.infof("document_close: %v", uri_string);
+ log.infof("document_close: %v", uri_string);
- uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
+ uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
- if !parsed_ok {
- return .ParseError;
- }
+ if !parsed_ok {
+ return .ParseError;
+ }
- document := &document_storage.documents[uri.path];
+ document := &document_storage.documents[uri.path];
- if document == nil || !document.client_owned {
- log.errorf("Client called close on a document that was never opened: %v ", document.uri.path);
- return .InvalidRequest;
- }
+ if document == nil || !document.client_owned {
+ log.errorf("Client called close on a document that was never opened: %v ", document.uri.path);
+ return .InvalidRequest;
+ }
- free_all(common.scratch_allocator(document.allocator));
- document_free_allocator(document.allocator);
- document.allocator = nil;
+ free_all(common.scratch_allocator(document.allocator));
+ document_free_allocator(document.allocator);
+ document.allocator = nil;
- document.client_owned = false;
+ document.client_owned = false;
- common.delete_uri(document.uri);
+ common.delete_uri(document.uri);
- delete(document.text);
+ delete(document.text);
- document.used_text = 0;
+ document.used_text = 0;
- return .None;
+ return .None;
}
-
-
-document_refresh :: proc(document: ^Document, config: ^common.Config, writer: ^Writer) -> common.Error {
-
- errors, ok := parse_document(document, config);
-
- if !ok {
- return .ParseError;
- }
-
- if writer != nil && len(errors) > 0 {
- document.diagnosed_errors = true;
-
- params := NotificationPublishDiagnosticsParams {
- uri = document.uri.uri,
- diagnostics = make([] Diagnostic, len(errors), context.temp_allocator),
- };
-
- for error, i in errors {
-
- params.diagnostics[i] = Diagnostic {
- range = common.Range {
- start = common.Position {
- line = error.line - 1,
- character = 0,
- },
- end = common.Position {
- line = error.line,
- character = 0,
- },
- },
- severity = DiagnosticSeverity.Error,
- code = "test",
- message = error.message,
- };
-
- }
-
- notifaction := Notification {
- jsonrpc = "2.0",
- method = "textDocument/publishDiagnostics",
- params = params,
- };
-
- send_notification(notifaction, writer);
-
- }
-
- if writer != nil && len(errors) == 0 {
-
- //send empty diagnosis to remove the clients errors
- if document.diagnosed_errors {
-
- notifaction := Notification {
- jsonrpc = "2.0",
- method = "textDocument/publishDiagnostics",
-
- params = NotificationPublishDiagnosticsParams {
- uri = document.uri.uri,
- diagnostics = make([] Diagnostic, len(errors), context.temp_allocator),
- },
- };
-
- document.diagnosed_errors = false;
-
- send_notification(notifaction, writer);
- }
-
- }
-
- return .None;
+document_refresh :: proc (document: ^Document, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ errors, ok := parse_document(document, config);
+
+ if !ok {
+ return .ParseError;
+ }
+
+ if writer != nil && len(errors) > 0 {
+ document.diagnosed_errors = true;
+
+ params := NotificationPublishDiagnosticsParams {
+ uri = document.uri.uri,
+ diagnostics = make([]Diagnostic, len(errors), context.temp_allocator),
+ };
+
+ for error, i in errors {
+
+ params.diagnostics[i] = Diagnostic {
+ range = common.Range {
+ start = common.Position {
+ line = error.line - 1,
+ character = 0,
+ },
+ end = common.Position {
+ line = error.line,
+ character = 0,
+ },
+ },
+ severity = DiagnosticSeverity.Error,
+ code = "test",
+ message = error.message,
+ };
+ }
+
+ notifaction := Notification {
+ jsonrpc = "2.0",
+ method = "textDocument/publishDiagnostics",
+ params = params,
+ };
+
+ send_notification(notifaction, writer);
+ }
+
+ if writer != nil && len(errors) == 0 {
+
+ //send empty diagnosis to remove the clients errors
+ if document.diagnosed_errors {
+
+ notifaction := Notification {
+ jsonrpc = "2.0",
+ method = "textDocument/publishDiagnostics",
+ params = NotificationPublishDiagnosticsParams {
+ uri = document.uri.uri,
+ diagnostics = make([]Diagnostic, len(errors), context.temp_allocator),
+ },
+ };
+
+ document.diagnosed_errors = false;
+
+ send_notification(notifaction, writer);
+ }
+ }
+
+ return .None;
}
-current_errors: [dynamic] ParserError;
+current_errors: [dynamic]ParserError;
-parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- error := ParserError { line = pos.line, column = pos.column, file = pos.file,
- offset = pos.offset, message = fmt.tprintf(msg, ..args) };
- append(&current_errors, error);
+parser_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+ error := ParserError {
+ line = pos.line,column = pos.column,file = pos.file,
+ offset = pos.offset,message = fmt.tprintf(msg, ..args),
+ };
+ append(&current_errors, error);
}
-parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
-
+parser_warning_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
}
-parse_document :: proc(document: ^Document, config: ^common.Config) -> ([] ParserError, bool) {
+parse_document :: proc (document: ^Document, config: ^common.Config) -> ([]ParserError, bool) {
- p := parser.Parser {
- err = parser_error_handler,
+ p := parser.Parser {
+ err = parser_error_handler,
warn = parser_warning_handler,
};
- current_errors = make([dynamic] ParserError, context.temp_allocator);
-
- free_all(common.scratch_allocator(document.allocator));
-
- context.allocator = common.scratch_allocator(document.allocator);
+ current_errors = make([dynamic]ParserError, context.temp_allocator);
- //have to cheat the parser since it really wants to parse an entire package with the new changes...
- pkg := new(ast.Package);
- pkg.kind = .Normal;
- pkg.fullpath = document.uri.path;
+ free_all(common.scratch_allocator(document.allocator));
- document.ast = ast.File {
- fullpath = document.uri.path,
- src = document.text[:document.used_text],
- pkg = pkg,
- };
+ context.allocator = common.scratch_allocator(document.allocator);
- parser.parse_file(&p, &document.ast);
+ //have to cheat the parser since it really wants to parse an entire package with the new changes...
+ pkg := new(ast.Package);
+ pkg.kind = .Normal;
+ pkg.fullpath = document.uri.path;
- imports := make([dynamic]Package);
- document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator));
+ document.ast = ast.File {
+ fullpath = document.uri.path,
+ src = document.text[:document.used_text],
+ pkg = pkg,
+ };
- for imp, index in document.ast.imports {
+ parser.parse_file(&p, &document.ast);
- if i := strings.index(imp.fullpath, "\""); i == -1 {
- continue;
- }
+ imports := make([dynamic]Package);
+ document.package_name = strings.to_lower(path.dir(document.uri.path, context.temp_allocator));
- //collection specified
- if i := strings.index(imp.fullpath, ":"); i != -1 && i > 1 && i < len(imp.fullpath) - 1 {
+ for imp, index in document.ast.imports {
- if len(imp.fullpath) < 2 {
- continue;
- }
+ if i := strings.index(imp.fullpath, "\""); i == -1 {
+ continue;
+ }
- collection := imp.fullpath[1:i];
- p := imp.fullpath[i+1:len(imp.fullpath)-1];
+ //collection specified
+ if i := strings.index(imp.fullpath, ":"); i != -1 && i > 1 && i < len(imp.fullpath) - 1 {
- dir, ok := config.collections[collection];
+ if len(imp.fullpath) < 2 {
+ continue;
+ }
- if !ok {
- continue;
- }
+ collection := imp.fullpath[1:i];
+ p := imp.fullpath[i + 1:len(imp.fullpath) - 1];
- import_: Package;
- import_.name = strings.clone(path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator));
+ dir, ok := config.collections[collection];
- if imp.name.text != "" {
- import_.base = imp.name.text;
- }
+ if !ok {
+ continue;
+ }
- else {
- import_.base = path.base(import_.name, false);
- }
+ import_: Package;
+ import_.name = strings.clone(path.join(elems = {strings.to_lower(dir, context.temp_allocator), p}, allocator = context.temp_allocator));
- append(&imports, import_);
- }
+ if imp.name.text != "" {
+ import_.base = imp.name.text;
+ } else {
+ import_.base = path.base(import_.name, false);
+ }
- //relative
- else {
+ append(&imports, import_);
+ } else
- if len(imp.fullpath) < 2 {
- continue;
- }
+ //relative
+ {
- import_: Package;
- import_.name = path.join(elems = {document.package_name, imp.fullpath[1:len(imp.fullpath)-1]}, allocator = context.temp_allocator);
- import_.name = path.clean(import_.name);
+ if len(imp.fullpath) < 2 {
+ continue;
+ }
- if imp.name.text != "" {
- import_.base = imp.name.text;
- }
+ import_: Package;
+ import_.name = path.join(elems = {document.package_name, imp.fullpath[1:len(imp.fullpath) - 1]}, allocator = context.temp_allocator);
+ import_.name = path.clean(import_.name);
- else {
- import_.base = path.base(import_.name, false);
- }
+ if imp.name.text != "" {
+ import_.base = imp.name.text;
+ } else {
+ import_.base = path.base(import_.name, false);
+ }
- append(&imports, import_);
- }
+ append(&imports, import_);
+ }
+ }
- }
+ document.imports = imports[:];
- document.imports = imports[:];
-
- return current_errors[:], true;
-}
+ return current_errors[:], true;
+} \ No newline at end of file
diff --git a/src/server/format.odin b/src/server/format.odin
index 46d21e2..3ad492c 100644
--- a/src/server/format.odin
+++ b/src/server/format.odin
@@ -5,51 +5,51 @@ import "shared:common"
import "core:odin/printer"
FormattingOptions :: struct {
- tabSize: uint,
- insertSpaces: bool, //tabs or spaces
+ tabSize: uint,
+ insertSpaces: bool, //tabs or spaces
trimTrailingWhitespace: bool,
- insertFinalNewline: bool,
- trimFinalNewlines: bool,
+ insertFinalNewline: bool,
+ trimFinalNewlines: bool,
}
DocumentFormattingParams :: struct {
textDocument: TextDocumentIdentifier,
- options: FormattingOptions,
+ options: FormattingOptions,
}
TextEdit :: struct {
- range: common.Range,
+ range: common.Range,
newText: string,
}
-get_complete_format :: proc(document: ^Document) -> ([] TextEdit, bool) {
+get_complete_format :: proc (document: ^Document) -> ([]TextEdit, bool) {
/*
- prnt := printer.make_printer(printer.default_style, context.temp_allocator);
+ prnt := printer.make_printer(printer.default_style, context.temp_allocator);
- printer.print_file(&prnt, &document.ast);
+ printer.print_file(&prnt, &document.ast);
- end_line := document.ast.decls[len(document.ast.decls)-1].end.line;
+ end_line := document.ast.decls[len(document.ast.decls)-1].end.line;
- edit := TextEdit {
+ edit := TextEdit {
newText = printer.to_string(prnt),
range = {
- start = {
- character = 0,
- line = 0,
- },
- end = {
- character = 1,
- line = end_line + 1,
- }
+ start = {
+ character = 0,
+ line = 0,
+ },
+ end = {
+ character = 1,
+ line = end_line + 1,
}
- };
+ }
+ };
- edits := make([dynamic] TextEdit, context.temp_allocator);
+ edits := make([dynamic] TextEdit, context.temp_allocator);
- append(&edits, edit);
+ append(&edits, edit);
- return edits[:], true;
+ return edits[:], true;
*/
return {}, false;
diff --git a/src/server/hover.odin b/src/server/hover.odin
index cd09f01..08d49ca 100644
--- a/src/server/hover.odin
+++ b/src/server/hover.odin
@@ -16,131 +16,123 @@ import "core:slice"
import "shared:common"
import "shared:index"
-get_hover_information :: proc(document: ^Document, position: common.Position) -> (Hover, bool) {
+get_hover_information :: proc (document: ^Document, position: common.Position) -> (Hover, bool) {
+
+ hover := Hover {
+ contents = {
+ kind = "plaintext"
+ }
+ };
+
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name);
- hover := Hover {
- contents = {
- kind = "plaintext",
- }
- };
+ position_context, ok := get_document_position_context(document, position, .Hover);
+
+ get_globals(document.ast, &ast_context);
- ast_context := make_ast_context(document.ast, document.imports, document.package_name);
-
- position_context, ok := get_document_position_context(document, position, .Hover);
-
- get_globals(document.ast, &ast_context);
-
- if position_context.function != nil {
- get_locals(document.ast, position_context.function, &ast_context, &position_context);
- }
-
- if position_context.identifier != nil {
- if ident, ok := position_context.identifier.derived.(ast.Ident); ok {
- if _, ok := common.keyword_map[ident.name]; ok {
- hover.contents.kind = "plaintext";
- hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src);
- return hover, true;
- }
- }
- }
-
- if position_context.selector != nil && position_context.identifier != nil {
-
- hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src);
-
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
-
- //if the base selector is the client wants to go to.
- if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil {
-
- ident := position_context.identifier.derived.(ast.Ident);
-
- if ident.name == base.name {
-
- if resolved, ok := resolve_type_identifier(&ast_context, ident); ok {
- resolved.name = ident.name;
- resolved.signature = get_signature(&ast_context, ident, resolved);
-
- if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
- resolved.pkg = ast_context.document_package;
- }
-
- hover.contents = write_hover_content(&ast_context, resolved);
- return hover, true;
- }
-
- }
-
- }
-
- selector: index.Symbol;
- selector, ok = resolve_type_expression(&ast_context, position_context.selector);
-
- if !ok {
- return hover, true;
- }
-
- field: string;
-
- if position_context.field != nil {
-
- switch v in position_context.field.derived {
- case ast.Ident:
- field = v.name;
- }
-
- }
-
- hover.range = common.get_token_range(position_context.identifier^, document.ast.src);
-
- #partial switch v in selector.value {
- case index.SymbolStructValue:
- for name, i in v.names {
- if strings.compare(name, field) == 0 {
- if symbol, ok := resolve_type_expression(&ast_context, v.types[i]); ok {
- symbol.name = name;
- symbol.pkg = selector.name;
- symbol.signature = index.node_to_string(v.types[i]);
- hover.contents = write_hover_content(&ast_context, symbol);
- return hover, true;
- }
- }
- }
- case index.SymbolPackageValue:
- if symbol, ok := index.lookup(field, selector.pkg); ok {
- hover.contents = write_hover_content(&ast_context, symbol);
- return hover, true;
- }
- }
-
- }
-
- else if position_context.identifier != nil {
-
- ast_context.use_locals = true;
- ast_context.use_globals = true;
- ast_context.current_package = ast_context.document_package;
-
- ident := position_context.identifier.derived.(ast.Ident);
-
- hover.range = common.get_token_range(position_context.identifier^, document.ast.src);
-
- if resolved, ok := resolve_type_identifier(&ast_context, ident); ok {
- resolved.name = ident.name;
- resolved.signature = get_signature(&ast_context, ident, resolved);
-
- if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
- resolved.pkg = ast_context.document_package;
- }
-
- hover.contents = write_hover_content(&ast_context, resolved);
- return hover, true;
- }
-
- }
-
- return hover, true;
-}
+ if position_context.function != nil {
+ get_locals(document.ast, position_context.function, &ast_context, &position_context);
+ }
+ if position_context.identifier != nil {
+ if ident, ok := position_context.identifier.derived.(ast.Ident); ok {
+ if _, ok := common.keyword_map[ident.name]; ok {
+ hover.contents.kind = "plaintext";
+ hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src);
+ return hover, true;
+ }
+ }
+ }
+
+ if position_context.selector != nil && position_context.identifier != nil {
+
+ hover.range = common.get_token_range(position_context.identifier^, ast_context.file.src);
+
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
+
+ //if the base selector is the client wants to go to.
+ if base, ok := position_context.selector.derived.(ast.Ident); ok && position_context.identifier != nil {
+
+ ident := position_context.identifier.derived.(ast.Ident);
+
+ if ident.name == base.name {
+
+ if resolved, ok := resolve_type_identifier(&ast_context, ident); ok {
+ resolved.name = ident.name;
+ resolved.signature = get_signature(&ast_context, ident, resolved);
+
+ if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
+ resolved.pkg = ast_context.document_package;
+ }
+
+ hover.contents = write_hover_content(&ast_context, resolved);
+ return hover, true;
+ }
+ }
+ }
+
+ selector: index.Symbol;
+ selector, ok = resolve_type_expression(&ast_context, position_context.selector);
+
+ if !ok {
+ return hover, true;
+ }
+
+ field: string;
+
+ if position_context.field != nil {
+
+ switch v in position_context.field.derived {
+ case ast.Ident:
+ field = v.name;
+ }
+ }
+
+ hover.range = common.get_token_range(position_context.identifier^, document.ast.src);
+
+ #partial switch v in selector.value {
+ case index.SymbolStructValue:
+ for name, i in v.names {
+ if strings.compare(name, field) == 0 {
+ if symbol, ok := resolve_type_expression(&ast_context, v.types[i]); ok {
+ symbol.name = name;
+ symbol.pkg = selector.name;
+ symbol.signature = index.node_to_string(v.types[i]);
+ hover.contents = write_hover_content(&ast_context, symbol);
+ return hover, true;
+ }
+ }
+ }
+ case index.SymbolPackageValue:
+ if symbol, ok := index.lookup(field, selector.pkg); ok {
+ hover.contents = write_hover_content(&ast_context, symbol);
+ return hover, true;
+ }
+ }
+ } else if position_context.identifier != nil {
+
+ ast_context.use_locals = true;
+ ast_context.use_globals = true;
+ ast_context.current_package = ast_context.document_package;
+
+ ident := position_context.identifier.derived.(ast.Ident);
+
+ hover.range = common.get_token_range(position_context.identifier^, document.ast.src);
+
+ if resolved, ok := resolve_type_identifier(&ast_context, ident); ok {
+ resolved.name = ident.name;
+ resolved.signature = get_signature(&ast_context, ident, resolved);
+
+ if is_variable, ok := ast_context.variables[ident.name]; ok && is_variable {
+ resolved.pkg = ast_context.document_package;
+ }
+
+ hover.contents = write_hover_content(&ast_context, resolved);
+ return hover, true;
+ }
+ }
+
+ return hover, true;
+} \ No newline at end of file
diff --git a/src/server/log.odin b/src/server/log.odin
index d84a38d..77816a1 100644
--- a/src/server/log.odin
+++ b/src/server/log.odin
@@ -1,49 +1,47 @@
package server
-import "core:fmt";
-import "core:strings";
-import "core:os";
-import "core:time";
-import "core:log";
-
-Default_Console_Logger_Opts :: log.Options{
- .Level,
- .Terminal_Color,
- .Short_File_Path,
- .Line,
- .Procedure,
+import "core:fmt"
+import "core:strings"
+import "core:os"
+import "core:time"
+import "core:log"
+
+Default_Console_Logger_Opts :: log.Options {
+ .Level,
+ .Terminal_Color,
+ .Short_File_Path,
+ .Line,
+ .Procedure,
} | log.Full_Timestamp_Opts;
-
Lsp_Logger_Data :: struct {
- writer: ^Writer,
+ writer: ^Writer,
}
-create_lsp_logger :: proc(writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger {
- data := new(Lsp_Logger_Data);
- data.writer = writer;
- return log.Logger{lsp_logger_proc, data, lowest, opt};
+create_lsp_logger :: proc (writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger {
+ data := new(Lsp_Logger_Data);
+ data.writer = writer;
+ return log.Logger {lsp_logger_proc, data, lowest, opt};
}
-destroy_lsp_logger :: proc(log: ^log.Logger) {
- free(log.data);
+destroy_lsp_logger :: proc (log: ^log.Logger) {
+ free(log.data);
}
-lsp_logger_proc :: proc(logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) {
-
- data := cast(^Lsp_Logger_Data)logger_data;
+lsp_logger_proc :: proc (logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) {
- message := fmt.tprintf("%s", text);
+ data := cast(^Lsp_Logger_Data)logger_data;
- notification := Notification {
- jsonrpc = "2.0",
- method = "window/logMessage",
- params = NotificationLoggingParams {
- type = 1,
- message = message,
- }
- };
+ message := fmt.tprintf("%s", text);
- send_notification(notification, data.writer);
-}
+ notification := Notification {
+ jsonrpc = "2.0",
+ method = "window/logMessage",
+ params = NotificationLoggingParams {
+ type = 1,
+ message = message,
+ },
+ };
+ send_notification(notification, data.writer);
+} \ No newline at end of file
diff --git a/src/server/reader.odin b/src/server/reader.odin
index f421d67..a3d5937 100644
--- a/src/server/reader.odin
+++ b/src/server/reader.odin
@@ -4,61 +4,57 @@ import "core:os"
import "core:mem"
import "core:strings"
-ReaderFn :: proc(rawptr, [] byte) -> (int, int);
+ReaderFn :: proc (_: rawptr, _: []byte) -> (int, int);
Reader :: struct {
- reader_fn: ReaderFn,
- reader_context: rawptr,
-};
-
-make_reader :: proc(reader_fn: ReaderFn, reader_context: rawptr) -> Reader {
- return Reader { reader_context = reader_context, reader_fn = reader_fn };
+ reader_fn: ReaderFn,
+ reader_context: rawptr,
}
+make_reader :: proc (reader_fn: ReaderFn, reader_context: rawptr) -> Reader {
+ return Reader {reader_context = reader_context, reader_fn = reader_fn};
+}
-read_u8 :: proc(reader: ^Reader) -> (u8, bool) {
+read_u8 :: proc (reader: ^Reader) -> (u8, bool) {
- value : [1] byte;
+ value: [1]byte;
- read, err := reader.reader_fn(reader.reader_context, value[:]);
+ read, err := reader.reader_fn(reader.reader_context, value[:]);
- if(err != 0 || read != 1) {
- return 0, false;
- }
+ if (err != 0 || read != 1) {
+ return 0, false;
+ }
- return value[0], true;
+ return value[0], true;
}
-read_until_delimiter :: proc(reader: ^Reader, delimiter: u8, builder: ^strings.Builder) -> bool {
+read_until_delimiter :: proc (reader: ^Reader, delimiter: u8, builder: ^strings.Builder) -> bool {
- for true {
+ for true {
- value, success := read_u8(reader);
+ value, success := read_u8(reader);
- if(!success) {
- return false;
- }
+ if (!success) {
+ return false;
+ }
- strings.write_byte(builder, value);
+ strings.write_byte(builder, value);
- if(value == delimiter) {
- break;
- }
- }
+ if (value == delimiter) {
+ break;
+ }
+ }
- return true;
+ return true;
}
-read_sized :: proc(reader: ^Reader, data: []u8) -> bool {
-
- read, err := reader.reader_fn(reader.reader_context, data);
-
- if(err != 0 || read != len(data)) {
- return false;
- }
-
- return true;
-}
+read_sized :: proc (reader: ^Reader, data: []u8) -> bool {
+ read, err := reader.reader_fn(reader.reader_context, data);
+ if (err != 0 || read != len(data)) {
+ return false;
+ }
+ return true;
+} \ No newline at end of file
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 71dba0c..5a02fb3 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -21,1064 +21,1013 @@ import "shared:common"
import "shared:index"
Header :: struct {
- content_length: int,
- content_type: string,
-};
+ content_length: int,
+ content_type: string,
+}
RequestType :: enum {
- Initialize,
- Initialized,
- Shutdown,
- Exit,
- DidOpen,
- DidChange,
- DidClose,
- DidSave,
- Definition,
- Completion,
- SignatureHelp,
- DocumentSymbol,
- SemanticTokensFull,
- SemanticTokensRange,
- FormatDocument,
- Hover,
- CancelRequest,
-};
+ Initialize,
+ Initialized,
+ Shutdown,
+ Exit,
+ DidOpen,
+ DidChange,
+ DidClose,
+ DidSave,
+ Definition,
+ Completion,
+ SignatureHelp,
+ DocumentSymbol,
+ SemanticTokensFull,
+ SemanticTokensRange,
+ FormatDocument,
+ Hover,
+ CancelRequest,
+}
RequestInfo :: struct {
- root: json.Value,
- params: json.Value,
- document: ^Document,
- id: RequestId,
- config: ^common.Config,
- writer: ^Writer,
- result: common.Error,
-};
-
+ root: json.Value,
+ params: json.Value,
+ document: ^Document,
+ id: RequestId,
+ config: ^common.Config,
+ writer: ^Writer,
+ result: common.Error,
+}
pool: common.Pool;
-
-get_request_info :: proc(task: ^common.Task) -> ^RequestInfo {
- return cast(^RequestInfo)task.data;
+get_request_info :: proc (task: ^common.Task) -> ^RequestInfo {
+ return cast(^RequestInfo)task.data;
}
-make_response_message :: proc(id: RequestId, params: ResponseParams) -> ResponseMessage {
-
- return ResponseMessage {
- jsonrpc = "2.0",
- id = id,
- result = params,
- };
+make_response_message :: proc (id: RequestId, params: ResponseParams) -> ResponseMessage {
+ return ResponseMessage {
+ jsonrpc = "2.0",
+ id = id,
+ result = params,
+ };
}
-make_response_message_error :: proc(id: RequestId, error: ResponseError) -> ResponseMessageError {
-
- return ResponseMessageError {
- jsonrpc = "2.0",
- id = id,
- error = error,
- };
+make_response_message_error :: proc (id: RequestId, error: ResponseError) -> ResponseMessageError {
+ return ResponseMessageError {
+ jsonrpc = "2.0",
+ id = id,
+ error = error,
+ };
}
-read_and_parse_header :: proc(reader: ^Reader) -> (Header, bool) {
-
- header: Header;
-
- builder := strings.make_builder(context.temp_allocator);
+read_and_parse_header :: proc (reader: ^Reader) -> (Header, bool) {
- found_content_length := false;
+ header: Header;
- for true {
+ builder := strings.make_builder(context.temp_allocator);
- strings.reset_builder(&builder);
+ found_content_length := false;
- if !read_until_delimiter(reader, '\n', &builder) {
- log.error("Failed to read with delimiter");
- return header, false;
- }
+ for true {
- message := strings.to_string(builder);
+ strings.reset_builder(&builder);
- if len(message) == 0 || message[len(message)-2] != '\r' {
- log.error("No carriage return");
- return header, false;
- }
+ if !read_until_delimiter(reader, '\n', &builder) {
+ log.error("Failed to read with delimiter");
+ return header, false;
+ }
- if len(message)==2 {
- break;
- }
+ message := strings.to_string(builder);
- index := strings.last_index_byte(message, ':');
+ if len(message) == 0 || message[len(message) - 2] != '\r' {
+ log.error("No carriage return");
+ return header, false;
+ }
- if index == -1 {
- log.error("Failed to find semicolon");
- return header, false;
- }
+ if len(message) == 2 {
+ break;
+ }
- header_name := message[0:index];
- header_value := message[len(header_name) + 2 : len(message)-2];
+ index := strings.last_index_byte(message, ':');
- if strings.compare(header_name, "Content-Length") == 0 {
+ if index == -1 {
+ log.error("Failed to find semicolon");
+ return header, false;
+ }
- if len(header_value) == 0 {
- log.error("Header value has no length");
- return header, false;
- }
+ header_name := message[0:index];
+ header_value := message[len(header_name) + 2:len(message) - 2];
- value, ok := strconv.parse_int(header_value);
+ if strings.compare(header_name, "Content-Length") == 0 {
- if !ok {
- log.error("Failed to parse content length value");
- return header, false;
- }
+ if len(header_value) == 0 {
+ log.error("Header value has no length");
+ return header, false;
+ }
- header.content_length = value;
+ value, ok := strconv.parse_int(header_value);
- found_content_length = true;
+ if !ok {
+ log.error("Failed to parse content length value");
+ return header, false;
+ }
- }
+ header.content_length = value;
- else if strings.compare(header_name, "Content-Type") == 0 {
- if len(header_value) == 0 {
- log.error("Header value has no length");
- return header, false;
- }
- }
+ found_content_length = true;
+ } else if strings.compare(header_name, "Content-Type") == 0 {
+ if len(header_value) == 0 {
+ log.error("Header value has no length");
+ return header, false;
+ }
+ }
+ }
- }
-
- return header, found_content_length;
+ return header, found_content_length;
}
-read_and_parse_body :: proc(reader: ^Reader, header: Header) -> (json.Value, bool) {
-
- value: json.Value;
-
- data := make([]u8, header.content_length, context.temp_allocator);
+read_and_parse_body :: proc (reader: ^Reader, header: Header) -> (json.Value, bool) {
- if !read_sized(reader, data) {
- log.error("Failed to read body");
- return value, false;
- }
+ value: json.Value;
- err: json.Error;
+ data := make([]u8, header.content_length, context.temp_allocator);
- value, err = json.parse(data = data, allocator = context.allocator, parse_integers = true);
+ if !read_sized(reader, data) {
+ log.error("Failed to read body");
+ return value, false;
+ }
- if(err != json.Error.None) {
- log.error("Failed to parse body");
- return value, false;
- }
+ err: json.Error;
- return value, true;
-}
+ value, err = json.parse(data = data, allocator = context.allocator, parse_integers = true);
-request_map : map [string] RequestType =
- {"initialize" = .Initialize,
- "initialized" = .Initialized,
- "shutdown" = .Shutdown,
- "exit" = .Exit,
- "textDocument/didOpen" = .DidOpen,
- "textDocument/didChange" = .DidChange,
- "textDocument/didClose" = .DidClose,
- "textDocument/didSave" = .DidSave,
- "textDocument/definition" = .Definition,
- "textDocument/completion" = .Completion,
- "textDocument/signatureHelp" = .SignatureHelp,
- "textDocument/documentSymbol" = .DocumentSymbol,
- "textDocument/semanticTokens/full" = .SemanticTokensFull,
- "textDocument/semanticTokens/range" = .SemanticTokensRange,
- "textDocument/hover" = .Hover,
- "$/cancelRequest" = .CancelRequest,
- "textDocument/formatting" = .FormatDocument };
-
-handle_error :: proc(err: common.Error, id: RequestId, writer: ^Writer) {
-
- if err != .None {
-
- response := make_response_message_error(
- id = id,
- error = ResponseError {code = err, message = ""}
- );
-
- send_error(response, writer);
- }
+ if (err != json.Error.None) {
+ log.error("Failed to parse body");
+ return value, false;
+ }
+ return value, true;
}
-handle_request :: proc(request: json.Value, config: ^common.Config, writer: ^Writer) -> bool {
-
- root, ok := request.value.(json.Object);
-
- if !ok {
- log.error("No root object");
- return false;
- }
-
- id: RequestId;
- id_value: json.Value;
- id_value, ok = root["id"];
-
- if ok {
- #partial
- switch v in id_value.value {
- case json.String:
- id = v;
- case json.Integer:
- id = v;
- case:
- id = 0;
- }
- }
-
- method := root["method"].value.(json.String);
-
- request_type: RequestType;
- request_type, ok = request_map[method];
-
- if !ok {
- response := make_response_message_error(
- id = id,
- error = ResponseError {code = .MethodNotFound, message = ""}
- );
-
- send_error(response, writer);
- }
-
- else {
-
- info := new(RequestInfo);
-
- info.root = request;
- info.params = root["params"];
- info.id = id;
- info.config = config;
- info.writer = writer;
-
- task_proc: common.Task_Proc;
-
- switch request_type {
- case .Initialize:
- task_proc = request_initialize;
- case .Initialized:
- task_proc = request_initialized;
- case .Shutdown:
- task_proc = request_shutdown;
- case .Exit:
- task_proc = notification_exit;
- case .DidOpen:
- task_proc = notification_did_open;
- case .DidChange:
- task_proc = notification_did_change;
- case .DidClose:
- task_proc = notification_did_close;
- case .DidSave:
- task_proc = notification_did_save;
- case .Definition:
- task_proc = request_definition;
- case .Completion:
- task_proc = request_completion;
- case .SignatureHelp:
- task_proc = request_signature_help;
- case .DocumentSymbol:
- task_proc = request_document_symbols;
- case .SemanticTokensFull:
- task_proc = request_semantic_token_full;
- case .SemanticTokensRange:
- task_proc = request_semantic_token_range;
- case .Hover:
- task_proc = request_hover;
- case .CancelRequest:
- case .FormatDocument:
- task_proc = request_format_document;
- }
-
- task := common.Task {
- data = info,
- procedure = task_proc,
- };
-
- #partial switch request_type {
- case .CancelRequest:
- for {
- if task, ok := common.pool_try_and_pop_task(&pool); ok {
- common.pool_do_work(&pool, &task);
- }
-
- else {
- break;
- }
- }
- case .Initialize, .Initialized:
- task_proc(&task);
- case .Completion, .Definition, .Hover, .FormatDocument:
-
- uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
-
- document := document_get(uri);
-
- if document == nil {
- handle_error(.InternalError, id, writer);
- return false;
- }
-
- info.document = document;
-
- task_proc(&task);
-
- case .DidClose, .DidChange, .DidOpen, .DidSave:
-
- uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
-
- document := document_get(uri);
-
- if document != nil {
-
- for intrinsics.atomic_load(&document.operating_on) > 1 {
- if task, ok := common.pool_try_and_pop_task(&pool); ok {
- common.pool_do_work(&pool, &task);
- }
- }
-
- }
-
- task_proc(&task);
-
- document_release(document);
- case .Shutdown, .Exit:
- task_proc(&task);
- case .SignatureHelp, .SemanticTokensFull, .SemanticTokensRange, .DocumentSymbol:
-
- uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
-
- document := document_get(uri);
-
- if document == nil {
- handle_error(.InternalError, id, writer);
- return false;
- }
-
- info.document = document;
-
- if !config.debug_single_thread {
- common.pool_add_task(&pool, task_proc, info);
- }
-
- else{
- task_proc(&task);
- }
- case:
-
- if !config.debug_single_thread {
- common.pool_add_task(&pool, task_proc, info);
- }
+request_map: map[string]RequestType = {
+ "initialize" = .Initialize,
+ "initialized" = .Initialized,
+ "shutdown" = .Shutdown,
+ "exit" = .Exit,
+ "textDocument/didOpen" = .DidOpen,
+ "textDocument/didChange" = .DidChange,
+ "textDocument/didClose" = .DidClose,
+ "textDocument/didSave" = .DidSave,
+ "textDocument/definition" = .Definition,
+ "textDocument/completion" = .Completion,
+ "textDocument/signatureHelp" = .SignatureHelp,
+ "textDocument/documentSymbol" = .DocumentSymbol,
+ "textDocument/semanticTokens/full" = .SemanticTokensFull,
+ "textDocument/semanticTokens/range" = .SemanticTokensRange,
+ "textDocument/hover" = .Hover,
+ "$/cancelRequest" = .CancelRequest,
+ "textDocument/formatting" = .FormatDocument,
+};
- else {
- task_proc(&task);
- }
- }
+handle_error :: proc (err: common.Error, id: RequestId, writer: ^Writer) {
- }
+ if err != .None {
+ response := make_response_message_error(
+ id = id,
+ error = ResponseError {code = err, message = ""});
- return true;
+ send_error(response, writer);
+ }
}
-request_initialize :: proc(task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
-
- defer json.destroy_value(root);
- defer free(info);
-
- params_object, ok := params.value.(json.Object);
-
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
-
- initialize_params: RequestInitializeParams;
-
- if unmarshal(params, initialize_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
-
- config.workspace_folders = make([dynamic]common.WorkspaceFolder);
-
- for s in initialize_params.workspaceFolders {
- append(&config.workspace_folders, s);
- }
-
- thread_count := 2;
-
- enable_document_symbols: bool;
- enable_hover: bool;
- enable_format: bool;
-
- if len(config.workspace_folders) > 0 {
-
- //right now just look at the first workspace - TODO(daniel, add multiple workspace support)
- if uri, ok := common.parse_uri(config.workspace_folders[0].uri, context.temp_allocator); ok {
-
- ols_config_path := path.join(elems = {uri.path, "ols.json"}, allocator = context.temp_allocator);
-
- if data, ok := os.read_entire_file(ols_config_path, context.temp_allocator); ok {
-
- if value, err := json.parse(data = data, allocator = context.temp_allocator, parse_integers = true); err == .None {
-
- ols_config: OlsConfig;
-
- if unmarshal(value, ols_config, context.temp_allocator) == .None {
-
- thread_count = ols_config.thread_pool_count;
- enable_document_symbols = ols_config.enable_document_symbols;
- enable_hover = ols_config.enable_hover;
- enable_format = ols_config.enable_format;
- config.enable_semantic_tokens = ols_config.enable_semantic_tokens;
- config.verbose = ols_config.verbose;
-
- for p in ols_config.collections {
-
- forward_path, _ := filepath.to_slash(p.path, context.temp_allocator);
-
- if filepath.is_abs(p.path) {
- config.collections[strings.clone(p.name)] = strings.clone(forward_path);
- }
-
- else {
- config.collections[strings.clone(p.name)] = path.join(elems = {uri.path, forward_path}, allocator = context.allocator);
- }
-
- }
-
- }
-
- }
-
- }
-
- }
-
- }
-
- common.pool_init(&pool, thread_count);
- common.pool_start(&pool);
-
- for format in initialize_params.capabilities.textDocument.hover.contentFormat {
- if format == "markdown" {
- config.hover_support_md = true;
- }
- }
-
-
- for format in initialize_params.capabilities.textDocument.completion.documentationFormat {
- if format == "markdown" {
- config.completion_support_md = true;
- }
- }
+handle_request :: proc (request: json.Value, config: ^common.Config, writer: ^Writer) -> bool {
+
+ root, ok := request.value.(json.Object);
+
+ if !ok {
+ log.error("No root object");
+ return false;
+ }
+
+ id: RequestId;
+ id_value: json.Value;
+ id_value, ok = root["id"];
+
+ if ok {
+
+ #partial switch v in id_value.value {
+ case json.String:
+ id = v;
+ case json.Integer:
+ id = v;
+ case:
+ id = 0;
+ }
+ }
+
+ method := root["method"].value.(json.String);
+
+ request_type: RequestType;
+ request_type, ok = request_map[method];
+
+ if !ok {
+ response := make_response_message_error(
+ id = id,
+ error = ResponseError {code = .MethodNotFound, message = ""});
+
+ send_error(response, writer);
+ } else {
+
+ info := new(RequestInfo);
+
+ info.root = request;
+ info.params = root["params"];
+ info.id = id;
+ info.config = config;
+ info.writer = writer;
+
+ task_proc: common.Task_Proc;
+
+ switch request_type {
+ case .Initialize:
+ task_proc = request_initialize;
+ case .Initialized:
+ task_proc = request_initialized;
+ case .Shutdown:
+ task_proc = request_shutdown;
+ case .Exit:
+ task_proc = notification_exit;
+ case .DidOpen:
+ task_proc = notification_did_open;
+ case .DidChange:
+ task_proc = notification_did_change;
+ case .DidClose:
+ task_proc = notification_did_close;
+ case .DidSave:
+ task_proc = notification_did_save;
+ case .Definition:
+ task_proc = request_definition;
+ case .Completion:
+ task_proc = request_completion;
+ case .SignatureHelp:
+ task_proc = request_signature_help;
+ case .DocumentSymbol:
+ task_proc = request_document_symbols;
+ case .SemanticTokensFull:
+ task_proc = request_semantic_token_full;
+ case .SemanticTokensRange:
+ task_proc = request_semantic_token_range;
+ case .Hover:
+ task_proc = request_hover;
+ case .CancelRequest:
+ case .FormatDocument:
+ task_proc = request_format_document;
+ }
+
+ task := common.Task {
+ data = info,
+ procedure = task_proc,
+ };
+
+ #partial switch request_type {
+ case .CancelRequest:
+ for {
+ if task, ok := common.pool_try_and_pop_task(&pool); ok {
+ common.pool_do_work(&pool, &task);
+ } else {
+ break;
+ }
+ }
+ case .Initialize,.Initialized:
+ task_proc(&task);
+ case .Completion,.Definition,.Hover,.FormatDocument:
+
+ uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
+
+ document := document_get(uri);
+
+ if document == nil {
+ handle_error(.InternalError, id, writer);
+ return false;
+ }
+
+ info.document = document;
+
+ task_proc(&task);
+
+ case .DidClose,.DidChange,.DidOpen,.DidSave:
+
+ uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
+
+ document := document_get(uri);
+
+ if document != nil {
+
+ for intrinsics.atomic_load(&document.operating_on) > 1 {
+ if task, ok := common.pool_try_and_pop_task(&pool); ok {
+ common.pool_do_work(&pool, &task);
+ }
+ }
+ }
+
+ task_proc(&task);
+
+ document_release(document);
+ case .Shutdown,.Exit:
+ task_proc(&task);
+ case .SignatureHelp,.SemanticTokensFull,.SemanticTokensRange,.DocumentSymbol:
+
+ uri := root["params"].value.(json.Object)["textDocument"].value.(json.Object)["uri"].value.(json.String);
+
+ document := document_get(uri);
+
+ if document == nil {
+ handle_error(.InternalError, id, writer);
+ return false;
+ }
+
+ info.document = document;
+
+ if !config.debug_single_thread {
+ common.pool_add_task(&pool, task_proc, info);
+ } else {
+ task_proc(&task);
+ }
+ case:
+
+ if !config.debug_single_thread {
+ common.pool_add_task(&pool, task_proc, info);
+ } else {
+ task_proc(&task);
+ }
+ }
+ }
+
+ return true;
+}
+request_initialize :: proc (task: ^common.Task) {
+ info := get_request_info(task);
- config.signature_offset_support = initialize_params.capabilities.textDocument.signatureHelp.signatureInformation.parameterInformation.labelOffsetSupport;
+ using info;
- completionTriggerCharacters := [] string { ".", ">", "#" };
- signatureTriggerCharacters := [] string { "(" };
+ defer json.destroy_value(root);
+ defer free(info);
- token_type := type_info_of(SemanticTokenTypes).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
- token_modifier := type_info_of(SemanticTokenModifiers).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
+ params_object, ok := params.value.(json.Object);
- token_types := make([] string, len(token_type.names), context.temp_allocator);
- token_modifiers := make([] string, len(token_modifier.names), context.temp_allocator);
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- for name, i in token_type.names {
- token_types[i] = strings.to_lower(name, context.temp_allocator);
- }
+ initialize_params: RequestInitializeParams;
- for name, i in token_modifier.names {
- token_modifiers[i] = strings.to_lower(name, context.temp_allocator);
- }
+ if unmarshal(params, initialize_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- response := make_response_message(
- params = ResponseInitializeParams {
- capabilities = ServerCapabilities {
- textDocumentSync = TextDocumentSyncOptions {
- openClose = true,
- change = 2, //incremental
- save = {
- includeText = true,
- }
- },
- definitionProvider = true,
- completionProvider = CompletionOptions {
- resolveProvider = false,
- triggerCharacters = completionTriggerCharacters,
- },
- signatureHelpProvider = SignatureHelpOptions {
- triggerCharacters = signatureTriggerCharacters,
- },
- semanticTokensProvider = SemanticTokensOptions {
- range = false,
- full = config.enable_semantic_tokens,
- legend = SemanticTokensLegend {
- tokenTypes = token_types,
- tokenModifiers = token_modifiers,
- },
- },
- documentSymbolProvider = enable_document_symbols,
- hoverProvider = enable_hover,
- documentFormattingProvider = enable_format,
- },
- },
- id = id,
- );
-
- send_response(response, writer);
+ config.workspace_folders = make([dynamic]common.WorkspaceFolder);
- /*
- Temp index here, but should be some background thread that starts the indexing
- */
+ for s in initialize_params.workspaceFolders {
+ append(&config.workspace_folders, s);
+ }
- index.indexer.dynamic_index = index.make_memory_index(index.make_symbol_collection(context.allocator, config));
+ thread_count := 2;
- index.build_static_index(context.allocator, config);
+ enable_document_symbols: bool;
+ enable_hover: bool;
+ enable_format: bool;
+
+ if len(config.workspace_folders) > 0 {
+
+ 			//right now just look at the first workspace - TODO(daniel): add multiple workspace support
+ if uri, ok := common.parse_uri(config.workspace_folders[0].uri, context.temp_allocator); ok {
+
+ ols_config_path := path.join(elems = {uri.path, "ols.json"}, allocator = context.temp_allocator);
+
+ if data, ok := os.read_entire_file(ols_config_path, context.temp_allocator); ok {
+
+ if value, err := json.parse(data = data, allocator = context.temp_allocator, parse_integers = true); err == .None {
+
+ ols_config: OlsConfig;
+
+ if unmarshal(value, ols_config, context.temp_allocator) == .None {
- /*
- Add the builtin and runtime package
- */
+ thread_count = ols_config.thread_pool_count;
+ enable_document_symbols = ols_config.enable_document_symbols;
+ enable_hover = ols_config.enable_hover;
+ enable_format = ols_config.enable_format;
+ config.enable_semantic_tokens = ols_config.enable_semantic_tokens;
+ config.verbose = ols_config.verbose;
+
+ for p in ols_config.collections {
- if core, ok := config.collections["core"]; ok {
- append(&index.indexer.built_in_packages, path.join(strings.to_lower(core, context.temp_allocator), "builtin"));
- append(&index.indexer.built_in_packages, path.join(strings.to_lower(core, context.temp_allocator), "runtime"));
- }
-
- log.info("Finished indexing");
+ forward_path, _ := filepath.to_slash(p.path, context.temp_allocator);
+
+ if filepath.is_abs(p.path) {
+ config.collections[strings.clone(p.name)] = strings.clone(forward_path);
+ } else {
+ config.collections[strings.clone(p.name)] = path.join(elems = {uri.path, forward_path}, allocator = context.allocator);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ common.pool_init(&pool, thread_count);
+ common.pool_start(&pool);
+
+ for format in initialize_params.capabilities.textDocument.hover.contentFormat {
+ if format == "markdown" {
+ config.hover_support_md = true;
+ }
+ }
+
+ for format in initialize_params.capabilities.textDocument.completion.documentationFormat {
+ if format == "markdown" {
+ config.completion_support_md = true;
+ }
+ }
+
+ config.signature_offset_support = initialize_params.capabilities.textDocument.signatureHelp.signatureInformation.parameterInformation.labelOffsetSupport;
+
+ completionTriggerCharacters := []string {".", ">", "#"};
+ signatureTriggerCharacters := []string {"("};
+
+ token_type := type_info_of(SemanticTokenTypes).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
+ token_modifier := type_info_of(SemanticTokenModifiers).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
+
+ token_types := make([]string, len(token_type.names), context.temp_allocator);
+ token_modifiers := make([]string, len(token_modifier.names), context.temp_allocator);
+
+ for name, i in token_type.names {
+ token_types[i] = strings.to_lower(name, context.temp_allocator);
+ }
+
+ for name, i in token_modifier.names {
+ token_modifiers[i] = strings.to_lower(name, context.temp_allocator);
+ }
+
+ response := make_response_message(
+ params = ResponseInitializeParams {
+ capabilities = ServerCapabilities {
+ textDocumentSync = TextDocumentSyncOptions {
+ openClose = true,
+ change = 2, //incremental
+ save = {
+ includeText = true
+ },
+ },
+ definitionProvider = true,
+ completionProvider = CompletionOptions {
+ resolveProvider = false,
+ triggerCharacters = completionTriggerCharacters,
+ },
+ signatureHelpProvider = SignatureHelpOptions {
+ triggerCharacters = signatureTriggerCharacters
+ },
+ semanticTokensProvider = SemanticTokensOptions {
+ range = false,
+ full = config.enable_semantic_tokens,
+ legend = SemanticTokensLegend {
+ tokenTypes = token_types,
+ tokenModifiers = token_modifiers,
+ },
+ },
+ documentSymbolProvider = enable_document_symbols,
+ hoverProvider = enable_hover,
+ documentFormattingProvider = enable_format,
+ }
+ },
+ id = id);
+
+ send_response(response, writer);
+
+ /*
+ Temp index here, but should be some background thread that starts the indexing
+ */
+
+ index.indexer.dynamic_index = index.make_memory_index(index.make_symbol_collection(context.allocator, config));
+
+ index.build_static_index(context.allocator, config);
+
+ /*
+ Add the builtin and runtime package
+ */
+
+ if core, ok := config.collections["core"]; ok {
+ append(&index.indexer.built_in_packages, path.join(strings.to_lower(core, context.temp_allocator), "builtin"));
+ append(&index.indexer.built_in_packages, path.join(strings.to_lower(core, context.temp_allocator), "runtime"));
+ }
+
+ log.info("Finished indexing");
}
-request_initialized :: proc(task: ^common.Task) {
- info := get_request_info(task);
+request_initialized :: proc (task: ^common.Task) {
+ info := get_request_info(task);
- using info;
+ using info;
- json.destroy_value(root);
- free(info);
+ json.destroy_value(root);
+ free(info);
}
-request_shutdown :: proc(task: ^common.Task) {
-
- info := get_request_info(task);
+request_shutdown :: proc (task: ^common.Task) {
- using info;
+ info := get_request_info(task);
- defer json.destroy_value(root);
- defer free(info);
+ using info;
- response := make_response_message(
- params = nil,
- id = id,
- );
-
- send_response(response, writer);
+ defer json.destroy_value(root);
+ defer free(info);
+ response := make_response_message(
+ params = nil,
+ id = id);
+ send_response(response, writer);
}
-request_definition :: proc(task: ^common.Task) {
+request_definition :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer document_release(document);
- defer free(info);
- defer json.destroy_value(root);
+ defer document_release(document);
+ defer free(info);
+ defer json.destroy_value(root);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- definition_params: TextDocumentPositionParams;
+ definition_params: TextDocumentPositionParams;
- if unmarshal(params, definition_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, definition_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- location, ok2 := get_definition_location(document, definition_params.position);
+ location, ok2 := get_definition_location(document, definition_params.position);
- if !ok2 {
- log.warn("Failed to get definition location");
- }
+ if !ok2 {
+ log.warn("Failed to get definition location");
+ }
- response := make_response_message(
- params = location,
- id = id,
- );
+ response := make_response_message(
+ params = location,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
}
+request_completion :: proc (task: ^common.Task) {
-request_completion :: proc(task: ^common.Task) {
-
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- completition_params: CompletionParams;
+ completition_params: CompletionParams;
+ if unmarshal(params, completition_params, context.temp_allocator) != .None {
+ log.error("Failed to unmarshal completion request");
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if unmarshal(params, completition_params, context.temp_allocator) != .None {
- log.error("Failed to unmarshal completion request");
- handle_error(.ParseError, id, writer);
- return;
- }
+ list: CompletionList;
+ list, ok = get_completion_list(document, completition_params.position);
- list: CompletionList;
- list, ok = get_completion_list(document, completition_params.position);
+ if !ok {
+ handle_error(.InternalError, id, writer);
+ return;
+ }
- if !ok {
- handle_error(.InternalError, id, writer);
- return;
- }
+ response := make_response_message(
+ params = list,
+ id = id);
- response := make_response_message(
- params = list,
- id = id,
- );
-
- send_response(response, writer);
+ send_response(response, writer);
}
-request_signature_help :: proc(task: ^common.Task) {
+request_signature_help :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- signature_params: SignatureHelpParams;
+ signature_params: SignatureHelpParams;
- if unmarshal(params, signature_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, signature_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- help: SignatureHelp;
- help, ok = get_signature_information(document, signature_params.position);
+ help: SignatureHelp;
+ help, ok = get_signature_information(document, signature_params.position);
- if !ok {
- handle_error(.InternalError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.InternalError, id, writer);
+ return;
+ }
- response := make_response_message(
- params = help,
- id = id,
- );
+ response := make_response_message(
+ params = help,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
}
-request_format_document :: proc(task: ^common.Task) {
-
- info := get_request_info(task);
+request_format_document :: proc (task: ^common.Task) {
- using info;
+ info := get_request_info(task);
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ using info;
- params_object, ok := params.value.(json.Object);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ params_object, ok := params.value.(json.Object);
- format_params: DocumentFormattingParams;
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if unmarshal(params, format_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ format_params: DocumentFormattingParams;
- edit: [] TextEdit;
- edit, ok = get_complete_format(document);
+ if unmarshal(params, format_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if !ok {
- handle_error(.InternalError, id, writer);
- return;
- }
+ edit: []TextEdit;
+ edit, ok = get_complete_format(document);
- response := make_response_message(
- params = edit,
- id = id,
- );
+ if !ok {
+ handle_error(.InternalError, id, writer);
+ return;
+ }
- send_response(response, writer);
+ response := make_response_message(
+ params = edit,
+ id = id);
+ send_response(response, writer);
}
-notification_exit :: proc(task: ^common.Task) {
- info := get_request_info(task);
- using info;
+notification_exit :: proc (task: ^common.Task) {
+ info := get_request_info(task);
+ using info;
- defer json.destroy_value(root);
- defer free(info);
+ defer json.destroy_value(root);
+ defer free(info);
- config.running = false;
+ config.running = false;
}
-notification_did_open :: proc(task: ^common.Task) {
+notification_did_open :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer json.destroy_value(root);
- defer free(info);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- log.error("Failed to parse open document notification");
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ log.error("Failed to parse open document notification");
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- open_params: DidOpenTextDocumentParams;
+ open_params: DidOpenTextDocumentParams;
- if unmarshal(params, open_params, context.allocator) != .None {
- log.error("Failed to parse open document notification");
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, open_params, context.allocator) != .None {
+ log.error("Failed to parse open document notification");
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None {
- handle_error(n, id, writer);
- }
+ if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None {
+ handle_error(n, id, writer);
+ }
}
-notification_did_change :: proc(task: ^common.Task) {
+notification_did_change :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer json.destroy_value(root);
- defer free(info);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- change_params: DidChangeTextDocumentParams;
+ change_params: DidChangeTextDocumentParams;
- if unmarshal(params, change_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, change_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
-
- document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer);
+ document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer);
}
-notification_did_close :: proc(task: ^common.Task) {
+notification_did_close :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer json.destroy_value(root);
- defer free(info);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- close_params: DidCloseTextDocumentParams;
+ close_params: DidCloseTextDocumentParams;
- if unmarshal(params, close_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, close_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if n := document_close(close_params.textDocument.uri); n != .None {
- handle_error(n, id, writer);
- return;
- }
+ if n := document_close(close_params.textDocument.uri); n != .None {
+ handle_error(n, id, writer);
+ return;
+ }
}
-notification_did_save :: proc(task: ^common.Task) {
- info := get_request_info(task);
-
- using info;
+notification_did_save :: proc (task: ^common.Task) {
+ info := get_request_info(task);
+ using info;
- defer json.destroy_value(root);
- defer free(info);
+ defer json.destroy_value(root);
+ defer free(info);
- //this is temporary, but will provide dynamic indexing and is syncronized
+ 	//this is temporary, but will provide dynamic indexing and is synchronized
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- save_params: DidSaveTextDocumentParams;
+ save_params: DidSaveTextDocumentParams;
- if unmarshal(params, save_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, save_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- uri: common.Uri;
+ uri: common.Uri;
- if uri, ok = common.parse_uri(save_params.textDocument.uri, context.temp_allocator); !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if uri, ok = common.parse_uri(save_params.textDocument.uri, context.temp_allocator); !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- fullpath := uri.path;
+ fullpath := uri.path;
- p := parser.Parser {
- err = index.log_error_handler,
- warn = index.log_warning_handler,
- };
+ p := parser.Parser {
+ err = index.log_error_handler,
+ warn = index.log_warning_handler,
+ };
- //have to cheat the parser since it really wants to parse an entire package with the new changes...
- dir := filepath.base(filepath.dir(fullpath, context.temp_allocator));
+ //have to cheat the parser since it really wants to parse an entire package with the new changes...
+ dir := filepath.base(filepath.dir(fullpath, context.temp_allocator));
- pkg := new(ast.Package);
- pkg.kind = .Normal;
- pkg.fullpath = fullpath;
- pkg.name = dir;
+ pkg := new(ast.Package);
+ pkg.kind = .Normal;
+ pkg.fullpath = fullpath;
+ pkg.name = dir;
- if dir == "runtime" {
- pkg.kind = .Runtime;
- }
+ if dir == "runtime" {
+ pkg.kind = .Runtime;
+ }
- file := ast.File {
- fullpath = fullpath,
- src = transmute([]u8)save_params.text,
- pkg = pkg,
- };
+ file := ast.File {
+ fullpath = fullpath,
+ src = transmute([]u8)save_params.text,
+ pkg = pkg,
+ };
- ok = parser.parse_file(&p, &file);
+ ok = parser.parse_file(&p, &file);
- if !ok {
- log.errorf("error in parse file for indexing %v", fullpath);
- }
+ if !ok {
+ log.errorf("error in parse file for indexing %v", fullpath);
+ }
- for key, value in index.indexer.dynamic_index.collection.symbols {
+ for key, value in index.indexer.dynamic_index.collection.symbols {
- if value.uri == save_params.textDocument.uri {
- index.free_symbol(value, context.allocator);
- index.indexer.dynamic_index.collection.symbols[key] = {};
- }
-
- }
-
- if ret := index.collect_symbols(&index.indexer.dynamic_index.collection, file, uri.uri); ret != .None {
- log.errorf("failed to collect symbols on save %v", ret);
- }
+ if value.uri == save_params.textDocument.uri {
+ index.free_symbol(value, context.allocator);
+ index.indexer.dynamic_index.collection.symbols[key] = {};
+ }
+ }
+ if ret := index.collect_symbols(&index.indexer.dynamic_index.collection, file, uri.uri); ret != .None {
+ log.errorf("failed to collect symbols on save %v", ret);
+ }
}
-request_semantic_token_full :: proc(task: ^common.Task) {
-
- info := get_request_info(task);
+request_semantic_token_full :: proc (task: ^common.Task) {
- using info;
+ info := get_request_info(task);
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ using info;
- params_object, ok := params.value.(json.Object);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ params_object, ok := params.value.(json.Object);
- semantic_params: SemanticTokensParams;
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- if unmarshal(params, semantic_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ semantic_params: SemanticTokensParams;
- range := common.Range {
- start = common.Position {
- line = 0,
- },
+ if unmarshal(params, semantic_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- end = common.Position {
- line = 9000000, //should be enough
- }
- };
+ range := common.Range {
+ start = common.Position {
+ line = 0
+ },
+ end = common.Position {
+ line = 9000000 //should be enough
+ },
+ };
- symbols: SemanticTokens;
+ symbols: SemanticTokens;
- if config.enable_semantic_tokens {
- symbols = get_semantic_tokens(document, range);
- }
+ if config.enable_semantic_tokens {
+ symbols = get_semantic_tokens(document, range);
+ }
- response := make_response_message(
- params = symbols,
- id = id,
- );
+ response := make_response_message(
+ params = symbols,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
}
-request_semantic_token_range :: proc(task: ^common.Task) {
+request_semantic_token_range :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- semantic_params: SemanticTokensRangeParams;
+ semantic_params: SemanticTokensRangeParams;
- if unmarshal(params, semantic_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, semantic_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- symbols: SemanticTokens;
+ symbols: SemanticTokens;
- if config.enable_semantic_tokens {
- symbols = get_semantic_tokens(document, semantic_params.range);
- }
+ if config.enable_semantic_tokens {
+ symbols = get_semantic_tokens(document, semantic_params.range);
+ }
- response := make_response_message(
- params = symbols,
- id = id,
- );
+ response := make_response_message(
+ params = symbols,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
}
-request_document_symbols :: proc(task: ^common.Task) {
+request_document_symbols :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- symbol_params: DocumentSymbolParams;
+ symbol_params: DocumentSymbolParams;
- if unmarshal(params, symbol_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, symbol_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- symbols := get_document_symbols(document);
+ symbols := get_document_symbols(document);
- response := make_response_message(
- params = symbols,
- id = id,
- );
+ response := make_response_message(
+ params = symbols,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
}
-request_hover :: proc(task: ^common.Task) {
+request_hover :: proc (task: ^common.Task) {
- info := get_request_info(task);
+ info := get_request_info(task);
- using info;
+ using info;
- defer document_release(document);
- defer json.destroy_value(root);
- defer free(info);
+ defer document_release(document);
+ defer json.destroy_value(root);
+ defer free(info);
- params_object, ok := params.value.(json.Object);
+ params_object, ok := params.value.(json.Object);
- if !ok {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- hover_params: HoverParams;
+ hover_params: HoverParams;
- if unmarshal(params, hover_params, context.temp_allocator) != .None {
- handle_error(.ParseError, id, writer);
- return;
- }
+ if unmarshal(params, hover_params, context.temp_allocator) != .None {
+ handle_error(.ParseError, id, writer);
+ return;
+ }
- hover: Hover;
- hover, ok = get_hover_information(document, hover_params.position);
+ hover: Hover;
+ hover, ok = get_hover_information(document, hover_params.position);
- if !ok {
- handle_error(.InternalError, id, writer);
- return;
- }
+ if !ok {
+ handle_error(.InternalError, id, writer);
+ return;
+ }
- response := make_response_message(
- params = hover,
- id = id,
- );
+ response := make_response_message(
+ params = hover,
+ id = id);
- send_response(response, writer);
+ send_response(response, writer);
} \ No newline at end of file
diff --git a/src/server/response.odin b/src/server/response.odin
index bd7a77e..1ef7484 100644
--- a/src/server/response.odin
+++ b/src/server/response.odin
@@ -1,68 +1,67 @@
package server
-
import "core:fmt"
import "core:encoding/json"
-send_notification :: proc(notification: Notification, writer: ^Writer) -> bool {
+send_notification :: proc (notification: Notification, writer: ^Writer) -> bool {
- data, error := json.marshal(notification, context.temp_allocator);
+ data, error := json.marshal(notification, context.temp_allocator);
- header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
+ header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
- if error != json.Marshal_Error.None {
- return false;
- }
+ if error != json.Marshal_Error.None {
+ return false;
+ }
- if(!write_sized(writer, transmute([]u8)header)) {
- return false;
- }
+ if (!write_sized(writer, transmute([]u8)header)) {
+ return false;
+ }
- if(!write_sized(writer, data)) {
- return false;
- }
+ if (!write_sized(writer, data)) {
+ return false;
+ }
- return true;
+ return true;
}
-send_response :: proc(response: ResponseMessage, writer: ^Writer) -> bool {
+send_response :: proc (response: ResponseMessage, writer: ^Writer) -> bool {
- data, error := json.marshal(response, context.temp_allocator);
+ data, error := json.marshal(response, context.temp_allocator);
- header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
+ header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
- if error != json.Marshal_Error.None {
- return false;
- }
+ if error != json.Marshal_Error.None {
+ return false;
+ }
- if(!write_sized(writer, transmute([]u8)header)) {
- return false;
- }
+ if (!write_sized(writer, transmute([]u8)header)) {
+ return false;
+ }
- if(!write_sized(writer, data)) {
- return false;
- }
+ if (!write_sized(writer, data)) {
+ return false;
+ }
- return true;
+ return true;
}
-send_error :: proc(response: ResponseMessageError, writer: ^Writer) -> bool {
+send_error :: proc (response: ResponseMessageError, writer: ^Writer) -> bool {
- data, error := json.marshal(response, context.temp_allocator);
+ data, error := json.marshal(response, context.temp_allocator);
- header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
+ header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
- if error != json.Marshal_Error.None {
- return false;
- }
+ if error != json.Marshal_Error.None {
+ return false;
+ }
- if(!write_sized(writer, transmute([]u8)header)) {
- return false;
- }
+ if (!write_sized(writer, transmute([]u8)header)) {
+ return false;
+ }
- if(!write_sized(writer, data)) {
- return false;
- }
+ if (!write_sized(writer, data)) {
+ return false;
+ }
- return true;
-}
+ return true;
+} \ No newline at end of file
diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin
index 229ae7e..7f238d3 100644
--- a/src/server/semantic_tokens.odin
+++ b/src/server/semantic_tokens.odin
@@ -8,628 +8,572 @@ import "shared:common"
import "shared:index"
/*
- Right now I might be setting the wrong types, since there is no documentation as to what should be what, and looking at other LSP there is no consistancy.
+ Right now I might be setting the wrong types, since there is no documentation as to what should be what, and looking at other LSP there is no consistancy.
*/
SemanticTokenTypes :: enum {
- Namespace,
- Type,
- Enum,
- Struct,
- Parameter,
- Variable,
- EnumMember,
- Function,
- Member,
- Keyword,
- Modifier,
- Comment,
- String,
- Number,
- Operator,
- Property,
-};
+ Namespace,
+ Type,
+ Enum,
+ Struct,
+ Parameter,
+ Variable,
+ EnumMember,
+ Function,
+ Member,
+ Keyword,
+ Modifier,
+ Comment,
+ String,
+ Number,
+ Operator,
+ Property,
+}
SemanticTokenModifiers :: enum {
- None,
- Declaration,
- Definition,
- Deprecated,
-};
+ None,
+ Declaration,
+ Definition,
+ Deprecated,
+}
SemanticTokensClientCapabilities :: struct {
-
- requests: struct {
- range: bool,
- },
-
- tokenTypes: [] string,
- tokenModifiers: [] string,
- formats: [] string,
- overlappingTokenSupport: bool,
- multilineTokenSupport: bool,
-};
+ requests: struct {
+ range: bool,
+ },
+ tokenTypes: []string,
+ tokenModifiers: []string,
+ formats: []string,
+ overlappingTokenSupport: bool,
+ multilineTokenSupport: bool,
+}
SemanticTokensLegend :: struct {
- tokenTypes: [] string,
- tokenModifiers: [] string,
-};
+ tokenTypes: []string,
+ tokenModifiers: []string,
+}
SemanticTokensOptions :: struct {
- legend: SemanticTokensLegend,
- range: bool,
- full: bool,
-};
+ legend: SemanticTokensLegend,
+ range: bool,
+ full: bool,
+}
SemanticTokensParams :: struct {
- textDocument: TextDocumentIdentifier,
-};
+ textDocument: TextDocumentIdentifier,
+}
SemanticTokensRangeParams :: struct {
- textDocument: TextDocumentIdentifier,
- range: common.Range,
-};
+ textDocument: TextDocumentIdentifier,
+ range: common.Range,
+}
SemanticTokens :: struct {
- data: [] u32,
-};
+ data: []u32,
+}
SemanticTokenBuilder :: struct {
- current_function: ^ast.Node,
- current_start: int,
- selector_member: bool,
- selector_package: bool,
- tokens: [dynamic] u32,
-};
-
-make_token_builder :: proc(allocator := context.temp_allocator) -> SemanticTokenBuilder {
+ current_function: ^ast.Node,
+ current_start: int,
+ selector_member: bool,
+ selector_package: bool,
+ tokens: [dynamic]u32,
+}
- return {
- tokens = make([dynamic]u32, context.temp_allocator),
- };
+make_token_builder :: proc (allocator := context.temp_allocator) -> SemanticTokenBuilder {
+ return {
+ tokens = make([dynamic]u32, context.temp_allocator)
+ };
}
-get_tokens :: proc(builder: SemanticTokenBuilder) -> SemanticTokens {
- return {
- data = builder.tokens[:],
- };
+get_tokens :: proc (builder: SemanticTokenBuilder) -> SemanticTokens {
+ return {
+ data = builder.tokens[:]
+ };
}
-get_semantic_tokens :: proc(document: ^Document, range: common.Range) -> SemanticTokens {
+get_semantic_tokens :: proc (document: ^Document, range: common.Range) -> SemanticTokens {
- ast_context := make_ast_context(document.ast, document.imports, document.package_name, context.temp_allocator);
- builder := make_token_builder();
+ ast_context := make_ast_context(document.ast, document.imports, document.package_name, context.temp_allocator);
+ builder := make_token_builder();
- get_globals(document.ast, &ast_context);
+ get_globals(document.ast, &ast_context);
- for decl in document.ast.decls {
- if range.start.line <= decl.pos.line && decl.end.line <= range.end.line {
- write_semantic_tokens(decl, &builder, &ast_context);
- }
- }
+ for decl in document.ast.decls {
+ if range.start.line <= decl.pos.line && decl.end.line <= range.end.line {
+ write_semantic_tokens(decl, &builder, &ast_context);
+ }
+ }
- return get_tokens(builder);
+ return get_tokens(builder);
}
-write_semantic_node :: proc(builder: ^SemanticTokenBuilder, node: ^ast.Node, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
+write_semantic_node :: proc (builder: ^SemanticTokenBuilder, node: ^ast.Node, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
- position := common.get_relative_token_position(node.pos.offset, src, builder.current_start);
+ position := common.get_relative_token_position(node.pos.offset, src, builder.current_start);
- name := common.get_ast_node_string(node, src);
+ name := common.get_ast_node_string(node, src);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
- builder.current_start = node.pos.offset;
+ builder.current_start = node.pos.offset;
}
-write_semantic_token :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
+write_semantic_token :: proc (builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
- position := common.get_relative_token_position(token.pos.offset, src, builder.current_start);
+ position := common.get_relative_token_position(token.pos.offset, src, builder.current_start);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(token.text), cast(u32)type, 0);
- builder.current_start = token.pos.offset;
+ builder.current_start = token.pos.offset;
}
+write_semantic_token_pos :: proc (builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
-write_semantic_token_pos :: proc(builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
-
- position := common.get_relative_token_position(pos.offset, src, builder.current_start);
+ position := common.get_relative_token_position(pos.offset, src, builder.current_start);
- append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
+ append(&builder.tokens, cast(u32)position.line, cast(u32)position.character, cast(u32)len(name), cast(u32)type, 0);
- builder.current_start = pos.offset;
+ builder.current_start = pos.offset;
}
-
-resolve_and_write_ident :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) -> (is_member: bool, is_package: bool) {
-
- n := node.derived.(ast.Ident);
-
- ast_context.current_package = ast_context.document_package;
- ast_context.use_globals = true;
- ast_context.use_locals = true;
-
- if resolve_ident_is_variable(ast_context, n) {
- write_semantic_node(builder, node, ast_context.file.src, .Variable, .None);
- is_member = true;
- }
-
- else if symbol, ok := resolve_type_identifier(ast_context, n); ok {
-
- #partial switch v in symbol.value {
- case index.SymbolPackageValue:
- write_semantic_node(builder, node, ast_context.file.src, .Namespace, .None);
- is_package = true;
- case index.SymbolStructValue:
- write_semantic_node(builder, node, ast_context.file.src, .Struct, .None);
- case index.SymbolEnumValue:
- write_semantic_node(builder, node, ast_context.file.src, .Enum, .None);
- case index.SymbolUnionValue:
- write_semantic_node(builder, node, ast_context.file.src, .Enum, .None);
- case index.SymbolProcedureValue:
- write_semantic_node(builder, node, ast_context.file.src, .Function, .None);
- case index.SymbolProcedureGroupValue:
- write_semantic_node(builder, node, ast_context.file.src, .Function, .None);
- case index.SymbolGenericValue:
- #partial switch symbol.type {
- case .Keyword:
- write_semantic_node(builder, node, ast_context.file.src, .Keyword, .None);
- }
- }
-
- }
-
- return;
+resolve_and_write_ident :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) -> (is_member: bool, is_package: bool) {
+
+ n := node.derived.(ast.Ident);
+
+ ast_context.current_package = ast_context.document_package;
+ ast_context.use_globals = true;
+ ast_context.use_locals = true;
+
+ if resolve_ident_is_variable(ast_context, n) {
+ write_semantic_node(builder, node, ast_context.file.src, .Variable, .None);
+ is_member = true;
+ } else if symbol, ok := resolve_type_identifier(ast_context, n); ok {
+
+ #partial switch v in symbol.value {
+ case index.SymbolPackageValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Namespace, .None);
+ is_package = true;
+ case index.SymbolStructValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Struct, .None);
+ case index.SymbolEnumValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Enum, .None);
+ case index.SymbolUnionValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Enum, .None);
+ case index.SymbolProcedureValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Function, .None);
+ case index.SymbolProcedureGroupValue:
+ write_semantic_node(builder, node, ast_context.file.src, .Function, .None);
+ case index.SymbolGenericValue:
+ #partial switch symbol.type {
+ case .Keyword:
+ write_semantic_node(builder, node, ast_context.file.src, .Keyword, .None);
+ }
+ }
+ }
+
+ return;
}
-
write_semantic_tokens :: proc {
- write_semantic_tokens_node,
- write_semantic_tokens_dynamic_array,
- write_semantic_tokens_array,
- write_semantic_tokens_stmt,
-};
+write_semantic_tokens_node,
+write_semantic_tokens_dynamic_array,
+write_semantic_tokens_array,
+write_semantic_tokens_stmt};
-write_semantic_tokens_array :: proc(array: $A/[]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- for elem, i in array {
- write_semantic_tokens(elem, builder, ast_context);
- }
+write_semantic_tokens_array :: proc (array: $A/[]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+ for elem, i in array {
+ write_semantic_tokens(elem, builder, ast_context);
+ }
}
-write_semantic_tokens_dynamic_array :: proc(array: $A/[dynamic]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- for elem, i in array {
- write_semantic_tokens(elem, builder, ast_context);
- }
+write_semantic_tokens_dynamic_array :: proc (array: $A/[dynamic]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+ for elem, i in array {
+ write_semantic_tokens(elem, builder, ast_context);
+ }
}
-write_semantic_tokens_stmt :: proc(node: ^ast.Stmt, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
- ast_context.current_package = ast_context.document_package;
- ast_context.use_globals = true;
- ast_context.use_locals = true;
- builder.selector_member = false;
- builder.selector_package = false;
- write_semantic_tokens_node(node, builder, ast_context);
+write_semantic_tokens_stmt :: proc (node: ^ast.Stmt, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+ ast_context.current_package = ast_context.document_package;
+ ast_context.use_globals = true;
+ ast_context.use_locals = true;
+ builder.selector_member = false;
+ builder.selector_package = false;
+ write_semantic_tokens_node(node, builder, ast_context);
}
-write_semantic_tokens_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- switch n in node.derived {
- case Ident:
- /*EXPENSIVE!! But alas i can't just get locals per scope, but have to the exact position, because you can do shit like this:
- log.println("hello"); //log is namespace
- log := 2; //log is now variable
- a := log + 2;
- */
-
- get_locals_at(builder.current_function, node, ast_context);
- resolve_and_write_ident(node, builder, ast_context);
- case Selector_Expr:
- write_semantic_selector(cast(^Selector_Expr)node, builder, ast_context);
- case Pointer_Type:
- write_semantic_token_pos(builder, node.pos, "^", ast_context.file.src, .Operator, .None);
- write_semantic_tokens(n.elem, builder, ast_context);
- case Value_Decl:
- write_semantic_tokens_value_decl(n, builder, ast_context);
- case Block_Stmt:
- write_semantic_tokens(n.stmts, builder, ast_context);
- case Expr_Stmt:
- write_semantic_tokens(n.expr, builder, ast_context);
- case Range_Stmt:
-
- write_semantic_token_pos(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None);
- if n.val0 != nil {
- if ident, ok := n.val0.derived.(Ident); ok {
- write_semantic_node(builder, n.val0, ast_context.file.src, .Variable, .None);
- }
- }
-
- if n.val1 != nil {
- if ident, ok := n.val1.derived.(Ident); ok {
- write_semantic_node(builder, n.val1, ast_context.file.src, .Variable, .None);
- }
- }
-
- write_semantic_token_pos(builder, n.in_pos, "in", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.expr, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- case If_Stmt:
- write_semantic_token_pos(builder, n.if_pos, "if", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.init, builder, ast_context);
- write_semantic_tokens(n.cond, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- if n.else_stmt != nil {
- write_semantic_token_pos(builder, n.else_pos, "else", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.else_stmt, builder, ast_context);
- }
- case For_Stmt:
- write_semantic_token_pos(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.init, builder, ast_context);
- write_semantic_tokens(n.cond, builder, ast_context);
- write_semantic_tokens(n.post, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- case Switch_Stmt:
- write_semantic_token_pos(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.init, builder, ast_context);
- write_semantic_tokens(n.cond, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- case Type_Switch_Stmt:
- write_semantic_token_pos(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.tag, builder, ast_context);
- write_semantic_tokens(n.expr, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- case Assign_Stmt:
- for l in n.lhs {
- if ident, ok := l.derived.(Ident); ok {
- write_semantic_node(builder, l, ast_context.file.src, .Variable, .None);
- }
- else {
- write_semantic_tokens(l, builder, ast_context);
- }
- }
-
- write_semantic_token_op(builder, n.op, ast_context.file.src);
- write_semantic_tokens(n.rhs, builder, ast_context);
- case Case_Clause:
- write_semantic_token_pos(builder, n.case_pos, "case", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.list, builder, ast_context);
- write_semantic_tokens(n.body, builder, ast_context);
- case Call_Expr:
- //could there be any other type then .Function for call expr? No point of computing it if not.
- if ident, ok := n.expr.derived.(Ident); ok {
- write_semantic_node(builder, n.expr, ast_context.file.src, .Function, .None);
- }
- else {
- write_semantic_tokens(n.expr, builder, ast_context);
- }
- write_semantic_tokens(n.args, builder, ast_context);
- case Implicit_Selector_Expr:
- write_semantic_node(builder, n.field, ast_context.file.src, .Enum, .None);
- case Array_Type:
- write_semantic_tokens(n.elem, builder, ast_context);
- case Binary_Expr:
- write_semantic_tokens(n.left, builder, ast_context);
- write_semantic_token_op(builder, n.op, ast_context.file.src);
- write_semantic_tokens(n.right, builder, ast_context);
- case Comp_Lit:
- write_semantic_tokens(n.type, builder, ast_context);
- write_semantic_tokens(n.elems, builder, ast_context);
- case Struct_Type:
- write_semantic_token_pos(builder, n.pos, "struct", ast_context.file.src, .Keyword, .None);
- write_semantic_struct_fields(n, builder, ast_context);
- case Type_Assertion:
- write_semantic_tokens(n.expr, builder, ast_context);
- write_semantic_tokens(n.type, builder, ast_context);
- case Type_Cast:
- write_semantic_token_pos(builder, n.pos, "cast", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.type, builder, ast_context);
- write_semantic_tokens(n.expr, builder, ast_context);
- case Paren_Expr:
- write_semantic_tokens(n.expr, builder, ast_context);
- case Deref_Expr:
- write_semantic_tokens(n.expr, builder, ast_context);
- case Return_Stmt:
- write_semantic_token_pos(builder, n.pos, "return", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.results, builder, ast_context);
- case Dynamic_Array_Type:
- write_semantic_token_pos(builder, n.dynamic_pos, "dynamic", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.elem, builder, ast_context);
- case Field_Value:
- if ident, ok := n.field.derived.(Ident); ok {
- write_semantic_node(builder, n.field, ast_context.file.src, .Property, .None);
- }
-
- write_semantic_tokens(n.value, builder, ast_context);
- case Index_Expr:
- write_semantic_tokens(n.expr, builder, ast_context);
- write_semantic_tokens(n.index, builder, ast_context);
- case Basic_Lit:
- write_semantic_token_basic_lit(n, builder, ast_context);
- case Unary_Expr:
- write_semantic_tokens(n.expr, builder, ast_context);
- case Implicit:
- case Slice_Expr:
- write_semantic_tokens(n.expr, builder, ast_context);
- case Using_Stmt:
- write_semantic_token_pos(builder, n.pos, "using", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.list, builder, ast_context);
- case Map_Type:
- write_semantic_tokens(n.key, builder, ast_context);
- //write_semantic_token_pos(builder, n.tok_pos, "map", ast_context.file.src, .Keyword, .None);
- write_semantic_tokens(n.value, builder, ast_context);
- case:
- log.infof("unhandled write node %v", n);
- }
-
-
-
+write_semantic_tokens_node :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+
+ using ast;
+
+ if node == nil {
+ return;
+ }
+
+ switch n in node.derived {
+ case Ident:
+ /*EXPENSIVE!! But alas i can't just get locals per scope, but have to the exact position, because you can do shit like this:
+ log.println("hello"); //log is namespace
+ log := 2; //log is now variable
+ a := log + 2;
+ */
+
+ get_locals_at(builder.current_function, node, ast_context);
+ resolve_and_write_ident(node, builder, ast_context);
+ case Selector_Expr:
+ write_semantic_selector(cast(^Selector_Expr)node, builder, ast_context);
+ case Pointer_Type:
+ write_semantic_token_pos(builder, node.pos, "^", ast_context.file.src, .Operator, .None);
+ write_semantic_tokens(n.elem, builder, ast_context);
+ case Value_Decl:
+ write_semantic_tokens_value_decl(n, builder, ast_context);
+ case Block_Stmt:
+ write_semantic_tokens(n.stmts, builder, ast_context);
+ case Expr_Stmt:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Range_Stmt:
+
+ write_semantic_token_pos(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None);
+ if n.val0 != nil {
+ if ident, ok := n.val0.derived.(Ident); ok {
+ write_semantic_node(builder, n.val0, ast_context.file.src, .Variable, .None);
+ }
+ }
+
+ if n.val1 != nil {
+ if ident, ok := n.val1.derived.(Ident); ok {
+ write_semantic_node(builder, n.val1, ast_context.file.src, .Variable, .None);
+ }
+ }
+
+ write_semantic_token_pos(builder, n.in_pos, "in", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.expr, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ case If_Stmt:
+ write_semantic_token_pos(builder, n.if_pos, "if", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.init, builder, ast_context);
+ write_semantic_tokens(n.cond, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ if n.else_stmt != nil {
+ write_semantic_token_pos(builder, n.else_pos, "else", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.else_stmt, builder, ast_context);
+ }
+ case For_Stmt:
+ write_semantic_token_pos(builder, n.for_pos, "for", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.init, builder, ast_context);
+ write_semantic_tokens(n.cond, builder, ast_context);
+ write_semantic_tokens(n.post, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ case Switch_Stmt:
+ write_semantic_token_pos(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.init, builder, ast_context);
+ write_semantic_tokens(n.cond, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ case Type_Switch_Stmt:
+ write_semantic_token_pos(builder, n.switch_pos, "switch", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.tag, builder, ast_context);
+ write_semantic_tokens(n.expr, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ case Assign_Stmt:
+ for l in n.lhs {
+ if ident, ok := l.derived.(Ident); ok {
+ write_semantic_node(builder, l, ast_context.file.src, .Variable, .None);
+ } else {
+ write_semantic_tokens(l, builder, ast_context);
+ }
+ }
+
+ write_semantic_token_op(builder, n.op, ast_context.file.src);
+ write_semantic_tokens(n.rhs, builder, ast_context);
+ case Case_Clause:
+ write_semantic_token_pos(builder, n.case_pos, "case", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.list, builder, ast_context);
+ write_semantic_tokens(n.body, builder, ast_context);
+ case Call_Expr:
+ //could there be any other type then .Function for call expr? No point of computing it if not.
+ if ident, ok := n.expr.derived.(Ident); ok {
+ write_semantic_node(builder, n.expr, ast_context.file.src, .Function, .None);
+ } else {
+ write_semantic_tokens(n.expr, builder, ast_context);
+ }
+ write_semantic_tokens(n.args, builder, ast_context);
+ case Implicit_Selector_Expr:
+ write_semantic_node(builder, n.field, ast_context.file.src, .Enum, .None);
+ case Array_Type:
+ write_semantic_tokens(n.elem, builder, ast_context);
+ case Binary_Expr:
+ write_semantic_tokens(n.left, builder, ast_context);
+ write_semantic_token_op(builder, n.op, ast_context.file.src);
+ write_semantic_tokens(n.right, builder, ast_context);
+ case Comp_Lit:
+ write_semantic_tokens(n.type, builder, ast_context);
+ write_semantic_tokens(n.elems, builder, ast_context);
+ case Struct_Type:
+ write_semantic_token_pos(builder, n.pos, "struct", ast_context.file.src, .Keyword, .None);
+ write_semantic_struct_fields(n, builder, ast_context);
+ case Type_Assertion:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ write_semantic_tokens(n.type, builder, ast_context);
+ case Type_Cast:
+ write_semantic_token_pos(builder, n.pos, "cast", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.type, builder, ast_context);
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Paren_Expr:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Deref_Expr:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Return_Stmt:
+ write_semantic_token_pos(builder, n.pos, "return", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.results, builder, ast_context);
+ case Dynamic_Array_Type:
+ write_semantic_token_pos(builder, n.dynamic_pos, "dynamic", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.elem, builder, ast_context);
+ case Field_Value:
+ if ident, ok := n.field.derived.(Ident); ok {
+ write_semantic_node(builder, n.field, ast_context.file.src, .Property, .None);
+ }
+
+ write_semantic_tokens(n.value, builder, ast_context);
+ case Index_Expr:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ write_semantic_tokens(n.index, builder, ast_context);
+ case Basic_Lit:
+ write_semantic_token_basic_lit(n, builder, ast_context);
+ case Unary_Expr:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Implicit:
+ case Slice_Expr:
+ write_semantic_tokens(n.expr, builder, ast_context);
+ case Using_Stmt:
+ write_semantic_token_pos(builder, n.pos, "using", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.list, builder, ast_context);
+ case Map_Type:
+ write_semantic_tokens(n.key, builder, ast_context);
+ //write_semantic_token_pos(builder, n.tok_pos, "map", ast_context.file.src, .Keyword, .None);
+ write_semantic_tokens(n.value, builder, ast_context);
+ case:
+ log.infof("unhandled write node %v", n);
+ }
}
-write_semantic_token_basic_lit :: proc(basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- if symbol, ok := resolve_basic_lit(ast_context, basic_lit); ok {
-
- if generic, ok := symbol.value.(index.SymbolGenericValue); ok {
+write_semantic_token_basic_lit :: proc (basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
- ident := generic.expr.derived.(ast.Ident);
+ if symbol, ok := resolve_basic_lit(ast_context, basic_lit); ok {
- if ident.name == string_lit {
- write_semantic_node(builder, generic.expr, ast_context.file.src, .String, .None);
- }
+ if generic, ok := symbol.value.(index.SymbolGenericValue); ok {
- else if ident.name == int_lit {
- write_semantic_node(builder, generic.expr, ast_context.file.src, .Number, .None);
- }
+ ident := generic.expr.derived.(ast.Ident);
- else {
-
- }
-
- }
-
- }
+ if ident.name == string_lit {
+ write_semantic_node(builder, generic.expr, ast_context.file.src, .String, .None);
+ } else if ident.name == int_lit {
+ write_semantic_node(builder, generic.expr, ast_context.file.src, .Number, .None);
+ } else {
+ }
+ }
+ }
}
-write_semantic_tokens_value_decl :: proc(value_decl: ast.Value_Decl, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
-
- if value_decl.type != nil {
-
- for name in value_decl.names {
- write_semantic_node(builder, name, ast_context.file.src, .Variable, .None);
- }
-
- write_semantic_tokens(value_decl.type, builder, ast_context);
-
- return;
- }
-
- if len(value_decl.values) == 1 {
-
- switch v in value_decl.values[0].derived {
- case Struct_Type:
- write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Struct, .None);
- write_semantic_token_pos(builder, v.pos, "struct", ast_context.file.src, .Keyword, .None);
- write_semantic_struct_fields(v, builder, ast_context);
- case Enum_Type:
- write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None);
- write_semantic_token_pos(builder, v.pos, "enum", ast_context.file.src, .Keyword, .None);
- write_semantic_enum_fields(v, builder, ast_context);
- case Proc_Group:
- write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None);
- write_semantic_token_pos(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None);
- for arg in v.args {
- if ident, ok := arg.derived.(Ident); ok {
- write_semantic_node(builder, arg, ast_context.file.src, .Function, .None);
- }
- }
- case Proc_Lit:
- write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None);
- write_semantic_token_pos(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None);
- write_semantic_proc_type(v.type, builder, ast_context);
-
- last_function := builder.current_function;
- builder.current_function = value_decl.values[0];
- write_semantic_tokens(v.body, builder, ast_context);
- builder.current_function = last_function;
- case:
- write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Variable, .None);
- write_semantic_tokens(value_decl.values[0], builder, ast_context);
- }
-
- }
-
- else {
-
- for name in value_decl.names {
- write_semantic_node(builder, name, ast_context.file.src, .Variable, .None);
- }
-
- for value in value_decl.values {
- write_semantic_tokens(value, builder, ast_context);
- }
-
-
- }
-
-
-
-
+write_semantic_tokens_value_decl :: proc (value_decl: ast.Value_Decl, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+
+ using ast;
+
+ if value_decl.type != nil {
+
+ for name in value_decl.names {
+ write_semantic_node(builder, name, ast_context.file.src, .Variable, .None);
+ }
+
+ write_semantic_tokens(value_decl.type, builder, ast_context);
+
+ return;
+ }
+
+ if len(value_decl.values) == 1 {
+
+ switch v in value_decl.values[0].derived {
+ case Struct_Type:
+ write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Struct, .None);
+ write_semantic_token_pos(builder, v.pos, "struct", ast_context.file.src, .Keyword, .None);
+ write_semantic_struct_fields(v, builder, ast_context);
+ case Enum_Type:
+ write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Enum, .None);
+ write_semantic_token_pos(builder, v.pos, "enum", ast_context.file.src, .Keyword, .None);
+ write_semantic_enum_fields(v, builder, ast_context);
+ case Proc_Group:
+ write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None);
+ write_semantic_token_pos(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None);
+ for arg in v.args {
+ if ident, ok := arg.derived.(Ident); ok {
+ write_semantic_node(builder, arg, ast_context.file.src, .Function, .None);
+ }
+ }
+ case Proc_Lit:
+ write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Function, .None);
+ write_semantic_token_pos(builder, v.pos, "proc", ast_context.file.src, .Keyword, .None);
+ write_semantic_proc_type(v.type, builder, ast_context);
+
+ last_function := builder.current_function;
+ builder.current_function = value_decl.values[0];
+ write_semantic_tokens(v.body, builder, ast_context);
+ builder.current_function = last_function;
+ case:
+ write_semantic_node(builder, value_decl.names[0], ast_context.file.src, .Variable, .None);
+ write_semantic_tokens(value_decl.values[0], builder, ast_context);
+ }
+ } else {
+
+ for name in value_decl.names {
+ write_semantic_node(builder, name, ast_context.file.src, .Variable, .None);
+ }
+
+ for value in value_decl.values {
+ write_semantic_tokens(value, builder, ast_context);
+ }
+ }
}
+write_semantic_token_op :: proc (builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte) {
-write_semantic_token_op :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte) {
-
- if token.text == "=" {
- write_semantic_token_pos(builder, token.pos, token.text, src, .Operator, .None);
- }
-
- else if token.text == "in" {
- write_semantic_token_pos(builder, token.pos, token.text, src, .Keyword, .None);
- }
-
+ if token.text == "=" {
+ write_semantic_token_pos(builder, token.pos, token.text, src, .Operator, .None);
+ } else if token.text == "in" {
+ write_semantic_token_pos(builder, token.pos, token.text, src, .Keyword, .None);
+ }
}
-write_semantic_proc_type :: proc(node: ^ast.Proc_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
-
- if node == nil {
- return;
- }
-
- if node.params != nil {
-
- for param in node.params.list {
-
- for name in param.names {
+write_semantic_proc_type :: proc (node: ^ast.Proc_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
- if ident, ok := name.derived.(Ident); ok {
- write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None);
- }
+ using ast;
- }
+ if node == nil {
+ return;
+ }
- write_semantic_tokens(param.type, builder, ast_context);
- }
+ if node.params != nil {
- }
+ for param in node.params.list {
- if node.results != nil {
+ for name in param.names {
- for result in node.results.list {
+ if ident, ok := name.derived.(Ident); ok {
+ write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None);
+ }
+ }
+ write_semantic_tokens(param.type, builder, ast_context);
+ }
+ }
- for name in result.names {
+ if node.results != nil {
- if ident, ok := name.derived.(Ident); ok {
- //write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None);
- }
+ for result in node.results.list {
- }
+ for name in result.names {
- write_semantic_tokens(result.type, builder, ast_context);
-
- }
-
- }
+ if ident, ok := name.derived.(Ident); ok {
+ //write_semantic_node(builder, name, ast_context.file.src, .Parameter, .None);
+ }
+ }
+ write_semantic_tokens(result.type, builder, ast_context);
+ }
+ }
}
-write_semantic_enum_fields :: proc(node: ast.Enum_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
+write_semantic_enum_fields :: proc (node: ast.Enum_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
- if node.fields == nil {
- return;
- }
+ using ast;
- for field in node.fields {
+ if node.fields == nil {
+ return;
+ }
- if ident, ok := field.derived.(Ident); ok {
- write_semantic_node(builder, field, ast_context.file.src, .EnumMember, .None);
- }
-
- }
+ for field in node.fields {
+ if ident, ok := field.derived.(Ident); ok {
+ write_semantic_node(builder, field, ast_context.file.src, .EnumMember, .None);
+ }
+ }
}
-write_semantic_struct_fields :: proc(node: ast.Struct_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
+write_semantic_struct_fields :: proc (node: ast.Struct_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
- if node.fields == nil {
- return;
- }
+ using ast;
- for field in node.fields.list {
+ if node.fields == nil {
+ return;
+ }
- for name in field.names {
- if ident, ok := name.derived.(Ident); ok {
- write_semantic_node(builder, name, ast_context.file.src, .Property, .None);
- }
- }
+ for field in node.fields.list {
+ for name in field.names {
+ if ident, ok := name.derived.(Ident); ok {
+ write_semantic_node(builder, name, ast_context.file.src, .Property, .None);
+ }
+ }
- write_semantic_tokens(field.type, builder, ast_context);
-
- }
-
+ write_semantic_tokens(field.type, builder, ast_context);
+ }
}
-write_semantic_selector :: proc(selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
-
- using ast;
-
-
- if ident, ok := selector.expr.derived.(Ident); ok {
- get_locals_at(builder.current_function, selector.expr, ast_context);
- builder.selector_member, builder.selector_package = resolve_and_write_ident(selector.expr, builder, ast_context); //base
-
- if builder.selector_package && selector.field != nil && resolve_ident_is_variable(ast_context, selector.field^) {
- builder.selector_member = true;
- }
- }
-
- else {
- write_semantic_tokens(selector.expr, builder, ast_context);
- }
-
- if symbol, ok := resolve_type_expression(ast_context, selector); ok && !builder.selector_member {
-
- #partial switch v in symbol.value {
- case index.SymbolPackageValue:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Namespace, .None);
- case index.SymbolStructValue:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Struct, .None);
- case index.SymbolEnumValue:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
- case index.SymbolUnionValue:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
- case index.SymbolProcedureGroupValue:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None);
- case index.SymbolGenericValue:
- #partial switch symbol.type {
- case .Keyword:
- write_semantic_node(builder, selector.field, ast_context.file.src, .Keyword, .None);
- }
- }
-
- }
-
- else if (builder.selector_member) {
- write_semantic_node(builder, selector.field, ast_context.file.src, .Property, .None);
- }
-
+write_semantic_selector :: proc (selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+
+ using ast;
+
+ if ident, ok := selector.expr.derived.(Ident); ok {
+ get_locals_at(builder.current_function, selector.expr, ast_context);
+ builder.selector_member, builder.selector_package = resolve_and_write_ident(selector.expr, builder, ast_context); //base
+
+ if builder.selector_package && selector.field != nil && resolve_ident_is_variable(ast_context, selector.field^) {
+ builder.selector_member = true;
+ }
+ } else {
+ write_semantic_tokens(selector.expr, builder, ast_context);
+ }
+
+ if symbol, ok := resolve_type_expression(ast_context, selector); ok && !builder.selector_member {
+
+ #partial switch v in symbol.value {
+ case index.SymbolPackageValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Namespace, .None);
+ case index.SymbolStructValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Struct, .None);
+ case index.SymbolEnumValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
+ case index.SymbolUnionValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Enum, .None);
+ case index.SymbolProcedureGroupValue:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Function, .None);
+ case index.SymbolGenericValue:
+ #partial switch symbol.type {
+ case .Keyword:
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Keyword, .None);
+ }
+ }
+ } else if (builder.selector_member) {
+ write_semantic_node(builder, selector.field, ast_context.file.src, .Property, .None);
+ }
}
-get_locals_at :: proc(function: ^ast.Node, position: ^ast.Node, ast_context: ^AstContext) {
+get_locals_at :: proc (function: ^ast.Node, position: ^ast.Node, ast_context: ^AstContext) {
- clear_locals(ast_context);
+ clear_locals(ast_context);
- if function == nil {
- return;
- }
+ if function == nil {
+ return;
+ }
- if position == nil {
- return;
- }
+ if position == nil {
+ return;
+ }
- document_position := DocumentPositionContext {
- position = position.end.offset,
- };
+ document_position := DocumentPositionContext {
+ position = position.end.offset
+ };
- get_locals(ast_context.file, function, ast_context, &document_position);
+ get_locals(ast_context.file, function, ast_context, &document_position);
} \ No newline at end of file
diff --git a/src/server/types.odin b/src/server/types.odin
index 9b8ecd3..2ee477b 100644
--- a/src/server/types.odin
+++ b/src/server/types.odin
@@ -5,347 +5,345 @@ import "core:encoding/json"
import "shared:common"
/*
- General types
+ General types
*/
//TODO(Daniel, move some of the more specific structs to their appropriate place)
RequestId :: union {
- string,
- i64,
-};
+ string,
+ i64,
+}
ResponseParams :: union {
- ResponseInitializeParams,
- rawptr,
- common.Location,
- CompletionList,
- SignatureHelp,
- [] DocumentSymbol,
- SemanticTokens,
- Hover,
- [] TextEdit,
-};
+ ResponseInitializeParams,
+ rawptr,
+ common.Location,
+ CompletionList,
+ SignatureHelp,
+ []DocumentSymbol,
+ SemanticTokens,
+ Hover,
+ []TextEdit,
+}
ResponseMessage :: struct {
- jsonrpc: string,
- id: RequestId,
- result: ResponseParams,
-};
+ jsonrpc: string,
+ id: RequestId,
+ result: ResponseParams,
+}
ResponseMessageError :: struct {
- jsonrpc: string,
- id: RequestId,
- error: ResponseError,
-};
+ jsonrpc: string,
+ id: RequestId,
+ error: ResponseError,
+}
ResponseError :: struct {
- code: common.Error,
- message: string,
-};
+ code: common.Error,
+ message: string,
+}
NotificationLoggingParams :: struct {
- type: int,
- message: string,
-};
+ type: int,
+ message: string,
+}
NotificationPublishDiagnosticsParams :: struct {
- uri: string,
- diagnostics: [] Diagnostic,
-};
+ uri: string,
+ diagnostics: []Diagnostic,
+}
NotificationParams :: union {
- NotificationLoggingParams,
- NotificationPublishDiagnosticsParams,
-};
+ NotificationLoggingParams,
+ NotificationPublishDiagnosticsParams,
+}
Notification :: struct {
- jsonrpc: string,
- method: string,
- params: NotificationParams
-};
+ jsonrpc: string,
+ method: string,
+ params: NotificationParams,
+}
ResponseInitializeParams :: struct {
- capabilities: ServerCapabilities,
-};
+ capabilities: ServerCapabilities,
+}
RequestInitializeParams :: struct {
- trace: string,
- workspaceFolders: [dynamic] common.WorkspaceFolder,
- capabilities: ClientCapabilities,
-};
+ trace: string,
+ workspaceFolders: [dynamic]common.WorkspaceFolder,
+ capabilities: ClientCapabilities,
+}
MarkupContent :: struct {
- kind: string,
+ kind: string,
value: string,
-};
+}
ServerCapabilities :: struct {
- textDocumentSync: TextDocumentSyncOptions,
- definitionProvider: bool,
- completionProvider: CompletionOptions,
- signatureHelpProvider: SignatureHelpOptions,
- semanticTokensProvider: SemanticTokensOptions,
- documentSymbolProvider: bool,
- hoverProvider: bool,
- documentFormattingProvider: bool,
-};
-
-CompletionOptions :: struct {
- resolveProvider: bool,
- triggerCharacters: [] string,
-};
+ textDocumentSync: TextDocumentSyncOptions,
+ definitionProvider: bool,
+ completionProvider: CompletionOptions,
+ signatureHelpProvider: SignatureHelpOptions,
+ semanticTokensProvider: SemanticTokensOptions,
+ documentSymbolProvider: bool,
+ hoverProvider: bool,
+ documentFormattingProvider: bool,
+}
+
+CompletionOptions :: struct {
+ resolveProvider: bool,
+ triggerCharacters: []string,
+}
SaveOptions :: struct {
includeText: bool,
}
HoverClientCapabilities :: struct {
- dynamicRegistration: bool,
- contentFormat: [dynamic] string,
-};
+ dynamicRegistration: bool,
+ contentFormat: [dynamic]string,
+}
DocumentSymbolClientCapabilities :: struct {
-
- symbolKind: struct {
- valueSet: [dynamic] SymbolKind,
- },
-
- hierarchicalDocumentSymbolSupport: bool,
-};
+ symbolKind: struct {
+ valueSet: [dynamic]SymbolKind,
+ },
+ hierarchicalDocumentSymbolSupport: bool,
+}
TextDocumentClientCapabilities :: struct {
- completion: CompletionClientCapabilities,
- hover: HoverClientCapabilities,
- signatureHelp: SignatureHelpClientCapabilities,
- documentSymbol: DocumentSymbolClientCapabilities,
-};
+ completion: CompletionClientCapabilities,
+ hover: HoverClientCapabilities,
+ signatureHelp: SignatureHelpClientCapabilities,
+ documentSymbol: DocumentSymbolClientCapabilities,
+}
CompletionClientCapabilities :: struct {
- documentationFormat: [dynamic] string,
-};
+ documentationFormat: [dynamic]string,
+}
ParameterInformationCapabilities :: struct {
- labelOffsetSupport: bool,
-};
+ labelOffsetSupport: bool,
+}
SignatureInformationCapabilities :: struct {
- parameterInformation: ParameterInformationCapabilities,
-};
+ parameterInformation: ParameterInformationCapabilities,
+}
SignatureHelpClientCapabilities :: struct {
- dynamicRegistration: bool,
- signatureInformation: SignatureInformationCapabilities,
- contextSupport: bool,
-};
+ dynamicRegistration: bool,
+ signatureInformation: SignatureInformationCapabilities,
+ contextSupport: bool,
+}
SignatureHelpOptions :: struct {
- triggerCharacters: [] string,
- retriggerCharacters: [] string,
-};
+ triggerCharacters: []string,
+ retriggerCharacters: []string,
+}
ClientCapabilities :: struct {
- textDocument: TextDocumentClientCapabilities,
-};
+ textDocument: TextDocumentClientCapabilities,
+}
RangeOptional :: union {
- common.Range,
-};
+ common.Range
+}
TextDocumentContentChangeEvent :: struct {
- range: RangeOptional,
- text: string,
-};
+ range: RangeOptional,
+ text: string,
+}
Version :: union {
- int,
- json.Null,
-};
+ int,
+ json.Null,
+}
-VersionedTextDocumentIdentifier :: struct {
- uri: string,
-};
+VersionedTextDocumentIdentifier :: struct {
+ uri: string,
+}
TextDocumentIdentifier :: struct {
- uri: string,
-};
+ uri: string,
+}
TextDocumentItem :: struct {
- uri: string,
- text: string,
-};
+ uri: string,
+ text: string,
+}
DiagnosticSeverity :: enum {
- Error = 1,
- Warning = 2,
- Information = 3,
- Hint = 4,
-};
+ Error = 1,
+ Warning = 2,
+ Information = 3,
+ Hint = 4,
+}
Diagnostic :: struct {
- range: common.Range,
- severity: DiagnosticSeverity,
- code: string,
- message: string,
-};
+ range: common.Range,
+ severity: DiagnosticSeverity,
+ code: string,
+ message: string,
+}
DidOpenTextDocumentParams :: struct {
- textDocument: TextDocumentItem,
-};
+ textDocument: TextDocumentItem,
+}
-DocumentSymbolParams :: struct {
- textDocument: TextDocumentIdentifier,
-};
+DocumentSymbolParams :: struct {
+ textDocument: TextDocumentIdentifier,
+}
DidChangeTextDocumentParams :: struct {
- textDocument: VersionedTextDocumentIdentifier,
- contentChanges: [dynamic] TextDocumentContentChangeEvent,
-};
+ textDocument: VersionedTextDocumentIdentifier,
+ contentChanges: [dynamic]TextDocumentContentChangeEvent,
+}
DidCloseTextDocumentParams :: struct {
- textDocument: TextDocumentIdentifier,
-};
+ textDocument: TextDocumentIdentifier,
+}
DidSaveTextDocumentParams :: struct {
- textDocument: TextDocumentIdentifier,
- text: string,
-};
+ textDocument: TextDocumentIdentifier,
+ text: string,
+}
TextDocumentPositionParams :: struct {
- textDocument: TextDocumentIdentifier,
- position: common.Position,
-};
+ textDocument: TextDocumentIdentifier,
+ position: common.Position,
+}
SignatureHelpParams :: struct {
- textDocument: TextDocumentIdentifier,
- position: common.Position,
-};
+ textDocument: TextDocumentIdentifier,
+ position: common.Position,
+}
CompletionParams :: struct {
- textDocument: TextDocumentIdentifier,
- position: common.Position,
-};
+ textDocument: TextDocumentIdentifier,
+ position: common.Position,
+}
CompletionItemKind :: enum {
- Text = 1,
- Method = 2,
- Function = 3,
- Constructor = 4,
- Field = 5,
- Variable = 6,
- Class = 7,
- Interface = 8,
- Module = 9,
- Property = 10,
- Unit = 11,
- Value = 12,
- Enum = 13,
- Keyword = 14,
- Snippet = 15,
- Color = 16,
- File = 17,
- Reference = 18,
- Folder = 19,
- EnumMember = 20,
- Constant = 21,
- Struct = 22,
- Event = 23,
- Operator = 24,
- TypeParameter = 25,
-};
+ Text = 1,
+ Method = 2,
+ Function = 3,
+ Constructor = 4,
+ Field = 5,
+ Variable = 6,
+ Class = 7,
+ Interface = 8,
+ Module = 9,
+ Property = 10,
+ Unit = 11,
+ Value = 12,
+ Enum = 13,
+ Keyword = 14,
+ Snippet = 15,
+ Color = 16,
+ File = 17,
+ Reference = 18,
+ Folder = 19,
+ EnumMember = 20,
+ Constant = 21,
+ Struct = 22,
+ Event = 23,
+ Operator = 24,
+ TypeParameter = 25,
+}
CompletionItem :: struct {
- label: string,
- kind: CompletionItemKind,
- detail: string,
- documentation: string,
-};
+ label: string,
+ kind: CompletionItemKind,
+ detail: string,
+ documentation: string,
+}
CompletionList :: struct {
- isIncomplete: bool,
- items: [] CompletionItem,
-};
+ isIncomplete: bool,
+ items: []CompletionItem,
+}
TextDocumentSyncOptions :: struct {
- openClose: bool,
- change: int,
- save: SaveOptions,
-};
+ openClose: bool,
+ change: int,
+ save: SaveOptions,
+}
SignatureHelp :: struct {
- signatures: [] SignatureInformation,
- activeSignature: int,
- activeParameter: int,
-};
+ signatures: []SignatureInformation,
+ activeSignature: int,
+ activeParameter: int,
+}
SignatureInformation :: struct {
- label: string,
- documentation: string,
- parameters: [] ParameterInformation,
-};
+ label: string,
+ documentation: string,
+ parameters: []ParameterInformation,
+}
ParameterInformation :: struct {
- label: [2] int,
-};
+ label: [2]int,
+}
OlsConfig :: struct {
- collections: [dynamic] OlsConfigCollection,
- thread_pool_count: int,
- enable_semantic_tokens: bool,
- enable_document_symbols: bool,
- enable_hover: bool,
- enable_format: bool,
- verbose: bool,
-};
+ collections: [dynamic]OlsConfigCollection,
+ thread_pool_count: int,
+ enable_semantic_tokens: bool,
+ enable_document_symbols: bool,
+ enable_hover: bool,
+ enable_format: bool,
+ verbose: bool,
+}
OlsConfigCollection :: struct {
- name: string,
- path: string,
-};
+ name: string,
+ path: string,
+}
SymbolKind :: enum {
- File = 1,
- Module = 2,
- Namespace = 3,
- Package = 4,
- Class = 5,
- Method = 6,
- Property = 7,
- Field = 8,
- Constructor = 9,
- Enum = 10,
- Interface = 11,
- Function = 12,
- Variable = 13,
- Constant = 14,
- String = 15,
- Number = 16,
- Boolean = 17,
- Array = 18,
- Object = 19,
- Key = 20,
- Null = 21,
- EnumMember = 22,
- Struct = 23,
- Event = 24,
- Operator = 25,
- TypeParameter = 26,
-};
+ File = 1,
+ Module = 2,
+ Namespace = 3,
+ Package = 4,
+ Class = 5,
+ Method = 6,
+ Property = 7,
+ Field = 8,
+ Constructor = 9,
+ Enum = 10,
+ Interface = 11,
+ Function = 12,
+ Variable = 13,
+ Constant = 14,
+ String = 15,
+ Number = 16,
+ Boolean = 17,
+ Array = 18,
+ Object = 19,
+ Key = 20,
+ Null = 21,
+ EnumMember = 22,
+ Struct = 23,
+ Event = 24,
+ Operator = 25,
+ TypeParameter = 26,
+}
DocumentSymbol :: struct {
- name: string,
- kind: SymbolKind,
- range: common.Range,
- selectionRange: common.Range,
- children: [] DocumentSymbol,
-};
+ name: string,
+ kind: SymbolKind,
+ range: common.Range,
+ selectionRange: common.Range,
+ children: []DocumentSymbol,
+}
HoverParams :: struct {
- textDocument: TextDocumentIdentifier,
- position: common.Position,
-};
+ textDocument: TextDocumentIdentifier,
+ position: common.Position,
+}
Hover :: struct {
- contents: MarkupContent,
- range: common.Range,
-}; \ No newline at end of file
+ contents: MarkupContent,
+ range: common.Range,
+} \ No newline at end of file
diff --git a/src/server/unmarshal.odin b/src/server/unmarshal.odin
index 908f83c..77459bf 100644
--- a/src/server/unmarshal.odin
+++ b/src/server/unmarshal.odin
@@ -9,146 +9,141 @@ import "core:fmt"
//Note(Daniel, investigate if you can use some sort of attribute not to be forced to have the same variable name as the json name)
/*
- Right now union handling is type specific so you can only have one struct type, int type, etc.
- */
-
-unmarshal :: proc(json_value: json.Value, v: any, allocator: mem.Allocator) -> json.Marshal_Error {
-
- using runtime;
-
- if v == nil {
- return .None;
- }
-
- if json_value.value == nil {
- return .None;
- }
-
- type_info := type_info_base(type_info_of(v.id));
-
- #partial
- switch j in json_value.value {
- case json.Object:
- #partial switch variant in type_info.variant {
- case Type_Info_Struct:
- for field, i in variant.names {
- a := any{rawptr(uintptr(v.data) + uintptr(variant.offsets[i])), variant.types[i].id};
- if ret := unmarshal(j[field], a, allocator); ret != .None {
- return ret;
- }
- }
-
- case Type_Info_Union:
-
- //Note(Daniel, THIS IS REALLY SCUFFED. Need to talk to gingerbill about unmarshalling unions)
-
- //This only works for unions with one object - made to handle optionals
- tag_ptr := uintptr(v.data) + variant.tag_offset;
- tag_any := any{rawptr(tag_ptr), variant.tag_type.id};
-
- not_optional := 1;
-
- mem.copy(cast(rawptr)tag_ptr, &not_optional, size_of(variant.tag_type));
-
- id := variant.variants[0].id;
-
- unmarshal(json_value, any{v.data, id}, allocator);
-
- }
- case json.Array:
- #partial switch variant in type_info.variant {
- case Type_Info_Dynamic_Array:
- array := (^mem.Raw_Dynamic_Array)(v.data);
- if array.data == nil {
- array.data = mem.alloc(len(j)*variant.elem_size, variant.elem.align, allocator);
- array.len = len(j);
- array.cap = len(j);
- array.allocator = allocator;
- }
- else {
- return .Invalid_Data;
- }
-
- for i in 0..<array.len {
- a := any{rawptr(uintptr(array.data) + uintptr(variant.elem_size * i)), variant.elem.id};
-
- if ret := unmarshal(j[i], a, allocator); ret != .None {
- return ret;
- }
- }
-
- case:
- return .Unsupported_Type;
- }
- case json.String:
- #partial switch variant in type_info.variant {
- case Type_Info_String:
- str := (^string)(v.data);
- str^ = strings.clone(j, allocator);
-
- case Type_Info_Enum:
- for name, i in variant.names {
-
- lower_name := strings.to_lower(name, allocator);
- lower_j := strings.to_lower(string(j), allocator);
-
- if lower_name == lower_j {
- mem.copy(v.data, &variant.values[i], size_of(variant.base));
- }
-
- delete(lower_name, allocator);
- delete(lower_j, allocator);
- }
- }
- case json.Integer:
- #partial switch variant in &type_info.variant {
- case Type_Info_Integer:
- switch type_info.size {
- case 8:
- tmp := i64(j);
- mem.copy(v.data, &tmp, type_info.size);
-
- case 4:
- tmp := i32(j);
- mem.copy(v.data, &tmp, type_info.size);
-
- case 2:
- tmp := i16(j);
- mem.copy(v.data, &tmp, type_info.size);
-
- case 1:
- tmp := i8(j);
- mem.copy(v.data, &tmp, type_info.size);
- case:
- return .Invalid_Data;
- }
- case Type_Info_Union:
- tag_ptr := uintptr(v.data) + variant.tag_offset;
- }
- case json.Float:
- if _, ok := type_info.variant.(Type_Info_Float); ok {
- switch type_info.size {
- case 8:
- tmp := f64(j);
- mem.copy(v.data, &tmp, type_info.size);
- case 4:
- tmp := f32(j);
- mem.copy(v.data, &tmp, type_info.size);
- case:
- return .Invalid_Data;
- }
-
- }
- case json.Null:
- case json.Boolean :
- if _, ok := type_info.variant.(Type_Info_Boolean); ok {
- tmp := bool(j);
- mem.copy(v.data, &tmp, type_info.size);
- }
- case:
- return .Unsupported_Type;
- }
-
- return .None;
-}
-
+ Right now union handling is type specific so you can only have one struct type, int type, etc.
+*/
+
+unmarshal :: proc (json_value: json.Value, v: any, allocator: mem.Allocator) -> json.Marshal_Error {
+
+ using runtime;
+
+ if v == nil {
+ return .None;
+ }
+
+ if json_value.value == nil {
+ return .None;
+ }
+
+ type_info := type_info_base(type_info_of(v.id));
+
+ #partial switch j in json_value.value {
+ case json.Object:
+ #partial switch variant in type_info.variant {
+ case Type_Info_Struct:
+ for field, i in variant.names {
+ a := any {rawptr(uintptr(v.data) + uintptr(variant.offsets[i])), variant.types[i].id};
+ if ret := unmarshal(j[field], a, allocator); ret != .None {
+ return ret;
+ }
+ }
+
+ case Type_Info_Union:
+
+ //Note(Daniel, THIS IS REALLY SCUFFED. Need to talk to gingerbill about unmarshalling unions)
+
+ //This only works for unions with one object - made to handle optionals
+ tag_ptr := uintptr(v.data) + variant.tag_offset;
+ tag_any := any {rawptr(tag_ptr), variant.tag_type.id};
+
+ not_optional := 1;
+
+ mem.copy(cast(rawptr)tag_ptr, &not_optional, size_of(variant.tag_type));
+
+ id := variant.variants[0].id;
+
+ unmarshal(json_value, any {v.data, id}, allocator);
+ }
+ case json.Array:
+ #partial switch variant in type_info.variant {
+ case Type_Info_Dynamic_Array:
+ array := (^mem.Raw_Dynamic_Array)(v.data);
+ if array.data == nil {
+ array.data = mem.alloc(len(j) * variant.elem_size, variant.elem.align, allocator);
+ array.len = len(j);
+ array.cap = len(j);
+ array.allocator = allocator;
+ } else {
+ return .Invalid_Data;
+ }
+
+ for i in 0..<array.len {
+ a := any {rawptr(uintptr(array.data) + uintptr(variant.elem_size * i)), variant.elem.id};
+
+ if ret := unmarshal(j[i], a, allocator); ret != .None {
+ return ret;
+ }
+ }
+
+ case:
+ return .Unsupported_Type;
+ }
+ case json.String:
+ #partial switch variant in type_info.variant {
+ case Type_Info_String:
+ str := (^string)(v.data);
+ str^ = strings.clone(j, allocator);
+
+ case Type_Info_Enum:
+ for name, i in variant.names {
+
+ lower_name := strings.to_lower(name, allocator);
+ lower_j := strings.to_lower(string(j), allocator);
+
+ if lower_name == lower_j {
+ mem.copy(v.data, &variant.values[i], size_of(variant.base));
+ }
+
+ delete(lower_name, allocator);
+ delete(lower_j, allocator);
+ }
+ }
+ case json.Integer:
+ #partial switch variant in &type_info.variant {
+ case Type_Info_Integer:
+ switch type_info.size {
+ case 8:
+ tmp := i64(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 4:
+ tmp := i32(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 2:
+ tmp := i16(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 1:
+ tmp := i8(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case:
+ return .Invalid_Data;
+ }
+ case Type_Info_Union:
+ tag_ptr := uintptr(v.data) + variant.tag_offset;
+ }
+ case json.Float:
+ if _, ok := type_info.variant.(Type_Info_Float); ok {
+ switch type_info.size {
+ case 8:
+ tmp := f64(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case 4:
+ tmp := f32(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case:
+ return .Invalid_Data;
+ }
+ }
+ case json.Null:
+ case json.Boolean:
+ if _, ok := type_info.variant.(Type_Info_Boolean); ok {
+ tmp := bool(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ }
+ case:
+ return .Unsupported_Type;
+ }
+
+ return .None;
+} \ No newline at end of file
diff --git a/src/server/workspace.odin b/src/server/workspace.odin
index abb4e43..f275077 100644
--- a/src/server/workspace.odin
+++ b/src/server/workspace.odin
@@ -1 +1 @@
-package server
+package server \ No newline at end of file
diff --git a/src/server/writer.odin b/src/server/writer.odin
index 3edc4ab..6a1b8ed 100644
--- a/src/server/writer.odin
+++ b/src/server/writer.odin
@@ -6,32 +6,30 @@ import "core:fmt"
import "core:strings"
import "core:sync"
-WriterFn :: proc(rawptr, [] byte) -> (int, int);
+WriterFn :: proc (_: rawptr, _: []byte) -> (int, int);
Writer :: struct {
- writer_fn: WriterFn,
- writer_context: rawptr,
- writer_mutex: sync.Mutex,
-};
-
-make_writer :: proc(writer_fn: WriterFn, writer_context: rawptr) -> Writer {
- writer := Writer { writer_context = writer_context, writer_fn = writer_fn };
- sync.mutex_init(&writer.writer_mutex);
- return writer;
+ writer_fn: WriterFn,
+ writer_context: rawptr,
+ writer_mutex: sync.Mutex,
}
-write_sized :: proc(writer: ^Writer, data: []byte) -> bool {
-
- sync.mutex_lock(&writer.writer_mutex);
- defer sync.mutex_unlock(&writer.writer_mutex);
+make_writer :: proc (writer_fn: WriterFn, writer_context: rawptr) -> Writer {
+ writer := Writer {writer_context = writer_context, writer_fn = writer_fn};
+ sync.mutex_init(&writer.writer_mutex);
+ return writer;
+}
- written, err := writer.writer_fn(writer.writer_context, data);
+write_sized :: proc (writer: ^Writer, data: []byte) -> bool {
- if(err != 0) {
- return false;
- }
+ sync.mutex_lock(&writer.writer_mutex);
+ defer sync.mutex_unlock(&writer.writer_mutex);
- return true;
-}
+ written, err := writer.writer_fn(writer.writer_context, data);
+ if (err != 0) {
+ return false;
+ }
+ return true;
+} \ No newline at end of file
diff --git a/src/session/capture.odin b/src/session/capture.odin
index 6c9f3f5..e71c79f 100644
--- a/src/session/capture.odin
+++ b/src/session/capture.odin
@@ -1,2 +1 @@
-package session
-
+package session \ No newline at end of file
diff --git a/src/session/replay.odin b/src/session/replay.odin
index e69de29..0519ecb 100644
--- a/src/session/replay.odin
+++ b/src/session/replay.odin
@@ -0,0 +1 @@
+ \ No newline at end of file