author    DanielGavin <danielgavin5@hotmail.com>    2021-03-18 10:44:09 +0100
committer DanielGavin <danielgavin5@hotmail.com>    2021-03-18 10:44:09 +0100
commit    6ff8c1b780759d02156bf4c69c3c443ece12c160 (patch)
tree      1ce5fc6f27310952b814670cf8bffc8ddccd532b /src
parent    2313142359f69769b838b835f15c726d1ce2ebb4 (diff)
ran odinfmt
Diffstat (limited to 'src')
-rw-r--r--  src/common/allocator.odin        14
-rw-r--r--  src/common/ast.odin              40
-rw-r--r--  src/common/fuzzy.odin            26
-rw-r--r--  src/common/pool.odin             20
-rw-r--r--  src/common/position.odin         18
-rw-r--r--  src/common/pretty.odin           15
-rw-r--r--  src/common/sha1.odin             18
-rw-r--r--  src/common/track_allocator.odin  30
-rw-r--r--  src/common/uri.odin              19
-rw-r--r--  src/index/build.odin             10
-rw-r--r--  src/index/clone.odin             65
-rw-r--r--  src/index/collector.odin         54
-rw-r--r--  src/index/indexer.odin           12
-rw-r--r--  src/index/memory_index.odin      10
-rw-r--r--  src/index/symbol.odin            12
-rw-r--r--  src/index/util.odin              17
-rw-r--r--  src/server/analysis.odin         214
-rw-r--r--  src/server/background.odin       2
-rw-r--r--  src/server/completion.odin       64
-rw-r--r--  src/server/documents.odin        60
-rw-r--r--  src/server/format.odin           5
-rw-r--r--  src/server/hover.odin            6
-rw-r--r--  src/server/log.odin              6
-rw-r--r--  src/server/reader.odin           10
-rw-r--r--  src/server/requests.odin         24
-rw-r--r--  src/server/response.odin         12
-rw-r--r--  src/server/semantic_tokens.odin  50
-rw-r--r--  src/server/types.odin            2
-rw-r--r--  src/server/unmarshal.odin        2
-rw-r--r--  src/server/writer.odin           6
30 files changed, 411 insertions, 432 deletions
diff --git a/src/common/allocator.odin b/src/common/allocator.odin
index a7bfae7..4f72f81 100644
--- a/src/common/allocator.odin
+++ b/src/common/allocator.odin
@@ -30,15 +30,15 @@ scratch_allocator_destroy :: proc (s: ^Scratch_Allocator) {
s^ = {};
}
-scratch_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode,
-size, alignment: int,
+scratch_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode,
+size, alignment: int,
old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
s := (^Scratch_Allocator)(allocator_data);
if s.data == nil {
DEFAULT_BACKING_SIZE :: 1 << 22;
- if !(context.allocator.procedure != scratch_allocator_proc &&
+ if !(context.allocator.procedure != scratch_allocator_proc &&
context.allocator.data != allocator_data) {
panic("cyclic initialization of the scratch allocator with itself");
}
@@ -54,7 +54,7 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
switch {
case s.curr_offset + size <= len(s.data):
start := uintptr(raw_data(s.data));
- ptr := start + uintptr(s.curr_offset);
+ ptr := start + uintptr(s.curr_offset);
ptr = mem.align_forward_uintptr(ptr, uintptr(alignment));
mem.zero(rawptr(ptr), size);
@@ -79,7 +79,7 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
case .Free:
case .Free_All:
- s.curr_offset = 0;
+ s.curr_offset = 0;
s.prev_allocation = nil;
for ptr in s.leaked_allocations {
free(ptr, s.backup_allocator);
@@ -87,8 +87,8 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
clear(&s.leaked_allocations);
case .Resize:
- begin := uintptr(raw_data(s.data));
- end := begin + uintptr(len(s.data));
+ begin := uintptr(raw_data(s.data));
+ end := begin + uintptr(len(s.data));
old_ptr := uintptr(old_memory);
//if begin <= old_ptr && old_ptr < end && old_ptr+uintptr(size) < end {
// s.curr_offset = int(old_ptr-begin)+size;
diff --git a/src/common/ast.odin b/src/common/ast.odin
index c807ce8..7532148 100644
--- a/src/common/ast.odin
+++ b/src/common/ast.odin
@@ -34,7 +34,7 @@ GlobalExpr :: struct {
}
//TODO(add a sub procedure to avoid repeating the value decl work)
-collect_globals :: proc (file: ast.File) -> []GlobalExpr {
+collect_globals :: proc(file: ast.File) -> []GlobalExpr {
exprs := make([dynamic]GlobalExpr, context.temp_allocator);
@@ -151,17 +151,18 @@ collect_globals :: proc (file: ast.File) -> []GlobalExpr {
return exprs[:];
}
-get_ast_node_string :: proc (node: ^ast.Node, src: []byte) -> string {
+get_ast_node_string :: proc(node: ^ast.Node, src: []byte) -> string {
return string(src[node.pos.offset:node.end.offset]);
}
-free_ast :: proc {
-free_ast_node,
-free_ast_array,
-free_ast_dynamic_array,
-free_ast_comment};
+free_ast :: proc{
+ free_ast_node,
+ free_ast_array,
+ free_ast_dynamic_array,
+ free_ast_comment,
+};
-free_ast_comment :: proc (a: ^ast.Comment_Group, allocator: mem.Allocator) {
+free_ast_comment :: proc(a: ^ast.Comment_Group, allocator: mem.Allocator) {
if a == nil {
return;
}
@@ -173,14 +174,14 @@ free_ast_comment :: proc (a: ^ast.Comment_Group, allocator: mem.Allocator) {
free(a, allocator);
}
-free_ast_array :: proc (array: $A/[]^$T, allocator: mem.Allocator) {
+free_ast_array :: proc(array: $A/[]^$T, allocator: mem.Allocator) {
for elem, i in array {
free_ast(elem, allocator);
}
delete(array, allocator);
}
-free_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator) {
+free_ast_dynamic_array :: proc(array: $A/[dynamic]^$T, allocator: mem.Allocator) {
for elem, i in array {
free_ast(elem, allocator);
}
@@ -188,7 +189,7 @@ free_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator
delete(array);
}
-free_ast_node :: proc (node: ^ast.Node, allocator: mem.Allocator) {
+free_ast_node :: proc(node: ^ast.Node, allocator: mem.Allocator) {
using ast;
@@ -397,7 +398,7 @@ free_ast_node :: proc (node: ^ast.Node, allocator: mem.Allocator) {
mem.free(node, allocator);
}
-free_ast_file :: proc (file: ast.File, allocator := context.allocator) {
+free_ast_file :: proc(file: ast.File, allocator := context.allocator) {
for decl in file.decls {
free_ast(decl, allocator);
@@ -414,12 +415,13 @@ free_ast_file :: proc (file: ast.File, allocator := context.allocator) {
delete(file.decls);
}
-node_equal :: proc {
-node_equal_node,
-node_equal_array,
-node_equal_dynamic_array};
+node_equal :: proc{
+ node_equal_node,
+ node_equal_array,
+ node_equal_dynamic_array,
+};
-node_equal_array :: proc (a, b: $A/[]^$T) -> bool {
+node_equal_array :: proc(a, b: $A/[]^$T) -> bool {
ret := true;
@@ -434,7 +436,7 @@ node_equal_array :: proc (a, b: $A/[]^$T) -> bool {
return ret;
}
-node_equal_dynamic_array :: proc (a, b: $A/[dynamic]^$T) -> bool {
+node_equal_dynamic_array :: proc(a, b: $A/[dynamic]^$T) -> bool {
ret := true;
@@ -449,7 +451,7 @@ node_equal_dynamic_array :: proc (a, b: $A/[dynamic]^$T) -> bool {
return ret;
}
-node_equal_node :: proc (a, b: ^ast.Node) -> bool {
+node_equal_node :: proc(a, b: ^ast.Node) -> bool {
using ast;
diff --git a/src/common/fuzzy.odin b/src/common/fuzzy.odin
index ccd8f0b..cf4d82c 100644
--- a/src/common/fuzzy.odin
+++ b/src/common/fuzzy.odin
@@ -77,7 +77,7 @@ char_types: []u8 = {
0x55,0x55,0x55,0x55,0x55,0x55,0x55,0x55,
};
-make_fuzzy_matcher :: proc (pattern: string, allocator := context.temp_allocator) -> ^FuzzyMatcher {
+make_fuzzy_matcher :: proc(pattern: string, allocator := context.temp_allocator) -> ^FuzzyMatcher {
matcher := new(FuzzyMatcher, allocator);
@@ -116,7 +116,7 @@ make_fuzzy_matcher :: proc (pattern: string, allocator := context.temp_allocator
return matcher;
}
-fuzzy_to_acronym :: proc (word: string) -> (string, bool) {
+fuzzy_to_acronym :: proc(word: string) -> (string, bool) {
builder := strings.make_builder(context.temp_allocator);
@@ -149,7 +149,7 @@ fuzzy_to_acronym :: proc (word: string) -> (string, bool) {
return str, true;
}
-fuzzy_match :: proc (matcher: ^FuzzyMatcher, word: string) -> (f32, bool) {
+fuzzy_match :: proc(matcher: ^FuzzyMatcher, word: string) -> (f32, bool) {
if !fuzzy_init(matcher, word) {
return 0, false;
@@ -168,7 +168,7 @@ fuzzy_match :: proc (matcher: ^FuzzyMatcher, word: string) -> (f32, bool) {
fuzzy_build_graph(matcher);
best := max(cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][miss].score,
- cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][match].score);
+ cast(int)matcher.scores[matcher.pattern_count][matcher.word_count][match].score);
if fuzzy_is_awful(best) {
return 0.0, false;
@@ -183,11 +183,11 @@ fuzzy_match :: proc (matcher: ^FuzzyMatcher, word: string) -> (f32, bool) {
return score, true;
}
-fuzzy_is_awful :: proc (s: int) -> bool {
+fuzzy_is_awful :: proc(s: int) -> bool {
return s < awful_score / 2;
}
-fuzzy_calculate_roles :: proc (text: string, roles: ^[]FuzzyCharRole) -> FuzzyCharTypeSet {
+fuzzy_calculate_roles :: proc(text: string, roles: ^[]FuzzyCharRole) -> FuzzyCharTypeSet {
assert(len(text) == len(roles));
@@ -217,15 +217,15 @@ fuzzy_calculate_roles :: proc (text: string, roles: ^[]FuzzyCharRole) -> FuzzyCh
return type_set;
}
-fuzzy_rotate :: proc (t: FuzzyCharType, types: ^FuzzyCharType) {
+fuzzy_rotate :: proc(t: FuzzyCharType, types: ^FuzzyCharType) {
types^ = cast(FuzzyCharType)(((cast(uint)types^ << 2) | cast(uint)t) & 0x3f);
}
-fuzzy_packed_lookup :: proc (data: $A/[]$T, i: uint) -> T {
+fuzzy_packed_lookup :: proc(data: $A/[]$T, i: uint) -> T {
return (data[i >> 2] >> ((i & 3) * 2)) & 3;
}
-fuzzy_init :: proc (matcher: ^FuzzyMatcher, word: string) -> bool {
+fuzzy_init :: proc(matcher: ^FuzzyMatcher, word: string) -> bool {
matcher.word = word;
matcher.word_count = min(max_word, len(matcher.word));
@@ -259,7 +259,7 @@ fuzzy_init :: proc (matcher: ^FuzzyMatcher, word: string) -> bool {
return true;
}
-fuzzy_skip_penalty :: proc (matcher: ^FuzzyMatcher, w: int) -> int {
+fuzzy_skip_penalty :: proc(matcher: ^FuzzyMatcher, w: int) -> int {
if w == 0 { // Skipping the first character.
return 3;
@@ -272,7 +272,7 @@ fuzzy_skip_penalty :: proc (matcher: ^FuzzyMatcher, w: int) -> int {
return 0;
}
-fuzzy_build_graph :: proc (matcher: ^FuzzyMatcher) {
+fuzzy_build_graph :: proc(matcher: ^FuzzyMatcher) {
for w := 0; w < matcher.word_count; w += 1 {
@@ -340,7 +340,7 @@ fuzzy_build_graph :: proc (matcher: ^FuzzyMatcher) {
}
}
-fuzzy_match_bonus :: proc (matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> int {
+fuzzy_match_bonus :: proc(matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> int {
assert(matcher.lower_pattern[p] == matcher.lower_word[w]);
@@ -389,7 +389,7 @@ fuzzy_match_bonus :: proc (matcher: ^FuzzyMatcher, p: int, w: int, last: int) ->
return s;
}
-fuzzy_allow_match :: proc (matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> bool {
+fuzzy_allow_match :: proc(matcher: ^FuzzyMatcher, p: int, w: int, last: int) -> bool {
if matcher.lower_pattern[p] != matcher.lower_word[w] {
return false;
diff --git a/src/common/pool.odin b/src/common/pool.odin
index 422b8f7..2efed98 100644
--- a/src/common/pool.odin
+++ b/src/common/pool.odin
@@ -12,7 +12,7 @@ Task_Status :: enum i32 {
Term,
}
-Task_Proc :: proc (task: ^Task);
+Task_Proc :: proc(task: ^Task);
Task :: struct {
procedure: Task_Proc,
@@ -33,8 +33,8 @@ Pool :: struct {
tasks: [dynamic]Task,
}
-pool_init :: proc (pool: ^Pool, thread_count: int, allocator := context.allocator) {
- worker_thread_internal :: proc (t: ^thread.Thread) {
+pool_init :: proc(pool: ^Pool, thread_count: int, allocator := context.allocator) {
+ worker_thread_internal :: proc(t: ^thread.Thread) {
pool := (^Pool)(t.data);
temp_allocator: Scratch_Allocator;
@@ -75,7 +75,7 @@ pool_init :: proc (pool: ^Pool, thread_count: int, allocator := context.allocato
}
}
-pool_destroy :: proc (pool: ^Pool) {
+pool_destroy :: proc(pool: ^Pool) {
delete(pool.tasks);
for t in &pool.threads {
@@ -88,13 +88,13 @@ pool_destroy :: proc (pool: ^Pool) {
sync.semaphore_destroy(&pool.sem_available);
}
-pool_start :: proc (pool: ^Pool) {
+pool_start :: proc(pool: ^Pool) {
for t in pool.threads {
thread.start(t);
}
}
-pool_join :: proc (pool: ^Pool) {
+pool_join :: proc(pool: ^Pool) {
pool.is_running = false;
sync.semaphore_post(&pool.sem_available, len(pool.threads));
@@ -106,7 +106,7 @@ pool_join :: proc (pool: ^Pool) {
}
}
-pool_add_task :: proc (pool: ^Pool, procedure: Task_Proc, data: rawptr, user_index: int = 0) {
+pool_add_task :: proc(pool: ^Pool, procedure: Task_Proc, data: rawptr, user_index: int = 0) {
sync.mutex_lock(&pool.mutex);
defer sync.mutex_unlock(&pool.mutex);
@@ -119,7 +119,7 @@ pool_add_task :: proc (pool: ^Pool, procedure: Task_Proc, data: rawptr, user_ind
sync.semaphore_post(&pool.sem_available, 1);
}
-pool_try_and_pop_task :: proc (pool: ^Pool) -> (task: Task, got_task: bool = false) {
+pool_try_and_pop_task :: proc(pool: ^Pool) -> (task: Task, got_task: bool = false) {
if sync.mutex_try_lock(&pool.mutex) {
if len(pool.tasks) != 0 {
intrinsics.atomic_add(&pool.processing_task_count, 1);
@@ -131,12 +131,12 @@ pool_try_and_pop_task :: proc (pool: ^Pool) -> (task: Task, got_task: bool = fal
return;
}
-pool_do_work :: proc (pool: ^Pool, task: ^Task) {
+pool_do_work :: proc(pool: ^Pool, task: ^Task) {
task.procedure(task);
intrinsics.atomic_sub(&pool.processing_task_count, 1);
}
-pool_wait_and_process :: proc (pool: ^Pool) {
+pool_wait_and_process :: proc(pool: ^Pool) {
for len(pool.tasks) != 0 || intrinsics.atomic_load(&pool.processing_task_count) != 0 {
if task, ok := pool_try_and_pop_task(pool); ok {
pool_do_work(pool, &task);
diff --git a/src/common/position.odin b/src/common/position.odin
index c409b2f..865d0c8 100644
--- a/src/common/position.odin
+++ b/src/common/position.odin
@@ -33,7 +33,7 @@ AbsoluteRange :: struct {
AbsolutePosition :: int;
-get_absolute_position :: proc (position: Position, document_text: []u8) -> (AbsolutePosition, bool) {
+get_absolute_position :: proc(position: Position, document_text: []u8) -> (AbsolutePosition, bool) {
absolute: AbsolutePosition;
if len(document_text) == 0 {
@@ -54,7 +54,7 @@ get_absolute_position :: proc (position: Position, document_text: []u8) -> (Abso
return absolute, true;
}
-get_relative_token_position :: proc (offset: int, document_text: []u8, current_start: int) -> Position {
+get_relative_token_position :: proc(offset: int, document_text: []u8, current_start: int) -> Position {
start_index := current_start;
@@ -91,10 +91,10 @@ get_relative_token_position :: proc (offset: int, document_text: []u8, current_s
/*
Get the range of a token in utf16 space
*/
-get_token_range :: proc (node: ast.Node, document_text: []u8) -> Range {
+get_token_range :: proc(node: ast.Node, document_text: []u8) -> Range {
range: Range;
- go_backwards_to_endline :: proc (offset: int, document_text: []u8) -> int {
+ go_backwards_to_endline :: proc(offset: int, document_text: []u8) -> int {
index := offset;
@@ -125,7 +125,7 @@ get_token_range :: proc (node: ast.Node, document_text: []u8) -> Range {
return range;
}
-get_absolute_range :: proc (range: Range, document_text: []u8) -> (AbsoluteRange, bool) {
+get_absolute_range :: proc(range: Range, document_text: []u8) -> (AbsoluteRange, bool) {
absolute: AbsoluteRange;
@@ -160,7 +160,7 @@ get_absolute_range :: proc (range: Range, document_text: []u8) -> (AbsoluteRange
return absolute, true;
}
-get_index_at_line :: proc (current_index: ^int, current_line: ^int, last: ^u8, document_text: []u8, end_line: int) -> bool {
+get_index_at_line :: proc(current_index: ^int, current_line: ^int, last: ^u8, document_text: []u8, end_line: int) -> bool {
if end_line == 0 {
current_index^ = 0;
@@ -199,7 +199,7 @@ get_index_at_line :: proc (current_index: ^int, current_line: ^int, last: ^u8, d
return false;
}
-get_character_offset_u16_to_u8 :: proc (character_offset: int, document_text: []u8) -> int {
+get_character_offset_u16_to_u8 :: proc(character_offset: int, document_text: []u8) -> int {
utf8_idx := 0;
utf16_idx := 0;
@@ -224,7 +224,7 @@ get_character_offset_u16_to_u8 :: proc (character_offset: int, document_text: []
return utf8_idx;
}
-get_character_offset_u8_to_u16 :: proc (character_offset: int, document_text: []u8) -> int {
+get_character_offset_u8_to_u16 :: proc(character_offset: int, document_text: []u8) -> int {
utf8_idx := 0;
utf16_idx := 0;
@@ -249,7 +249,7 @@ get_character_offset_u8_to_u16 :: proc (character_offset: int, document_text: []
return utf16_idx;
}
-get_end_line_u16 :: proc (document_text: []u8) -> int {
+get_end_line_u16 :: proc(document_text: []u8) -> int {
utf8_idx := 0;
utf16_idx := 0;
diff --git a/src/common/pretty.odin b/src/common/pretty.odin
index 929bf80..f7aa705 100644
--- a/src/common/pretty.odin
+++ b/src/common/pretty.odin
@@ -7,19 +7,20 @@ import "core:fmt"
Ast visualization to help in debugging and development
*/
-print_ast :: proc {
-print_ast_array,
-print_ast_dynamic_array,
-print_ast_node};
+print_ast :: proc{
+ print_ast_array,
+ print_ast_dynamic_array,
+ print_ast_node,
+};
-print_ast_array :: proc (array: $A/[]^$T, depth: int, src: []byte, newline := false) {
+print_ast_array :: proc(array: $A/[]^$T, depth: int, src: []byte, newline := false) {
for elem, i in array {
print_ast(elem, depth, src);
}
}
-print_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, depth: int, src: []byte, newline := false) {
+print_ast_dynamic_array :: proc(array: $A/[dynamic]^$T, depth: int, src: []byte, newline := false) {
for elem, i in array {
print_ast(elem, depth, src);
@@ -30,7 +31,7 @@ print_ast_dynamic_array :: proc (array: $A/[dynamic]^$T, depth: int, src: []byte
Not fully printed out, filling it in as needed.
*/
-print_ast_node :: proc (node: ^ast.Node, depth: int, src: []byte, newline := false) {
+print_ast_node :: proc(node: ^ast.Node, depth: int, src: []byte, newline := false) {
using ast;
diff --git a/src/common/sha1.odin b/src/common/sha1.odin
index 67cbc4b..d4621dd 100644
--- a/src/common/sha1.odin
+++ b/src/common/sha1.odin
@@ -13,7 +13,7 @@ blk0 :: proc (buf: []u32, i: int) -> u32 {
}
blk :: proc (buf: []u32, i: int) -> u32 {
- buf[i & 15] = rol(buf[(i + 13) & 15] ~ buf[(i + 8) & 15] ~ buf[(i + 2) & 15] ~
+ buf[i & 15] = rol(buf[(i + 13) & 15] ~ buf[(i + 8) & 15] ~ buf[(i + 2) & 15] ~
buf[i & 15], 1);
return buf[i & 15];
@@ -181,7 +181,7 @@ sha1_add_uncounted :: proc (state_context: ^Sha1context, data: byte) {
when ODIN_ENDIAN == "big" {
state_context.buf.c[state_context.buf_offset] = data;
- } else
+ } else
{
state_context.buf.c[state_context.buf_offset ~ 3] = data;
@@ -225,9 +225,9 @@ sha1_update :: proc (state_context: ^Sha1context, data: []byte) {
for i := 0; i < BLOCK_LENGTH_32; i += 1 {
n := (transmute([]u32)current_data)[i];
- state_context.buf.l[i] = (((n & 0xFF) << 24) |
- ((n & 0xFF00) << 8) |
- ((n & 0xFF0000) >> 8) |
+ state_context.buf.l[i] = (((n & 0xFF) << 24) |
+ ((n & 0xFF00) << 8) |
+ ((n & 0xFF0000) >> 8) |
((n & 0xFF000000) >> 24));
}
@@ -267,13 +267,13 @@ sha1_final :: proc (state_context: ^Sha1context, result: ^[5]u32) {
for i := 0; i < 5; i += 1 {
result[i] = state_context.state[i];
}
- } else
+ } else
{
for i := 0; i < 5; i += 1 {
- result[i] = (((state_context.state[i]) << 24) & 0xff000000) |
- (((state_context.state[i]) << 8) & 0x00ff0000) |
- (((state_context.state[i]) >> 8) & 0x0000ff00) |
+ result[i] = (((state_context.state[i]) << 24) & 0xff000000) |
+ (((state_context.state[i]) << 8) & 0x00ff0000) |
+ (((state_context.state[i]) >> 8) & 0x0000ff00) |
(((state_context.state[i]) >> 24) & 0x000000ff);
}
}
diff --git a/src/common/track_allocator.odin b/src/common/track_allocator.odin
index afb1de2..978a74e 100644
--- a/src/common/track_allocator.odin
+++ b/src/common/track_allocator.odin
@@ -19,7 +19,7 @@ ThreadSafe_Allocator_Data :: struct {
// ----------------------------------------------------------------------------------------------------
-threadsafe_allocator :: proc (allocator: mem.Allocator) -> mem.Allocator {
+threadsafe_allocator :: proc(allocator: mem.Allocator) -> mem.Allocator {
data := new(ThreadSafe_Allocator_Data);
data.actual_allocator = allocator;
sync.mutex_init(&data.mutex);
@@ -29,7 +29,7 @@ threadsafe_allocator :: proc (allocator: mem.Allocator) -> mem.Allocator {
// ----------------------------------------------------------------------------------------------------
-threadsafe_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
+threadsafe_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
data := cast(^ThreadSafe_Allocator_Data)allocator_data;
@@ -62,7 +62,7 @@ Memleak_Entry :: struct {
// ----------------------------------------------------------------------------------------------------
-memleak_allocator_proc :: proc (allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
+memleak_allocator_proc :: proc(allocator_data: rawptr, mode: mem.Allocator_Mode, size, alignment: int,
old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> rawptr {
memleak := cast(^Memleak_Allocator_Data)allocator_data;
@@ -91,10 +91,10 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
if memleak.track_frees {
memleak.frees[old_memory] = Memleak_Entry {
- location = loc,
- size = size,
- index = 0,
- };
+ location = loc,
+ size = size,
+ index = 0,
+ };
}
}
}
@@ -110,10 +110,10 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
// can be very useful for inspecting the stack trace of a particular allocation
memleak.allocations[result] = Memleak_Entry {
- location = loc,
- size = size,
- index = memleak.allocation_count,
- };
+ location = loc,
+ size = size,
+ index = memleak.allocation_count,
+ };
memleak.allocation_count += 1;
@@ -129,7 +129,7 @@ old_memory: rawptr, old_size: int, flags: u64 = 0, loc := #caller_location) -> r
// ----------------------------------------------------------------------------------------------------
-memleak_allocator :: proc (track_frees: bool) -> mem.Allocator {
+memleak_allocator :: proc(track_frees: bool) -> mem.Allocator {
make([]byte, 1, context.temp_allocator); // so the temp allocation doesn't clutter our results
@@ -149,7 +149,7 @@ memleak_allocator :: proc (track_frees: bool) -> mem.Allocator {
// ----------------------------------------------------------------------------------------------------
-memleak_detected_leaks :: proc () -> bool {
+memleak_detected_leaks :: proc() -> bool {
if context.allocator.procedure == memleak_allocator_proc {
memleak := cast(^Memleak_Allocator_Data)context.allocator.data;
return len(memleak.allocations) > 0;
@@ -160,7 +160,7 @@ memleak_detected_leaks :: proc () -> bool {
// ----------------------------------------------------------------------------------------------------
-memleak_dump :: proc (memleak_alloc: mem.Allocator, dump_proc: proc (message: string, user_data: rawptr), user_data: rawptr) {
+memleak_dump :: proc(memleak_alloc: mem.Allocator, dump_proc: proc(message: string, user_data: rawptr), user_data: rawptr) {
memleak := cast(^Memleak_Allocator_Data)memleak_alloc.data;
context.allocator = memleak.actual_allocator;
@@ -187,6 +187,6 @@ memleak_dump :: proc (memleak_alloc: mem.Allocator, dump_proc: proc (message: st
// ----------------------------------------------------------------------------------------------------
-log_dump :: proc (message: string, user_data: rawptr) {
+log_dump :: proc(message: string, user_data: rawptr) {
log.info(message);
}
diff --git a/src/common/uri.odin b/src/common/uri.odin
index b979634..ee25393 100644
--- a/src/common/uri.odin
+++ b/src/common/uri.odin
@@ -15,7 +15,7 @@ Uri :: struct {
//Note(Daniel, This is an extremely incomplete uri parser and for now ignores fragment and query and only handles file schema)
-parse_uri :: proc (value: string, allocator: mem.Allocator) -> (Uri, bool) {
+parse_uri :: proc(value: string, allocator: mem.Allocator) -> (Uri, bool) {
uri: Uri;
@@ -45,7 +45,7 @@ parse_uri :: proc (value: string, allocator: mem.Allocator) -> (Uri, bool) {
}
//Note(Daniel, Again some really incomplete and scuffed uri writer)
-create_uri :: proc (path: string, allocator: mem.Allocator) -> Uri {
+create_uri :: proc(path: string, allocator: mem.Allocator) -> Uri {
path_forward, _ := filepath.to_slash(path, context.temp_allocator);
builder := strings.make_builder(allocator);
@@ -53,8 +53,7 @@ create_uri :: proc (path: string, allocator: mem.Allocator) -> Uri {
//bad
when ODIN_OS == "windows" {
strings.write_string(&builder, "file:///");
- } else
- {
+ } else {
strings.write_string(&builder, "file://");
}
@@ -69,7 +68,7 @@ create_uri :: proc (path: string, allocator: mem.Allocator) -> Uri {
return uri;
}
-delete_uri :: proc (uri: Uri) {
+delete_uri :: proc(uri: Uri) {
if uri.uri != "" {
delete(uri.uri);
@@ -80,7 +79,7 @@ delete_uri :: proc (uri: Uri) {
}
}
-encode_percent :: proc (value: string, allocator: mem.Allocator) -> string {
+encode_percent :: proc(value: string, allocator: mem.Allocator) -> string {
builder := strings.make_builder(allocator);
@@ -94,8 +93,8 @@ encode_percent :: proc (value: string, allocator: mem.Allocator) -> string {
if r > 127 || r == ':' {
for i := 0; i < w; i += 1 {
- strings.write_string(&builder, strings.concatenate({"%", fmt.tprintf("%X", data[index + i])},
- context.temp_allocator));
+ strings.write_string(&builder, strings.concatenate({"%", fmt.tprintf("%X", data[index + i])},
+ context.temp_allocator));
}
} else {
strings.write_byte(&builder, data[index]);
@@ -108,7 +107,7 @@ encode_percent :: proc (value: string, allocator: mem.Allocator) -> string {
}
@(private)
-starts_with :: proc (value: string, starts_with: string) -> bool {
+starts_with :: proc(value: string, starts_with: string) -> bool {
if len(value) < len(starts_with) {
return false;
@@ -125,7 +124,7 @@ starts_with :: proc (value: string, starts_with: string) -> bool {
}
@(private)
-decode_percent :: proc (value: string, allocator: mem.Allocator) -> (string, bool) {
+decode_percent :: proc(value: string, allocator: mem.Allocator) -> (string, bool) {
builder := strings.make_builder(allocator);
diff --git a/src/index/build.odin b/src/index/build.odin
index 47af974..26b1cb2 100644
--- a/src/index/build.odin
+++ b/src/index/build.odin
@@ -30,7 +30,7 @@ platform_os: map[string]bool = {
"freebsd" = true,
};
-walk_static_index_build :: proc (info: os.File_Info, in_err: os.Errno) -> (err: os.Errno, skip_dir: bool) {
+walk_static_index_build :: proc(info: os.File_Info, in_err: os.Errno) -> (err: os.Errno, skip_dir: bool) {
if info.is_dir {
return 0, false;
@@ -62,7 +62,7 @@ walk_static_index_build :: proc (info: os.File_Info, in_err: os.Errno) -> (err:
return 0, false;
}
-build_static_index :: proc (allocator := context.allocator, config: ^common.Config) {
+build_static_index :: proc(allocator := context.allocator, config: ^common.Config) {
symbol_collection = make_symbol_collection(allocator, config);
@@ -129,14 +129,14 @@ build_static_index :: proc (allocator := context.allocator, config: ^common.Conf
indexer.static_index = make_memory_index(symbol_collection);
}
-free_static_index :: proc () {
+free_static_index :: proc() {
delete_symbol_collection(symbol_collection);
}
-log_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+log_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
log.warnf("%v %v %v", pos, msg, args);
}
-log_warning_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+log_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
log.warnf("%v %v %v", pos, msg, args);
}
diff --git a/src/index/clone.odin b/src/index/clone.odin
index a10d2e7..94bc1ef 100644
--- a/src/index/clone.odin
+++ b/src/index/clone.odin
@@ -7,7 +7,7 @@ import "core:odin/ast"
import "core:strings"
import "core:log"
-new_type :: proc ($T: typeid, pos, end: tokenizer.Pos, allocator: mem.Allocator) -> ^T {
+new_type :: proc($T: typeid, pos, end: tokenizer.Pos, allocator: mem.Allocator) -> ^T {
n := mem.new(T, allocator);
n.pos = pos;
n.end = end;
@@ -17,13 +17,14 @@ new_type :: proc ($T: typeid, pos, end: tokenizer.Pos, allocator: mem.Allocator)
return n;
}
-clone_type :: proc {
-clone_node,
-clone_expr,
-clone_array,
-clone_dynamic_array};
+clone_type :: proc{
+ clone_node,
+ clone_expr,
+ clone_array,
+ clone_dynamic_array,
+};
-clone_array :: proc (array: $A/[]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
+clone_array :: proc(array: $A/[]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
if len(array) == 0 {
return nil;
}
@@ -34,7 +35,7 @@ clone_array :: proc (array: $A/[]^$T, allocator: mem.Allocator, unique_strings:
return res;
}
-clone_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
+clone_dynamic_array :: proc(array: $A/[dynamic]^$T, allocator: mem.Allocator, unique_strings: ^map[string]string) -> A {
if len(array) == 0 {
return nil;
}
@@ -45,11 +46,11 @@ clone_dynamic_array :: proc (array: $A/[dynamic]^$T, allocator: mem.Allocator, u
return res;
}
-clone_expr :: proc (node: ^ast.Expr, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Expr {
+clone_expr :: proc(node: ^ast.Expr, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Expr {
return cast(^ast.Expr)clone_node(node, allocator, unique_strings);
}
-clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Node {
+clone_node :: proc(node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^map[string]string) -> ^ast.Node {
using ast;
@@ -109,14 +110,14 @@ clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^
r.expr = clone_type(r.expr, allocator, unique_strings);
case Binary_Expr:
r := cast(^Binary_Expr)res;
- r.left = clone_type(r.left, allocator, unique_strings);
+ r.left = clone_type(r.left, allocator, unique_strings);
r.right = clone_type(r.right, allocator, unique_strings);
case Paren_Expr:
r := cast(^Paren_Expr)res;
r.expr = clone_type(r.expr, allocator, unique_strings);
case Selector_Expr:
r := cast(^Selector_Expr)res;
- r.expr = clone_type(r.expr, allocator, unique_strings);
+ r.expr = clone_type(r.expr, allocator, unique_strings);
r.field = auto_cast clone_type(r.field, allocator, unique_strings);
case Implicit_Selector_Expr:
r := cast(^Implicit_Selector_Expr)res;
@@ -124,7 +125,7 @@ clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^
case Slice_Expr:
r := cast(^Slice_Expr)res;
r.expr = clone_type(r.expr, allocator, unique_strings);
- r.low = clone_type(r.low, allocator, unique_strings);
+ r.low = clone_type(r.low, allocator, unique_strings);
r.high = clone_type(r.high, allocator, unique_strings);
case Attribute:
r := cast(^Attribute)res;
@@ -134,30 +135,30 @@ clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^
r.type = clone_type(r.type, allocator, unique_strings);
case Proc_Type:
r := cast(^Proc_Type)res;
- r.params = auto_cast clone_type(r.params, allocator, unique_strings);
+ r.params = auto_cast clone_type(r.params, allocator, unique_strings);
r.results = auto_cast clone_type(r.results, allocator, unique_strings);
case Pointer_Type:
r := cast(^Pointer_Type)res;
r.elem = clone_type(r.elem, allocator, unique_strings);
case Array_Type:
r := cast(^Array_Type)res;
- r.len = clone_type(r.len, allocator, unique_strings);
+ r.len = clone_type(r.len, allocator, unique_strings);
r.elem = clone_type(r.elem, allocator, unique_strings);
- r.tag = clone_type(r.tag, allocator, unique_strings);
+ r.tag = clone_type(r.tag, allocator, unique_strings);
case Dynamic_Array_Type:
r := cast(^Dynamic_Array_Type)res;
r.elem = clone_type(r.elem, allocator, unique_strings);
- r.tag = clone_type(r.tag, allocator, unique_strings);
+ r.tag = clone_type(r.tag, allocator, unique_strings);
case Struct_Type:
r := cast(^Struct_Type)res;
- r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
- r.align = clone_type(r.align, allocator, unique_strings);
- r.fields = auto_cast clone_type(r.fields, allocator, unique_strings);
+ r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
+ r.align = clone_type(r.align, allocator, unique_strings);
+ r.fields = auto_cast clone_type(r.fields, allocator, unique_strings);
r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
case Field:
r := cast(^Field)res;
- r.names = clone_type(r.names, allocator, unique_strings);
- r.type = clone_type(r.type, allocator, unique_strings);
+ r.names = clone_type(r.names, allocator, unique_strings);
+ r.type = clone_type(r.type, allocator, unique_strings);
r.default_value = clone_type(r.default_value, allocator, unique_strings);
case Field_List:
r := cast(^Field_List)res;
@@ -168,21 +169,21 @@ clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^
r.value = clone_type(r.value, allocator, unique_strings);
case Union_Type:
r := cast(^Union_Type)res;
- r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
- r.align = clone_type(r.align, allocator, unique_strings);
- r.variants = clone_type(r.variants, allocator, unique_strings);
+ r.poly_params = auto_cast clone_type(r.poly_params, allocator, unique_strings);
+ r.align = clone_type(r.align, allocator, unique_strings);
+ r.variants = clone_type(r.variants, allocator, unique_strings);
r.where_clauses = clone_type(r.where_clauses, allocator, unique_strings);
case Enum_Type:
r := cast(^Enum_Type)res;
r.base_type = clone_type(r.base_type, allocator, unique_strings);
- r.fields = clone_type(r.fields, allocator, unique_strings);
+ r.fields = clone_type(r.fields, allocator, unique_strings);
case Bit_Set_Type:
r := cast(^Bit_Set_Type)res;
- r.elem = clone_type(r.elem, allocator, unique_strings);
+ r.elem = clone_type(r.elem, allocator, unique_strings);
r.underlying = clone_type(r.underlying, allocator, unique_strings);
case Map_Type:
r := cast(^Map_Type)res;
- r.key = clone_type(r.key, allocator, unique_strings);
+ r.key = clone_type(r.key, allocator, unique_strings);
r.value = clone_type(r.value, allocator, unique_strings);
case Call_Expr:
r := cast(^Call_Expr)res;
@@ -193,19 +194,19 @@ clone_node :: proc (node: ^ast.Node, allocator: mem.Allocator, unique_strings: ^
r.specialization = clone_type(r.specialization, allocator, unique_strings);
case Ternary_When_Expr:
r := cast(^Ternary_When_Expr)res;
- r.x = clone_type(r.x, allocator, unique_strings);
+ r.x = clone_type(r.x, allocator, unique_strings);
r.cond = clone_type(r.cond, allocator, unique_strings);
- r.y = clone_type(r.y, allocator, unique_strings);
+ r.y = clone_type(r.y, allocator, unique_strings);
case Poly_Type:
r := cast(^Poly_Type)res;
- r.type = auto_cast clone_type(r.type, allocator, unique_strings);
+ r.type = auto_cast clone_type(r.type, allocator, unique_strings);
r.specialization = clone_type(r.specialization, allocator, unique_strings);
case Proc_Group:
r := cast(^Proc_Group)res;
r.args = clone_type(r.args, allocator, unique_strings);
case Comp_Lit:
r := cast(^Comp_Lit)res;
- r.type = clone_type(r.type, allocator, unique_strings);
+ r.type = clone_type(r.type, allocator, unique_strings);
r.elems = clone_type(r.elems, allocator, unique_strings);
case:
log.warn("Clone type Unhandled node kind: %T", n);
diff --git a/src/index/collector.odin b/src/index/collector.odin
index 410a92a..9604527 100644
--- a/src/index/collector.odin
+++ b/src/index/collector.odin
@@ -19,16 +19,16 @@ SymbolCollection :: struct {
unique_strings: map[string]string, //store all our strings as unique strings and reference them to save memory.
}
-get_index_unique_string :: proc {
+get_index_unique_string :: proc{
get_index_unique_string_collection,
get_index_unique_string_collection_raw,
};
-get_index_unique_string_collection :: proc (collection: ^SymbolCollection, s: string) -> string {
+get_index_unique_string_collection :: proc(collection: ^SymbolCollection, s: string) -> string {
return get_index_unique_string_collection_raw(&collection.unique_strings, collection.allocator, s);
}
-get_index_unique_string_collection_raw :: proc (unique_strings: ^map[string]string, allocator: mem.Allocator, s: string) -> string {
+get_index_unique_string_collection_raw :: proc(unique_strings: ^map[string]string, allocator: mem.Allocator, s: string) -> string {
//i'm hashing this string way to much
if _, ok := unique_strings[s]; !ok {
str := strings.clone(s, allocator);
@@ -38,7 +38,7 @@ get_index_unique_string_collection_raw :: proc (unique_strings: ^map[string]stri
return unique_strings[s];
}
-make_symbol_collection :: proc (allocator := context.allocator, config: ^common.Config) -> SymbolCollection {
+make_symbol_collection :: proc(allocator := context.allocator, config: ^common.Config) -> SymbolCollection {
return SymbolCollection {
allocator = allocator,
config = config,
@@ -47,7 +47,7 @@ make_symbol_collection :: proc (allocator := context.allocator, config: ^common.
};
}
-delete_symbol_collection :: proc (collection: SymbolCollection) {
+delete_symbol_collection :: proc(collection: SymbolCollection) {
for k, v in collection.symbols {
free_symbol(v, collection.allocator);
@@ -61,7 +61,7 @@ delete_symbol_collection :: proc (collection: SymbolCollection) {
delete(collection.unique_strings);
}
-collect_procedure_fields :: proc (collection: ^SymbolCollection, proc_type: ^ast.Proc_Type, arg_list: ^ast.Field_List, return_list: ^ast.Field_List, package_map: map[string]string) -> SymbolProcedureValue {
+collect_procedure_fields :: proc(collection: ^SymbolCollection, proc_type: ^ast.Proc_Type, arg_list: ^ast.Field_List, return_list: ^ast.Field_List, package_map: map[string]string) -> SymbolProcedureValue {
returns := make([dynamic]^ast.Field, 0, collection.allocator);
args := make([dynamic]^ast.Field, 0, collection.allocator);
@@ -93,7 +93,7 @@ collect_procedure_fields :: proc (collection: ^SymbolCollection, proc_type: ^ast
return value;
}
-collect_struct_fields :: proc (collection: ^SymbolCollection, struct_type: ast.Struct_Type, package_map: map[string]string) -> SymbolStructValue {
+collect_struct_fields :: proc(collection: ^SymbolCollection, struct_type: ast.Struct_Type, package_map: map[string]string) -> SymbolStructValue {
names := make([dynamic]string, 0, collection.allocator);
types := make([dynamic]^ast.Expr, 0, collection.allocator);
@@ -124,7 +124,7 @@ collect_struct_fields :: proc (collection: ^SymbolCollection, struct_type: ast.S
return value;
}
-collect_enum_fields :: proc (collection: ^SymbolCollection, fields: []^ast.Expr, package_map: map[string]string) -> SymbolEnumValue {
+collect_enum_fields :: proc(collection: ^SymbolCollection, fields: []^ast.Expr, package_map: map[string]string) -> SymbolEnumValue {
names := make([dynamic]string, 0, collection.allocator);
@@ -139,13 +139,13 @@ collect_enum_fields :: proc (collection: ^SymbolCollection, fields: []^ast.Expr,
}
value := SymbolEnumValue {
- names = names[:]
+ names = names[:],
};
return value;
}
-collect_union_fields :: proc (collection: ^SymbolCollection, union_type: ast.Union_Type, package_map: map[string]string) -> SymbolUnionValue {
+collect_union_fields :: proc(collection: ^SymbolCollection, union_type: ast.Union_Type, package_map: map[string]string) -> SymbolUnionValue {
names := make([dynamic]string, 0, collection.allocator);
types := make([dynamic]^ast.Expr, 0, collection.allocator);
@@ -172,28 +172,28 @@ collect_union_fields :: proc (collection: ^SymbolCollection, union_type: ast.Uni
return value;
}
-collect_bitset_field :: proc (collection: ^SymbolCollection, bitset_type: ast.Bit_Set_Type, package_map: map[string]string) -> SymbolBitSetValue {
+collect_bitset_field :: proc(collection: ^SymbolCollection, bitset_type: ast.Bit_Set_Type, package_map: map[string]string) -> SymbolBitSetValue {
value := SymbolBitSetValue {
- expr = clone_type(bitset_type.elem, collection.allocator, &collection.unique_strings)
+ expr = clone_type(bitset_type.elem, collection.allocator, &collection.unique_strings),
};
return value;
}
-collect_generic :: proc (collection: ^SymbolCollection, expr: ^ast.Expr, package_map: map[string]string) -> SymbolGenericValue {
+collect_generic :: proc(collection: ^SymbolCollection, expr: ^ast.Expr, package_map: map[string]string) -> SymbolGenericValue {
cloned := clone_type(expr, collection.allocator, &collection.unique_strings);
replace_package_alias(cloned, package_map, collection);
value := SymbolGenericValue {
- expr = cloned
+ expr = cloned,
};
return value;
}
-collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: string) -> common.Error {
+collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: string) -> common.Error {
forward, _ := filepath.to_slash(file.fullpath, context.temp_allocator);
directory := strings.to_lower(path.dir(forward, context.temp_allocator), context.temp_allocator);
@@ -231,12 +231,12 @@ collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: str
if v.type.params != nil {
symbol.signature = strings.concatenate({"(", string(file.src[v.type.params.pos.offset:v.type.params.end.offset]), ")"},
- collection.allocator);
+ collection.allocator);
}
if v.type.results != nil {
symbol.returns = strings.concatenate({"(", string(file.src[v.type.results.pos.offset:v.type.results.end.offset]), ")"},
- collection.allocator);
+ collection.allocator);
}
if v.type != nil {
@@ -248,12 +248,12 @@ collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: str
if v.params != nil {
symbol.signature = strings.concatenate({"(", string(file.src[v.params.pos.offset:v.params.end.offset]), ")"},
- collection.allocator);
+ collection.allocator);
}
if v.results != nil {
symbol.returns = strings.concatenate({"(", string(file.src[v.results.pos.offset:v.results.end.offset]), ")"},
- collection.allocator);
+ collection.allocator);
}
symbol.value = collect_procedure_fields(collection, cast(^ast.Proc_Type)col_expr, v.params, v.results, package_map);
@@ -261,8 +261,8 @@ collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: str
token = v;
token_type = .Function;
symbol.value = SymbolProcedureGroupValue {
- group = clone_type(col_expr, collection.allocator, &collection.unique_strings)
- };
+ group = clone_type(col_expr, collection.allocator, &collection.unique_strings),
+ };
case ast.Struct_Type:
token = v;
token_type = .Struct;
@@ -340,7 +340,7 @@ collect_symbols :: proc (collection: ^SymbolCollection, file: ast.File, uri: str
/*
Gets the map from import alias to absolute package directory
*/
-get_package_mapping :: proc (file: ast.File, config: ^common.Config, uri: string) -> map[string]string {
+get_package_mapping :: proc(file: ast.File, config: ^common.Config, uri: string) -> map[string]string {
package_map := make(map[string]string, 0, context.temp_allocator);
@@ -399,32 +399,32 @@ get_package_mapping :: proc (file: ast.File, config: ^common.Config, uri: string
package name(absolute directory path)
*/
-replace_package_alias :: proc {
+replace_package_alias :: proc{
replace_package_alias_node,
replace_package_alias_expr,
replace_package_alias_array,
replace_package_alias_dynamic_array,
};
-replace_package_alias_array :: proc (array: $A/[]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
+replace_package_alias_array :: proc(array: $A/[]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
for elem, i in array {
replace_package_alias(elem, package_map, collection);
}
}
-replace_package_alias_dynamic_array :: proc (array: $A/[dynamic]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
+replace_package_alias_dynamic_array :: proc(array: $A/[dynamic]^$T, package_map: map[string]string, collection: ^SymbolCollection) {
for elem, i in array {
replace_package_alias(elem, package_map, collection);
}
}
-replace_package_alias_expr :: proc (node: ^ast.Expr, package_map: map[string]string, collection: ^SymbolCollection) {
+replace_package_alias_expr :: proc(node: ^ast.Expr, package_map: map[string]string, collection: ^SymbolCollection) {
replace_package_alias_node(node, package_map, collection);
}
-replace_package_alias_node :: proc (node: ^ast.Node, package_map: map[string]string, collection: ^SymbolCollection) {
+replace_package_alias_node :: proc(node: ^ast.Node, package_map: map[string]string, collection: ^SymbolCollection) {
using ast;
diff --git a/src/index/indexer.odin b/src/index/indexer.odin
index 1d21bf7..2020992 100644
--- a/src/index/indexer.odin
+++ b/src/index/indexer.odin
@@ -46,7 +46,7 @@ FuzzyResult :: struct {
score: f32,
}
-lookup :: proc (name: string, pkg: string, loc := #caller_location) -> (Symbol, bool) {
+lookup :: proc(name: string, pkg: string, loc := #caller_location) -> (Symbol, bool) {
if symbol, ok := memory_index_lookup(&indexer.dynamic_index, name, pkg); ok {
log.infof("lookup dynamic name: %v pkg: %v, symbol %v location %v", name, pkg, symbol, loc);
@@ -70,7 +70,7 @@ lookup :: proc (name: string, pkg: string, loc := #caller_location) -> (Symbol,
return {}, false;
}
-fuzzy_search :: proc (name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
+fuzzy_search :: proc(name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
dynamic_results, dynamic_ok := memory_index_fuzzy_search(&indexer.dynamic_index, name, pkgs);
static_results, static_ok := memory_index_fuzzy_search(&indexer.static_index, name, pkgs);
result := make([dynamic]FuzzyResult, context.temp_allocator);
@@ -99,18 +99,18 @@ fuzzy_search :: proc (name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
return result[:], true;
}
-fuzzy_sort_interface :: proc (s: ^[dynamic]FuzzyResult) -> sort.Interface {
+fuzzy_sort_interface :: proc(s: ^[dynamic]FuzzyResult) -> sort.Interface {
return sort.Interface {
collection = rawptr(s),
- len = proc (it: sort.Interface) -> int {
+ len = proc(it: sort.Interface) -> int {
s := (^[dynamic]FuzzyResult)(it.collection);
return len(s^);
},
- less = proc (it: sort.Interface, i, j: int) -> bool {
+ less = proc(it: sort.Interface, i, j: int) -> bool {
s := (^[dynamic]FuzzyResult)(it.collection);
return s[i].score > s[j].score;
},
- swap = proc (it: sort.Interface, i, j: int) {
+ swap = proc(it: sort.Interface, i, j: int) {
s := (^[dynamic]FuzzyResult)(it.collection);
s[i], s[j] = s[j], s[i];
},
diff --git a/src/index/memory_index.odin b/src/index/memory_index.odin
index 159e01c..4ec12f6 100644
--- a/src/index/memory_index.odin
+++ b/src/index/memory_index.odin
@@ -18,19 +18,19 @@ MemoryIndex :: struct {
collection: SymbolCollection,
}
-make_memory_index :: proc (collection: SymbolCollection) -> MemoryIndex {
+make_memory_index :: proc(collection: SymbolCollection) -> MemoryIndex {
return MemoryIndex {
- collection = collection
+ collection = collection,
};
}
-memory_index_lookup :: proc (index: ^MemoryIndex, name: string, pkg: string) -> (Symbol, bool) {
+memory_index_lookup :: proc(index: ^MemoryIndex, name: string, pkg: string) -> (Symbol, bool) {
id := get_symbol_id(strings.concatenate({pkg, name}, context.temp_allocator));
return index.collection.symbols[id];
}
-memory_index_fuzzy_search :: proc (index: ^MemoryIndex, name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
+memory_index_fuzzy_search :: proc(index: ^MemoryIndex, name: string, pkgs: []string) -> ([]FuzzyResult, bool) {
symbols := make([dynamic]FuzzyResult, 0, context.temp_allocator);
@@ -59,7 +59,7 @@ memory_index_fuzzy_search :: proc (index: ^MemoryIndex, name: string, pkgs: []st
return symbols[:min(top, len(symbols))], true;
}
-exists_in_scope :: proc (symbol_scope: string, scope: []string) -> bool {
+exists_in_scope :: proc(symbol_scope: string, scope: []string) -> bool {
for s in scope {
if strings.compare(symbol_scope, s) == 0 {
diff --git a/src/index/symbol.odin b/src/index/symbol.odin
index 59b47fb..9fd656d 100644
--- a/src/index/symbol.odin
+++ b/src/index/symbol.odin
@@ -79,13 +79,7 @@ Symbol :: struct {
value: SymbolValue,
}
-SymbolType :: enum
-
-//set by ast symbol
-
-//set by ast symbol
-
-{
+SymbolType :: enum {
Function = 3,
Field = 5,
Variable = 6,
@@ -96,7 +90,7 @@ SymbolType :: enum
Struct = 22,
}
-free_symbol :: proc (symbol: Symbol, allocator: mem.Allocator) {
+free_symbol :: proc(symbol: Symbol, allocator: mem.Allocator) {
if symbol.signature != "" && symbol.signature != "struct" &&
symbol.signature != "union" && symbol.signature != "enum" &&
@@ -133,7 +127,7 @@ free_symbol :: proc (symbol: Symbol, allocator: mem.Allocator) {
}
}
-get_symbol_id :: proc (str: string) -> uint {
+get_symbol_id :: proc(str: string) -> uint {
ret := common.sha1_hash(transmute([]byte)str);
r := cast(^uint)slice.first_ptr(ret[:]);
return r^;
diff --git a/src/index/util.odin b/src/index/util.odin
index e3c6da4..e87af2b 100644
--- a/src/index/util.odin
+++ b/src/index/util.odin
@@ -7,7 +7,7 @@ import "core:path"
/*
Returns the string representation of a type. This allows us to print the signature without storing it in the indexer as a string(saving memory).
*/
-node_to_string :: proc (node: ^ast.Node) -> string {
+node_to_string :: proc(node: ^ast.Node) -> string {
builder := strings.make_builder(context.temp_allocator);
@@ -16,26 +16,27 @@ node_to_string :: proc (node: ^ast.Node) -> string {
return strings.to_string(builder);
}
-build_string :: proc {
-build_string_ast_array,
-build_string_dynamic_array,
-build_string_node};
+build_string :: proc{
+ build_string_ast_array,
+ build_string_dynamic_array,
+ build_string_node,
+};
-build_string_dynamic_array :: proc (array: $A/[]^$T, builder: ^strings.Builder) {
+build_string_dynamic_array :: proc(array: $A/[]^$T, builder: ^strings.Builder) {
for elem, i in array {
build_string(elem, builder);
}
}
-build_string_ast_array :: proc (array: $A/[dynamic]^$T, builder: ^strings.Builder) {
+build_string_ast_array :: proc(array: $A/[dynamic]^$T, builder: ^strings.Builder) {
for elem, i in array {
build_string(elem, builder);
}
}
-build_string_node :: proc (node: ^ast.Node, builder: ^strings.Builder) {
+build_string_node :: proc(node: ^ast.Node, builder: ^strings.Builder) {
using ast;
diff --git a/src/server/analysis.odin b/src/server/analysis.odin
index c8c1827..1ca1b43 100644
--- a/src/server/analysis.odin
+++ b/src/server/analysis.odin
@@ -82,7 +82,7 @@ AstContext :: struct {
field_name: string,
}
-make_ast_context :: proc (file: ast.File, imports: []Package, package_name: string, allocator := context.temp_allocator) -> AstContext {
+make_ast_context :: proc(file: ast.File, imports: []Package, package_name: string, allocator := context.temp_allocator) -> AstContext {
ast_context := AstContext {
locals = make(map[string][dynamic]DocumentLocal, 0, allocator),
@@ -101,19 +101,20 @@ make_ast_context :: proc (file: ast.File, imports: []Package, package_name: stri
return ast_context;
}
-tokenizer_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+tokenizer_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
}
/*
Walk through the type expression while both the call expression and specialization type are the same
*/
-resolve_poly_spec :: proc {
-resolve_poly_spec_node,
-resolve_poly_spec_array,
-resolve_poly_spec_dynamic_array};
+resolve_poly_spec :: proc{
+ resolve_poly_spec_node,
+ resolve_poly_spec_array,
+ resolve_poly_spec_dynamic_array,
+};
-resolve_poly_spec_array :: proc (ast_context: ^AstContext, call_array: $A/[]^$T, spec_array: $D/[]^$K, poly_map: ^map[string]^ast.Expr) {
+resolve_poly_spec_array :: proc(ast_context: ^AstContext, call_array: $A/[]^$T, spec_array: $D/[]^$K, poly_map: ^map[string]^ast.Expr) {
if len(call_array) != len(spec_array) {
return;
@@ -124,7 +125,7 @@ resolve_poly_spec_array :: proc (ast_context: ^AstContext, call_array: $A/[]^$T,
}
}
-resolve_poly_spec_dynamic_array :: proc (ast_context: ^AstContext, call_array: $A/[dynamic]^$T, spec_array: $D/[dynamic]^$K, poly_map: ^map[string]^ast.Expr) {
+resolve_poly_spec_dynamic_array :: proc(ast_context: ^AstContext, call_array: $A/[dynamic]^$T, spec_array: $D/[dynamic]^$K, poly_map: ^map[string]^ast.Expr) {
if len(call_array) != len(spec_array) {
return;
@@ -135,7 +136,7 @@ resolve_poly_spec_dynamic_array :: proc (ast_context: ^AstContext, call_array: $
}
}
-get_poly_node_to_expr :: proc (node: ^ast.Node) -> ^ast.Expr {
+get_poly_node_to_expr :: proc(node: ^ast.Node) -> ^ast.Expr {
using ast;
@@ -149,7 +150,7 @@ get_poly_node_to_expr :: proc (node: ^ast.Node) -> ^ast.Expr {
return nil;
}
-resolve_poly_spec_node :: proc (ast_context: ^AstContext, call_node: ^ast.Node, spec_node: ^ast.Node, poly_map: ^map[string]^ast.Expr) {
+resolve_poly_spec_node :: proc(ast_context: ^AstContext, call_node: ^ast.Node, spec_node: ^ast.Node, poly_map: ^map[string]^ast.Expr) {
/*
Note(Daniel, uncertain about the switch cases being enough or too little)
@@ -281,7 +282,7 @@ resolve_poly_spec_node :: proc (ast_context: ^AstContext, call_node: ^ast.Node,
}
}
-resolve_type_comp_literal :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, current_symbol: index.Symbol, current_comp_lit: ^ast.Comp_Lit) -> (index.Symbol, bool) {
+resolve_type_comp_literal :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, current_symbol: index.Symbol, current_comp_lit: ^ast.Comp_Lit) -> (index.Symbol, bool) {
if position_context.comp_lit == current_comp_lit {
return current_symbol, true;
@@ -316,11 +317,12 @@ resolve_type_comp_literal :: proc (ast_context: ^AstContext, position_context: ^
return current_symbol, true;
}
-resolve_generic_function :: proc {
-resolve_generic_function_ast,
-resolve_generic_function_symbol};
+resolve_generic_function :: proc{
+ resolve_generic_function_ast,
+ resolve_generic_function_symbol,
+};
-resolve_generic_function_symbol :: proc (ast_context: ^AstContext, params: []^ast.Field, results: []^ast.Field) -> (index.Symbol, bool) {
+resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast.Field, results: []^ast.Field) -> (index.Symbol, bool) {
using ast;
if params == nil {
@@ -410,16 +412,16 @@ resolve_generic_function_symbol :: proc (ast_context: ^AstContext, params: []^as
}
symbol.value = index.SymbolProcedureValue {
- return_types = return_types[:],
- arg_types = params,
- };
+ return_types = return_types[:],
+ arg_types = params,
+ };
//log.infof("return %v", poly_map);
return symbol, true;
}
-resolve_generic_function_ast :: proc (ast_context: ^AstContext, proc_lit: ast.Proc_Lit) -> (index.Symbol, bool) {
+resolve_generic_function_ast :: proc(ast_context: ^AstContext, proc_lit: ast.Proc_Lit) -> (index.Symbol, bool) {
using ast;
@@ -441,7 +443,7 @@ resolve_generic_function_ast :: proc (ast_context: ^AstContext, proc_lit: ast.Pr
/*
Figure out which function the call expression is using out of the list from proc group
*/
-resolve_function_overload :: proc (ast_context: ^AstContext, group: ast.Proc_Group) -> (index.Symbol, bool) {
+resolve_function_overload :: proc(ast_context: ^AstContext, group: ast.Proc_Group) -> (index.Symbol, bool) {
using ast;
@@ -491,7 +493,7 @@ resolve_function_overload :: proc (ast_context: ^AstContext, group: ast.Proc_Gro
return index.Symbol {}, false;
}
-resolve_basic_lit :: proc (ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -> (index.Symbol, bool) {
+resolve_basic_lit :: proc(ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -> (index.Symbol, bool) {
/*
This is temporary, since basic lit is untyped, but either way it's going to be an ident representing a keyword.
@@ -502,7 +504,7 @@ resolve_basic_lit :: proc (ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -
ident := index.new_type(ast.Ident, basic_lit.pos, basic_lit.end, context.temp_allocator);
symbol := index.Symbol {
- type = .Keyword
+ type = .Keyword,
};
if v, ok := strconv.parse_bool(basic_lit.tok.text); ok {
@@ -514,13 +516,13 @@ resolve_basic_lit :: proc (ast_context: ^AstContext, basic_lit: ast.Basic_Lit) -
}
symbol.value = index.SymbolGenericValue {
- expr = ident
- };
+ expr = ident,
+ };
return symbol, true;
}
-resolve_type_expression :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
+resolve_type_expression :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol, bool) {
if node == nil {
return {}, false;
@@ -660,7 +662,7 @@ resolve_type_expression :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (
return index.Symbol {}, false;
}
-store_local :: proc (ast_context: ^AstContext, expr: ^ast.Expr, offset: int, name: string) {
+store_local :: proc(ast_context: ^AstContext, expr: ^ast.Expr, offset: int, name: string) {
local_stack := &ast_context.locals[name];
@@ -672,7 +674,7 @@ store_local :: proc (ast_context: ^AstContext, expr: ^ast.Expr, offset: int, nam
append(local_stack, DocumentLocal {expr = expr, offset = offset});
}
-get_local :: proc (ast_context: ^AstContext, offset: int, name: string) -> ^ast.Expr {
+get_local :: proc(ast_context: ^AstContext, offset: int, name: string) -> ^ast.Expr {
previous := 0;
@@ -708,7 +710,7 @@ get_local :: proc (ast_context: ^AstContext, offset: int, name: string) -> ^ast.
Function recusively goes through the identifier until it hits a struct, enum, procedure literals, since you can
have chained variable declarations. ie. a := foo { test = 2}; b := a; c := b;
*/
-resolve_type_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
+resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
using ast;
@@ -823,7 +825,7 @@ resolve_type_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (
signature = node.name,
pkg = ast_context.current_package,
value = index.SymbolGenericValue {
- expr = ident
+ expr = ident,
},
};
return symbol, true;
@@ -883,7 +885,7 @@ resolve_type_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (
return index.Symbol {}, false;
}
-resolve_ident_is_variable :: proc (ast_context: ^AstContext, node: ast.Ident) -> bool {
+resolve_ident_is_variable :: proc(ast_context: ^AstContext, node: ast.Ident) -> bool {
if v, ok := ast_context.variables[node.name]; ok && v {
return true;
@@ -896,7 +898,7 @@ resolve_ident_is_variable :: proc (ast_context: ^AstContext, node: ast.Ident) ->
return false;
}
-resolve_ident_is_package :: proc (ast_context: ^AstContext, node: ast.Ident) -> bool {
+resolve_ident_is_package :: proc(ast_context: ^AstContext, node: ast.Ident) -> bool {
if strings.contains(node.name, "/") {
return true;
@@ -913,7 +915,7 @@ resolve_ident_is_package :: proc (ast_context: ^AstContext, node: ast.Ident) ->
return false;
}
-expand_struct_usings :: proc (ast_context: ^AstContext, symbol: index.Symbol, value: index.SymbolStructValue) -> index.SymbolStructValue {
+expand_struct_usings :: proc(ast_context: ^AstContext, symbol: index.Symbol, value: index.SymbolStructValue) -> index.SymbolStructValue {
//ERROR no completion or over on names and types - generic resolve error
names := slice.to_dynamic(value.names, context.temp_allocator);
@@ -958,7 +960,7 @@ expand_struct_usings :: proc (ast_context: ^AstContext, symbol: index.Symbol, va
};
}
-resolve_symbol_return :: proc (ast_context: ^AstContext, symbol: index.Symbol, ok := true) -> (index.Symbol, bool) {
+resolve_symbol_return :: proc(ast_context: ^AstContext, symbol: index.Symbol, ok := true) -> (index.Symbol, bool) {
if !ok {
return symbol, ok;
@@ -995,7 +997,7 @@ resolve_symbol_return :: proc (ast_context: ^AstContext, symbol: index.Symbol, o
return symbol, true;
}
-resolve_location_identifier :: proc (ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
+resolve_location_identifier :: proc(ast_context: ^AstContext, node: ast.Ident) -> (index.Symbol, bool) {
symbol: index.Symbol;
@@ -1010,7 +1012,7 @@ resolve_location_identifier :: proc (ast_context: ^AstContext, node: ast.Ident)
return index.lookup(node.name, ast_context.document_package);
}
-resolve_first_symbol_from_binary_expression :: proc (ast_context: ^AstContext, binary: ^ast.Binary_Expr) -> (index.Symbol, bool) {
+resolve_first_symbol_from_binary_expression :: proc(ast_context: ^AstContext, binary: ^ast.Binary_Expr) -> (index.Symbol, bool) {
//Fairly simple function to find the earliest identifier symbol in binary expression.
@@ -1020,14 +1022,11 @@ resolve_first_symbol_from_binary_expression :: proc (ast_context: ^AstContext, b
if s, ok := resolve_type_identifier(ast_context, ident); ok {
return s, ok;
}
- }
-
- else if _, ok := binary.left.derived.(ast.Binary_Expr); ok {
+ } else if _, ok := binary.left.derived.(ast.Binary_Expr); ok {
if s, ok := resolve_first_symbol_from_binary_expression(ast_context, cast(^ast.Binary_Expr)binary.left); ok {
return s, ok;
}
}
-
}
if binary.right != nil {
@@ -1035,9 +1034,7 @@ resolve_first_symbol_from_binary_expression :: proc (ast_context: ^AstContext, b
if s, ok := resolve_type_identifier(ast_context, ident); ok {
return s, ok;
}
- }
-
- else if _, ok := binary.right.derived.(ast.Binary_Expr); ok {
+ } else if _, ok := binary.right.derived.(ast.Binary_Expr); ok {
if s, ok := resolve_first_symbol_from_binary_expression(ast_context, cast(^ast.Binary_Expr)binary.right); ok {
return s, ok;
}
@@ -1047,31 +1044,31 @@ resolve_first_symbol_from_binary_expression :: proc (ast_context: ^AstContext, b
return {}, false;
}
-make_pointer_ast :: proc (elem: ^ast.Expr) -> ^ast.Pointer_Type {
+make_pointer_ast :: proc(elem: ^ast.Expr) -> ^ast.Pointer_Type {
pointer := index.new_type(ast.Pointer_Type, elem.pos, elem.end, context.temp_allocator);
pointer.elem = elem;
return pointer;
}
-make_bool_ast :: proc () -> ^ast.Ident {
+make_bool_ast :: proc() -> ^ast.Ident {
ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
ident.name = bool_lit;
return ident;
}
-make_int_ast :: proc () -> ^ast.Ident {
+make_int_ast :: proc() -> ^ast.Ident {
ident := index.new_type(ast.Ident, {}, {}, context.temp_allocator);
ident.name = int_lit;
return ident;
}
-get_package_from_node :: proc (node: ast.Node) -> string {
+get_package_from_node :: proc(node: ast.Node) -> string {
slashed, _ := filepath.to_slash(node.pos.file, context.temp_allocator);
ret := strings.to_lower(path.dir(slashed, context.temp_allocator), context.temp_allocator);
return ret;
}
-get_using_packages :: proc (ast_context: ^AstContext) -> []string {
+get_using_packages :: proc(ast_context: ^AstContext) -> []string {
usings := make([]string, len(ast_context.usings), context.temp_allocator);
@@ -1093,7 +1090,7 @@ get_using_packages :: proc (ast_context: ^AstContext) -> []string {
return usings;
}
-make_symbol_procedure_from_ast :: proc (ast_context: ^AstContext, n: ^ast.Node, v: ast.Proc_Type, name: string) -> index.Symbol {
+make_symbol_procedure_from_ast :: proc(ast_context: ^AstContext, n: ^ast.Node, v: ast.Proc_Type, name: string) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(n^, ast_context.file.src),
@@ -1125,14 +1122,14 @@ make_symbol_procedure_from_ast :: proc (ast_context: ^AstContext, n: ^ast.Node,
}
symbol.value = index.SymbolProcedureValue {
- return_types = return_types[:],
- arg_types = arg_types[:],
- };
+ return_types = return_types[:],
+ arg_types = arg_types[:],
+ };
return symbol;
}
-make_symbol_generic_from_ast :: proc (ast_context: ^AstContext, expr: ^ast.Expr) -> index.Symbol {
+make_symbol_generic_from_ast :: proc(ast_context: ^AstContext, expr: ^ast.Expr) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(expr, ast_context.file.src),
@@ -1142,13 +1139,13 @@ make_symbol_generic_from_ast :: proc (ast_context: ^AstContext, expr: ^ast.Expr)
};
symbol.value = index.SymbolGenericValue {
- expr = expr
- };
+ expr = expr,
+ };
return symbol;
}
-make_symbol_union_from_ast :: proc (ast_context: ^AstContext, v: ast.Union_Type, ident: ast.Ident) -> index.Symbol {
+make_symbol_union_from_ast :: proc(ast_context: ^AstContext, v: ast.Union_Type, ident: ast.Ident) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(v, ast_context.file.src),
@@ -1163,9 +1160,7 @@ make_symbol_union_from_ast :: proc (ast_context: ^AstContext, v: ast.Union_Type,
if ident, ok := variant.derived.(ast.Ident); ok {
append(&names, ident.name);
- }
-
- else if selector, ok := variant.derived.(ast.Selector_Expr); ok {
+ } else if selector, ok := variant.derived.(ast.Selector_Expr); ok {
if ident, ok := selector.field.derived.(ast.Ident); ok {
append(&names, ident.name);
@@ -1174,14 +1169,14 @@ make_symbol_union_from_ast :: proc (ast_context: ^AstContext, v: ast.Union_Type,
}
symbol.value = index.SymbolUnionValue {
- names = names[:],
- types = v.variants,
+ names = names[:],
+ types = v.variants,
};
return symbol;
}
-make_symbol_enum_from_ast :: proc (ast_context: ^AstContext, v: ast.Enum_Type, ident: ast.Ident) -> index.Symbol {
+make_symbol_enum_from_ast :: proc(ast_context: ^AstContext, v: ast.Enum_Type, ident: ast.Ident) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(v, ast_context.file.src),
@@ -1202,13 +1197,13 @@ make_symbol_enum_from_ast :: proc (ast_context: ^AstContext, v: ast.Enum_Type, i
}
symbol.value = index.SymbolEnumValue {
- names = names[:]
- };
+ names = names[:],
+ };
return symbol;
}
-make_symbol_bitset_from_ast :: proc (ast_context: ^AstContext, v: ast.Bit_Set_Type, ident: ast.Ident) -> index.Symbol {
+make_symbol_bitset_from_ast :: proc(ast_context: ^AstContext, v: ast.Bit_Set_Type, ident: ast.Ident) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(v, ast_context.file.src),
@@ -1218,13 +1213,13 @@ make_symbol_bitset_from_ast :: proc (ast_context: ^AstContext, v: ast.Bit_Set_Ty
};
symbol.value = index.SymbolBitSetValue {
- expr = v.elem
- };
+ expr = v.elem,
+ };
return symbol;
}
-make_symbol_struct_from_ast :: proc (ast_context: ^AstContext, v: ast.Struct_Type, ident: ast.Ident) -> index.Symbol {
+make_symbol_struct_from_ast :: proc(ast_context: ^AstContext, v: ast.Struct_Type, ident: ast.Ident) -> index.Symbol {
symbol := index.Symbol {
range = common.get_token_range(v, ast_context.file.src),
@@ -1252,10 +1247,10 @@ make_symbol_struct_from_ast :: proc (ast_context: ^AstContext, v: ast.Struct_Typ
}
symbol.value = index.SymbolStructValue {
- names = names[:],
- types = types[:],
- usings = usings,
- };
+ names = names[:],
+ types = types[:],
+ usings = usings,
+ };
if v.poly_params != nil {
resolve_poly_struct(ast_context, v, &symbol);
@@ -1269,7 +1264,7 @@ make_symbol_struct_from_ast :: proc (ast_context: ^AstContext, v: ast.Struct_Typ
return symbol;
}
-resolve_poly_struct :: proc (ast_context: ^AstContext, v: ast.Struct_Type, symbol: ^index.Symbol) {
+resolve_poly_struct :: proc(ast_context: ^AstContext, v: ast.Struct_Type, symbol: ^index.Symbol) {
if ast_context.call == nil {
log.infof("no call");
@@ -1336,7 +1331,7 @@ resolve_poly_struct :: proc (ast_context: ^AstContext, v: ast.Struct_Type, symbo
}
}
-get_globals :: proc (file: ast.File, ast_context: ^AstContext) {
+get_globals :: proc(file: ast.File, ast_context: ^AstContext) {
ast_context.variables["context"] = true;
@@ -1348,7 +1343,7 @@ get_globals :: proc (file: ast.File, ast_context: ^AstContext) {
}
}
-get_generic_assignment :: proc (file: ast.File, value: ^ast.Expr, ast_context: ^AstContext, results: ^[dynamic]^ast.Expr) {
+get_generic_assignment :: proc(file: ast.File, value: ^ast.Expr, ast_context: ^AstContext, results: ^[dynamic]^ast.Expr) {
using ast;
@@ -1397,7 +1392,7 @@ get_generic_assignment :: proc (file: ast.File, value: ^ast.Expr, ast_context: ^
}
}
-get_locals_value_decl :: proc (file: ast.File, value_decl: ast.Value_Decl, ast_context: ^AstContext) {
+get_locals_value_decl :: proc(file: ast.File, value_decl: ast.Value_Decl, ast_context: ^AstContext) {
using ast;
@@ -1428,7 +1423,7 @@ get_locals_value_decl :: proc (file: ast.File, value_decl: ast.Value_Decl, ast_c
}
}
-get_locals_stmt :: proc (file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext, save_assign := false) {
+get_locals_stmt :: proc(file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext, save_assign := false) {
ast_context.use_locals = true;
ast_context.use_globals = true;
@@ -1479,7 +1474,7 @@ get_locals_stmt :: proc (file: ast.File, stmt: ^ast.Stmt, ast_context: ^AstConte
}
}
-get_locals_using_stmt :: proc (file: ast.File, stmt: ast.Using_Stmt, ast_context: ^AstContext) {
+get_locals_using_stmt :: proc(file: ast.File, stmt: ast.Using_Stmt, ast_context: ^AstContext) {
for u in stmt.list {
@@ -1504,7 +1499,7 @@ get_locals_using_stmt :: proc (file: ast.File, stmt: ast.Using_Stmt, ast_context
}
}
-get_locals_assign_stmt :: proc (file: ast.File, stmt: ast.Assign_Stmt, ast_context: ^AstContext) {
+get_locals_assign_stmt :: proc(file: ast.File, stmt: ast.Assign_Stmt, ast_context: ^AstContext) {
using ast;
@@ -1530,7 +1525,7 @@ get_locals_assign_stmt :: proc (file: ast.File, stmt: ast.Assign_Stmt, ast_conte
}
}
-get_locals_if_stmt :: proc (file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_if_stmt :: proc(file: ast.File, stmt: ast.If_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
@@ -1541,7 +1536,7 @@ get_locals_if_stmt :: proc (file: ast.File, stmt: ast.If_Stmt, ast_context: ^Ast
get_locals_stmt(file, stmt.else_stmt, ast_context, document_position);
}
-get_locals_for_range_stmt :: proc (file: ast.File, stmt: ast.Range_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_for_range_stmt :: proc(file: ast.File, stmt: ast.Range_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
using ast;
@@ -1621,7 +1616,7 @@ get_locals_for_range_stmt :: proc (file: ast.File, stmt: ast.Range_Stmt, ast_con
get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_for_stmt :: proc (file: ast.File, stmt: ast.For_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_for_stmt :: proc(file: ast.File, stmt: ast.For_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
@@ -1631,7 +1626,7 @@ get_locals_for_stmt :: proc (file: ast.File, stmt: ast.For_Stmt, ast_context: ^A
get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_switch_stmt :: proc (file: ast.File, stmt: ast.Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_switch_stmt :: proc(file: ast.File, stmt: ast.Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
if !(stmt.pos.offset <= document_position.position && document_position.position <= stmt.end.offset) {
return;
@@ -1640,7 +1635,7 @@ get_locals_switch_stmt :: proc (file: ast.File, stmt: ast.Switch_Stmt, ast_conte
get_locals_stmt(file, stmt.body, ast_context, document_position);
}
-get_locals_type_switch_stmt :: proc (file: ast.File, stmt: ast.Type_Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals_type_switch_stmt :: proc(file: ast.File, stmt: ast.Type_Switch_Stmt, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
using ast;
@@ -1674,7 +1669,7 @@ get_locals_type_switch_stmt :: proc (file: ast.File, stmt: ast.Type_Switch_Stmt,
}
}
-get_locals :: proc (file: ast.File, function: ^ast.Node, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
+get_locals :: proc(file: ast.File, function: ^ast.Node, ast_context: ^AstContext, document_position: ^DocumentPositionContext) {
proc_lit, ok := function.derived.(ast.Proc_Lit);
@@ -1719,14 +1714,14 @@ get_locals :: proc (file: ast.File, function: ^ast.Node, ast_context: ^AstContex
}
}
-clear_locals :: proc (ast_context: ^AstContext) {
+clear_locals :: proc(ast_context: ^AstContext) {
clear(&ast_context.locals);
clear(&ast_context.parameters);
clear(&ast_context.variables);
clear(&ast_context.usings);
}
-concatenate_symbols_information :: proc (ast_context: ^AstContext, symbol: index.Symbol) -> string {
+concatenate_symbols_information :: proc(ast_context: ^AstContext, symbol: index.Symbol) -> string {
pkg := path.base(symbol.pkg, false, context.temp_allocator);
@@ -1751,7 +1746,7 @@ concatenate_symbols_information :: proc (ast_context: ^AstContext, symbol: index
return ""; //weird bug requires this
}
-get_definition_location :: proc (document: ^Document, position: common.Position) -> (common.Location, bool) {
+get_definition_location :: proc(document: ^Document, position: common.Position) -> (common.Location, bool) {
location: common.Location;
@@ -1866,7 +1861,7 @@ get_definition_location :: proc (document: ^Document, position: common.Position)
return location, true;
}
-write_hover_content :: proc (ast_context: ^AstContext, symbol: index.Symbol) -> MarkupContent {
+write_hover_content :: proc(ast_context: ^AstContext, symbol: index.Symbol) -> MarkupContent {
content: MarkupContent;
cat := concatenate_symbols_information(ast_context, symbol);
@@ -1881,7 +1876,7 @@ write_hover_content :: proc (ast_context: ^AstContext, symbol: index.Symbol) ->
return content;
}
-get_signature :: proc (ast_context: ^AstContext, ident: ast.Ident, symbol: index.Symbol, was_variable := false) -> string {
+get_signature :: proc(ast_context: ^AstContext, ident: ast.Ident, symbol: index.Symbol, was_variable := false) -> string {
if symbol.type == .Function {
return symbol.signature;
@@ -1921,7 +1916,7 @@ get_signature :: proc (ast_context: ^AstContext, ident: ast.Ident, symbol: index
return ident.name;
}
-get_signature_information :: proc (document: ^Document, position: common.Position) -> (SignatureHelp, bool) {
+get_signature_information :: proc(document: ^Document, position: common.Position) -> (SignatureHelp, bool) {
signature_help: SignatureHelp;
@@ -1960,7 +1955,7 @@ get_signature_information :: proc (document: ^Document, position: common.Positio
return signature_help, true;
}
-get_document_symbols :: proc (document: ^Document) -> []DocumentSymbol {
+get_document_symbols :: proc(document: ^Document) -> []DocumentSymbol {
ast_context := make_ast_context(document.ast, document.imports, document.package_name);
@@ -1977,13 +1972,13 @@ get_document_symbols :: proc (document: ^Document) -> []DocumentSymbol {
package_symbol.kind = .Package;
package_symbol.name = path.base(document.package_name, false, context.temp_allocator);
package_symbol.range = {
- start = {
- line = document.ast.decls[0].pos.line
- },
- end = {
- line = document.ast.decls[len(document.ast.decls) - 1].end.line
- },
- };
+ start = {
+ line = document.ast.decls[0].pos.line,
+ },
+ end = {
+ line = document.ast.decls[len(document.ast.decls) - 1].end.line,
+ },
+ };
package_symbol.selectionRange = package_symbol.range;
children_symbols := make([dynamic]DocumentSymbol, context.temp_allocator);
@@ -2024,7 +2019,7 @@ get_document_symbols :: proc (document: ^Document) -> []DocumentSymbol {
/*
Figure out what exactly is at the given position and whether it is in a function, struct, etc.
*/
-get_document_position_context :: proc (document: ^Document, position: common.Position, hint: DocumentPositionContextHint) -> (DocumentPositionContext, bool) {
+get_document_position_context :: proc(document: ^Document, position: common.Position, hint: DocumentPositionContextHint) -> (DocumentPositionContext, bool) {
position_context: DocumentPositionContext;
@@ -2074,7 +2069,7 @@ get_document_position_context :: proc (document: ^Document, position: common.Pos
return position_context, true;
}
-fallback_position_context_completion :: proc (document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
+fallback_position_context_completion :: proc(document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
paren_count: int;
bracket_count: int;
@@ -2243,7 +2238,7 @@ fallback_position_context_completion :: proc (document: ^Document, position: com
}
}
-fallback_position_context_signature :: proc (document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
+fallback_position_context_signature :: proc(document: ^Document, position: common.Position, position_context: ^DocumentPositionContext) {
paren_count: int;
end: int;
@@ -2312,30 +2307,31 @@ fallback_position_context_signature :: proc (document: ^Document, position: comm
position_context.call = e;
}
-get_document_position :: proc {
-get_document_position_array,
-get_document_position_dynamic_array,
-get_document_position_node};
+get_document_position :: proc{
+ get_document_position_array,
+ get_document_position_dynamic_array,
+ get_document_position_node,
+};
-get_document_position_array :: proc (array: $A/[]^$T, position_context: ^DocumentPositionContext) {
+get_document_position_array :: proc(array: $A/[]^$T, position_context: ^DocumentPositionContext) {
for elem, i in array {
get_document_position(elem, position_context);
}
}
-get_document_position_dynamic_array :: proc (array: $A/[dynamic]^$T, position_context: ^DocumentPositionContext) {
+get_document_position_dynamic_array :: proc(array: $A/[dynamic]^$T, position_context: ^DocumentPositionContext) {
for elem, i in array {
get_document_position(elem, position_context);
}
}
-position_in_node :: proc (node: ^ast.Node, position: common.AbsolutePosition) -> bool {
+position_in_node :: proc(node: ^ast.Node, position: common.AbsolutePosition) -> bool {
return node != nil && node.pos.offset <= position && position <= node.end.offset;
}
-get_document_position_node :: proc (node: ^ast.Node, position_context: ^DocumentPositionContext) {
+get_document_position_node :: proc(node: ^ast.Node, position_context: ^DocumentPositionContext) {
using ast;
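
Note: the resolvers touched above (resolve_function_overload, resolve_type_identifier) follow Odin proc groups and chained value declarations. A minimal, made-up sketch of the kind of user code they have to walk (Foo and distance are illustrative names, not from this repository):

package example

Foo :: struct {test: int};

distance_int :: proc(a, b: int) -> int {return abs(a - b);}
distance_f32 :: proc(a, b: f32) -> f32 {return abs(a - b);}
distance :: proc{distance_int, distance_f32}; // proc group: one member is picked per call site

example :: proc() {
	a := Foo {test = 2}; // struct literal
	b := a;              // chained declarations: resolving c means following c -> b -> a -> Foo
	c := b;
	d := distance(1, 2); // overload resolution picks distance_int for integer arguments
	_ = c;
	_ = d;
}
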
diff --git a/src/server/background.odin b/src/server/background.odin
index 3cd8064..dd2d719 100644
--- a/src/server/background.odin
+++ b/src/server/background.odin
@@ -6,5 +6,5 @@ package server
import "shared:index"
-background_main :: proc () {
+background_main :: proc() {
}
diff --git a/src/server/completion.odin b/src/server/completion.odin
index 9793845..bd0e1f5 100644
--- a/src/server/completion.odin
+++ b/src/server/completion.odin
@@ -24,7 +24,7 @@ Completion_Type :: enum {
Comp_Lit,
}
-get_completion_list :: proc (document: ^Document, position: common.Position) -> (CompletionList, bool) {
+get_completion_list :: proc(document: ^Document, position: common.Position) -> (CompletionList, bool) {
list: CompletionList;
@@ -64,11 +64,8 @@ get_completion_list :: proc (document: ^Document, position: common.Position) ->
if union_value, ok := symbol.value.(index.SymbolUnionValue); ok {
completion_type = .Switch_Type;
}
-
}
-
}
-
}
switch completion_type {
@@ -87,7 +84,7 @@ get_completion_list :: proc (document: ^Document, position: common.Position) ->
return list, true;
}
-is_lhs_comp_lit :: proc (position_context: ^DocumentPositionContext) -> bool {
+is_lhs_comp_lit :: proc(position_context: ^DocumentPositionContext) -> bool {
if len(position_context.comp_lit.elems) == 0 {
return true;
@@ -111,7 +108,7 @@ is_lhs_comp_lit :: proc (position_context: ^DocumentPositionContext) -> bool {
return true;
}
-field_exists_in_comp_lit :: proc (comp_lit: ^ast.Comp_Lit, name: string) -> bool {
+field_exists_in_comp_lit :: proc(comp_lit: ^ast.Comp_Lit, name: string) -> bool {
for elem in comp_lit.elems {
@@ -132,11 +129,10 @@ field_exists_in_comp_lit :: proc (comp_lit: ^ast.Comp_Lit, name: string) -> bool
return false;
}
-get_attribute_completion :: proc (ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
-
+get_attribute_completion :: proc(ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
}
-get_directive_completion :: proc (ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_directive_completion :: proc(ast_context: ^AstContext, postition_context: ^DocumentPositionContext, list: ^CompletionList) {
list.isIncomplete = false;
@@ -181,7 +177,7 @@ get_directive_completion :: proc (ast_context: ^AstContext, postition_context: ^
list.items = items[:];
}
-get_comp_lit_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_comp_lit_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
items := make([dynamic]CompletionItem, context.temp_allocator);
@@ -196,7 +192,6 @@ get_comp_lit_completion :: proc (ast_context: ^AstContext, position_context: ^Do
#partial switch v in comp_symbol.value {
case index.SymbolStructValue:
for name, i in v.names {
-
//ERROR no completion on name and hover
if resolved, ok := resolve_type_expression(ast_context, v.types[i]); ok {
@@ -226,7 +221,7 @@ get_comp_lit_completion :: proc (ast_context: ^AstContext, position_context: ^Do
list.items = items[:];
}
-get_selector_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_selector_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
items := make([dynamic]CompletionItem, context.temp_allocator);
@@ -287,9 +282,7 @@ get_selector_completion :: proc (ast_context: ^AstContext, position_context: ^Do
if symbol.pkg == ast_context.document_package {
symbol.name = fmt.aprintf("(%v)", name);
- }
-
- else {
+ } else {
symbol.name = fmt.aprintf("(%v.%v)", path.base(symbol.pkg, false, context.temp_allocator), name);
}
@@ -378,7 +371,7 @@ get_selector_completion :: proc (ast_context: ^AstContext, position_context: ^Do
list.items = items[:];
}
-unwrap_enum :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
+unwrap_enum :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolEnumValue, bool) {
if enum_symbol, ok := resolve_type_expression(ast_context, node); ok {
@@ -390,7 +383,7 @@ unwrap_enum :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbol
return {}, false;
}
-unwrap_union :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolUnionValue, bool) {
+unwrap_union :: proc(ast_context: ^AstContext, node: ^ast.Expr) -> (index.SymbolUnionValue, bool) {
if union_symbol, ok := resolve_type_expression(ast_context, node); ok {
@@ -402,7 +395,7 @@ unwrap_union :: proc (ast_context: ^AstContext, node: ^ast.Expr) -> (index.Symbo
return {}, false;
}
-unwrap_bitset :: proc (ast_context: ^AstContext, bitset_symbol: index.Symbol) -> (index.SymbolEnumValue, bool) {
+unwrap_bitset :: proc(ast_context: ^AstContext, bitset_symbol: index.Symbol) -> (index.SymbolEnumValue, bool) {
if bitset_value, ok := bitset_symbol.value.(index.SymbolBitSetValue); ok {
if enum_symbol, ok := resolve_type_expression(ast_context, bitset_value.expr); ok {
@@ -415,7 +408,7 @@ unwrap_bitset :: proc (ast_context: ^AstContext, bitset_symbol: index.Symbol) ->
return {}, false;
}
-get_implicit_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_implicit_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
items := make([dynamic]CompletionItem, context.temp_allocator);
@@ -488,7 +481,7 @@ get_implicit_completion :: proc (ast_context: ^AstContext, position_context: ^Do
}
}
}
- } else if position_context.comp_lit != nil && position_context.assign != nil && position_context.assign.lhs != nil && len(position_context.assign.lhs) == 1 && is_bitset_assignment_operator(position_context.assign.op.text) {
+ } else if position_context.comp_lit != nil && position_context.assign != nil && position_context.assign.lhs != nil && len(position_context.assign.lhs) == 1 && is_bitset_assignment_operator(position_context.assign.op.text) {
if symbol, ok := resolve_type_expression(ast_context, position_context.assign.lhs[0]); ok {
@@ -505,10 +498,7 @@ get_implicit_completion :: proc (ast_context: ^AstContext, position_context: ^Do
append(&items, item);
}
}
-
}
-
-
} else if position_context.comp_lit != nil {
if position_context.parent_comp_lit.type == nil {
@@ -678,7 +668,7 @@ get_implicit_completion :: proc (ast_context: ^AstContext, position_context: ^Do
list.items = items[:];
}
-get_identifier_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_identifier_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
items := make([dynamic]CompletionItem, context.temp_allocator);
@@ -690,18 +680,18 @@ get_identifier_completion :: proc (ast_context: ^AstContext, position_context: ^
variable: ^ast.Ident,
};
- combined_sort_interface :: proc (s: ^[dynamic]CombinedResult) -> sort.Interface {
+ combined_sort_interface :: proc(s: ^[dynamic]CombinedResult) -> sort.Interface {
return sort.Interface {
collection = rawptr(s),
- len = proc (it: sort.Interface) -> int {
+ len = proc(it: sort.Interface) -> int {
s := (^[dynamic]CombinedResult)(it.collection);
return len(s^);
},
- less = proc (it: sort.Interface, i, j: int) -> bool {
+ less = proc(it: sort.Interface, i, j: int) -> bool {
s := (^[dynamic]CombinedResult)(it.collection);
return s[i].score > s[j].score;
},
- swap = proc (it: sort.Interface, i, j: int) {
+ swap = proc(it: sort.Interface, i, j: int) {
s := (^[dynamic]CombinedResult)(it.collection);
s[i], s[j] = s[j], s[i];
},
@@ -822,10 +812,10 @@ get_identifier_completion :: proc (ast_context: ^AstContext, position_context: ^
list.items = items[:];
}
-get_package_completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_package_completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
}
-get_type_switch_Completion :: proc (ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
+get_type_switch_Completion :: proc(ast_context: ^AstContext, position_context: ^DocumentPositionContext, list: ^CompletionList) {
items := make([dynamic]CompletionItem, context.temp_allocator);
list.isIncomplete = false;
@@ -865,12 +855,10 @@ get_type_switch_Completion :: proc (ast_context: ^AstContext, position_context:
};
if symbol.pkg == ast_context.document_package {
- item.label = fmt.aprintf("%v", name);
+ item.label = fmt.aprintf("%v", name);
item.detail = item.label;
- }
-
- else {
- item.label = fmt.aprintf("%v.%v", path.base(symbol.pkg, false, context.temp_allocator), name);
+ } else {
+ item.label = fmt.aprintf("%v.%v", path.base(symbol.pkg, false, context.temp_allocator), name);
item.detail = item.label;
}
@@ -900,10 +888,10 @@ bitset_assignment_operators: map[string]bool = {
"=" = true,
};
-is_bitset_binary_operator :: proc (op: string) -> bool {
+is_bitset_binary_operator :: proc(op: string) -> bool {
return op in bitset_operators;
}
-is_bitset_assignment_operator :: proc (op: string) -> bool {
+is_bitset_assignment_operator :: proc(op: string) -> bool {
return op in bitset_assignment_operators;
-}
\ No newline at end of file
+}
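
Note: the bitset_operators / bitset_assignment_operators tables above drive implicit enum-member completion inside bit_set expressions. A small hypothetical example of the code shapes involved (Direction and walls are illustrative names only):

package example

Direction :: enum {North, East, South, West};
Direction_Set :: bit_set[Direction];

example :: proc() {
	walls: Direction_Set;
	walls += {.North};         // "+=" is listed in bitset_assignment_operators
	open := walls - {.South};  // "-" is listed in bitset_operators
	_ = open;
}
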
diff --git a/src/server/documents.odin b/src/server/documents.odin
index 7f92cb0..92cab27 100644
--- a/src/server/documents.odin
+++ b/src/server/documents.odin
@@ -47,7 +47,7 @@ DocumentStorage :: struct {
document_storage: DocumentStorage;
-document_storage_shutdown :: proc () {
+document_storage_shutdown :: proc() {
for k, v in document_storage.documents {
delete(k);
@@ -62,7 +62,7 @@ document_storage_shutdown :: proc () {
delete(document_storage.documents);
}
-document_get_allocator :: proc () -> ^common.Scratch_Allocator {
+document_get_allocator :: proc() -> ^common.Scratch_Allocator {
if len(document_storage.free_allocators) > 0 {
return pop(&document_storage.free_allocators);
@@ -73,11 +73,11 @@ document_get_allocator :: proc () -> ^common.Scratch_Allocator {
}
}
-document_free_allocator :: proc (allocator: ^common.Scratch_Allocator) {
+document_free_allocator :: proc(allocator: ^common.Scratch_Allocator) {
append(&document_storage.free_allocators, allocator);
}
-document_get :: proc (uri_string: string) -> ^Document {
+document_get :: proc(uri_string: string) -> ^Document {
uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
@@ -96,7 +96,7 @@ document_get :: proc (uri_string: string) -> ^Document {
return document;
}
-document_release :: proc (document: ^Document) {
+document_release :: proc(document: ^Document) {
if document != nil {
intrinsics.atomic_sub(&document.operating_on, 1);
@@ -107,7 +107,7 @@ document_release :: proc (document: ^Document) {
Client opens a document with transferred text
*/
-document_open :: proc (uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error {
+document_open :: proc(uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error {
uri, parsed_ok := common.parse_uri(uri_string, context.allocator);
@@ -160,7 +160,7 @@ document_open :: proc (uri_string: string, text: string, config: ^common.Config,
/*
Function that applies changes to the given document through incremental syncronization
*/
-document_apply_changes :: proc (uri_string: string, changes: [dynamic]TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error {
+document_apply_changes :: proc(uri_string: string, changes: [dynamic]TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error {
uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
@@ -236,7 +236,7 @@ document_apply_changes :: proc (uri_string: string, changes: [dynamic]TextDocume
return document_refresh(document, config, writer);
}
-document_close :: proc (uri_string: string) -> common.Error {
+document_close :: proc(uri_string: string) -> common.Error {
log.infof("document_close: %v", uri_string);
@@ -268,7 +268,7 @@ document_close :: proc (uri_string: string) -> common.Error {
return .None;
}
-document_refresh :: proc (document: ^Document, config: ^common.Config, writer: ^Writer) -> common.Error {
+document_refresh :: proc(document: ^Document, config: ^common.Config, writer: ^Writer) -> common.Error {
errors, ok := parse_document(document, config);
@@ -287,20 +287,20 @@ document_refresh :: proc (document: ^Document, config: ^common.Config, writer: ^
for error, i in errors {
params.diagnostics[i] = Diagnostic {
- range = common.Range {
- start = common.Position {
- line = error.line - 1,
- character = 0,
- },
- end = common.Position {
- line = error.line,
- character = 0,
- },
+ range = common.Range {
+ start = common.Position {
+ line = error.line - 1,
+ character = 0,
},
- severity = DiagnosticSeverity.Error,
- code = "test",
- message = error.message,
- };
+ end = common.Position {
+ line = error.line,
+ character = 0,
+ },
+ },
+ severity = DiagnosticSeverity.Error,
+ code = "test",
+ message = error.message,
+ };
}
notifaction := Notification {
@@ -337,7 +337,7 @@ document_refresh :: proc (document: ^Document, config: ^common.Config, writer: ^
current_errors: [dynamic]ParserError;
-parser_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
error := ParserError {
line = pos.line,column = pos.column,file = pos.file,
offset = pos.offset,message = fmt.tprintf(msg, ..args),
@@ -345,10 +345,10 @@ parser_error_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
append(&current_errors, error);
}
-parser_warning_handler :: proc (pos: tokenizer.Pos, msg: string, args: ..any) {
+parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
}
-parse_document :: proc (document: ^Document, config: ^common.Config) -> ([]ParserError, bool) {
+parse_document :: proc(document: ^Document, config: ^common.Config) -> ([]ParserError, bool) {
p := parser.Parser {
err = parser_error_handler,
@@ -367,10 +367,10 @@ parse_document :: proc (document: ^Document, config: ^common.Config) -> ([]Parse
pkg.fullpath = document.uri.path;
document.ast = ast.File {
- fullpath = document.uri.path,
- src = document.text[:document.used_text],
- pkg = pkg,
- };
+ fullpath = document.uri.path,
+ src = document.text[:document.used_text],
+ pkg = pkg,
+ };
parser.parse_file(&p, &document.ast);
@@ -409,7 +409,7 @@ parse_document :: proc (document: ^Document, config: ^common.Config) -> ([]Parse
}
append(&imports, import_);
- } else
+ } else
//relative
{
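
Note: document_apply_changes above follows the LSP incremental-synchronization model, where each TextDocumentContentChangeEvent carries a range plus replacement text that is spliced into the server's copy of the document. A generic sketch of that splice step, assuming the range has already been converted to byte offsets (splice is a hypothetical helper, not the repository's implementation):

package example

import "core:strings"

splice :: proc(text: string, start, end: int, new_text: string) -> string {
	parts := []string {text[:start], new_text, text[end:]};
	return strings.concatenate(parts);
}
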
diff --git a/src/server/format.odin b/src/server/format.odin
index 180d0d2..791bab8 100644
--- a/src/server/format.odin
+++ b/src/server/format.odin
@@ -3,7 +3,7 @@ package server
import "shared:common"
-//import "core:odin/printer"
+import "core:odin/printer"
FormattingOptions :: struct {
tabSize: uint,
@@ -25,7 +25,6 @@ TextEdit :: struct {
get_complete_format :: proc (document: ^Document) -> ([]TextEdit, bool) {
- /*
prnt := printer.make_printer(printer.default_style, context.temp_allocator);
printer.print_file(&prnt, &document.ast);
@@ -51,6 +50,4 @@ get_complete_format :: proc (document: ^Document) -> ([]TextEdit, bool) {
append(&edits, edit);
return edits[:], true;
- */
- return {}, true;
}
diff --git a/src/server/hover.odin b/src/server/hover.odin
index 30a8fb0..b8cd5bc 100644
--- a/src/server/hover.odin
+++ b/src/server/hover.odin
@@ -16,12 +16,12 @@ import "core:slice"
import "shared:common"
import "shared:index"
-get_hover_information :: proc (document: ^Document, position: common.Position) -> (Hover, bool) {
+get_hover_information :: proc(document: ^Document, position: common.Position) -> (Hover, bool) {
hover := Hover {
contents = {
- kind = "plaintext"
- }
+ kind = "plaintext",
+ },
};
ast_context := make_ast_context(document.ast, document.imports, document.package_name);
diff --git a/src/server/log.odin b/src/server/log.odin
index 1527fa2..cf83979 100644
--- a/src/server/log.odin
+++ b/src/server/log.odin
@@ -18,17 +18,17 @@ Lsp_Logger_Data :: struct {
writer: ^Writer,
}
-create_lsp_logger :: proc (writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger {
+create_lsp_logger :: proc(writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger {
data := new(Lsp_Logger_Data);
data.writer = writer;
return log.Logger {lsp_logger_proc, data, lowest, opt};
}
-destroy_lsp_logger :: proc (log: ^log.Logger) {
+destroy_lsp_logger :: proc(log: ^log.Logger) {
free(log.data);
}
-lsp_logger_proc :: proc (logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) {
+lsp_logger_proc :: proc(logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) {
data := cast(^Lsp_Logger_Data)logger_data;
diff --git a/src/server/reader.odin b/src/server/reader.odin
index 7679db7..b8843de 100644
--- a/src/server/reader.odin
+++ b/src/server/reader.odin
@@ -4,18 +4,18 @@ import "core:os"
import "core:mem"
import "core:strings"
-ReaderFn :: proc (_: rawptr, _: []byte) -> (int, int);
+ReaderFn :: proc(_: rawptr, _: []byte) -> (int, int);
Reader :: struct {
reader_fn: ReaderFn,
reader_context: rawptr,
}
-make_reader :: proc (reader_fn: ReaderFn, reader_context: rawptr) -> Reader {
+make_reader :: proc(reader_fn: ReaderFn, reader_context: rawptr) -> Reader {
return Reader {reader_context = reader_context, reader_fn = reader_fn};
}
-read_u8 :: proc (reader: ^Reader) -> (u8, bool) {
+read_u8 :: proc(reader: ^Reader) -> (u8, bool) {
value: [1]byte;
@@ -28,7 +28,7 @@ read_u8 :: proc (reader: ^Reader) -> (u8, bool) {
return value[0], true;
}
-read_until_delimiter :: proc (reader: ^Reader, delimiter: u8, builder: ^strings.Builder) -> bool {
+read_until_delimiter :: proc(reader: ^Reader, delimiter: u8, builder: ^strings.Builder) -> bool {
for true {
@@ -48,7 +48,7 @@ read_until_delimiter :: proc (reader: ^Reader, delimiter: u8, builder: ^strings.
return true;
}
-read_sized :: proc (reader: ^Reader, data: []u8) -> bool {
+read_sized :: proc(reader: ^Reader, data: []u8) -> bool {
read, err := reader.reader_fn(reader.reader_context, data);
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 0dc9b7c..6085115 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -193,7 +193,7 @@ handle_error :: proc (err: common.Error, id: RequestId, writer: ^Writer) {
if err != .None {
response := make_response_message_error(
- id = id,
+ id = id,
error = ResponseError {code = err, message = ""});
send_error(response, writer);
@@ -232,7 +232,7 @@ handle_request :: proc (request: json.Value, config: ^common.Config, writer: ^Wr
if !ok {
response := make_response_message_error(
- id = id,
+ id = id,
error = ResponseError {code = .MethodNotFound, message = ""});
send_error(response, writer);
@@ -502,7 +502,7 @@ request_initialize :: proc (task: ^common.Task) {
hoverProvider = enable_hover,
documentFormattingProvider = enable_format,
}
- },
+ },
id = id);
send_response(response, writer);
@@ -546,7 +546,7 @@ request_shutdown :: proc (task: ^common.Task) {
defer free(info);
response := make_response_message(
- params = nil,
+ params = nil,
id = id);
send_response(response, writer);
@@ -583,7 +583,7 @@ request_definition :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = location,
+ params = location,
id = id);
send_response(response, writer);
@@ -623,7 +623,7 @@ request_completion :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = list,
+ params = list,
id = id);
send_response(response, writer);
@@ -662,7 +662,7 @@ request_signature_help :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = help,
+ params = help,
id = id);
send_response(response, writer);
@@ -701,7 +701,7 @@ request_format_document :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = edit,
+ params = edit,
id = id);
send_response(response, writer);
@@ -917,7 +917,7 @@ request_semantic_token_full :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = symbols,
+ params = symbols,
id = id);
send_response(response, writer);
@@ -954,7 +954,7 @@ request_semantic_token_range :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = symbols,
+ params = symbols,
id = id);
send_response(response, writer);
@@ -987,7 +987,7 @@ request_document_symbols :: proc (task: ^common.Task) {
symbols := get_document_symbols(document);
response := make_response_message(
- params = symbols,
+ params = symbols,
id = id);
send_response(response, writer);
@@ -1026,7 +1026,7 @@ request_hover :: proc (task: ^common.Task) {
}
response := make_response_message(
- params = hover,
+ params = hover,
id = id);
send_response(response, writer);
diff --git a/src/server/response.odin b/src/server/response.odin
index d5d3e7b..4e5b680 100644
--- a/src/server/response.odin
+++ b/src/server/response.odin
@@ -13,11 +13,11 @@ send_notification :: proc (notification: Notification, writer: ^Writer) -> bool
return false;
}
- if (!write_sized(writer, transmute([]u8)header)) {
+ if !write_sized(writer, transmute([]u8)header) {
return false;
}
- if (!write_sized(writer, data)) {
+ if !write_sized(writer, data) {
return false;
}
@@ -34,11 +34,11 @@ send_response :: proc (response: ResponseMessage, writer: ^Writer) -> bool {
return false;
}
- if (!write_sized(writer, transmute([]u8)header)) {
+ if !write_sized(writer, transmute([]u8)header) {
return false;
}
- if (!write_sized(writer, data)) {
+ if !write_sized(writer, data) {
return false;
}
@@ -55,11 +55,11 @@ send_error :: proc (response: ResponseMessageError, writer: ^Writer) -> bool {
return false;
}
- if (!write_sized(writer, transmute([]u8)header)) {
+ if !write_sized(writer, transmute([]u8)header) {
return false;
}
- if (!write_sized(writer, data)) {
+ if !write_sized(writer, data) {
return false;
}
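
Note: send_notification, send_response and send_error above each write a header followed by the JSON body, which is the LSP base-protocol framing: a Content-Length header, a blank line, then the payload. A minimal sketch of building such a frame (frame_message is a hypothetical helper, not taken from this diff):

package example

import "core:fmt"

frame_message :: proc(body: string) -> string {
	// produces e.g. "Content-Length: 2\r\n\r\n{}"
	return fmt.tprintf("Content-Length: %v\r\n\r\n%v", len(body), body);
}
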
diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin
index 4db13ca..9b2e669 100644
--- a/src/server/semantic_tokens.odin
+++ b/src/server/semantic_tokens.odin
@@ -81,20 +81,20 @@ SemanticTokenBuilder :: struct {
tokens: [dynamic]u32,
}
-make_token_builder :: proc (allocator := context.temp_allocator) -> SemanticTokenBuilder {
+make_token_builder :: proc(allocator := context.temp_allocator) -> SemanticTokenBuilder {
return {
- tokens = make([dynamic]u32, context.temp_allocator)
+ tokens = make([dynamic]u32, context.temp_allocator),
};
}
-get_tokens :: proc (builder: SemanticTokenBuilder) -> SemanticTokens {
+get_tokens :: proc(builder: SemanticTokenBuilder) -> SemanticTokens {
return {
- data = builder.tokens[:]
+ data = builder.tokens[:],
};
}
-get_semantic_tokens :: proc (document: ^Document, range: common.Range) -> SemanticTokens {
+get_semantic_tokens :: proc(document: ^Document, range: common.Range) -> SemanticTokens {
ast_context := make_ast_context(document.ast, document.imports, document.package_name, context.temp_allocator);
builder := make_token_builder();
@@ -110,7 +110,7 @@ get_semantic_tokens :: proc (document: ^Document, range: common.Range) -> Semant
return get_tokens(builder);
}
-write_semantic_node :: proc (builder: ^SemanticTokenBuilder, node: ^ast.Node, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
+write_semantic_node :: proc(builder: ^SemanticTokenBuilder, node: ^ast.Node, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(node.pos.offset, src, builder.current_start);
@@ -121,7 +121,7 @@ write_semantic_node :: proc (builder: ^SemanticTokenBuilder, node: ^ast.Node, sr
builder.current_start = node.pos.offset;
}
-write_semantic_token :: proc (builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
+write_semantic_token :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(token.pos.offset, src, builder.current_start);
@@ -130,7 +130,7 @@ write_semantic_token :: proc (builder: ^SemanticTokenBuilder, token: tokenizer.T
builder.current_start = token.pos.offset;
}
-write_semantic_token_pos :: proc (builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
+write_semantic_token_pos :: proc(builder: ^SemanticTokenBuilder, pos: tokenizer.Pos, name: string, src: []byte, type: SemanticTokenTypes, modifier: SemanticTokenModifiers) {
position := common.get_relative_token_position(pos.offset, src, builder.current_start);
@@ -139,11 +139,11 @@ write_semantic_token_pos :: proc (builder: ^SemanticTokenBuilder, pos: tokenizer
builder.current_start = pos.offset;
}
-resolve_and_write_ident :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) -> (is_member: bool, is_package: bool, package_name: string) {
+resolve_and_write_ident :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) -> (is_member: bool, is_package: bool, package_name: string) {
n := node.derived.(ast.Ident);
- package_name = ast_context.document_package;
+ package_name = ast_context.document_package;
ast_context.current_package = ast_context.document_package;
ast_context.use_globals = true;
ast_context.use_locals = true;
@@ -155,7 +155,7 @@ resolve_and_write_ident :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder
#partial switch v in symbol.value {
case index.SymbolPackageValue:
write_semantic_node(builder, node, ast_context.file.src, .Namespace, .None);
- is_package = true;
+ is_package = true;
package_name = symbol.pkg;
case index.SymbolStructValue:
write_semantic_node(builder, node, ast_context.file.src, .Struct, .None);
@@ -178,28 +178,28 @@ resolve_and_write_ident :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder
return;
}
-write_semantic_tokens :: proc {
+write_semantic_tokens :: proc{
write_semantic_tokens_node,
write_semantic_tokens_dynamic_array,
write_semantic_tokens_array,
write_semantic_tokens_stmt,
};
-write_semantic_tokens_array :: proc (array: $A/[]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_tokens_array :: proc(array: $A/[]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
for elem, i in array {
write_semantic_tokens(elem, builder, ast_context);
}
}
-write_semantic_tokens_dynamic_array :: proc (array: $A/[dynamic]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_tokens_dynamic_array :: proc(array: $A/[dynamic]^$T, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
for elem, i in array {
write_semantic_tokens(elem, builder, ast_context);
}
}
-write_semantic_tokens_stmt :: proc (node: ^ast.Stmt, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_tokens_stmt :: proc(node: ^ast.Stmt, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
ast_context.current_package = ast_context.document_package;
ast_context.use_globals = true;
ast_context.use_locals = true;
@@ -208,7 +208,7 @@ write_semantic_tokens_stmt :: proc (node: ^ast.Stmt, builder: ^SemanticTokenBuil
write_semantic_tokens_node(node, builder, ast_context);
}
-write_semantic_tokens_node :: proc (node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_tokens_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -365,7 +365,7 @@ write_semantic_tokens_node :: proc (node: ^ast.Node, builder: ^SemanticTokenBuil
}
}
-write_semantic_token_basic_lit :: proc (basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_token_basic_lit :: proc(basic_lit: ast.Basic_Lit, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
if symbol, ok := resolve_basic_lit(ast_context, basic_lit); ok {
@@ -383,7 +383,7 @@ write_semantic_token_basic_lit :: proc (basic_lit: ast.Basic_Lit, builder: ^Sema
}
}
-write_semantic_tokens_value_decl :: proc (value_decl: ast.Value_Decl, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_tokens_value_decl :: proc(value_decl: ast.Value_Decl, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -445,7 +445,7 @@ write_semantic_tokens_value_decl :: proc (value_decl: ast.Value_Decl, builder: ^
}
}
-write_semantic_token_op :: proc (builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte) {
+write_semantic_token_op :: proc(builder: ^SemanticTokenBuilder, token: tokenizer.Token, src: []byte) {
if token.text == "=" {
write_semantic_token_pos(builder, token.pos, token.text, src, .Operator, .None);
@@ -454,7 +454,7 @@ write_semantic_token_op :: proc (builder: ^SemanticTokenBuilder, token: tokenize
}
}
-write_semantic_proc_type :: proc (node: ^ast.Proc_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_proc_type :: proc(node: ^ast.Proc_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -493,7 +493,7 @@ write_semantic_proc_type :: proc (node: ^ast.Proc_Type, builder: ^SemanticTokenB
}
}
-write_semantic_enum_fields :: proc (node: ast.Enum_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_enum_fields :: proc(node: ast.Enum_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -509,7 +509,7 @@ write_semantic_enum_fields :: proc (node: ast.Enum_Type, builder: ^SemanticToken
}
}
-write_semantic_struct_fields :: proc (node: ast.Struct_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_struct_fields :: proc(node: ast.Struct_Type, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -529,7 +529,7 @@ write_semantic_struct_fields :: proc (node: ast.Struct_Type, builder: ^SemanticT
}
}
-write_semantic_selector :: proc (selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
+write_semantic_selector :: proc(selector: ^ast.Selector_Expr, builder: ^SemanticTokenBuilder, ast_context: ^AstContext) {
using ast;
@@ -568,7 +568,7 @@ write_semantic_selector :: proc (selector: ^ast.Selector_Expr, builder: ^Semanti
}
}
-get_locals_at :: proc (function: ^ast.Node, position: ^ast.Node, ast_context: ^AstContext) {
+get_locals_at :: proc(function: ^ast.Node, position: ^ast.Node, ast_context: ^AstContext) {
clear_locals(ast_context);
@@ -581,7 +581,7 @@ get_locals_at :: proc (function: ^ast.Node, position: ^ast.Node, ast_context: ^A
}
document_position := DocumentPositionContext {
- position = position.end.offset
+ position = position.end.offset,
};
get_locals(ast_context.file, function, ast_context, &document_position);
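
Note: SemanticTokenBuilder above appends raw u32s and tracks current_start because the LSP semantic-tokens payload (per the 3.16 specification) is a flat array of five integers per token, with line and column given as deltas from the previous token. A small sketch of that layout (Token and encode are illustrative, not the repository's types):

package example

Token :: struct {
	delta_line:  u32, // lines since the previous token
	delta_start: u32, // columns since the previous token on the same line, otherwise from column 0
	length:      u32,
	token_type:  u32, // index into the server's token-type legend
	modifiers:   u32, // bit flags into the modifier legend
}

encode :: proc(tokens: []Token) -> [dynamic]u32 {
	data := make([dynamic]u32);
	for t in tokens {
		append(&data, t.delta_line, t.delta_start, t.length, t.token_type, t.modifiers);
	}
	return data;
}
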
diff --git a/src/server/types.odin b/src/server/types.odin
index 22a8451..0efbd39 100644
--- a/src/server/types.odin
+++ b/src/server/types.odin
@@ -147,7 +147,7 @@ ClientCapabilities :: struct {
}
RangeOptional :: union {
- common.Range
+ common.Range,
}
TextDocumentContentChangeEvent :: struct {
diff --git a/src/server/unmarshal.odin b/src/server/unmarshal.odin
index c61b7c8..ba37497 100644
--- a/src/server/unmarshal.odin
+++ b/src/server/unmarshal.odin
@@ -12,7 +12,7 @@ import "core:fmt"
Right now union handling is type specific so you can only have one struct type, int type, etc.
*/
-unmarshal :: proc (json_value: json.Value, v: any, allocator: mem.Allocator) -> json.Marshal_Error {
+unmarshal :: proc(json_value: json.Value, v: any, allocator: mem.Allocator) -> json.Marshal_Error {
using runtime;
diff --git a/src/server/writer.odin b/src/server/writer.odin
index 4e9ebae..46a340e 100644
--- a/src/server/writer.odin
+++ b/src/server/writer.odin
@@ -6,7 +6,7 @@ import "core:fmt"
import "core:strings"
import "core:sync"
-WriterFn :: proc (_: rawptr, _: []byte) -> (int, int);
+WriterFn :: proc(_: rawptr, _: []byte) -> (int, int);
Writer :: struct {
writer_fn: WriterFn,
@@ -14,13 +14,13 @@ Writer :: struct {
writer_mutex: sync.Mutex,
}
-make_writer :: proc (writer_fn: WriterFn, writer_context: rawptr) -> Writer {
+make_writer :: proc(writer_fn: WriterFn, writer_context: rawptr) -> Writer {
writer := Writer {writer_context = writer_context, writer_fn = writer_fn};
sync.mutex_init(&writer.writer_mutex);
return writer;
}
-write_sized :: proc (writer: ^Writer, data: []byte) -> bool {
+write_sized :: proc(writer: ^Writer, data: []byte) -> bool {
sync.mutex_lock(&writer.writer_mutex);
defer sync.mutex_unlock(&writer.writer_mutex);