32 files changed, 870 insertions, 526 deletions
@@ -11,18 +11,25 @@ if "%1" == "CI" ( set "PATH=%cd%\Odin;!PATH!" odin test tests -collection:src=src -define:ODIN_TEST_THREADS=1 - if %errorlevel% neq 0 exit /b 1 + if errorlevel 1 exit /b 1 odin build src\ -collection:src=src -out:ols.exe -o:speed -no-bounds-check -extra-linker-flags:"/STACK:4000000,2000000" -define:VERSION=%OLS_VERSION% + if errorlevel 1 exit /b 1 pushd . call "tools/odinfmt/tests.bat" - if %errorlevel% neq 0 exit /b 1 + if errorlevel 1 ( + popd + exit /b 1 + ) popd odin build tools\odinfmt\main.odin -file -collection:src=src -out:odinfmt.exe -o:speed -no-bounds-check -extra-linker-flags:"/STACK:4000000,2000000" + if errorlevel 1 exit /b 1 ) else ( odin build src\ -collection:src=src -out:ols.exe -o:speed -no-bounds-check -extra-linker-flags:"/STACK:4000000,2000000" -define:VERSION=%OLS_VERSION% + if errorlevel 1 exit /b 1 odin build tools\odinfmt\main.odin -file -collection:src=src -out:odinfmt.exe -o:speed -no-bounds-check -extra-linker-flags:"/STACK:4000000,2000000" + if errorlevel 1 exit /b 1 ) diff --git a/editors/vscode/language-configuration.json b/editors/vscode/language-configuration.json index c0c2a06..489ef65 100644 --- a/editors/vscode/language-configuration.json +++ b/editors/vscode/language-configuration.json @@ -28,5 +28,10 @@ "increaseIndentPattern": "^.*\\{[^}\"']*$|^.*\\([^\\)\"']*$|^\\s*case.*:$", "decreaseIndentPattern": "^\\s*(\\s*\\/[*].*[*]\\/\\s*)*[})]" }, - + "folding": { + "markers": { + "start": "^\\s*//\\s*?region\\b", + "end": "^\\s*//\\s*?endregion\\b" + } + } }
\ No newline at end of file diff --git a/src/common/uri.odin b/src/common/uri.odin index 32c32e0..ece873e 100644 --- a/src/common/uri.odin +++ b/src/common/uri.odin @@ -45,7 +45,7 @@ parse_uri :: proc(value: string, allocator: mem.Allocator) -> (Uri, bool) { //Note(Daniel, Again some really incomplete and scuffed uri writer) create_uri :: proc(path: string, allocator: mem.Allocator) -> Uri { - path_forward, _ := filepath.to_slash(path, context.temp_allocator) + path_forward, _ := filepath.replace_path_separators(path, '/', context.temp_allocator) builder := strings.builder_make(allocator) diff --git a/src/common/util.odin b/src/common/util.odin index 974fe6b..f19bb0e 100644 --- a/src/common/util.odin +++ b/src/common/util.odin @@ -5,9 +5,7 @@ import "core:fmt" import "core:log" import "core:mem" import "core:os" -import "core:os/os2" import "core:path/filepath" -import "core:path/slashpath" import "core:strings" import "core:time" @@ -19,15 +17,12 @@ when ODIN_OS == .Windows { delimiter :: ":" } -//TODO(daniel): This is temporary and should not be needed after os2 -File_Mode_User_Executable :: os.File_Mode(1 << 8) - lookup_in_path :: proc(name: string) -> (string, bool) { path := os.get_env("PATH", context.temp_allocator) for directory in strings.split_iterator(&path, delimiter) { when ODIN_OS == .Windows { - possibility := filepath.join( + possibility, _ := filepath.join( elems = {directory, fmt.tprintf("%v.exe", name)}, allocator = context.temp_allocator, ) @@ -35,11 +30,11 @@ lookup_in_path :: proc(name: string) -> (string, bool) { return possibility, true } } else { - possibility := filepath.join(elems = {directory, name}, allocator = context.temp_allocator) + possibility, _ := filepath.join(elems = {directory, name}, allocator = context.temp_allocator) possibility = resolve_home_dir(possibility, context.temp_allocator) if os.exists(possibility) { if info, err := os.stat(possibility, context.temp_allocator); - err == os.ERROR_NONE && (File_Mode_User_Executable & info.mode) != 0 { + err == os.ERROR_NONE && .Execute_User in info.mode { return possibility, true } } @@ -66,7 +61,8 @@ resolve_home_dir :: proc( return path, false } - return filepath.join({home, path[1:]}, allocator), true + path, _ := filepath.join({home, path[1:]}, allocator) + return path, true } else if strings.has_prefix(path, "$HOME") { home := os.get_env("HOME", context.temp_allocator) if home == "" { @@ -74,13 +70,14 @@ resolve_home_dir :: proc( return path, false } - return filepath.join({home, path[5:]}, allocator), true + path, _ := filepath.join({home, path[5:]}, allocator) + return path, true } return path, false } } - FILE :: struct {} +FILE :: struct {} when ODIN_OS == .Darwin || ODIN_OS == .FreeBSD || ODIN_OS == .Linux || ODIN_OS == .NetBSD { run_executable :: proc(command: string, stdout: ^[]byte) -> (u32, bool, []byte) { @@ -118,7 +115,7 @@ when ODIN_OS == .Darwin || ODIN_OS == .FreeBSD || ODIN_OS == .Linux || ODIN_OS = return 0, true, stdout[0:index] } - foreign libc + foreign libc { popen :: proc(command: cstring, type: cstring) -> ^FILE --- pclose :: proc(stream: ^FILE) -> i32 --- @@ -127,7 +124,7 @@ when ODIN_OS == .Darwin || ODIN_OS == .FreeBSD || ODIN_OS == .Linux || ODIN_OS = } get_executable_path :: proc(allocator := context.temp_allocator) -> string { - exe_dir, err := os2.get_executable_directory(context.temp_allocator) + exe_dir, err := os.get_executable_directory(context.temp_allocator) if err != nil { log.error("Failed to resolve executable path: ", err) @@ -136,4 +133,3 @@ get_executable_path 
:: proc(allocator := context.temp_allocator) -> string { return exe_dir } - diff --git a/src/main.odin b/src/main.odin index b4aad6f..c08f803 100644 --- a/src/main.odin +++ b/src/main.odin @@ -14,14 +14,14 @@ import "src:server" VERSION := #config(VERSION, "dev") os_read :: proc(handle: rawptr, data: []byte) -> (int, int) { - ptr := cast(^os.Handle)handle - a, b := os.read(ptr^, data) + ptr := cast(^os.File)handle + a, b := os.read(ptr, data) return a, cast(int)(b != nil) } os_write :: proc(handle: rawptr, data: []byte) -> (int, int) { - ptr := cast(^os.Handle)handle - a, b := os.write(ptr^, data) + ptr := cast(^os.File)handle + a, b := os.write(ptr, data) return a, cast(int)(b != nil) } @@ -102,8 +102,8 @@ main :: proc() { fmt.println("ols version", VERSION) os.exit(0) } - reader := server.make_reader(os_read, cast(rawptr)&os.stdin) - writer := server.make_writer(os_write, cast(rawptr)&os.stdout) + reader := server.make_reader(os_read, cast(rawptr)os.stdin) + writer := server.make_writer(os_write, cast(rawptr)os.stdout) /* fh, err := os.open("log.txt", os.O_RDWR|os.O_CREATE) diff --git a/src/odin/format/format.odin b/src/odin/format/format.odin index 2ab6a94..9b32721 100644 --- a/src/odin/format/format.odin +++ b/src/odin/format/format.odin @@ -18,8 +18,8 @@ find_config_file_or_default :: proc(path: string) -> printer.Config { //go up the directory until we find odinfmt.json path := path - ok: bool - if path, ok = filepath.abs(path); !ok { + err: os.Error + if path, err = filepath.abs(path, context.temp_allocator); err != nil { return default_style } @@ -27,14 +27,14 @@ find_config_file_or_default :: proc(path: string) -> printer.Config { found := false config := default_style - if (os.exists(name)) { - if data, ok := os.read_entire_file(name, context.temp_allocator); ok { + if os.exists(name) { + if data, err := os.read_entire_file(name, context.temp_allocator); err == nil { if json.unmarshal(data, &config) == nil { found = true } } } else { - new_path := filepath.join(elems = {path, ".."}, allocator = context.temp_allocator) + new_path, _ := filepath.join(elems = {path, ".."}, allocator = context.temp_allocator) //Currently the filepath implementation seems to stop at the root level, this might not be the best solution. 
if new_path == path { return default_style @@ -53,13 +53,13 @@ find_config_file_or_default :: proc(path: string) -> printer.Config { // of searching for it up a directory tree of a path read_config_file_from_path_or_default :: proc(config_path: string) -> printer.Config { path := config_path - ok: bool - if path, ok = filepath.abs(config_path); !ok { + err: os.Error + if path, err = filepath.abs(config_path, context.temp_allocator); err != nil { return default_style } config := default_style - if (os.exists(path)) { - if data, ok := os.read_entire_file(path, context.temp_allocator); ok { + if os.exists(path) { + if data, err := os.read_entire_file(path, context.temp_allocator); err == nil { if json.unmarshal(data, &config) == nil { return config } diff --git a/src/odin/printer/printer.odin b/src/odin/printer/printer.odin index 48897b8..61011ed 100644 --- a/src/odin/printer/printer.odin +++ b/src/odin/printer/printer.odin @@ -1,7 +1,5 @@ package odin_printer -import "core:fmt" -import "core:log" import "core:mem" import "core:odin/ast" import "core:odin/tokenizer" diff --git a/src/odin/printer/visit.odin b/src/odin/printer/visit.odin index d025108..b874dd7 100644 --- a/src/odin/printer/visit.odin +++ b/src/odin/printer/visit.odin @@ -1,7 +1,6 @@ #+feature using-stmt package odin_printer -import "core:fmt" import "core:log" import "core:odin/ast" import "core:odin/parser" diff --git a/src/server/action.odin b/src/server/action.odin index 64516a3..11e8f86 100644 --- a/src/server/action.odin +++ b/src/server/action.odin @@ -73,7 +73,6 @@ get_code_actions :: proc(document: ^Document, range: common.Range, config: ^comm if position_context.switch_stmt != nil || position_context.switch_type_stmt != nil { add_populate_switch_cases_action( - document, &ast_context, &position_context, strings.clone(document.uri.uri), diff --git a/src/server/action_populate_switch_cases.odin b/src/server/action_populate_switch_cases.odin index f038e7c..1e60f15 100644 --- a/src/server/action_populate_switch_cases.odin +++ b/src/server/action_populate_switch_cases.odin @@ -4,6 +4,7 @@ package server import "core:fmt" import "core:odin/ast" +import "core:odin/tokenizer" import "core:strings" import "src:common" @@ -17,6 +18,7 @@ get_line_start_offset :: proc(src: string, offset: int) -> int { } return line_start } + get_block_original_text :: proc(block: []^ast.Stmt, document_text: string) -> string { if len(block) == 0 { return "" @@ -26,54 +28,57 @@ get_block_original_text :: proc(block: []^ast.Stmt, document_text: string) -> st return string(document_text[start:end]) } -SwitchCaseInfo :: struct { - names: []string, - body_indentation: string, - body: string, -} SwitchBlockInfo :: struct { - existing_cases: []SwitchCaseInfo, - all_covered_case_names: []string, - all_case_names: []string, - switch_indentation: string, - is_enum: bool, + names: []string, + existing_cases: map[string]struct{}, + switch_indentation: string, + is_enum: bool, + pos: tokenizer.Pos, } + get_switch_cases_info :: proc( - document: ^Document, ast_context: ^AstContext, position_context: ^DocumentPositionContext, ) -> ( - info: SwitchBlockInfo, - ok: bool, + SwitchBlockInfo, + bool, ) { - if (position_context.switch_stmt == nil && position_context.switch_type_stmt == nil) || - (position_context.switch_stmt != nil && position_context.switch_stmt.cond == nil) { + if position_context.switch_stmt == nil && position_context.switch_type_stmt == nil { return {}, false } + + if position_context.switch_stmt != nil && position_context.switch_stmt.cond == nil 
{ + return {}, false + } + switch_block: ^ast.Block_Stmt found_switch_block: bool is_enum: bool + pos: tokenizer.Pos if position_context.switch_stmt != nil { switch_block, found_switch_block = position_context.switch_stmt.body.derived.(^ast.Block_Stmt) is_enum = true + pos = position_context.switch_stmt.pos } + if !found_switch_block && position_context.switch_type_stmt != nil { switch_block, found_switch_block = position_context.switch_type_stmt.body.derived.(^ast.Block_Stmt) + pos = position_context.switch_type_stmt.pos } + if !found_switch_block { return {}, false } - switch_indentation := get_line_indentation(string(document.text), switch_block.pos.offset) - existing_cases_in_order := make([dynamic]SwitchCaseInfo, context.temp_allocator) - all_covered_names := make([dynamic]string, context.temp_allocator) + switch_indentation := get_line_indentation(ast_context.file.src, switch_block.pos.offset) + existing_cases := make(map[string]struct{}, context.temp_allocator) + + for stmt in switch_block.stmts { if case_clause, ok := stmt.derived.(^ast.Case_Clause); ok { - case_names := make([dynamic]string, context.temp_allocator) for clause in case_clause.list { if is_enum { if name, ok := get_used_switch_name(clause); ok && name != "" { - append(&case_names, name) - append(&all_covered_names, name) + existing_cases[name] = {} } } else { reset_ast_context(ast_context) @@ -85,19 +90,12 @@ get_switch_cases_info :: proc( name = get_signature(ast_context, symbol) } if name != "" { - append(&case_names, name) - append(&all_covered_names, name) + existing_cases[name] = {} } } } } - if len(case_names) > 0 { - case_info := SwitchCaseInfo { - names = case_names[:], - body = get_block_original_text(case_clause.body, string(document.text)), - } - append(&existing_cases_in_order, case_info) - } + pos = case_clause.stmt_base.end } } if is_enum { @@ -106,47 +104,47 @@ get_switch_cases_info :: proc( return {}, false } return SwitchBlockInfo { - existing_cases = existing_cases_in_order[:], - all_covered_case_names = all_covered_names[:], - all_case_names = enum_value.names, + names = enum_value.names, + existing_cases = existing_cases, switch_indentation = switch_indentation, is_enum = !was_super_enum, + pos = pos, }, true - } else { - st := position_context.switch_type_stmt - if st == nil { - return {}, false - } + } + + st := position_context.switch_type_stmt + if st == nil { + return {}, false + } + reset_ast_context(ast_context) + union_value, unwrap_ok := unwrap_union(ast_context, st.tag.derived.(^ast.Assign_Stmt).rhs[0]) + if !unwrap_ok { + return {}, false + } + all_case_names := make([]string, len(union_value.types), context.temp_allocator) + for t, i in union_value.types { reset_ast_context(ast_context) - union_value, unwrap_ok := unwrap_union(ast_context, st.tag.derived.(^ast.Assign_Stmt).rhs[0]) - if !unwrap_ok { - return {}, false - } - all_case_names := make([]string, len(union_value.types), context.temp_allocator) - for t, i in union_value.types { - reset_ast_context(ast_context) - if symbol, ok := resolve_type_expression(ast_context, t); ok { - case_name := get_qualified_union_case_name(&symbol, ast_context, position_context) - //TODO: this is wrong for anonymous enums and structs, where the name field is "enum" or "struct" respectively but we want to use the full signature - //we also can't use the signature all the time because type aliases need to use specifically the alias name here and not the signature - if case_name == "" { - case_name = get_signature(ast_context, symbol) - } - 
all_case_names[i] = case_name - } else { - all_case_names[i] = "invalid type expression" + if symbol, ok := resolve_type_expression(ast_context, t); ok { + case_name := get_qualified_union_case_name(&symbol, ast_context, position_context) + //TODO: this is wrong for anonymous enums and structs, where the name field is "enum" or "struct" respectively but we want to use the full signature + //we also can't use the signature all the time because type aliases need to use specifically the alias name here and not the signature + if case_name == "" { + case_name = get_signature(ast_context, symbol) } + all_case_names[i] = case_name + } else { + all_case_names[i] = "invalid type expression" } - return SwitchBlockInfo { - existing_cases = existing_cases_in_order[:], - all_covered_case_names = all_covered_names[:], - all_case_names = all_case_names, - switch_indentation = switch_indentation, - is_enum = false, - }, - true } + return SwitchBlockInfo { + names = all_case_names, + existing_cases = existing_cases, + switch_indentation = switch_indentation, + is_enum = false, + pos = pos, + }, + true } create_populate_switch_cases_edit :: proc( @@ -160,65 +158,39 @@ create_populate_switch_cases_edit :: proc( if position_context.switch_stmt == nil && position_context.switch_type_stmt == nil { return {}, false } - //entirety of the switch block - range: common.Range - if info.is_enum { - range = common.get_token_range(position_context.switch_stmt.body.stmt_base, position_context.file.src) - } else { - range = common.get_token_range(position_context.switch_type_stmt.body.stmt_base, position_context.file.src) + + pos := info.pos + pos.line += 1 + pos.column = 1 + + position := common.token_pos_to_position(pos, position_context.file.src) + + range := common.Range { + start = position, + end = position, } + replacement_builder := strings.builder_make() dot := info.is_enum ? "." 
: "" b := &replacement_builder - fmt.sbprintln(b, "{") - for case_info in info.existing_cases { - fmt.sbprint(b, info.switch_indentation, "case ", sep = "") - for name, i in case_info.names { - fmt.sbprint(b, dot, name, sep = "") - if i != len(case_info.names) - 1 { - fmt.sbprint(b, ", ", sep = "") - } - } - fmt.sbprintln(b, ":", sep = "") - case_body := case_info.body - if case_body != "" { - fmt.sbprintln(b, case_info.body) - } - } - existing_case_names := map[string]struct{}{} - for name in info.all_covered_case_names { - existing_case_names[name] = {} - } - for name in info.all_case_names { - if name in existing_case_names {continue} //covered by prev loop + for name in info.names { + if name in info.existing_cases {continue} fmt.sbprintln(b, info.switch_indentation, "case ", dot, name, ":", sep = "") } - fmt.sbprint(b, info.switch_indentation, "}", sep = "") return TextEdit{range = range, newText = strings.to_string(replacement_builder)}, true } + @(private = "package") add_populate_switch_cases_action :: proc( - document: ^Document, ast_context: ^AstContext, position_context: ^DocumentPositionContext, uri: string, actions: ^[dynamic]CodeAction, ) { - info, ok := get_switch_cases_info(document, ast_context, position_context) + info, ok := get_switch_cases_info(ast_context, position_context) if !ok {return} - all_cases_covered := true - { - existing_case_names := map[string]struct{}{} - for name in info.all_covered_case_names { - existing_case_names[name] = {} - } - for name in info.all_case_names { - if name not_in existing_case_names { - all_cases_covered = false - } - } - } - if all_cases_covered {return} //action not needed + + if len(info.existing_cases) == len(info.names) {return} //action not needed edit, edit_ok := create_populate_switch_cases_edit(position_context, info) if !edit_ok {return} textEdits := make([dynamic]TextEdit, context.temp_allocator) diff --git a/src/server/analysis.odin b/src/server/analysis.odin index c9cd863..1a85708 100644 --- a/src/server/analysis.odin +++ b/src/server/analysis.odin @@ -438,7 +438,7 @@ is_symbol_same_typed :: proc(ast_context: ^AstContext, a, b: Symbol, flags: ast. 
case SymbolBasicValue: b_value := b.value.(SymbolBasicValue) return a_value.ident.name == b_value.ident.name && a.pkg == b.pkg - case SymbolStructValue, SymbolEnumValue, SymbolUnionValue, SymbolBitSetValue: + case SymbolStructValue, SymbolEnumValue, SymbolUnionValue, SymbolBitSetValue, SymbolBitFieldValue: return a.name == b.name && a.pkg == b.pkg case SymbolSliceValue: b_value := b.value.(SymbolSliceValue) @@ -1381,7 +1381,9 @@ resolve_call_directive :: proc(ast_context: ^AstContext, call: ^ast.Call_Expr) - switch directive.name { case "config": - return resolve_type_expression(ast_context, call.args[1]) + if len(call.args) > 1 { + return resolve_type_expression(ast_context, call.args[1]) + } case "load": if len(call.args) == 1 { ident := new_type(ast.Ident, call.pos, call.end, ast_context.allocator) @@ -1812,7 +1814,7 @@ internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ide try_build_package(symbol.pkg) - return symbol, true + return resolve_symbol_return(ast_context, symbol) } } @@ -1823,8 +1825,9 @@ internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ide pkg = indexer.runtime_package, value = SymbolPackageValue{}, } + try_build_package(symbol.pkg) - return symbol, true + return resolve_symbol_return(ast_context, symbol) } if global, ok := ast_context.globals[node.name]; @@ -1853,7 +1856,7 @@ internal_resolve_type_identifier :: proc(ast_context: ^AstContext, node: ast.Ide try_build_package(symbol.pkg) - return symbol, true + return resolve_symbol_return(ast_context, symbol) } is_runtime := strings.contains(ast_context.current_package, "base/runtime") @@ -1896,7 +1899,7 @@ resolve_local_identifier :: proc(ast_context: ^AstContext, node: ast.Ident, loca value = SymbolPackageValue{}, } - return symbol, true + return resolve_symbol_return(ast_context, symbol) } } } @@ -2639,6 +2642,16 @@ resolve_symbol_return :: proc(ast_context: ^AstContext, symbol: Symbol, ok := tr } #partial switch &v in symbol.value { + case SymbolPackageValue: + if pkg, ok := indexer.index.collection.packages[symbol.pkg]; ok { + if symbol.doc == "" { + symbol.doc = strings.to_string(pkg.doc) + } + if symbol.comment == "" { + symbol.comment = strings.to_string(pkg.comment) + } + } + return symbol, true case SymbolProcedureGroupValue: if s, ok := resolve_function_overload(ast_context, v.group.derived.(^ast.Proc_Group)); ok { if s.doc == "" { @@ -3396,7 +3409,7 @@ get_package_from_node :: proc(node: ast.Node) -> string { } get_package_from_filepath :: proc(file_path: string) -> string { - slashed, _ := filepath.to_slash(file_path, context.temp_allocator) + slashed, _ := filepath.replace_path_separators(file_path, '/', context.temp_allocator) ret := path.dir(slashed, context.temp_allocator) return ret } diff --git a/src/server/build.odin b/src/server/build.odin index 59dc63d..6df21a8 100644 --- a/src/server/build.odin +++ b/src/server/build.odin @@ -2,6 +2,7 @@ package server import "base:runtime" +import "core:slice" import "core:fmt" import "core:log" @@ -126,6 +127,25 @@ skip_file :: proc(filename: string) -> bool { return false } +// Finds all packages under the provided path by walking the file system +// and appends them to the provided dynamic array +append_packages :: proc( + path: string, + pkgs: ^[dynamic]string, + allocator := context.temp_allocator, +) { + w := os.walker_create(path) + defer os.walker_destroy(&w) + for info in os.walker_walk(&w) { + if info.type != .Directory && filepath.ext(info.name) == ".odin" { + dir := filepath.dir(info.fullpath, 
allocator) + if !slice.contains(pkgs[:], dir) { + append(pkgs, dir) + } + } + } +} + should_collect_file :: proc(file_tags: parser.File_Tags) -> bool { if file_tags.ignore { return false @@ -165,8 +185,8 @@ try_build_package :: proc(pkg_name: string) { matches, err := filepath.glob(fmt.tprintf("%v/*.odin", pkg_name), context.temp_allocator) - if err != .None { - log.errorf("Failed to glob %v for indexing package", pkg_name) + if err != nil && err != .Not_Exist { + log.errorf("Failed to glob %v for indexing package: %v", pkg_name, err) return } @@ -182,10 +202,10 @@ try_build_package :: proc(pkg_name: string) { continue } - data, ok := os.read_entire_file(fullpath, context.allocator) + data, err := os.read_entire_file(fullpath, context.allocator) - if !ok { - log.errorf("failed to read entire file for indexing %v", fullpath) + if err != nil { + log.errorf("failed to read entire file for indexing %v: %v", fullpath, err) continue } @@ -212,10 +232,11 @@ try_build_package :: proc(pkg_name: string) { pkg = pkg, } - ok = parser.parse_file(&p, &file) + ok := parser.parse_file(&p, &file) if !ok { - if !strings.contains(fullpath, "builtin.odin") && !strings.contains(fullpath, "intrinsics.odin") { + if !strings.contains(fullpath, "builtin.odin") && + !strings.contains(fullpath, "intrinsics.odin") { log.errorf("error in parse file for indexing %v", fullpath) } continue @@ -229,9 +250,10 @@ try_build_package :: proc(pkg_name: string) { } } - build_cache.loaded_pkgs[strings.clone(pkg_name, indexer.index.collection.allocator)] = PackageCacheInfo { - timestamp = time.now(), - } + build_cache.loaded_pkgs[strings.clone(pkg_name, indexer.index.collection.allocator)] = + PackageCacheInfo { + timestamp = time.now(), + } } @@ -241,7 +263,7 @@ remove_index_file :: proc(uri: common.Uri) -> common.Error { fullpath := uri.path when ODIN_OS == .Windows { - fullpath, _ = filepath.to_slash(fullpath, context.temp_allocator) + fullpath, _ = filepath.replace_path_separators(fullpath, '/', context.temp_allocator) } corrected_uri := common.create_uri(fullpath, context.temp_allocator) @@ -273,14 +295,14 @@ index_file :: proc(uri: common.Uri, text: string) -> common.Error { fullpath := uri.path p := parser.Parser { - err = log_error_handler, - warn = log_warning_handler, - flags = {.Optional_Semicolons}, - } + err = log_error_handler, + warn = log_warning_handler, + flags = {.Optional_Semicolons}, + } when ODIN_OS == .Windows { correct := common.get_case_sensitive_path(fullpath, context.temp_allocator) - fullpath, _ = filepath.to_slash(correct, context.temp_allocator) + fullpath, _ = filepath.replace_path_separators(correct, '/', context.temp_allocator) } dir := filepath.base(filepath.dir(fullpath, context.temp_allocator)) @@ -295,10 +317,10 @@ index_file :: proc(uri: common.Uri, text: string) -> common.Error { } file := ast.File { - fullpath = fullpath, - src = text, - pkg = pkg, - } + fullpath = fullpath, + src = text, + pkg = pkg, + } { allocator := context.allocator @@ -308,7 +330,8 @@ index_file :: proc(uri: common.Uri, text: string) -> common.Error { ok = parser.parse_file(&p, &file) if !ok { - if !strings.contains(fullpath, "builtin.odin") && !strings.contains(fullpath, "intrinsics.odin") { + if !strings.contains(fullpath, "builtin.odin") && + !strings.contains(fullpath, "intrinsics.odin") { log.errorf("error in parse file for indexing %v", fullpath) } } diff --git a/src/server/caches.odin b/src/server/caches.odin index ff7a422..519ce6c 100644 --- a/src/server/caches.odin +++ b/src/server/caches.odin @@ -76,26 +76,13 
@@ clear_all_package_aliases :: proc() { //Go through all the collections to find all the possible packages that exists find_all_package_aliases :: proc() { - walk_proc :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr) -> (err: os.Errno, skip_dir: bool) { - data := cast(^[dynamic]string)user_data - - if !info.is_dir && filepath.ext(info.name) == ".odin" { - dir := filepath.dir(info.fullpath, context.temp_allocator) - if !slice.contains(data[:], dir) { - append(data, dir) - } - } - - return in_err, false - } - for k, v in common.config.collections { pkgs := make([dynamic]string, context.temp_allocator) - filepath.walk(v, walk_proc, &pkgs) + append_packages(v, &pkgs, context.temp_allocator) for pkg in pkgs { if pkg, err := filepath.rel(v, pkg, context.temp_allocator); err == .None { - forward_pkg, _ := filepath.to_slash(pkg, context.temp_allocator) + forward_pkg, _ := filepath.replace_path_separators(pkg, '/', context.temp_allocator) if k not_in build_cache.pkg_aliases { build_cache.pkg_aliases[k] = make([dynamic]string) } diff --git a/src/server/check.odin b/src/server/check.odin index fed3a3f..1280b76 100644 --- a/src/server/check.odin +++ b/src/server/check.odin @@ -1,6 +1,5 @@ package server -import "base:intrinsics" import "base:runtime" import "core:encoding/json" @@ -11,11 +10,7 @@ import "core:os" import "core:path/filepath" import path "core:path/slashpath" import "core:slice" -import "core:strconv" import "core:strings" -import "core:sync" -import "core:text/scanner" -import "core:thread" import "src:common" @@ -41,24 +36,11 @@ Json_Errors :: struct { //If the user does not specify where to call odin check, it'll just find all directory with odin, and call them seperately. fallback_find_odin_directories :: proc(config: ^common.Config) -> []string { - walk_proc :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr) -> (err: os.Errno, skip_dir: bool) { - data := cast(^[dynamic]string)user_data - - if !info.is_dir && filepath.ext(info.name) == ".odin" { - dir := filepath.dir(info.fullpath, context.temp_allocator) - if !slice.contains(data[:], dir) { - append(data, dir) - } - } - - return in_err, false - } - data := make([dynamic]string, context.temp_allocator) if len(config.workspace_folders) > 0 { if uri, ok := common.parse_uri(config.workspace_folders[0].uri, context.temp_allocator); ok { - filepath.walk(uri.path, walk_proc, &data) + append_packages(uri.path, &data, context.temp_allocator) } } @@ -148,7 +130,7 @@ check :: proc(paths: []string, uri: common.Uri, config: ^common.Config) { entry_point_opt, config.checker_args, "-json-errors", - ODIN_OS == .Linux || ODIN_OS == .Darwin ? "2>&1" : "", + ODIN_OS in runtime.Odin_OS_Types{.Linux, .Darwin, .FreeBSD, .OpenBSD, .NetBSD} ? 
"2>&1" : "", ), &data, ); !ok { diff --git a/src/server/collector.odin b/src/server/collector.odin index 708c96b..0f85774 100644 --- a/src/server/collector.odin +++ b/src/server/collector.odin @@ -1,6 +1,7 @@ #+feature using-stmt package server +import "core:fmt" import "core:mem" import "core:odin/ast" import "core:path/filepath" @@ -38,6 +39,8 @@ SymbolPackage :: struct { methods: map[Method][dynamic]Symbol, imports: [dynamic]string, //Used for references to figure whether the package is even able to reference the symbol proc_group_members: map[string]bool, // Tracks procedure names that are part of proc groups (used by fake methods) + doc: strings.Builder, + comment: strings.Builder, } get_index_unique_string :: proc { @@ -477,6 +480,8 @@ get_or_create_package :: proc(collection: ^SymbolCollection, pkg_name: string) - pkg.methods = make(map[Method][dynamic]Symbol, 100, collection.allocator) pkg.objc_structs = make(map[string]ObjcStruct, 5, collection.allocator) pkg.proc_group_members = make(map[string]bool, 10, collection.allocator) + pkg.doc = strings.builder_make(collection.allocator) + pkg.comment = strings.builder_make(collection.allocator) } return pkg } @@ -662,13 +667,62 @@ collect_imports :: proc(collection: ^SymbolCollection, file: ast.File, directory } +@(private = "file") +get_symbol_package_name :: proc( + collection: ^SymbolCollection, + directory: string, + uri: string, + treat_as_builtin := false, +) -> string { + if treat_as_builtin || strings.contains(uri, "builtin.odin") { + return "$builtin" + } + + if strings.contains(uri, "intrinsics.odin") { + intrinsics_path, _ := filepath.join( + elems = {common.config.collections["base"], "/intrinsics"}, + allocator = context.temp_allocator, + ) + intrinsics_path, _ = filepath.replace_path_separators(intrinsics_path, '/', context.temp_allocator) + return get_index_unique_string(collection, intrinsics_path) + } + + return get_index_unique_string(collection, directory) +} + +@(private = "file") +get_package_decl_doc_comment :: proc(file: ast.File, allocator := context.temp_allocator) -> (string, string) { + if file.pkg_decl != nil { + docs := get_comment(file.pkg_decl.docs, allocator = allocator) + comment := get_comment(file.pkg_decl.comment, allocator = allocator) + return docs, comment + } + return "", "" +} + +@(private = "file") +write_doc_string :: proc(sb: ^strings.Builder, doc: string) { + if doc != "" { + if strings.builder_len(sb^) > 0 { + fmt.sbprintf(sb, "\n%s", doc) + } else { + strings.write_string(sb, doc) + } + } +} collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: string) -> common.Error { - forward, _ := filepath.to_slash(file.fullpath, context.temp_allocator) + forward, _ := filepath.replace_path_separators(file.fullpath, '/', context.temp_allocator) directory := path.dir(forward, context.temp_allocator) package_map := get_package_mapping(file, collection.config, directory) exprs := collect_globals(file) + file_pkg_name := get_symbol_package_name(collection, directory, uri) + file_pkg := get_or_create_package(collection, file_pkg_name) + doc, comment := get_package_decl_doc_comment(file, collection.allocator) + write_doc_string(&file_pkg.doc, doc) + write_doc_string(&file_pkg.comment, comment) + for expr in exprs { symbol: Symbol @@ -694,18 +748,7 @@ collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: stri } // Compute pkg early so it's available inside the switch - if expr.builtin || strings.contains(uri, "builtin.odin") { - symbol.pkg = "$builtin" - } else if 
strings.contains(uri, "intrinsics.odin") { - intrinsics_path := filepath.join( - elems = {common.config.collections["base"], "/intrinsics"}, - allocator = context.temp_allocator, - ) - intrinsics_path, _ = filepath.to_slash(intrinsics_path, context.temp_allocator) - symbol.pkg = get_index_unique_string(collection, intrinsics_path) - } else { - symbol.pkg = get_index_unique_string(collection, directory) - } + symbol.pkg = get_symbol_package_name(collection, directory, uri, expr.builtin) #partial switch v in col_expr.derived { case ^ast.Matrix_Type: @@ -899,17 +942,7 @@ collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: stri symbol.flags |= {.Mutable} } - pkg: ^SymbolPackage - ok: bool - - if pkg, ok = &collection.packages[symbol.pkg]; !ok { - collection.packages[symbol.pkg] = {} - pkg = &collection.packages[symbol.pkg] - pkg.symbols = make(map[string]Symbol, 100, collection.allocator) - pkg.methods = make(map[Method][dynamic]Symbol, 100, collection.allocator) - pkg.objc_structs = make(map[string]ObjcStruct, 5, collection.allocator) - pkg.proc_group_members = make(map[string]bool, 10, collection.allocator) - } + pkg := get_or_create_package(collection, symbol.pkg) if .ObjC in symbol.flags { collect_objc(collection, expr.attributes, symbol) @@ -942,19 +975,7 @@ collect_symbols :: proc(collection: ^SymbolCollection, file: ast.File, uri: stri collect_fake_methods :: proc(collection: ^SymbolCollection, exprs: []GlobalExpr, directory: string, uri: string) { for expr in exprs { // Determine the package name (same logic as in collect_symbols) - pkg_name: string - if expr.builtin || strings.contains(uri, "builtin.odin") { - pkg_name = "$builtin" - } else if strings.contains(uri, "intrinsics.odin") { - intrinsics_path := filepath.join( - elems = {common.config.collections["base"], "/intrinsics"}, - allocator = context.temp_allocator, - ) - intrinsics_path, _ = filepath.to_slash(intrinsics_path, context.temp_allocator) - pkg_name = get_index_unique_string(collection, intrinsics_path) - } else { - pkg_name = get_index_unique_string(collection, directory) - } + pkg_name := get_symbol_package_name(collection, directory, uri, expr.builtin) pkg, ok := &collection.packages[pkg_name] if !ok { diff --git a/src/server/completion.odin b/src/server/completion.odin index a7b1720..31d75ca 100644 --- a/src/server/completion.odin +++ b/src/server/completion.odin @@ -966,9 +966,17 @@ get_selector_completion :: proc( case SymbolPackageValue: is_incomplete = true - pkg := selector.pkg + packages := make([dynamic]string, context.temp_allocator) + if is_builtin_pkg(selector.pkg) { + append(&packages, "$builtin") + for built in indexer.builtin_packages { + append(&packages, built) + } + } else { + append(&packages, selector.pkg) + } - if searched, ok := fuzzy_search(field, {pkg}, ast_context.fullpath); ok { + if searched, ok := fuzzy_search(field, packages[:], ast_context.fullpath); ok { for search in searched { symbol := search.symbol @@ -1804,6 +1812,12 @@ get_identifier_completion :: proc( symbol := Symbol { name = pkg.base, type = .Package, + pkg = pkg.name, + value = SymbolPackageValue{}, + } + try_build_package(symbol.pkg) + if resolved, ok := resolve_symbol_return(ast_context, symbol); ok { + symbol = resolved } if score, ok := common.fuzzy_match(matcher, symbol.name); ok == 1 { @@ -1864,7 +1878,7 @@ get_package_completion :: proc( c := without_quotes[0:colon_index] if colon_index + 1 < len(without_quotes) { - absolute_path = filepath.join( + absolute_path, _ = filepath.join( elems = { 
config.collections[c], filepath.dir(without_quotes[colon_index + 1:], context.temp_allocator), @@ -1877,7 +1891,7 @@ get_package_completion :: proc( } else { import_file_dir := filepath.dir(position_context.import_stmt.pos.file, context.temp_allocator) import_dir := filepath.dir(without_quotes, context.temp_allocator) - absolute_path = filepath.join(elems = {import_file_dir, import_dir}, allocator = context.temp_allocator) + absolute_path, _ = filepath.join(elems = {import_file_dir, import_dir}, allocator = context.temp_allocator) } if !strings.contains(position_context.import_stmt.fullpath, "/") && @@ -1927,7 +1941,7 @@ search_for_packages :: proc(fullpath: string) -> []string { if files, err := os.read_dir(fh, 0, context.temp_allocator); err == 0 { for file in files { - if file.is_dir { + if file.type == .Directory { append(&packages, file.fullpath) } } @@ -1957,18 +1971,38 @@ get_qualified_union_case_name :: proc( ast_context: ^AstContext, position_context: ^DocumentPositionContext, ) -> string { - if symbol.pkg == ast_context.document_package { - return fmt.aprintf("%v%v", repeat("^", symbol.pointers, context.temp_allocator), symbol.name) - } else { - return fmt.aprintf( - "%v%v.%v", - repeat("^", symbol.pointers, context.temp_allocator), - get_symbol_pkg_name(ast_context, symbol), - symbol.name, - ) + sb := strings.builder_make(context.temp_allocator) + pointer_prefix := repeat("^", symbol.pointers, ast_context.allocator) + strings.write_string(&sb, pointer_prefix) + if symbol.pkg != ast_context.document_package { + strings.write_string(&sb, get_symbol_pkg_name(ast_context, symbol)) + strings.write_string(&sb, ".") + } + strings.write_string(&sb, symbol.name) + #partial switch v in symbol.value { + case SymbolUnionValue: + write_poly_names(&sb, v.poly_names) + case SymbolStructValue: + write_poly_names(&sb, v.poly_names) + } + + return strings.to_string(sb) +} + +write_poly_names :: proc(sb: ^strings.Builder, poly_names: []string) { + if len(poly_names) > 0 { + strings.write_string(sb, "(") + for name, i in poly_names { + strings.write_string(sb, name) + if i != len(poly_names) - 1 { + strings.write_string(sb, ", ") + } + } + strings.write_string(sb, ")") } } + get_type_switch_completion :: proc( ast_context: ^AstContext, position_context: ^DocumentPositionContext, diff --git a/src/server/definition.odin b/src/server/definition.odin index 869816c..7ff32d8 100644 --- a/src/server/definition.odin +++ b/src/server/definition.odin @@ -2,17 +2,9 @@ package server import "core:fmt" import "core:log" -import "core:mem" import "core:odin/ast" -import "core:odin/parser" import "core:odin/tokenizer" -import "core:os" import "core:path/filepath" -import path "core:path/slashpath" -import "core:slice" -import "core:sort" -import "core:strconv" -import "core:strings" import "src:common" diff --git a/src/server/documents.odin b/src/server/documents.odin index ed1fb52..3ac0263 100644 --- a/src/server/documents.odin +++ b/src/server/documents.odin @@ -4,12 +4,10 @@ import "base:intrinsics" import "core:fmt" import "core:log" -import "core:mem" import "core:mem/virtual" import "core:odin/ast" import "core:odin/parser" import "core:odin/tokenizer" -import "core:os" import "core:path/filepath" import path "core:path/slashpath" import "core:strings" @@ -166,7 +164,7 @@ document_setup :: proc(document: ^Document) { //Right now not all clients return the case correct windows path, and that causes issues with indexing, so we ensure that it's case correct. 
when ODIN_OS == .Windows { package_name := path.dir(document.uri.path, context.temp_allocator) - forward, _ := filepath.to_slash(common.get_case_sensitive_path(package_name), context.temp_allocator) + forward, _ := filepath.replace_path_separators(common.get_case_sensitive_path(package_name), '/', context.temp_allocator) if forward == "" { document.package_name = package_name } else { @@ -181,9 +179,9 @@ document_setup :: proc(document: ^Document) { fullpath: string if correct == "" { //This is basically here to handle the tests where the physical file doesn't actual exist. - document.fullpath, _ = filepath.to_slash(document.uri.path) + document.fullpath, _ = filepath.replace_path_separators(document.uri.path, '/', context.temp_allocator) } else { - document.fullpath, _ = filepath.to_slash(correct) + document.fullpath, _ = filepath.replace_path_separators(correct, '/', context.temp_allocator) } } else { document.fullpath = document.uri.path diff --git a/src/server/file_resolve.odin b/src/server/file_resolve.odin index f381aac..c4b2467 100644 --- a/src/server/file_resolve.odin +++ b/src/server/file_resolve.odin @@ -51,10 +51,6 @@ resolve_ranged_file :: proc( margin := 20 for decl in document.ast.decls { - if _, is_value := decl.derived.(^ast.Value_Decl); !is_value { - continue - } - //Look for declarations that overlap with range if range.start.line - margin <= decl.end.line && decl.pos.line <= range.end.line + margin { resolve_decl(&position_context, &ast_context, document, decl, &symbols, .None, allocator) @@ -88,10 +84,6 @@ resolve_entire_file :: proc( symbols := make(map[uintptr]SymbolAndNode, 10000, allocator) for decl in document.ast.decls { - if _, is_value := decl.derived.(^ast.Value_Decl); !is_value { - continue - } - resolve_decl(&position_context, &ast_context, document, decl, &symbols, flag, allocator) clear(&ast_context.locals) } diff --git a/src/server/format.odin b/src/server/format.odin index 9f30c49..6a95bed 100644 --- a/src/server/format.odin +++ b/src/server/format.odin @@ -5,8 +5,6 @@ import "src:common" import "src:odin/format" import "src:odin/printer" -import "core:log" - FormattingOptions :: struct { tabSize: uint, insertSpaces: bool, //tabs or spaces diff --git a/src/server/generics.odin b/src/server/generics.odin index 8aa111f..347f484 100644 --- a/src/server/generics.odin +++ b/src/server/generics.odin @@ -69,42 +69,37 @@ resolve_poly :: proc( } } + return resolve_poly_specialization(ast_context, call_node, call_symbol, specialization, poly_map) +} + +resolve_poly_specialization :: proc( + ast_context: ^AstContext, + call_node: ^ast.Expr, + call_symbol: Symbol, + specialization: ^ast.Expr, + poly_map: ^map[string]^ast.Expr, +) -> bool { + if call_node == nil || specialization == nil { + return false + } + #partial switch p in specialization.derived { case ^ast.Matrix_Type: if call_matrix, ok := call_node.derived.(^ast.Matrix_Type); ok { found := false - if poly_type, ok := p.row_count.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_matrix.row_count, poly_map) - } - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_matrix.row_count, call_symbol, p.row_count, poly_map) - } - found |= true + if expr_contains_poly(p.row_count) { + found |= resolve_poly_expression(ast_context, call_matrix.row_count, p.row_count, poly_map) } - if poly_type, ok := p.column_count.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, 
call_matrix.column_count, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_matrix.column_count, call_symbol, p.column_count, poly_map) - } - found |= true + if expr_contains_poly(p.column_count) { + found |= resolve_poly_expression(ast_context, call_matrix.column_count, p.column_count, poly_map) } - if poly_type, ok := p.elem.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_matrix.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_matrix.elem, call_symbol, p.elem, poly_map) - } - found |= true + if expr_contains_poly(p.elem) { + found |= resolve_poly_expression(ast_context, call_matrix.elem, p.elem, poly_map) } + return found } case ^ast.Call_Expr: @@ -143,15 +138,8 @@ resolve_poly :: proc( } } - if poly_type, ok := p.elem.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_array.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_array.elem, call_symbol, p.elem, poly_map) - } - return true + if expr_contains_poly(p.elem) { + return resolve_poly_expression(ast_context, call_array.elem, p.elem, poly_map) } } case ^ast.Array_Type: @@ -172,114 +160,54 @@ resolve_poly :: proc( } } - if poly_type, ok := p.elem.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_array.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_array.elem, call_symbol, p.elem, poly_map) - } - found |= true + if expr_contains_poly(p.elem) { + found |= resolve_poly_expression(ast_context, call_array.elem, p.elem, poly_map) } - if p.len != nil { - if poly_type, ok := p.len.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_array.len, poly_map) - } - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_array.len, call_symbol, p.len, poly_map) - } - found |= true - } + if p.len != nil && expr_contains_poly(p.len) { + found |= resolve_poly_expression(ast_context, call_array.len, p.len, poly_map) } return found } case ^ast.Ellipsis: if call_array, ok := call_node.derived.(^ast.Array_Type); ok { - found := false - if array_is_soa(call_array^) { return false } - if poly_type, ok := p.expr.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_array.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_array.elem, call_symbol, p.expr, poly_map) - } - found |= true + if expr_contains_poly(p.expr) { + return resolve_poly_expression(ast_context, call_array.elem, p.expr, poly_map) } - return found } case ^ast.Map_Type: if call_map, ok := call_node.derived.(^ast.Map_Type); ok { found := false - if poly_type, ok := p.key.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_map.key, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_map.key, call_symbol, p.key, poly_map) - } - found |= true + if expr_contains_poly(p.key) { + found |= resolve_poly_expression(ast_context, call_map.key, p.key, poly_map) } - if poly_type, ok := p.value.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_map.value, poly_map) - } - - if 
poly_type.specialization != nil { - return resolve_poly(ast_context, call_map.value, call_symbol, p.value, poly_map) - } - found |= true + if expr_contains_poly(p.value) { + found |= resolve_poly_expression(ast_context, call_map.value, p.value, poly_map) } return found } case ^ast.Multi_Pointer_Type: if call_pointer, ok := call_node.derived.(^ast.Multi_Pointer_Type); ok { - if poly_type, ok := p.elem.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_pointer.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_pointer.elem, call_symbol, p.elem, poly_map) - } - return true + if expr_contains_poly(p.elem) { + return resolve_poly_expression(ast_context, call_pointer.elem, p.elem, poly_map) } } case ^ast.Pointer_Type: if call_pointer, ok := call_node.derived.(^ast.Pointer_Type); ok { - if poly_type, ok := p.elem.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, call_pointer.elem, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, call_pointer.elem, call_symbol, p.elem, poly_map) - } - return true + if expr_contains_poly(p.elem) { + return resolve_poly_expression(ast_context, call_pointer.elem, p.elem, poly_map) } } case ^ast.Comp_Lit: if comp_lit, ok := call_node.derived.(^ast.Comp_Lit); ok { - if poly_type, ok := p.type.derived.(^ast.Poly_Type); ok { - if ident, ok := unwrap_ident(poly_type.type); ok { - save_poly_map(ident, comp_lit.type, poly_map) - } - - if poly_type.specialization != nil { - return resolve_poly(ast_context, comp_lit.type, call_symbol, p.type, poly_map) - } - return true + if expr_contains_poly(p.type) { + return resolve_poly_expression(ast_context, comp_lit.type, p.type, poly_map) } } case ^ast.Struct_Type, ^ast.Proc_Type: @@ -292,6 +220,27 @@ resolve_poly :: proc( return false } +resolve_poly_expression :: proc( + ast_context: ^AstContext, + call_node: ^ast.Expr, + poly_node: ^ast.Expr, + poly_map: ^map[string]^ast.Expr, +) -> bool { + if poly_type, ok := poly_node.derived.(^ast.Poly_Type); ok { + if ident, ok := unwrap_ident(poly_type.type); ok { + save_poly_map(ident, call_node, poly_map) + } + + if poly_type.specialization == nil { + return true + } + } + + call_symbol := Symbol{} + internal_resolve_type_expression(ast_context, call_node, &call_symbol) + return resolve_poly(ast_context, call_node, call_symbol, poly_node, poly_map) +} + is_generic_type_recursive :: proc(expr: ^ast.Expr, name: string) -> bool { Data :: struct { name: string, @@ -503,6 +452,18 @@ resolve_generic_function_ast :: proc( return resolve_generic_function_symbol(ast_context, params, results, proc_lit.inlining, proc_symbol) } +get_proc_return_value_count :: proc(fields: []^ast.Field) -> int { + total := 0 + for field in fields { + if len(field.names) == 0 { + total += 1 + } else { + total += len(field.names) + } + } + + return total +} resolve_generic_function_symbol :: proc( ast_context: ^AstContext, @@ -524,6 +485,8 @@ resolve_generic_function_symbol :: proc( i := 0 count_required_params := 0 + // Total number of args passed in the call when expanded to include functions that may return multiple values + call_arg_count := 0 for param in params { if param.default_value == nil { @@ -560,6 +523,7 @@ resolve_generic_function_symbol :: proc( //If we have a function call, we should instead look at the return value: bar(foo(123)) if symbol_value, ok := symbol.value.(SymbolProcedureValue); ok && 
len(symbol_value.return_types) > 0 { + call_arg_count += get_proc_return_value_count(symbol_value.return_types) if _, ok := call_expr.args[i].derived.(^ast.Call_Expr); ok { if symbol_value.return_types[0].type != nil { if symbol, ok = resolve_type_expression(ast_context, symbol_value.return_types[0].type); @@ -575,6 +539,8 @@ resolve_generic_function_symbol :: proc( } } } + } else { + call_arg_count += 1 } // We set the offset so we can find it as a local if it's based on the type of a local var @@ -600,7 +566,7 @@ resolve_generic_function_symbol :: proc( find_and_replace_poly_type(v, &poly_map) } - if count_required_params > len(call_expr.args) || count_required_params == 0 || len(call_expr.args) == 0 { + if count_required_params > call_arg_count || count_required_params == 0 || call_arg_count == 0 { return {}, false } @@ -894,7 +860,7 @@ resolve_poly_union :: proc(ast_context: ^AstContext, poly_params: ^ast.Field_Lis for arg, i in call_expr.args { if ident, ok := arg.derived.(^ast.Ident); ok { if expr, ok := poly_map[ident.name]; ok { - symbol_value.types[i] = expr + call_expr.args[i] = expr } } } diff --git a/src/server/hover.odin b/src/server/hover.odin index 9b42f7e..991ea1b 100644 --- a/src/server/hover.odin +++ b/src/server/hover.odin @@ -55,10 +55,45 @@ get_hover_information :: proc(document: ^Document, position: common.Position) -> get_locals(document.ast, position_context.function, &ast_context, &position_context) } - if position_context.import_stmt != nil { + if position_context.import_stmt != nil && position_in_node(position_context.import_stmt, position_context.position) { + for imp in document.imports { + if imp.original != position_context.import_stmt.fullpath { + continue + } + + symbol := Symbol { + name = imp.base, + type = .Package, + pkg = imp.name, + value = SymbolPackageValue{}, + } + try_build_package(symbol.pkg) + if symbol, ok = resolve_symbol_return(&ast_context, symbol); ok { + hover.range = common.get_token_range(document.ast.pkg_decl, ast_context.file.src) + hover.contents = write_hover_content(&ast_context, symbol) + return hover, true, true + } + } + return {}, false, true } + if document.ast.pkg_decl != nil && position_in_node(document.ast.pkg_decl, position_context.position) { + symbol := Symbol { + name = document.ast.pkg_name, + type = .Package, + pkg = ast_context.document_package, + value = SymbolPackageValue{}, + } + try_build_package(symbol.pkg) + if symbol, ok = resolve_symbol_return(&ast_context, symbol); ok { + hover.range = common.get_token_range(document.ast.pkg_decl, ast_context.file.src) + hover.contents = write_hover_content(&ast_context, symbol) + return hover, true, true + } + + } + if position_context.type_cast != nil && !position_in_node(position_context.type_cast.type, position_context.position) && !position_in_node(position_context.type_cast.expr, position_context.position) { // check that we're actually on the 'cast' word diff --git a/src/server/indexer.odin b/src/server/indexer.odin index 3337b3b..cbf1bba 100644 --- a/src/server/indexer.odin +++ b/src/server/indexer.odin @@ -42,11 +42,38 @@ should_skip_private_symbol :: proc(symbol: Symbol, current_pkg, current_file: st return false } +is_builtin_pkg :: proc(pkg: string) -> bool { + return strings.equal_fold(pkg, "$builtin") || strings.has_suffix(pkg, "/builtin") +} + +lookup_builtin_symbol :: proc(name: string, current_file: string) -> (Symbol, bool) { + if symbol, ok := lookup_symbol(name, "$builtin", current_file); ok { + return symbol, true + } + + for built in 
indexer.builtin_packages { + if symbol, ok := lookup_symbol(name, built, current_file); ok { + return symbol, true + } + } + + return {}, false +} + lookup :: proc(name: string, pkg: string, current_file: string, loc := #caller_location) -> (Symbol, bool) { if name == "" { return {}, false } + if is_builtin_pkg(pkg) { + return lookup_builtin_symbol(name, current_file) + } + + return lookup_symbol(name, pkg, current_file) +} + +@(private = "file") +lookup_symbol ::proc(name: string, pkg: string, current_file: string) -> (Symbol, bool) { if symbol, ok := memory_index_lookup(&indexer.index, name, pkg); ok { current_pkg := get_package_from_filepath(current_file) if should_skip_private_symbol(symbol, current_pkg, current_file) { diff --git a/src/server/references.odin b/src/server/references.odin index ee49c4e..aeebd43 100644 --- a/src/server/references.odin +++ b/src/server/references.odin @@ -16,10 +16,10 @@ import "src:common" fullpaths: [dynamic]string -walk_directories :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr) -> (err: os.Error, skip_dir: bool) { +walk_directories :: proc(info: os.File_Info, in_err: os.Error, user_data: rawptr) -> (err: os.Error, skip_dir: bool) { document := cast(^Document)user_data - if info.is_dir { + if info.type == .Directory { return nil, false } @@ -28,7 +28,7 @@ walk_directories :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr } if strings.contains(info.name, ".odin") { - slash_path, _ := filepath.to_slash(info.fullpath, context.temp_allocator) + slash_path, _ := filepath.replace_path_separators(info.fullpath, '/', context.temp_allocator) if slash_path != document.fullpath { append(&fullpaths, strings.clone(info.fullpath, context.temp_allocator)) } @@ -277,7 +277,24 @@ resolve_references :: proc( when !ODIN_TEST { for workspace in common.config.workspace_folders { uri, _ := common.parse_uri(workspace.uri, context.temp_allocator) - filepath.walk(uri.path, walk_directories, document) + w := os.walker_create(uri.path) + defer os.walker_destroy(&w) + for info in os.walker_walk(&w) { + if info.type == .Directory { + continue + } + + if info.fullpath == "" { + continue + } + + if strings.contains(info.name, ".odin") { + slash_path, _ := filepath.replace_path_separators(info.fullpath, '/', context.temp_allocator) + if slash_path != document.fullpath { + append(&fullpaths, strings.clone(info.fullpath, context.temp_allocator)) + } + } + } } } @@ -297,12 +314,12 @@ resolve_references :: proc( for fullpath in fullpaths { dir := filepath.dir(fullpath) base := filepath.base(dir) - forward_dir, _ := filepath.to_slash(dir) + forward_dir, _ := filepath.replace_path_separators(dir, '/', context.allocator) - data, ok := os.read_entire_file(fullpath, context.allocator) + data, err := os.read_entire_file(fullpath, context.allocator) - if !ok { - log.errorf("failed to read entire file for indexing %v", fullpath) + if err != nil { + log.errorf("failed to read entire file for indexing %v: %v", fullpath, err) continue } @@ -328,7 +345,7 @@ resolve_references :: proc( pkg = pkg, } - ok = parser.parse_file(&p, &file) + ok := parser.parse_file(&p, &file) if !ok { if !strings.contains(fullpath, "builtin.odin") && !strings.contains(fullpath, "intrinsics.odin") { diff --git a/src/server/requests.odin b/src/server/requests.odin index 64142ed..3aec455 100644 --- a/src/server/requests.odin +++ b/src/server/requests.odin @@ -7,9 +7,6 @@ import "base:runtime" import "core:encoding/json" import "core:fmt" import "core:log" -import "core:mem" -import 
"core:odin/ast" -import "core:odin/parser" import "core:os" import "core:path/filepath" import path "core:path/slashpath" @@ -17,7 +14,6 @@ import "core:slice" import "core:strconv" import "core:strings" import "core:sync" -import "core:thread" import "core:time" import "src:common" @@ -218,7 +214,12 @@ read_and_parse_body :: proc(reader: ^Reader, header: Header) -> (json.Value, boo return value, true } -call_map: map[string]proc(_: json.Value, _: RequestId, _: ^common.Config, _: ^Writer) -> common.Error = { +call_map: map[string]proc( + _: json.Value, + _: RequestId, + _: ^common.Config, + _: ^Writer, +) -> common.Error = { "initialize" = request_initialize, "initialized" = request_initialized, "shutdown" = request_shutdown, @@ -278,6 +279,7 @@ consume_requests :: proc(config: ^common.Config, writer: ^Writer) -> bool { ordered_remove(&requests, delete_index) } } + clear(&deletings) for request in requests { append(&temp_requests, request) @@ -329,7 +331,10 @@ call :: proc(value: json.Value, id: RequestId, writer: ^Writer, config: ^common. if !ok { log.errorf("Failed to find method: %#v", root) - response := make_response_message_error(id = id, error = ResponseError{code = .MethodNotFound, message = ""}) + response := make_response_message_error( + id = id, + error = ResponseError{code = .MethodNotFound, message = ""}, + ) send_error(response, writer) return } @@ -347,7 +352,10 @@ call :: proc(value: json.Value, id: RequestId, writer: ^Writer, config: ^common. } else { err := fn(root["params"], id, config, writer) if err != .None { - response := make_response_message_error(id = id, error = ResponseError{code = err, message = ""}) + response := make_response_message_error( + id = id, + error = ResponseError{code = err, message = ""}, + ) send_error(response, writer) } } @@ -356,13 +364,20 @@ call :: proc(value: json.Value, id: RequestId, writer: ^Writer, config: ^common. 
 	//log.errorf("time duration %v for %v", time.duration_milliseconds(diff), method)
 }
 
-read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfig, uri: common.Uri) {
-	config.disable_parser_errors = ols_config.disable_parser_errors.(bool) or_else config.disable_parser_errors
+read_ols_initialize_options :: proc(
+	config: ^common.Config,
+	ols_config: OlsConfig,
+	uri: common.Uri,
+) {
+	config.disable_parser_errors =
+		ols_config.disable_parser_errors.(bool) or_else config.disable_parser_errors
 	config.thread_count = ols_config.thread_pool_count.(int) or_else config.thread_count
-	config.enable_document_symbols = ols_config.enable_document_symbols.(bool) or_else config.enable_document_symbols
+	config.enable_document_symbols =
+		ols_config.enable_document_symbols.(bool) or_else config.enable_document_symbols
 	config.enable_format = ols_config.enable_format.(bool) or_else config.enable_format
 	config.enable_hover = ols_config.enable_hover.(bool) or_else config.enable_hover
-	config.enable_semantic_tokens = ols_config.enable_semantic_tokens.(bool) or_else config.enable_semantic_tokens
+	config.enable_semantic_tokens =
+		ols_config.enable_semantic_tokens.(bool) or_else config.enable_semantic_tokens
 	config.enable_unused_imports_reporting =
 		ols_config.enable_unused_imports_reporting.(bool) or_else config.enable_unused_imports_reporting
 	config.enable_procedure_context =
@@ -373,7 +388,8 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 		ols_config.enable_document_highlights.(bool) or_else config.enable_document_highlights
 	config.enable_completion_matching =
 		ols_config.enable_completion_matching.(bool) or_else config.enable_completion_matching
-	config.enable_document_links = ols_config.enable_document_links.(bool) or_else config.enable_document_links
+	config.enable_document_links =
+		ols_config.enable_document_links.(bool) or_else config.enable_document_links
 	config.enable_comp_lit_signature_help =
 		ols_config.enable_comp_lit_signature_help.(bool) or_else config.enable_comp_lit_signature_help
 	config.enable_comp_lit_signature_help_use_docs =
@@ -384,7 +400,8 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 	config.enable_procedure_snippet =
 		ols_config.enable_procedure_snippet.(bool) or_else config.enable_procedure_snippet
 
-	config.enable_auto_import = ols_config.enable_auto_import.(bool) or_else config.enable_auto_import
+	config.enable_auto_import =
+		ols_config.enable_auto_import.(bool) or_else config.enable_auto_import
 
 	config.enable_checker_only_saved =
 		ols_config.enable_checker_only_saved.(bool) or_else config.enable_checker_only_saved
@@ -400,7 +417,10 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 	}
 
 	if ols_config.odin_root_override != "" {
-		config.odin_root_override = strings.clone(ols_config.odin_root_override, context.temp_allocator)
+		config.odin_root_override = strings.clone(
+			ols_config.odin_root_override,
+			context.temp_allocator,
+		)
 
 		allocated: bool
 		config.odin_root_override, allocated = common.resolve_home_dir(config.odin_root_override)
@@ -453,7 +473,8 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 	config.enable_inlay_hints_implicit_return =
 		ols_config.enable_inlay_hints_implicit_return.(bool) or_else config.enable_inlay_hints_implicit_return
 
-	config.enable_fake_method = ols_config.enable_fake_methods.(bool) or_else config.enable_fake_method
+	config.enable_fake_method =
+		ols_config.enable_fake_methods.(bool) or_else config.enable_fake_method
 	config.enable_overload_resolution =
 		ols_config.enable_overload_resolution.(bool) or_else config.enable_overload_resolution
@@ -475,7 +496,7 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 
 	// Apply custom collections.
 	for it in ols_config.collections {
-		forward_path, _ := filepath.to_slash(it.path, context.temp_allocator)
+		forward_path, _ := filepath.replace_path_separators(it.path, '/', context.temp_allocator)
 
 		forward_path = common.resolve_home_dir(forward_path, context.temp_allocator)
 
@@ -483,16 +504,21 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 
 		when ODIN_OS == .Windows {
 			if filepath.is_abs(it.path) {
-				final_path, _ = filepath.to_slash(
+				final_path, _ = filepath.replace_path_separators(
 					common.get_case_sensitive_path(forward_path, context.temp_allocator),
+					'/',
 					context.temp_allocator,
 				)
 			} else {
-				final_path, _ = filepath.to_slash(
+				final_path, _ = filepath.replace_path_separators(
 					common.get_case_sensitive_path(
-						path.join(elems = {uri.path, forward_path}, allocator = context.temp_allocator),
+						path.join(
+							elems = {uri.path, forward_path},
+							allocator = context.temp_allocator,
+						),
 						context.temp_allocator,
 					),
+					'/',
 					context.temp_allocator,
 				)
 			}
@@ -506,13 +532,18 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 			}
 		}
 
-		if abs_final_path, ok := filepath.abs(final_path); ok {
-			slashed_path, _ := filepath.to_slash(abs_final_path, context.temp_allocator)
+		abs_final_path, err := filepath.abs(final_path, context.temp_allocator)
+		if err != nil {
+			log.errorf("Failed to find absolute address of collection: %v", final_path, err)
+			config.collections[strings.clone(it.name)] = strings.clone(final_path)
+		} else {
+			slashed_path, _ := filepath.replace_path_separators(
+				abs_final_path,
+				'/',
+				context.temp_allocator,
+			)
 			config.collections[strings.clone(it.name)] = strings.clone(slashed_path)
-		} else {
-			log.errorf("Failed to find absolute address of collection: %v", final_path)
-			config.collections[strings.clone(it.name)] = strings.clone(final_path)
 		}
 	}
 
@@ -556,7 +587,8 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 	}
 
 	if odin_core_env != "" {
-		if abs_core_env, ok := filepath.abs(odin_core_env, context.temp_allocator); ok {
+		if abs_core_env, err := filepath.abs(odin_core_env, context.temp_allocator);
+		   err == nil {
 			odin_core_env = abs_core_env
 		}
 	}
@@ -567,7 +599,11 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 
 	// Insert the default collections if they are not specified in the config.
 	if odin_core_env != "" {
-		forward_path, _ := filepath.to_slash(odin_core_env, context.temp_allocator)
+		forward_path, _ := filepath.replace_path_separators(
+			odin_core_env,
+			'/',
+			context.temp_allocator,
+		)
 
 		// base
 		if "base" not_in config.collections {
@@ -595,7 +631,10 @@ read_ols_initialize_options :: proc(config: ^common.Config, ols_config: OlsConfi
 
 		// shared
 		if "shared" not_in config.collections {
-			shared_path := path.join(elems = {forward_path, "shared"}, allocator = context.allocator)
+			shared_path := path.join(
+				elems = {forward_path, "shared"},
+				allocator = context.allocator,
+			)
 			if os.exists(shared_path) {
 				config.collections[strings.clone("shared")] = shared_path
 			} else {
@@ -665,17 +704,18 @@ request_initialize :: proc(
 	config.enable_auto_import = true
 
 	read_ols_config :: proc(file: string, config: ^common.Config, uri: common.Uri) {
-		if data, ok := os.read_entire_file(file, context.temp_allocator); ok {
-			ols_config: OlsConfig
+		data, err := os.read_entire_file(file, context.temp_allocator)
+		if err != nil {
+			log.warnf("Failed to read/find %v: %v", file, err)
+			return
+		}
+		ols_config: OlsConfig
 
-			err := json.unmarshal(data, &ols_config, allocator = context.temp_allocator)
-			if err == nil {
-				read_ols_initialize_options(config, ols_config, uri)
-			} else {
-				log.errorf("Failed to unmarshal %v: %v", file, err)
-			}
+		json_err := json.unmarshal(data, &ols_config, allocator = context.temp_allocator)
+		if json_err == nil {
+			read_ols_initialize_options(config, ols_config, uri)
 		} else {
-			log.warnf("Failed to read/find %v", file)
+			log.errorf("Failed to unmarshal %v: %v", file, json_err)
 		}
 	}
 
@@ -699,7 +739,10 @@ request_initialize :: proc(
 		read_ols_initialize_options(config, initialize_params.initializationOptions, uri)
 
 		// Apply ols.json config.
-		ols_config_path := path.join(elems = {uri.path, "ols.json"}, allocator = context.temp_allocator)
+		ols_config_path := path.join(
+			elems = {uri.path, "ols.json"},
+			allocator = context.temp_allocator,
+		)
 		read_ols_config(ols_config_path, config, uri)
 	} else {
 		read_ols_initialize_options(config, initialize_params.initializationOptions, {})
@@ -720,7 +763,8 @@ request_initialize :: proc(
 	config.enable_label_details =
 		initialize_params.capabilities.textDocument.completion.completionItem.labelDetailsSupport
 
-	config.enable_snippets &= initialize_params.capabilities.textDocument.completion.completionItem.snippetSupport
+	config.enable_snippets &=
+		initialize_params.capabilities.textDocument.completion.completionItem.snippetSupport
 
 	config.signature_offset_support =
 		initialize_params.capabilities.textDocument.signatureHelp.signatureInformation.parameterInformation.labelOffsetSupport
@@ -729,12 +773,17 @@ request_initialize :: proc(
 	signatureTriggerCharacters := []string{"(", ","}
 	signatureRetriggerCharacters := []string{","}
 
-	semantic_range_support := initialize_params.capabilities.textDocument.semanticTokens.requests.range
+	semantic_range_support :=
+		initialize_params.capabilities.textDocument.semanticTokens.requests.range
 
 	response := make_response_message(
 		params = ResponseInitializeParams {
 			capabilities = ServerCapabilities {
-				textDocumentSync = TextDocumentSyncOptions{openClose = true, change = 2, save = {includeText = true}},
+				textDocumentSync = TextDocumentSyncOptions {
+					openClose = true,
+					change = 2,
+					save = {includeText = true},
+				},
 				renameProvider = RenameOptions{prepareProvider = true},
 				workspaceSymbolProvider = true,
 				referencesProvider = config.enable_references,
@@ -765,7 +814,10 @@ request_initialize :: proc(
 				hoverProvider = config.enable_hover,
 				documentFormattingProvider = config.enable_format,
 				documentLinkProvider = {resolveProvider = false},
-				codeActionProvider = {resolveProvider = false, codeActionKinds = {"refactor.rewrite"}},
+				codeActionProvider = {
+					resolveProvider = false,
+					codeActionKinds = {"refactor.rewrite"},
+				},
 			},
 		},
 		id = id,
@@ -831,7 +883,12 @@ request_initialized :: proc(
 	return .None
 }
 
-request_shutdown :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_shutdown :: proc(
+	params: json.Value,
+	id: RequestId,
+	config: ^common.Config,
+	writer: ^Writer,
+) -> common.Error {
 	response := make_response_message(params = nil, id = id)
 
 	send_response(response, writer)
@@ -946,7 +1003,12 @@ request_completion :: proc(
 	}
 	list: CompletionList
-	list, ok = get_completion_list(document, completition_params.position, completition_params.context_, config)
+	list, ok = get_completion_list(
+		document,
+		completition_params.position,
+		completition_params.context_,
+		config,
+	)
 
 	if !ok {
 		return .InternalError
 	}
@@ -1040,7 +1102,12 @@ request_format_document :: proc(
 	return .None
 }
 
-notification_exit :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+notification_exit :: proc(
+	params: json.Value,
+	id: RequestId,
+	config: ^common.Config,
+	writer: ^Writer,
+) -> common.Error {
 	config.running = false
 	return .None
 }
@@ -1067,7 +1134,12 @@ notification_did_open :: proc(
 
 	defer delete(open_params.textDocument.uri)
 
-	if n := document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer); n != .None {
+	if n := document_open(
+		open_params.textDocument.uri,
+		open_params.textDocument.text,
+		config,
+		writer,
+	); n != .None {
 		return .InternalError
 	}
 
@@ -1166,7 +1238,7 @@ notification_did_save :: proc(
 
 	when ODIN_OS == .Windows {
 		correct := common.get_case_sensitive_path(fullpath, context.temp_allocator)
-		fullpath, _ = filepath.to_slash(correct, context.temp_allocator)
+		fullpath, _ = filepath.replace_path_separators(correct, '/', context.temp_allocator)
 	}
 
 	corrected_uri := common.create_uri(fullpath, context.temp_allocator)
@@ -1303,7 +1375,12 @@ request_document_symbols :: proc(
 	return .None
 }
 
-request_hover :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_hover :: proc(
+	params: json.Value,
+	id: RequestId,
+	config: ^common.Config,
+	writer: ^Writer,
+) -> common.Error {
 	params_object, ok := params.(json.Object)
 
 	if !ok {
@@ -1451,7 +1528,12 @@ request_prepare_rename :: proc(
 	return .None
 }
 
-request_rename :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_rename :: proc(
+	params: json.Value,
+	id: RequestId,
+	config: ^common.Config,
+	writer: ^Writer,
+) -> common.Error {
 	params_object, ok := params.(json.Object)
 
 	if !ok {
@@ -1628,7 +1710,7 @@ notification_did_change_watched_files :: proc(
 			find_all_package_aliases()
 		} else {
 			if uri, ok := common.parse_uri(change.uri, context.temp_allocator); ok {
-				if data, ok := os.read_entire_file(uri.path, context.temp_allocator); ok {
+				if data, err := os.read_entire_file(uri.path, context.temp_allocator); err == nil {
 					index_file(uri, cast(string)data)
 				}
 			}
@@ -1701,6 +1783,11 @@ request_workspace_symbols :: proc(
 	return .None
 }
 
-request_noop :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+request_noop :: proc(
+	params: json.Value,
+	id: RequestId,
+	config: ^common.Config,
+	writer: ^Writer,
+) -> common.Error {
 	return .None
 }
diff --git a/src/server/workspace_symbols.odin b/src/server/workspace_symbols.odin
index 11e7a8a..de346d1 100644
--- a/src/server/workspace_symbols.odin
+++ b/src/server/workspace_symbols.odin
@@ -1,7 +1,6 @@
 package server
 
 import "core:fmt"
-import "core:log"
 import "core:os"
 import "core:path/filepath"
 import "core:strings"
@@ -12,32 +11,13 @@ import "src:common"
 
 dir_blacklist :: []string{"node_modules", ".git"}
 
 WorkspaceCache :: struct {
-	time: time.Time,
-	pkgs: [dynamic]string,
+	time: time.Time,
+	pkgs: [dynamic]string,
 }
 
 @(thread_local, private = "file")
 cache: WorkspaceCache
 
-@(private)
-walk_dir :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr) -> (err: os.Error, skip_dir: bool) {
-	pkgs := cast(^[dynamic]string)user_data
-
-	if info.is_dir {
-		dir, _ := filepath.to_slash(info.fullpath, context.temp_allocator)
-		dir_name := filepath.base(dir)
-
-		for blacklist in dir_blacklist {
-			if blacklist == dir_name {
-				return nil, true
-			}
-		}
-		append(pkgs, dir)
-	}
-
-	return nil, false
-}
-
 get_workspace_symbols :: proc(query: string) -> (workspace_symbols: []WorkspaceSymbol, ok: bool) {
 	if time.since(cache.time) > 20 * time.Second {
 		for pkg in cache.pkgs {
@@ -48,7 +28,25 @@ get_workspace_symbols :: proc(query: string) -> (workspace_symbols: []WorkspaceS
 		uri := common.parse_uri(workspace.uri, context.temp_allocator) or_return
 
 		pkgs := make([dynamic]string, 0, context.temp_allocator)
-		filepath.walk(uri.path, walk_dir, &pkgs)
+		w := os.walker_create(uri.path)
+		defer os.walker_destroy(&w)
+		for info in os.walker_walk(&w) {
+			if info.type == .Directory {
+				dir := strings.clone(info.fullpath, context.temp_allocator)
+				dir_name := filepath.base(dir)
+				found := false
+				for blacklist in dir_blacklist {
+					if blacklist == dir_name {
+						found = true
+						os.walker_skip_dir(&w)
+						break
+					}
+				}
+				if !found {
+					append(&pkgs, dir)
+				}
+			}
+		}
 
 	_pkg: for pkg in pkgs {
 		matches, err := filepath.glob(fmt.tprintf("%v/*.odin", pkg), context.temp_allocator)
@@ -58,7 +56,7 @@
 		}
 
 		for exclude_path in common.config.profile.exclude_path {
-			exclude_forward, _ := filepath.to_slash(exclude_path, context.temp_allocator)
+			exclude_forward, _ := filepath.replace_path_separators(exclude_path, '/', context.temp_allocator)
 
 			if exclude_forward[len(exclude_forward) - 2:] == "**" {
 				lower_pkg := strings.to_lower(pkg)
diff --git a/tests/actions_test.odin b/tests/actions_test.odin
new file mode 100644
index 0000000..935e168
--- /dev/null
+++ b/tests/actions_test.odin
@@ -0,0 +1,23 @@
+package tests
+
+import "core:testing"
+
+import test "src:testing"
+
+@(test)
+action_remove_unsed_import_when_stmt :: proc(t: ^testing.T) {
+	source := test.Source {
+		main = `package test
+		import "core:fm{*}t"
+
+		when true {
+			main :: proc() {
+				_ = fmt.printf
+			}
+		}
+		`,
+		packages = {},
+	}
+
+	test.expect_action(t, &source, {})
+}
diff --git a/tests/completions_test.odin b/tests/completions_test.odin
index 56553c3..bf05f6a 100644
--- a/tests/completions_test.odin
+++ b/tests/completions_test.odin
@@ -5500,3 +5500,31 @@ ast_completion_fake_method_proc_group_single_arg_cursor_position :: proc(t: ^tes
 	// The proc group 'negate' should have cursor AFTER parentheses since no additional args
 	test.expect_completion_edit_text(t, &source, ".", "negate", "methods.negate(n)$0")
 }
+
+@(test)
+ast_completion_package_docs :: proc(t: ^testing.T) {
+	packages := make([dynamic]test.Package, context.temp_allocator)
+
+	append(
+		&packages,
+		test.Package {
+			pkg = "my_package",
+			source = `// Package docs
+			package my_package
+			Foo :: struct{}
+			`,
+		},
+	)
+
+	source := test.Source {
+		main = `package test
+		import "my_package"
+		main :: proc() {
+			my_pack{*}
+		}
+		`,
+		packages = packages[:],
+	}
+
+	test.expect_completion_docs(t, &source, "", {"my_package: package\n---\nPackage docs"})
+}
diff --git a/tests/hover_test.odin b/tests/hover_test.odin
index 02cde4e..c207da1 100644
--- a/tests/hover_test.odin
+++ b/tests/hover_test.odin
@@ -6034,6 +6034,150 @@ ast_hover_constant_unary_expr :: proc(t: ^testing.T) {
 	}
 	test.expect_hover(t, &source, "test.FOO :: ~u32(0)")
 }
+
+@(test)
+ast_hover_union_multiple_poly :: proc(t: ^testing.T) {
+	source := test.Source {
+		main = `package test
+		Foo :: struct($T: typeid) {}
+		Bar :: struct{}
+
+		Bazz :: union($T: typeid) {
+			Foo(T),
+			Bar,
+		}
+
+		main :: proc() {
+			T :: distinct int
+			bazz: Ba{*}zz(T)
+		}
+		`,
+	}
+	test.expect_hover(t, &source, "test.Bazz :: union(T) {\n\tFoo(T),\n\tBar,\n}")
+}
+
+@(test)
+ast_hover_poly_proc_passthrough :: proc(t: ^testing.T) {
+	source := test.Source {
+		main = `package test
+		make :: proc() -> (int, bool) {
+			return 1, true
+		}
+
+		confirm_bool_one :: #force_inline proc(v: $T, ok: $B) -> (T, bool) {
+			return v, bool(ok)
+		}
+
+		main :: proc() {
+			v{*}alue, ok := confirm_bool_one(make())
+		}
+		`,
+	}
+	test.expect_hover(t, &source, "test.value: int")
+}
+
+@(test)
+ast_hover_parapoly_overloaded_proc_with_bitfield :: proc(t: ^testing.T) {
+	source := test.Source {
+		main = `package test
+		Entry :: struct($T, $H: typeid) {
+			handle: H,
+		}
+
+		SmallHandle :: bit_field int {
+			valid: bool | 1,
+			generation: int | 7,
+			index: int | 24,
+		}
+
+		make :: proc {
+			makeEntry,
+		}
+
+		makeEntry :: proc($T: typeid/Entry($D, $H), handle: H) -> (entry: T) {
+			return
+		}
+
+		main :: proc() {
+			e{*}ntry := make(Entry(int, SmallHandle), SmallHandle{})
+		}
+		`,
+	}
+	test.expect_hover(t, &source, "test.entry: test.Entry(int, SmallHandle)")
+}
+
+@(test)
+ast_hover_package_docs :: proc(t: ^testing.T) {
+	packages := make([dynamic]test.Package, context.temp_allocator)
+
+	append(
+		&packages,
+		test.Package {
+			pkg = "my_package",
+			source = `// Package docs
+			package my_package
+			Foo :: struct{}
+			`,
+		},
+	)
+	source := test.Source {
+		main = `package test
+		import "my_package"
+		main :: proc() {
+			foo := my_packa{*}ge.Foo{}
+		}
+		`,
+		packages = packages[:],
+	}
+
+	test.expect_hover(t, &source, "my_package: package\n---\nPackage docs")
+}
+
+@(test)
+ast_hover_import_path_package_docs :: proc(t: ^testing.T) {
+	packages := make([dynamic]test.Package, context.temp_allocator)
+
+	append(
+		&packages,
+		test.Package {
+			pkg = "my_package",
+			source = `// Package docs
+			package my_package
+			`,
+		},
+	)
+	source := test.Source {
+		main = `package test
+		import "my_packa{*}ge"
+		`,
+		packages = packages[:],
+	}
+
+	test.expect_hover(t, &source, "my_package: package\n---\nPackage docs")
+}
+
+@(test)
+ast_hover_proc_overload_generic_array_pointer_types :: proc(t: ^testing.T) {
+	source := test.Source {
+		main = `package test
+		foo_dynamic_array :: proc(array: $A/[dynamic]^$T) {}
+		foo_slice :: proc(array: $A/[]^$T) {}
+
+		foo :: proc{
+			foo_dynamic_array,
+			foo_slice,
+		}
+
+		main :: proc() {
+			array: [dynamic]^int
+			f{*}oo(array)
+		}
+		`,
+	}
+
+	test.expect_hover(t, &source, "test.foo :: proc(array: $A/[dynamic]^$T)")
+}
+
 /* Waiting for odin fix
diff --git a/tools/odinfmt/main.odin b/tools/odinfmt/main.odin
index b44af5c..69ad5cf 100644
--- a/tools/odinfmt/main.odin
+++ b/tools/odinfmt/main.odin
@@ -1,12 +1,9 @@
 package odinfmt
 
-import "core:encoding/json"
 import "core:flags"
 import "core:fmt"
-import "core:io"
 import "core:mem"
 import vmem "core:mem/virtual"
-import "core:odin/tokenizer"
 import "core:os"
 import "core:path/filepath"
 import "core:strings"
@@ -15,36 +12,27 @@ import "src:odin/format"
 import "src:odin/printer"
 
 Args :: struct {
-	write: bool `args:"name=w" usage:"write the new format to file"`,
-	stdin: bool `usage:"formats code from standard input"`,
-	path: string `args:"pos=0" usage:"set the file or directory to format"`,
-	config: string `usage:"path to a config file"`
+	write: bool `args:"name=w" usage:"write the new format to file"`,
+	stdin: bool `usage:"formats code from standard input"`,
+	path: string `args:"pos=0" usage:"set the file or directory to format"`,
+	config: string `usage:"path to a config file"`,
 }
 
-format_file :: proc(filepath: string, config: printer.Config, allocator := context.allocator) -> (string, bool) {
-	if data, ok := os.read_entire_file(filepath, allocator); ok {
+format_file :: proc(
+	filepath: string,
+	config: printer.Config,
+	allocator := context.allocator,
+) -> (
+	string,
+	bool,
+) {
+	if data, err := os.read_entire_file(filepath, allocator); err == nil {
 		return format.format(filepath, string(data), config, {.Optional_Semicolons}, allocator)
 	} else {
 		return "", false
 	}
 }
 
-files: [dynamic]string
-
-walk_files :: proc(info: os.File_Info, in_err: os.Errno, user_data: rawptr) -> (err: os.Error, skip_dir: bool) {
-	if info.is_dir {
-		return nil, false
-	}
-
-	if filepath.ext(info.name) != ".odin" {
-		return nil, false
-	}
-
-	append(&files, strings.clone(info.fullpath))
-
-	return nil, false
-}
-
 main :: proc() {
 	arena: vmem.Arena
 	arena_err := vmem.arena_init_growing(&arena)
@@ -63,7 +51,7 @@ main :: proc() {
 			args.path = "."
 		} else {
 			fmt.fprint(os.stderr, "Missing path to format\n")
-			flags.write_usage(os.stream_from_handle(os.stderr), Args, os.args[0])
+			flags.write_usage(os.to_stream(os.stderr), Args, os.args[0])
 			os.exit(1)
 		}
 	}
@@ -72,14 +60,14 @@ main :: proc() {
 
 	write_failure := false
 
-	watermark : uint = 0
+	watermark: uint = 0
 
-	config: printer.Config
-	if args.config == "" {
-		config = format.find_config_file_or_default(args.path)
-	} else {
-		config = format.read_config_file_from_path_or_default(args.config)
-	}
+	config: printer.Config
+	if args.config == "" {
+		config = format.find_config_file_or_default(args.path)
+	} else {
+		config = format.read_config_file_from_path_or_default(args.config)
+	}
 
 	if args.stdin {
 		data := make([dynamic]byte, arena_allocator)
@@ -93,7 +81,13 @@ main :: proc() {
 			append(&data, ..tmp[:r])
 		}
 
-		source, ok := format.format("<stdin>", string(data[:]), config, {.Optional_Semicolons}, arena_allocator)
+		source, ok := format.format(
+			"<stdin>",
+			string(data[:]),
+			config,
+			{.Optional_Semicolons},
+			arena_allocator,
+		)
 
 		if ok {
 			fmt.println(source)
@@ -108,7 +102,7 @@ main :: proc() {
 		if data, ok := format_file(args.path, config, arena_allocator); ok {
 			os.rename(args.path, backup_path)
 
-			if os.write_entire_file(args.path, transmute([]byte)data) {
+			if err := os.write_entire_file(args.path, transmute([]byte)data); err == nil {
 				os.remove(backup_path)
 			}
 		} else {
@@ -121,7 +115,20 @@ main :: proc() {
 			}
 		}
 	} else if os.is_dir(args.path) {
-		filepath.walk(args.path, walk_files, nil)
+		files: [dynamic]string
+		w := os.walker_create(args.path)
+		defer os.walker_destroy(&w)
+		for info in os.walker_walk(&w) {
+			if info.type == .Directory {
+				continue
+			}
+
+			if filepath.ext(info.name) != ".odin" {
+				continue
+			}
+
+			append(&files, strings.clone(info.fullpath))
+		}
 
 		for file in files {
 			fmt.println(file)
@@ -133,7 +140,7 @@ main :: proc() {
 			if args.write {
 				os.rename(file, backup_path)
 
-				if os.write_entire_file(file, transmute([]byte)data) {
+				if err := os.write_entire_file(file, transmute([]byte)data); err == nil {
 					os.remove(backup_path)
 				}
 			} else {
diff --git a/tools/odinfmt/snapshot/snapshot.odin b/tools/odinfmt/snapshot/snapshot.odin
index 3220845..c51c216 100644
--- a/tools/odinfmt/snapshot/snapshot.odin
+++ b/tools/odinfmt/snapshot/snapshot.odin
@@ -5,14 +5,13 @@ import "core:fmt"
 import "core:os"
 import "core:path/filepath"
 import "core:strings"
-import "core:testing"
 import "core:text/scanner"
 
 import "src:odin/format"
 import "src:odin/printer"
 
 format_file :: proc(filepath: string, allocator := context.allocator) -> (string, bool) {
-	if data, ok := os.read_entire_file(filepath, allocator); ok {
+	if data, err := os.read_entire_file(filepath, allocator); err == nil {
 		config := read_config_file_or_default(filepath)
 		return format.format(filepath, string(data), config, {.Optional_Semicolons}, allocator)
 	} else {
@@ -30,7 +29,7 @@ read_config_file_or_default :: proc(fullpath: string, allocator := context.alloc
 	if (os.exists(configpath)) {
 		json_config := default_style
 
-		if data, ok := os.read_entire_file(configpath, allocator); ok {
+		if data, err := os.read_entire_file(configpath, allocator); err == nil {
 			if json.unmarshal(data, &json_config) == nil {
 				return json_config
 			}
@@ -44,7 +43,7 @@ read_config_file_or_default :: proc(fullpath: string, allocator := context.alloc
 
 snapshot_directory :: proc(directory: string) -> bool {
 	matches, err := filepath.glob(fmt.tprintf("%v/*", directory))
 
-	if err != .None {
+	if err != nil {
 		fmt.eprintf("Error in globbing directory: %v", directory)
 	}
 
@@ -69,7 +68,7 @@ snapshot_file :: proc(path: string) -> bool {
 
 	fmt.printf("Testing snapshot %v", path)
 
-	snapshot_path := filepath.join(
+	snapshot_path, _ := filepath.join(
 		elems = {filepath.dir(path, context.temp_allocator), "/.snapshots", filepath.base(path)},
 		allocator = context.temp_allocator,
 	)
@@ -82,7 +81,7 @@ snapshot_file :: proc(path: string) -> bool {
 	}
 
 	if os.exists(snapshot_path) {
-		if snapshot_data, ok := os.read_entire_file(snapshot_path, context.temp_allocator); ok {
+		if snapshot_data, err := os.read_entire_file(snapshot_path, context.temp_allocator); err == nil {
 			snapshot_scanner := scanner.Scanner{}
 			scanner.init(&snapshot_scanner, string(snapshot_data))
 			formatted_scanner := scanner.Scanner{}
@@ -107,7 +106,7 @@ snapshot_file :: proc(path: string) -> bool {
 
 				if s_ch != f_ch {
 					fmt.eprintf("\nFormatted file was different from snapshot file: %v\n", snapshot_path)
-					os.write_entire_file(fmt.tprintf("%v_failed", snapshot_path), transmute([]u8)formatted)
+					_ = os.write_entire_file(fmt.tprintf("%v_failed", snapshot_path), transmute([]u8)formatted)
 					return false
 				}
 			}
@@ -118,9 +117,8 @@ snapshot_file :: proc(path: string) -> bool {
 		}
 	} else {
 		os.make_directory(filepath.dir(snapshot_path, context.temp_allocator))
-		ok = os.write_entire_file(snapshot_path, transmute([]byte)formatted)
-		if !ok {
-			fmt.eprintf("Failed to write snapshot file %v", snapshot_path)
+		if err := os.write_entire_file(snapshot_path, transmute([]byte)formatted); err != nil {
+			fmt.eprintf("Failed to write snapshot file %v: %v", snapshot_path, err)
 			return false
 		}
 	}
diff --git a/tools/odinfmt/tests.odin b/tools/odinfmt/tests.odin
index 313e33c..0917838 100644
--- a/tools/odinfmt/tests.odin
+++ b/tools/odinfmt/tests.odin
@@ -1,8 +1,6 @@
 package odinfmt_tests
 
-import "core:testing"
 import "core:os"
-import "core:fmt"
 import "core:mem"
 
 import "snapshot"