author	DanielGavin <danielgavin5@hotmail.com>	2020-11-05 20:43:29 +0100
committer	DanielGavin <danielgavin5@hotmail.com>	2020-11-05 20:43:29 +0100
commit	336de49de48c397677f9e9ddb522b004263d3ea6 (patch)
tree	589ce082c8a2855ffdf3ac76207bf8f7b88a2b54 /src
parent	0c9083922da9c20b9c12325276ac361095f9da51 (diff)
started working on the indexer and goto definition
Diffstat (limited to 'src')
-rw-r--r--	src/analysis.odin	132
-rw-r--r--	src/documents.odin	131
-rw-r--r--	src/index.odin	76
-rw-r--r--	src/position.odin	87
-rw-r--r--	src/requests.odin	39
-rw-r--r--	src/types.odin	10
6 files changed, 378 insertions, 97 deletions
diff --git a/src/analysis.odin b/src/analysis.odin
index 3e0fc4c..c56408c 100644
--- a/src/analysis.odin
+++ b/src/analysis.odin
@@ -8,127 +8,101 @@ import "core:log"
import "core:strings"
import "core:path"
-ParserError :: struct {
- message: string,
- line: int,
- column: int,
- file: string,
- offset: int,
-};
-
-StructSymbol :: struct {
-
-};
-
-ProcedureSymbol :: struct {
-
-};
-
-PackageSymbol :: struct {
+DocumentPositionContextType :: enum {
+ GlobalVariable,
+ DottedVariable,
+ Unknown,
};
-Symbol :: union {
- StructSymbol,
- ProcedureSymbol,
- PackageSymbol
-};
-
-DocumentSymbols :: struct {
- file: ast.File,
- globals: map [string] Symbol,
- imports: [] string,
+DocumentPositionContextValue :: union {
+ string,
+ int,
};
DocumentPositionContext :: struct {
- symbol: Symbol,
+ type: DocumentPositionContextType,
+ value: DocumentPositionContextValue,
};
-current_errors: [dynamic] ParserError;
-
-parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
- error := ParserError { line = pos.line, column = pos.column, file = pos.file,
- offset = pos.offset, message = fmt.tprintf(msg, ..args) };
- append(&current_errors, error);
-}
-parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+tokenizer_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
}
-
/*
- Parses and walks through the ast saving all the global symbols for the document. Local symbols are not saved
- because they are determined by the position.
-
- Document is responsible in freeing the DocumentSymbols with free_document_symbols
-
- Returns DocumentSymbols, Errors, file package name, imports processed with correct path directory
+ Figure out what exactly is at the given position and whether it is in a function, struct, etc.
*/
+get_document_position_context :: proc(document: ^Document, position: Position) -> (DocumentPositionContext, bool) {
-parse_document_symbols :: proc(document: ^Document, config: ^Config) -> (DocumentSymbols, [dynamic] ParserError, string, []string, bool) {
+ position_context: DocumentPositionContext;
- symbols: DocumentSymbols;
+ absolute_position, ok := get_absolute_position(position, document.text);
- p := parser.Parser {
- err = parser_error_handler,
- warn = parser_warning_handler,
- };
+ if !ok {
+ return position_context, false;
+ }
- current_errors = make([dynamic] ParserError, context.temp_allocator);
- symbols.file = ast.File {
- fullpath = document.path,
- src = document.text[:document.used_text],
- };
+ //Using the ast is not really viable since this may be broken code
+ t: tokenizer.Tokenizer;
- parser.parse_file(&p, &symbols.file);
+ tokenizer.init(&t, document.text, document.path, tokenizer_error_handler);
- symbols.imports = make([]string, len(symbols.file.imports));
+ stack := make([dynamic] tokenizer.Token, context.temp_allocator);
- for imp, index in symbols.file.imports {
+ current_token: tokenizer.Token;
- //collection specified
- if i := strings.index(imp.fullpath, ":"); i != -1 {
+ /*
+	The idea is to push and pop braces, brackets, etc., and use the final stack to infer the context
+ */
- collection := imp.fullpath[1:i];
- p := imp.fullpath[i+1:len(imp.fullpath)-1];
+ for true {
- dir, ok := config.collections[collection];
+ current_token = tokenizer.scan(&t);
- if !ok {
- continue;
- }
+ #partial switch current_token.kind {
+ case .Open_Paren:
- symbols.imports[index] = path.join(allocator = context.temp_allocator, elems = {dir, p});
+ case .EOF:
+ break;
}
- //relative
- else {
+ //fmt.println(current_token.text);
+ //fmt.println();
+ if current_token.pos.offset+len(current_token.text) >= absolute_position {
+ break;
}
- }
+ }
+ #partial switch current_token.kind {
+ case .Ident:
+ position_context.type = .GlobalVariable;
+ position_context.value = current_token.text;
+ case:
+ position_context.type = .Unknown;
+ }
- return symbols, current_errors, symbols.file.pkg_name, symbols.imports, true;
+ return position_context, true;
}
-free_document_symbols :: proc(symbols: DocumentSymbols) {
-}
+get_definition_location :: proc(document: ^Document, position: Position) -> (Location, bool) {
+ location: Location;
-/*
- Figure out what exactly is at the given position and whether it is in a function, struct, etc.
-*/
-get_document_position_context :: proc(document: ^Document, position: Position) -> DocumentPositionContext {
+ position_context, ok := get_document_position_context(document, position);
+
+ if !ok {
+ return location, false;
+ }
- position_context: DocumentPositionContext;
- return position_context;
-}
+ return location, true;
+}
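
The new get_document_position_context scans tokens up to the cursor position and, for now, classifies a lone identifier as a .GlobalVariable context; get_definition_location still returns an empty Location. Below is a minimal sketch, not part of this commit, of how such a context could later be resolved against the indexer, assuming indexer_get_symbol gets a body and reusing the package_name + name key that index_document builds (resolve_global_definition and its uri handling are hypothetical):

resolve_global_definition :: proc(document: ^Document, position_context: DocumentPositionContext) -> (Location, bool) {

	location: Location;

	if position_context.type != .GlobalVariable {
		return location, false;
	}

	name, is_string := position_context.value.(string);

	if !is_string {
		return location, false;
	}

	//index_document keys symbols by the package name immediately followed by the symbol name
	id := strings.concatenate({document.package_name, name}, context.temp_allocator);

	if symbol, found := indexer_get_symbol(id); found {

		if proc_symbol, is_proc := symbol.(ProcedureSymbol); is_proc {
			location.uri = document.path; //placeholder: a real response needs a file:// uri, not a plain path
			location.range = proc_symbol.range;
			return location, true;
		}
	}

	return location, false;
}
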
diff --git a/src/documents.odin b/src/documents.odin
index 84e495c..96326ab 100644
--- a/src/documents.odin
+++ b/src/documents.odin
@@ -4,6 +4,19 @@ import "core:strings"
import "core:fmt"
import "core:log"
import "core:os"
+import "core:odin/parser"
+import "core:odin/ast"
+import "core:odin/tokenizer"
+import "core:path"
+
+ParserError :: struct {
+ message: string,
+ line: int,
+ column: int,
+ file: string,
+ offset: int,
+};
+
Package :: struct {
documents: [dynamic]^Document,
@@ -16,15 +29,31 @@ Document :: struct {
used_text: int, //allow for the text to be reallocated with more data than needed
client_owned: bool,
diagnosed_errors: bool,
- symbols: DocumentSymbols,
+ indexed: bool,
+ ast: ast.File,
+ package_name: string,
+ imports: [] string,
};
DocumentStorage :: struct {
documents: map [string] Document,
+ packages: map [string] Package,
};
document_storage: DocumentStorage;
+
+document_get :: proc(uri_string: string) -> ^Document {
+
+ uri, parsed_ok := parse_uri(uri_string, context.temp_allocator);
+
+ if !parsed_ok {
+ return nil;
+ }
+
+ return &document_storage.documents[uri.path];
+}
+
/*
Note(Daniel, Should there be reference counting of documents or just clear everything on workspace change?
You usually always need the documents that are loaded in core files, your own files, etc.)
@@ -40,6 +69,7 @@ document_new :: proc(path: string, config: ^Config) -> Error {
cloned_path := strings.clone(path);
if !ok {
+ log.error("Failed to parse uri");
return .ParseError;
}
@@ -51,7 +81,13 @@ document_new :: proc(path: string, config: ^Config) -> Error {
used_text = len(text),
};
- if err := document_refresh(&document, config, nil); err != .None {
+ if err := document_refresh(&document, config, nil, false); err != .None {
+ log.error("Failed to refresh new document");
+ return err;
+ }
+
+ if err := index_document(&document); err != .None {
+ log.error("Failed to index new document");
return err;
}
@@ -68,7 +104,10 @@ document_open :: proc(uri_string: string, text: string, config: ^Config, writer:
uri, parsed_ok := parse_uri(uri_string);
+ log.infof("document_open: %v", uri_string);
+
if !parsed_ok {
+ log.error("Failed to parse uri");
return .ParseError;
}
@@ -93,7 +132,7 @@ document_open :: proc(uri_string: string, text: string, config: ^Config, writer:
document.text = transmute([] u8)text;
document.used_text = len(document.text);
- if err := document_refresh(document, config, writer); err != .None {
+ if err := document_refresh(document, config, writer, true); err != .None {
return err;
}
@@ -109,7 +148,7 @@ document_open :: proc(uri_string: string, text: string, config: ^Config, writer:
used_text = len(text),
};
- if err := document_refresh(&document, config, writer); err != .None {
+ if err := document_refresh(&document, config, writer, true); err != .None {
return err;
}
@@ -185,7 +224,7 @@ document_apply_changes :: proc(uri_string: string, changes: [dynamic] TextDocume
}
- return document_refresh(document, config, writer);
+ return document_refresh(document, config, writer, true);
}
document_close :: proc(uri_string: string) -> Error {
@@ -210,12 +249,9 @@ document_close :: proc(uri_string: string) -> Error {
-document_refresh :: proc(document: ^Document, config: ^Config, writer: ^Writer) -> Error {
-
+document_refresh :: proc(document: ^Document, config: ^Config, writer: ^Writer, parse_imports: bool) -> Error {
- document_symbols, errors, package_name, imports, ok := parse_document_symbols(document, config);
-
- document.symbols = document_symbols;
+ errors, ok := parse_document(document, config);
if !ok {
return .ParseError;
@@ -282,15 +318,18 @@ document_refresh :: proc(document: ^Document, config: ^Config, writer: ^Writer)
}
+ if parse_imports {
- /*
- go through the imports from this document and see if we need to load them into memory(not owned by client),
- and also refresh them if needed
- */
- for imp in imports {
+ /*
+ go through the imports from this document and see if we need to load them into memory(not owned by client),
+ and also refresh them if needed
+ */
+ for imp in document.imports {
+
+ if err := document_load_package(imp, config); err != .None {
+ return err;
+ }
- if err := document_load_package(imp, config); err != .None {
- return err;
}
}
@@ -325,3 +364,61 @@ document_load_package :: proc(package_directory: string, config: ^Config) -> Err
return .None;
}
+
+
+current_errors: [dynamic] ParserError;
+
+parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+ error := ParserError { line = pos.line, column = pos.column, file = pos.file,
+ offset = pos.offset, message = fmt.tprintf(msg, ..args) };
+ append(&current_errors, error);
+}
+
+parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+
+}
+
+parse_document :: proc(document: ^Document, config: ^Config) -> ([] ParserError, bool) {
+
+ p := parser.Parser {
+ err = parser_error_handler,
+ warn = parser_warning_handler,
+ };
+
+ current_errors = make([dynamic] ParserError, context.temp_allocator);
+
+ document.ast = ast.File {
+ fullpath = document.path,
+ src = document.text[:document.used_text],
+ };
+
+ parser.parse_file(&p, &document.ast);
+
+ document.imports = make([]string, len(document.ast.imports));
+
+ for imp, index in document.ast.imports {
+
+ //collection specified
+ if i := strings.index(imp.fullpath, ":"); i != -1 {
+
+ collection := imp.fullpath[1:i];
+ p := imp.fullpath[i+1:len(imp.fullpath)-1];
+
+ dir, ok := config.collections[collection];
+
+ if !ok {
+ continue;
+ }
+
+ document.imports[index] = path.join(dir, p);
+
+ }
+
+ //relative
+ else {
+
+ }
+ }
+
+ return current_errors[:], true;
+} \ No newline at end of file
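
parse_document resolves imports that name a collection by splitting imp.fullpath at the colon: the text between the opening quote and the colon selects an entry in config.collections, and the rest (minus the closing quote) is joined onto that directory; relative imports are still unhandled in this commit. A standalone sketch of that split, with a made-up collection directory:

package import_split_example

import "core:fmt"
import "core:strings"
import "core:path"

main :: proc() {

	fullpath := "\"core:odin/ast\""; //imp.fullpath keeps the surrounding quotes

	i := strings.index(fullpath, ":");        //5
	collection := fullpath[1:i];              //"core"
	p := fullpath[i+1:len(fullpath)-1];       //"odin/ast"

	//assuming config.collections["core"] maps to "C:/Odin/core",
	//the import resolves to "C:/Odin/core/odin/ast"
	fmt.println(collection, path.join("C:/Odin/core", p));
}
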
diff --git a/src/index.odin b/src/index.odin
new file mode 100644
index 0000000..5ba66ef
--- /dev/null
+++ b/src/index.odin
@@ -0,0 +1,76 @@
+package main
+
+import "core:odin/ast"
+import "core:fmt"
+import "core:strings"
+
+
+/*
+	This file is responsible for implementing the indexing of symbols for static files.
+
+	This is to solve the scaling problem of large projects with many files and symbols, since most of these files will be static.
+
+	Possible scopes for static files:
+	global scope (we don't have a hierarchy of namespaces and therefore only need to look at the global scope)
+
+	Scopes not part of the indexer:
+	function scope, file scope, package scope (these are only relevant for dynamic active files in your project, which use the AST instead of indexing)
+
+	Potential features:
+	Allow saving the indexer instead of recreating it every time the LSP starts (but you would have to account for stale data).
+
+ TODO(Daniel, Look into data structure for fuzzy searching)
+
+ */
+
+ProcedureSymbol :: struct {
+ range: Range,
+}
+
+Symbol :: union {
+ ProcedureSymbol,
+};
+
+Indexer :: struct {
+ symbol_table: map [string] Symbol,
+};
+
+indexer: Indexer;
+
+index_document :: proc(document: ^Document) -> Error {
+
+ for decl in document.ast.decls {
+
+ if value_decl, ok := decl.derived.(ast.Value_Decl); ok {
+
+ name := string(document.text[value_decl.names[0].pos.offset:value_decl.names[0].end.offset]);
+
+ if len(value_decl.values) == 1 {
+
+ if proc_lit, ok := value_decl.values[0].derived.(ast.Proc_Lit); ok {
+
+ symbol: ProcedureSymbol;
+
+ symbol.range = get_token_range(proc_lit, document.text);
+
+ indexer.symbol_table[strings.concatenate({document.package_name, name}, context.temp_allocator)] = symbol;
+
+ //fmt.println(proc_lit.type);
+
+ }
+
+ }
+
+ }
+ }
+
+ //fmt.println(indexer.symbol_table);
+
+ return .None;
+}
+
+indexer_get_symbol :: proc(id: string) -> (Symbol, bool) {
+
+
+
+}
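
index_document keys the symbol table with the package name immediately followed by the declaration name (package "main" and procedure "foo" give the key "mainfoo"), so lookups have to build the key the same way. indexer_get_symbol is declared without a body in this commit; a minimal sketch, assuming it stays a plain map lookup:

indexer_get_symbol_sketch :: proc(id: string) -> (Symbol, bool) {

	//comma-ok access into the table that index_document fills in
	symbol, ok := indexer.symbol_table[id];

	return symbol, ok;
}
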
diff --git a/src/position.odin b/src/position.odin
index 18cb547..3278f10 100644
--- a/src/position.odin
+++ b/src/position.odin
@@ -3,6 +3,7 @@ package main
import "core:strings"
import "core:unicode/utf8"
import "core:fmt"
+import "core:odin/ast"
/*
This file handles the conversion between utf-16 and utf-8 offsets in the text document
@@ -13,6 +14,64 @@ AbsoluteRange :: struct {
end: int,
};
+AbsolutePosition :: int;
+
+get_absolute_position :: proc(position: Position, document_text: [] u8) -> (AbsolutePosition, bool) {
+ absolute: AbsolutePosition;
+
+ if len(document_text) == 0 {
+ absolute = 0;
+ return absolute, true;
+ }
+
+ line_count := 0;
+ index := 1;
+ last := document_text[0];
+
+ if !get_index_at_line(&index, &line_count, &last, document_text, position.line) {
+ return absolute, false;
+ }
+
+ absolute = index + get_character_offset_u16_to_u8(position.character, document_text[index:]);
+
+ return absolute, true;
+}
+
+/*
+ Get the range of a token in utf16 space
+ */
+get_token_range :: proc(node: ast.Node, document_text: [] u8) -> Range {
+ range: Range;
+
+
+ go_backwards_to_endline :: proc(offset: int, document_text: [] u8) -> int {
+
+ index := offset;
+
+ for index > 0 && (document_text[index] != '\n' || document_text[index] != '\r') {
+ index -= 1;
+ }
+
+ if index == 0 {
+ return 0;
+ }
+
+ return index+1;
+ }
+
+ offset := go_backwards_to_endline(node.pos.offset, document_text);
+
+ range.start.line = node.pos.line-1;
+ range.start.character = get_character_offset_u8_to_u16(node.pos.column-1, document_text[offset:]);
+
+ offset = go_backwards_to_endline(node.end.offset, document_text);
+
+ range.end.line = node.end.line-1;
+ range.end.character = get_character_offset_u8_to_u16(node.end.column-1, document_text[offset:]);
+
+ return range;
+}
+
get_absolute_range :: proc(range: Range, document_text: [] u8) -> (AbsoluteRange, bool) {
absolute: AbsoluteRange;
@@ -122,6 +181,34 @@ get_character_offset_u16_to_u8 :: proc(character_offset: int, document_text: []
return utf8_idx;
}
+get_character_offset_u8_to_u16 :: proc(character_offset: int, document_text: [] u8) -> int {
+
+ utf8_idx := 0;
+ utf16_idx := 0;
+
+ for utf16_idx < character_offset {
+
+ r, w := utf8.decode_rune(document_text[utf8_idx:]);
+
+ if r == '\n' {
+ return utf16_idx;
+ }
+
+ else if r < 0x10000 {
+ utf16_idx += 1;
+ }
+
+ else {
+ utf16_idx += 2;
+ }
+
+ utf8_idx += w;
+
+ }
+
+ return utf16_idx;
+
+}
get_end_line_u16 :: proc(document_text: [] u8) -> int {
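
The new helpers exist because LSP positions count UTF-16 code units while the document text is UTF-8: a rune below U+10000 is one code unit, anything at or above it is encoded as a surrogate pair and counts as two. A small standalone example of that counting rule (inputs are made up; it mirrors the logic of get_character_offset_u8_to_u16 without being the same procedure):

package utf16_count_example

import "core:fmt"

count_utf16_units :: proc(text: string) -> int {

	units := 0;

	for r in text {

		if r < 0x10000 {
			units += 1;
		}
		else {
			units += 2;
		}
	}

	return units;
}

main :: proc() {
	fmt.println(count_utf16_units("π := 3.14"));  //9 code units in 10 bytes of utf-8
	fmt.println(count_utf16_units("𝜋 := 3.14")); //10 code units in 12 bytes of utf-8
}
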
diff --git a/src/requests.odin b/src/requests.odin
index 43b96aa..6318949 100644
--- a/src/requests.odin
+++ b/src/requests.odin
@@ -164,7 +164,8 @@ handle_request :: proc(request: json.Value, config: ^Config, writer: ^Writer) ->
"textDocument/didOpen" = notification_did_open,
"textDocument/didChange" = notification_did_change,
"textDocument/didClose" = notification_did_close,
- "textDocument/didSave" = notification_did_save };
+ "textDocument/didSave" = notification_did_save,
+ "textDocument/definition" = request_definition };
fn: proc(json.Value, RequestId, ^Config, ^Writer) -> Error;
fn, ok = call_map[method];
@@ -253,6 +254,40 @@ request_shutdown :: proc(params: json.Value, id: RequestId, config: ^Config, wri
return .None;
}
+request_definition :: proc(params: json.Value, id: RequestId, config: ^Config, writer: ^Writer) -> Error {
+
+ params_object, ok := params.value.(json.Object);
+
+ if !ok {
+ return .ParseError;
+ }
+
+ definition_params: TextDocumentPositionParams;
+
+ if unmarshal(params, definition_params, context.temp_allocator) != .None {
+ return .ParseError;
+ }
+
+
+ document := document_get(definition_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
+ location, ok2 := get_definition_location(document, definition_params.position);
+
+ if !ok2 {
+ return .InternalError;
+ }
+
+
+
+
+
+ return .None;
+}
+
notification_exit :: proc(params: json.Value, id: RequestId, config: ^Config, writer: ^Writer) -> Error {
running = false;
return .None;
@@ -263,12 +298,14 @@ notification_did_open :: proc(params: json.Value, id: RequestId, config: ^Config
params_object, ok := params.value.(json.Object);
if !ok {
+ log.error("Failed to parse open document notification");
return .ParseError;
}
open_params: DidOpenTextDocumentParams;
if unmarshal(params, open_params, context.allocator) != .None {
+ log.error("Failed to parse open document notification");
return .ParseError;
}
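
request_definition unmarshals the params into the new TextDocumentPositionParams and fetches the document by uri, but the resulting location is not yet written back to the client in this commit. For reference, a hypothetical params payload (all values made up) of the kind the handler receives:

//hypothetical textDocument/definition params; unmarshal maps this onto TextDocumentPositionParams
EXAMPLE_DEFINITION_PARAMS :: `{
	"textDocument": { "uri": "file:///c:/project/src/main.odin" },
	"position": { "line": 41, "character": 8 }
}`;
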
diff --git a/src/types.odin b/src/types.odin
index 274caae..dafe910 100644
--- a/src/types.odin
+++ b/src/types.odin
@@ -129,6 +129,11 @@ Range :: struct {
end: Position,
};
+Location :: struct {
+ uri: string,
+ range: Range,
+};
+
TextDocumentContentChangeEvent :: struct {
range: Range,
text: string,
@@ -178,3 +183,8 @@ DidChangeTextDocumentParams :: struct {
DidCloseTextDocumentParams :: struct {
textDocument: TextDocumentIdentifier,
};
+
+TextDocumentPositionParams :: struct {
+ textDocument: TextDocumentIdentifier,
+ position: Position,
+}; \ No newline at end of file
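
types.odin adds the Location and TextDocumentPositionParams structs from the LSP specification. Illustrative only, with made-up values and assuming Position keeps the line/character fields used elsewhere in this diff: the kind of Location a definition response would eventually carry, built from the new types:

example_definition_location :: proc() -> Location {
	return Location {
		uri = "file:///c:/project/src/main.odin",
		range = Range {
			start = Position{line = 41, character = 0},
			end = Position{line = 41, character = 4},
		},
	};
}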