Diffstat (limited to 'src/server')
-rw-r--r--  src/server/analysis.odin        | 12
-rw-r--r--  src/server/requests.odin        |  5
-rw-r--r--  src/server/semantic_tokens.odin | 28
3 files changed, 38 insertions, 7 deletions
diff --git a/src/server/analysis.odin b/src/server/analysis.odin
index e026023..4f021b7 100644
--- a/src/server/analysis.odin
+++ b/src/server/analysis.odin
@@ -348,7 +348,7 @@ resolve_generic_function_symbol :: proc(ast_context: ^AstContext, params: []^ast
}
if ident, ok := result.type.derived.(Ident); ok {
- field := cast(^Field)index.clone_node(result, context.temp_allocator);
+ field := cast(^Field)index.clone_node(result, context.temp_allocator, nil);
if m := &poly_map[ident.name]; m != nil {
field.type = poly_map[ident.name];
@@ -938,7 +938,7 @@ make_symbol_struct_from_ast :: proc(ast_context: ^AstContext, v: ast.Struct_Type
}
else {
- append(&types, index.clone_type(field.type, context.temp_allocator));
+ append(&types, index.clone_type(field.type, context.temp_allocator, nil));
}
}
}
@@ -1558,7 +1558,7 @@ get_completion_list :: proc(document: ^Document, position: common.Position) -> (
//if there is no field we had to recover from bad expr and create a node (remove when parser can accept temp_allocator)
if position_context.field == nil {
- common.free_ast(position_context.selector);
+ common.free_ast(position_context.selector, context.allocator);
}
list.items = items[:];
@@ -1725,7 +1725,11 @@ get_document_position_node :: proc(node: ^ast.Node, position_context: ^DocumentP
str := position_context.file.src[node.pos.offset:max(0, node.end.offset-1)];
- p := parser.default_parser();
+ p := parser.Parser {
+ err = parser_warning_handler, //empty
+ warn = parser_warning_handler, //empty
+ file = &position_context.file,
+ };
tokenizer.init(&p.tok, str, position_context.file.fullpath);
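Note: parser_warning_handler is referenced above but not shown in this diff. A minimal no-op handler, assuming it matches the core:odin/parser handler signature of proc(pos: tokenizer.Pos, msg: string, args: ..any), would be a sketch like:

parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
	// intentionally empty: suppress parser diagnostics while re-parsing a
	// recovered expression for completion
}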
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 4eed483..149ea3a 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -269,8 +269,8 @@ request_initialize :: proc(params: json.Value, id: RequestId, config: ^common.Co
token_type := type_info_of(SemanticTokenTypes).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
token_modifier := type_info_of(SemanticTokenModifiers).variant.(runtime.Type_Info_Named).base.variant.(runtime.Type_Info_Enum);
- token_types := make([] string, len(token_type.names));
- token_modifiers := make([] string, len(token_modifier.names));
+ token_types := make([] string, len(token_type.names), context.temp_allocator);
+ token_modifiers := make([] string, len(token_modifier.names), context.temp_allocator);
for name, i in token_type.names {
token_types[i] = strings.to_lower(name, context.temp_allocator);
@@ -350,7 +350,6 @@ request_definition :: proc(params: json.Value, id: RequestId, config: ^common.Co
return .ParseError;
}
-
document := document_get(definition_params.textDocument.uri);
if document == nil {
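Note: passing context.temp_allocator to make above keeps the token-name arrays off the default heap allocator, so they are reclaimed in bulk with the rest of the request's temporary allocations instead of leaking. A minimal sketch of the pattern (hypothetical slice, not from this commit):

names := make([]string, 4, context.temp_allocator); // backed by the temp arena
// ... fill and use names while handling the request ...
free_all(context.temp_allocator); // releases every temp allocation at once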
diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin
index d374cc0..03f739b 100644
--- a/src/server/semantic_tokens.odin
+++ b/src/server/semantic_tokens.odin
@@ -63,10 +63,38 @@ SemanticTokens :: struct {
data: [] uint,
};
+SemanticTokenInternal :: struct {
+ line: uint,
+ column: uint,
+ length: uint,
+};
+
+
+convert_to_finished_tokens :: proc(tokens: [dynamic]SemanticTokenInternal) -> [] SemanticTokens {
+ return {};
+}
+
get_semantic_tokens :: proc(document: ^Document) -> [] SemanticTokens {
+ tokens := make([dynamic]SemanticTokenInternal, context.temp_allocator);
+ /*
+ Temporarily parse the document again; right now there are too many leaks in the parser that need to be fixed.
+ */
return {};
}
+
+
+/*
+extract_semantic_tokens :: proc {
+ extract_semantic_tokens_node,
+ extract_semantic_tokens_dynamic_array,
+ extract_semantic_tokens_array,
+};
+
+extract_semantic_tokens_node :: proc() {
+
+}
+*/
\ No newline at end of file
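Note: convert_to_finished_tokens is stubbed out above. For context, the LSP semantic-token wire format packs each token into five uints (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), delta-encoded against the previous token. A sketch of the conversion under that assumption, returning a single SemanticTokens value rather than the stub's slice type, and emitting 0 for the type/modifier fields that SemanticTokenInternal does not carry yet:

convert_to_finished_tokens_sketch :: proc(tokens: [dynamic]SemanticTokenInternal) -> SemanticTokens {
	data := make([dynamic]uint, context.temp_allocator);

	last_line: uint;
	last_column: uint;

	for token in tokens {
		delta_line := token.line - last_line;
		delta_column := token.column;

		if delta_line == 0 {
			// same line: start is relative to the previous token's start
			delta_column = token.column - last_column;
		}

		append(&data, delta_line, delta_column, token.length, 0, 0);

		last_line = token.line;
		last_column = token.column;
	}

	return SemanticTokens { data = data[:] };
}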