author	Daniel Gavin <danielgavin5@hotmail.com>	2022-03-14 01:02:43 +0100
committer	Daniel Gavin <danielgavin5@hotmail.com>	2022-03-14 01:02:43 +0100
commit	06562b8ff7d3d4ad3bb22b66eef8c8111fe243da (patch)
tree	c292c2437a7afba8defef0da57019642e376ab28
parent	e2780708f9a590a62882d1316997dc2248b3e9b1 (diff)
Fix comp_lit error and start work on magic completion
-rw-r--r--	src/analysis/analysis.odin	25
-rw-r--r--	src/server/completion.odin	57
-rw-r--r--	src/server/requests.odin	2
-rw-r--r--	src/server/semantic_tokens.odin	2
4 files changed, 61 insertions(+), 25 deletions(-)
diff --git a/src/analysis/analysis.odin b/src/analysis/analysis.odin
index 47f52a4..7ccd5bd 100644
--- a/src/analysis/analysis.odin
+++ b/src/analysis/analysis.odin
@@ -295,12 +295,13 @@ resolve_type_comp_literal :: proc(ast_context: ^AstContext, position_context: ^D
return {}, nil, false
}
- if current_comp_lit == nil {
- return {}, nil, false
- }
-
element_index := 0
+ prev_package := ast_context.current_package
+ ast_context.current_package = current_symbol.pkg
+
+ defer ast_context.current_package = prev_package
+
for elem, i in current_comp_lit.elems {
if position_in_node(elem, position_context.position) {
element_index = i
@@ -2933,6 +2934,16 @@ fallback_position_context_completion :: proc(document: ^common.Document, positio
begin_offset := max(0, start)
end_offset := max(start, end + 1)
+ line_offset := begin_offset
+
+ for line_offset > 0 {
+ c := position_context.file.src[line_offset]
+ if c == '\n' || c == '\r' {
+ line_offset += 1
+ break
+ }
+ line_offset -= 1
+ }
str := position_context.file.src[0:end_offset]
@@ -2961,6 +2972,7 @@ fallback_position_context_completion :: proc(document: ^common.Document, positio
p := parser.Parser {
err = common.parser_warning_handler, //empty
warn = common.parser_warning_handler, //empty
+ flags = {.Optional_Semicolons},
file = &position_context.file,
}
@@ -2968,7 +2980,7 @@ fallback_position_context_completion :: proc(document: ^common.Document, positio
p.tok.ch = ' '
p.tok.line_count = position.line + 1
- p.tok.line_offset = begin_offset
+ p.tok.line_offset = line_offset
p.tok.offset = begin_offset
p.tok.read_offset = begin_offset
@@ -3007,7 +3019,8 @@ fallback_position_context_completion :: proc(document: ^common.Document, positio
tokenizer.init(&p.tok, position_context.file.src[0:last_dot], position_context.file.fullpath, common.parser_warning_handler)
p.tok.ch = ' '
- p.tok.line_count = position.line
+ p.tok.line_count = position.line + 1
+ p.tok.line_offset = line_offset
p.tok.offset = begin_offset
p.tok.read_offset = begin_offset
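
The loop added above rewinds from the completion offset to the first character of the current line, so the fallback tokenizer is seeded with a real line offset instead of the raw begin_offset. Below is a minimal, self-contained sketch of that scan, assuming the source buffer is a plain string; the proc name find_line_start and the standalone form are illustrative, since the real code operates directly on position_context.file.src.

package sketch

// Walk backwards from the cursor until the previous newline, then step past
// it so the returned offset points at the first byte of the current line.
find_line_start :: proc(src: string, begin_offset: int) -> int {
	line_offset := begin_offset

	for line_offset > 0 {
		c := src[line_offset]
		if c == '\n' || c == '\r' {
			line_offset += 1
			break
		}
		line_offset -= 1
	}

	return line_offset
}
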
diff --git a/src/server/completion.odin b/src/server/completion.odin
index f7ce075..8d6d93a 100644
--- a/src/server/completion.odin
+++ b/src/server/completion.odin
@@ -350,7 +350,7 @@ get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_cont
case index.SymbolUnionValue:
list.isIncomplete = false
- //append_magic_union_completion(position_context, selector, &items)
+ append_magic_union_completion(position_context, selector, &items)
for type in v.types {
if symbol, ok := resolve_type_expression(ast_context, type); ok {
@@ -459,7 +459,7 @@ get_selector_completion :: proc(ast_context: ^analysis.AstContext, position_cont
}
case index.SymbolDynamicArrayValue:
list.isIncomplete = false
- //append_magic_dynamic_array_completion(position_context, selector, &items)
+ append_magic_dynamic_array_completion(position_context, selector, &items)
}
list.items = items[:]
@@ -1233,13 +1233,24 @@ get_range_from_selection_start_to_dot :: proc(position_context: ^analysis.Docume
}
append_magic_dynamic_array_completion :: proc(position_context: ^analysis.DocumentPositionContext, symbol: index.Symbol, items: ^[dynamic]CompletionItem) {
-
range, ok := get_range_from_selection_start_to_dot(position_context)
if !ok {
return
}
-
+
+ remove_range := common.Range {
+ start = range.start,
+ end = range.end,
+ }
+
+ remove_edit := TextEdit {
+ range = remove_range,
+ newText = "",
+ }
+
+ additionalTextEdits := make([]TextEdit, 1, context.temp_allocator)
+ additionalTextEdits[0] = remove_edit
//len
{
@@ -1252,23 +1263,13 @@ append_magic_dynamic_array_completion :: proc(position_context: ^analysis.Docume
textEdit = TextEdit {
newText = text,
range = {
- start = range.start,
+ start = range.end,
end = range.end,
},
},
+ additionalTextEdits = additionalTextEdits,
}
- log.error(item)
- append(items, item)
- }
- //append
- {
- item := CompletionItem {
- label = "append",
- kind = .Function,
- detail = "append",
- insertText = fmt.tprintf("append(%v)$0", symbol.name),
- }
append(items, item)
}
@@ -1281,15 +1282,37 @@ append_magic_union_completion :: proc(position_context: ^analysis.DocumentPositi
return
}
+ remove_range := common.Range {
+ start = range.start,
+ end = range.end,
+ }
+
+ remove_edit := TextEdit {
+ range = remove_range,
+ newText = "",
+ }
+
+ additionalTextEdits := make([]TextEdit, 1, context.temp_allocator)
+ additionalTextEdits[0] = remove_edit
+
//switch
{
item := CompletionItem {
label = "switch",
kind = .Snippet,
detail = "switch",
- insertText = fmt.tprintf("len(%v)$0", symbol.name),
+ additionalTextEdits = additionalTextEdits,
+ textEdit = TextEdit {
+ newText = fmt.tprintf("switch v in %v {{\n\t$0 \n}}", symbol.name),
+ range = {
+ start = range.end,
+ end = range.end,
+ },
+ },
insertTextFormat = .Snippet,
+ InsertTextMode = .adjustIndentation,
}
+
append(items, item)
}
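
The magic completions introduced above pair the main textEdit, inserted at the dot (range.end), with an additionalTextEdits entry that deletes the typed selector, so accepting "len" on "my_array." yields "len(my_array)" with the cursor at the $0 snippet stop. A trimmed sketch of that edit pair follows, using reduced stand-ins for the server's LSP structs; the real CompletionItem and TextEdit in src/server carry more fields (kind, detail, insertTextFormat, ...).

package sketch

import "core:fmt"

// Reduced stand-ins for the server's LSP structures.
Position :: struct {
	line:      int,
	character: int,
}

Range :: struct {
	start: Position,
	end:   Position,
}

TextEdit :: struct {
	range:   Range,
	newText: string,
}

CompletionItem :: struct {
	label:               string,
	textEdit:            TextEdit,
	additionalTextEdits: []TextEdit,
}

// Build the edit pair used by the magic "len" completion: delete the typed
// selector (everything from the selector start to the dot) via
// additionalTextEdits, and insert the call right at the dot via textEdit.
// The "$0" placeholder marks the final cursor position and only takes effect
// when the item is sent with a snippet insert-text format.
make_len_completion :: proc(symbol_name: string, selector_range: Range) -> CompletionItem {
	remove_edit := TextEdit {
		range   = selector_range,
		newText = "",
	}

	additional := make([]TextEdit, 1, context.temp_allocator)
	additional[0] = remove_edit

	return CompletionItem{
		label = "len",
		textEdit = TextEdit{
			newText = fmt.tprintf("len(%v)$0", symbol_name),
			range   = Range{start = selector_range.end, end = selector_range.end},
		},
		additionalTextEdits = additional,
	}
}
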
diff --git a/src/server/requests.odin b/src/server/requests.odin
index 49636dc..0c63e46 100644
--- a/src/server/requests.odin
+++ b/src/server/requests.odin
@@ -624,7 +624,7 @@ request_completion :: proc (params: json.Value, id: RequestId, config: ^common.C
if document == nil {
return .InternalError
}
- log.error(completition_params.position)
+
list: CompletionList
list, ok = get_completion_list(document, completition_params.position, completition_params.context_)
diff --git a/src/server/semantic_tokens.odin b/src/server/semantic_tokens.odin
index bd89bb3..98f534b 100644
--- a/src/server/semantic_tokens.odin
+++ b/src/server/semantic_tokens.odin
@@ -218,7 +218,7 @@ visit_node :: proc(node: ^ast.Node, builder: ^SemanticTokenBuilder, ast_context:
case ^Expr_Stmt:
visit(n.expr, builder, ast_context)
case ^Branch_Stmt:
- write_semantic_token(builder, n.tok, ast_context.file.src, .Type, .None)
+ write_semantic_token(builder, n.tok, ast_context.file.src, .Keyword, .None)
case ^Poly_Type:
write_semantic_string(builder, n.dollar, "$", ast_context.file.src, .Operator, .None)
visit(n.type, builder, ast_context)