Diffstat (limited to 'src/server/check.odin')
-rw-r--r--  src/server/check.odin | 89
1 file changed, 33 insertions(+), 56 deletions(-)
diff --git a/src/server/check.odin b/src/server/check.odin
index b8f0c7c..5b8f2a1 100644
--- a/src/server/check.odin
+++ b/src/server/check.odin
@@ -19,9 +19,6 @@ import "core:thread"
import "src:common"
-//Store uris we have reported on since last save. We use this to clear them on next save.
-uris_reported: [dynamic]string
-
Json_Error :: struct {
type: string,
pos: Json_Type_Error,
@@ -68,7 +65,33 @@ fallback_find_odin_directories :: proc(config: ^common.Config) -> []string {
return data[:]
}
-check :: proc(paths: []string, uri: common.Uri, writer: ^Writer, config: ^common.Config) {
+check_unused_imports :: proc(document: ^Document, config: ^common.Config) {
+ if !config.enable_unused_imports_reporting {
+ return
+ }
+
+ diagnostics := make([dynamic]Diagnostic, context.temp_allocator)
+
+ unused_imports := find_unused_imports(document, context.temp_allocator)
+
+ remove_diagnostics(.Unused, document.uri.uri)
+
+ for imp in unused_imports {
+ add_diagnostics(
+ .Unused,
+ document.uri.uri,
+ Diagnostic {
+ range = common.get_token_range(imp.import_decl, document.ast.src),
+ severity = DiagnosticSeverity.Hint,
+ code = "Unused",
+ message = "unused import",
+ tags = {.Unnecessary},
+ },
+ )
+ }
+}
+
+check :: proc(paths: []string, uri: common.Uri, config: ^common.Config) {
paths := paths
if len(paths) == 0 {
@@ -138,6 +161,8 @@ check :: proc(paths: []string, uri: common.Uri, writer: ^Writer, config: ^common
log.errorf("Failed to unmarshal check results: %v, %v", res, string(buffer))
}
+ clear_diagnostics(.Check)
+
for error in json_errors.errors {
if len(error.msgs) == 0 {
break
@@ -149,12 +174,11 @@ check :: proc(paths: []string, uri: common.Uri, writer: ^Writer, config: ^common
continue
}
- if error.pos.file not_in errors {
- errors[error.pos.file] = make([dynamic]Diagnostic, context.temp_allocator)
- }
+ uri := common.create_uri(error.pos.file, context.temp_allocator)
- append(
- &errors[error.pos.file],
+ add_diagnostics(
+ .Check,
+ uri.uri,
Diagnostic {
code = "checker",
severity = .Error,
@@ -168,51 +192,4 @@ check :: proc(paths: []string, uri: common.Uri, writer: ^Writer, config: ^common
)
}
}
-
- for uri in uris_reported {
- params := NotificationPublishDiagnosticsParams {
- uri = uri,
- diagnostics = {},
- }
-
- notification := Notification {
- jsonrpc = "2.0",
- method = "textDocument/publishDiagnostics",
- params = params,
- }
-
- if writer != nil {
- send_notification(notification, writer)
- }
-
- delete(uri)
- }
-
- clear(&uris_reported)
-
- for k, v in errors {
- uri := common.create_uri(k, context.temp_allocator)
-
- //Find the unique diagnostics, since some poor profile settings make the checker check the same file multiple times
- unique := slice.unique(v[:])
-
- params := NotificationPublishDiagnosticsParams {
- uri = uri.uri,
- diagnostics = unique,
- }
-
- notifaction := Notification {
- jsonrpc = "2.0",
- method = "textDocument/publishDiagnostics",
- params = params,
- }
-
- append(&uris_reported, strings.clone(uri.uri))
-
- if writer != nil {
- send_notification(notifaction, writer)
- }
- }
-
-
}
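
Note on the diagnostics API used above: the added code calls clear_diagnostics, remove_diagnostics and add_diagnostics with a .Check or .Unused source, but those procedures live outside src/server/check.odin and are not part of this diff. What follows is a minimal sketch, assuming a package-level store keyed by source and uri and reusing the existing Diagnostic type; the names Diagnostic_Source, Diagnostics_Store and diagnostics_store are illustrative, not the server's actual implementation.

package server

import "core:sync"

// Passes that own their diagnostics independently of each other.
Diagnostic_Source :: enum {
	Check,  // errors reported by running `odin check`
	Unused, // unused-import hints from check_unused_imports
}

// uri -> diagnostics, kept per source so one pass can clear or replace its
// own results without touching the other's.
Diagnostics_Store :: struct {
	mutex:      sync.Mutex,
	per_source: [Diagnostic_Source]map[string][dynamic]Diagnostic,
}

diagnostics_store: Diagnostics_Store

@(init)
diagnostics_store_init :: proc() {
	for &uris in diagnostics_store.per_source {
		uris = make(map[string][dynamic]Diagnostic)
	}
}

// Drop everything a source has reported so far, e.g. before a fresh check run.
clear_diagnostics :: proc(source: Diagnostic_Source) {
	sync.mutex_lock(&diagnostics_store.mutex)
	defer sync.mutex_unlock(&diagnostics_store.mutex)

	for _, diagnostics in diagnostics_store.per_source[source] {
		delete(diagnostics)
	}

	clear(&diagnostics_store.per_source[source])
}

// Drop what a source reported for a single document.
remove_diagnostics :: proc(source: Diagnostic_Source, uri: string) {
	sync.mutex_lock(&diagnostics_store.mutex)
	defer sync.mutex_unlock(&diagnostics_store.mutex)

	if diagnostics, ok := diagnostics_store.per_source[source][uri]; ok {
		delete(diagnostics)
		delete_key(&diagnostics_store.per_source[source], uri)
	}
}

// Record one diagnostic for a document under the given source.
add_diagnostics :: proc(source: Diagnostic_Source, uri: string, diagnostic: Diagnostic) {
	sync.mutex_lock(&diagnostics_store.mutex)
	defer sync.mutex_unlock(&diagnostics_store.mutex)

	if uri not_in diagnostics_store.per_source[source] {
		diagnostics_store.per_source[source][uri] = make([dynamic]Diagnostic)
	}

	append(&diagnostics_store.per_source[source][uri], diagnostic)
}

With a store like this, the publishing step that the old code performed inline (the removed uris_reported loop and the per-file publishDiagnostics notifications) can live in one place, which is why check no longer needs the ^Writer parameter. One possible shape, reusing the Notification, NotificationPublishDiagnosticsParams, Writer and send_notification names visible in the removed code:

// Merge every source's diagnostics per uri and publish each uri once; LSP
// publishDiagnostics replaces all previous diagnostics for a uri, so the
// sources have to be combined before sending.
publish_diagnostics :: proc(writer: ^Writer) {
	sync.mutex_lock(&diagnostics_store.mutex)
	defer sync.mutex_unlock(&diagnostics_store.mutex)

	merged := make(map[string][dynamic]Diagnostic)
	defer delete(merged)

	for uris in diagnostics_store.per_source {
		for uri, diagnostics in uris {
			if uri not_in merged {
				merged[uri] = make([dynamic]Diagnostic, context.temp_allocator)
			}
			append(&merged[uri], ..diagnostics[:])
		}
	}

	for uri, diagnostics in merged {
		params := NotificationPublishDiagnosticsParams {
			uri         = uri,
			diagnostics = diagnostics[:],
		}

		notification := Notification {
			jsonrpc = "2.0",
			method  = "textDocument/publishDiagnostics",
			params  = params,
		}

		if writer != nil {
			send_notification(notification, writer)
		}
	}
}

The clearing behaviour of the removed uris_reported bookkeeping (publishing an empty list for files that no longer have diagnostics) and the slice.unique de-duplication would also belong in this layer, e.g. by remembering which uris were published on the previous pass.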