aboutsummaryrefslogtreecommitdiff
path: root/src/server
diff options
context:
space:
mode:
Diffstat (limited to 'src/server')
-rw-r--r--src/server/analysis.odin158
-rw-r--r--src/server/documents.odin402
-rw-r--r--src/server/log.odin64
-rw-r--r--src/server/reader.odin64
-rw-r--r--src/server/requests.odin364
-rw-r--r--src/server/response.odin68
-rw-r--r--src/server/types.odin154
-rw-r--r--src/server/unmarshal.odin129
-rw-r--r--src/server/workspace.odin1
-rw-r--r--src/server/writer.odin29
10 files changed, 1433 insertions, 0 deletions
diff --git a/src/server/analysis.odin b/src/server/analysis.odin
new file mode 100644
index 0000000..e88532f
--- /dev/null
+++ b/src/server/analysis.odin
@@ -0,0 +1,158 @@
+package server
+
+import "core:odin/parser"
+import "core:odin/ast"
+import "core:odin/tokenizer"
+import "core:fmt"
+import "core:log"
+import "core:strings"
+import "core:path"
+
+import "shared:common"
+import "shared:index"
+
+
+
+DocumentPositionContextDottedValue :: struct {
+ prefix: string,
+ postfix: string,
+};
+
+DocumentPositionContextGlobalValue :: struct {
+
+};
+
+DocumentPositionContextUnknownValue :: struct {
+
+}
+
+DocumentPositionContextValue :: union {
+ DocumentPositionContextDottedValue,
+ DocumentPositionContextGlobalValue,
+ DocumentPositionContextUnknownValue
+};
+
+DocumentPositionContext :: struct {
+ value: DocumentPositionContextValue,
+};
+
+
+//Deliberately a no-op: get_document_position_context tokenizes in-progress,
+//possibly broken code, so scan errors are expected and must be swallowed.
+tokenizer_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+
+}
+
+
+/*
+	Figure out what exactly is at the given position and whether it is in a function, struct, etc.
+	Returns false when the position cannot be mapped to an absolute offset in the document.
+*/
+get_document_position_context :: proc(document: ^Document, position: common.Position) -> (DocumentPositionContext, bool) {
+
+	position_context: DocumentPositionContext;
+
+	absolute_position, ok := common.get_absolute_position(position, document.text);
+
+	if !ok {
+		return position_context, false;
+	}
+
+
+	//Using the ast is not really viable since the code may be broken code
+	t: tokenizer.Tokenizer;
+
+	tokenizer.init(&t, document.text, document.uri.path, tokenizer_error_handler);
+
+	//currently unused scaffolding for the brace/bracket tracking described below
+	stack := make([dynamic] tokenizer.Token, context.temp_allocator);
+
+	current_token: tokenizer.Token;
+	last_token: tokenizer.Token;
+
+	struct_or_package_dotted: bool;
+	struct_or_package: tokenizer.Token;
+
+	/*
+		Idea is to push and pop into braces, brackets, etc, and use the final stack to infer context
+	*/
+
+	scan_loop: for {
+
+		current_token = tokenizer.scan(&t);
+
+		#partial switch current_token.kind {
+		case .Period:
+			//an identifier immediately before a '.' is a candidate package/struct prefix
+			if last_token.kind == .Ident {
+				struct_or_package_dotted = true;
+				struct_or_package = last_token;
+			}
+		case .Ident:
+		case .EOF:
+			//a plain 'break' here would only leave the switch, not the loop
+			break scan_loop;
+		case:
+			struct_or_package_dotted = false;
+
+		}
+
+		//stop once the scanner has reached or passed the cursor position
+		if current_token.pos.offset+len(current_token.text) >= absolute_position {
+			break scan_loop;
+		}
+
+		last_token = current_token;
+	}
+
+	#partial switch current_token.kind {
+	case .Ident:
+		if struct_or_package_dotted {
+			position_context.value = DocumentPositionContextDottedValue {
+				prefix = struct_or_package.text,
+				postfix = current_token.text,
+			};
+		}
+		else {
+
+		}
+	case:
+		position_context.value = DocumentPositionContextUnknownValue {
+
+		};
+	}
+
+	//fmt.println(position_context);
+
+	return position_context, true;
+}
+
+
+//Resolve the symbol at the given position to its declaration location via the index.
+//Returns false when the position context cannot be determined or the symbol is unknown.
+get_definition_location :: proc(document: ^Document, position: common.Position) -> (common.Location, bool) {
+
+
+	location: common.Location;
+
+
+
+	position_context, ok := get_document_position_context(document, position);
+
+	if !ok {
+		return location, false;
+	}
+
+	symbol: index.Symbol;
+
+	#partial switch v in position_context.value {
+	case DocumentPositionContextDottedValue:
+		//NOTE(review): prefix and postfix are concatenated with no separator — assumes
+		//index.lookup keys omit the '.' (e.g. "fmtprintln"); confirm against the indexer.
+		symbol, ok = index.lookup(strings.concatenate({v.prefix, v.postfix}, context.temp_allocator));
+	case:
+		//only dotted references can be resolved so far
+		return location, false;
+	}
+
+	//fmt.println(indexer.symbol_table);
+
+	if !ok {
+		return location, false;
+	}
+
+	location.range = symbol.range;
+	location.uri = symbol.uri;
+
+
+	return location, true;
+}
+
diff --git a/src/server/documents.odin b/src/server/documents.odin
new file mode 100644
index 0000000..59eeadd
--- /dev/null
+++ b/src/server/documents.odin
@@ -0,0 +1,402 @@
+package server
+
+import "core:strings"
+import "core:fmt"
+import "core:log"
+import "core:os"
+import "core:odin/parser"
+import "core:odin/ast"
+import "core:odin/tokenizer"
+import "core:path"
+
+import "shared:common"
+
+ParserError :: struct {
+ message: string,
+ line: int,
+ column: int,
+ file: string,
+ offset: int,
+};
+
+
+Package :: struct {
+ documents: [dynamic]^Document,
+};
+
+Document :: struct {
+ uri: common.Uri,
+ text: [] u8,
+ used_text: int, //allow for the text to be reallocated with more data than needed
+ client_owned: bool,
+ diagnosed_errors: bool,
+ ast: ast.File,
+ package_name: string,
+ imports: [] string,
+};
+
+DocumentStorage :: struct {
+ documents: map [string] Document,
+ packages: map [string] Package,
+};
+
+document_storage: DocumentStorage;
+
+
+//Fetch the stored document for a uri string, or nil when the uri fails to parse.
+document_get :: proc(uri_string: string) -> ^Document {
+
+	uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
+
+	if !parsed_ok {
+		return nil;
+	}
+
+	//NOTE(review): assumes taking the address of a map index yields nil for a missing
+	//key in this odin version (document_close relies on the same behavior) — confirm.
+	return &document_storage.documents[uri.path];
+}
+
+/*
+ Note(Daniel, Should there be reference counting of documents or just clear everything on workspace change?
+ You usually always need the documents that are loaded in core files, your own files, etc.)
+ */
+
+/*
+	Server opens a new document with text read from the filesystem.
+	The document is stored keyed by its path and is not client owned.
+*/
+document_new :: proc(path: string, config: ^common.Config) -> common.Error {
+
+	text, ok := os.read_entire_file(path);
+
+	//check the read before doing any further work, and report what actually failed
+	if !ok {
+		log.errorf("Failed to read file: %v", path);
+		return .ParseError;
+	}
+
+	uri := common.create_uri(path);
+
+	document := Document {
+		uri = uri,
+		text = transmute([] u8)text,
+		client_owned = false,
+		used_text = len(text),
+	};
+
+
+	document_storage.documents[path] = document;
+
+	return .None;
+}
+
+/*
+ Client opens a document with transferred text
+*/
+
+document_open :: proc(uri_string: string, text: string, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ uri, parsed_ok := common.parse_uri(uri_string);
+
+ log.infof("document_open: %v", uri_string);
+
+ if !parsed_ok {
+ log.error("Failed to parse uri");
+ return .ParseError;
+ }
+
+ if document := &document_storage.documents[uri.path]; document != nil {
+
+ if document.client_owned {
+ log.errorf("Client called open on an already open document: %v ", document.uri.path);
+ return .InvalidRequest;
+ }
+
+ if document.text != nil {
+ delete(document.text);
+ }
+
+ if len(document.uri.uri) > 0 {
+ common.delete_uri(document.uri);
+ }
+
+ document.uri = uri;
+ document.client_owned = true;
+ document.text = transmute([] u8)text;
+ document.used_text = len(document.text);
+
+ if err := document_refresh(document, config, writer, true); err != .None {
+ return err;
+ }
+
+ }
+
+ else {
+
+ document := Document {
+ uri = uri,
+ text = transmute([] u8)text,
+ client_owned = true,
+ used_text = len(text),
+ };
+
+ if err := document_refresh(&document, config, writer, true); err != .None {
+ return err;
+ }
+
+ document_storage.documents[uri.path] = document;
+ }
+
+
+
+ //hmm feels like odin needs some ownership semantic
+ delete(uri_string);
+
+ return .None;
+}
+
+/*
+	Function that applies changes to the given document through incremental synchronization
+ */
+document_apply_changes :: proc(uri_string: string, changes: [dynamic] TextDocumentContentChangeEvent, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+	uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
+
+	if !parsed_ok {
+		return .ParseError;
+	}
+
+	document := &document_storage.documents[uri.path];
+
+	//guard against a document that was never opened, mirroring document_close;
+	//log uri.path, not document.uri.path, which would deref nil
+	if document == nil || !document.client_owned {
+		log.errorf("Client called change on an document not opened: %v ", uri.path);
+		return .InvalidRequest;
+	}
+
+	for change in changes {
+
+		absolute_range, ok := common.get_absolute_range(change.range, document.text[:document.used_text]);
+
+		if !ok {
+			return .ParseError;
+		}
+
+		//lower bound is before the change
+		lower := document.text[:absolute_range.start];
+
+		//new change between lower and upper
+		middle := change.text;
+
+		//upper bound is after the change
+		upper := document.text[absolute_range.end:document.used_text];
+
+		//total new size needed
+		document.used_text = len(lower) + len(change.text) + len(upper);
+
+		//Reduce the amount of allocation by allocating more memory than needed
+		if document.used_text > len(document.text) {
+			new_text := make([]u8, document.used_text * 2);
+
+			//join the 3 splices into the text
+			copy(new_text, lower);
+			copy(new_text[len(lower):], middle);
+			copy(new_text[len(lower)+len(middle):], upper);
+
+			delete(document.text);
+
+			document.text = new_text;
+		}
+
+		else {
+			//order matters here, we need to make sure we swap the data already in the text before the middle
+			copy(document.text, lower);
+			copy(document.text[len(lower)+len(middle):], upper);
+			copy(document.text[len(lower):], middle);
+		}
+
+	}
+
+	return document_refresh(document, config, writer, true);
+}
+
+//Client closes a document; the server keeps the entry but gives up client ownership.
+document_close :: proc(uri_string: string) -> common.Error {
+
+	uri, parsed_ok := common.parse_uri(uri_string, context.temp_allocator);
+
+	if !parsed_ok {
+		return .ParseError;
+	}
+
+	document := &document_storage.documents[uri.path];
+
+	if document == nil || !document.client_owned {
+		//log uri.path here: document may be nil, so document.uri.path would crash
+		log.errorf("Client called close on a document that was never opened: %v ", uri.path);
+		return .InvalidRequest;
+	}
+
+	document.client_owned = false;
+
+	return .None;
+}
+
+
+
+/*
+	Reparse the document and, when a writer is given, publish diagnostics for any
+	parse errors — or clear previously published diagnostics once they are gone.
+*/
+document_refresh :: proc(document: ^Document, config: ^common.Config, writer: ^Writer, parse_imports: bool) -> common.Error {
+
+	errors, ok := parse_document(document, config);
+
+	if !ok {
+		return .ParseError;
+	}
+
+	//right now we don't allow writing errors out from files read from the file directory, core files, etc.
+	if writer != nil && len(errors) > 0 {
+		document.diagnosed_errors = true;
+
+		params := NotificationPublishDiagnosticsParams {
+			uri = document.uri.uri,
+			diagnostics = make([] Diagnostic, len(errors), context.temp_allocator),
+		};
+
+		for error, i in errors {
+
+			//parser lines are 1-based; lsp positions are 0-based, hence the -1
+			params.diagnostics[i] = Diagnostic {
+				range = common.Range {
+					start = common.Position {
+						line = error.line - 1,
+						character = 0,
+					},
+					end = common.Position {
+						line = error.line,
+						character = 0,
+					},
+				},
+				severity = DiagnosticSeverity.Error,
+				code = "test",
+				message = error.message,
+			};
+
+		}
+
+		notification := Notification {
+			jsonrpc = "2.0",
+			method = "textDocument/publishDiagnostics",
+			params = params,
+		};
+
+		send_notification(notification, writer);
+
+	}
+
+	//send empty diagnostics to remove the client's previously reported errors
+	if writer != nil && len(errors) == 0 && document.diagnosed_errors {
+
+		notification := Notification {
+			jsonrpc = "2.0",
+			method = "textDocument/publishDiagnostics",
+
+			params = NotificationPublishDiagnosticsParams {
+				uri = document.uri.uri,
+				diagnostics = make([] Diagnostic, 0, context.temp_allocator),
+			},
+		};
+
+		document.diagnosed_errors = false;
+
+		send_notification(notification, writer);
+	}
+
+	return .None;
+}
+
+//Load every file in a package directory into document storage (skipping ones
+//already present). Fails if the directory cannot be opened or listed.
+document_load_package :: proc(package_directory: string, config: ^common.Config) -> common.Error {
+
+	fd, err := os.open(package_directory);
+
+	if err != 0 {
+		return .ParseError;
+	}
+
+	//the handle was previously leaked on every call
+	defer os.close(fd);
+
+	files: []os.File_Info;
+	files, err = os.read_dir(fd, 100, context.temp_allocator);
+
+	//err was previously ignored, silently iterating a bad listing
+	if err != 0 {
+		return .ParseError;
+	}
+
+	for file in files {
+
+		//if we have never encountered the document
+		if _, ok := document_storage.documents[file.fullpath]; !ok {
+
+			if doc_err := document_new(file.fullpath, config); doc_err != .None {
+				return doc_err;
+			}
+
+		}
+
+	}
+
+	return .None;
+}
+
+
+//accumulator the parser callbacks append into; reset by parse_document before each
+//parse — NOTE(review): module-level state makes parsing non-reentrant; confirm the
+//server is single-threaded.
+current_errors: [dynamic] ParserError;
+
+//collects each parser error with its position into current_errors
+parser_error_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+	error := ParserError { line = pos.line, column = pos.column, file = pos.file,
+							offset = pos.offset, message = fmt.tprintf(msg, ..args) };
+	append(&current_errors, error);
+}
+
+//warnings are intentionally ignored for now
+parser_warning_handler :: proc(pos: tokenizer.Pos, msg: string, args: ..any) {
+
+}
+
+//Parse the document's current text into document.ast, collecting parser errors.
+//The returned slice is backed by the temp allocator (via current_errors) and is
+//only valid until the temp allocator is reset. Always returns true at present.
+parse_document :: proc(document: ^Document, config: ^common.Config) -> ([] ParserError, bool) {
+
+	p := parser.Parser {
+		err = parser_error_handler,
+		warn = parser_warning_handler,
+	};
+
+	//reset the module-level error accumulator the handlers append into
+	current_errors = make([dynamic] ParserError, context.temp_allocator);
+
+	//only the used prefix of the (possibly over-allocated) text buffer is parsed
+	document.ast = ast.File {
+		fullpath = document.uri.path,
+		src = document.text[:document.used_text],
+	};
+
+	parser.parse_file(&p, &document.ast);
+
+	/*
+	if document.imports != nil {
+		delete(document.imports);
+		delete(document.package_name);
+	}
+	document.imports = make([]string, len(document.ast.imports));
+	document.package_name = document.ast.pkg_name;
+
+	for imp, index in document.ast.imports {
+
+		//collection specified
+		if i := strings.index(imp.fullpath, ":"); i != -1 {
+
+			collection := imp.fullpath[1:i];
+			p := imp.fullpath[i+1:len(imp.fullpath)-1];
+
+			dir, ok := config.collections[collection];
+
+			if !ok {
+				continue;
+			}
+
+			document.imports[index] = path.join(dir, p);
+
+		}
+
+		//relative
+		else {
+
+		}
+	}
+	*/
+
+	return current_errors[:], true;
+} \ No newline at end of file
diff --git a/src/server/log.odin b/src/server/log.odin
new file mode 100644
index 0000000..5ed007e
--- /dev/null
+++ b/src/server/log.odin
@@ -0,0 +1,64 @@
+package server
+
+import "core:fmt";
+import "core:strings";
+import "core:os";
+import "core:time";
+import "core:log";
+
+
+Default_Console_Logger_Opts :: log.Options{
+ .Level,
+ .Terminal_Color,
+ .Short_File_Path,
+ .Line,
+ .Procedure,
+} | log.Full_Timestamp_Opts;
+
+
+Lsp_Logger_Data :: struct {
+ writer: ^Writer,
+}
+
+create_lsp_logger :: proc(writer: ^Writer, lowest := log.Level.Debug, opt := Default_Console_Logger_Opts) -> log.Logger {
+ data := new(Lsp_Logger_Data);
+ data.writer = writer;
+ return log.Logger{lsp_logger_proc, data, lowest, opt};
+}
+
+destroy_lsp_logger :: proc(log: ^log.Logger) {
+ free(log.data);
+}
+
+//Logger backend that forwards log records to the lsp client as
+//window/logMessage notifications, prefixed with an optional timestamp header.
+lsp_logger_proc :: proc(logger_data: rawptr, level: log.Level, text: string, options: log.Options, location := #caller_location) {
+	data := cast(^Lsp_Logger_Data)logger_data;
+
+	backing: [1024]byte; //NOTE(Hoej): 1024 might be too much for a header backing, unless somebody has really long paths.
+	buf := strings.builder_from_slice(backing[:]);
+
+	when time.IS_SUPPORTED {
+		if log.Full_Timestamp_Opts & options != nil {
+			fmt.sbprint(&buf, "[");
+			t := time.now();
+			y, m, d := time.date(t);
+			h, min, s := time.clock(t);
+			if .Date in options { fmt.sbprintf(&buf, "%d-%02d-%02d ", y, m, d); }
+			if .Time in options { fmt.sbprintf(&buf, "%02d:%02d:%02d", h, min, s); }
+			fmt.sbprint(&buf, "] ");
+		}
+	}
+
+	//prepend the timestamp header built above; it was previously discarded
+	message := fmt.tprintf("%s%s", strings.to_string(buf), text);
+
+	notification := Notification {
+		jsonrpc = "2.0",
+		method = "window/logMessage",
+		params = NotificationLoggingParams {
+			type = 1,
+			message = message,
+		}
+	};
+
+	send_notification(notification, data.writer);
+}
+
diff --git a/src/server/reader.odin b/src/server/reader.odin
new file mode 100644
index 0000000..f421d67
--- /dev/null
+++ b/src/server/reader.odin
@@ -0,0 +1,64 @@
+package server
+
+import "core:os"
+import "core:mem"
+import "core:strings"
+
+ReaderFn :: proc(rawptr, [] byte) -> (int, int);
+
+Reader :: struct {
+ reader_fn: ReaderFn,
+ reader_context: rawptr,
+};
+
+make_reader :: proc(reader_fn: ReaderFn, reader_context: rawptr) -> Reader {
+ return Reader { reader_context = reader_context, reader_fn = reader_fn };
+}
+
+
+//Read exactly one byte from the underlying reader.
+//The second return value reports whether the read succeeded.
+read_u8 :: proc(reader: ^Reader) -> (u8, bool) {
+
+	buffer: [1] byte;
+
+	count, err := reader.reader_fn(reader.reader_context, buffer[:]);
+
+	if err == 0 && count == 1 {
+		return buffer[0], true;
+	}
+
+	return 0, false;
+}
+
+//Read single bytes into the builder until the delimiter has been consumed.
+//The delimiter itself is also written to the builder.
+read_until_delimiter :: proc(reader: ^Reader, delimiter: u8, builder: ^strings.Builder) -> bool {
+
+	for {
+
+		value, ok := read_u8(reader);
+
+		if !ok {
+			return false;
+		}
+
+		strings.write_byte(builder, value);
+
+		if value == delimiter {
+			break;
+		}
+	}
+
+	return true;
+}
+
+//Fill the entire slice from the reader; fails on an error or a short read.
+read_sized :: proc(reader: ^Reader, data: []u8) -> bool {
+
+	count, err := reader.reader_fn(reader.reader_context, data);
+
+	return err == 0 && count == len(data);
+}
+
+
+
diff --git a/src/server/requests.odin b/src/server/requests.odin
new file mode 100644
index 0000000..cd8e734
--- /dev/null
+++ b/src/server/requests.odin
@@ -0,0 +1,364 @@
+package server
+
+import "core:fmt"
+import "core:log"
+import "core:mem"
+import "core:os"
+import "core:strings"
+import "core:slice"
+import "core:strconv"
+import "core:encoding/json"
+
+import "shared:common"
+
+
+Header :: struct {
+ content_length: int,
+ content_type: string,
+};
+
+make_response_message :: proc(id: RequestId, params: ResponseParams) -> ResponseMessage {
+
+ return ResponseMessage {
+ jsonrpc = "2.0",
+ id = id,
+ result = params,
+ };
+
+}
+
+make_response_message_error :: proc(id: RequestId, error: ResponseError) -> ResponseMessageError {
+
+ return ResponseMessageError {
+ jsonrpc = "2.0",
+ id = id,
+ error = error,
+ };
+
+}
+
+/*
+	Read the lsp base protocol header: "Name: value\r\n" lines terminated by an
+	empty "\r\n" line. Succeeds only if a Content-Length header was present.
+*/
+read_and_parse_header :: proc(reader: ^Reader) -> (Header, bool) {
+
+	header: Header;
+
+	builder := strings.make_builder(context.temp_allocator);
+
+	found_content_length := false;
+
+	for true {
+
+		strings.reset_builder(&builder);
+
+		if !read_until_delimiter(reader, '\n', &builder) {
+			log.error("Failed to read with delimiter");
+			return header, false;
+		}
+
+		message := strings.to_string(builder);
+
+		//every line must end in "\r\n"; len < 2 (e.g. a bare "\n") previously indexed out of bounds
+		if len(message) < 2 || message[len(message)-2] != '\r' {
+			log.error("No carriage return");
+			return header, false;
+		}
+
+		//a bare "\r\n" terminates the header section
+		if len(message) == 2 {
+			break;
+		}
+
+		//split on the FIRST colon: the header name cannot contain one, the value may
+		index := strings.index_byte(message, ':');
+
+		if index == -1 {
+			log.error("Failed to find colon");
+			return header, false;
+		}
+
+		header_name := message[0 : index];
+
+		//value starts after ": " and excludes the trailing "\r\n";
+		//guard against degenerate lines like "Name:\r\n"
+		header_value: string;
+
+		if index + 2 <= len(message) - 2 {
+			header_value = message[index + 2 : len(message)-2];
+		}
+
+		if strings.compare(header_name, "Content-Length") == 0 {
+
+			if len(header_value) == 0 {
+				log.error("Header value has no length");
+				return header, false;
+			}
+
+			value, ok := strconv.parse_int(header_value);
+
+			if !ok {
+				log.error("Failed to parse content length value");
+				return header, false;
+			}
+
+			header.content_length = value;
+
+			found_content_length = true;
+
+		}
+
+		else if strings.compare(header_name, "Content-Type") == 0 {
+			if len(header_value) == 0 {
+				log.error("Header value has no length");
+				return header, false;
+			}
+		}
+
+	}
+
+	return header, found_content_length;
+}
+
+//Read exactly content_length bytes from the reader and parse them as json.
+read_and_parse_body :: proc(reader: ^Reader, header: Header) -> (json.Value, bool) {
+
+	value: json.Value;
+
+	data := make([]u8, header.content_length, context.temp_allocator);
+
+	if !read_sized(reader, data) {
+		log.error("Failed to read body");
+		return value, false;
+	}
+
+	err: json.Error;
+
+	value, err = json.parse(data = data, allocator = context.temp_allocator, parse_integers = true);
+
+	if err != json.Error.None {
+		log.error("Failed to parse body");
+		return value, false;
+	}
+
+	return value, true;
+}
+
+
+/*
+	Decode a jsonrpc message, dispatch it to the matching request/notification
+	handler, and report any handler error back to the client.
+	Returns false only when the message is not a json object at all.
+*/
+handle_request :: proc(request: json.Value, config: ^common.Config, writer: ^Writer) -> bool {
+
+	root, ok := request.value.(json.Object);
+
+	if !ok {
+		log.error("No root object");
+		return false;
+	}
+
+	id: RequestId;
+	id_value: json.Value;
+	id_value, ok = root["id"];
+
+	if ok {
+		#partial
+		switch v in id_value.value {
+		case json.String:
+			id = v;
+		case json.Integer:
+			id = v;
+		case:
+			id = 0;
+		}
+	}
+
+	//a missing or non-string method previously hit a panicking type assertion
+	method_value, found_method := root["method"];
+	method, is_string := method_value.value.(json.String);
+
+	if !found_method || !is_string {
+		log.error("No method in request");
+
+		response := make_response_message_error(
+			id = id,
+			error = ResponseError {code = .InvalidRequest, message = ""}
+		);
+
+		send_error(response, writer);
+
+		return true;
+	}
+
+	//dispatch with a switch instead of building (and leaking) a map on every request
+	fn: proc(json.Value, RequestId, ^common.Config, ^Writer) -> common.Error;
+
+	switch method {
+	case "initialize":              fn = request_initialize;
+	case "initialized":             fn = request_initialized;
+	case "shutdown":                fn = request_shutdown;
+	case "exit":                    fn = notification_exit;
+	case "textDocument/didOpen":    fn = notification_did_open;
+	case "textDocument/didChange":  fn = notification_did_change;
+	case "textDocument/didClose":   fn = notification_did_close;
+	case "textDocument/didSave":    fn = notification_did_save;
+	case "textDocument/definition": fn = request_definition;
+	case:                           fn = nil;
+	}
+
+	if fn == nil {
+		response := make_response_message_error(
+			id = id,
+			error = ResponseError {code = .MethodNotFound, message = ""}
+		);
+
+		send_error(response, writer);
+	}
+
+	else {
+		err := fn(root["params"], id, config, writer);
+
+		if err != .None {
+
+			response := make_response_message_error(
+				id = id,
+				error = ResponseError {code = err, message = ""}
+			);
+
+			send_error(response, writer);
+		}
+	}
+
+	return true;
+}
+
+request_initialize :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ params_object, ok := params.value.(json.Object);
+
+ if !ok {
+ return .ParseError;
+ }
+
+ initialize_params: RequestInitializeParams;
+
+ if unmarshal(params, initialize_params, context.temp_allocator) != .None {
+ return .ParseError;
+ }
+
+ config.workspace_folders = make([dynamic]common.WorkspaceFolder);
+
+ for s in initialize_params.workspaceFolders {
+ append_elem(&config.workspace_folders, s);
+ }
+
+ for format in initialize_params.capabilities.textDocument.hover.contentFormat {
+ if format == .Markdown {
+ config.hover_support_md = true;
+ }
+ }
+
+ response := make_response_message(
+ params = ResponseInitializeParams {
+ capabilities = ServerCapabilities {
+ textDocumentSync = 2, //incremental
+ definitionProvider = true,
+ },
+ },
+ id = id,
+ );
+
+ send_response(response, writer);
+
+ return .None;
+}
+
+request_initialized :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+ return .None;
+}
+
+request_shutdown :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ response := make_response_message(
+ params = nil,
+ id = id,
+ );
+
+ send_response(response, writer);
+
+ return .None;
+}
+
+request_definition :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ params_object, ok := params.value.(json.Object);
+
+ if !ok {
+ return .ParseError;
+ }
+
+ definition_params: TextDocumentPositionParams;
+
+ if unmarshal(params, definition_params, context.temp_allocator) != .None {
+ return .ParseError;
+ }
+
+
+ document := document_get(definition_params.textDocument.uri);
+
+ if document == nil {
+ return .InternalError;
+ }
+
+ location, ok2 := get_definition_location(document, definition_params.position);
+
+ if !ok2 {
+ log.error("Failed to get definition location");
+ return .InternalError;
+ }
+
+ response := make_response_message(
+ params = location,
+ id = id,
+ );
+
+ send_response(response, writer);
+
+
+ return .None;
+}
+
+notification_exit :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+ config.running = false;
+ return .None;
+}
+
+notification_did_open :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+ params_object, ok := params.value.(json.Object);
+
+ if !ok {
+ log.error("Failed to parse open document notification");
+ return .ParseError;
+ }
+
+ open_params: DidOpenTextDocumentParams;
+
+ if unmarshal(params, open_params, context.allocator) != .None {
+ log.error("Failed to parse open document notification");
+ return .ParseError;
+ }
+
+ return document_open(open_params.textDocument.uri, open_params.textDocument.text, config, writer);
+}
+
+//textDocument/didChange: apply incremental edits to an open document.
+notification_did_change :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+	params_object, ok := params.value.(json.Object);
+
+	if !ok {
+		return .ParseError;
+	}
+
+	change_params: DidChangeTextDocumentParams;
+
+	if unmarshal(params, change_params, context.temp_allocator) != .None {
+		return .ParseError;
+	}
+
+	//propagate the result instead of silently discarding apply failures
+	return document_apply_changes(change_params.textDocument.uri, change_params.contentChanges, config, writer);
+}
+
+//textDocument/didClose: release client ownership of the document.
+notification_did_close :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+	if _, is_object := params.value.(json.Object); !is_object {
+		return .ParseError;
+	}
+
+	close_params: DidCloseTextDocumentParams;
+
+	if unmarshal(params, close_params, context.temp_allocator) != .None {
+		return .ParseError;
+	}
+
+	return document_close(close_params.textDocument.uri);
+}
+
+notification_did_save :: proc(params: json.Value, id: RequestId, config: ^common.Config, writer: ^Writer) -> common.Error {
+
+
+
+ return .None;
+}
+
diff --git a/src/server/response.odin b/src/server/response.odin
new file mode 100644
index 0000000..bd7a77e
--- /dev/null
+++ b/src/server/response.odin
@@ -0,0 +1,68 @@
+package server
+
+
+import "core:fmt"
+import "core:encoding/json"
+
+/*
+	Serialize any jsonrpc message and write it with the lsp base protocol framing:
+	"Content-Length: <n>\r\n\r\n" followed by the json payload.
+	Shared by the three wrappers below, which previously duplicated this body.
+*/
+send_message :: proc(msg: $T, writer: ^Writer) -> bool {
+
+	data, error := json.marshal(msg, context.temp_allocator);
+
+	//check the marshal result before formatting a header from its length
+	if error != json.Marshal_Error.None {
+		return false;
+	}
+
+	header := fmt.tprintf("Content-Length: {}\r\n\r\n", len(data));
+
+	if !write_sized(writer, transmute([]u8)header) {
+		return false;
+	}
+
+	if !write_sized(writer, data) {
+		return false;
+	}
+
+	return true;
+}
+
+send_notification :: proc(notification: Notification, writer: ^Writer) -> bool {
+	return send_message(notification, writer);
+}
+
+send_response :: proc(response: ResponseMessage, writer: ^Writer) -> bool {
+	return send_message(response, writer);
+}
+
+send_error :: proc(response: ResponseMessageError, writer: ^Writer) -> bool {
+	return send_message(response, writer);
+}
diff --git a/src/server/types.odin b/src/server/types.odin
new file mode 100644
index 0000000..262f464
--- /dev/null
+++ b/src/server/types.odin
@@ -0,0 +1,154 @@
+package server
+
+import "core:encoding/json"
+
+import "shared:common"
+
+/*
+ General types
+*/
+
+//TODO(Daniel, move some of the more specific structs to their appropriate place)
+
+RequestId :: union {
+ string,
+ i64,
+};
+
+ResponseParams :: union {
+ ResponseInitializeParams,
+ rawptr,
+ common.Location,
+};
+
+ResponseMessage :: struct {
+ jsonrpc: string,
+ id: RequestId,
+ result: ResponseParams,
+};
+
+ResponseMessageError :: struct {
+ jsonrpc: string,
+ id: RequestId,
+ error: ResponseError,
+};
+
+ResponseError :: struct {
+ code: common.Error,
+ message: string,
+};
+
+NotificationLoggingParams :: struct {
+ type: int,
+ message: string,
+};
+
+NotificationPublishDiagnosticsParams :: struct {
+ uri: string,
+ diagnostics: [] Diagnostic,
+};
+
+NotificationParams :: union {
+ NotificationLoggingParams,
+ NotificationPublishDiagnosticsParams,
+};
+
+Notification :: struct {
+ jsonrpc: string,
+ method: string,
+ params: NotificationParams
+};
+
+ResponseInitializeParams :: struct {
+ capabilities: ServerCapabilities,
+};
+
+RequestInitializeParams :: struct {
+ trace: string,
+ workspaceFolders: [dynamic] common.WorkspaceFolder,
+ capabilities: ClientCapabilities,
+};
+
+//Can't really follow the uppercase style for enums when i need to represent it as text as well
+MarkupKind :: enum {
+ Plaintext,
+ Markdown,
+};
+
+ServerCapabilities :: struct {
+ textDocumentSync: int,
+ definitionProvider: bool,
+};
+
+CompletionClientCapabilities :: struct {
+
+};
+
+HoverClientCapabilities :: struct {
+ dynamicRegistration: bool,
+ contentFormat: [dynamic] MarkupKind,
+};
+
+TextDocumentClientCapabilities :: struct {
+ completion: CompletionClientCapabilities,
+ hover: HoverClientCapabilities,
+};
+
+ClientCapabilities :: struct {
+ textDocument: TextDocumentClientCapabilities,
+};
+
+TextDocumentContentChangeEvent :: struct {
+ range: common.Range,
+ text: string,
+};
+
+Version :: union {
+ int,
+ json.Null,
+};
+
+VersionedTextDocumentIdentifier :: struct {
+ uri: string,
+};
+
+TextDocumentIdentifier :: struct {
+ uri: string,
+};
+
+TextDocumentItem :: struct {
+ uri: string,
+ text: string,
+};
+
+DiagnosticSeverity :: enum {
+ Error = 1,
+ Warning = 2,
+ Information = 3,
+ Hint = 4,
+};
+
+Diagnostic :: struct {
+ range: common.Range,
+ severity: DiagnosticSeverity,
+ code: string,
+ message: string,
+};
+
+DidOpenTextDocumentParams :: struct {
+ textDocument: TextDocumentItem,
+};
+
+DidChangeTextDocumentParams :: struct {
+ textDocument: VersionedTextDocumentIdentifier,
+ contentChanges: [dynamic] TextDocumentContentChangeEvent,
+};
+
+DidCloseTextDocumentParams :: struct {
+ textDocument: TextDocumentIdentifier,
+};
+
+TextDocumentPositionParams :: struct {
+ textDocument: TextDocumentIdentifier,
+ position: common.Position,
+}; \ No newline at end of file
diff --git a/src/server/unmarshal.odin b/src/server/unmarshal.odin
new file mode 100644
index 0000000..e5ca619
--- /dev/null
+++ b/src/server/unmarshal.odin
@@ -0,0 +1,129 @@
+package server
+
+import "core:encoding/json"
+import "core:strings"
+import "core:runtime"
+import "core:mem"
+import "core:fmt"
+
+//Note(Daniel, investigate if you can use some sort of attribute not to be forced to have the same variable name as the json name)
+
+unmarshal :: proc(json_value: json.Value, v: any, allocator := context.allocator) -> json.Marshal_Error {
+
+ using runtime;
+
+ if v == nil {
+ return .None;
+ }
+
+ type_info := type_info_base(type_info_of(v.id));
+
+ #partial
+ switch j in json_value.value {
+ case json.Object:
+ #partial switch variant in type_info.variant {
+ case Type_Info_Struct:
+ for field, i in variant.names {
+ a := any{rawptr(uintptr(v.data) + uintptr(variant.offsets[i])), variant.types[i].id};
+ if ret := unmarshal(j[field], a, allocator); ret != .None {
+ return ret;
+ }
+ }
+ }
+ case json.Array:
+ #partial switch variant in type_info.variant {
+ case Type_Info_Dynamic_Array:
+ array := (^mem.Raw_Dynamic_Array)(v.data);
+ if array.data == nil {
+ array.data = mem.alloc(len(j)*variant.elem_size, variant.elem.align, allocator);
+ array.len = len(j);
+ array.cap = len(j);
+ array.allocator = allocator;
+ }
+ else {
+ return .Invalid_Data;
+ }
+
+ for i in 0..<array.len {
+ a := any{rawptr(uintptr(array.data) + uintptr(variant.elem_size * i)), variant.elem.id};
+
+ if ret := unmarshal(j[i], a, allocator); ret != .None {
+ return ret;
+ }
+ }
+
+ case:
+ return .Unsupported_Type;
+ }
+ case json.String:
+ #partial switch variant in type_info.variant {
+ case Type_Info_String:
+ str := (^string)(v.data);
+ str^ = strings.clone(j, allocator);
+
+ case Type_Info_Enum:
+ for name, i in variant.names {
+
+ lower_name := strings.to_lower(name, allocator);
+ lower_j := strings.to_lower(string(j), allocator);
+
+ if lower_name == lower_j {
+ mem.copy(v.data, &variant.values[i], size_of(variant.base));
+ }
+
+ delete(lower_name, allocator);
+ delete(lower_j, allocator);
+ }
+ }
+ case json.Integer:
+ #partial switch variant in &type_info.variant {
+ case Type_Info_Integer:
+ switch type_info.size {
+ case 8:
+ tmp := i64(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 4:
+ tmp := i32(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 2:
+ tmp := i16(j);
+ mem.copy(v.data, &tmp, type_info.size);
+
+ case 1:
+ tmp := i8(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case:
+ return .Invalid_Data;
+ }
+ case Type_Info_Union:
+ tag_ptr := uintptr(v.data) + variant.tag_offset;
+ }
+ case json.Float:
+ if _, ok := type_info.variant.(Type_Info_Float); ok {
+ switch type_info.size {
+ case 8:
+ tmp := f64(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case 4:
+ tmp := f32(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ case:
+ return .Invalid_Data;
+ }
+
+ }
+ case json.Null:
+ case json.Boolean :
+ if _, ok := type_info.variant.(Type_Info_Boolean); ok {
+ tmp := bool(j);
+ mem.copy(v.data, &tmp, type_info.size);
+ }
+ case:
+ return .Unsupported_Type;
+ }
+
+ return .None;
+}
+
diff --git a/src/server/workspace.odin b/src/server/workspace.odin
new file mode 100644
index 0000000..abb4e43
--- /dev/null
+++ b/src/server/workspace.odin
@@ -0,0 +1 @@
+package server
diff --git a/src/server/writer.odin b/src/server/writer.odin
new file mode 100644
index 0000000..3c82451
--- /dev/null
+++ b/src/server/writer.odin
@@ -0,0 +1,29 @@
+package server
+
+import "core:os"
+import "core:mem"
+import "core:fmt"
+import "core:strings"
+
+WriterFn :: proc(rawptr, [] byte) -> (int, int);
+
+Writer :: struct {
+ writer_fn: WriterFn,
+ writer_context: rawptr,
+};
+
+make_writer :: proc(writer_fn: WriterFn, writer_context: rawptr) -> Writer {
+ return Writer { writer_context = writer_context, writer_fn = writer_fn };
+}
+
+//Write the entire slice to the writer; fails on an error or a short write.
+write_sized :: proc(writer: ^Writer, data: []byte) -> bool {
+
+	count, err := writer.writer_fn(writer.writer_context, data);
+
+	return err == 0 && count == len(data);
+}
+
+