From bce2de50293048ffa835715af39bf459e408f54e Mon Sep 17 00:00:00 2001
From: Techatrix <19954306+Techatrix@users.noreply.github.com>
Date: Sat, 30 Dec 2023 05:40:21 +0100
Subject: [PATCH] move global error/enum completions to `completions.zig`

This removes the dependency on any LSP types from the DocumentScope and
DocumentStore. Behavior is unchanged, except that global error set
completions now include doc comments (previously they were not shown).
---
 src/DocumentScope.zig             | 112 +++++++++++++-----------------
 src/DocumentStore.zig             |  37 ----------
 src/features/completions.zig      | 106 +++++++++++++++++++++++++++-
 tests/lsp_features/completion.zig | 103 +++++++++++++++++++++++++++--
 4 files changed, 249 insertions(+), 109 deletions(-)

diff --git a/src/DocumentScope.zig b/src/DocumentScope.zig
index 88361656f..42e7e85a5 100644
--- a/src/DocumentScope.zig
+++ b/src/DocumentScope.zig
@@ -1,7 +1,6 @@
 const std = @import("std");
 const ast = @import("ast.zig");
 const Ast = std.zig.Ast;
-const types = @import("lsp.zig");
 const tracy = @import("tracy.zig");
 const offsets = @import("offsets.zig");
 const Analyser = @import("analysis.zig");
@@ -14,31 +13,36 @@ declarations: std.MultiArrayList(Declaration) = .{},
 /// used for looking up a child declaration in a given scope
 declaration_lookup_map: DeclarationLookupMap = .{},
 extra: std.ArrayListUnmanaged(u32) = .{},
-// TODO: make this lighter;
-// error completions: just store the name, the logic has no other moving parts
-// enum completions: same, but determine whether to store docs somewhere or fetch them on-demand (on-demand likely better)
-error_completions: CompletionSet = .{},
-enum_completions: CompletionSet = .{},
-
-const CompletionContext = struct {
-    pub fn hash(self: @This(), item: types.CompletionItem) u32 {
-        _ = self;
-        return @truncate(std.hash.Wyhash.hash(0, item.label));
-    }
+/// All identifier tokens that are in error sets.
+/// When there are multiple error sets that contain the same error, only one of them is stored.
+/// A token that has a doc comment takes priority.
+/// This means that if there are multiple errors with the same name, only one of them is included.
+global_error_set: IdentifierSet = .{},
+/// All identifier tokens that are in enums.
+/// When there are multiple enums that contain the same field name, only one of them is stored.
+/// A token that has a doc comment takes priority.
+/// This means that if there are multiple enum fields with the same name, only one of them is included.
+global_enum_set: IdentifierSet = .{},
+
+/// Stores a set of identifier tokens with unique names
+pub const IdentifierSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, IdentifierTokenContext, true);
+
+pub const IdentifierTokenContext = struct {
+    tree: Ast,
 
-    pub fn eql(self: @This(), a: types.CompletionItem, b: types.CompletionItem, b_index: usize) bool {
-        _ = self;
+    pub fn eql(self: @This(), a: Ast.TokenIndex, b: Ast.TokenIndex, b_index: usize) bool {
         _ = b_index;
-        return std.mem.eql(u8, a.label, b.label);
+        if (a == b) return true;
+        const a_name = offsets.identifierTokenToNameSlice(self.tree, a);
+        const b_name = offsets.identifierTokenToNameSlice(self.tree, b);
+        return std.mem.eql(u8, a_name, b_name);
     }
-};
 
-pub const CompletionSet = std.ArrayHashMapUnmanaged(
-    types.CompletionItem,
-    void,
-    CompletionContext,
-    false,
-);
+    pub fn hash(self: @This(), token: Ast.TokenIndex) u32 {
+        const name = offsets.identifierTokenToNameSlice(self.tree, token);
+        return std.array_hash_map.hashString(name);
+    }
+};
 
 /// Every `index` inside this `ArrayhashMap` is equivalent to a `Declaration.Index`
 /// This means that every declaration is only the child of a single scope
@@ -351,23 +355,8 @@ pub fn deinit(scope: *DocumentScope, allocator: std.mem.Allocator) void {
     scope.declaration_lookup_map.deinit(allocator);
     scope.extra.deinit(allocator);
 
-    for (scope.enum_completions.keys()) |item| {
-        if (item.detail) |detail| allocator.free(detail);
-        switch (item.documentation orelse continue) {
-            .string => |str| allocator.free(str),
-            .MarkupContent => |content| allocator.free(content.value),
-        }
-    }
-    scope.enum_completions.deinit(allocator);
-
-    for (scope.error_completions.keys()) |item| {
-        if (item.detail) |detail| allocator.free(detail);
-        switch (item.documentation orelse continue) {
-            .string => |str| allocator.free(str),
-            .MarkupContent => |content| allocator.free(content.value),
-        }
-    }
-    scope.error_completions.deinit(allocator);
+    scope.global_enum_set.deinit(allocator);
+    scope.global_error_set.deinit(allocator);
 }
 
 fn locToSmallLoc(loc: offsets.Loc) Scope.SmallLoc {
@@ -665,28 +654,27 @@ noinline fn walkContainerDecl(
                     continue;
                 }
 
-                if (token_tags[main_tokens[decl]] != .identifier) {
+                const main_token = main_tokens[decl];
+                if (token_tags[main_token] != .identifier) {
                     // TODO this code path should not be reachable
                     continue;
                 }
-                const name = offsets.identifierTokenToNameSlice(tree, main_tokens[decl]);
+                const name = offsets.identifierTokenToNameSlice(tree, main_token);
                 try scope.pushDeclaration(name, .{ .ast_node = decl }, .field);
 
                 if (is_enum_or_tagged_union) {
                     if (std.mem.eql(u8, name, "_")) continue;
 
-                    const doc = try Analyser.getDocComments(allocator, tree, decl);
-                    errdefer if (doc) |d| allocator.free(d);
-                    // TODO: Fix allocation; just store indices
-                    const gop_res = try context.doc_scope.enum_completions.getOrPut(allocator, .{
-                        .label = name,
-                        .kind = .EnumMember,
-                        .insertText = name,
-                        .insertTextFormat = .PlainText,
-                        .documentation = if (doc) |d| .{ .MarkupContent = types.MarkupContent{ .kind = .markdown, .value = d } } else null,
-                    });
-                    if (gop_res.found_existing) {
-                        if (doc) |d| allocator.free(d);
+                    const gop = try context.doc_scope.global_enum_set.getOrPutContext(
+                        context.allocator,
+                        main_token,
+                        IdentifierTokenContext{ .tree = tree },
+                    );
+                    if (!gop.found_existing) {
+                        gop.key_ptr.* = main_token;
+                    } else if (gop.found_existing and token_tags[main_token - 1] == .doc_comment) {
+                        // a token with a doc comment takes priority.
+                        gop.key_ptr.* = main_token;
                     }
                 }
             },
@@ -754,16 +742,16 @@ noinline fn walkErrorSetNode(
             .identifier => {
                 const name = offsets.identifierTokenToNameSlice(tree, tok_i);
                 try scope.pushDeclaration(name, .{ .error_token = tok_i }, .other);
-                const gop = try context.doc_scope.error_completions.getOrPut(context.allocator, .{
-                    .label = name,
-                    .kind = .Constant,
-                    //.detail =
-                    .insertText = name,
-                    .insertTextFormat = .PlainText,
-                });
-                // TODO: use arena
+                const gop = try context.doc_scope.global_error_set.getOrPutContext(
+                    context.allocator,
+                    tok_i,
+                    IdentifierTokenContext{ .tree = tree },
+                );
                 if (!gop.found_existing) {
-                    gop.key_ptr.detail = try std.fmt.allocPrint(context.allocator, "error.{s}", .{name});
+                    gop.key_ptr.* = tok_i;
+                } else if (gop.found_existing and token_tags[tok_i - 1] == .doc_comment) {
+                    // a token with a doc comment takes priority.
+                    gop.key_ptr.* = tok_i;
                 }
             },
             else => {},
diff --git a/src/DocumentStore.zig b/src/DocumentStore.zig
index 12fc54ff7..e1533818b 100644
--- a/src/DocumentStore.zig
+++ b/src/DocumentStore.zig
@@ -1,6 +1,5 @@
 const std = @import("std");
 const builtin = @import("builtin");
-const types = @import("lsp.zig");
 const URI = @import("uri.zig");
 const analysis = @import("analysis.zig");
 const offsets = @import("offsets.zig");
@@ -1456,39 +1455,3 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand
         };
     }
 }
-
-/// **Thread safe** takes a shared lock
-fn tagStoreCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle, comptime name: []const u8) error{OutOfMemory}![]types.CompletionItem {
-    const tracy_zone = tracy.trace(@src());
-    defer tracy_zone.end();
-
-    var dependencies = std.ArrayListUnmanaged(Uri){};
-    try dependencies.append(arena, handle.uri);
-    try self.collectDependenciesInternal(arena, handle, &dependencies, true);
-
-    // TODO Better solution for deciding what tags to include
-    var result_set = DocumentScope.CompletionSet{};
-
-    for (dependencies.items) |uri| {
-        // not every dependency is loaded which results in incomplete completion
-        const hdl = self.getHandle(uri) orelse continue; // takes a shared lock
-        const document_scope = try hdl.getDocumentScope();
-        const curr_set = @field(document_scope, name);
-        try result_set.ensureUnusedCapacity(arena, curr_set.count());
-        for (curr_set.keys()) |completion| {
-            result_set.putAssumeCapacity(completion, {});
-        }
-    }
-
-    return result_set.keys();
-}
-
-/// **Thread safe** takes a shared lock
-pub fn errorCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle) error{OutOfMemory}![]types.CompletionItem {
-    return try self.tagStoreCompletionItems(arena, handle, "error_completions");
-}
-
-/// **Thread safe** takes a shared lock
-pub fn enumCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle) error{OutOfMemory}![]types.CompletionItem {
-    return try self.tagStoreCompletionItems(arena, handle, "enum_completions");
-}
diff --git a/src/features/completions.zig b/src/features/completions.zig
index c04737c0a..3b5ac646a 100644
--- a/src/features/completions.zig
+++ b/src/features/completions.zig
@@ -738,7 +738,7 @@ fn completeError(server: *Server, arena: std.mem.Allocator, handle: *DocumentSto
     const tracy_zone = tracy.trace(@src());
     defer tracy_zone.end();
 
-    return try server.document_store.errorCompletionItems(arena, handle.*);
+    return try globalSetCompletions(&server.document_store, arena, handle, .error_set);
 }
 
 fn kindToSortScore(kind: types.CompletionItemKind) ?[]const u8 {
@@ -808,7 +808,7 @@ fn completeDot(document_store: *DocumentStore, analyser: *Analyser, arena: std.m
 
     if (token_tags[dot_token_index - 1] == .number_literal or token_tags[dot_token_index - 1] != .equal) return &.{}; // `var enum_val = .` or the get*Context logic failed because of syntax errors (parser didn't create the necessary node(s))
 
-    const enum_completions = try document_store.enumCompletionItems(arena, handle.*);
+    const enum_completions = try globalSetCompletions(document_store, arena, handle, .enum_set);
     return enum_completions;
 }
 
@@ -823,7 +823,7 @@ fn completeFileSystemStringLiteral(
     handle: DocumentStore.Handle,
     pos_context: Analyser.PositionContext,
 ) ![]types.CompletionItem {
-    var completions: DocumentScope.CompletionSet = .{};
+    var completions: CompletionSet = .{};
 
     const loc = switch (pos_context) {
         .import_string_literal,
@@ -1037,6 +1037,106 @@ pub fn completionAtIndex(server: *Server, analyser: *Analyser, arena: std.mem.Al
     return .{ .isIncomplete = false, .items = completions };
 }
 
+// <--------------------------------------------------------------------------->
+// global error set / enum field set
+// <--------------------------------------------------------------------------->
+
+pub const CompletionSet = std.ArrayHashMapUnmanaged(types.CompletionItem, void, CompletionContext, false);
+
+const CompletionContext = struct {
+    pub fn hash(self: @This(), item: types.CompletionItem) u32 {
+        _ = self;
+        return std.array_hash_map.hashString(item.label);
+    }
+
+    pub fn eql(self: @This(), a: types.CompletionItem, b: types.CompletionItem, b_index: usize) bool {
+        _ = self;
+        _ = b_index;
+        return std.mem.eql(u8, a.label, b.label);
+    }
+};
+
+const CompletionNameAdapter = struct {
+    pub fn hash(ctx: @This(), name: []const u8) u32 {
+        _ = ctx;
+        return std.array_hash_map.hashString(name);
+    }
+
+    pub fn eql(ctx: @This(), a: []const u8, b: types.CompletionItem, b_map_index: usize) bool {
+        _ = ctx;
+        _ = b_map_index;
+        return std.mem.eql(u8, a, b.label);
+    }
+};
+
+/// Every `DocumentScope` stores a set of all error names and a set of all enum field names.
+/// This function collects all of these sets from all dependencies and returns them as completions.
+fn globalSetCompletions(
+    store: *DocumentStore,
+    arena: std.mem.Allocator,
+    handle: *DocumentStore.Handle,
+    kind: enum { error_set, enum_set },
+) error{OutOfMemory}![]types.CompletionItem {
+    const tracy_zone = tracy.trace(@src());
+    defer tracy_zone.end();
+
+    var dependencies = std.ArrayListUnmanaged(DocumentStore.Uri){};
+    try dependencies.append(arena, handle.uri);
+    try store.collectDependencies(arena, handle.*, &dependencies);
+
+    // TODO Better solution for deciding what tags to include
+    var result_set = CompletionSet{};
+
+    for (dependencies.items) |uri| {
+        // not every dependency is loaded which results in incomplete completion
+        const dependency_handle = store.getHandle(uri) orelse continue;
+        const document_scope: DocumentScope = try dependency_handle.getDocumentScope();
+        const curr_set: DocumentScope.IdentifierSet = switch (kind) {
+            .error_set => @field(document_scope, "global_error_set"),
+            .enum_set => @field(document_scope, "global_enum_set"),
+        };
+        try result_set.ensureUnusedCapacity(arena, curr_set.count());
+        for (curr_set.keys()) |identifier_token| {
+            const name = offsets.identifierTokenToNameSlice(dependency_handle.tree, identifier_token);
+
+            const gop = result_set.getOrPutAssumeCapacityAdapted(
+                name,
+                CompletionNameAdapter{},
+            );
+
+            if (!gop.found_existing) {
+                gop.key_ptr.* = types.CompletionItem{
+                    .label = name,
+                    // TODO check if client supports label_details_support
+                    .detail = switch (kind) {
+                        .error_set => try std.fmt.allocPrint(arena, "error.{}", .{std.zig.fmtId(name)}),
+                        .enum_set => null,
+                    },
+                    .kind = switch (kind) {
+                        .error_set => .Constant,
+                        .enum_set => .EnumMember,
+                    },
+                    .documentation = null, // will be set below
+                };
+            }
+
+            if (gop.key_ptr.documentation == null) {
+                if (try Analyser.getDocCommentsBeforeToken(arena, dependency_handle.tree, identifier_token)) |documentation| {
+                    gop.key_ptr.documentation = .{
+                        .MarkupContent = types.MarkupContent{
+                            // TODO check if client supports markdown
+                            .kind = .markdown,
+                            .value = documentation,
+                        },
+                    };
+                }
+            }
+        }
+    }
+
+    return result_set.keys();
+}
+
 // <--------------------------------------------------------------------------->
 // completions/enum_literal.zig staging area
 // <--------------------------------------------------------------------------->
diff --git a/tests/lsp_features/completion.zig b/tests/lsp_features/completion.zig
index 22d15127c..15368c6c1 100644
--- a/tests/lsp_features/completion.zig
+++ b/tests/lsp_features/completion.zig
@@ -934,6 +934,44 @@ test "completion - enum" {
     });
 }
 
+test "completion - global enum set" {
+    try testCompletion(
+        \\const SomeError = error{ e };
+        \\const E1 = enum {
+        \\    foo,
+        \\    bar,
+        \\};
+        \\const E2 = enum {
+        \\    baz,
+        \\    ///hello
+        \\    qux,
+        \\};
+        \\const baz = .
+    , &.{
+        .{ .label = "foo", .kind = .EnumMember },
+        .{ .label = "bar", .kind = .EnumMember },
+        .{ .label = "baz", .kind = .EnumMember },
+        .{ .label = "qux", .kind = .EnumMember, .documentation = "hello" },
+    });
+    try testCompletion(
+        \\const SomeError = error{ e };
+        \\const Enum1 = enum {
+        \\    ///hello world
+        \\    foo,
+        \\    bar,
+        \\};
+        \\const Enum2 = enum {
+        \\    foo,
+        \\    ///hallo welt
+        \\    bar,
+        \\};
+        \\const baz = .
+ , &.{ + .{ .label = "foo", .kind = .EnumMember, .documentation = "hello world" }, + .{ .label = "bar", .kind = .EnumMember, .documentation = "hallo welt" }, + }); +} + test "completion - switch cases" { // Because current logic is to list all enums if all else fails, // the following tests include an extra enum to ensure that we're not just 'getting lucky' @@ -1059,24 +1097,75 @@ test "completion - switch cases" { test "completion - error set" { try testCompletion( \\const E = error { - \\ Foo, - \\ Bar, + \\ foo, + \\ bar, \\}; - \\const baz = error. + \\const baz = E. , &.{ - .{ .label = "Foo", .kind = .Constant, .detail = "error.Foo" }, - .{ .label = "Bar", .kind = .Constant, .detail = "error.Bar" }, + .{ .label = "foo", .kind = .Constant, .detail = "error.foo" }, + .{ .label = "bar", .kind = .Constant, .detail = "error.bar" }, }); + try testCompletion( + \\const E1 = error { + \\ foo, + \\ bar, + \\}; + \\const E2 = error { + \\ baz, + \\ ///hello + \\ qux, + \\}; + \\const baz = E2. + , &.{ + .{ .label = "baz", .kind = .Constant, .detail = "error.baz" }, + .{ .label = "qux", .kind = .Constant, .detail = "error.qux", .documentation = "hello" }, + }); +} +test "completion - global error set" { try testCompletion( - \\const E = error { + \\const SomeEnum = enum { e }; + \\const Error1 = error { \\ foo, \\ bar, \\}; - \\const baz = E. + \\const Error2 = error { + \\ baz, + \\ ///hello + \\ qux, + \\}; + \\const baz = error. , &.{ .{ .label = "foo", .kind = .Constant, .detail = "error.foo" }, .{ .label = "bar", .kind = .Constant, .detail = "error.bar" }, + .{ .label = "baz", .kind = .Constant, .detail = "error.baz" }, + .{ .label = "qux", .kind = .Constant, .detail = "error.qux", .documentation = "hello" }, + }); + try testCompletion( + \\const SomeEnum = enum { e }; + \\const Error1 = error { + \\ ///hello world + \\ foo, + \\ bar, + \\}; + \\const Error2 = error { + \\ foo, + \\ ///hallo welt + \\ bar, + \\}; + \\const baz = error. + , &.{ + .{ .label = "foo", .kind = .Constant, .detail = "error.foo", .documentation = "hello world" }, + .{ .label = "bar", .kind = .Constant, .detail = "error.bar", .documentation = "hallo welt" }, + }); + try testCompletion( + \\const Error = error { + \\ ///hello world + \\ @"some name", + \\}; + \\const baz = error. + , &.{ + .{ .label = "some name", .kind = .Constant, .detail = "error.@\"some name\"", .documentation = "hello world" }, }); }