move global error/enum completions from DocumentScope.zig to `completions.zig`

This removes the dependency on any LSP types from the DocumentScope and DocumentStore.
Behavior is unchanged, except that global error set completions previously did not show doc comments; they now do.
Techatrix committed Dec 30, 2023
1 parent 5c0bebe commit 2f03d66
Showing 4 changed files with 249 additions and 109 deletions.
112 changes: 50 additions & 62 deletions src/DocumentScope.zig
@@ -1,7 +1,6 @@
const std = @import("std");
const ast = @import("ast.zig");
const Ast = std.zig.Ast;
const types = @import("lsp.zig");
const tracy = @import("tracy.zig");
const offsets = @import("offsets.zig");
const Analyser = @import("analysis.zig");
@@ -14,31 +13,36 @@ declarations: std.MultiArrayList(Declaration) = .{},
/// used for looking up a child declaration in a given scope
declaration_lookup_map: DeclarationLookupMap = .{},
extra: std.ArrayListUnmanaged(u32) = .{},
// TODO: make this lighter;
// error completions: just store the name, the logic has no other moving parts
// enum completions: same, but determine whether to store docs somewhere or fetch them on-demand (on-demand likely better)
error_completions: CompletionSet = .{},
enum_completions: CompletionSet = .{},

const CompletionContext = struct {
pub fn hash(self: @This(), item: types.CompletionItem) u32 {
_ = self;
return @truncate(std.hash.Wyhash.hash(0, item.label));
}
/// All identifier tokens that are in error sets.
/// When multiple error sets contain the same error name, only one token is stored;
/// a token that has a doc comment takes priority.
global_error_set: IdentifierSet = .{},
/// All identifier tokens that are in enums.
/// When multiple enums contain the same field name, only one token is stored;
/// a token that has a doc comment takes priority.
global_enum_set: IdentifierSet = .{},

/// Stores a set of identifier tokens with unique names
pub const IdentifierSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, IdentifierTokenContext, true);

pub const IdentifierTokenContext = struct {
tree: Ast,

pub fn eql(self: @This(), a: types.CompletionItem, b: types.CompletionItem, b_index: usize) bool {
_ = self;
pub fn eql(self: @This(), a: Ast.TokenIndex, b: Ast.TokenIndex, b_index: usize) bool {
_ = b_index;
return std.mem.eql(u8, a.label, b.label);
if (a == b) return true;
const a_name = offsets.identifierTokenToNameSlice(self.tree, a);
const b_name = offsets.identifierTokenToNameSlice(self.tree, b);
return std.mem.eql(u8, a_name, b_name);
}
};

pub const CompletionSet = std.ArrayHashMapUnmanaged(
types.CompletionItem,
void,
CompletionContext,
false,
);
pub fn hash(self: @This(), token: Ast.TokenIndex) u32 {
const name = offsets.identifierTokenToNameSlice(self.tree, token);
return std.array_hash_map.hashString(name);
}
};

/// Every `index` inside this `ArrayHashMap` is equivalent to a `Declaration.Index`
/// This means that every declaration is only the child of a single scope
@@ -351,23 +355,8 @@ pub fn deinit(scope: *DocumentScope, allocator: std.mem.Allocator) void {
scope.declaration_lookup_map.deinit(allocator);
scope.extra.deinit(allocator);

for (scope.enum_completions.keys()) |item| {
if (item.detail) |detail| allocator.free(detail);
switch (item.documentation orelse continue) {
.string => |str| allocator.free(str),
.MarkupContent => |content| allocator.free(content.value),
}
}
scope.enum_completions.deinit(allocator);

for (scope.error_completions.keys()) |item| {
if (item.detail) |detail| allocator.free(detail);
switch (item.documentation orelse continue) {
.string => |str| allocator.free(str),
.MarkupContent => |content| allocator.free(content.value),
}
}
scope.error_completions.deinit(allocator);
scope.global_enum_set.deinit(allocator);
scope.global_error_set.deinit(allocator);
}

fn locToSmallLoc(loc: offsets.Loc) Scope.SmallLoc {
@@ -665,28 +654,27 @@ noinline fn walkContainerDecl(
continue;
}

if (token_tags[main_tokens[decl]] != .identifier) {
const main_token = main_tokens[decl];
if (token_tags[main_token] != .identifier) {
// TODO this code path should not be reachable
continue;
}
const name = offsets.identifierTokenToNameSlice(tree, main_tokens[decl]);
const name = offsets.identifierTokenToNameSlice(tree, main_token);
try scope.pushDeclaration(name, .{ .ast_node = decl }, .field);

if (is_enum_or_tagged_union) {
if (std.mem.eql(u8, name, "_")) continue;

const doc = try Analyser.getDocComments(allocator, tree, decl);
errdefer if (doc) |d| allocator.free(d);
// TODO: Fix allocation; just store indices
const gop_res = try context.doc_scope.enum_completions.getOrPut(allocator, .{
.label = name,
.kind = .EnumMember,
.insertText = name,
.insertTextFormat = .PlainText,
.documentation = if (doc) |d| .{ .MarkupContent = types.MarkupContent{ .kind = .markdown, .value = d } } else null,
});
if (gop_res.found_existing) {
if (doc) |d| allocator.free(d);
const gop = try context.doc_scope.global_enum_set.getOrPutContext(
context.allocator,
main_token,
IdentifierTokenContext{ .tree = tree },
);
if (!gop.found_existing) {
gop.key_ptr.* = main_token;
} else if (gop.found_existing and token_tags[main_token - 1] == .doc_comment) {
// a token with a doc comment takes priority.
gop.key_ptr.* = main_token;
}
}
},
@@ -754,16 +742,16 @@ noinline fn walkErrorSetNode(
.identifier => {
const name = offsets.identifierTokenToNameSlice(tree, tok_i);
try scope.pushDeclaration(name, .{ .error_token = tok_i }, .other);
const gop = try context.doc_scope.error_completions.getOrPut(context.allocator, .{
.label = name,
.kind = .Constant,
//.detail =
.insertText = name,
.insertTextFormat = .PlainText,
});
// TODO: use arena
const gop = try context.doc_scope.global_error_set.getOrPutContext(
context.allocator,
tok_i,
IdentifierTokenContext{ .tree = tree },
);
if (!gop.found_existing) {
gop.key_ptr.detail = try std.fmt.allocPrint(context.allocator, "error.{s}", .{name});
gop.key_ptr.* = tok_i;
} else if (gop.found_existing and token_tags[tok_i - 1] == .doc_comment) {
// a token with a doc comment takes priority.
gop.key_ptr.* = tok_i;
}
},
else => {},
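The new `IdentifierSet` is an adapted array hash map: it stores bare `Ast.TokenIndex` keys but hashes and compares them by the identifier's name, so two tokens that spell the same error or enum field collapse into a single entry. Below is a minimal, self-contained sketch of that pattern — not ZLS's exact code: `TokenNameContext` and `tree.tokenSlice` stand in for `IdentifierTokenContext` and `offsets.identifierTokenToNameSlice` (which also handles `@"..."`-quoted identifiers), and the token indices in the test are hand-counted for that exact source.

```zig
const std = @import("std");
const Ast = std.zig.Ast;

// Set of token indices, deduplicated by the identifier name they spell.
const TokenNameContext = struct {
    tree: Ast,

    pub fn hash(self: @This(), token: Ast.TokenIndex) u32 {
        return std.array_hash_map.hashString(self.tree.tokenSlice(token));
    }

    pub fn eql(self: @This(), a: Ast.TokenIndex, b: Ast.TokenIndex, b_index: usize) bool {
        _ = b_index;
        if (a == b) return true;
        return std.mem.eql(u8, self.tree.tokenSlice(a), self.tree.tokenSlice(b));
    }
};

const TokenSet = std.ArrayHashMapUnmanaged(Ast.TokenIndex, void, TokenNameContext, true);

test "duplicate names collapse and a doc-commented token wins" {
    const allocator = std.testing.allocator;

    const source =
        \\const E = error{
        \\    A,
        \\    /// docs
        \\    A,
        \\};
    ;
    var tree = try Ast.parse(allocator, source, .zig);
    defer tree.deinit(allocator);

    var set = TokenSet{};
    defer set.deinit(allocator);

    const token_tags = tree.tokens.items(.tag);
    // Tokens 5 and 8 are the two `A` identifiers; token 7 is the doc comment.
    for ([_]Ast.TokenIndex{ 5, 8 }) |tok| {
        const gop = try set.getOrPutContext(allocator, tok, TokenNameContext{ .tree = tree });
        if (!gop.found_existing or token_tags[tok - 1] == .doc_comment) {
            // First sighting, or a doc-commented duplicate: keep this token.
            gop.key_ptr.* = tok;
        }
    }

    try std.testing.expectEqual(@as(usize, 1), set.count());
    try std.testing.expectEqual(@as(Ast.TokenIndex, 8), set.keys()[0]);
}
```

This getOrPut-then-maybe-overwrite shape is the same one the commit uses in both `walkContainerDecl` and `walkErrorSetNode`, and storing only token indices (rather than prebuilt `types.CompletionItem`s with heap-allocated detail/documentation strings) is what allows the per-item frees in `deinit` above to be deleted.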
37 changes: 0 additions & 37 deletions src/DocumentStore.zig
@@ -1,6 +1,5 @@
const std = @import("std");
const builtin = @import("builtin");
const types = @import("lsp.zig");
const URI = @import("uri.zig");
const analysis = @import("analysis.zig");
const offsets = @import("offsets.zig");
@@ -1456,39 +1455,3 @@ pub fn uriFromImportStr(self: *DocumentStore, allocator: std.mem.Allocator, hand
};
}
}

/// **Thread safe** takes a shared lock
fn tagStoreCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle, comptime name: []const u8) error{OutOfMemory}![]types.CompletionItem {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

var dependencies = std.ArrayListUnmanaged(Uri){};
try dependencies.append(arena, handle.uri);
try self.collectDependenciesInternal(arena, handle, &dependencies, true);

// TODO Better solution for deciding what tags to include
var result_set = DocumentScope.CompletionSet{};

for (dependencies.items) |uri| {
// not every dependency is loaded which results in incomplete completion
const hdl = self.getHandle(uri) orelse continue; // takes a shared lock
const document_scope = try hdl.getDocumentScope();
const curr_set = @field(document_scope, name);
try result_set.ensureUnusedCapacity(arena, curr_set.count());
for (curr_set.keys()) |completion| {
result_set.putAssumeCapacity(completion, {});
}
}

return result_set.keys();
}

/// **Thread safe** takes a shared lock
pub fn errorCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle) error{OutOfMemory}![]types.CompletionItem {
return try self.tagStoreCompletionItems(arena, handle, "error_completions");
}

/// **Thread safe** takes a shared lock
pub fn enumCompletionItems(self: *DocumentStore, arena: std.mem.Allocator, handle: Handle) error{OutOfMemory}![]types.CompletionItem {
return try self.tagStoreCompletionItems(arena, handle, "enum_completions");
}
106 changes: 103 additions & 3 deletions src/features/completions.zig
@@ -738,7 +738,7 @@ fn completeError(server: *Server, arena: std.mem.Allocator, handle: *DocumentSto
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

return try server.document_store.errorCompletionItems(arena, handle.*);
return try globalSetCompletions(&server.document_store, arena, handle, .error_set);
}

fn kindToSortScore(kind: types.CompletionItemKind) ?[]const u8 {
@@ -808,7 +808,7 @@ fn completeDot(document_store: *DocumentStore, analyser: *Analyser, arena: std.m
if (token_tags[dot_token_index - 1] == .number_literal or token_tags[dot_token_index - 1] != .equal) return &.{};

// `var enum_val = .` or the get*Context logic failed because of syntax errors (parser didn't create the necessary node(s))
const enum_completions = try document_store.enumCompletionItems(arena, handle.*);
const enum_completions = try globalSetCompletions(document_store, arena, handle, .enum_set);
return enum_completions;
}

@@ -823,7 +823,7 @@ fn completeFileSystemStringLiteral(
handle: DocumentStore.Handle,
pos_context: Analyser.PositionContext,
) ![]types.CompletionItem {
var completions: DocumentScope.CompletionSet = .{};
var completions: CompletionSet = .{};

const loc = switch (pos_context) {
.import_string_literal,
@@ -1037,6 +1037,106 @@ pub fn completionAtIndex(server: *Server, analyser: *Analyser, arena: std.mem.Al
return .{ .isIncomplete = false, .items = completions };
}

// <--------------------------------------------------------------------------->
// global error set / enum field set
// <--------------------------------------------------------------------------->

pub const CompletionSet = std.ArrayHashMapUnmanaged(types.CompletionItem, void, CompletionContext, false);

const CompletionContext = struct {
pub fn hash(self: @This(), item: types.CompletionItem) u32 {
_ = self;
return std.array_hash_map.hashString(item.label);
}

pub fn eql(self: @This(), a: types.CompletionItem, b: types.CompletionItem, b_index: usize) bool {
_ = self;
_ = b_index;
return std.mem.eql(u8, a.label, b.label);
}
};

const CompletionNameAdapter = struct {
pub fn hash(ctx: @This(), name: []const u8) u32 {
_ = ctx;
return std.array_hash_map.hashString(name);
}

pub fn eql(ctx: @This(), a: []const u8, b: types.CompletionItem, b_map_index: usize) bool {
_ = ctx;
_ = b_map_index;
return std.mem.eql(u8, a, b.label);
}
};

/// Every `DocumentScope` stores a set of all error names and a set of all enum field names.
/// This function collects all of these sets from all dependencies and returns them as completions.
fn globalSetCompletions(
store: *DocumentStore,
arena: std.mem.Allocator,
handle: *DocumentStore.Handle,
kind: enum { error_set, enum_set },
) error{OutOfMemory}![]types.CompletionItem {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

var dependencies = std.ArrayListUnmanaged(DocumentStore.Uri){};
try dependencies.append(arena, handle.uri);
try store.collectDependencies(arena, handle.*, &dependencies);

// TODO Better solution for deciding what tags to include
var result_set = CompletionSet{};

for (dependencies.items) |uri| {
// not every dependency is loaded which results in incomplete completion
const dependency_handle = store.getHandle(uri) orelse continue;
const document_scope: DocumentScope = try dependency_handle.getDocumentScope();
const curr_set: DocumentScope.IdentifierSet = switch (kind) {
.error_set => @field(document_scope, "global_error_set"),
.enum_set => @field(document_scope, "global_enum_set"),
};
try result_set.ensureUnusedCapacity(arena, curr_set.count());
for (curr_set.keys()) |identifier_token| {
const name = offsets.identifierTokenToNameSlice(dependency_handle.tree, identifier_token);

const gop = result_set.getOrPutAssumeCapacityAdapted(
name,
CompletionNameAdapter{},
);

if (!gop.found_existing) {
gop.key_ptr.* = types.CompletionItem{
.label = name,
// TODO check if client supports label_details_support
.detail = switch (kind) {
.error_set => try std.fmt.allocPrint(arena, "error.{}", .{std.zig.fmtId(name)}),
.enum_set => null,
},
.kind = switch (kind) {
.error_set => .Constant,
.enum_set => .EnumMember,
},
.documentation = null, // will be set below
};
}

if (gop.key_ptr.documentation == null) {
if (try Analyser.getDocCommentsBeforeToken(arena, dependency_handle.tree, identifier_token)) |documentation| {
gop.key_ptr.documentation = .{
.MarkupContent = types.MarkupContent{
// TODO check if client supports markdown
.kind = .markdown,
.value = documentation,
},
};
}
}
}
}

return result_set.keys();
}
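
`CompletionNameAdapter` exercises the array hash map's adapted API: `getOrPutAssumeCapacityAdapted` probes the set with a plain name slice, so no throwaway `CompletionItem` has to be constructed just to test membership. For this to be sound, the adapter must hash a name exactly as `CompletionContext` hashes a stored item's label. A rough, self-contained sketch of that lookup-then-lazily-fill-documentation shape follows — `Item` and its `doc` field are hypothetical stand-ins for `types.CompletionItem` and its `documentation` field, not the real LSP types:

```zig
const std = @import("std");

// Hypothetical stand-in for types.CompletionItem with only the fields the sketch needs.
const Item = struct {
    label: []const u8,
    doc: ?[]const u8 = null,
};

// Hashes/compares stored items by label, mirroring CompletionContext.
const ItemContext = struct {
    pub fn hash(_: @This(), item: Item) u32 {
        return std.array_hash_map.hashString(item.label);
    }
    pub fn eql(_: @This(), a: Item, b: Item, _: usize) bool {
        return std.mem.eql(u8, a.label, b.label);
    }
};

// Probes the map with a bare name, mirroring CompletionNameAdapter.
// Must produce the same hash as ItemContext does for the same label.
const NameAdapter = struct {
    pub fn hash(_: @This(), name: []const u8) u32 {
        return std.array_hash_map.hashString(name);
    }
    pub fn eql(_: @This(), name: []const u8, item: Item, _: usize) bool {
        return std.mem.eql(u8, name, item.label);
    }
};

test "adapted lookup dedupes by name and fills docs lazily" {
    const allocator = std.testing.allocator;

    var set = std.ArrayHashMapUnmanaged(Item, void, ItemContext, false){};
    defer set.deinit(allocator);
    try set.ensureUnusedCapacity(allocator, 2);

    // First occurrence of a name inserts a fresh item without documentation.
    const first = set.getOrPutAssumeCapacityAdapted(@as([]const u8, "OutOfMemory"), NameAdapter{});
    if (!first.found_existing) first.key_ptr.* = .{ .label = "OutOfMemory" };

    // A later occurrence finds the existing item; only missing docs get filled in.
    const second = set.getOrPutAssumeCapacityAdapted(@as([]const u8, "OutOfMemory"), NameAdapter{});
    if (second.key_ptr.doc == null) second.key_ptr.doc = "doc comment from a later sighting";

    try std.testing.expect(second.found_existing);
    try std.testing.expectEqual(@as(usize, 1), set.count());
    try std.testing.expectEqualStrings("doc comment from a later sighting", set.keys()[0].doc.?);
}
```

Mutating through `key_ptr` is safe here only because `doc` does not participate in hashing or equality; touching `label` in place would corrupt the map. In the commit itself every allocation goes through the request `arena`, which is why `globalSetCompletions` never frees anything explicitly.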

// <--------------------------------------------------------------------------->
// completions/enum_literal.zig staging area
// <--------------------------------------------------------------------------->