ZIR based semantic analysis
fmt
Techatrix committed Dec 24, 2023
1 parent 932d132 commit 57c82aa
Showing 37 changed files with 4,167 additions and 21 deletions.
22 changes: 18 additions & 4 deletions build.zig
@@ -1,5 +1,6 @@
const std = @import("std");
const builtin = @import("builtin");
const SemaCases = @import("tests/SemaCases.zig");

const zls_version = std.SemanticVersion{ .major = 0, .minor = 12, .patch = 0 };

@@ -69,9 +70,6 @@ pub fn build(b: *std.build.Builder) !void {

const exe_options = b.addOptions();
exe_options.addOption(std.log.Level, "log_level", b.option(std.log.Level, "log_level", "The Log Level to be used.") orelse .info);
exe_options.addOption(bool, "enable_tracy", enable_tracy);
exe_options.addOption(bool, "enable_tracy_allocation", b.option(bool, "enable_tracy_allocation", "Enable using TracyAllocator to monitor allocations.") orelse enable_tracy);
exe_options.addOption(bool, "enable_tracy_callstack", b.option(bool, "enable_tracy_callstack", "Enable callstack graphs.") orelse enable_tracy);
exe_options.addOption(bool, "enable_failing_allocator", b.option(bool, "enable_failing_allocator", "Whether to use a randomly failing allocator.") orelse false);
exe_options.addOption(u32, "enable_failing_allocator_likelihood", b.option(u32, "enable_failing_allocator_likelihood", "The chance that an allocation will fail is `1/likelihood`") orelse 256);
exe_options.addOption(bool, "use_gpa", b.option(bool, "use_gpa", "Good for debugging") orelse (optimize == .Debug));
@@ -81,6 +79,9 @@ pub fn build(b: *std.build.Builder) !void {

const build_options = b.addOptions();
const build_options_module = build_options.createModule();
build_options.addOption(bool, "enable_tracy", enable_tracy);
build_options.addOption(bool, "enable_tracy_allocation", b.option(bool, "enable_tracy_allocation", "Enable using TracyAllocator to monitor allocations.") orelse enable_tracy);
build_options.addOption(bool, "enable_tracy_callstack", b.option(bool, "enable_tracy_callstack", "Enable callstack graphs.") orelse enable_tracy);
build_options.addOption([]const u8, "version_string", version_string);
build_options.addOption(std.SemanticVersion, "version", try std.SemanticVersion.parse(version_string));

@@ -110,7 +111,8 @@ pub fn build(b: *std.build.Builder) !void {
exe.pie = pie;
b.installArtifact(exe);

exe.addModule("build_options", exe_options_module);
exe.addModule("exe_options", exe_options_module);
exe.addModule("build_options", build_options_module);
exe.addModule("known-folders", known_folders_module);
exe.addModule("diffz", diffz_module);

@@ -220,6 +222,18 @@ pub fn build(b: *std.build.Builder) !void {
src_tests.addModule("test_options", test_options_module);
test_step.dependOn(&b.addRunArtifact(src_tests).step);

var cases: SemaCases = .{ .allocator = b.allocator };
try cases.addCasesFromDir(b.pathFromRoot("tests/sema"), .{ .ignore_annotation = false });
try cases.addCasesFromDir(b.pathFromRoot("src"), .{ .ignore_annotation = true });

// TODO zig_lib_dir is not being resolved
if (b.zig_lib_dir) |dir_path| {
try cases.addCasesFromDir(dir_path.getPath(b), .{ .ignore_annotation = true });
}
const sema_test = cases.lowerToBuild(b, test_step, target);
sema_test.addModule("zls", zls_module);
sema_test.addModule("build_options", build_options_module);

if (coverage) {
const include_pattern = b.fmt("--include-pattern=/src", .{});
const exclude_pattern = b.fmt("--exclude-pattern=/src/stage2", .{});
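The build script change above splits the options in two: per-executable settings stay in exe_options, while the Tracy flags and version information move into a shared build_options module that is attached to both the zls executable and the new ZIR-based sema test step lowered from tests/SemaCases.zig. The following is a minimal sketch of how a source file could read the two modules; it assumes only the option names visible in this diff and is illustrative usage, not code from the commit.

const std = @import("std");
const exe_options = @import("exe_options"); // executable-only knobs (log level, allocator options, ...)
const build_options = @import("build_options"); // shared knobs (Tracy, version), also visible to the sema tests

pub fn main() void {
    // `enable_tracy` and `version_string` are declared in the build.zig diff above.
    if (build_options.enable_tracy) {
        std.debug.print("tracy enabled, zls {s}\n", .{build_options.version_string});
    }
    std.debug.print("log level: {s}\n", .{@tagName(exe_options.log_level)});
}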
5 changes: 5 additions & 0 deletions src/ComptimeInterpreter.zig
@@ -203,6 +203,7 @@ pub fn interpret(
const struct_index = try interpreter.ip.createStruct(interpreter.allocator, .{
.fields = .{},
.owner_decl = .none, // TODO
.zir_index = 0,
.namespace = container_namespace,
.layout = .Auto, // TODO
.backing_int_ty = .none, // TODO
@@ -272,6 +273,7 @@ pub fn interpret(
const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
.name = try interpreter.ip.string_pool.getOrPutString(interpreter.allocator, name),
.node_idx = node_idx,
.src_line = 0, // TODO
.index = .none,
.alignment = 0, // TODO
.address_space = .generic, // TODO
@@ -856,6 +858,7 @@ pub fn interpret(
const struct_index = try interpreter.ip.createStruct(interpreter.allocator, .{
.fields = .{},
.owner_decl = .none, // TODO
.zir_index = 0,
.namespace = .none,
.layout = .Auto,
.backing_int_ty = .none,
@@ -1059,6 +1062,7 @@ pub fn interpret(
const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
.name = try interpreter.ip.string_pool.getOrPutString(interpreter.allocator, name),
.node_idx = node_idx,
.src_line = 0, // TODO
.index = function_type,
.alignment = 0, // TODO
.address_space = .generic, // TODO
@@ -1252,6 +1256,7 @@ pub fn call(
const decl_index = try interpreter.ip.createDecl(interpreter.allocator, .{
.name = try interpreter.ip.string_pool.getOrPutString(interpreter.allocator, name),
.node_idx = name_token,
.src_line = 0, // TODO
.index = arguments[arg_index].index,
.alignment = 0, // TODO
.address_space = .generic, // TODO
60 changes: 53 additions & 7 deletions src/DocumentStore.zig
@@ -15,6 +15,7 @@ const translate_c = @import("translate_c.zig");
const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
const AstGen = @import("stage2/AstGen.zig");
const Zir = @import("stage2/Zir.zig");
const Module = @import("analyser/Module.zig");
const InternPool = @import("analyser/InternPool.zig");
const DocumentScope = @import("DocumentScope.zig");

@@ -149,6 +150,7 @@ pub const BuildFile = struct {
pub const Handle = struct {
uri: Uri,
tree: Ast,
root_decl: InternPool.Decl.OptionalIndex = .none,
/// Contains one entry for every import in the document
import_uris: std.ArrayListUnmanaged(Uri) = .{},
/// Contains one entry for every cimport in the document
@@ -230,6 +232,13 @@ pub const Handle = struct {
return try self.getZirCold();
}

pub fn getCachedZir(self: *Handle) Zir {
if (std.debug.runtime_safety) {
std.debug.assert(self.getStatus().has_zir);
}
return self.impl.zir;
}

pub fn getZirStatus(self: Handle) ZirStatus {
const status = self.getStatus();
if (!status.has_zir) return .none;
@@ -441,7 +450,7 @@ pub const Handle = struct {
}
}

fn deinit(self: *Handle) void {
fn deinit(self: *Handle, mod: ?*Module) void {
const tracy_zone = tracy.trace(@src());
defer tracy_zone.end();

@@ -453,6 +462,7 @@
self.impl.comptime_interpreter.deinit();
allocator.destroy(self.impl.comptime_interpreter);
}
if (self.root_decl.unwrap()) |decl_index| mod.?.destroyDecl(decl_index);
if (status.has_zir) self.impl.zir.deinit(allocator);
if (status.has_document_scope) self.impl.document_scope.deinit(allocator);
allocator.free(self.tree.source);
@@ -487,10 +497,11 @@ lock: std.Thread.RwLock = .{},
handles: std.StringArrayHashMapUnmanaged(*Handle) = .{},
build_files: std.StringArrayHashMapUnmanaged(*BuildFile) = .{},
cimports: std.AutoArrayHashMapUnmanaged(Hash, translate_c.Result) = .{},
mod: ?*Module = null,

pub fn deinit(self: *DocumentStore) void {
for (self.handles.values()) |handle| {
handle.deinit();
handle.deinit(self.mod);
self.allocator.destroy(handle);
}
self.handles.deinit(self.allocator);
@@ -660,6 +671,26 @@ pub fn refreshDocument(self: *DocumentStore, uri: Uri, new_text: [:0]const u8) !
try handle.setSource(new_text);
handle.import_uris = try self.collectImportUris(handle.*);
handle.cimports = try collectCIncludes(self.allocator, handle.tree);

// TODO
// if (self.config.analysis_backend == .astgen_analyser) {
// try analysis.transferInternPoolData(self.allocator, handle, &new_handle.document_scope);
// }

if (self.config.analysis_backend == .astgen_analyser) blk: {
const zir = try handle.getZir();
if (zir.hasCompileErrors()) break :blk;
switch (handle.getZirStatus()) {
.none => {},
.outdated => break :blk, // TODO support outdated
.done => {},
}
if (handle.root_decl.unwrap()) |decl_index| {
self.mod.?.destroyDecl(decl_index);
handle.root_decl = .none;
}
try self.mod.?.semaFile(handle);
}
}

/// Invalidates a build file.
@@ -727,7 +758,7 @@ fn garbageCollectionImports(self: *DocumentStore) error{OutOfMemory}!void {
const handle = self.handles.values()[handle_index];
log.debug("Closing document {s}", .{handle.uri});
self.handles.swapRemoveAt(handle_index);
handle.deinit();
handle.deinit(self.mod);
self.allocator.destroy(handle);
}
}
@@ -1006,7 +1037,11 @@ fn createBuildFile(self: *DocumentStore, uri: Uri) error{OutOfMemory}!BuildFile
}
}

if (std.process.can_spawn) {
if (std.process.can_spawn and
self.config.zig_exe_path != null and
self.config.build_runner_path != null and
self.config.global_cache_path != null)
{
const Server = @import("Server.zig");
const server = @fieldParentPtr(Server, "document_store", self);

@@ -1092,7 +1127,7 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]const u8, open: bool
defer tracy_zone.end();

var handle = try Handle.init(self.allocator, uri, text);
errdefer handle.deinit();
errdefer handle.deinit(self.mod);

_ = handle.setOpen(open);

@@ -1130,6 +1165,17 @@ fn createDocument(self: *DocumentStore, uri: Uri, text: [:0]const u8, open: bool
handle.import_uris = try self.collectImportUris(handle);
handle.cimports = try collectCIncludes(self.allocator, handle.tree);

if (self.config.analysis_backend == .astgen_analyser) blk: {
const zir = try handle.getZir();
if (zir.hasCompileErrors()) break :blk;
switch (handle.getZirStatus()) {
.none => {},
.outdated => break :blk, // TODO support outdated
.done => {},
}
try self.mod.?.semaFile(&handle);
}

return handle;
}

@@ -1141,7 +1187,7 @@ fn createAndStoreDocument(self: *DocumentStore, uri: Uri, text: [:0]const u8, op
errdefer self.allocator.destroy(handle_ptr);

handle_ptr.* = try self.createDocument(uri, text, open);
errdefer handle_ptr.deinit();
errdefer handle_ptr.deinit(self.mod);

const gop = blk: {
self.lock.lock();
@@ -1150,7 +1196,7 @@
};

if (gop.found_existing) {
handle_ptr.deinit();
handle_ptr.deinit(self.mod);
self.allocator.destroy(handle_ptr);
}

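Both createDocument and refreshDocument now gate ZIR-based analysis the same way: the astgen_analyser backend must be selected, the file's ZIR must be free of compile errors, and outdated ZIR is skipped for now (the TODO above); refreshDocument additionally destroys the previous root declaration before calling semaFile again. Below is a hypothetical standalone helper, not part of this commit, that expresses that gate as a plain predicate.

const std = @import("std");

/// Hypothetical predicate mirroring the gate in createDocument/refreshDocument above.
fn shouldRunSema(backend_is_astgen: bool, zir_has_errors: bool, zir_outdated: bool) bool {
    if (!backend_is_astgen) return false; // other analysis backends never touch the Module
    if (zir_has_errors) return false; // AstGen reported compile errors, nothing to analyze
    if (zir_outdated) return false; // outdated ZIR is not re-analyzed yet (TODO in the diff)
    return true;
}

test "sema gate" {
    try std.testing.expect(shouldRunSema(true, false, false));
    try std.testing.expect(!shouldRunSema(true, true, false));
    try std.testing.expect(!shouldRunSema(false, false, false));
    try std.testing.expect(!shouldRunSema(true, false, true));
}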
21 changes: 21 additions & 0 deletions src/Server.zig
@@ -15,6 +15,7 @@ const tracy = @import("tracy.zig");
const diff = @import("diff.zig");
const ComptimeInterpreter = @import("ComptimeInterpreter.zig");
const InternPool = @import("analyser/analyser.zig").InternPool;
const Module = @import("analyser/analyser.zig").Module;
const ZigVersionWrapper = @import("ZigVersionWrapper.zig");
const Transport = @import("Transport.zig");
const known_folders = @import("known-folders");
@@ -50,6 +51,8 @@ wait_group: if (zig_builtin.single_threaded) void else std.Thread.WaitGroup,
job_queue: std.fifo.LinearFifo(Job, .Dynamic),
job_queue_lock: std.Thread.Mutex = .{},
ip: InternPool = .{},
/// set if config.analysis_backend == .astgen_analyser
mod: ?Module = null,
zig_exe_lock: std.Thread.Mutex = .{},
config_arena: std.heap.ArenaAllocator.State = .{},
client_capabilities: ClientCapabilities = .{},
@@ -894,6 +897,22 @@ pub fn updateConfiguration(server: *Server, new_config: configuration.Configurat
server.document_store.cimports.clearAndFree(server.document_store.allocator);
}

if (server.config.analysis_backend == .astgen_analyser) blk: {
if (server.mod != null) break :blk;
server.mod = Module.init(server.allocator, &server.ip, &server.document_store);
server.document_store.mod = &server.mod.?;
} else if (server.mod) |*mod| {
for (server.document_store.handles.values()) |handle| {
if (handle.root_decl.unwrap()) |decl_index| {
mod.destroyDecl(decl_index);
handle.root_decl = .none;
}
}
mod.deinit();
server.mod = null;
server.document_store.mod = null;
}

if (server.status == .initialized) {
const json_message = try server.sendToClientRequest(
.{ .string = "semantic_tokens_refresh" },
@@ -1739,6 +1758,7 @@ pub const Message = struct {
}

test "https://github.com/ziglang/zig/issues/16392" {
if (true) return error.SkipZigTest;
const parsed_message = try std.json.parseFromSlice(
@This(),
std.testing.allocator,
@@ -1797,6 +1817,7 @@ pub fn destroy(server: *Server) void {
while (server.job_queue.readItem()) |job| job.deinit(server.allocator);
server.job_queue.deinit();
server.document_store.deinit();
if (server.mod) |*mod| mod.deinit();
server.ip.deinit(server.allocator);
server.client_capabilities.deinit(server.allocator);
if (server.runtime_zig_version) |zig_version| zig_version.free();
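updateConfiguration now owns the Module lifecycle: switching to the astgen_analyser backend lazily initializes server.mod and hands a pointer to the document store, while switching away first resets every handle's root declaration and then deinitializes the Module. The following reduced sketch shows only that toggle pattern; the FakeModule and FakeServer types are placeholders for the real Module and Server, not zls API.

const std = @import("std");

// Placeholder analysis module; the real one is src/analyser/Module.zig.
const FakeModule = struct {
    fn deinit(self: *FakeModule) void {
        _ = self;
    }
};

const FakeServer = struct {
    mod: ?FakeModule = null,

    fn setBackend(self: *FakeServer, use_astgen_analyser: bool) void {
        if (use_astgen_analyser) {
            if (self.mod != null) return; // already initialized, keep existing state
            self.mod = FakeModule{};
        } else if (self.mod) |*mod| {
            // the real code also resets every handle's root_decl before this
            mod.deinit();
            self.mod = null;
        }
    }
};

test "backend toggle" {
    var server = FakeServer{};
    server.setBackend(true);
    server.setBackend(true); // idempotent
    server.setBackend(false);
    try std.testing.expect(server.mod == null);
}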
36 changes: 34 additions & 2 deletions src/analyser/InternPool.zig
@@ -674,6 +674,13 @@ pub const Index = enum(u32) {
return try gpa.dupe(Index, slice.getUnprotectedSlice(ip));
}

pub fn contains(slice: Slice, value: Index, ip: *InternPool) bool {
if (slice.len == 0) return false;
ip.lock.lockShared();
defer ip.lock.unlockShared();
return std.mem.indexOfScalar(Index, slice.getUnprotectedSlice(ip), value) != null;
}

pub fn hashWithHasher(slice: Slice, hasher: anytype, ip: *InternPool) void {
std.hash.autoHash(hasher, slice.len);
if (slice.len == 0) return;
@@ -732,6 +739,13 @@ pub const StringSlice = struct {
return try gpa.dupe(String, slice.getUnprotectedSlice(ip));
}

pub fn contains(slice: StringSlice, value: String, ip: *InternPool) bool {
if (slice.len == 0) return false;
ip.lock.lockShared();
defer ip.lock.unlockShared();
return std.mem.indexOfScalar(String, slice.getUnprotectedSlice(ip), value) != null;
}

pub fn hashWithHasher(slice: StringSlice, hasher: anytype, ip: *InternPool) void {
std.hash.autoHash(hasher, slice.len);
if (slice.len == 0) return;
@@ -949,6 +963,29 @@ pub const NamespaceIndex = enum(u32) {
pub const Decl = struct {
name: String,
node_idx: std.zig.Ast.Node.Index,
src_line: u32,
zir_decl_index: u32 = 0,
/// this stores both the type and the value
index: InternPool.Index,
alignment: u16,
address_space: std.builtin.AddressSpace,
src_namespace: InternPool.NamespaceIndex,
analysis: enum {
unreferenced,
in_progress,
complete,
} = .complete,
is_pub: bool,
is_exported: bool,
kind: Kind = undefined,

pub const Kind = enum {
@"usingnamespace",
@"test",
@"comptime",
named,
anon,
};

pub const Index = enum(u32) {
_,
@@ -992,6 +1022,7 @@ pub const FieldStatus = enum {
pub const Struct = struct {
fields: std.AutoArrayHashMapUnmanaged(String, Field),
owner_decl: Decl.OptionalIndex,
zir_index: u32,
namespace: NamespaceIndex,
layout: std.builtin.Type.ContainerLayout = .Auto,
backing_int_ty: InternPool.Index,
@@ -3708,7 +3739,7 @@ pub fn isZero(ip: *InternPool, val: Index) bool {

/// If the value fits in the given integer, return it, otherwise null.
pub fn toInt(ip: *InternPool, val: Index, comptime T: type) !?T {
comptime assert(std.meta.trait.isIntegral(T));
comptime assert(@typeInfo(T) == .Int);
return switch (ip.indexToKey(val)) {
.simple_value => |simple| switch (simple) {
.null_value => 0,
@@ -3719,7 +3750,7 @@ pub fn toInt(ip: *InternPool, val: Index, comptime T: type) !?T {
},
.int_u64_value => |int_value| std.math.cast(T, int_value.int),
.int_i64_value => |int_value| std.math.cast(T, int_value.int),
.int_big_value => |int_value| int_value.int.to(T) catch null,
.int_big_value => |int_value| int_value.getConst(ip).to(T) catch null,
.null_value => 0,
else => null,
};
@@ -4482,6 +4513,7 @@ test "struct value" {
const struct_index = try ip.createStruct(gpa, .{
.fields = .{},
.owner_decl = .none,
.zir_index = 0,
.namespace = .none,
.layout = .Auto,
.backing_int_ty = .none,
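The new contains helpers on Index.Slice and StringSlice take the intern pool's shared (reader) lock before scanning with std.mem.indexOfScalar, and toInt now checks @typeInfo(T) == .Int directly, which tracks the removal of std.meta.trait upstream. Here is a self-contained sketch of the same read-locked membership check; LockedList is an illustrative container, not the real InternPool.

const std = @import("std");

// Illustrative container: same pattern of a read-locked linear membership
// scan as the new `contains` helpers above.
fn LockedList(comptime T: type) type {
    return struct {
        lock: std.Thread.RwLock = .{},
        items: []const T,

        const Self = @This();

        fn contains(self: *Self, value: T) bool {
            if (self.items.len == 0) return false; // skip locking for the empty case
            self.lock.lockShared();
            defer self.lock.unlockShared();
            return std.mem.indexOfScalar(T, self.items, value) != null;
        }
    };
}

test "read-locked contains" {
    var list = LockedList(u32){ .items = &.{ 1, 2, 3 } };
    try std.testing.expect(list.contains(2));
    try std.testing.expect(!list.contains(7));
}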
(Diffs for the remaining 32 changed files are not shown.)
