Substitute nodes

David Gonzalez Martin 2024-04-24 18:22:11 -06:00
parent 89c43471c2
commit de930c0729
4 changed files with 45 additions and 41 deletions

View File

@@ -118,6 +118,7 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons
.tokens = try PinnedArray(Token).init_with_default_granularity(),
.line_offsets = try PinnedArray(u32).init_with_default_granularity(),
},
.node_buffer = try PinnedArray(Node).init_with_default_granularity(),
};
try unit.compile(context);
@@ -3014,6 +3015,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o
.tokens = try PinnedArray(Token).init_with_default_granularity(),
.line_offsets = try PinnedArray(u32).init_with_default_granularity(),
},
.node_buffer = try PinnedArray(Node).init_with_default_granularity(),
};
try unit.compile(context);
@@ -4617,7 +4619,7 @@ pub const Builder = struct {
fn resolveIntrinsic(builder: *Builder, unit: *Unit, context: *const Context, type_expect: Type.Expect, node_index: Node.Index, side: Side) anyerror!V {
const node = unit.getNode(node_index);
const intrinsic_id: IntrinsicId = @enumFromInt(Node.unwrap(node.right));
const intrinsic_id: IntrinsicId = @enumFromInt(@intFromEnum(node.right));
const argument_node_list = unit.getNodeList(node.left);
switch (intrinsic_id) {
@@ -9604,7 +9606,7 @@ pub const Builder = struct {
switch (node.id) {
.intrinsic => {
const argument_node_list = unit.getNodeList(node.left);
const intrinsic_id: IntrinsicId = @enumFromInt(Node.unwrap(node.right));
const intrinsic_id: IntrinsicId = @enumFromInt(@intFromEnum(node.right));
switch (intrinsic_id) {
.import => {
assert(argument_node_list.len == 1);
@@ -16802,7 +16804,7 @@ pub const Enum = struct {
};
pub const Unit = struct {
node_buffer: Node.List = .{},
node_buffer: PinnedArray(Node),
token_buffer: Token.Buffer,
files: Debug.File.List = .{},
types: Type.List = .{},
@@ -17153,7 +17155,7 @@ pub const Unit = struct {
fn getNodeListFromNode(unit: *Unit, node: *const Node) []const Node.Index {
assert(node.id == .node_list);
const list_index = node.left;
const node_list = unit.node_lists.slice()[Node.unwrap(list_index)];
const node_list = unit.node_lists.slice()[@intFromEnum(list_index)];
return node_list.pointer[0..node_list.length];
}
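
The hunks in this file swap Unit.node_buffer from the BlockList-backed Node.List to a PinnedArray(Node), so call sites replace the old Node.unwrap helper with the @intFromEnum builtin on a plain integer-backed index enum. A minimal sketch of the two styles, using an illustrative Index type rather than the real generated one:

    const std = @import("std");

    // PinnedArray-style index: a non-exhaustive enum over a raw u32 slot number,
    // with 0xffff_ffff used here as an illustrative "null" sentinel.
    const Index = enum(u32) { none = 0xffff_ffff, _ };

    // Old style (illustrative only): a helper that hides the conversion.
    fn unwrap(index: Index) u32 {
        return @intFromEnum(index);
    }

    test "helper and builtin agree" {
        const i: Index = @enumFromInt(118);
        try std.testing.expectEqual(unwrap(i), @intFromEnum(i));
    }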

View File

@@ -58,11 +58,11 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
const time_start = std.time.Instant.now() catch unreachable;
token_buffer.line_offsets.append(0);
_ = token_buffer.line_offsets.append(0);
for (text, 0..) |byte, index| {
if (byte == '\n') {
token_buffer.line_offsets.append(@intCast(index + 1));
_ = token_buffer.line_offsets.append(@intCast(index + 1));
}
}
@@ -483,7 +483,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
},
// Asm statement (special treatment)
'`' => {
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_backtick,
.line = line_index,
.offset = start_index,
@@ -510,7 +510,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
}
}
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .identifier,
.offset = start_i,
.length = index - start_i,
@@ -518,7 +518,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
});
},
',' => {
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_comma,
.line = line_index,
.offset = start_i,
@@ -527,7 +527,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
index += 1;
},
';' => {
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_semicolon,
.line = line_index,
.offset = start_i,
@@ -536,7 +536,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
index += 1;
},
'{' => {
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_left_brace,
.line = line_index,
.offset = start_i,
@@ -545,7 +545,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
index += 1;
},
'}' => {
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_right_brace,
.line = line_index,
.offset = start_i,
@@ -574,7 +574,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
}
}
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .number_literal,
.line = line_index,
.offset = start_i,
@@ -588,7 +588,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
}
}
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = .operator_backtick,
.line = line_index,
.length = 1,
@@ -608,19 +608,14 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result {
const end_index = index;
const token_length = end_index - start_index;
token_buffer.tokens.append(.{
_ = token_buffer.tokens.append(.{
.id = token_id,
.offset = start_index,
.length = token_length,
.line = line_index,
});
// const line_offset = token_buffer.line_offsets.pointer[line_index];
// const column = start_index - line_offset;
// logln(.lexer, .new_token, "T at line {}, column {}, byte offset {}, with length {} -line offset: {}- ({s})", .{ line_index, column, start_index, token_length, line_offset, @tagName(token_id) });
}
// logln(.lexer, .end, "END LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset });
lexer.count = token_buffer.tokens.length - @intFromEnum(lexer.offset);
lexer.line_count = token_buffer.line_offsets.length - lexer.line_offset;
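
The append calls in this file now discard the function's new return value with _ =, because Zig treats an ignored non-void result as a compile error. A minimal illustration of that rule (the bump function below is hypothetical, standing in for PinnedArray.append):

    const std = @import("std");

    var counter: u32 = 0;

    // Returns a pointer to the value it just updated, like the new PinnedArray.append (illustrative only).
    fn bump() *u32 {
        counter += 1;
        return &counter;
    }

    pub fn main() void {
        _ = bump(); // the *u32 result must be explicitly discarded ...
        const last = bump(); // ... or bound to a name and used
        std.debug.print("counter = {}\n", .{last.*});
    }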

View File

@@ -2,10 +2,13 @@ const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const data_structures = @import("../library.zig");
const UnpinnedArray = data_structures.UnpinnedArray;
const BlockList = data_structures.BlockList;
const enumFromString = data_structures.enumFromString;
const library = @import("../library.zig");
const byte_equal = library.byte_equal;
const BlockList = library.BlockList;
const enumFromString = library.enumFromString;
const PinnedArray = library.PinnedArray;
const MyAllocator = library.MyAllocator;
const UnpinnedArray = library.UnpinnedArray;
const lexer = @import("lexer.zig");
@@ -62,8 +65,7 @@ pub const Node = struct {
token: Token.Index,
id: Id,
pub const List = BlockList(@This(), enum {});
pub usingnamespace List.Index;
pub const Index = PinnedArray(Node).Index;
pub const Range = struct {
start: u32,
@@ -226,11 +228,11 @@ const Analyzer = struct {
lexer: lexer.Result,
token_i: Token.Index,
token_buffer: *Token.Buffer,
nodes: *Node.List,
nodes: *PinnedArray(Node),
node_lists: *UnpinnedArray(UnpinnedArray(Node.Index)),
source_file: []const u8,
allocator: Allocator,
my_allocator: *data_structures.MyAllocator,
my_allocator: *MyAllocator,
suffix_depth: usize = 0,
fn expectToken(analyzer: *Analyzer, expected_token_id: Token.Id) !Token.Index {
@@ -339,7 +341,7 @@ const Analyzer = struct {
const identifier_name = analyzer.bytes(identifier);
const attribute_node = inline for (@typeInfo(Compilation.Debug.Declaration.Global.Attribute).Enum.fields) |enum_field| {
if (data_structures.byte_equal(identifier_name, enum_field.name)) {
if (byte_equal(identifier_name, enum_field.name)) {
const attribute = @field(Compilation.Debug.Declaration.Global.Attribute, enum_field.name);
const attribute_node = switch (attribute) {
.@"export",
@@ -427,7 +429,7 @@ const Analyzer = struct {
const identifier_name = analyzer.bytes(identifier);
const attribute_node = inline for (@typeInfo(Compilation.Function.Attribute).Enum.fields) |enum_field| {
if (data_structures.byte_equal(identifier_name, enum_field.name)) {
if (byte_equal(identifier_name, enum_field.name)) {
const attribute = @field(Compilation.Function.Attribute, enum_field.name);
const attribute_node = switch (attribute) {
.naked => try analyzer.addNode(.{
@@ -926,7 +928,7 @@ const Analyzer = struct {
const intrinsic_name = analyzer.bytes(intrinsic_token)[1..];
const intrinsic_id = inline for (@typeInfo(Compilation.IntrinsicId).Enum.fields) |enum_field| {
if (data_structures.byte_equal(enum_field.name, intrinsic_name)) {
if (byte_equal(enum_field.name, intrinsic_name)) {
break @field(Compilation.IntrinsicId, enum_field.name);
}
} else @panic(intrinsic_name);
@@ -2316,7 +2318,7 @@ const Analyzer = struct {
.right = blk: {
const t = analyzer.token_i;
analyzer.consumeToken();
break :blk Node.wrap(@intFromEnum(t));
break :blk @enumFromInt(@intFromEnum(t));
},
}),
else => |t| @panic(@tagName(t)),
@@ -2328,7 +2330,8 @@ const Analyzer = struct {
}
fn addNode(analyzer: *Analyzer, node: Node) !Node.Index {
const node_index = try analyzer.nodes.append(analyzer.my_allocator, node);
const node_pointer = analyzer.nodes.append(node);
const node_index = analyzer.nodes.get_index(node_pointer);
// logln(.parser, .node_creation, "Adding node #{} {s} to file #{} (left: {}, right: {})", .{ Node.unwrap(node_index), @tagName(node.id), File.unwrap(analyzer.file_index), switch (node.left) {
// .null => 0xffff_ffff,
// else => Node.unwrap(node.left),
@@ -2377,7 +2380,7 @@ const Analyzer = struct {
};
// Here it is assumed that left brace is consumed
pub fn analyze(allocator: Allocator, my_allocator: *data_structures.MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *Node.List, node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result {
pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result {
const start = std.time.Instant.now() catch unreachable;
var analyzer = Analyzer{
.lexer = lexer_result,
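
One hunk above replaces Node.wrap(@intFromEnum(t)) with @enumFromInt(@intFromEnum(t)): the token index is read out as its raw integer and re-wrapped as a Node.Index, with @enumFromInt inferring the destination enum from the result location. A minimal sketch of that round trip, with illustrative TokenIndex and NodeIndex types standing in for the real ones:

    const std = @import("std");

    const TokenIndex = enum(u32) { none = 0xffff_ffff, _ };
    const NodeIndex = enum(u32) { none = 0xffff_ffff, _ };

    // Reinterpret a token index as a node index; the raw u32 value is carried over unchanged.
    fn tokenToNodeIndex(t: TokenIndex) NodeIndex {
        return @enumFromInt(@intFromEnum(t));
    }

    test "round trip keeps the raw value" {
        const t: TokenIndex = @enumFromInt(7);
        try std.testing.expectEqual(@as(u32, 7), @intFromEnum(tokenToNodeIndex(t)));
    }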

View File

@@ -110,9 +110,11 @@ pub fn PinnedArray(comptime T: type) type {
return array.get_unchecked(i);
}
pub fn get_index(array: *Array, item: *const T) Index{
assert(item - array.pointer > (@divExact(pinned_array_max_size, @sizeOf(T))));
return @enumFromInt(item - array.pointer);
pub fn get_index(array: *Array, item: *T) Index{
const many_item: [*]T = @ptrCast(item);
const result = @intFromPtr(many_item) - @intFromPtr(array.pointer);
assert(result < pinned_array_max_size);
return @enumFromInt(@divExact(result, @sizeOf(T)));
}
pub fn init(granularity: u32) !Array{
@@ -129,7 +131,7 @@ pub fn PinnedArray(comptime T: type) type {
return try Array.init(pinned_array_default_granularity);
}
pub fn append(array: *Array, item: T) void {
pub fn append(array: *Array, item: T) *T {
if (((array.length + 1) * @sizeOf(T)) & (array.granularity - 1) == 0) {
const length: u64 = array.length;
assert((length + 1) * @sizeOf(T) <= pinned_array_max_size);
@@ -137,14 +139,16 @@ pub fn PinnedArray(comptime T: type) type {
commit(ptr + ((length + 1) * @sizeOf(T)), array.granularity) catch unreachable;
}
array.append_with_capacity(item);
return array.append_with_capacity(item);
}
pub fn append_with_capacity(array: *Array, item: T) void {
pub fn append_with_capacity(array: *Array, item: T) *T {
const index = array.length;
assert(index * @sizeOf(T) < pinned_array_max_size);
array.length += 1;
array.pointer[index] = item;
const ptr = &array.pointer[index];
ptr.* = item;
return ptr;
}
};
}
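
With this change, append and append_with_capacity hand back a stable pointer into the pinned buffer, and get_index recovers an element's slot by dividing its byte offset from the base pointer by @sizeOf(T). A rough usage sketch against the new API; the library.zig import path is an assumption (the compiler sources reach this file through relative imports), and the test runner handles the error union:

    const std = @import("std");
    const PinnedArray = @import("library.zig").PinnedArray;

    test "append returns a pointer whose index can be recovered" {
        var numbers = try PinnedArray(u32).init_with_default_granularity();
        const first = numbers.append(42); // *u32 into the pinned, never-relocating buffer
        _ = numbers.append(43); // second element; its pointer is not needed here
        const index = numbers.get_index(first); // byte offset / @sizeOf(u32), wrapped in the Index enum
        try std.testing.expectEqual(@as(u32, 0), @intFromEnum(index));
        first.* = 100; // the pointer from append stays valid across later appends
    }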