From 89c43471c264dc7026a2db4f1d19cb86326ba56c Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Wed, 24 Apr 2024 18:04:24 -0600 Subject: [PATCH 01/14] Substitute tokens --- bootstrap/Compilation.zig | 190 ++++++++-------------------------- bootstrap/frontend/lexer.zig | 46 ++++---- bootstrap/frontend/parser.zig | 53 ++++------ bootstrap/library.zig | 72 ++++++++++++- 4 files changed, 161 insertions(+), 200 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index d1d708c..f7dcccb 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -8,6 +8,7 @@ const byte_equal = data_structures.byte_equal; const byte_equal_terminated = data_structures.byte_equal_terminated; const first_slice = data_structures.first_slice; const starts_with_slice = data_structures.starts_with_slice; +const PinnedArray = data_structures.PinnedArray; const UnpinnedArray = data_structures.UnpinnedArray; const BlockList = data_structures.BlockList; const MyAllocator = data_structures.MyAllocator; @@ -113,6 +114,10 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .is_test = false, .c_source_files = &.{}, }, + .token_buffer = Token.Buffer{ + .tokens = try PinnedArray(Token).init_with_default_granularity(), + .line_offsets = try PinnedArray(u32).init_with_default_granularity(), + }, }; try unit.compile(context); @@ -3005,6 +3010,10 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o .is_test = options.is_test, .c_source_files = c_source_files.slice(), }, + .token_buffer = Token.Buffer{ + .tokens = try PinnedArray(Token).init_with_default_granularity(), + .line_offsets = try PinnedArray(u32).init_with_default_granularity(), + }, }; try unit.compile(context); @@ -5617,16 +5626,12 @@ pub const Builder = struct { column: u32, }; - fn getTokenDebugInfo(builder: *Builder, unit: *Unit, token: Token.Index) TokenDebugInfo { + fn getTokenDebugInfo(builder: *Builder, unit: *Unit, token_index: Token.Index) TokenDebugInfo { const file = unit.files.get(builder.current_file); - const index = Token.unwrap(token); - assert(index < unit.token_buffer.length); - const line_offset_index = unit.token_buffer.lines[index]; - const line = line_offset_index - file.lexer.line_offset; - const offset = unit.token_buffer.offsets[index]; - assert(line_offset_index < unit.token_buffer.line_offsets.length); - const line_offset = unit.token_buffer.line_offsets.pointer[line_offset_index]; - const column = offset - line_offset; + const token = unit.token_buffer.tokens.get(token_index); + const line = token.line - file.lexer.line_offset; + const line_offset = unit.token_buffer.line_offsets.get_unchecked(token.line).*; + const column = token.offset - line_offset; return .{ .line = line, @@ -6708,33 +6713,6 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), } } else { - // var scope_it: ?*Debug.Scope = builder.current_scope; - // const indentation_size = 4; - // var indentation: u32 = 0; - // - // var file_path: []const u8 = ""; - // while (scope_it) |scope| : (scope_it = scope.parent) { - // for (0..indentation * indentation_size) |_| { - // std.debug.print(" ", .{}); - // } - // std.debug.print("> Scope {s} ", .{@tagName(scope.kind)}); - // switch (scope.kind) { - // .compilation_unit => {}, - // .file_container, .container => {}, - // .function => {}, - // .file => { - // const global_scope = @fieldParentPtr(Debug.Scope.Global, "scope", scope); - // const file = @fieldParentPtr(Debug.File, "scope", global_scope); - // 
std.debug.print("{s}", .{file.relative_path}); - // file_path = file.relative_path; - // }, - // .block => {}, - // } - // - // std.debug.print("\n", .{}); - // indentation += 1; - // } - try write(.panic, "identifier '"); try write(.panic, identifier); try write(.panic, "' not found\n"); @@ -7425,7 +7403,7 @@ pub const Builder = struct { .comptime_argument_declaration => switch (polymorphic_call_argument_node.id) { .comptime_expression => { const comptime_argument = try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = argument_type }, .{}, polymorphic_call_argument_node.left, null, .right, &.{}, null, &.{}); - const name = unit.getExpectedTokenBytes(Token.addInt(argument_declaration_node.token, 1), .identifier); + const name = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(argument_declaration_node.token) + 1), .identifier); const name_hash = try unit.processIdentifier(context, name); const debug_info = builder.getTokenDebugInfo(unit, argument_declaration_node.token); try comptime_parameter_declarations.append(context.my_allocator, .{ @@ -7505,7 +7483,7 @@ pub const Builder = struct { } fn put_argument_in_scope(builder: *Builder, unit: *Unit, context: *const Context, argument_node: *const Node, argument_index: usize, argument_type_index: Type.Index) !void { - const argument_name = switch (unit.getTokenId(argument_node.token)) { + const argument_name = switch (unit.token_buffer.tokens.get(argument_node.token).id) { .identifier => b: { const argument_name = unit.getExpectedTokenBytes(argument_node.token, .identifier); @@ -8707,7 +8685,7 @@ pub const Builder = struct { .constant_symbol_declaration, .variable_symbol_declaration, => { - const expected_identifier_token_index = Token.addInt(declaration_node.token, 1); + const expected_identifier_token_index: Token.Index = @enumFromInt(@intFromEnum(declaration_node.token) + 1); const identifier = unit.getExpectedTokenBytes(expected_identifier_token_index, .identifier); // logln(.compilation, .identifier, "Analyzing global declaration {s}", .{identifier}); const identifier_hash = try unit.processIdentifier(context, identifier); @@ -8823,7 +8801,7 @@ pub const Builder = struct { for (field_nodes.slice(), 0..) 
|field_node_index, index| { const field_node = unit.getNode(field_node_index); - const identifier = switch (unit.getTokenId(field_node.token)) { + const identifier = switch (unit.token_buffer.tokens.get(field_node.token).id) { .identifier => unit.getExpectedTokenBytes(field_node.token, .identifier), .string_literal => try unit.fixupStringLiteral(context, field_node.token), .discard => try std.mem.concat(context.allocator, u8, &.{ "_", &.{'0' + b: { @@ -9875,7 +9853,7 @@ pub const Builder = struct { switch (type_expect) { .type => |type_index| { const expected_type = unit.types.get(type_index); - const identifier = unit.getExpectedTokenBytes(Token.addInt(node.token, 1), .identifier); + const identifier = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(node.token) + 1), .identifier); const hash = try unit.processIdentifier(context, identifier); switch (expected_type.*) { .integer => |*integer| switch (integer.kind) { @@ -10587,7 +10565,7 @@ pub const Builder = struct { switch (expected_type.*) { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| { - const identifier = unit.getExpectedTokenBytes(Token.addInt(node.token, 1), .identifier); + const identifier = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(node.token) + 1), .identifier); const hash = try unit.processIdentifier(context, identifier); for (enum_type.fields.slice()) |field_index| { const field = unit.enum_fields.get(field_index); @@ -13045,7 +13023,7 @@ pub const Builder = struct { assert(initialization_node.id == .container_field_initialization); assert(initialization_node.left != .null); assert(initialization_node.right == .null); - const field_name = unit.getExpectedTokenBytes(Token.addInt(initialization_node.token, 1), .identifier); + const field_name = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(initialization_node.token) + 1), .identifier); const field_name_hash = try unit.processIdentifier(context, field_name); if (field_name_hash == field.name) { @@ -14124,12 +14102,11 @@ pub const Builder = struct { } } - fn emitLocalVariableDeclaration(builder: *Builder, unit: *Unit, context: *const Context, token: Token.Index, mutability: Mutability, declaration_type: Type.Index, initialization: V, emit: bool, maybe_name: ?[]const u8) !Instruction.Index { + fn emitLocalVariableDeclaration(builder: *Builder, unit: *Unit, context: *const Context, token_index: Token.Index, mutability: Mutability, declaration_type: Type.Index, initialization: V, emit: bool, maybe_name: ?[]const u8) !Instruction.Index { assert(builder.current_scope.local); - const index = Token.unwrap(token); - const id = unit.token_buffer.ids[index]; - const identifier = if (maybe_name) |name| name else switch (id) { - .identifier => unit.getExpectedTokenBytes(token, .identifier), + const token = unit.token_buffer.tokens.get(token_index); + const identifier = if (maybe_name) |name| name else switch (token.id) { + .identifier => unit.getExpectedTokenBytes(token_index, .identifier), .discard => blk: { const name = try join_name(context, "_", unit.discard_identifiers, 10); unit.discard_identifiers += 1; @@ -14139,7 +14116,7 @@ pub const Builder = struct { }; // logln(.compilation, .identifier, "Analyzing local declaration {s}", .{identifier}); const identifier_hash = try unit.processIdentifier(context, identifier); - const token_debug_info = builder.getTokenDebugInfo(unit, token); + const token_debug_info = builder.getTokenDebugInfo(unit, token_index); const look_in_parent_scopes = true; if 
(builder.current_scope.lookupDeclaration(identifier_hash, look_in_parent_scopes)) |lookup| { @@ -14255,7 +14232,7 @@ pub const Builder = struct { => { // All variables here are local assert(builder.current_scope.local); - const expected_identifier_token_index = Token.addInt(statement_node.token, 1); + const expected_identifier_token_index: Token.Index = @enumFromInt(@intFromEnum(statement_node.token) + 1); const mutability: Mutability = switch (statement_node.id) { .constant_symbol_declaration => .@"const", @@ -16826,6 +16803,7 @@ pub const Enum = struct { pub const Unit = struct { node_buffer: Node.List = .{}, + token_buffer: Token.Buffer, files: Debug.File.List = .{}, types: Type.List = .{}, structs: Struct.List = .{}, @@ -16847,7 +16825,6 @@ pub const Unit = struct { constant_arrays: V.Comptime.ConstantArray.List = .{}, constant_slices: V.Comptime.ConstantSlice.List = .{}, error_fields: Type.Error.Field.List = .{}, - token_buffer: Token.Buffer = .{}, node_lists: UnpinnedArray(UnpinnedArray(Node.Index)) = .{}, file_token_offsets: MyHashMap(Token.Range, Debug.File.Index) = .{}, file_map: MyHashMap([]const u8, Debug.File.Index) = .{}, @@ -17095,7 +17072,7 @@ pub const Unit = struct { switch (switch_case_condition_node.id) { .dot_literal => { - if (try unit.typeCheckEnumLiteral(context, Token.addInt(switch_case_condition_node.token, 1), enum_type)) |enum_field_index| { + if (try unit.typeCheckEnumLiteral(context, @enumFromInt(@intFromEnum(switch_case_condition_node.token) + 1), enum_type)) |enum_field_index| { for (existing_enums.slice()) |existing| { if (enum_field_index == existing) { // Duplicate case @@ -17117,7 +17094,7 @@ pub const Unit = struct { const case_condition_node = unit.getNode(case_condition_node_index); switch (case_condition_node.id) { .dot_literal => { - if (try unit.typeCheckEnumLiteral(context, Token.addInt(case_condition_node.token, 1), enum_type)) |enum_field_index| { + if (try unit.typeCheckEnumLiteral(context, @enumFromInt(@intFromEnum(case_condition_node.token) + 1), enum_type)) |enum_field_index| { for (existing_enums.slice()) |existing| { if (enum_field_index == existing) { // Duplicate case @@ -17193,23 +17170,12 @@ pub const Unit = struct { unreachable; } - fn getTokenId(unit: *Unit, token_index: Token.Index) Token.Id { - const index = Token.unwrap(token_index); - assert(index < unit.token_buffer.length); - const id = unit.token_buffer.ids[index]; - return id; - } - fn getExpectedTokenBytes(unit: *Unit, token_index: Token.Index, expected_id: Token.Id) []const u8 { - const id = unit.getTokenId(token_index); - // logln(.compilation, .token_bytes, "trying to get {s} from token of id {s}", .{ @tagName(expected_id), @tagName(id) }); - if (id != expected_id) @panic("Unexpected token"); - const index = Token.unwrap(token_index); - const offset = unit.token_buffer.offsets[index]; - const len = unit.token_buffer.lengths[index]; + const token = unit.token_buffer.tokens.get(token_index); const file_index = unit.findTokenFile(token_index); const file = unit.files.get(file_index); - const bytes = file.source_code[offset..][0..len]; + if (token.id != expected_id) @panic("Unexpected token"); + const bytes = file.source_code[token.offset..][0..token.length]; return bytes; } @@ -17411,7 +17377,7 @@ pub const Unit = struct { file.status = .loaded_into_memory; assert(file.status == .loaded_into_memory); - file.lexer = try lexer.analyze(context.my_allocator, file.source_code, &unit.token_buffer); + file.lexer = try lexer.analyze(file.source_code, &unit.token_buffer); 
assert(file.status == .loaded_into_memory); file.status = .lexed; try unit.file_token_offsets.put_no_clobber(context.my_allocator, .{ @@ -17741,84 +17707,11 @@ pub const Token = struct { length: u32, id: Token.Id, - pub const Buffer = struct { - lines: [*]u32 = undefined, - offsets: [*]u32 = undefined, - lengths: [*]u32 = undefined, - ids: [*]Token.Id = undefined, - line_offsets: UnpinnedArray(u32) = .{}, - length: data_structures.IndexType = 0, - capacity: data_structures.IndexType = 0, - - const factor = 2; - const initial_item_count = 16; - - pub fn append_with_capacity(buffer: *Buffer, token: Token) void { - const index = buffer.length; - assert(index < buffer.capacity); - - buffer.lines[index] = token.line; - buffer.offsets[index] = token.offset; - buffer.lengths[index] = token.length; - buffer.ids[index] = token.id; - - buffer.length += 1; - } - - pub fn ensure_with_capacity(buffer: *Buffer, allocator: *MyAllocator, unused_capacity: data_structures.IndexType) !void { - const desired_capacity = buffer.length + unused_capacity; - var new_capacity = @max(buffer.capacity, initial_item_count); - while (new_capacity < desired_capacity) { - new_capacity *= factor; - } - - if (new_capacity > buffer.capacity) { - { - const line_byte_ptr: [*]u8 = @ptrCast(buffer.lines); - const line_bytes = line_byte_ptr[0 .. buffer.length * @sizeOf(u32)]; - const new_line_bytes = try allocator.reallocate(line_bytes, new_capacity * @sizeOf(u32), @alignOf(u32)); - buffer.lines = @ptrCast(@alignCast(new_line_bytes)); - } - - { - const offset_byte_ptr: [*]u8 = @ptrCast(buffer.offsets); - const offset_bytes = offset_byte_ptr[0 .. buffer.length * @sizeOf(u32)]; - const new_offset_bytes = try allocator.reallocate(offset_bytes, new_capacity * @sizeOf(u32), @alignOf(u32)); - buffer.offsets = @ptrCast(@alignCast(new_offset_bytes)); - } - - { - const length_byte_ptr: [*]u8 = @ptrCast(buffer.lengths); - const length_bytes = length_byte_ptr[0 .. buffer.length * @sizeOf(u32)]; - const new_length_bytes = try allocator.reallocate(length_bytes, new_capacity * @sizeOf(u32), @alignOf(u32)); - buffer.lengths = @ptrCast(@alignCast(new_length_bytes)); - } - - { - const id_byte_ptr: [*]u8 = @ptrCast(buffer.ids); - const id_bytes = id_byte_ptr[0 .. 
buffer.length * @sizeOf(Token.Id)]; - const new_id_bytes = try allocator.reallocate(id_bytes, new_capacity * @sizeOf(Token.Id), @alignOf(Token.Id)); - buffer.ids = @ptrCast(@alignCast(new_id_bytes)); - } - - buffer.capacity = new_capacity; - } - } - - pub fn getOffset(buffer: *const Buffer) Token.Index { - return @enumFromInt(buffer.length); - } - - pub fn getLineOffset(buffer: *const Buffer) u32 { - return @intCast(buffer.line_offsets.length); - } + pub const Buffer = struct{ + line_offsets: PinnedArray(u32) = .{}, + tokens: PinnedArray(Token) = .{}, }; - - pub const Range = struct { - start: Token.Index, - count: u32, - }; - + pub const Id = enum { keyword_unsigned_integer, keyword_signed_integer, @@ -17992,7 +17885,12 @@ pub const Token = struct { } }; - pub usingnamespace data_structures.getIndexForType(@This(), enum {}); + pub const Index = PinnedArray(Token).Index; + + pub const Range = struct { + start: Token.Index, + count: u32, + }; }; pub const InlineAssembly = struct { diff --git a/bootstrap/frontend/lexer.zig b/bootstrap/frontend/lexer.zig index b125f62..53491bd 100644 --- a/bootstrap/frontend/lexer.zig +++ b/bootstrap/frontend/lexer.zig @@ -3,10 +3,12 @@ const Allocator = std.mem.Allocator; const assert = std.debug.assert; const log = std.log; -const data_structures = @import("../library.zig"); -const enumFromString = data_structures.enumFromString; -const MyAllocator = data_structures.MyAllocator; -const UnpinnedArray = data_structures.UnpinnedArray; +const library = @import("../library.zig"); +const byte_equal = library.byte_equal; +const enumFromString = library.enumFromString; +const MyAllocator = library.MyAllocator; +const PinnedArray = library.PinnedArray; +const UnpinnedArray = library.UnpinnedArray; const Compilation = @import("../Compilation.zig"); const File = Compilation.File; @@ -43,31 +45,31 @@ pub const Logger = enum { }); }; -pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.Buffer) !Result { +pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { assert(text.len <= std.math.maxInt(u32)); const len: u32 = @intCast(text.len); var lexer = Result{ - .offset = token_buffer.getOffset(), - .line_offset = token_buffer.getLineOffset(), + .offset = @enumFromInt(token_buffer.tokens.length), + .line_offset = token_buffer.line_offsets.length, .count = 0, .line_count = 0, }; const time_start = std.time.Instant.now() catch unreachable; - try token_buffer.line_offsets.append(allocator, 0); + token_buffer.line_offsets.append(0); for (text, 0..) |byte, index| { if (byte == '\n') { - try token_buffer.line_offsets.append(allocator, @intCast(index + 1)); + token_buffer.line_offsets.append(@intCast(index + 1)); } } var index: u32 = 0; var line_index: u32 = lexer.line_offset; - try token_buffer.ensure_with_capacity(allocator, len / 3); + // try token_buffer.ensure_with_capacity(allocator, len / 3); // logln(.lexer, .end, "START LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset }); @@ -110,7 +112,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B const string = text[start_index..][0 .. 
index - start_index]; break :blk if (enumFromString(Compilation.FixedKeyword, string)) |fixed_keyword| switch (fixed_keyword) { inline else => |comptime_fixed_keyword| @field(Token.Id, "fixed_keyword_" ++ @tagName(comptime_fixed_keyword)), - } else if (data_structures.byte_equal(string, "_")) .discard else .identifier; + } else if (byte_equal(string, "_")) .discard else .identifier; }, '0'...'9' => blk: { // Detect other non-decimal literals @@ -481,7 +483,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B }, // Asm statement (special treatment) '`' => { - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_backtick, .line = line_index, .offset = start_index, @@ -508,7 +510,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B } } - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .identifier, .offset = start_i, .length = index - start_i, @@ -516,7 +518,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B }); }, ',' => { - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_comma, .line = line_index, .offset = start_i, @@ -525,7 +527,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B index += 1; }, ';' => { - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_semicolon, .line = line_index, .offset = start_i, @@ -534,7 +536,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B index += 1; }, '{' => { - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_left_brace, .line = line_index, .offset = start_i, @@ -543,7 +545,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B index += 1; }, '}' => { - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_right_brace, .line = line_index, .offset = start_i, @@ -572,7 +574,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B } } - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .number_literal, .line = line_index, .offset = start_i, @@ -586,7 +588,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B } } - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = .operator_backtick, .line = line_index, .length = 1, @@ -606,7 +608,7 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B const end_index = index; const token_length = end_index - start_index; - token_buffer.append_with_capacity(.{ + token_buffer.tokens.append(.{ .id = token_id, .offset = start_index, .length = token_length, @@ -619,8 +621,8 @@ pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.B // logln(.lexer, .end, "END LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset }); - lexer.count = Token.sub(token_buffer.getOffset(), lexer.offset); - lexer.line_count = token_buffer.getLineOffset() - lexer.line_offset; + lexer.count = token_buffer.tokens.length - @intFromEnum(lexer.offset); + lexer.line_count = token_buffer.line_offsets.length - lexer.line_offset; const time_end = std.time.Instant.now() catch unreachable; lexer.time = time_end.since(time_start); diff --git a/bootstrap/frontend/parser.zig b/bootstrap/frontend/parser.zig index 78c1a0e..b097756 100644 --- a/bootstrap/frontend/parser.zig +++ 
b/bootstrap/frontend/parser.zig @@ -259,18 +259,14 @@ const Analyzer = struct { } fn getTokenOffset(analyzer: *Analyzer, token_index: Token.Index) u32 { - const index = Token.unwrap(token_index); - assert(index < analyzer.token_buffer.length); - const offset = analyzer.token_buffer.offsets[index]; - return offset; + const token = analyzer.token_buffer.tokens.get(token_index); + return token.offset; } fn peekTokenAhead(analyzer: *Analyzer, ahead_offset: u32) Token.Id { - const token_index = Token.addInt(analyzer.token_i, ahead_offset); - const index = Token.unwrap(token_index); - assert(index < analyzer.token_buffer.length); - const token = analyzer.token_buffer.ids[index]; - return token; + const index = @intFromEnum(analyzer.token_i) + ahead_offset; + const token = analyzer.token_buffer.tokens.get_unchecked(index); + return token.id; } fn peekToken(analyzer: *Analyzer) Token.Id { @@ -280,11 +276,11 @@ const Analyzer = struct { fn hasTokens(analyzer: *Analyzer) bool { const token_end = analyzer.getTokenEnd(); - return Token.unwrap(analyzer.token_i) < token_end; + return @intFromEnum(analyzer.token_i) < token_end; } fn getTokenEnd(analyzer: *const Analyzer) u32 { - return @intFromEnum(Token.addInt(analyzer.lexer.offset, analyzer.lexer.count)); + return @intFromEnum(analyzer.lexer.offset) + analyzer.lexer.count; } fn consumeToken(analyzer: *Analyzer) void { @@ -292,29 +288,26 @@ const Analyzer = struct { } fn consumeTokens(analyzer: *Analyzer, token_count: u32) void { - assert(Token.unwrap(Token.addInt(analyzer.token_i, token_count)) <= analyzer.getTokenEnd()); + assert((@intFromEnum(analyzer.token_i) + token_count) <= analyzer.getTokenEnd()); // log(.parser, .consume_token, "Consuming {} {s}: ", .{ token_count, if (token_count == 1) "token" else "tokens" }); - for (0..token_count) |i_usize| { - const i: u32 = @intCast(i_usize); - const token_id = analyzer.peekTokenAhead(i); - _ = token_id; // autofix - const token_index = Token.addInt(analyzer.token_i, i); - const token_bytes = analyzer.bytes(token_index); - _ = token_bytes; // autofix - // log(.parser, .consume_token, "{s}, '{s}'", .{ @tagName(token_id), token_bytes }); - } + // for (0..token_count) |i_usize| { + // const i: u32 = @intCast(i_usize); + // const token_id = analyzer.peekTokenAhead(i); + // _ = token_id; // autofix + // const token_index = @intFromEnum(analyzer.token_i) + i; + // const token_bytes = analyzer.bytes(token_index); + // _ = token_bytes; // autofix + // // log(.parser, .consume_token, "{s}, '{s}'", .{ @tagName(token_id), token_bytes }); + // } // log(.parser, .consume_token, "\n", .{}); - analyzer.token_i = Token.addInt(analyzer.token_i, token_count); + analyzer.token_i = @enumFromInt(@intFromEnum(analyzer.token_i) + token_count); } fn bytes(analyzer: *const Analyzer, token_index: Token.Index) []const u8 { - const index = Token.unwrap(token_index); - assert(index < analyzer.token_buffer.length); - const offset = analyzer.token_buffer.offsets[index]; - const len = analyzer.token_buffer.lengths[index]; - const slice = analyzer.source_file[offset..][0..len]; + const token = analyzer.token_buffer.tokens.get(token_index); + const slice = analyzer.source_file[token.offset..][0..token.length]; return slice; } @@ -1814,7 +1807,7 @@ const Analyzer = struct { fn processContainerType(analyzer: *Analyzer, maybe_token_id: ?Token.Id) !Node.Index { const token_i = if (maybe_token_id) |tid| try analyzer.expectToken(tid) else analyzer.token_i; - assert(Token.unwrap(analyzer.token_i) < analyzer.token_buffer.length); + 
assert(@intFromEnum(analyzer.token_i) < analyzer.token_buffer.tokens.length); const token_id = maybe_token_id orelse .fixed_keyword_struct; const container_type: Compilation.ContainerType = switch (token_id) { .fixed_keyword_struct => .@"struct", @@ -2323,7 +2316,7 @@ const Analyzer = struct { .right = blk: { const t = analyzer.token_i; analyzer.consumeToken(); - break :blk Node.wrap(Token.unwrap(t)); + break :blk Node.wrap(@intFromEnum(t)); }, }), else => |t| @panic(@tagName(t)), @@ -2363,7 +2356,7 @@ const Analyzer = struct { try analyzer.node_lists.append(analyzer.my_allocator, node_list); return try analyzer.addNode(.{ .id = .node_list, - .token = Token.wrap(0), + .token = @enumFromInt(0), .left = @enumFromInt(index), .right = Node.Index.null, }); diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 3f1ca56..b68c495 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -26,7 +26,7 @@ pub const Arena = struct{ pub fn init(requested_size: u64) !*Arena { var size = requested_size; - const size_roundup_granularity = 64 * 1024 * 1024; + const size_roundup_granularity = commit_granularity; size += size_roundup_granularity - 1; size -= size % size_roundup_granularity; const initial_commit_size = commit_granularity; @@ -82,14 +82,82 @@ pub const Arena = struct{ } }; +const pinned_array_page_size = 2 * 1024 * 1024; +const pinned_array_max_size = std.math.maxInt(u32) - pinned_array_page_size; +const pinned_array_default_granularity = pinned_array_page_size; +/// This must be used with big arrays +pub fn PinnedArray(comptime T: type) type { + return struct{ + pointer: [*]T = @constCast((&[_]T{}).ptr), + length: u32 = 0, + granularity: u32 = 0, + + pub const Index = enum(u32){ + null = 0xffff_ffff, + _, + }; + + const Array = @This(); + + pub fn get_unchecked(array: *Array, index: u32) *T { + const slice = array.pointer[0..array.length]; + return &slice[index]; + } + + pub fn get(array: *Array, index: Index) *T { + assert(index != .null); + const i = @intFromEnum(index); + return array.get_unchecked(i); + } + + pub fn get_index(array: *Array, item: *const T) Index{ + assert(item - array.pointer > (@divExact(pinned_array_max_size, @sizeOf(T)))); + return @enumFromInt(item - array.pointer); + } + + pub fn init(granularity: u32) !Array{ + const raw_ptr = try reserve(pinned_array_max_size); + try commit(raw_ptr, granularity); + return Array{ + .pointer = @alignCast(@ptrCast(raw_ptr)), + .length = 0, + .granularity = granularity, + }; + } + + pub fn init_with_default_granularity() !Array{ + return try Array.init(pinned_array_default_granularity); + } + + pub fn append(array: *Array, item: T) void { + if (((array.length + 1) * @sizeOf(T)) & (array.granularity - 1) == 0) { + const length: u64 = array.length; + assert((length + 1) * @sizeOf(T) <= pinned_array_max_size); + const ptr: [*]u8 = @ptrCast(array.pointer); + commit(ptr + ((length + 1) * @sizeOf(T)), array.granularity) catch unreachable; + } + + array.append_with_capacity(item); + } + + pub fn append_with_capacity(array: *Array, item: T) void { + const index = array.length; + assert(index * @sizeOf(T) < pinned_array_max_size); + array.length += 1; + array.pointer[index] = item; + } + }; +} + pub fn reserve(size: u64) ![*]u8{ - return switch (os) { + const slice = switch (os) { .linux, .macos => try std.posix.mmap(null, size, std.posix.PROT.NONE, .{ .ANONYMOUS = true, .TYPE = .PRIVATE, }, -1, 0), else => @compileError("OS not supported"), }; + return slice.ptr; } pub fn commit(bytes: [*]u8, size: u64) !void{ From 
de930c07295e3370c838af2bca1bae555f9d8f88 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Wed, 24 Apr 2024 18:22:11 -0600 Subject: [PATCH 02/14] Substitute nodes --- bootstrap/Compilation.zig | 10 ++++++---- bootstrap/frontend/lexer.zig | 27 +++++++++++---------------- bootstrap/frontend/parser.zig | 31 +++++++++++++++++-------------- bootstrap/library.zig | 18 +++++++++++------- 4 files changed, 45 insertions(+), 41 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index f7dcccb..cf805c4 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -118,6 +118,7 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .tokens = try PinnedArray(Token).init_with_default_granularity(), .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, + .node_buffer = try PinnedArray(Node).init_with_default_granularity(), }; try unit.compile(context); @@ -3014,6 +3015,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o .tokens = try PinnedArray(Token).init_with_default_granularity(), .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, + .node_buffer = try PinnedArray(Node).init_with_default_granularity(), }; try unit.compile(context); @@ -4617,7 +4619,7 @@ pub const Builder = struct { fn resolveIntrinsic(builder: *Builder, unit: *Unit, context: *const Context, type_expect: Type.Expect, node_index: Node.Index, side: Side) anyerror!V { const node = unit.getNode(node_index); - const intrinsic_id: IntrinsicId = @enumFromInt(Node.unwrap(node.right)); + const intrinsic_id: IntrinsicId = @enumFromInt(@intFromEnum(node.right)); const argument_node_list = unit.getNodeList(node.left); switch (intrinsic_id) { @@ -9604,7 +9606,7 @@ pub const Builder = struct { switch (node.id) { .intrinsic => { const argument_node_list = unit.getNodeList(node.left); - const intrinsic_id: IntrinsicId = @enumFromInt(Node.unwrap(node.right)); + const intrinsic_id: IntrinsicId = @enumFromInt(@intFromEnum(node.right)); switch (intrinsic_id) { .import => { assert(argument_node_list.len == 1); @@ -16802,7 +16804,7 @@ pub const Enum = struct { }; pub const Unit = struct { - node_buffer: Node.List = .{}, + node_buffer: PinnedArray(Node), token_buffer: Token.Buffer, files: Debug.File.List = .{}, types: Type.List = .{}, @@ -17153,7 +17155,7 @@ pub const Unit = struct { fn getNodeListFromNode(unit: *Unit, node: *const Node) []const Node.Index { assert(node.id == .node_list); const list_index = node.left; - const node_list = unit.node_lists.slice()[Node.unwrap(list_index)]; + const node_list = unit.node_lists.slice()[@intFromEnum(list_index)]; return node_list.pointer[0..node_list.length]; } diff --git a/bootstrap/frontend/lexer.zig b/bootstrap/frontend/lexer.zig index 53491bd..5c96a73 100644 --- a/bootstrap/frontend/lexer.zig +++ b/bootstrap/frontend/lexer.zig @@ -58,11 +58,11 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { const time_start = std.time.Instant.now() catch unreachable; - token_buffer.line_offsets.append(0); + _ = token_buffer.line_offsets.append(0); for (text, 0..) 
|byte, index| { if (byte == '\n') { - token_buffer.line_offsets.append(@intCast(index + 1)); + _ = token_buffer.line_offsets.append(@intCast(index + 1)); } } @@ -483,7 +483,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { }, // Asm statement (special treatment) '`' => { - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_backtick, .line = line_index, .offset = start_index, @@ -510,7 +510,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { } } - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .identifier, .offset = start_i, .length = index - start_i, @@ -518,7 +518,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { }); }, ',' => { - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_comma, .line = line_index, .offset = start_i, @@ -527,7 +527,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { index += 1; }, ';' => { - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_semicolon, .line = line_index, .offset = start_i, @@ -536,7 +536,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { index += 1; }, '{' => { - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_left_brace, .line = line_index, .offset = start_i, @@ -545,7 +545,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { index += 1; }, '}' => { - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_right_brace, .line = line_index, .offset = start_i, @@ -574,7 +574,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { } } - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .number_literal, .line = line_index, .offset = start_i, @@ -588,7 +588,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { } } - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = .operator_backtick, .line = line_index, .length = 1, @@ -608,19 +608,14 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { const end_index = index; const token_length = end_index - start_index; - token_buffer.tokens.append(.{ + _ = token_buffer.tokens.append(.{ .id = token_id, .offset = start_index, .length = token_length, .line = line_index, }); - // const line_offset = token_buffer.line_offsets.pointer[line_index]; - // const column = start_index - line_offset; - // logln(.lexer, .new_token, "T at line {}, column {}, byte offset {}, with length {} -line offset: {}- ({s})", .{ line_index, column, start_index, token_length, line_offset, @tagName(token_id) }); } - // logln(.lexer, .end, "END LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset }); - lexer.count = token_buffer.tokens.length - @intFromEnum(lexer.offset); lexer.line_count = token_buffer.line_offsets.length - lexer.line_offset; diff --git a/bootstrap/frontend/parser.zig b/bootstrap/frontend/parser.zig index b097756..fcefd89 100644 --- a/bootstrap/frontend/parser.zig +++ b/bootstrap/frontend/parser.zig @@ -2,10 +2,13 @@ const std = @import("std"); const Allocator = std.mem.Allocator; const assert = std.debug.assert; -const data_structures = @import("../library.zig"); -const UnpinnedArray = data_structures.UnpinnedArray; -const BlockList = data_structures.BlockList; -const enumFromString = data_structures.enumFromString; +const library = 
@import("../library.zig"); +const byte_equal = library.byte_equal; +const BlockList = library.BlockList; +const enumFromString = library.enumFromString; +const PinnedArray = library.PinnedArray; +const MyAllocator = library.MyAllocator; +const UnpinnedArray = library.UnpinnedArray; const lexer = @import("lexer.zig"); @@ -62,8 +65,7 @@ pub const Node = struct { token: Token.Index, id: Id, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(Node).Index; pub const Range = struct { start: u32, @@ -226,11 +228,11 @@ const Analyzer = struct { lexer: lexer.Result, token_i: Token.Index, token_buffer: *Token.Buffer, - nodes: *Node.List, + nodes: *PinnedArray(Node), node_lists: *UnpinnedArray(UnpinnedArray(Node.Index)), source_file: []const u8, allocator: Allocator, - my_allocator: *data_structures.MyAllocator, + my_allocator: *MyAllocator, suffix_depth: usize = 0, fn expectToken(analyzer: *Analyzer, expected_token_id: Token.Id) !Token.Index { @@ -339,7 +341,7 @@ const Analyzer = struct { const identifier_name = analyzer.bytes(identifier); const attribute_node = inline for (@typeInfo(Compilation.Debug.Declaration.Global.Attribute).Enum.fields) |enum_field| { - if (data_structures.byte_equal(identifier_name, enum_field.name)) { + if (byte_equal(identifier_name, enum_field.name)) { const attribute = @field(Compilation.Debug.Declaration.Global.Attribute, enum_field.name); const attribute_node = switch (attribute) { .@"export", @@ -427,7 +429,7 @@ const Analyzer = struct { const identifier_name = analyzer.bytes(identifier); const attribute_node = inline for (@typeInfo(Compilation.Function.Attribute).Enum.fields) |enum_field| { - if (data_structures.byte_equal(identifier_name, enum_field.name)) { + if (byte_equal(identifier_name, enum_field.name)) { const attribute = @field(Compilation.Function.Attribute, enum_field.name); const attribute_node = switch (attribute) { .naked => try analyzer.addNode(.{ @@ -926,7 +928,7 @@ const Analyzer = struct { const intrinsic_name = analyzer.bytes(intrinsic_token)[1..]; const intrinsic_id = inline for (@typeInfo(Compilation.IntrinsicId).Enum.fields) |enum_field| { - if (data_structures.byte_equal(enum_field.name, intrinsic_name)) { + if (byte_equal(enum_field.name, intrinsic_name)) { break @field(Compilation.IntrinsicId, enum_field.name); } } else @panic(intrinsic_name); @@ -2316,7 +2318,7 @@ const Analyzer = struct { .right = blk: { const t = analyzer.token_i; analyzer.consumeToken(); - break :blk Node.wrap(@intFromEnum(t)); + break :blk @enumFromInt(@intFromEnum(t)); }, }), else => |t| @panic(@tagName(t)), @@ -2328,7 +2330,8 @@ const Analyzer = struct { } fn addNode(analyzer: *Analyzer, node: Node) !Node.Index { - const node_index = try analyzer.nodes.append(analyzer.my_allocator, node); + const node_pointer = analyzer.nodes.append(node); + const node_index = analyzer.nodes.get_index(node_pointer); // logln(.parser, .node_creation, "Adding node #{} {s} to file #{} (left: {}, right: {})", .{ Node.unwrap(node_index), @tagName(node.id), File.unwrap(analyzer.file_index), switch (node.left) { // .null => 0xffff_ffff, // else => Node.unwrap(node.left), @@ -2377,7 +2380,7 @@ const Analyzer = struct { }; // Here it is assumed that left brace is consumed -pub fn analyze(allocator: Allocator, my_allocator: *data_structures.MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *Node.List, node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result { +pub fn 
analyze(allocator: Allocator, my_allocator: *MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result { const start = std.time.Instant.now() catch unreachable; var analyzer = Analyzer{ .lexer = lexer_result, diff --git a/bootstrap/library.zig b/bootstrap/library.zig index b68c495..cf74d68 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -110,9 +110,11 @@ pub fn PinnedArray(comptime T: type) type { return array.get_unchecked(i); } - pub fn get_index(array: *Array, item: *const T) Index{ - assert(item - array.pointer > (@divExact(pinned_array_max_size, @sizeOf(T)))); - return @enumFromInt(item - array.pointer); + pub fn get_index(array: *Array, item: *T) Index{ + const many_item: [*]T = @ptrCast(item); + const result = @intFromPtr(many_item) - @intFromPtr(array.pointer); + assert(result < pinned_array_max_size); + return @enumFromInt(@divExact(result, @sizeOf(T))); } pub fn init(granularity: u32) !Array{ @@ -129,7 +131,7 @@ pub fn PinnedArray(comptime T: type) type { return try Array.init(pinned_array_default_granularity); } - pub fn append(array: *Array, item: T) void { + pub fn append(array: *Array, item: T) *T { if (((array.length + 1) * @sizeOf(T)) & (array.granularity - 1) == 0) { const length: u64 = array.length; assert((length + 1) * @sizeOf(T) <= pinned_array_max_size); @@ -137,14 +139,16 @@ pub fn PinnedArray(comptime T: type) type { commit(ptr + ((length + 1) * @sizeOf(T)), array.granularity) catch unreachable; } - array.append_with_capacity(item); + return array.append_with_capacity(item); } - pub fn append_with_capacity(array: *Array, item: T) void { + pub fn append_with_capacity(array: *Array, item: T) *T { const index = array.length; assert(index * @sizeOf(T) < pinned_array_max_size); array.length += 1; - array.pointer[index] = item; + const ptr = &array.pointer[index]; + ptr.* = item; + return ptr; } }; } From d7301d532f295769d56b594cd8f59e26b066fcdb Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Wed, 24 Apr 2024 18:25:02 -0600 Subject: [PATCH 03/14] Remove residual use of ArrayList --- bootstrap/main.zig | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/bootstrap/main.zig b/bootstrap/main.zig index 7fb0578..99e7d35 100644 --- a/bootstrap/main.zig +++ b/bootstrap/main.zig @@ -9,6 +9,7 @@ const library = @import("library.zig"); const byte_equal = library.byte_equal; const MyAllocator = library.MyAllocator; const PageAllocator = library.PageAllocator; +const UnpinnedArray = library.UnpinnedArray; const env_detecting_libc_paths = "NATIVITY_IS_DETECTING_LIBC_PATHS"; @@ -38,13 +39,13 @@ pub fn main() !void { assert(arguments.len > 0); const home_dir = std.posix.getenv("HOME") orelse unreachable; const timestamp = std.time.milliTimestamp(); - var argument_list = std.ArrayList(u8).init(std.heap.page_allocator); + var argument_list = UnpinnedArray(u8){}; for (arguments) |arg| { - argument_list.appendSlice(arg) catch {}; - argument_list.append(' ') catch {}; + argument_list.append_slice(context.my_allocator, arg) catch {}; + argument_list.append(context.my_allocator, ' ') catch {}; } - argument_list.append('\n') catch {}; - std.fs.cwd().writeFile(std.fmt.allocPrint(std.heap.page_allocator, "{s}/dev/nativity/nat/invocation_log_{}", .{ home_dir, timestamp }) catch unreachable, argument_list.items) catch {}; + argument_list.append(context.my_allocator, '\n') catch {}; + 
std.fs.cwd().writeFile(std.fmt.allocPrint(std.heap.page_allocator, "{s}/dev/nativity/nat/invocation_log_{}", .{ home_dir, timestamp }) catch unreachable, argument_list.slice()) catch {}; } if (arguments.len <= 1) { From cabe2736706757907e0e07072ee6b5e220bbd4dd Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Wed, 24 Apr 2024 19:02:00 -0600 Subject: [PATCH 04/14] Pave the way for more changes --- bootstrap/Compilation.zig | 88 +++++++++++++++++++---------------- bootstrap/frontend/parser.zig | 48 ++++++++++--------- bootstrap/library.zig | 6 +-- lib/std/std.nat | 2 +- 4 files changed, 79 insertions(+), 65 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index cf805c4..2280119 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -2,19 +2,25 @@ const std = @import("std"); const Allocator = std.mem.Allocator; -const data_structures = @import("library.zig"); -const assert = data_structures.assert; -const byte_equal = data_structures.byte_equal; -const byte_equal_terminated = data_structures.byte_equal_terminated; -const first_slice = data_structures.first_slice; -const starts_with_slice = data_structures.starts_with_slice; -const PinnedArray = data_structures.PinnedArray; -const UnpinnedArray = data_structures.UnpinnedArray; -const BlockList = data_structures.BlockList; -const MyAllocator = data_structures.MyAllocator; -const MyHashMap = data_structures.MyHashMap; -const span = data_structures.span; -const format_int = data_structures.format_int; +const library = @import("library.zig"); +const assert = library.assert; +const align_forward = library.align_forward; +const Arena = library.Arena; +const byte_equal = library.byte_equal; +const enumFromString = library.enumFromString; +const byte_equal_terminated = library.byte_equal_terminated; +const last_byte = library.last_byte; +const first_byte = library.first_byte; +const first_slice = library.first_slice; +const starts_with_slice = library.starts_with_slice; +const PinnedArray = library.PinnedArray; +const UnpinnedArray = library.UnpinnedArray; +const BlockList = library.BlockList; +const MyAllocator = library.MyAllocator; +const MyHashMap = library.MyHashMap; +const span = library.span; +const format_int = library.format_int; +const my_hash = library.my_hash; const lexer = @import("frontend/lexer.zig"); const parser = @import("frontend/parser.zig"); @@ -73,6 +79,7 @@ pub fn createContext(allocator: Allocator, my_allocator: *MyAllocator) !*const C .executable_absolute_path = self_exe_path, .directory_absolute_path = self_exe_dir_path, .build_directory = try std.fs.cwd().makeOpenPath("nat", .{}), + .arena = try Arena.init(4 * 1024 * 1024), }; try context.build_directory.makePath(cache_dir_name); @@ -119,6 +126,7 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, .node_buffer = try PinnedArray(Node).init_with_default_granularity(), + .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), }; try unit.compile(context); @@ -226,7 +234,7 @@ fn compileMusl(context: *const Context) MuslContext { const basename = std.fs.path.basename(src_file_relative_path); const target = try context.allocator.dupe(u8, basename); target[target.len - 1] = 'o'; - const hash = data_structures.my_hash(src_file_relative_path); + const hash = my_hash(src_file_relative_path); const hash_string = format_int(&buffer, hash, 16, false); const target_path = try 
std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, hash_string, target }); try musl.compileFileWithClang(context, src_file_relative_path, target_path); @@ -238,7 +246,7 @@ fn compileMusl(context: *const Context) MuslContext { const basename = std.fs.path.basename(src_file_relative_path); const target = try context.allocator.dupe(u8, basename); target[target.len - 1] = 'o'; - const hash = data_structures.my_hash(src_file_relative_path); + const hash = my_hash(src_file_relative_path); const hash_string = format_int(&buffer, hash, 16, false); const target_path = try std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, hash_string, target }); @@ -336,7 +344,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 const argument = arguments[argument_index]; if (argument[0] != '-') { - if (data_structures.last_byte(argument, '.')) |dot_index| { + if (last_byte(argument, '.')) |dot_index| { const extension_string = argument[dot_index..]; const extension: Extension = if (byte_equal(extension_string, ".c")) .c else if (byte_equal(extension_string, ".cpp") or byte_equal(extension_string, ".cxx") or byte_equal(extension_string, ".cc")) .cpp else if (byte_equal(extension_string, ".S")) .assembly else if (byte_equal(extension_string, ".o")) .object else if (byte_equal(extension_string, ".a")) .static_library else if (byte_equal(extension_string, ".so") or @@ -480,7 +488,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 try ld_argv.append(context.my_allocator, "-dylib"); } else if (starts_with_slice(argument, "-Wl,")) { const wl_arg = argument["-Wl,".len..]; - if (data_structures.first_byte(wl_arg, ',')) |comma_index| { + if (first_byte(wl_arg, ',')) |comma_index| { const key = wl_arg[0..comma_index]; const value = wl_arg[comma_index + 1 ..]; try ld_argv.append(context.my_allocator, key); @@ -2943,7 +2951,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o i += 1; const optimize_string = arguments[i]; - optimization = data_structures.enumFromString(Optimization, optimize_string) orelse unreachable; + optimization = enumFromString(Optimization, optimize_string) orelse unreachable; } else { reportUnterminatedArgumentError(current_argument); } @@ -3016,6 +3024,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, .node_buffer = try PinnedArray(Node).init_with_default_granularity(), + .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), }; try unit.compile(context); @@ -3154,11 +3163,11 @@ fn getTypeAbiSize(ty: *Type, unit: *Unit) u32 { const field_type = unit.types.get(field.type); const field_size = getTypeAbiSize(field_type, unit); const field_alignment = getTypeAbiAlignment(field_type, unit); - total_byte_size = @intCast(data_structures.align_forward(total_byte_size, field_alignment)); + total_byte_size = @intCast(align_forward(total_byte_size, field_alignment)); total_byte_size += field_size; } - total_byte_size = @intCast(data_structures.align_forward(total_byte_size, struct_alignment)); + total_byte_size = @intCast(align_forward(total_byte_size, struct_alignment)); break :b total_byte_size; }, @@ -3169,11 +3178,11 @@ fn getTypeAbiSize(ty: *Type, unit: *Unit) u32 { const field_type = unit.types.get(type_index); const field_size = getTypeAbiSize(field_type, unit); const field_alignment = getTypeAbiAlignment(field_type, unit); - total_byte_size = 
@intCast(data_structures.align_forward(total_byte_size, field_alignment)); + total_byte_size = @intCast(align_forward(total_byte_size, field_alignment)); total_byte_size += field_size; } - total_byte_size = @intCast(data_structures.align_forward(total_byte_size, struct_alignment)); + total_byte_size = @intCast(align_forward(total_byte_size, struct_alignment)); break :b total_byte_size; }, @@ -3384,7 +3393,7 @@ pub const Type = union(enum) { } fn hash(types: []const V.Comptime) u32 { - const result = data_structures.my_hash(std.mem.sliceAsBytes(types)); + const result = my_hash(std.mem.sliceAsBytes(types)); return result; } }; @@ -4063,6 +4072,7 @@ pub const Struct = struct { pub const Context = struct { allocator: Allocator, my_allocator: *MyAllocator, + arena: *Arena, cwd_absolute_path: []const u8, directory_absolute_path: []const u8, executable_absolute_path: []const u8, @@ -4128,7 +4138,7 @@ pub const PolymorphicFunction = struct { } fn hash(parameters: []const V.Comptime) u32 { - const result = data_structures.my_hash(std.mem.sliceAsBytes(parameters)); + const result = my_hash(std.mem.sliceAsBytes(parameters)); return result; } }; @@ -7353,7 +7363,7 @@ pub const Builder = struct { const enum_field = unit.enum_fields.get(enum_field_index); const enum_name = unit.getIdentifier(enum_field.name); - function_prototype.calling_convention = data_structures.enumFromString(Function.CallingConvention, enum_name) orelse unreachable; + function_prototype.calling_convention = enumFromString(Function.CallingConvention, enum_name) orelse unreachable; }, else => |t| @panic(@tagName(t)), } @@ -7580,7 +7590,7 @@ pub const Builder = struct { }; assert(alignment == 8 or alignment == 16); - const aligned_size = data_structures.align_forward(size, alignment); + const aligned_size = align_forward(size, alignment); if (alignment == 16) { unreachable; } else { @@ -7687,7 +7697,7 @@ pub const Builder = struct { }; } else { const alignment = ty.getAbiAlignment(unit); - const aligned_size: u16 = @intCast(data_structures.align_forward(size, 8)); + const aligned_size: u16 = @intCast(align_forward(size, 8)); if (alignment < 16 and aligned_size == 16) { const array_type = unit.getArrayType(context, .{ .count = 2, @@ -7928,7 +7938,7 @@ pub const Builder = struct { const offset = base_offset + member_offset; const member_size = field_type.getAbiSize(unit); const member_alignment = field_type.getAbiAlignment(unit); - member_offset = @intCast(data_structures.align_forward(member_offset + member_size, alignment)); + member_offset = @intCast(align_forward(member_offset + member_size, alignment)); // TODO: const native_vector_size = 16; if (size > 16 and ((!is_union and size != member_size) or size > native_vector_size)) { @@ -8033,7 +8043,7 @@ pub const Builder = struct { .type = field.type, .offset = offset_it, }; - offset_it = @intCast(data_structures.align_forward(offset_it + unit.types.get(field.type).getAbiSize(unit), struct_alignment)); + offset_it = @intCast(align_forward(offset_it + unit.types.get(field.type).getAbiSize(unit), struct_alignment)); } assert(last_match != null); @@ -8270,7 +8280,7 @@ pub const Builder = struct { fn get_argument_pair(unit: *Unit, types: [2]Type.Index) Function.AbiInfo { const low_size = unit.types.get(types[0]).getAbiSize(unit); const high_alignment = unit.types.get(types[1]).getAbiAlignment(unit); - const high_start = data_structures.align_forward(low_size, high_alignment); + const high_start = align_forward(low_size, high_alignment); assert(high_start == 8); return .{ .kind = .{ @@ 
-8436,7 +8446,7 @@ pub const Builder = struct { for (struct_options_struct.kind.@"struct".fields.slice(), constant_struct.fields) |field_index, field_value| { const field = unit.struct_fields.get(field_index); const name = unit.getIdentifier(field.name); - const option_id = data_structures.enumFromString(Struct.Options.Id, name) orelse unreachable; + const option_id = enumFromString(Struct.Options.Id, name) orelse unreachable; switch (option_id) { .sliceable => switch (field_value.bool) { true => struct_options.sliceable = .{ @@ -9225,9 +9235,9 @@ pub const Builder = struct { const sizes = [2]u32{ types[0].getAbiSize(unit), types[1].getAbiSize(unit) }; const alignment = @max(alignments[0], alignments[1]); _ = alignment; // autofix - const high_aligned_size: u32 = @intCast(data_structures.align_forward(sizes[1], alignments[1])); + const high_aligned_size: u32 = @intCast(align_forward(sizes[1], alignments[1])); _ = high_aligned_size; // autofix - const high_offset: u32 = @intCast(data_structures.align_forward(sizes[0], alignments[1])); + const high_offset: u32 = @intCast(align_forward(sizes[0], alignments[1])); assert(high_offset + sizes[1] <= argument_type.getAbiSize(unit)); const stack = try builder.createStackVariable(unit, context, argument_type_index, null); @@ -16806,6 +16816,7 @@ pub const Enum = struct { pub const Unit = struct { node_buffer: PinnedArray(Node), token_buffer: Token.Buffer, + node_lists: PinnedArray([]const Node.Index), files: Debug.File.List = .{}, types: Type.List = .{}, structs: Struct.List = .{}, @@ -16827,7 +16838,6 @@ pub const Unit = struct { constant_arrays: V.Comptime.ConstantArray.List = .{}, constant_slices: V.Comptime.ConstantSlice.List = .{}, error_fields: Type.Error.Field.List = .{}, - node_lists: UnpinnedArray(UnpinnedArray(Node.Index)) = .{}, file_token_offsets: MyHashMap(Token.Range, Debug.File.Index) = .{}, file_map: MyHashMap([]const u8, Debug.File.Index) = .{}, identifiers: MyHashMap(u32, []const u8) = .{}, @@ -17155,8 +17165,8 @@ pub const Unit = struct { fn getNodeListFromNode(unit: *Unit, node: *const Node) []const Node.Index { assert(node.id == .node_list); const list_index = node.left; - const node_list = unit.node_lists.slice()[@intFromEnum(list_index)]; - return node_list.pointer[0..node_list.length]; + const node_list = unit.node_lists.get_unchecked(@intFromEnum(list_index)).*; + return node_list; } // TODO: make this fast @@ -17277,7 +17287,7 @@ pub const Unit = struct { } fn processIdentifier(unit: *Unit, context: *const Context, string: []const u8) !u32 { - const hash = data_structures.my_hash(string); + const hash = my_hash(string); if (unit.identifiers.get_pointer(hash) == null) { try unit.identifiers.put_no_clobber(context.my_allocator, hash, string); } @@ -17388,7 +17398,7 @@ pub const Unit = struct { }, file_index); // logln(.parser, .file, "[START PARSING FILE #{} {s}]", .{ file_index, file.package.source_path }); - file.parser = try parser.analyze(context.allocator, context.my_allocator, file.lexer, file.source_code, &unit.token_buffer, &unit.node_buffer, &unit.node_lists); + file.parser = try parser.analyze(context.allocator, context.my_allocator, context.arena, file.lexer, file.source_code, &unit.token_buffer, &unit.node_buffer, &unit.node_lists); // logln(.parser, .file, "[END PARSING FILE #{} {s}]", .{ file_index, file.package.source_path }); assert(file.status == .lexed); file.status = .parsed; @@ -17592,7 +17602,7 @@ pub const Unit = struct { }); for (unit.descriptor.c_source_files) |c_source_file| { - const dot_index = 
data_structures.last_byte(c_source_file, '.') orelse unreachable; + const dot_index = last_byte(c_source_file, '.') orelse unreachable; const path_without_extension = c_source_file[0..dot_index]; const basename = std.fs.path.basename(path_without_extension); const o_file = try std.mem.concat(context.allocator, u8, &.{ basename, ".o" }); diff --git a/bootstrap/frontend/parser.zig b/bootstrap/frontend/parser.zig index fcefd89..ae710d0 100644 --- a/bootstrap/frontend/parser.zig +++ b/bootstrap/frontend/parser.zig @@ -3,6 +3,7 @@ const Allocator = std.mem.Allocator; const assert = std.debug.assert; const library = @import("../library.zig"); +const Arena = library.Arena; const byte_equal = library.byte_equal; const BlockList = library.BlockList; const enumFromString = library.enumFromString; @@ -229,10 +230,11 @@ const Analyzer = struct { token_i: Token.Index, token_buffer: *Token.Buffer, nodes: *PinnedArray(Node), - node_lists: *UnpinnedArray(UnpinnedArray(Node.Index)), + node_lists: *PinnedArray([]const Node.Index), source_file: []const u8, allocator: Allocator, my_allocator: *MyAllocator, + arena: *Arena, suffix_depth: usize = 0, fn expectToken(analyzer: *Analyzer, expected_token_id: Token.Id) !Token.Index { @@ -356,6 +358,7 @@ const Analyzer = struct { break attribute_node; } } else @panic(identifier_name); + try list.append(analyzer.my_allocator, attribute_node); switch (analyzer.peekToken()) { @@ -365,7 +368,7 @@ const Analyzer = struct { } } - break :b try analyzer.nodeList(list); + break :b try analyzer.nodeList(list.slice()); } else .null; break :blk try analyzer.addNode(.{ @@ -470,7 +473,7 @@ const Analyzer = struct { .id = .function_prototype, .token = token, .left = arguments, - .right = try analyzer.nodeList(attribute_and_return_type_node_list), + .right = try analyzer.nodeList(attribute_and_return_type_node_list.slice()), }); return function_prototype; @@ -514,7 +517,7 @@ const Analyzer = struct { _ = try analyzer.expectToken(end_token); if (list.length != 0) { - return try analyzer.nodeList(list); + return try analyzer.nodeList(list.slice()); } else { return Node.Index.null; } @@ -553,7 +556,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = .block, .token = left_brace, - .left = try analyzer.nodeList(list), + .left = try analyzer.nodeList(list.slice()), .right = Node.Index.null, }); } @@ -632,7 +635,7 @@ const Analyzer = struct { break :blk switch (array_list.length) { 0 => unreachable, 1 => array_list.pointer[0], - else => try analyzer.nodeList(array_list), + else => try analyzer.nodeList(array_list.slice()), }; }, }; @@ -662,7 +665,7 @@ const Analyzer = struct { .id = .@"switch", .token = switch_token, .left = switch_expression, - .right = try analyzer.nodeList(list), + .right = try analyzer.nodeList(list.slice()), }); } @@ -811,8 +814,8 @@ const Analyzer = struct { const for_condition_node = try analyzer.addNode(.{ .id = .for_condition, .token = token, - .left = try analyzer.nodeList(for_expression_list), - .right = try analyzer.nodeList(payload_nodes), + .left = try analyzer.nodeList(for_expression_list.slice()), + .right = try analyzer.nodeList(payload_nodes.slice()), }); const true_expression = switch (analyzer.peekToken()) { @@ -994,7 +997,7 @@ const Analyzer = struct { .id = .assembly_instruction, .token = instruction_token, .left = instruction_name, - .right = try analyzer.nodeList(operand_list), + .right = try analyzer.nodeList(operand_list.slice()), }); try instruction_list.append(analyzer.my_allocator, instruction); @@ -1006,7 +1009,7 @@ const 
Analyzer = struct { const assembly_block = try analyzer.addNode(.{ .id = .assembly_code_block, .token = backtick, - .left = try analyzer.nodeList(instruction_list), + .left = try analyzer.nodeList(instruction_list.slice()), .right = .null, }); try list.append(analyzer.my_allocator, assembly_block); @@ -1014,7 +1017,7 @@ const Analyzer = struct { const intrinsic = try analyzer.addNode(.{ .id = .intrinsic, .token = intrinsic_token, - .left = try analyzer.nodeList(list), + .left = try analyzer.nodeList(list.slice()), .right = @enumFromInt(@intFromEnum(intrinsic_id)), }); @@ -1038,7 +1041,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = .intrinsic, .token = intrinsic_token, - .left = try analyzer.nodeList(list), + .left = try analyzer.nodeList(list.slice()), .right = @enumFromInt(@intFromEnum(intrinsic_id)), }); } @@ -1528,7 +1531,7 @@ const Analyzer = struct { assert(type_expression != .null); try list.append(analyzer.my_allocator, type_expression); - const node_list = try analyzer.nodeList(list); + const node_list = try analyzer.nodeList(list.slice()); const node = Node{ .id = expression_type, @@ -1676,7 +1679,7 @@ const Analyzer = struct { .id = .call, .token = left_parenthesis, .left = result, - .right = try analyzer.nodeList(expression_list), + .right = try analyzer.nodeList(expression_list.slice()), }); } else { return result; @@ -1791,7 +1794,7 @@ const Analyzer = struct { }, .token = token, .left = type_node, - .right = try analyzer.nodeList(list), + .right = try analyzer.nodeList(list.slice()), }); } @@ -1839,7 +1842,7 @@ const Analyzer = struct { analyzer.consumeToken(); - break :b try analyzer.nodeList(list); + break :b try analyzer.nodeList(list.slice()); } else Node.Index.null; if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_left_brace); @@ -1948,7 +1951,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = node_id, .token = token_i, - .left = try analyzer.nodeList(node_list), + .left = try analyzer.nodeList(node_list.slice()), .right = parameters_node, }); } @@ -2170,7 +2173,7 @@ const Analyzer = struct { break :blk try analyzer.addNode(.{ .id = .error_type, .token = token_i, - .left = try analyzer.nodeList(list), + .left = try analyzer.nodeList(list.slice()), .right = backing_type, }); }, @@ -2354,9 +2357,9 @@ const Analyzer = struct { return node_index; } - fn nodeList(analyzer: *Analyzer, node_list: UnpinnedArray(Node.Index)) !Node.Index { + fn nodeList(analyzer: *Analyzer, node_list: []const Node.Index) !Node.Index { const index = analyzer.node_lists.length; - try analyzer.node_lists.append(analyzer.my_allocator, node_list); + _ = analyzer.node_lists.append(node_list); return try analyzer.addNode(.{ .id = .node_list, .token = @enumFromInt(0), @@ -2380,7 +2383,7 @@ const Analyzer = struct { }; // Here it is assumed that left brace is consumed -pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result { +pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, arena: *Arena, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *PinnedArray([]const Node.Index)) !Result { const start = std.time.Instant.now() catch unreachable; var analyzer = Analyzer{ .lexer = lexer_result, @@ -2392,6 +2395,7 @@ pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, lexer_result: l 
.my_allocator = my_allocator, .nodes = node_list, .node_lists = node_lists, + .arena = arena, }; const main_node_index = try analyzer.processContainerType(null); diff --git a/bootstrap/library.zig b/bootstrap/library.zig index cf74d68..b746f30 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -35,7 +35,7 @@ pub const Arena = struct{ const reserved_memory = try reserve(size); try commit(reserved_memory, initial_commit_size); - const arena: *Arena = @ptrCast(reserved_memory); + const arena: *Arena = @alignCast(@ptrCast(reserved_memory)); arena.* = .{ .position = @sizeOf(Arena), .commit_position = initial_commit_size, @@ -55,7 +55,7 @@ pub const Arena = struct{ const result = base + arena.position + alignment; arena.position += size + alignment; - if (arena.commit_position < arena.position - arena.commit_position) { + if (arena.commit_position < arena.position) { var size_to_commit = arena.position - arena.commit_position; size_to_commit += commit_granularity - 1; size_to_commit -= size_to_commit % commit_granularity; @@ -72,7 +72,7 @@ pub const Arena = struct{ } pub inline fn new(arena: *Arena, comptime T: type) !*T{ - const result: *T = @ptrCast(try arena.allocate(@sizeOf(T))); + const result: *T = @ptrCast(@alignCast(try arena.allocate(@sizeOf(T)))); return result; } diff --git a/lib/std/std.nat b/lib/std/std.nat index 336722e..bc7271c 100644 --- a/lib/std/std.nat +++ b/lib/std/std.nat @@ -141,7 +141,7 @@ const Arena = struct{ const result = base + arena.position + alignment; arena.position += size + alignment; - if (arena.commit_position < arena.position - arena.commit_position) { + if (arena.commit_position < arena.position) { var size_to_commit = arena.position - arena.commit_position; size_to_commit += commit_granularity - 1; size_to_commit -= size_to_commit % commit_granularity; From 7bd4095cd723d35df78b01011c2bc931d9413ae1 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Wed, 24 Apr 2024 19:36:38 -0600 Subject: [PATCH 05/14] Get rid of UnpinnedArray in the parser --- bootstrap/Compilation.zig | 28 ++++--- bootstrap/frontend/parser.zig | 146 ++++++++++++++++++---------------- bootstrap/library.zig | 4 +- 3 files changed, 95 insertions(+), 83 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index 2280119..6b7faec 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -8671,10 +8671,14 @@ pub const Builder = struct { break :blk result; }; - var declaration_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.declarations); - var field_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.fields); - var comptime_block_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.comptime_blocks); - var test_declarations = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.test_declarations); + var declaration_nodes = try context.arena.new_array(Node.Index, count.declarations); + var field_nodes = try context.arena.new_array(Node.Index, count.fields); + var comptime_block_nodes = try context.arena.new_array(Node.Index, count.comptime_blocks); + var test_declarations = try context.arena.new_array(Node.Index, count.test_declarations); + declaration_nodes.len = 0; + field_nodes.len = 0; + comptime_block_nodes.len = 0; + test_declarations.len = 0; for (container_nodes) |member_index| { const member_node = unit.getNode(member_index); @@ -8685,12 +8689,14 @@ pub const Builder = struct { 
.field => &field_nodes, .test_declaration => &test_declarations, }; - array_list.append_with_capacity(member_index); + const index = array_list.len; + array_list.len += 1; + array_list.*[index] = member_index; } var export_declarations = UnpinnedArray(*Debug.Declaration.Global){}; if (count.declarations > 0) { - for (declaration_nodes.slice()) |declaration_node_index| { + for (declaration_nodes) |declaration_node_index| { const declaration_node = unit.getNode(declaration_node_index); switch (declaration_node.id) { @@ -8786,7 +8792,7 @@ pub const Builder = struct { if (count.fields > 0) { const ty = unit.types.get(data.plain); - const field_count = field_nodes.length; + const field_count: u32 = @intCast(field_nodes.len); switch (container_type) { .@"enum" => { const integer_type = &ty.integer; @@ -8794,7 +8800,7 @@ pub const Builder = struct { try enum_type.fields.ensure_capacity(context.my_allocator, field_count); if (integer_type.bit_count == 0) { - integer_type.bit_count = @bitSizeOf(@TypeOf(field_nodes.length)) - @clz(field_nodes.length); + integer_type.bit_count = @bitSizeOf(@TypeOf(field_nodes.len)) - @clz(field_nodes.len); } assert(integer_type.bit_count > 0); }, @@ -8811,7 +8817,7 @@ pub const Builder = struct { var sliceable_length_index: ?u32 = null; var ignore_field_count: u8 = 0; - for (field_nodes.slice(), 0..) |field_node_index, index| { + for (field_nodes, 0..) |field_node_index, index| { const field_node = unit.getNode(field_node_index); const identifier = switch (unit.token_buffer.tokens.get(field_node.token).id) { .identifier => unit.getExpectedTokenBytes(field_node.token, .identifier), @@ -8920,7 +8926,7 @@ pub const Builder = struct { builder.emit_ir = false; defer builder.emit_ir = emit_ir; - for (comptime_block_nodes.slice()) |comptime_node_index| { + for (comptime_block_nodes) |comptime_node_index| { const comptime_node = unit.getNode(comptime_node_index); assert(comptime_node.id == .@"comptime"); @@ -8965,7 +8971,7 @@ pub const Builder = struct { break :b function_type; }; - for (test_declarations.slice()) |test_declaration_node_index| { + for (test_declarations) |test_declaration_node_index| { const test_node = unit.getNode(test_declaration_node_index); assert(test_node.id == .test_declaration); diff --git a/bootstrap/frontend/parser.zig b/bootstrap/frontend/parser.zig index ae710d0..78562e6 100644 --- a/bootstrap/frontend/parser.zig +++ b/bootstrap/frontend/parser.zig @@ -6,10 +6,10 @@ const library = @import("../library.zig"); const Arena = library.Arena; const byte_equal = library.byte_equal; const BlockList = library.BlockList; +const BoundedArray = library.BoundedArray; const enumFromString = library.enumFromString; const PinnedArray = library.PinnedArray; const MyAllocator = library.MyAllocator; -const UnpinnedArray = library.UnpinnedArray; const lexer = @import("lexer.zig"); @@ -66,6 +66,9 @@ pub const Node = struct { token: Token.Index, id: Id, + // TODO: this is dangerous + pub const StackList = BoundedArray(Node.Index, 512); + pub const Index = PinnedArray(Node).Index; pub const Range = struct { @@ -337,7 +340,8 @@ const Analyzer = struct { const attribute_node_index: Node.Index = if (analyzer.peekToken() == .operator_colon) b: { analyzer.consumeToken(); - var list = UnpinnedArray(Node.Index){}; + var stack_list = Node.StackList{}; + while (analyzer.peekToken() != .operator_assign) { const identifier = try analyzer.expectToken(.identifier); const identifier_name = analyzer.bytes(identifier); @@ -359,7 +363,7 @@ const Analyzer = struct { } } else 
@panic(identifier_name); - try list.append(analyzer.my_allocator, attribute_node); + try stack_list.append(attribute_node); switch (analyzer.peekToken()) { .operator_assign => {}, @@ -368,7 +372,7 @@ const Analyzer = struct { } } - break :b try analyzer.nodeList(list.slice()); + break :b try analyzer.nodeList(&stack_list); } else .null; break :blk try analyzer.addNode(.{ @@ -425,7 +429,7 @@ const Analyzer = struct { fn functionPrototype(analyzer: *Analyzer) !Node.Index { const token = analyzer.token_i; - var attribute_and_return_type_node_list = UnpinnedArray(Node.Index){}; + var attribute_and_return_type_node_list = Node.StackList{}; while (analyzer.peekToken() != .operator_left_parenthesis) { const identifier = try analyzer.expectToken(.identifier); @@ -458,7 +462,7 @@ const Analyzer = struct { } } else @panic(identifier_name); - try attribute_and_return_type_node_list.append(analyzer.my_allocator, attribute_node); + try attribute_and_return_type_node_list.append(attribute_node); if (analyzer.peekToken() == .operator_comma) analyzer.consumeToken(); } @@ -467,13 +471,13 @@ const Analyzer = struct { const arguments = try analyzer.argumentList(.operator_left_parenthesis, .operator_right_parenthesis); const return_type = try analyzer.typeExpression(); - try attribute_and_return_type_node_list.append(analyzer.my_allocator, return_type); + try attribute_and_return_type_node_list.append(return_type); const function_prototype = try analyzer.addNode(.{ .id = .function_prototype, .token = token, .left = arguments, - .right = try analyzer.nodeList(attribute_and_return_type_node_list.slice()), + .right = try analyzer.nodeList(&attribute_and_return_type_node_list), }); return function_prototype; @@ -484,7 +488,7 @@ const Analyzer = struct { _ = try analyzer.expectToken(start_token); } - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.peekToken() != end_token) { const identifier_token = analyzer.token_i; @@ -506,7 +510,7 @@ const Analyzer = struct { analyzer.consumeToken(); } - try list.append(analyzer.my_allocator, try analyzer.addNode(.{ + try list.append(try analyzer.addNode(.{ .id = id, .token = identifier_token, .left = type_expression, @@ -516,8 +520,8 @@ const Analyzer = struct { _ = try analyzer.expectToken(end_token); - if (list.length != 0) { - return try analyzer.nodeList(list.slice()); + if (list.len != 0) { + return try analyzer.nodeList(&list); } else { return Node.Index.null; } @@ -531,7 +535,7 @@ const Analyzer = struct { fn block(analyzer: *Analyzer) anyerror!Node.Index { const left_brace = try analyzer.expectToken(.operator_left_brace); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.peekToken() != .operator_right_brace) { const first_statement_token = analyzer.peekToken(); @@ -548,7 +552,7 @@ const Analyzer = struct { => try analyzer.symbolDeclaration(), }; - try list.append(analyzer.my_allocator, statement_index); + try list.append(statement_index); } _ = try analyzer.expectToken(.operator_right_brace); @@ -556,7 +560,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = .block, .token = left_brace, - .left = try analyzer.nodeList(list.slice()), + .left = try analyzer.nodeList(&list), .right = Node.Index.null, }); } @@ -594,7 +598,7 @@ const Analyzer = struct { // logln(.parser, .@"switch", "Parsed switch expression...", .{}); _ = try analyzer.expectToken(.operator_left_brace); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.peekToken() != 
.operator_right_brace) { const case_token = analyzer.token_i; @@ -605,7 +609,7 @@ const Analyzer = struct { break :blk Node.Index.null; }, else => blk: { - var array_list = UnpinnedArray(Node.Index){}; + var array_list = Node.StackList{}; while (true) { const token = analyzer.token_i; const left = try analyzer.expression(); @@ -623,7 +627,7 @@ const Analyzer = struct { else => left, }; - try array_list.append(analyzer.my_allocator, switch_case_node); + try array_list.append(switch_case_node); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -632,10 +636,10 @@ const Analyzer = struct { } } - break :blk switch (array_list.length) { + break :blk switch (array_list.len) { 0 => unreachable, - 1 => array_list.pointer[0], - else => try analyzer.nodeList(array_list.slice()), + 1 => array_list.buffer[0], + else => try analyzer.nodeList(&array_list), }; }, }; @@ -656,7 +660,7 @@ const Analyzer = struct { .right = expr, }); - try list.append(analyzer.my_allocator, node); + try list.append(node); } _ = try analyzer.expectToken(.operator_right_brace); @@ -665,7 +669,7 @@ const Analyzer = struct { .id = .@"switch", .token = switch_token, .left = switch_expression, - .right = try analyzer.nodeList(list.slice()), + .right = try analyzer.nodeList(&list), }); } @@ -741,7 +745,7 @@ const Analyzer = struct { const token = try analyzer.expectToken(.fixed_keyword_for); _ = try analyzer.expectToken(.operator_left_parenthesis); - var for_expression_list = UnpinnedArray(Node.Index){}; + var for_expression_list = Node.StackList{}; while (analyzer.peekToken() != .operator_right_parenthesis) { const expression_token = analyzer.token_i; @@ -766,7 +770,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), }; - try for_expression_list.append(analyzer.my_allocator, node_index); + try for_expression_list.append(node_index); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -779,7 +783,7 @@ const Analyzer = struct { _ = try analyzer.expectToken(.operator_bar); - var payload_nodes = UnpinnedArray(Node.Index){}; + var payload_nodes = Node.StackList{}; while (analyzer.peekToken() != .operator_bar) { const payload_token = analyzer.token_i; @@ -797,7 +801,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), } - try payload_nodes.append(analyzer.my_allocator, try analyzer.addNode(.{ + try payload_nodes.append(try analyzer.addNode(.{ .id = id, .token = payload_token, .left = Node.Index.null, @@ -807,15 +811,15 @@ const Analyzer = struct { _ = try analyzer.expectToken(.operator_bar); - if (payload_nodes.length != for_expression_list.length) { + if (payload_nodes.len != for_expression_list.len) { unreachable; } const for_condition_node = try analyzer.addNode(.{ .id = .for_condition, .token = token, - .left = try analyzer.nodeList(for_expression_list.slice()), - .right = try analyzer.nodeList(payload_nodes.slice()), + .left = try analyzer.nodeList(&for_expression_list), + .right = try analyzer.nodeList(&payload_nodes), }); const true_expression = switch (analyzer.peekToken()) { @@ -936,17 +940,17 @@ const Analyzer = struct { } } else @panic(intrinsic_name); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; if (intrinsic_id == .@"asm") { const backtick = try analyzer.expectToken(.operator_backtick); - var instruction_list = UnpinnedArray(Node.Index){}; + var instruction_list = Node.StackList{}; while (analyzer.peekToken() != .operator_backtick) { const instruction_token = analyzer.token_i; const instruction_name = try 
analyzer.identifierNode(); - var operand_list = UnpinnedArray(Node.Index){}; + var operand_list = Node.StackList{}; while (analyzer.peekToken() != .operator_semicolon) { const node = switch (analyzer.peekToken()) { .identifier => try analyzer.addNode(.{ @@ -988,7 +992,7 @@ const Analyzer = struct { .operator_semicolon => {}, else => |t| @panic(@tagName(t)), } - try operand_list.append(analyzer.my_allocator, node); + try operand_list.append(node); } analyzer.consumeToken(); @@ -997,10 +1001,10 @@ const Analyzer = struct { .id = .assembly_instruction, .token = instruction_token, .left = instruction_name, - .right = try analyzer.nodeList(operand_list.slice()), + .right = try analyzer.nodeList(&operand_list), }); - try instruction_list.append(analyzer.my_allocator, instruction); + try instruction_list.append(instruction); } _ = try analyzer.expectToken(.operator_backtick); @@ -1009,15 +1013,15 @@ const Analyzer = struct { const assembly_block = try analyzer.addNode(.{ .id = .assembly_code_block, .token = backtick, - .left = try analyzer.nodeList(instruction_list.slice()), + .left = try analyzer.nodeList(&instruction_list), .right = .null, }); - try list.append(analyzer.my_allocator, assembly_block); + try list.append(assembly_block); const intrinsic = try analyzer.addNode(.{ .id = .intrinsic, .token = intrinsic_token, - .left = try analyzer.nodeList(list.slice()), + .left = try analyzer.nodeList(&list), .right = @enumFromInt(@intFromEnum(intrinsic_id)), }); @@ -1025,7 +1029,7 @@ const Analyzer = struct { } else { while (analyzer.peekToken() != .operator_right_parenthesis) { const parameter = try analyzer.expression(); - try list.append(analyzer.my_allocator, parameter); + try list.append(parameter); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -1041,7 +1045,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = .intrinsic, .token = intrinsic_token, - .left = try analyzer.nodeList(list.slice()), + .left = try analyzer.nodeList(&list), .right = @enumFromInt(@intFromEnum(intrinsic_id)), }); } @@ -1452,7 +1456,7 @@ const Analyzer = struct { fn pointerOrArrayTypeExpression(analyzer: *Analyzer, expected: PointerOrArrayTypeExpectedExpression) !Node.Index { const first = analyzer.token_i; - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; const expression_type: Node.Id = switch (expected) { .single_pointer_type => blk: { @@ -1461,7 +1465,7 @@ const Analyzer = struct { break :blk .pointer_type; }, .many_pointer_type => blk: { - try list.append(analyzer.my_allocator, try analyzer.addNode(.{ + try list.append(try analyzer.addNode(.{ .id = .many_pointer_expression, .token = analyzer.token_i, .left = Node.Index.null, @@ -1471,7 +1475,7 @@ const Analyzer = struct { _ = try analyzer.expectToken(.operator_ampersand); switch (analyzer.peekToken()) { .operator_right_bracket => {}, - .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()), + .operator_colon => try list.append(try analyzer.parseTermination()), else => |t| @panic(@tagName(t)), } _ = try analyzer.expectToken(.operator_right_bracket); @@ -1486,17 +1490,17 @@ const Analyzer = struct { break :blk .slice_type; }, .operator_colon => { - try list.append(analyzer.my_allocator, try analyzer.parseTermination()); + try list.append(try analyzer.parseTermination()); _ = try analyzer.expectToken(.operator_right_bracket); break :blk .slice_type; }, else => { const length_expression = try analyzer.expression(); - try list.append(analyzer.my_allocator, 
length_expression); + try list.append(length_expression); switch (analyzer.peekToken()) { .operator_right_bracket => {}, - .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()), + .operator_colon => try list.append(try analyzer.parseTermination()), else => |t| @panic(@tagName(t)), } @@ -1521,17 +1525,17 @@ const Analyzer = struct { analyzer.consumeTokens(@intFromBool(analyzer.peekToken() == .fixed_keyword_const)); if (const_node != .null) { - try list.append(analyzer.my_allocator, const_node); + try list.append(const_node); } } else { - assert(list.length > 0); + assert(list.len > 0); } const type_expression = try analyzer.typeExpression(); assert(type_expression != .null); - try list.append(analyzer.my_allocator, type_expression); + try list.append(type_expression); - const node_list = try analyzer.nodeList(list.slice()); + const node_list = try analyzer.nodeList(&list); const node = Node{ .id = expression_type, @@ -1640,7 +1644,7 @@ const Analyzer = struct { const left_parenthesis = analyzer.token_i; analyzer.consumeToken(); - var expression_list = UnpinnedArray(Node.Index){}; + var expression_list = Node.StackList{}; // logln(.parser, .suffix, "[DEPTH={}] Initializating suffix call-like expression", .{analyzer.suffix_depth}); while (analyzer.peekToken() != .operator_right_parenthesis) { const current_token = analyzer.token_i; @@ -1659,7 +1663,7 @@ const Analyzer = struct { }); } - try expression_list.append(analyzer.my_allocator, parameter); + try expression_list.append(parameter); switch (analyzer.peekToken()) { .operator_right_parenthesis => {}, @@ -1679,7 +1683,7 @@ const Analyzer = struct { .id = .call, .token = left_parenthesis, .left = result, - .right = try analyzer.nodeList(expression_list.slice()), + .right = try analyzer.nodeList(&expression_list), }); } else { return result; @@ -1693,7 +1697,7 @@ const Analyzer = struct { fn containerLiteral(analyzer: *Analyzer, type_node: Node.Index) anyerror!Node.Index { const token = try analyzer.expectToken(.operator_left_brace); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; const InitializationType = enum { anonymous, @@ -1722,7 +1726,7 @@ const Analyzer = struct { .right = Node.Index.null, }); - try list.append(analyzer.my_allocator, field_initialization); + try list.append(field_initialization); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), else => {}, @@ -1733,7 +1737,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), }, else => blk: { - try list.append(analyzer.my_allocator, try analyzer.anonymousExpression()); + try list.append(try analyzer.anonymousExpression()); _ = try analyzer.expectToken(.operator_comma); break :blk .anonymous; }, @@ -1750,7 +1754,7 @@ const Analyzer = struct { else => {}, } - try list.append(analyzer.my_allocator, field_expression_initializer); + try list.append(field_expression_initializer); break :blk .anonymous; }, else => |t| @panic(@tagName(t)), @@ -1794,7 +1798,7 @@ const Analyzer = struct { }, .token = token, .left = type_node, - .right = try analyzer.nodeList(list.slice()), + .right = try analyzer.nodeList(&list), }); } @@ -1830,10 +1834,10 @@ const Analyzer = struct { const parameters_node = if (analyzer.hasTokens() and analyzer.peekToken() == .operator_left_parenthesis) b: { analyzer.consumeToken(); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.peekToken() != .operator_right_parenthesis) { const parameter_node = try analyzer.expression(); - try 
list.append(analyzer.my_allocator, parameter_node); + try list.append(parameter_node); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), else => {}, @@ -1842,11 +1846,11 @@ const Analyzer = struct { analyzer.consumeToken(); - break :b try analyzer.nodeList(list.slice()); + break :b try analyzer.nodeList(&list); } else Node.Index.null; if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_left_brace); - var node_list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.hasTokens() and analyzer.peekToken() != .operator_right_brace) { const first = analyzer.token_i; @@ -1931,7 +1935,7 @@ const Analyzer = struct { // logln(.parser, .container_members, "Container member {s}", .{@tagName(member_node.id)}); assert(member_node.id != .identifier); - try node_list.append(analyzer.my_allocator, member_node_index); + try list.append(member_node_index); } if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_right_brace); @@ -1951,7 +1955,7 @@ const Analyzer = struct { return try analyzer.addNode(.{ .id = node_id, .token = token_i, - .left = try analyzer.nodeList(node_list.slice()), + .left = try analyzer.nodeList(&list), .right = parameters_node, }); } @@ -2139,7 +2143,7 @@ const Analyzer = struct { } else Node.Index.null; _ = try analyzer.expectToken(.operator_left_brace); - var list = UnpinnedArray(Node.Index){}; + var list = Node.StackList{}; while (analyzer.peekToken() != .operator_right_brace) { const tok_i = analyzer.token_i; @@ -2165,7 +2169,7 @@ const Analyzer = struct { .right = value_associated, }); - try list.append(analyzer.my_allocator, error_field_node); + try list.append(error_field_node); } analyzer.consumeToken(); @@ -2173,7 +2177,7 @@ const Analyzer = struct { break :blk try analyzer.addNode(.{ .id = .error_type, .token = token_i, - .left = try analyzer.nodeList(list.slice()), + .left = try analyzer.nodeList(&list), .right = backing_type, }); }, @@ -2357,9 +2361,11 @@ const Analyzer = struct { return node_index; } - fn nodeList(analyzer: *Analyzer, node_list: []const Node.Index) !Node.Index { + fn nodeList(analyzer: *Analyzer, stack_list: *Node.StackList) !Node.Index { + const heap_list = try analyzer.arena.new_array(Node.Index, stack_list.len); + @memcpy(heap_list, stack_list.slice()); const index = analyzer.node_lists.length; - _ = analyzer.node_lists.append(node_list); + _ = analyzer.node_lists.append(heap_list); return try analyzer.addNode(.{ .id = .node_list, .token = @enumFromInt(0), diff --git a/bootstrap/library.zig b/bootstrap/library.zig index b746f30..02c048b 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -77,8 +77,8 @@ pub const Arena = struct{ } pub inline fn new_array(arena: *Arena, comptime T: type, count: usize) ![]T { - const result: [*]T = @ptrCast(try arena.allocate(@sizeOf(T) * count)); - return result; + const result: [*]T = @ptrCast(@alignCast(try arena.allocate(@sizeOf(T) * count))); + return result[0..count]; } }; From 8ae3b0caa4773be65a9213f3ccce9956797ce4d3 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 00:10:36 -0600 Subject: [PATCH 06/14] Reduce usage of UnpinnedArray to just Compilation --- bootstrap/backend/llvm.zig | 250 +++++++++++++++++++--------------- bootstrap/frontend/lexer.zig | 1 - bootstrap/frontend/parser.zig | 62 ++++----- bootstrap/library.zig | 66 ++++----- bootstrap/linker/lld.zig | 92 ++++++------- bootstrap/main.zig | 34 ++--- 6 files changed, 253 insertions(+), 252 deletions(-) diff --git a/bootstrap/backend/llvm.zig 
b/bootstrap/backend/llvm.zig index 8784efb..f197892 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -7,8 +7,9 @@ const write = Compilation.write; // const logln = Compilation.logln; const Module = Compilation.Module; const data_structures = @import("../library.zig"); +const BoundedArray = data_structures.BoundedArray; const MyHashMap = data_structures.MyHashMap; -const UnpinnedArray = data_structures.UnpinnedArray; +const PinnedArray = data_structures.PinnedArray; pub const bindings = @import("llvm_bindings.zig"); @@ -1221,19 +1222,20 @@ pub const LLVM = struct { return llvm_type; } else { const sema_type = unit.types.get(type_index); + var type_buffer = BoundedArray(*LLVM.Type, 512){}; const llvm_type: *LLVM.Type = switch (sema_type.*) { .function => |function_prototype_index| blk: { const sema_function_prototype = unit.function_prototypes.get(function_prototype_index); const llvm_return_type = try llvm.getType(unit, context, sema_function_prototype.abi.return_type); - var parameter_types = try UnpinnedArray(*LLVM.Type).initialize_with_capacity(context.my_allocator, @intCast(sema_function_prototype.abi.parameter_types.len)); + var parameter_types = BoundedArray(*LLVM.Type, 512){}; for (sema_function_prototype.abi.parameter_types) |argument_type_index| { - parameter_types.append_with_capacity(try llvm.getType(unit, context, argument_type_index)); + parameter_types.appendAssumeCapacity(try llvm.getType(unit, context, argument_type_index)); } const is_var_args = false; - const llvm_function_type = LLVM.Context.getFunctionType(llvm_return_type, parameter_types.pointer, parameter_types.length, is_var_args) orelse return Type.Error.function; + const llvm_function_type = LLVM.Context.getFunctionType(llvm_return_type, ¶meter_types.buffer, parameter_types.len, is_var_args) orelse return Type.Error.function; break :blk llvm_function_type.toType(); }, .integer => |integer| switch (integer.kind) { @@ -1294,16 +1296,15 @@ pub const LLVM = struct { break :blk struct_type.toType(); }, .@"struct" => |*sema_struct_type| blk: { - var field_type_list = try UnpinnedArray(*LLVM.Type).initialize_with_capacity(context.my_allocator, sema_struct_type.fields.length); for (sema_struct_type.fields.slice()) |sema_field_index| { const sema_field = unit.struct_fields.get(sema_field_index); const llvm_type = try llvm.getType(unit, context, sema_field.type); - field_type_list.append_with_capacity(llvm_type); + type_buffer.appendAssumeCapacity(llvm_type); } // TODO: const is_packed = false; - const struct_type = llvm.context.getStructType(field_type_list.pointer, field_type_list.length, is_packed) orelse return Type.Error.@"struct"; + const struct_type = llvm.context.getStructType(&type_buffer.buffer, type_buffer.len, is_packed) orelse return Type.Error.@"struct"; break :blk struct_type.toType(); }, @@ -1363,6 +1364,7 @@ pub const LLVM = struct { return result; } else { const sema_type = unit.types.get(sema_type_index); + var name = BoundedArray(u8, 4096){}; const result: []const u8 = switch (sema_type.*) { .integer => |integer| switch (integer.kind) { .materialized_int => b: { @@ -1382,14 +1384,13 @@ pub const LLVM = struct { }, // .bool => "bool", .pointer => |pointer| b: { - var name = UnpinnedArray(u8){}; - try name.append(context.my_allocator, '&'); + name.appendAssumeCapacity('&'); if (pointer.mutability == .@"const") { - try name.append_slice(context.my_allocator, "const"); + name.appendSliceAssumeCapacity("const"); } - try name.append(context.my_allocator, ' '); + 
name.appendAssumeCapacity(' '); const element_type_name = try llvm.renderTypeName(unit, context, pointer.type); - try name.append_slice(context.my_allocator, element_type_name); + name.appendSliceAssumeCapacity(element_type_name); break :b name.slice(); }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { @@ -1398,24 +1399,22 @@ pub const LLVM = struct { }, // TODO: termination .slice => |slice| b: { - var name = UnpinnedArray(u8){}; - try name.append_slice(context.my_allocator, "[] "); + name.appendSliceAssumeCapacity("[] "); if (slice.mutability == .@"const") { - try name.append_slice(context.my_allocator, "const "); + name.appendSliceAssumeCapacity("const "); } const element_type_name = try llvm.renderTypeName(unit, context, slice.child_type); - try name.append_slice(context.my_allocator, element_type_name); + name.appendSliceAssumeCapacity(element_type_name); break :b name.slice(); }, .array => |array| b: { - var name = UnpinnedArray(u8){}; - try name.append(context.my_allocator, '['); + name.appendAssumeCapacity('['); var buffer: [65]u8 = undefined; const array_count = data_structures.format_int(&buffer, array.count, 10, false); - try name.append_slice(context.my_allocator, array_count); - try name.append(context.my_allocator, ']'); + name.appendSliceAssumeCapacity(array_count); + name.appendAssumeCapacity(']'); const element_type_name = try llvm.renderTypeName(unit, context, array.type); - try name.append_slice(context.my_allocator, element_type_name); + name.appendSliceAssumeCapacity(element_type_name); break :b name.slice(); }, @@ -1425,7 +1424,12 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), }; - try llvm.type_name_map.put(context.my_allocator, sema_type_index, result); + + try llvm.type_name_map.put(context.my_allocator, sema_type_index, if (name.len > 0) b: { + const new_name = try context.arena.new_array(u8, name.len); + @memcpy(new_name, result); + break :b new_name; + } else result); return result; } @@ -1530,7 +1534,7 @@ pub const LLVM = struct { }); try llvm.debug_type_map.put_no_clobber(context.my_allocator, sema_type_index, struct_type.toType()); - var field_types = try UnpinnedArray(*LLVM.DebugInfo.Type).initialize_with_capacity(context.my_allocator, @intCast(fields.len)); + var field_types = BoundedArray(*LLVM.DebugInfo.Type, 512){}; bit_size = 0; for (fields) |struct_field_index| { @@ -1541,11 +1545,11 @@ pub const LLVM = struct { const field_name = unit.getIdentifier(struct_field.name); const alignment = struct_field_bit_size; const member_type = llvm.debug_info_builder.createMemberType(null, field_name.ptr, field_name.len, file, 0, struct_field_bit_size, alignment, bit_size, flags, field_type).toType(); - field_types.append_with_capacity(member_type); + field_types.appendAssumeCapacity(member_type); bit_size += struct_field_bit_size; } - llvm.debug_info_builder.replaceCompositeTypes(struct_type, field_types.pointer, field_types.length); + llvm.debug_info_builder.replaceCompositeTypes(struct_type, &field_types.buffer, field_types.len); return struct_type.toType(); } @@ -1629,14 +1633,17 @@ pub const LLVM = struct { break :b boolean_type; }, .@"enum" => |*enum_type| b: { - var enumerators = try UnpinnedArray(*LLVM.DebugInfo.Type.Enumerator).initialize_with_capacity(context.my_allocator, enum_type.fields.length); + var enumerators = try context.arena.new_array(*LLVM.DebugInfo.Type.Enumerator, enum_type.fields.length); + enumerators.len = 0; for (enum_type.fields.slice()) |enum_field_index| { const enum_field = 
unit.enum_fields.get(enum_field_index); const enum_field_name = unit.getIdentifier(enum_field.name); const is_unsigned = true; const enumerator = llvm.debug_info_builder.createEnumerator(enum_field_name.ptr, enum_field_name.len, enum_field.value, is_unsigned) orelse unreachable; - enumerators.append_with_capacity(enumerator); + const index = enumerators.len; + enumerators.len += 1; + enumerators[index] = enumerator; } const type_declaration = unit.type_declarations.get(sema_type_index).?; @@ -1651,18 +1658,21 @@ pub const LLVM = struct { const alignment = 0; const line = type_declaration.declaration.line + 1; const scope = try llvm.getScope(unit, context, enum_type.scope.scope.parent.?); - const enumeration_type = llvm.debug_info_builder.createEnumerationType(scope, name.ptr, name.len, file, line, bit_size, alignment, enumerators.pointer, enumerators.length, backing_type) orelse unreachable; + const enumeration_type = llvm.debug_info_builder.createEnumerationType(scope, name.ptr, name.len, file, line, bit_size, alignment, enumerators.ptr, enumerators.len, backing_type) orelse unreachable; break :b enumeration_type.toType(); }, .@"error" => |*error_type| b: { - var enumerators = try UnpinnedArray(*LLVM.DebugInfo.Type.Enumerator).initialize_with_capacity(context.my_allocator, error_type.fields.length); + var enumerators = try context.arena.new_array(*LLVM.DebugInfo.Type.Enumerator, error_type.fields.length); + enumerators.len = 0; for (error_type.fields.slice()) |error_field_index| { const error_field = unit.error_fields.get(error_field_index); const error_field_name = unit.getIdentifier(error_field.name); const is_unsigned = true; const enumerator = llvm.debug_info_builder.createEnumerator(error_field_name.ptr, error_field_name.len, error_field.value, is_unsigned) orelse unreachable; - enumerators.append_with_capacity(enumerator); + const index = enumerators.len; + enumerators.len += 1; + enumerators[index] = enumerator; } const type_declaration = unit.type_declarations.get(sema_type_index).?; @@ -1677,7 +1687,7 @@ pub const LLVM = struct { const alignment = 0; const line = type_declaration.declaration.line + 1; const scope = try llvm.getScope(unit, context, error_type.scope.scope.parent.?); - const enumeration_type = llvm.debug_info_builder.createEnumerationType(scope, name.ptr, name.len, file, line, bit_size, alignment, enumerators.pointer, enumerators.length, backing_type) orelse unreachable; + const enumeration_type = llvm.debug_info_builder.createEnumerationType(scope, name.ptr, name.len, file, line, bit_size, alignment, enumerators.ptr, enumerators.len, backing_type) orelse unreachable; break :b enumeration_type.toType(); }, else => |t| @panic(@tagName(t)), @@ -1791,10 +1801,10 @@ pub const LLVM = struct { }, .function => |function_prototype_index| b: { const function_prototype = unit.function_prototypes.get(function_prototype_index); - var parameter_types = try UnpinnedArray(*LLVM.DebugInfo.Type).initialize_with_capacity(context.my_allocator, @intCast(function_prototype.argument_types.len)); + var parameter_types = BoundedArray(*LLVM.DebugInfo.Type, 512){}; for (function_prototype.argument_types) |argument_type_index| { const argument_type = try llvm.getDebugType(unit, context, argument_type_index); - parameter_types.append_with_capacity(argument_type); + parameter_types.appendAssumeCapacity(argument_type); } const subroutine_type_flags = LLVM.DebugInfo.Node.Flags{ .visibility = .none, @@ -1826,7 +1836,7 @@ pub const LLVM = struct { .all_calls_described = false, }; const 
subroutine_type_calling_convention = LLVM.DebugInfo.CallingConvention.none; - const subroutine_type = llvm.debug_info_builder.createSubroutineType(parameter_types.pointer, parameter_types.length, subroutine_type_flags, subroutine_type_calling_convention) orelse unreachable; + const subroutine_type = llvm.debug_info_builder.createSubroutineType(¶meter_types.buffer, parameter_types.len, subroutine_type_flags, subroutine_type_calling_convention) orelse unreachable; break :b subroutine_type.toType(); }, else => |t| @panic(@tagName(t)), @@ -2072,7 +2082,9 @@ pub const LLVM = struct { const sema_array_type = unit.types.get(constant_array.type).array; const constant_type = try llvm.getType(unit, context, constant_array.type); const array_type = constant_type.toArray() orelse unreachable; - var list = try UnpinnedArray(*LLVM.Value.Constant).initialize_with_capacity(context.my_allocator, @intCast(constant_array.values.len)); + + var list = try context.arena.new_array(*LLVM.Value.Constant, constant_array.values.len); + list.len = 0; for (constant_array.values) |sema_value| { const value = switch (sema_value) { .constant_int => |const_int| b: { @@ -2099,16 +2111,19 @@ pub const LLVM = struct { }, else => |t| @panic(@tagName(t)), }; - list.append_with_capacity(value); + + const index = list.len; + list.len += 1; + list[index] = value; } - const result = array_type.getConstant(list.pointer, list.length) orelse unreachable; + const result = array_type.getConstant(list.ptr, list.len) orelse unreachable; return result; } fn getConstantStruct(llvm: *LLVM, unit: *Compilation.Unit, context: *const Compilation.Context, constant_struct_index: Compilation.V.Comptime.ConstantStruct.Index) !*LLVM.Value.Constant { const constant_struct = unit.constant_structs.get(constant_struct_index); - var field_values = try UnpinnedArray(*LLVM.Value.Constant).initialize_with_capacity(context.my_allocator, @intCast(constant_struct.fields.len)); + var field_values = BoundedArray(*LLVM.Value.Constant, 512){}; const sema_struct_index = unit.types.get(constant_struct.type).@"struct"; const sema_struct = unit.structs.get(sema_struct_index); const llvm_type = try llvm.getType(unit, context, constant_struct.type); @@ -2119,7 +2134,7 @@ pub const LLVM = struct { for (constant_struct.fields, sema_struct_type.fields.slice()) |field_value, field_index| { const field = unit.struct_fields.get(field_index); const constant = try llvm.emitComptimeRightValue(unit, context, field_value, field.type); - field_values.append_with_capacity(constant); + field_values.appendAssumeCapacity(constant); } }, .error_union => |error_union| { @@ -2130,7 +2145,7 @@ pub const LLVM = struct { const field_types = [2]Compilation.Type.Index{ err_union_base_type, .bool }; for (field_types, constant_struct.fields) |field_type_index, field_value| { const constant = try llvm.emitComptimeRightValue(unit, context, field_value, field_type_index); - field_values.append_with_capacity(constant); + field_values.appendAssumeCapacity(constant); } }, else => |t| @panic(@tagName(t)), @@ -2141,7 +2156,7 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), } - const const_struct = struct_type.getConstant(field_values.pointer, field_values.length) orelse unreachable; + const const_struct = struct_type.getConstant(&field_values.buffer, field_values.len) orelse unreachable; return const_struct; } @@ -2162,9 +2177,9 @@ pub const LLVM = struct { } fn emitParameterAttributes(llvm: *LLVM, unit: *Compilation.Unit, context: *const Compilation.Context, abi: 
Compilation.Function.AbiInfo, is_return: bool) !*const LLVM.Attribute.Set { - var attributes = UnpinnedArray(*LLVM.Attribute){}; + var attributes = BoundedArray(*LLVM.Attribute, 256){}; if (abi.attributes.by_reg) { - try attributes.append(context.my_allocator, llvm.attributes.inreg); + attributes.appendAssumeCapacity(llvm.attributes.inreg); } switch (abi.kind) { .ignore => { @@ -2175,39 +2190,39 @@ pub const LLVM = struct { const indirect_type = try llvm.getType(unit, context, indirect.type); if (is_return) { const sret = llvm.context.getAttributeFromType(.StructRet, indirect_type); - try attributes.append(context.my_allocator, sret); - try attributes.append(context.my_allocator, llvm.attributes.@"noalias"); + attributes.appendAssumeCapacity(sret); + attributes.appendAssumeCapacity(llvm.attributes.@"noalias"); // TODO: alignment } else { if (abi.attributes.by_value) { const byval = llvm.context.getAttributeFromType(.ByVal, indirect_type); - try attributes.append(context.my_allocator, byval); + attributes.appendAssumeCapacity(byval); } //TODO: alignment } }, else => |t| @panic(@tagName(t)), } - const attribute_set = llvm.context.getAttributeSet(attributes.pointer, attributes.length); + const attribute_set = llvm.context.getAttributeSet(&attributes.buffer, attributes.len); return attribute_set; } - fn getFunctionAttributes(llvm: *LLVM, unit: *Compilation.Unit, context: *const Compilation.Context, function_prototype: *Compilation.Function.Prototype) !*const LLVM.Attribute.Set { - var function_attributes = UnpinnedArray(*LLVM.Attribute){}; - try function_attributes.append(context.my_allocator, llvm.attributes.nounwind); + fn getFunctionAttributes(llvm: *LLVM, unit: *Compilation.Unit, function_prototype: *Compilation.Function.Prototype) *const LLVM.Attribute.Set { + var function_attributes = BoundedArray(*LLVM.Attribute, 256){}; + function_attributes.appendAssumeCapacity(llvm.attributes.nounwind); switch (unit.types.get(function_prototype.return_type).*) { .noreturn => { - try function_attributes.append(context.my_allocator, llvm.attributes.noreturn); + function_attributes.appendAssumeCapacity(llvm.attributes.noreturn); }, else => {}, } if (function_prototype.attributes.naked) { - try function_attributes.append(context.my_allocator, llvm.attributes.naked); + function_attributes.appendAssumeCapacity(llvm.attributes.naked); } - const function_attribute_set = llvm.context.getAttributeSet(function_attributes.pointer, function_attributes.length); + const function_attribute_set = llvm.context.getAttributeSet(&function_attributes.buffer, function_attributes.len); return function_attribute_set; } @@ -2217,14 +2232,14 @@ pub const LLVM = struct { }; fn setCallOrFunctionAttributes(llvm: *LLVM, unit: *Compilation.Unit, context: *const Compilation.Context, function_prototype: *Compilation.Function.Prototype, call_or_function: CallOrFunction) !void { - const function_attribute_set = try llvm.getFunctionAttributes(unit, context, function_prototype); + const function_attribute_set = llvm.getFunctionAttributes(unit, function_prototype); - var parameter_attribute_sets = try UnpinnedArray(*const LLVM.Attribute.Set).initialize_with_capacity(context.my_allocator, @intCast(function_prototype.abi.parameter_types_abi.len + @intFromBool(function_prototype.abi.return_type_abi.kind == .indirect))); + var parameter_attribute_sets = BoundedArray(*const LLVM.Attribute.Set, 512){}; const return_attribute_set = blk: { const attribute_set = try llvm.emitParameterAttributes(unit, context, 
function_prototype.abi.return_type_abi, true); break :blk switch (function_prototype.abi.return_type_abi.kind) { .indirect => b: { - parameter_attribute_sets.append_with_capacity(attribute_set); + parameter_attribute_sets.appendAssumeCapacity(attribute_set); break :b llvm.context.getAttributeSet(null, 0); }, else => attribute_set, @@ -2233,18 +2248,18 @@ pub const LLVM = struct { for (function_prototype.abi.parameter_types_abi) |parameter_type_abi| { const parameter_attribute_set = try llvm.emitParameterAttributes(unit, context, parameter_type_abi, false); - parameter_attribute_sets.append_with_capacity(parameter_attribute_set); + parameter_attribute_sets.appendAssumeCapacity(parameter_attribute_set); } const calling_convention = getCallingConvention(function_prototype.calling_convention); switch (call_or_function) { .call => |call| { - call.setAttributes(llvm.context, function_attribute_set, return_attribute_set, parameter_attribute_sets.pointer, parameter_attribute_sets.length); + call.setAttributes(llvm.context, function_attribute_set, return_attribute_set, ¶meter_attribute_sets.buffer, parameter_attribute_sets.len); call.setCallingConvention(calling_convention); }, .function => |function| { - function.setAttributes(llvm.context, function_attribute_set, return_attribute_set, parameter_attribute_sets.pointer, parameter_attribute_sets.length); + function.setAttributes(llvm.context, function_attribute_set, return_attribute_set, ¶meter_attribute_sets.buffer, parameter_attribute_sets.len); function.setCallingConvention(calling_convention); }, } @@ -2286,10 +2301,10 @@ pub const LLVM = struct { if (generate_debug_information) { // if (data_structures.byte_equal(name, "nat_split_struct_ints")) @breakpoint(); const debug_file = try llvm.getDebugInfoFile(unit, context, declaration.declaration.scope.file); - var parameter_types = try UnpinnedArray(*LLVM.DebugInfo.Type).initialize_with_capacity(context.my_allocator, @intCast(function_prototype.argument_types.len)); + var parameter_types = BoundedArray(*LLVM.DebugInfo.Type, 512){}; for (function_prototype.argument_types) |argument_type_index| { const argument_type = try llvm.getDebugType(unit, context, argument_type_index); - parameter_types.append_with_capacity(argument_type); + parameter_types.appendAssumeCapacity(argument_type); } const subroutine_type_flags = LLVM.DebugInfo.Node.Flags{ @@ -2322,7 +2337,7 @@ pub const LLVM = struct { .all_calls_described = false, }; const subroutine_type_calling_convention = LLVM.DebugInfo.CallingConvention.none; - const subroutine_type = llvm.debug_info_builder.createSubroutineType(parameter_types.pointer, parameter_types.length, subroutine_type_flags, subroutine_type_calling_convention) orelse unreachable; + const subroutine_type = llvm.debug_info_builder.createSubroutineType(¶meter_types.buffer, parameter_types.len, subroutine_type_flags, subroutine_type_calling_convention) orelse unreachable; const subprogram_flags = LLVM.DebugInfo.Subprogram.Flags{ .virtuality = .none, .local_to_unit = !export_or_extern, @@ -2570,10 +2585,10 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .inline_assembly => |inline_assembly_index| { const assembly_block = unit.inline_assembly.get(inline_assembly_index); - var assembly_statements = UnpinnedArray(u8){}; - var constraints = UnpinnedArray(u8){}; - var operand_values = UnpinnedArray(*LLVM.Value){}; - var operand_types = UnpinnedArray(*LLVM.Type){}; + var assembly_statements = BoundedArray(u8, 4096){}; + var constraints = BoundedArray(u8, 
4096){}; + var operand_values = BoundedArray(*LLVM.Value, 256){}; + var operand_types = BoundedArray(*LLVM.Type, 256){}; switch (unit.descriptor.arch) { .x86_64 => { @@ -2581,13 +2596,13 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const instruction = unit.assembly_instructions.get(assembly_instruction_index); const instruction_id: Compilation.InlineAssembly.x86_64.Instruction = @enumFromInt(instruction.id); - try assembly_statements.append_slice(context.my_allocator, switch (instruction_id) { + assembly_statements.appendSliceAssumeCapacity(switch (instruction_id) { .xor => "xorl", .mov => "movq", .@"and" => "andq", .call => "callq", }); - try assembly_statements.append(context.my_allocator, ' '); + assembly_statements.appendAssumeCapacity(' '); if (instruction.operands.len > 0) { var reverse_operand_iterator = std.mem.reverseIterator(instruction.operands); @@ -2596,8 +2611,8 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo switch (operand) { .register => |register_value| { const register: Compilation.InlineAssembly.x86_64.Register = @enumFromInt(register_value); - try assembly_statements.append(context.my_allocator, '%'); - try assembly_statements.append_slice(context.my_allocator, @tagName(register)); + assembly_statements.appendAssumeCapacity('%'); + assembly_statements.appendSliceAssumeCapacity(@tagName(register)); }, .number_literal => |literal| { var buffer: [65]u8 = undefined; @@ -2608,7 +2623,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo literal_slice[1] = '$'; literal_slice[2] = '0'; literal_slice[3] = 'x'; - try assembly_statements.append_slice(context.my_allocator, try context.my_allocator.duplicate_bytes(literal_slice)); + assembly_statements.appendSliceAssumeCapacity(try context.my_allocator.duplicate_bytes(literal_slice)); }, .value => |sema_value| { if (llvm.llvm_value_map.get(sema_value)) |v| { @@ -2617,7 +2632,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo } else { const value = try llvm.emitLeftValue(unit, context, sema_value); var buffer: [65]u8 = undefined; - const operand_number = data_structures.format_int(&buffer, operand_values.length, 16, false); + const operand_number = data_structures.format_int(&buffer, operand_values.len, 16, false); const slice_ptr = operand_number.ptr - 2; const operand_slice = slice_ptr[0 .. operand_number.len + 2]; operand_slice[0] = '$'; @@ -2628,23 +2643,23 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo new_buffer[operand_slice.len + 1] = 'P'; new_buffer[operand_slice.len + 2] = '}'; const new_slice = try context.my_allocator.duplicate_bytes(new_buffer[0 .. 
operand_slice.len + 3]); - try assembly_statements.append_slice(context.my_allocator, new_slice); - try operand_values.append(context.my_allocator, value); + assembly_statements.appendSliceAssumeCapacity(new_slice); + operand_values.appendAssumeCapacity(value); const value_type = value.getType(); - try operand_types.append(context.my_allocator, value_type); - try constraints.append(context.my_allocator, 'X'); + operand_types.appendAssumeCapacity(value_type); + constraints.appendAssumeCapacity('X'); } }, } - try assembly_statements.append_slice(context.my_allocator, ", "); + assembly_statements.appendSliceAssumeCapacity(", "); } _ = assembly_statements.pop(); _ = assembly_statements.pop(); } - try assembly_statements.append_slice(context.my_allocator, "\n\t"); + assembly_statements.appendSliceAssumeCapacity("\n\t"); } // try constraints.append_slice(context.allocator, ",~{dirflag},~{fpsr},~{flags}"); @@ -2653,14 +2668,14 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo } const is_var_args = false; - const function_type = LLVM.Context.getFunctionType(try llvm.getType(unit, context, Compilation.Type.Index.void), operand_types.pointer, operand_types.length, is_var_args) orelse unreachable; + const function_type = LLVM.Context.getFunctionType(try llvm.getType(unit, context, Compilation.Type.Index.void), &operand_types.buffer, operand_types.len, is_var_args) orelse unreachable; const has_side_effects = true; const is_align_stack = true; const dialect = LLVM.Value.InlineAssembly.Dialect.@"at&t"; const can_throw = false; - const inline_assembly = LLVM.Value.InlineAssembly.get(function_type, assembly_statements.pointer, assembly_statements.length, constraints.pointer, constraints.length, has_side_effects, is_align_stack, dialect, can_throw) orelse return LLVM.Value.Error.inline_assembly; - const call = llvm.builder.createCall(function_type, inline_assembly.toValue(), operand_values.pointer, operand_values.length, "", "".len, null) orelse return LLVM.Value.Instruction.Error.call; + const inline_assembly = LLVM.Value.InlineAssembly.get(function_type, &assembly_statements.buffer, assembly_statements.len, &constraints.buffer, constraints.len, has_side_effects, is_align_stack, dialect, can_throw) orelse return LLVM.Value.Error.inline_assembly; + const call = llvm.builder.createCall(function_type, inline_assembly.toValue(), &operand_values.buffer, operand_values.len, "", "".len, null) orelse return LLVM.Value.Instruction.Error.call; try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, call.toValue()); }, .stack_slot => |stack_slot| { @@ -2876,27 +2891,27 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const return_type = try llvm.getType(unit, context, Compilation.Type.usize); const is_var_args = false; const function_type = LLVM.Context.getFunctionType(return_type, syscall_argument_types.ptr, syscall_argument_types.len, is_var_args) orelse unreachable; - var constraints = UnpinnedArray(u8){}; + var constraints = BoundedArray(u8, 4096){}; const inline_asm = switch (unit.descriptor.arch) { .x86_64 => blk: { - try constraints.append_slice(context.my_allocator, "={rax}"); + constraints.appendSliceAssumeCapacity("={rax}"); const syscall_registers = [7][]const u8{ "rax", "rdi", "rsi", "rdx", "r10", "r8", "r9" }; for (syscall_registers[0..syscall_argument_count]) |syscall_register| { - try constraints.append(context.my_allocator, ','); - try constraints.append(context.my_allocator, '{'); - try 
constraints.append_slice(context.my_allocator, syscall_register); - try constraints.append(context.my_allocator, '}'); + constraints.appendAssumeCapacity(','); + constraints.appendAssumeCapacity('{'); + constraints.appendSliceAssumeCapacity(syscall_register); + constraints.appendAssumeCapacity('}'); } - try constraints.append_slice(context.my_allocator, ",~{rcx},~{r11},~{memory}"); + constraints.appendSliceAssumeCapacity(",~{rcx},~{r11},~{memory}"); const assembly = "syscall"; const has_side_effects = true; const is_align_stack = true; const can_throw = false; - const inline_assembly = LLVM.Value.InlineAssembly.get(function_type, assembly, assembly.len, constraints.pointer, constraints.length, has_side_effects, is_align_stack, LLVM.Value.InlineAssembly.Dialect.@"at&t", can_throw) orelse return LLVM.Value.Error.inline_assembly; + const inline_assembly = LLVM.Value.InlineAssembly.get(function_type, assembly, assembly.len, &constraints.buffer, constraints.len, has_side_effects, is_align_stack, LLVM.Value.InlineAssembly.Dialect.@"at&t", can_throw) orelse return LLVM.Value.Error.inline_assembly; break :blk inline_assembly; }, else => |t| @panic(@tagName(t)), @@ -3129,8 +3144,10 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo last_block = else_block_node; break :b bb; } else null; - var basic_block_array = try UnpinnedArray(*LLVM.Value.BasicBlock).initialize_with_capacity(context.my_allocator, switch_expression.cases.length); - var condition_array = try UnpinnedArray(*LLVM.Value.Constant.Int).initialize_with_capacity(context.my_allocator, switch_expression.cases.length); + + var basic_block_array = BoundedArray(*LLVM.Value.BasicBlock, 4096){}; + var condition_array = BoundedArray(*LLVM.Value.Constant.Int, 4096){}; + for (switch_expression.cases.pointer[0..switch_expression.cases.length]) |case| { const constant_value = try llvm.emitComptimeRightValue(unit, context, case.condition, switch_expression.condition.type); const constant_int = constant_value.toInt() orelse unreachable; @@ -3141,13 +3158,13 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const block = llvm.llvm_block_map.get(case.basic_block).?; break :b block; }; - condition_array.append_with_capacity(constant_int); - basic_block_array.append_with_capacity(block); + condition_array.appendAssumeCapacity(constant_int); + basic_block_array.appendAssumeCapacity(block); } const branch_weights = null; const unpredictable = null; - const switch_instruction = llvm.builder.createSwitch(condition, else_block, condition_array.pointer, basic_block_array.pointer, condition_array.length, branch_weights, unpredictable); + const switch_instruction = llvm.builder.createSwitch(condition, else_block, &condition_array.buffer, &basic_block_array.buffer, condition_array.len, branch_weights, unpredictable); try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, switch_instruction.toValue()); }, .memcpy => |memcpy| { @@ -3207,8 +3224,6 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const result = llvm.function.verify(&message_ptr, &message_len); if (!result) { - // std.debug.print("PANIC: Failed to verify function:\n{s}\n", .{error_message}); - var module_len: usize = 0; const module_ptr = llvm.module.toString(&module_len); const module_dump = module_ptr[0..module_len]; @@ -3269,25 +3284,34 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .windows => "x86_64-windows-gnu", }; const cpu = 
"generic"; - const temp_use_native_features = true; - const features = if (temp_use_native_features) blk: { - var buffer = UnpinnedArray(u8){}; - for (@import("builtin").cpu.arch.allFeaturesList(), 0..) |feature, index_usize| { - const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize)); - const is_enabled = @import("builtin").cpu.features.isEnabled(index); - if (feature.llvm_name) |llvm_name| { - const plus_or_minus = "-+"[@intFromBool(is_enabled)]; - try buffer.append(context.my_allocator, plus_or_minus); - try buffer.append_slice(context.my_allocator, llvm_name); - try buffer.append_slice(context.my_allocator, ","); + var features = PinnedArray(u8){ + .pointer = @constCast(@ptrCast("")), + .length = 0, + .granularity = 0, + }; + + const temp_use_native_features = true; + if (temp_use_native_features) { + const feature_list = @import("builtin").cpu.arch.allFeaturesList(); + if (feature_list.len > 0) { + features = try PinnedArray(u8).init_with_default_granularity(); + for (feature_list, 0..) |feature, index_usize| { + const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize)); + const is_enabled = @import("builtin").cpu.features.isEnabled(index); + + if (feature.llvm_name) |llvm_name| { + const plus_or_minus = "-+"[@intFromBool(is_enabled)]; + _ = features.append(plus_or_minus); + features.append_slice(llvm_name); + features.append_slice(","); + } } + + assert(std.mem.endsWith(u8, features.slice(), ",")); + features.length -= 1; } - if (buffer.length == 0) break :blk ""; - assert(std.mem.endsWith(u8, buffer.slice(), ",")); - buffer.slice()[buffer.length - 1] = 0; - break :blk buffer.slice()[0 .. buffer.length - 1 :0]; - } else ""; + } const target = blk: { var error_message: [*]const u8 = undefined; @@ -3306,7 +3330,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .optimize_for_speed, .optimize_for_size => .default, .aggressively_optimize_for_speed, .aggressively_optimize_for_size => .aggressive, }; - const target_machine = target.createTargetMachine(target_triple.ptr, target_triple.len, cpu, cpu.len, features.ptr, features.len, LLVM.RelocationModel.static, code_model, is_code_model_present, codegen_optimization_level, jit) orelse unreachable; + const target_machine = target.createTargetMachine(target_triple.ptr, target_triple.len, cpu, cpu.len, features.pointer, features.length, LLVM.RelocationModel.static, code_model, is_code_model_present, codegen_optimization_level, jit) orelse unreachable; llvm.module.setTargetMachineDataLayout(target_machine); llvm.module.setTargetTriple(target_triple.ptr, target_triple.len); const file_path = unit.descriptor.executable_path; diff --git a/bootstrap/frontend/lexer.zig b/bootstrap/frontend/lexer.zig index 5c96a73..420b967 100644 --- a/bootstrap/frontend/lexer.zig +++ b/bootstrap/frontend/lexer.zig @@ -8,7 +8,6 @@ const byte_equal = library.byte_equal; const enumFromString = library.enumFromString; const MyAllocator = library.MyAllocator; const PinnedArray = library.PinnedArray; -const UnpinnedArray = library.UnpinnedArray; const Compilation = @import("../Compilation.zig"); const File = Compilation.File; diff --git a/bootstrap/frontend/parser.zig b/bootstrap/frontend/parser.zig index 78562e6..775a94a 100644 --- a/bootstrap/frontend/parser.zig +++ b/bootstrap/frontend/parser.zig @@ -363,7 +363,7 @@ const Analyzer = struct { } } else @panic(identifier_name); - try stack_list.append(attribute_node); + stack_list.appendAssumeCapacity(attribute_node); switch (analyzer.peekToken()) { 
.operator_assign => {}, @@ -462,7 +462,7 @@ const Analyzer = struct { } } else @panic(identifier_name); - try attribute_and_return_type_node_list.append(attribute_node); + attribute_and_return_type_node_list.appendAssumeCapacity(attribute_node); if (analyzer.peekToken() == .operator_comma) analyzer.consumeToken(); } @@ -471,7 +471,7 @@ const Analyzer = struct { const arguments = try analyzer.argumentList(.operator_left_parenthesis, .operator_right_parenthesis); const return_type = try analyzer.typeExpression(); - try attribute_and_return_type_node_list.append(return_type); + attribute_and_return_type_node_list.appendAssumeCapacity(return_type); const function_prototype = try analyzer.addNode(.{ .id = .function_prototype, @@ -510,7 +510,7 @@ const Analyzer = struct { analyzer.consumeToken(); } - try list.append(try analyzer.addNode(.{ + list.appendAssumeCapacity(try analyzer.addNode(.{ .id = id, .token = identifier_token, .left = type_expression, @@ -552,7 +552,7 @@ const Analyzer = struct { => try analyzer.symbolDeclaration(), }; - try list.append(statement_index); + list.appendAssumeCapacity(statement_index); } _ = try analyzer.expectToken(.operator_right_brace); @@ -627,7 +627,7 @@ const Analyzer = struct { else => left, }; - try array_list.append(switch_case_node); + array_list.appendAssumeCapacity(switch_case_node); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -660,7 +660,7 @@ const Analyzer = struct { .right = expr, }); - try list.append(node); + list.appendAssumeCapacity(node); } _ = try analyzer.expectToken(.operator_right_brace); @@ -770,7 +770,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), }; - try for_expression_list.append(node_index); + for_expression_list.appendAssumeCapacity(node_index); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -801,7 +801,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), } - try payload_nodes.append(try analyzer.addNode(.{ + payload_nodes.appendAssumeCapacity(try analyzer.addNode(.{ .id = id, .token = payload_token, .left = Node.Index.null, @@ -992,7 +992,7 @@ const Analyzer = struct { .operator_semicolon => {}, else => |t| @panic(@tagName(t)), } - try operand_list.append(node); + operand_list.appendAssumeCapacity(node); } analyzer.consumeToken(); @@ -1004,7 +1004,7 @@ const Analyzer = struct { .right = try analyzer.nodeList(&operand_list), }); - try instruction_list.append(instruction); + instruction_list.appendAssumeCapacity(instruction); } _ = try analyzer.expectToken(.operator_backtick); @@ -1016,7 +1016,7 @@ const Analyzer = struct { .left = try analyzer.nodeList(&instruction_list), .right = .null, }); - try list.append(assembly_block); + list.appendAssumeCapacity(assembly_block); const intrinsic = try analyzer.addNode(.{ .id = .intrinsic, @@ -1029,7 +1029,7 @@ const Analyzer = struct { } else { while (analyzer.peekToken() != .operator_right_parenthesis) { const parameter = try analyzer.expression(); - try list.append(parameter); + list.appendAssumeCapacity(parameter); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), @@ -1465,7 +1465,7 @@ const Analyzer = struct { break :blk .pointer_type; }, .many_pointer_type => blk: { - try list.append(try analyzer.addNode(.{ + list.appendAssumeCapacity(try analyzer.addNode(.{ .id = .many_pointer_expression, .token = analyzer.token_i, .left = Node.Index.null, @@ -1475,7 +1475,7 @@ const Analyzer = struct { _ = try analyzer.expectToken(.operator_ampersand); switch (analyzer.peekToken()) { 
.operator_right_bracket => {}, - .operator_colon => try list.append(try analyzer.parseTermination()), + .operator_colon => list.appendAssumeCapacity(try analyzer.parseTermination()), else => |t| @panic(@tagName(t)), } _ = try analyzer.expectToken(.operator_right_bracket); @@ -1490,17 +1490,17 @@ const Analyzer = struct { break :blk .slice_type; }, .operator_colon => { - try list.append(try analyzer.parseTermination()); + list.appendAssumeCapacity(try analyzer.parseTermination()); _ = try analyzer.expectToken(.operator_right_bracket); break :blk .slice_type; }, else => { const length_expression = try analyzer.expression(); - try list.append(length_expression); + list.appendAssumeCapacity(length_expression); switch (analyzer.peekToken()) { .operator_right_bracket => {}, - .operator_colon => try list.append(try analyzer.parseTermination()), + .operator_colon => list.appendAssumeCapacity(try analyzer.parseTermination()), else => |t| @panic(@tagName(t)), } @@ -1525,7 +1525,7 @@ const Analyzer = struct { analyzer.consumeTokens(@intFromBool(analyzer.peekToken() == .fixed_keyword_const)); if (const_node != .null) { - try list.append(const_node); + list.appendAssumeCapacity(const_node); } } else { assert(list.len > 0); @@ -1533,7 +1533,7 @@ const Analyzer = struct { const type_expression = try analyzer.typeExpression(); assert(type_expression != .null); - try list.append(type_expression); + list.appendAssumeCapacity(type_expression); const node_list = try analyzer.nodeList(&list); @@ -1544,15 +1544,7 @@ const Analyzer = struct { .right = Node.Index.null, }; - // logln(.parser, .pointer_like_type_expression, "ARRAY START\n===========", .{}); - // for (list.slice()) |ni| { - // const n = analyzer.nodes.get(ni); - // logln(.parser, .pointer_like_type_expression, "{s} node element: {s}", .{ @tagName(expression_type), @tagName(n.id) }); - // } - // logln(.parser, .pointer_like_type_expression, "ARRAY END\n=========", .{}); - const node_index = try analyzer.addNode(node); - // logln(.parser, .pointer_like_type_expression, "Pointer end", .{}); switch (analyzer.peekToken()) { .operator_comma, @@ -1663,7 +1655,7 @@ const Analyzer = struct { }); } - try expression_list.append(parameter); + expression_list.appendAssumeCapacity(parameter); switch (analyzer.peekToken()) { .operator_right_parenthesis => {}, @@ -1726,7 +1718,7 @@ const Analyzer = struct { .right = Node.Index.null, }); - try list.append(field_initialization); + list.appendAssumeCapacity(field_initialization); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), else => {}, @@ -1737,7 +1729,7 @@ const Analyzer = struct { else => |t| @panic(@tagName(t)), }, else => blk: { - try list.append(try analyzer.anonymousExpression()); + list.appendAssumeCapacity(try analyzer.anonymousExpression()); _ = try analyzer.expectToken(.operator_comma); break :blk .anonymous; }, @@ -1754,7 +1746,7 @@ const Analyzer = struct { else => {}, } - try list.append(field_expression_initializer); + list.appendAssumeCapacity(field_expression_initializer); break :blk .anonymous; }, else => |t| @panic(@tagName(t)), @@ -1837,7 +1829,7 @@ const Analyzer = struct { var list = Node.StackList{}; while (analyzer.peekToken() != .operator_right_parenthesis) { const parameter_node = try analyzer.expression(); - try list.append(parameter_node); + list.appendAssumeCapacity(parameter_node); switch (analyzer.peekToken()) { .operator_comma => analyzer.consumeToken(), else => {}, @@ -1935,7 +1927,7 @@ const Analyzer = struct { // logln(.parser, .container_members, 
"Container member {s}", .{@tagName(member_node.id)}); assert(member_node.id != .identifier); - try list.append(member_node_index); + list.appendAssumeCapacity(member_node_index); } if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_right_brace); @@ -2169,7 +2161,7 @@ const Analyzer = struct { .right = value_associated, }); - try list.append(error_field_node); + list.appendAssumeCapacity(error_field_node); } analyzer.consumeToken(); diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 02c048b..070b30c 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -99,9 +99,16 @@ pub fn PinnedArray(comptime T: type) type { const Array = @This(); + pub fn const_slice(array: *const Array) []const T{ + return array.pointer[0..array.length]; + } + pub fn slice(array: *Array) []T{ + return array.pointer[0..array.length]; + } + pub fn get_unchecked(array: *Array, index: u32) *T { - const slice = array.pointer[0..array.length]; - return &slice[index]; + const array_slice = array.slice(); + return &array_slice[index]; } pub fn get(array: *Array, index: Index) *T { @@ -142,6 +149,18 @@ pub fn PinnedArray(comptime T: type) type { return array.append_with_capacity(item); } + pub fn append_slice(array: *Array, items: []const T) void { + const count: u32 = @intCast(items.len); + if (((array.length + count) * @sizeOf(T)) & (array.granularity - 1) == 0) { + const length: u64 = array.length; + assert((length + count) * @sizeOf(T) <= pinned_array_max_size); + const ptr: [*]u8 = @ptrCast(array.pointer); + commit(ptr + ((length + count) * @sizeOf(T)), array.granularity) catch unreachable; + } + + array.append_slice_with_capacity(items); + } + pub fn append_with_capacity(array: *Array, item: T) *T { const index = array.length; assert(index * @sizeOf(T) < pinned_array_max_size); @@ -150,6 +169,14 @@ pub fn PinnedArray(comptime T: type) type { ptr.* = item; return ptr; } + + pub fn append_slice_with_capacity(array: *Array, items: []const T) void { + const index = array.length; + const count: u32 = @intCast(items.len); + assert((index + count - 1) * @sizeOf(T) < pinned_array_max_size); + array.length += count; + @memcpy(array.pointer[index..][0..count], items); + } }; } @@ -711,41 +738,6 @@ fn copy_backwards(comptime T: type, destination: []T, source: []const T) void { } } -test { - var page_allocator = PageAllocator{}; - const allocator = &page_allocator.allocator; - var foo = UnpinnedArray(u32){}; - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); - try foo.append(allocator, 1); -} - pub fn equal(a: anytype, b: @TypeOf(a)) bool { const T = @TypeOf(a); diff --git a/bootstrap/linker/lld.zig b/bootstrap/linker/lld.zig index f437bc8..2b8bf95 100644 --- 
a/bootstrap/linker/lld.zig +++ b/bootstrap/linker/lld.zig @@ -3,97 +3,97 @@ const assert = std.debug.assert; const linker = @import("linker.zig"); const library = @import("../library.zig"); -const UnpinnedArray = library.UnpinnedArray; +const PinnedArray = library.PinnedArray; const Compilation = @import("../Compilation.zig"); const write = Compilation.write; pub fn link(context: *const Compilation.Context, options: linker.Options) !void { assert(options.backend == .lld); - var argv = UnpinnedArray([]const u8){}; + var argv = try PinnedArray([]const u8).init_with_default_granularity(); const driver_program = switch (@import("builtin").os.tag) { .windows => "lld-link", .linux => "ld.lld", .macos => "ld64.lld", else => @compileError("OS not supported"), }; - try argv.append(context.my_allocator, driver_program); - try argv.append(context.my_allocator, "--error-limit=0"); + _ = argv.append(driver_program); + _ = argv.append("--error-limit=0"); // const output_path = out_path orelse "a.out"; - try argv.append(context.my_allocator, "-o"); - try argv.append(context.my_allocator, options.output_file_path); + _ = argv.append("-o"); + _ = argv.append(options.output_file_path); - try argv.append_slice(context.my_allocator, options.extra_arguments); + argv.append_slice(options.extra_arguments); for (options.objects) |object| { - try argv.append(context.my_allocator, object.path); + _ = argv.append(object.path); } const ci = @import("configuration").ci; switch (@import("builtin").os.tag) { .macos => { - try argv.append(context.my_allocator, "-dynamic"); - try argv.append_slice(context.my_allocator, &.{ "-platform_version", "macos", "13.4.1", "13.3" }); - try argv.append(context.my_allocator, "-arch"); - try argv.append(context.my_allocator, switch (@import("builtin").cpu.arch) { + _ = argv.append("-dynamic"); + argv.append_slice(&.{ "-platform_version", "macos", "13.4.1", "13.3" }); + _ = argv.append("-arch"); + _ = argv.append(switch (@import("builtin").cpu.arch) { .aarch64 => "arm64", else => |t| @panic(@tagName(t)), }); - try argv.append_slice(context.my_allocator, &.{ "-syslibroot", "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk" }); + argv.append_slice(&.{ "-syslibroot", "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk" }); if (!library.ends_with_slice(options.output_file_path, ".dylib")) { - try argv.append_slice(context.my_allocator, &.{ "-e", "_main" }); + argv.append_slice(&.{ "-e", "_main" }); } - try argv.append(context.my_allocator, "-lSystem"); + _ = argv.append("-lSystem"); if (options.link_libcpp) { - try argv.append(context.my_allocator, "-L/Library/Developer/CommandLineTools/SDKs/MacOSX13.3.sdk/usr/lib"); - try argv.append(context.my_allocator, "-lc++"); + _ = argv.append("-L/Library/Developer/CommandLineTools/SDKs/MacOSX13.3.sdk/usr/lib"); + _ = argv.append("-lc++"); } }, .linux => { if (ci) { if (options.link_libcpp) { assert(options.link_libc); - try argv.append(context.my_allocator, "/lib/x86_64-linux-gnu/libstdc++.so.6"); + _ = argv.append("/lib/x86_64-linux-gnu/libstdc++.so.6"); } if (options.link_libc) { - try argv.append(context.my_allocator, "/lib/x86_64-linux-gnu/crt1.o"); - try argv.append(context.my_allocator, "/lib/x86_64-linux-gnu/crti.o"); - try argv.append_slice(context.my_allocator, &.{ "-L", "/lib/x86_64-linux-gnu" }); - try argv.append_slice(context.my_allocator, &.{ "-dynamic-linker", "/lib64/ld-linux-x86-64.so.2" }); - try argv.append(context.my_allocator, "--as-needed"); - try argv.append(context.my_allocator, "-lm"); - try
argv.append(context.my_allocator, "-lpthread"); - try argv.append(context.my_allocator, "-lc"); - try argv.append(context.my_allocator, "-ldl"); - try argv.append(context.my_allocator, "-lrt"); - try argv.append(context.my_allocator, "-lutil"); - try argv.append(context.my_allocator, "/lib/x86_64-linux-gnu/crtn.o"); + _ = argv.append("/lib/x86_64-linux-gnu/crt1.o"); + _ = argv.append("/lib/x86_64-linux-gnu/crti.o"); + argv.append_slice(&.{ "-L", "/lib/x86_64-linux-gnu" }); + argv.append_slice(&.{ "-dynamic-linker", "/lib64/ld-linux-x86-64.so.2" }); + _ = argv.append("--as-needed"); + _ = argv.append("-lm"); + _ = argv.append("-lpthread"); + _ = argv.append("-lc"); + _ = argv.append("-ldl"); + _ = argv.append("-lrt"); + _ = argv.append("-lutil"); + _ = argv.append("/lib/x86_64-linux-gnu/crtn.o"); } } else { if (options.link_libcpp) { assert(options.link_libc); - try argv.append(context.my_allocator, "/usr/lib/libstdc++.so"); + _ = argv.append("/usr/lib/libstdc++.so"); } if (options.link_libc) { - try argv.append(context.my_allocator, "/usr/lib/crt1.o"); - try argv.append(context.my_allocator, "/usr/lib/crti.o"); - try argv.append_slice(context.my_allocator, &.{ "-L", "/usr/lib" }); - try argv.append_slice(context.my_allocator, &.{ "-dynamic-linker", "/lib64/ld-linux-x86-64.so.2" }); - try argv.append(context.my_allocator, "--as-needed"); - try argv.append(context.my_allocator, "-lm"); - try argv.append(context.my_allocator, "-lpthread"); - try argv.append(context.my_allocator, "-lc"); - try argv.append(context.my_allocator, "-ldl"); - try argv.append(context.my_allocator, "-lrt"); - try argv.append(context.my_allocator, "-lutil"); - try argv.append(context.my_allocator, "/usr/lib/crtn.o"); + _ = argv.append("/usr/lib/crt1.o"); + _ = argv.append("/usr/lib/crti.o"); + argv.append_slice(&.{ "-L", "/usr/lib" }); + argv.append_slice(&.{ "-dynamic-linker", "/lib64/ld-linux-x86-64.so.2" }); + _ = argv.append("--as-needed"); + _ = argv.append("-lm"); + _ = argv.append("-lpthread"); + _ = argv.append("-lc"); + _ = argv.append("-ldl"); + _ = argv.append("-lrt"); + _ = argv.append("-lutil"); + _ = argv.append("/usr/lib/crtn.o"); } } }, @@ -102,10 +102,10 @@ pub fn link(context: *const Compilation.Context, options: linker.Options) !void } for (options.libraries) |lib| { - try argv.append(context.my_allocator, try std.mem.concat(context.allocator, u8, &.{ "-l", lib.path })); + _ = argv.append(try std.mem.concat(context.allocator, u8, &.{ "-l", lib.path })); } - const argv_zero_terminated = try Compilation.argsCopyZ(context.allocator, argv.slice()); + const argv_zero_terminated = try Compilation.argsCopyZ(context.allocator, argv.const_slice()); var stdout_ptr: [*]const u8 = undefined; var stdout_len: usize = 0; @@ -121,7 +121,7 @@ pub fn link(context: *const Compilation.Context, options: linker.Options) !void if (!result) { const stdout = stdout_ptr[0..stdout_len]; const stderr = stderr_ptr[0..stderr_len]; - for (argv.slice()) |arg| { + for (argv.const_slice()) |arg| { try write(.panic, arg); try write(.panic, " "); } diff --git a/bootstrap/main.zig b/bootstrap/main.zig index 99e7d35..846e3de 100644 --- a/bootstrap/main.zig +++ b/bootstrap/main.zig @@ -9,7 +9,6 @@ const library = @import("library.zig"); const byte_equal = library.byte_equal; const MyAllocator = library.MyAllocator; const PageAllocator = library.PageAllocator; -const UnpinnedArray = library.UnpinnedArray; const env_detecting_libc_paths = "NATIVITY_IS_DETECTING_LIBC_PATHS"; @@ -27,26 +26,21 @@ var my_allocator = PageAllocator{}; pub fn 
main() !void { var arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator); const allocator = arena_allocator.allocator(); - var arg_it = try std.process.ArgIterator.initWithAllocator(allocator); - var args = library.UnpinnedArray([]const u8){}; + const arguments: []const []const u8 = try std.process.argsAlloc(allocator); const context = try Compilation.createContext(allocator, &my_allocator.allocator); - while (arg_it.next()) |arg| { - try args.append(context.my_allocator, arg); - } - const arguments = args.slice(); - const debug_args = false; - if (debug_args and @import("builtin").os.tag != .windows) { - assert(arguments.len > 0); - const home_dir = std.posix.getenv("HOME") orelse unreachable; - const timestamp = std.time.milliTimestamp(); - var argument_list = UnpinnedArray(u8){}; - for (arguments) |arg| { - argument_list.append_slice(context.my_allocator, arg) catch {}; - argument_list.append(context.my_allocator, ' ') catch {}; - } - argument_list.append(context.my_allocator, '\n') catch {}; - std.fs.cwd().writeFile(std.fmt.allocPrint(std.heap.page_allocator, "{s}/dev/nativity/nat/invocation_log_{}", .{ home_dir, timestamp }) catch unreachable, argument_list.slice()) catch {}; - } + // const debug_args = false; + // if (debug_args and @import("builtin").os.tag != .windows) { + // assert(arguments.len > 0); + // const home_dir = std.posix.getenv("HOME") orelse unreachable; + // const timestamp = std.time.milliTimestamp(); + // var argument_list = UnpinnedArray(u8){}; + // for (arguments) |arg| { + // argument_list.append_slice(context.my_allocator, arg) catch {}; + // argument_list.append(context.my_allocator, ' ') catch {}; + // } + // argument_list.append(context.my_allocator, '\n') catch {}; + // std.fs.cwd().writeFile(std.fmt.allocPrint(std.heap.page_allocator, "{s}/dev/nativity/nat/invocation_log_{}", .{ home_dir, timestamp }) catch unreachable, argument_list.slice()) catch {}; + // } if (arguments.len <= 1) { return error.InvalidInput; From 0a50781ba408f539d2c6999fb425f55ba8af303a Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 00:48:48 -0600 Subject: [PATCH 07/14] Heavily reduce dependency on UnpinnedArray --- bootstrap/Compilation.zig | 1049 ++++++++++++++++++++---------------- bootstrap/backend/llvm.zig | 20 +- bootstrap/library.zig | 47 +- 3 files changed, 635 insertions(+), 481 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index 6b7faec..0d6e8ae 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -6,9 +6,11 @@ const library = @import("library.zig"); const assert = library.assert; const align_forward = library.align_forward; const Arena = library.Arena; +const BoundedArray = library.BoundedArray; const byte_equal = library.byte_equal; const enumFromString = library.enumFromString; const byte_equal_terminated = library.byte_equal_terminated; +const DynamicBoundedArray = library.DynamicBoundedArray; const last_byte = library.last_byte; const first_byte = library.first_byte; const first_slice = library.first_slice; @@ -127,6 +129,7 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons }, .node_buffer = try PinnedArray(Node).init_with_default_granularity(), .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), + .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), }; try unit.compile(context); @@ -225,10 +228,10 @@ fn compileMusl(context: *const Context) MuslContext { if (!exists) { 
var buffer: [65]u8 = undefined; - var ar_args = try UnpinnedArray([]const u8).initialize_with_capacity(context.my_allocator, @intCast(generic_musl_source_files.len + musl_x86_64_source_files.len + 3)); - ar_args.append_with_capacity("ar"); - ar_args.append_with_capacity("rcs"); - ar_args.append_with_capacity(try std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, "libc.a" })); + var ar_args = BoundedArray([]const u8, 4096){}; + ar_args.appendAssumeCapacity("ar"); + ar_args.appendAssumeCapacity("rcs"); + ar_args.appendAssumeCapacity(try std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, "libc.a" })); for (generic_musl_source_files) |src_file_relative_path| { const basename = std.fs.path.basename(src_file_relative_path); @@ -239,7 +242,7 @@ fn compileMusl(context: *const Context) MuslContext { const target_path = try std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, hash_string, target }); try musl.compileFileWithClang(context, src_file_relative_path, target_path); - ar_args.append_with_capacity(target_path); + ar_args.appendAssumeCapacity(target_path); } for (musl_x86_64_source_files) |src_file_relative_path| { @@ -251,7 +254,7 @@ fn compileMusl(context: *const Context) MuslContext { const target_path = try std.mem.concat(context.allocator, u8, &.{ musl.global_cache_dir, hash_string, target }); try musl.compileFileWithClang(context, src_file_relative_path, target_path); - ar_args.append_with_capacity(target_path); + ar_args.appendAssumeCapacity(target_path); } if (try arMain(context.allocator, ar_args.slice()) != 0) { @@ -334,11 +337,11 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 var stack_protector: ?bool = null; var link_arch: ?LinkArch = null; - var cc_argv = UnpinnedArray([]const u8){}; - var ld_argv = UnpinnedArray([]const u8){}; - var c_source_files = UnpinnedArray(CSourceFile){}; - var link_objects = UnpinnedArray(linker.Object){}; - var link_libraries = UnpinnedArray(linker.Library){}; + var cc_argv = BoundedArray([]const u8, 4096){}; + var ld_argv = BoundedArray([]const u8, 4096){}; + var c_source_files = BoundedArray(CSourceFile, 4096){}; + var link_objects = BoundedArray(linker.Object, 4096){}; + var link_libraries = BoundedArray(linker.Library, 4096){}; while (argument_index < arguments.len) { const argument = arguments[argument_index]; @@ -357,13 +360,13 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 }; switch (extension) { .c, .cpp, .assembly => { - try c_source_files.append(context.my_allocator, .{ + c_source_files.appendAssumeCapacity(.{ .path = argument, .extension = extension, }); }, .object, .static_library, .shared_library => { - try link_objects.append(context.my_allocator, .{ + link_objects.appendAssumeCapacity(.{ .path = argument, }); }, @@ -387,137 +390,137 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 const arch_argument = arguments[argument_index]; if (byte_equal(arch_argument, "arm64")) { link_arch = .arm64; - try cc_argv.append(context.my_allocator, "-arch"); - try cc_argv.append(context.my_allocator, "arm64"); + cc_argv.appendAssumeCapacity("-arch"); + cc_argv.appendAssumeCapacity("arm64"); } else { unreachable; } } else if (byte_equal(argument, "-bundle")) { - try ld_argv.append(context.my_allocator, argument); + ld_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-pthread")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if 
(byte_equal(argument, "-fPIC")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-MD")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-MT")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try cc_argv.append(context.my_allocator, arg); + cc_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-MF")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try cc_argv.append(context.my_allocator, arg); + cc_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-isysroot")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try cc_argv.append(context.my_allocator, arg); + cc_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-isystem")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try cc_argv.append(context.my_allocator, arg); + cc_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-h")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-framework")) { - try ld_argv.append(context.my_allocator, argument); + ld_argv.appendAssumeCapacity(argument); argument_index += 1; const framework = arguments[argument_index]; - try ld_argv.append(context.my_allocator, framework); + ld_argv.appendAssumeCapacity(framework); } else if (byte_equal(argument, "--coverage")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-pedantic")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-pedantic-errors")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-?")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-v")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-V")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "--version")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-version")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-qversion")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-print-resource-dir")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-shared")) { - try ld_argv.append(context.my_allocator, argument); + ld_argv.appendAssumeCapacity(argument); } else if (byte_equal(argument, "-compatibility_version")) { - try ld_argv.append(context.my_allocator, argument); 
+ ld_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try ld_argv.append(context.my_allocator, arg); + ld_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-current_version")) { - try ld_argv.append(context.my_allocator, argument); + ld_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try ld_argv.append(context.my_allocator, arg); + ld_argv.appendAssumeCapacity(arg); } else if (byte_equal(argument, "-install_name")) { - try ld_argv.append(context.my_allocator, argument); + ld_argv.appendAssumeCapacity(argument); argument_index += 1; const arg = arguments[argument_index]; - try ld_argv.append(context.my_allocator, arg); + ld_argv.appendAssumeCapacity(arg); } else if (starts_with_slice(argument, "-f")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-wd")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-D")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-I")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-W")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-l")) { - try link_libraries.append(context.my_allocator, .{ + link_libraries.appendAssumeCapacity(.{ .path = argument[2..], }); } else if (starts_with_slice(argument, "-O")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-std=")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else if (starts_with_slice(argument, "-rdynamic")) { - try ld_argv.append(context.my_allocator, "-export_dynamic"); + ld_argv.appendAssumeCapacity("-export_dynamic"); } else if (starts_with_slice(argument, "-dynamiclib")) { - try ld_argv.append(context.my_allocator, "-dylib"); + ld_argv.appendAssumeCapacity("-dylib"); } else if (starts_with_slice(argument, "-Wl,")) { const wl_arg = argument["-Wl,".len..]; if (first_byte(wl_arg, ',')) |comma_index| { const key = wl_arg[0..comma_index]; const value = wl_arg[comma_index + 1 ..]; - try ld_argv.append(context.my_allocator, key); - try ld_argv.append(context.my_allocator, value); + ld_argv.appendAssumeCapacity(key); + ld_argv.appendAssumeCapacity(value); } else { - try ld_argv.append(context.my_allocator, wl_arg); + ld_argv.appendAssumeCapacity(wl_arg); } } else if (starts_with_slice(argument, "-m")) { - try cc_argv.append(context.my_allocator, argument); + cc_argv.appendAssumeCapacity(argument); } else { - const debug_args = true; - if (debug_args) { - const home_dir = switch (@import("builtin").os.tag) { - .linux, .macos => std.posix.getenv("HOME") orelse unreachable, - .windows => try std.process.getEnvVarOwned(context.allocator, "USERPROFILE"), - else => @compileError("OS not supported"), - }; - var list = UnpinnedArray(u8){}; - for (arguments) |arg| { - try list.append_slice(context.my_allocator, arg); - try list.append(context.my_allocator, ' '); - } - try list.append(context.my_allocator, '\n'); - try list.append_slice(context.my_allocator, "Unhandled argument: "); - try list.append_slice(context.my_allocator, 
argument); - try list.append(context.my_allocator, '\n'); - - try std.fs.cwd().writeFile(try std.fmt.allocPrint(context.allocator, "{s}/dev/nativity/nat/unhandled_arg_{}", .{ home_dir, std.time.milliTimestamp() }), list.slice()); - } + // const debug_args = true; + // if (debug_args) { + // const home_dir = switch (@import("builtin").os.tag) { + // .linux, .macos => std.posix.getenv("HOME") orelse unreachable, + // .windows => try std.process.getEnvVarOwned(context.allocator, "USERPROFILE"), + // else => @compileError("OS not supported"), + // }; + // var list = PinnedArray(u8){}; + // for (arguments) |arg| { + // list.append_slice(context.my_allocator, arg); + // list.append(context.my_allocator, ' '); + // } + // list.append(context.my_allocator, '\n'); + // list.append_slice(context.my_allocator, "Unhandled argument: "); + // list.append_slice(context.my_allocator, argument); + // list.append(context.my_allocator, '\n'); + // + // std.fs.cwd().writeFile(try std.fmt.allocPrint(context.allocator, "{s}/dev/nativity/nat/unhandled_arg_{}", .{ home_dir, std.time.milliTimestamp() }), list.slice()); + // } try write(.panic, "unhandled argument: '"); try write(.panic, argument); try write(.panic, "'\n"); @@ -530,37 +533,37 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 const link_libcpp = true; const mode = out_mode orelse .link; - if (c_source_files.length > 0) { + var argv = BoundedArray([]const u8, 4096){}; + if (c_source_files.len > 0) { for (c_source_files.slice()) |c_source_file| { - var argv = UnpinnedArray([]const u8){}; - try argv.append(context.my_allocator, context.executable_absolute_path); - try argv.append(context.my_allocator, "clang"); - try argv.append(context.my_allocator, "--no-default-config"); + argv.appendAssumeCapacity(context.executable_absolute_path); + argv.appendAssumeCapacity("clang"); + argv.appendAssumeCapacity("--no-default-config"); - try argv.append(context.my_allocator, c_source_file.path); + argv.appendAssumeCapacity(c_source_file.path); if (c_source_file.extension == .cpp) { - try argv.append(context.my_allocator, "-nostdinc++"); + argv.appendAssumeCapacity("-nostdinc++"); } const caret = true; if (!caret) { - try argv.append(context.my_allocator, "-fno-caret-diagnostics"); + argv.appendAssumeCapacity("-fno-caret-diagnostics"); } const function_sections = false; if (function_sections) { - try argv.append(context.my_allocator, "-ffunction-sections"); + argv.appendAssumeCapacity("-ffunction-sections"); } const data_sections = false; if (data_sections) { - try argv.append(context.my_allocator, "-fdata-sections"); + argv.appendAssumeCapacity("-fdata-sections"); } const builtin = true; if (!builtin) { - try argv.append(context.my_allocator, "-fno-builtin"); + argv.appendAssumeCapacity("-fno-builtin"); } if (link_libcpp) { @@ -576,77 +579,77 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 unreachable; } + var target_triple_buffer = BoundedArray(u8, 512){}; const target_triple = blk: { // Emit target - var target_triple_buffer = UnpinnedArray(u8){}; switch (@import("builtin").target.cpu.arch) { .x86_64 => { - try target_triple_buffer.append_slice(context.my_allocator, "x86_64-"); + target_triple_buffer.appendSliceAssumeCapacity("x86_64-"); }, .aarch64 => { - try target_triple_buffer.append_slice(context.my_allocator, "aarch64-"); + target_triple_buffer.appendSliceAssumeCapacity("aarch64-"); }, else => @compileError("Architecture not supported"), } if
(@import("builtin").target.cpu.arch == .aarch64 and @import("builtin").target.os.tag == .macos) { - try target_triple_buffer.append_slice(context.my_allocator, "apple-"); + target_triple_buffer.appendSliceAssumeCapacity("apple-"); } else { - try target_triple_buffer.append_slice(context.my_allocator, "pc-"); + target_triple_buffer.appendSliceAssumeCapacity("pc-"); } switch (@import("builtin").target.os.tag) { .linux => { - try target_triple_buffer.append_slice(context.my_allocator, "linux-"); + target_triple_buffer.appendSliceAssumeCapacity("linux-"); }, .macos => { - try target_triple_buffer.append_slice(context.my_allocator, "macos-"); + target_triple_buffer.appendSliceAssumeCapacity("macos-"); }, .windows => { - try target_triple_buffer.append_slice(context.my_allocator, "windows-"); + target_triple_buffer.appendSliceAssumeCapacity("windows-"); }, else => @compileError("OS not supported"), } switch (@import("builtin").target.abi) { .musl => { - try target_triple_buffer.append_slice(context.my_allocator, "musl"); + target_triple_buffer.appendSliceAssumeCapacity("musl"); }, .gnu => { - try target_triple_buffer.append_slice(context.my_allocator, "gnu"); + target_triple_buffer.appendSliceAssumeCapacity("gnu"); }, .none => { - try target_triple_buffer.append_slice(context.my_allocator, "unknown"); + target_triple_buffer.appendSliceAssumeCapacity("unknown"); }, else => @compileError("OS not supported"), } break :blk target_triple_buffer.slice(); }; - try argv.append_slice(context.my_allocator, &.{ "-target", target_triple }); + argv.appendSliceAssumeCapacity(&.{ "-target", target_triple }); const object_path = switch (mode) { .object => out_path.?, .link => try std.mem.concat(context.allocator, u8, &.{ if (out_path) |op| op else "a.o", ".o" }), }; - try link_objects.append(context.my_allocator, .{ + link_objects.appendAssumeCapacity(.{ .path = object_path, }); switch (c_source_file.extension) { .c, .cpp => { - try argv.append(context.my_allocator, "-nostdinc"); - try argv.append(context.my_allocator, "-fno-spell-checking"); + argv.appendAssumeCapacity("-nostdinc"); + argv.appendAssumeCapacity("-fno-spell-checking"); const lto = false; if (lto) { - try argv.append(context.my_allocator, "-flto"); + argv.appendAssumeCapacity("-flto"); } const mm = false; if (mm) { - try argv.append(context.my_allocator, "-ObjC++"); + argv.appendAssumeCapacity("-ObjC++"); } const libc_framework_dirs: []const []const u8 = switch (@import("builtin").os.tag) { @@ -654,12 +657,12 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 else => &.{}, }; for (libc_framework_dirs) |framework_dir| { - try argv.append_slice(context.my_allocator, &.{ "-iframework", framework_dir }); + argv.appendSliceAssumeCapacity(&.{ "-iframework", framework_dir }); } const framework_dirs = &[_][]const u8{}; for (framework_dirs) |framework_dir| { - try argv.append_slice(context.my_allocator, &.{ "-F", framework_dir }); + argv.appendSliceAssumeCapacity(&.{ "-F", framework_dir }); } // TODO: c headers dir @@ -691,7 +694,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 }; for (libc_include_dirs) |include_dir| { - try argv.append_slice(context.my_allocator, &.{ "-isystem", include_dir }); + argv.appendSliceAssumeCapacity(&.{ "-isystem", include_dir }); } // TODO: cpu model @@ -701,42 +704,42 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // TODO: sanitize flags // const red_zone = true; // if (red_zone) { - // try 
argv.append(context.my_allocator, "-mred-zone"); + // argv.appendAssumeCapacity("-mred-zone"); // } else { // unreachable; // } const omit_frame_pointer = false; if (omit_frame_pointer) { - try argv.append(context.my_allocator, "-fomit-frame-pointer"); + argv.appendAssumeCapacity("-fomit-frame-pointer"); } else { - try argv.append(context.my_allocator, "-fno-omit-frame-pointer"); + argv.appendAssumeCapacity("-fno-omit-frame-pointer"); } if (stack_protector orelse false) { - try argv.append(context.my_allocator, "-fstack-protector-strong"); + argv.appendAssumeCapacity("-fstack-protector-strong"); } else { - try argv.append(context.my_allocator, "-fno-stack-protector"); + argv.appendAssumeCapacity("-fno-stack-protector"); } const is_debug = true; if (is_debug) { - try argv.append(context.my_allocator, "-D_DEBUG"); - try argv.append(context.my_allocator, "-O0"); + argv.appendAssumeCapacity("-D_DEBUG"); + argv.appendAssumeCapacity("-O0"); } else { unreachable; } const pic = false; if (pic) { - try argv.append(context.my_allocator, "-fPIC"); + argv.appendAssumeCapacity("-fPIC"); } const unwind_tables = false; if (unwind_tables) { - try argv.append(context.my_allocator, "-funwind-tables"); + argv.appendAssumeCapacity("-funwind-tables"); } else { - try argv.append(context.my_allocator, "-fno-unwind-tables"); + argv.appendAssumeCapacity("-fno-unwind-tables"); } }, .assembly => { @@ -747,7 +750,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 const has_debug_info = true; if (has_debug_info) { - try argv.append(context.my_allocator, "-g"); + argv.appendAssumeCapacity("-g"); } else { unreachable; } @@ -755,17 +758,17 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // TODO: machine ABI const freestanding = false; if (freestanding) { - try argv.append(context.my_allocator, "-ffrestanding"); + argv.appendAssumeCapacity("-ffrestanding"); } // TODO: native system include paths // TODO: global cc argv - try argv.append_slice(context.my_allocator, cc_argv.slice()); + argv.appendSliceAssumeCapacity(cc_argv.slice()); // TODO: extra flags // TODO: cache exempt flags - try argv.append_slice(context.my_allocator, &.{ "-c", "-o", object_path }); + argv.appendSliceAssumeCapacity(&.{ "-c", "-o", object_path }); // TODO: emit ASM/LLVM IR const debug_clang_args = false; @@ -777,12 +780,11 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 unreachable; } } - } else if (link_objects.length == 0) { - var argv = UnpinnedArray([]const u8){}; - try argv.append(context.my_allocator, context.executable_absolute_path); - try argv.append(context.my_allocator, "clang"); - try argv.append(context.my_allocator, "--no-default-config"); - try argv.append_slice(context.my_allocator, cc_argv.slice()); + } else if (link_objects.len == 0) { + argv.appendAssumeCapacity(context.executable_absolute_path); + argv.appendAssumeCapacity("clang"); + argv.appendAssumeCapacity("--no-default-config"); + argv.appendSliceAssumeCapacity(cc_argv.slice()); const result = try clangMain(context.allocator, argv.slice()); if (result != 0) { unreachable; @@ -791,7 +793,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 } if (mode == .link) { - assert(link_objects.length > 0); + assert(link_objects.len > 0); try linker.link(context, .{ .backend = .lld, .output_file_path = out_path orelse "a.out", @@ -804,19 +806,19 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 } // if (kind == 
.cpp) { - // try clang_args.append(context.my_allocator, "-nostdinc++"); + // try clang_args.appendAssumeCapacity("-nostdinc++"); // // switch (@import("builtin").os.tag) { // .linux => { // switch (@import("builtin").abi) { // .gnu => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-isystem", "/usr/include/c++/13.2.1", // "-isystem", "/usr/include/c++/13.2.1/x86_64-pc-linux-gnu", // }); // }, // .musl => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-isystem", try context.pathFromCompiler("lib/libcxx/include"), // "-isystem", try context.pathFromCompiler("lib/libcxxabi/include"), // "-D_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS", @@ -831,7 +833,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // } // }, // .macos => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-isystem", try context.pathFromCompiler("lib/libcxx/include"), // "-isystem", try context.pathFromCompiler("lib/libcxxabi/include"), // "-D_LIBCPP_DISABLE_VISIBILITY_ANNOTATIONS", @@ -847,20 +849,20 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // } // // if (kind == .c or kind == .cpp) { - // try clang_args.append(context.my_allocator, "-nostdinc"); + // try clang_args.appendAssumeCapacity("-nostdinc"); // // switch (@import("builtin").os.tag) { // .linux => { // switch (@import("builtin").abi) { // .gnu => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-isystem", "/usr/lib/clang/17/include", // "-isystem", "/usr/include", // "-isystem", "/usr/include/linux", // }); // }, // .musl => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-isystem", try context.pathFromCompiler("lib/include"), // "-isystem", try context.pathFromCompiler("lib/libc/include/x86_64-linux-gnu"), // "-isystem", try context.pathFromCompiler("lib/libc/include/generic-glibc"), @@ -872,7 +874,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // } // }, // .macos => { - // try clang_args.append_slice(context.my_allocator, &.{ + // try clang_args.appendSliceAssumeCapacity(&.{ // "-iframework", "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/System/Library/Frameworks", // "-isystem", try context.pathFromCompiler("lib/include"), // "-isystem", "/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/include", @@ -887,7 +889,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // .linux => { // switch (@import("builtin").abi) { // .gnu => { - // try clang_args.append(context.my_allocator, "-lstdc++"); + // try clang_args.appendAssumeCapacity("-lstdc++"); // }, // .musl => { // unreachable; @@ -901,7 +903,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // } // // for (arguments) |arg| { - // try clang_args.append(context.my_allocator, span(arg)); + // try clang_args.appendAssumeCapacity(span(arg)); // } // // const result = try clangMain(context.allocator, clang_args.slice()); @@ -917,25 +919,25 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 // const link = false; // if (link) { - // var lld_args = UnpinnedArray([*:0]const u8){}; - // try lld_args.append(context.my_allocator, "ld.lld"); - // try 
lld_args.append(context.my_allocator, "--error-limit=0"); - // try lld_args.append(context.my_allocator, "--entry"); - // try lld_args.append(context.my_allocator, "_start"); - // try lld_args.append(context.my_allocator, "-z"); - // try lld_args.append(context.my_allocator, "stack-size=16777216"); - // try lld_args.append(context.my_allocator, "--image-base=16777216"); - // try lld_args.append(context.my_allocator, "-m"); - // try lld_args.append(context.my_allocator, "elf_x86_64"); - // try lld_args.append(context.my_allocator, "-static"); - // try lld_args.append(context.my_allocator, "-o"); - // try lld_args.append(context.my_allocator, "nat/main"); - // try lld_args.append(context.my_allocator, try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crt1.o" })); - // try lld_args.append(context.my_allocator, try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crti.o" })); - // try lld_args.append(context.my_allocator, output_object_file); - // try lld_args.append(context.my_allocator, "--as-needed"); - // try lld_args.append(context.my_allocator, try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "libc.a" })); - // try lld_args.append(context.my_allocator, try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crtn.o" })); + // var lld_args = PinnedArray([*:0]const u8){}; + // try lld_args.appendAssumeCapacity("ld.lld"); + // try lld_args.appendAssumeCapacity("--error-limit=0"); + // try lld_args.appendAssumeCapacity("--entry"); + // try lld_args.appendAssumeCapacity("_start"); + // try lld_args.appendAssumeCapacity("-z"); + // try lld_args.appendAssumeCapacity("stack-size=16777216"); + // try lld_args.appendAssumeCapacity("--image-base=16777216"); + // try lld_args.appendAssumeCapacity("-m"); + // try lld_args.appendAssumeCapacity("elf_x86_64"); + // try lld_args.appendAssumeCapacity("-static"); + // try lld_args.appendAssumeCapacity("-o"); + // try lld_args.appendAssumeCapacity("nat/main"); + // try lld_args.appendAssumeCapacity(try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crt1.o" })); + // try lld_args.appendAssumeCapacity(try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crti.o" })); + // try lld_args.appendAssumeCapacity(output_object_file); + // try lld_args.appendAssumeCapacity("--as-needed"); + // try lld_args.appendAssumeCapacity(try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "libc.a" })); + // try lld_args.appendAssumeCapacity(try std.mem.joinZ(context.allocator, "", &.{ musl.global_cache_dir, "crtn.o" })); // // var stdout_ptr: [*]const u8 = undefined; // var stdout_len: usize = 0; @@ -2827,7 +2829,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o var maybe_only_parse: ?bool = null; var link_libc = false; var maybe_executable_name: ?[]const u8 = null; - var c_source_files = UnpinnedArray([]const u8){}; + var c_source_files = BoundedArray([]const u8, 4096){}; var optimization = Optimization.none; var generate_debug_information = true; @@ -2939,10 +2941,8 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o if (i + 1 != arguments.len) { i += 1; - try c_source_files.ensure_capacity(context.my_allocator, @intCast(arguments.len - i)); - while (i < arguments.len) : (i += 1) { - c_source_files.append_with_capacity(arguments[i]); - } + c_source_files.appendSliceAssumeCapacity( arguments[i..]); + i = arguments.len; } else { reportUnterminatedArgumentError(current_argument); } @@ -3025,6 +3025,7 
@@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o }, .node_buffer = try PinnedArray(Node).init_with_default_granularity(), .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), + .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), }; try unit.compile(context); @@ -3106,7 +3107,7 @@ pub fn panic(message: []const u8, stack_trace: ?*std.builtin.StackTrace, return_ } const TypeCheckSwitchEnums = struct { - switch_case_groups: UnpinnedArray(UnpinnedArray(Enum.Field.Index)), + switch_case_groups: []const []const Enum.Field.Index, else_switch_case_group_index: ?usize = null, }; @@ -3126,7 +3127,7 @@ fn getTypeBitSize(ty: *Type, unit: *Unit) u32 { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| { var bit_size: u32 = 0; - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); const field_type = unit.types.get(field.type); const field_bit_size = field_type.getBitSize(unit); @@ -3158,7 +3159,7 @@ fn getTypeAbiSize(ty: *Type, unit: *Unit) u32 { .@"struct" => |*struct_type| b: { const struct_alignment = ty.getAbiAlignment(unit); var total_byte_size: u32 = 0; - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); const field_type = unit.types.get(field.type); const field_size = getTypeAbiSize(field_type, unit); @@ -3203,7 +3204,7 @@ fn getTypeAbiAlignment(ty: *Type, unit: *Unit) u32 { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| b: { var alignment: u32 = 1; - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); const field_ty = unit.types.get(field.type); const field_alignment = field_ty.getAbiAlignment(unit); @@ -3264,7 +3265,7 @@ fn getTypeHomogeneousAggregate(ty: *Type, unit: *Unit) ?HomogeneousAggregate { return switch (ty.*) { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| { - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); const field_type = unit.types.get(field.type); while (field_type.* == .array) { @@ -3297,11 +3298,11 @@ const _usize: Type.Index = .u64; const _ssize: Type.Index = .s64; fn serialize_comptime_parameters(unit: *Unit, context: *const Context, original_declaration: *Debug.Declaration, parameters: []const V.Comptime) !u32 { - var name = UnpinnedArray(u8){}; + var name = BoundedArray(u8, 4096){}; const original_name = unit.getIdentifier(original_declaration.name); - try name.append_slice(context.my_allocator, original_name); + name.appendSliceAssumeCapacity(original_name); assert(parameters.len > 0); - try name.append(context.my_allocator, '('); + name.appendAssumeCapacity('('); for (parameters) |parameter| { switch (parameter) { @@ -3315,10 +3316,10 @@ fn serialize_comptime_parameters(unit: *Unit, context: *const Context, original_ .signed => 's', .unsigned => 'u', }; - try name.append(context.my_allocator, char); + name.appendAssumeCapacity(char); var bit_buffer: [32]u8 = undefined; const formatted_int = format_int(&bit_buffer, integer.bit_count, 10, false); - try name.append_slice(context.my_allocator, formatted_int); + 
name.appendSliceAssumeCapacity(formatted_int); }, else => |t| @panic(@tagName(t)), }, @@ -3327,16 +3328,21 @@ fn serialize_comptime_parameters(unit: *Unit, context: *const Context, original_ else => |t| @panic(@tagName(t)), } - try name.append(context.my_allocator, ','); - try name.append(context.my_allocator, ' '); + name.appendAssumeCapacity(','); + name.appendAssumeCapacity(' '); } - name.length -= 2; - name.pointer[name.length] = ')'; - name.length += 1; + name.len -= 2; + name.buffer[name.len] = ')'; + name.len += 1; - const name_hash = try unit.processIdentifier(context, name.slice()); - return name_hash; + const hash = my_hash(name.slice()); + // Don't allocate memory if not necessary + if (unit.identifiers.get(hash) == null) { + try unit.identifiers.put_no_clobber(context.my_allocator, hash, name.slice()); + } + + return hash; } pub const Type = union(enum) { @@ -3455,7 +3461,7 @@ pub const Type = union(enum) { }; const Error = struct { - fields: UnpinnedArray(Type.Error.Field.Index) = .{}, + fields: DynamicBoundedArray(Type.Error.Field.Index), scope: Debug.Scope.Global, id: u32, @@ -3471,12 +3477,6 @@ pub const Type = union(enum) { pub usingnamespace @This().List.Index; }; - const Set = struct { - values: UnpinnedArray(Type.Index) = .{}, // Empty means all errors - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; - }; - const Union = struct { @"error": Type.Index, type: Type.Index, @@ -3504,7 +3504,6 @@ pub const Type = union(enum) { materialized_int, disguised_pointer, @"error": Type.Error, - error_set: Type.Error.Set, bitfield: Bitfield, }; @@ -3515,7 +3514,7 @@ pub const Type = union(enum) { }; pub const Bitfield = struct { - fields: UnpinnedArray(Struct.Field.Index) = .{}, + fields: []const Struct.Field.Index = &.{}, scope: Debug.Scope.Global, }; @@ -3720,7 +3719,7 @@ pub const Instruction = union(enum) { const Switch = struct { condition: V, - cases: UnpinnedArray(Case) = .{}, + cases: []const Case = &.{}, else_block: BasicBlock.Index = .null, block_type: Type.Index, @@ -3731,15 +3730,23 @@ pub const Instruction = union(enum) { }; const Phi = struct { - values: UnpinnedArray(V) = .{}, - basic_blocks: UnpinnedArray(BasicBlock.Index) = .{}, + values: *BoundedArray(Phi.Value, max_value_count), type: Type.Index, - pub fn addIncoming(phi: *Phi, context: *const Context, value: V, basic_block: BasicBlock.Index) !void { + pub const max_value_count = 32; + + const Value = struct{ + value: V, + basic_block: BasicBlock.Index, + }; + + pub fn addIncoming(phi: *Phi, value: V, basic_block: BasicBlock.Index) void { assert(phi.type == value.type); assert(basic_block != .null); - try phi.values.append(context.my_allocator, value); - try phi.basic_blocks.append(context.my_allocator, basic_block); + phi.values.appendAssumeCapacity(.{ + .value = value, + .basic_block = basic_block, + }); } }; @@ -3923,10 +3930,21 @@ pub const Instruction = union(enum) { pub const BasicBlock = struct { instructions: UnpinnedArray(Instruction.Index) = .{}, - predecessors: UnpinnedArray(BasicBlock.Index) = .{}, + predecessors: PinnedArray(BasicBlock.Index) = .{ + .pointer = undefined, + .length = 0, + .granularity = 0, + }, // TODO: not use a bool terminated: bool = false, + fn add_predecessor(basic_block: *BasicBlock, predecessor: BasicBlock.Index) !void { + if (basic_block.predecessors.length == 0) { + basic_block.predecessors = try PinnedArray(BasicBlock.Index).init(std.mem.page_size); + } + _ = basic_block.predecessors.append(predecessor); + } + pub const List = 
BlockList(@This(), enum {}); pub usingnamespace @This().List.Index; }; @@ -4040,7 +4058,7 @@ pub const Struct = struct { pub const Descriptor = struct { scope: Debug.Scope.Global, - fields: UnpinnedArray(Struct.Field.Index) = .{}, + fields: []const Struct.Field.Index = &.{}, options: Options, }; @@ -4435,7 +4453,7 @@ pub const Builder = struct { current_file: Debug.File.Index = .null, current_function: Function.Definition.Index = .null, current_basic_block: BasicBlock.Index = .null, - exit_blocks: UnpinnedArray(BasicBlock.Index) = .{}, + exit_blocks: BoundedArray(BasicBlock.Index, 16) = .{}, loop_exit_block: BasicBlock.Index = .null, loop_header_block: BasicBlock.Index = .null, return_phi: Instruction.Index = .null, @@ -4621,7 +4639,7 @@ pub const Builder = struct { try unit.string_literal_globals.put_no_clobber(context.my_allocator, hash, string_global); - try unit.data_to_emit.append(context.my_allocator, string_global); + _ = unit.data_to_emit.append(string_global); return string_global; } @@ -4651,7 +4669,8 @@ pub const Builder = struct { assert(argument_node_list.len == 1); const assembly_block_node = unit.getNode(argument_node_list[0]); const instruction_node_list = unit.getNodeList(assembly_block_node.left); - var instructions = try UnpinnedArray(InlineAssembly.Instruction.Index).initialize_with_capacity(context.my_allocator, @intCast(instruction_node_list.len)); + var instructions = try context.arena.new_array(InlineAssembly.Instruction.Index, instruction_node_list.len); + instructions.len = 0; for (instruction_node_list) |assembly_statement_node_index| { const assembly_instruction_node = unit.getNode(assembly_statement_node_index); @@ -4664,7 +4683,8 @@ pub const Builder = struct { } else unreachable; const operand_nodes = unit.getNodeList(assembly_instruction_node.right); - var operands = try UnpinnedArray(InlineAssembly.Operand).initialize_with_capacity(context.my_allocator, @intCast(operand_nodes.len)); + var operands = try context.arena.new_array(InlineAssembly.Operand, operand_nodes.len); + operands.len = 0; for (operand_nodes) |operand_node_index| { const operand_node = unit.getNode(operand_node_index); @@ -4693,19 +4713,23 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }; - operands.append_with_capacity(operand); + const index = operands.len; + operands.len += 1; + operands[index] = operand; } const instruction_index = try unit.assembly_instructions.append(context.my_allocator, .{ .id = @intFromEnum(instruction), - .operands = operands.slice(), + .operands = operands, }); - instructions.append_with_capacity(instruction_index); + const index = instructions.len; + instructions.len += 1; + instructions[index] = instruction_index; } const inline_assembly = try unit.inline_assembly.append(context.my_allocator, .{ - .instructions = instructions.slice(), + .instructions = instructions, }); const inline_asm = try unit.instructions.append(context.my_allocator, .{ @@ -4875,20 +4899,20 @@ pub const Builder = struct { }, .syscall => { if (argument_node_list.len > 0 and argument_node_list.len <= 6 + 1) { - var instruction_list = try UnpinnedArray(V).initialize_with_capacity(context.my_allocator, @intCast(argument_node_list.len)); + var instruction_list = try context.arena.new_array(V, argument_node_list.len); // TODO const arg_type_expect = Type.Expect{ .type = Type.usize, }; - for (argument_node_list) |argument_node_index| { + for (argument_node_list, 0..) 
|argument_node_index, i| { const argument_value = try builder.resolveRuntimeValue(unit, context, arg_type_expect, argument_node_index, .right); - instruction_list.append_with_capacity(argument_value); + instruction_list[i] = argument_value; } const syscall = try unit.instructions.append(context.my_allocator, .{ .syscall = .{ - .arguments = instruction_list.slice(), + .arguments = instruction_list, }, }); @@ -5013,8 +5037,8 @@ pub const Builder = struct { .integer => |*integer| switch (integer.kind) { .@"enum" => { const name_function = try builder.get_name_function(unit, context, v.type); - var args = try UnpinnedArray(V).initialize_with_capacity(context.my_allocator, 1); - args.append_with_capacity(v); + var args = try context.arena.new_array(V, 1); + args[0] = v; const call = try unit.instructions.append(context.my_allocator, .{ .call = .{ .callable = .{ @@ -5026,10 +5050,11 @@ pub const Builder = struct { .type = name_function.declaration.type, }, .function_type = name_function.declaration.type, - .arguments = args.slice(), + .arguments = args, }, }); try builder.appendInstruction(unit, context, call); + return V{ .value = .{ .runtime = call, @@ -5115,8 +5140,8 @@ pub const Builder = struct { fn get_name_function(builder: *Builder, unit: *Unit, context: *const Context, type_index: Type.Index) !*Debug.Declaration.Global { if (unit.name_functions.get(type_index)) |result| return result else { - var argument_types = try UnpinnedArray(Type.Index).initialize_with_capacity(context.my_allocator, 1); - argument_types.append_with_capacity(type_index); + var argument_types = try context.arena.new_array(Type.Index, 1); + argument_types[0] = type_index; const return_type_index = try unit.getSliceType(context, .{ .child_pointer_type = try unit.getPointerType(context, .{ .type = .u8, @@ -5133,11 +5158,11 @@ pub const Builder = struct { .nullable = false, }); const function_prototype_index = try unit.function_prototypes.append(context.my_allocator, .{ - .argument_types = argument_types.slice(), + .argument_types = argument_types, .return_type = return_type_index, .abi = .{ .return_type = return_type_index, - .parameter_types = argument_types.slice(), + .parameter_types = argument_types, }, }); const function_type_index = try unit.types.append(context.my_allocator, .{ @@ -5214,6 +5239,7 @@ pub const Builder = struct { const phi_instruction_index = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = return_type_index, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); const phi = &unit.instructions.get(phi_instruction_index).phi; @@ -5221,8 +5247,8 @@ pub const Builder = struct { const cases = switch (unit.types.get(type_index).*) { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| b: { - var cases = try UnpinnedArray(Instruction.Switch.Case).initialize_with_capacity(context.my_allocator, enum_type.fields.length); - for (enum_type.fields.slice()) |enum_field_index| { + var cases = try context.arena.new_array(Instruction.Switch.Case, enum_type.fields.len); + for (enum_type.fields, 0..) 
|enum_field_index, i| { builder.current_basic_block = entry_block; const enum_field = unit.enum_fields.get(enum_field_index); const case_block = try builder.newBasicBlock(unit, context); @@ -5247,7 +5273,7 @@ pub const Builder = struct { }, .type = return_type_index, }; - try phi.addIncoming(context, v, builder.current_basic_block); + phi.addIncoming(v, builder.current_basic_block); try builder.jump(unit, context, exit_block); const case = Instruction.Switch.Case{ @@ -5256,7 +5282,7 @@ pub const Builder = struct { }, .basic_block = case_block, }; - cases.append_with_capacity(case); + cases[i] = case; } break :b cases; @@ -5315,20 +5341,20 @@ pub const Builder = struct { switch (container_type.*) { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| { - const enum_count = enum_type.fields.length; + const enum_count = enum_type.fields.len; const array_type = try unit.getArrayType(context, .{ .type = container_type_index, .count = enum_count, .termination = .none, }); - var fields = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, enum_count); - for (enum_type.fields.slice()) |enum_field_index| { - fields.append_with_capacity(V.Comptime{ + var fields = try context.arena.new_array(V.Comptime, enum_count); + for (enum_type.fields, 0..) |enum_field_index, i| { + fields[i] = V.Comptime{ .enum_value = enum_field_index, - }); + }; } const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ - .values = fields.slice(), + .values = fields, .type = array_type, }); @@ -5355,7 +5381,7 @@ pub const Builder = struct { }), }); const global_declaration = unit.global_declarations.get(global_declaration_index); - try unit.data_to_emit.append(context.my_allocator, global_declaration); + _ = unit.data_to_emit.append(global_declaration); try unit.fields_array.put_no_clobber(context.my_allocator, container_type_index, global_declaration); @@ -5822,7 +5848,7 @@ pub const Builder = struct { }, else => { if (global_declaration.attributes.contains(.@"export") or declaration.mutability == .@"var") { - try unit.data_to_emit.append(context.my_allocator, global_declaration); + _ = unit.data_to_emit.append(global_declaration); } }, } @@ -6980,10 +7006,10 @@ pub const Builder = struct { fn newBasicBlock(builder: *Builder, unit: *Unit, context: *const Context) !BasicBlock.Index { const function = unit.function_definitions.get(builder.current_function); - const entry_basic_block = try unit.basic_blocks.append(context.my_allocator, .{}); - try function.basic_blocks.append(context.my_allocator, entry_basic_block); + const basic_block = try unit.basic_blocks.append(context.my_allocator, .{}); + try function.basic_blocks.append(context.my_allocator, basic_block); - return entry_basic_block; + return basic_block; } fn resolveIntegerType(builder: *Builder, unit: *Unit, context: *const Context, node_index: Node.Index) anyerror!Type.Index { @@ -7251,6 +7277,11 @@ pub const Builder = struct { }, }, .id = std.math.maxInt(u32), + .fields = .{ + .pointer = undefined, + .length = 0, + .capacity = 0, + }, }, }, }, @@ -7262,14 +7293,14 @@ pub const Builder = struct { .call => { // const parameterized_type_index = try builder.resolveType(unit, context, node.left); const parameter_nodes = unit.getNodeList(node.right); - var parameters = UnpinnedArray(V.Comptime){}; + var parameters = try context.arena.new_array(V.Comptime, parameter_nodes.len); - for (parameter_nodes) |parameter_node_index| { + for (parameter_nodes, 0..) 
|parameter_node_index, i| { const parameter = try builder.resolveComptimeValue(unit, context, Type.Expect.none, .{}, parameter_node_index, null, .right, &.{}, null, &.{}); - try parameters.append(context.my_allocator, parameter); + parameters[i] = parameter; } - const instantiated_type = try builder.resolveType(unit, context, node.left, parameters.slice()); + const instantiated_type = try builder.resolveType(unit, context, node.left, parameters); const instantiated_ty = unit.types.get(instantiated_type); assert(instantiated_ty.* != .polymorphic); @@ -7331,7 +7362,7 @@ pub const Builder = struct { } } - fn resolveFunctionPrototype(builder: *Builder, unit: *Unit, context: *const Context, node_index: Node.Index, global_attributes: Debug.Declaration.Global.Attributes, member: ?V, polymorphic_argument_nodes: []const Node.Index, maybe_scope: ?*Debug.Scope.Function, maybe_comptime_argument_declarations: ?*UnpinnedArray(ComptimeParameterDeclaration), maybe_comptime_argument_instantiations: ?*UnpinnedArray(V.Comptime), is_member: *bool, maybe_global: ?*Debug.Declaration.Global) !Type.Index { + fn resolveFunctionPrototype(builder: *Builder, unit: *Unit, context: *const Context, node_index: Node.Index, global_attributes: Debug.Declaration.Global.Attributes, member: ?V, polymorphic_argument_nodes: []const Node.Index, maybe_scope: ?*Debug.Scope.Function, maybe_comptime_argument_declarations: ?*[]const ComptimeParameterDeclaration, maybe_comptime_argument_instantiations: ?*[]const V.Comptime, is_member: *bool, maybe_global: ?*Debug.Declaration.Global) !Type.Index { _ = maybe_global; // autofix const node = unit.getNode(node_index); assert(node.id == .function_prototype); @@ -7384,11 +7415,12 @@ pub const Builder = struct { if (node.left != .null) { const argument_node_list = unit.getNodeList(node.left); - var argument_types = try UnpinnedArray(Type.Index).initialize_with_capacity(context.my_allocator, @intCast(argument_node_list.len)); + var argument_types = try context.arena.new_array(Type.Index, argument_node_list.len); + argument_types.len = 0; if (polymorphic_argument_nodes.len > 0) { - const comptime_parameter_declarations = maybe_comptime_argument_declarations orelse unreachable; - const comptime_parameter_instantiations = maybe_comptime_argument_instantiations orelse unreachable; + var comptime_parameter_instantiations = BoundedArray(V.Comptime, 512){}; + var comptime_parameter_declarations = BoundedArray(ComptimeParameterDeclaration, 512){}; is_member.* = polymorphic_argument_nodes.len + 1 == argument_node_list.len; const scope = maybe_scope orelse unreachable; assert(&scope.scope == builder.current_scope); @@ -7403,7 +7435,9 @@ pub const Builder = struct { } try builder.put_argument_in_scope(unit, context, member_node, 0, member_type); - argument_types.append_with_capacity(member_type); + const index = argument_types.len; + argument_types.len += 1; + argument_types[index] = member_type; } for (argument_node_list[@intFromBool(is_member.*)..], polymorphic_argument_nodes, 0..) 
|argument_declaration_node_index, polymorphic_call_argument_node_index, index| { @@ -7418,13 +7452,13 @@ pub const Builder = struct { const name = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(argument_declaration_node.token) + 1), .identifier); const name_hash = try unit.processIdentifier(context, name); const debug_info = builder.getTokenDebugInfo(unit, argument_declaration_node.token); - try comptime_parameter_declarations.append(context.my_allocator, .{ + comptime_parameter_declarations.appendAssumeCapacity(.{ .type = argument_type, .name_token = argument_declaration_node.token, .index = @intCast(index), }); - try comptime_parameter_instantiations.append(context.my_allocator, comptime_argument); + comptime_parameter_instantiations.appendAssumeCapacity(comptime_argument); const look_in_parent_scopes = true; if (builder.current_scope.lookupDeclaration(name_hash, look_in_parent_scopes)) |_| { @@ -7454,7 +7488,9 @@ pub const Builder = struct { }, .argument_declaration => { const argument_type_index = try builder.resolveType(unit, context, argument_declaration_node.left, &.{}); - argument_types.append_with_capacity(argument_type_index); + const i = argument_types.len; + argument_types.len += 1; + argument_types[i] = argument_type_index; try builder.put_argument_in_scope(unit, context, argument_declaration_node, index, argument_type_index); }, else => unreachable, @@ -7463,15 +7499,32 @@ pub const Builder = struct { function_prototype.has_polymorphic_parameters = true; - // function_prototype.comptime_parameter_declarations = comptime_parameter_declarations.slice(); - // function_prototype.comptime_parameter_instantiations = comptime_parameter_instantiations.slice(); + assert(comptime_parameter_declarations.len > 0); + assert(comptime_parameter_instantiations.len > 0); + + const heap_comptime_parameter_declarations = try context.arena.new_array(ComptimeParameterDeclaration, comptime_parameter_declarations.len); + const heap_comptime_parameter_instantiations = try context.arena.new_array(V.Comptime, comptime_parameter_instantiations.len); + @memcpy(heap_comptime_parameter_declarations, comptime_parameter_declarations.slice()); + @memcpy(heap_comptime_parameter_instantiations, comptime_parameter_instantiations.slice()); + + maybe_comptime_argument_declarations.?.* = heap_comptime_parameter_declarations; + maybe_comptime_argument_instantiations.?.* = heap_comptime_parameter_instantiations; } else { + if (maybe_comptime_argument_instantiations) |p| { + p.* = &.{}; + } + if (maybe_comptime_argument_declarations) |p| { + p.* = &.{}; + } for (argument_node_list, 0..) 
|argument_node_index, i| { const argument_node = unit.getNode(argument_node_index); assert(argument_node.id == .argument_declaration); const argument_type_index = try builder.resolveType(unit, context, argument_node.left, &.{}); - argument_types.append_with_capacity(argument_type_index); + + const index = argument_types.len; + argument_types.len += 1; + argument_types[index] = argument_type_index; if (maybe_scope) |scope| { assert(&scope.scope == builder.current_scope); @@ -7480,7 +7533,14 @@ pub const Builder = struct { } } - function_prototype.argument_types = argument_types.slice(); + function_prototype.argument_types = argument_types; + } else { + if (maybe_comptime_argument_instantiations) |p| { + p.* = &.{}; + } + if (maybe_comptime_argument_declarations) |p| { + p.* = &.{}; + } } function_prototype.return_type = try builder.resolveType(unit, context, return_type_node_index, &.{}); @@ -7748,14 +7808,19 @@ pub const Builder = struct { } } - fn resolveFunctionPrototypeAbiAarch64(builder: *Builder, unit: *Unit, context: *const Context, function_prototype: *Function.Prototype, parameter_types_abi: *UnpinnedArray(Function.AbiInfo)) !void { + fn resolveFunctionPrototypeAbiAarch64(builder: *Builder, unit: *Unit, context: *const Context, function_prototype: *Function.Prototype) !void { + var parameter_types_abi = BoundedArray(Function.AbiInfo, 512){}; const return_type_abi = builder.classify_return_type_aarch64(unit, context, function_prototype.return_type); for (function_prototype.argument_types) |argument_type_index| { const abi_arg = builder.classify_argument_type_aarch64(unit, context, argument_type_index); - parameter_types_abi.append_with_capacity(abi_arg); + parameter_types_abi.appendAssumeCapacity(abi_arg); } + function_prototype.abi.return_type_abi = return_type_abi; - function_prototype.abi.parameter_types_abi = parameter_types_abi.slice(); + + const parameter_abis = try context.arena.new_array(Function.AbiInfo, parameter_types_abi.len); + @memcpy(parameter_abis, parameter_types_abi.slice()); + function_prototype.abi.parameter_types_abi = parameter_abis; } fn resolveFunctionPrototypeAbi(builder: *Builder, unit: *Unit, context: *const Context, function_prototype: *Function.Prototype) !void { @@ -7767,59 +7832,63 @@ pub const Builder = struct { .kind = if (function_prototype.return_type == .void or function_prototype.return_type == .noreturn) .ignore else .direct, }; - var parameter_abis = try UnpinnedArray(Function.AbiInfo).initialize_with_capacity(context.my_allocator, @intCast(function_prototype.argument_types.len)); - for (function_prototype.argument_types, 0..) 
|_, i| { + var parameter_abis = try context.arena.new_array(Function.AbiInfo, function_prototype.argument_types.len); + + for (0..function_prototype.argument_types.len) |i| { const index: u16 = @intCast(i); - parameter_abis.append_with_capacity(.{ + parameter_abis[i] = .{ .kind = .direct, .indices = .{ index, index + 1 }, - }); + }; } - function_prototype.abi.parameter_types_abi = parameter_abis.slice(); + function_prototype.abi.parameter_types_abi = parameter_abis; }, .c => { - var parameter_types_abi = try UnpinnedArray(Function.AbiInfo).initialize_with_capacity(context.my_allocator, @intCast(function_prototype.argument_types.len)); switch (unit.descriptor.arch) { .x86_64 => switch (unit.descriptor.os) { - .linux => try builder.resolveFunctionPrototypeAbiSystemVx86_64(unit, context, function_prototype, ¶meter_types_abi), + .linux => try builder.resolveFunctionPrototypeAbiSystemVx86_64(unit, context, function_prototype), else => |t| @panic(@tagName(t)), }, - .aarch64 => try builder.resolveFunctionPrototypeAbiAarch64(unit, context, function_prototype, ¶meter_types_abi), + .aarch64 => try builder.resolveFunctionPrototypeAbiAarch64(unit, context, function_prototype), } - var abi_parameter_types = UnpinnedArray(Type.Index){}; + var abi_parameter_types = BoundedArray(Type.Index, 512){}; const abi_return_type = switch (function_prototype.abi.return_type_abi.kind) { .ignore => function_prototype.return_type, .direct_pair => |direct_pair| try unit.getTwoStruct(context, direct_pair), .direct => function_prototype.return_type, .indirect => |indirect| b: { - try abi_parameter_types.append(context.my_allocator, indirect.pointer); + abi_parameter_types.appendAssumeCapacity(indirect.pointer); break :b .void; }, .direct_coerce => |coerced_type| coerced_type, else => |t| @panic(@tagName(t)), }; - for (parameter_types_abi.slice(), function_prototype.argument_types) |*parameter_abi, parameter_type_index| { - const start: u16 = @intCast(abi_parameter_types.length); - switch (parameter_abi.kind) { - .direct => try abi_parameter_types.append(context.my_allocator, parameter_type_index), - .direct_coerce => |coerced_type| try abi_parameter_types.append(context.my_allocator, coerced_type), - .indirect => |indirect| try abi_parameter_types.append(context.my_allocator, indirect.pointer), + for (function_prototype.abi.parameter_types_abi, function_prototype.argument_types) |*const_parameter_abi, parameter_type_index| { + const start: u16 = @intCast(abi_parameter_types.len); + switch (const_parameter_abi.kind) { + .direct => abi_parameter_types.appendAssumeCapacity(parameter_type_index), + .direct_coerce => |coerced_type| abi_parameter_types.appendAssumeCapacity(coerced_type), + .indirect => |indirect| abi_parameter_types.appendAssumeCapacity(indirect.pointer), .direct_pair => |direct_pair| { - try abi_parameter_types.append(context.my_allocator, direct_pair[0]); - try abi_parameter_types.append(context.my_allocator, direct_pair[1]); + abi_parameter_types.appendAssumeCapacity(direct_pair[0]); + abi_parameter_types.appendAssumeCapacity(direct_pair[1]); }, else => |t| @panic(@tagName(t)), } - const end: u16 = @intCast(abi_parameter_types.length); + const parameter_abi: *Function.AbiInfo = @constCast(const_parameter_abi); + const end: u16 = @intCast(abi_parameter_types.len); parameter_abi.indices = .{ start, end }; } + const heap_abi_parameter_types = try context.arena.new_array(Type.Index, abi_parameter_types.len); + @memcpy(heap_abi_parameter_types, abi_parameter_types.slice()); + function_prototype.abi.return_type = 
abi_return_type; - function_prototype.abi.parameter_types = abi_parameter_types.slice(); + function_prototype.abi.parameter_types = heap_abi_parameter_types; }, } @@ -7827,7 +7896,8 @@ pub const Builder = struct { assert(function_prototype.abi.return_type != .null); } - fn resolveFunctionPrototypeAbiSystemVx86_64(builder: *Builder, unit: *Unit, context: *const Context, function_prototype: *Function.Prototype, parameter_types_abi: *UnpinnedArray(Function.AbiInfo)) !void { + fn resolveFunctionPrototypeAbiSystemVx86_64(builder: *Builder, unit: *Unit, context: *const Context, function_prototype: *Function.Prototype) !void { + var parameter_types_abi = BoundedArray(Function.AbiInfo, 512){}; const return_abi = builder.classify_return_type_systemv_x86_64(unit, context, function_prototype.return_type); var available_registers = SystemV_x86_64_Registers{ .gp_registers = 6, @@ -7852,11 +7922,13 @@ pub const Builder = struct { available_registers.sse_registers -= parameter_classification.needed_registers.sse_registers; break :b parameter_classification.abi; }; - parameter_types_abi.append_with_capacity(parameter_abi); + parameter_types_abi.appendAssumeCapacity(parameter_abi); } function_prototype.abi.return_type_abi = return_abi; - function_prototype.abi.parameter_types_abi = parameter_types_abi.slice(); + const abi_infos = try context.arena.new_array(Function.AbiInfo, parameter_types_abi.len); + @memcpy(abi_infos, parameter_types_abi.slice()); + function_prototype.abi.parameter_types_abi = abi_infos; } const Class_SystemVx86_64 = enum { @@ -7932,7 +8004,7 @@ pub const Builder = struct { const is_union = false; var member_offset: u32 = 0; - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); const field_type = unit.types.get(field.type); const offset = base_offset + member_offset; @@ -8036,7 +8108,7 @@ pub const Builder = struct { var offset_it: u32 = 0; var last_match: ?Member = null; - for (struct_type_descriptor.fields.slice()) |field_index| { + for (struct_type_descriptor.fields) |field_index| { const field = unit.struct_fields.get(field_index); if (offset_it > offset) break; last_match = .{ @@ -8058,7 +8130,7 @@ pub const Builder = struct { .@"struct" => |*struct_type| { var offset: u32 = 0; - for (struct_type.fields.slice()) |field_index| { + for (struct_type.fields) |field_index| { const field = unit.struct_fields.get(field_index); if (offset >= end) break; const field_type = unit.types.get(field.type); @@ -8420,12 +8492,11 @@ pub const Builder = struct { const struct_type = unit.structs.get(struct_index); const struct_options = &struct_type.kind.@"struct".options; - var parameter_types = UnpinnedArray(Token.Index){}; + var parameter_types = BoundedArray(Token.Index, 64){}; if (container_node.right != .null) { const struct_option_nodes = unit.getNodeList(container_node.right); var struct_options_value = false; - _ = ¶meter_types; for (struct_option_nodes) |struct_option_node_index| { const struct_option_node = unit.getNode(struct_option_node_index); @@ -8443,7 +8514,7 @@ pub const Builder = struct { const struct_options_struct_index = unit.types.get(struct_options_declaration_type_index).@"struct"; const struct_options_struct = unit.structs.get(struct_options_struct_index); - for (struct_options_struct.kind.@"struct".fields.slice(), constant_struct.fields) |field_index, field_value| { + for (struct_options_struct.kind.@"struct".fields, constant_struct.fields) |field_index, field_value| { const field = 
unit.struct_fields.get(field_index); const name = unit.getIdentifier(field.name); const option_id = enumFromString(Struct.Options.Id, name) orelse unreachable; @@ -8463,7 +8534,7 @@ pub const Builder = struct { assert(struct_option_node.right == .null); const left = unit.getNode(struct_option_node.left); assert(left.id == .identifier); - try parameter_types.append(context.my_allocator, left.token); + parameter_types.appendAssumeCapacity(left.token); }, else => |t| @panic(@tagName(t)), } @@ -8474,7 +8545,7 @@ pub const Builder = struct { .@"struct" = struct_index, }); - assert(new_parameters.len == parameter_types.length); + assert(new_parameters.len == parameter_types.len); for (parameter_types.slice(), new_parameters) |parameter_type_token, parameter_value| { const parameter_type = switch (parameter_value) { @@ -8504,11 +8575,15 @@ pub const Builder = struct { try struct_type.kind.@"struct".scope.scope.declarations.put_no_clobber(context.my_allocator, hash, &global_declaration.declaration); } - const polymorphic_type_index = switch (parameter_types.length > 0) { + const polymorphic_type_index = switch (parameter_types.len > 0) { true => blk: { const polymorphic_type_index = try unit.types.append(context.my_allocator, .{ .polymorphic = .{ - .parameters = parameter_types.slice(), + .parameters = param: { + const heap_parameter_types = try context.arena.new_array(Token.Index, parameter_types.len); + @memcpy(heap_parameter_types, parameter_types.slice()); + break :param heap_parameter_types; + }, .node = container_node_index, }, }); @@ -8694,7 +8769,6 @@ pub const Builder = struct { array_list.*[index] = member_index; } - var export_declarations = UnpinnedArray(*Debug.Declaration.Global){}; if (count.declarations > 0) { for (declaration_nodes) |declaration_node_index| { const declaration_node = unit.getNode(declaration_node_index); @@ -8711,7 +8785,6 @@ pub const Builder = struct { const look_in_parent_scopes = true; if (builder.current_scope.lookupDeclaration(identifier_hash, look_in_parent_scopes)) |lookup_result| { _ = lookup_result; // autofix - _ = UnpinnedArray; // autofix @panic("Symbol already on scope"); //std.debug.panic("Symbol {s} already on scope", .{identifier}); } @@ -8781,9 +8854,6 @@ pub const Builder = struct { const global_declaration = unit.global_declarations.get(global_declaration_index); try builder.current_scope.declarations.put_no_clobber(context.my_allocator, identifier_hash, &global_declaration.declaration); - if (attributes.contains(.@"export")) { - try export_declarations.append(context.my_allocator, global_declaration); - } }, else => unreachable, } @@ -8793,23 +8863,20 @@ pub const Builder = struct { if (count.fields > 0) { const ty = unit.types.get(data.plain); const field_count: u32 = @intCast(field_nodes.len); + var enum_fields: []Enum.Field.Index = undefined; + var struct_fields: []Struct.Field.Index = undefined; switch (container_type) { .@"enum" => { const integer_type = &ty.integer; - const enum_type = &integer_type.kind.@"enum"; - try enum_type.fields.ensure_capacity(context.my_allocator, field_count); + enum_fields = try context.arena.new_array(Enum.Field.Index, field_count); if (integer_type.bit_count == 0) { integer_type.bit_count = @bitSizeOf(@TypeOf(field_nodes.len)) - @clz(field_nodes.len); } assert(integer_type.bit_count > 0); }, - .@"struct" => { - const struct_type = unit.structs.get(ty.@"struct"); - try struct_type.kind.@"struct".fields.ensure_capacity(context.my_allocator, field_count); - }, - .bitfield => { - try 
ty.integer.kind.bitfield.fields.ensure_capacity(context.my_allocator, field_count); + .@"struct", .bitfield => { + struct_fields = try context.arena.new_array(Struct.Field.Index, field_count); }, } @@ -8855,7 +8922,7 @@ pub const Builder = struct { .value = enum_value, .parent = data.plain, }); - ty.integer.kind.@"enum".fields.append_with_capacity(enum_field_index); + enum_fields[index] = enum_field_index; }, .@"struct" => { assert(field_node.id == .container_field); @@ -8888,11 +8955,10 @@ pub const Builder = struct { .type = field_type, .default_value = field_default_value, }); - struct_type.kind.@"struct".fields.append_with_capacity(struct_field); + struct_fields[index] = struct_field; }, .bitfield => { assert(field_node.id == .container_field); - const bitfield = &ty.integer.kind.bitfield; const field_type = try builder.resolveType(unit, context, field_node.left, &.{}); const field_default_value: ?V.Comptime = switch (field_node.right) { .null => null, @@ -8904,7 +8970,7 @@ pub const Builder = struct { .type = field_type, .default_value = field_default_value, }); - bitfield.fields.append_with_capacity(struct_field); + struct_fields[index] = struct_field; }, } } @@ -8912,12 +8978,18 @@ pub const Builder = struct { switch (container_type) { .@"struct" => { const struct_type = unit.structs.get(ty.@"struct"); + struct_type.kind.@"struct".fields = struct_fields; if (struct_type.kind.@"struct".options.sliceable) |*sliceable| { sliceable.pointer = sliceable_pointer_index orelse unreachable; sliceable.length = sliceable_length_index orelse unreachable; } }, - else => {}, + .bitfield => { + ty.integer.kind.bitfield.fields = struct_fields; + }, + .@"enum" => { + ty.integer.kind.@"enum".fields = enum_fields; + }, } } @@ -9014,14 +9086,11 @@ pub const Builder = struct { } } - // Force analysis of exports - if (export_declarations.length > 0) { - for (export_declarations.slice()) |export_declaration| { - const name = unit.getIdentifier(export_declaration.declaration.name); - _ = name; - //if (byte_equal(name, "nat_big_struct_both")) @breakpoint(); - const result = try builder.referenceGlobalDeclaration(unit, context, &scope.scope, &export_declaration.declaration, .{}, &.{}, null, &.{}); - assert(result == export_declaration); + for (builder.current_scope.declarations.values()) |declaration|{ + const global_declaration: *Debug.Declaration.Global = @fieldParentPtr("declaration", declaration); + if (global_declaration.attributes.contains(.@"export")) { + const result = try builder.referenceGlobalDeclaration(unit, context, &scope.scope, declaration, .{}, &.{}, null, &.{}); + assert(result == global_declaration); } } @@ -9119,15 +9188,15 @@ pub const Builder = struct { try builder.pushScope(unit, context, &function.scope.scope); defer builder.popScope(unit, context) catch unreachable; - var comptime_parameter_declarations = UnpinnedArray(ComptimeParameterDeclaration){}; - var comptime_parameter_instantiations = UnpinnedArray(V.Comptime){}; + var comptime_parameter_declarations: []const ComptimeParameterDeclaration = &.{}; + var comptime_parameter_instantiations: []const V.Comptime = &.{}; var is_member_call = false; function.type = if (maybe_function_type_index == .null) b: { const function_prototype_node_index = function_node.left; const function_prototype_index = try builder.resolveFunctionPrototype(unit, context, function_prototype_node_index, global_attributes, maybe_member_value, polymorphic_argument_nodes, &function.scope, &comptime_parameter_declarations, &comptime_parameter_instantiations, 
&is_member_call, maybe_global); if (maybe_global) |g| { switch (g.initial_value) { - .polymorphic_function => |*pf| if (pf.get_instantiation(comptime_parameter_instantiations.slice())) |_| unreachable else {}, + .polymorphic_function => |*pf| if (pf.get_instantiation(comptime_parameter_instantiations)) |_| unreachable else {}, else => {}, } } @@ -9567,7 +9636,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, return_value); - try phi.addIncoming(context, .{ + phi.addIncoming(.{ .value = .{ .runtime = return_value, }, @@ -9594,17 +9663,17 @@ pub const Builder = struct { if (maybe_global != null and maybe_global.?.initial_value == .polymorphic_function) { const polymorphic_function = &maybe_global.?.initial_value.polymorphic_function; - const instantiation = try polymorphic_function.add_instantiation(unit, context, comptime_parameter_instantiations.slice(), maybe_global orelse unreachable, current_function); + const instantiation = try polymorphic_function.add_instantiation(unit, context, comptime_parameter_instantiations, maybe_global orelse unreachable, current_function); return .{ .global = instantiation, }; - } else if (comptime_parameter_declarations.length > 0) { + } else if (comptime_parameter_declarations.len > 0) { var polymorphic_function = PolymorphicFunction{ .node = function_node_index, - .parameters = comptime_parameter_declarations.slice(), + .parameters = comptime_parameter_declarations, .is_member_call = is_member_call, }; - _ = try polymorphic_function.add_instantiation(unit, context, comptime_parameter_instantiations.slice(), maybe_global orelse unreachable, current_function); + _ = try polymorphic_function.add_instantiation(unit, context, comptime_parameter_instantiations, maybe_global orelse unreachable, current_function); return V.Comptime{ .polymorphic_function = polymorphic_function, }; @@ -9675,7 +9744,7 @@ pub const Builder = struct { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| { return V.Comptime{ - .enum_fields = enum_type.fields.slice(), + .enum_fields = enum_type.fields, }; }, else => |t| @panic(@tagName(t)), @@ -9842,7 +9911,7 @@ pub const Builder = struct { .parent = builder.current_scope, }, }, - .fields = try UnpinnedArray(Type.Error.Field.Index).initialize_with_capacity(context.my_allocator, @intCast(nodes.len)), + .fields = try DynamicBoundedArray(Type.Error.Field.Index).init(context.arena, @intCast(nodes.len)), .id = unit.error_count, }, }, @@ -9860,7 +9929,7 @@ pub const Builder = struct { .type = error_type_index, .value = index, }); - error_type.fields.append_with_capacity(error_field_index); + error_type.fields.append(error_field_index); } return .{ @@ -9876,7 +9945,7 @@ pub const Builder = struct { switch (expected_type.*) { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| { - for (enum_type.fields.slice()) |field_index| { + for (enum_type.fields) |field_index| { const field = unit.enum_fields.get(field_index); if (field.name == hash) { return .{ @@ -9915,17 +9984,7 @@ pub const Builder = struct { switch (appointee.id) { .anonymous_empty_literal => switch (type_expect) { .type => |type_index| switch (unit.types.get(type_index).*) { - .slice => |slice| { - _ = slice; // autofix - var field_list = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, 2); - - field_list.append_with_capacity(.undefined); - field_list.append_with_capacity(V.Comptime{ - .constant_int = .{ - .value = 0, - }, - }); - + .slice => { const constant_slice = try 
unit.constant_slices.append(context.my_allocator, .{ .array = null, .start = 0, @@ -9984,14 +10043,14 @@ pub const Builder = struct { switch (left_node.id) { .string_literal => { const string_literal = try unit.fixupStringLiteral(context, left_node.token); - var values = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, @intCast(string_literal.len)); + var values = try context.arena.new_array(V.Comptime, string_literal.len); - for (string_literal) |b| { - values.append_with_capacity(V.Comptime{ + for (string_literal, 0..) |b, i| { + values[i] = V.Comptime{ .constant_int = .{ .value = b, }, - }); + }; } const array_type = try unit.getArrayType(context, .{ @@ -10004,7 +10063,7 @@ pub const Builder = struct { .value = .{ .@"comptime" = .{ .constant_array = try unit.constant_arrays.append(context.my_allocator, .{ - .values = values.slice(), + .values = values, .type = array_type, }), }, @@ -10585,7 +10644,7 @@ pub const Builder = struct { .@"enum" => |*enum_type| { const identifier = unit.getExpectedTokenBytes(@enumFromInt(@intFromEnum(node.token) + 1), .identifier); const hash = try unit.processIdentifier(context, identifier); - for (enum_type.fields.slice()) |field_index| { + for (enum_type.fields) |field_index| { const field = unit.enum_fields.get(field_index); if (field.name == hash) { break :block V{ @@ -12086,7 +12145,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, load); - const pointer_field_index = struct_type.fields.slice()[sliceable.pointer]; + const pointer_field_index = struct_type.fields[sliceable.pointer]; const pointer_field = unit.struct_fields.get(pointer_field_index); const pointer_type = unit.types.get(pointer_field.type).pointer; const child_type_index = pointer_type.type; @@ -12110,7 +12169,7 @@ pub const Builder = struct { }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| if (struct_type.options.sliceable) |sliceable| b: { - const field_index = struct_type.fields.slice()[sliceable.pointer]; + const field_index = struct_type.fields[sliceable.pointer]; const field = unit.struct_fields.get(field_index); const child_pointer_type = field.type; const pointer_type = unit.types.get(field.type).pointer; @@ -12491,10 +12550,11 @@ pub const Builder = struct { const phi_index = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = type_to_expect, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); const phi = &unit.instructions.get(phi_index).phi; - try phi.addIncoming(context, else_expr, builder.current_basic_block); + phi.addIncoming(else_expr, builder.current_basic_block); const phi_block = try builder.newBasicBlock(unit, context); try builder.jump(unit, context, phi_block); @@ -12516,7 +12576,7 @@ pub const Builder = struct { }, .type = type_to_expect, }; - try phi.addIncoming(context, unwrap, builder.current_basic_block); + phi.addIncoming(unwrap, builder.current_basic_block); try builder.jump(unit, context, phi_block); builder.current_basic_block = phi_block; @@ -12966,7 +13026,7 @@ pub const Builder = struct { if (builder.return_block != .null) { assert(builder.return_phi != .null); const phi = &unit.instructions.get(builder.return_phi).phi; - try phi.addIncoming(context, final_error_union, builder.current_basic_block); + phi.addIncoming(final_error_union, builder.current_basic_block); } else if (builder.return_phi != .null) { unreachable; } else { @@ -12975,11 +13035,12 @@ pub 
const Builder = struct { const phi_index = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = return_type_index, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); const phi = &unit.instructions.get(phi_index).phi; const phi_block = try builder.newBasicBlock(unit, context); - try phi.addIncoming(context, final_error_union, builder.current_basic_block); + phi.addIncoming(final_error_union, builder.current_basic_block); // const old_block = builder.current_basic_block; @@ -13020,17 +13081,17 @@ pub const Builder = struct { const fields = switch (container_type.*) { .integer => |*integer| switch (integer.kind) { - .bitfield => |*bitfield| bitfield.fields.slice(), + .bitfield => |*bitfield| bitfield.fields, else => |t| @panic(@tagName(t)), }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { - .@"struct" => |*struct_type| struct_type.fields.slice(), + .@"struct" => |*struct_type| struct_type.fields, else => |t| @panic(@tagName(t)), }, else => |t| @panic(@tagName(t)), }; - var list = try UnpinnedArray(V).initialize_with_capacity(context.my_allocator, @intCast(fields.len)); + var list = try DynamicBoundedArray(V).init(context.arena, @intCast(fields.len)); var is_comptime = true; for (fields) |field_index| { @@ -13048,11 +13109,11 @@ pub const Builder = struct { const expected_type = field.type; const field_initialization = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = expected_type }, initialization_node.left, .right); is_comptime = is_comptime and field_initialization.value == .@"comptime"; - list.append_with_capacity(field_initialization); + list.append(field_initialization); break; } } else if (field.default_value) |default_value| { - list.append_with_capacity(.{ + list.append(.{ .value = .{ .@"comptime" = default_value, }, @@ -13189,16 +13250,16 @@ pub const Builder = struct { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => { if (is_comptime) { - var comptime_list = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, @intCast(fields.len)); - for (list.slice()) |item| { - comptime_list.append_with_capacity(item.value.@"comptime"); + var comptime_list = try context.arena.new_array(V.Comptime, fields.len); + for (list.slice(), 0..) 
|item, i| { + comptime_list[i] = item.value.@"comptime"; } return .{ .value = .{ .@"comptime" = .{ .constant_struct = try unit.constant_structs.append(context.my_allocator, .{ - .fields = comptime_list.slice(), + .fields = comptime_list, .type = type_index, }), }, @@ -13267,17 +13328,17 @@ pub const Builder = struct { .none => false, else => true, }; - var values = try UnpinnedArray(V).initialize_with_capacity(context.my_allocator, @intCast(nodes.len + @intFromBool(is_terminated))); + var values = try DynamicBoundedArray(V).init(context.arena, @intCast(nodes.len + @intFromBool(is_terminated))); for (nodes) |node_index| { const value = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = array_type.type }, node_index, .right); // assert(value.value == .@"comptime"); is_comptime = is_comptime and value.value == .@"comptime"; - values.append_with_capacity(value); + values.append(value); } switch (array_type.termination) { .none => {}, - .zero => values.append_with_capacity(.{ + .zero => values.append(.{ .value = .{ .@"comptime" = .{ .constant_int = .{ @@ -13287,7 +13348,7 @@ pub const Builder = struct { }, .type = array_type.type, }), - .null => values.append_with_capacity(.{ + .null => values.append(.{ .value = .{ .@"comptime" = .null_pointer, }, @@ -13298,13 +13359,13 @@ pub const Builder = struct { if (is_comptime) { const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ .values = blk: { - var ct_values = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, values.length); + var ct_values = try context.arena.new_array(V.Comptime, values.length); - for (values.slice()) |v| { - ct_values.append_with_capacity(v.value.@"comptime"); + for (values.slice(), 0..) |v, i| { + ct_values[i] = v.value.@"comptime"; } - break :blk ct_values.slice(); + break :blk ct_values; }, // TODO: avoid hash lookup .type = try unit.getArrayType(context, array_type), @@ -13357,7 +13418,7 @@ pub const Builder = struct { switch (ty.*) { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| { - for (struct_type.fields.slice(), 0..) |field_index, index| { + for (struct_type.fields, 0..) 
|field_index, index| { const field = unit.struct_fields.get(field_index); if (field.name == right_identifier_hash) { @@ -13671,6 +13732,7 @@ pub const Builder = struct { const function_prototype = unit.function_prototypes.get(unit.types.get(function_type_index).function); const argument_declaration_count = function_prototype.argument_types.len; + _ = argument_declaration_count; // autofix // // Argument list holds already the value of the member value // if (argument_nodes.len + @intFromBool(member_resolution.member != null) != argument_declaration_count) { @@ -13679,8 +13741,9 @@ pub const Builder = struct { const is_indirect = function_prototype.abi.return_type_abi.kind == .indirect; const extra_member_count = @as(usize, @intFromBool(is_indirect)) + @intFromBool(member_resolution.member != null); + _ = extra_member_count; // autofix - var argument_list = try UnpinnedArray(V).initialize_with_capacity(context.my_allocator, @intCast(argument_declaration_count + extra_member_count)); + var argument_list = BoundedArray(V, 512){}; const indirect_return: ?V = switch (function_prototype.abi.return_type_abi.kind) { .indirect => |indirect| b: { @@ -13694,7 +13757,7 @@ pub const Builder = struct { }, .type = indirect.pointer, }; - argument_list.append_with_capacity(v); + argument_list.appendAssumeCapacity(v); break :b v; } else { @@ -13708,7 +13771,7 @@ pub const Builder = struct { const member_argument_index = @intFromBool(is_indirect); const abi = function_prototype.abi.parameter_types_abi[member_argument_index]; switch (abi.kind) { - .direct => argument_list.append_with_capacity(m), + .direct => argument_list.appendAssumeCapacity(m), else => |t| @panic(@tagName(t)), } } @@ -13740,9 +13803,9 @@ pub const Builder = struct { switch (argument_abi.kind) { .direct => { assert(argument_value.type == argument_type_index); - argument_list.append_with_capacity(argument_value); + argument_list.appendAssumeCapacity(argument_value); }, - .direct_coerce => |coerced_type_index| if (coerced_type_index == argument_value.type) argument_list.append_with_capacity(argument_value) else { + .direct_coerce => |coerced_type_index| if (coerced_type_index == argument_value.type) argument_list.appendAssumeCapacity(argument_value) else { const stack = try builder.createStackVariable(unit, context, argument_value.type, null); const pointer_type = try unit.getPointerType(context, .{ @@ -13787,7 +13850,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, load); - argument_list.append_with_capacity(V{ + argument_list.appendAssumeCapacity(V{ .value = .{ .runtime = load, }, @@ -13826,7 +13889,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, load); - argument_list.append_with_capacity(V{ + argument_list.appendAssumeCapacity(V{ .value = .{ .runtime = load, }, @@ -13845,8 +13908,8 @@ pub const Builder = struct { switch (original_type.*) { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*original_struct| { - if (original_struct.fields.length == 2) { - for (original_struct.fields.slice(), pair) |field_index, pair_type_index| { + if (original_struct.fields.len == 2) { + for (original_struct.fields, pair) |field_index, pair_type_index| { const field = unit.struct_fields.get(field_index); if (field.type != pair_type_index) break :b false; } @@ -13872,7 +13935,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, extract_0); - argument_list.append_with_capacity(.{ + argument_list.appendAssumeCapacity(.{ 
.value = .{ .runtime = extract_0, }, @@ -13887,7 +13950,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, extract_1); - argument_list.append_with_capacity(.{ + argument_list.appendAssumeCapacity(.{ .value = .{ .runtime = extract_1, }, @@ -14027,13 +14090,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, load1); - argument_list.append_with_capacity(V{ + argument_list.appendAssumeCapacity(V{ .value = .{ .runtime = load0, }, .type = pair[0], }); - argument_list.append_with_capacity(V{ + argument_list.appendAssumeCapacity(V{ .value = .{ .runtime = load1, }, @@ -14073,7 +14136,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, store); - argument_list.append_with_capacity(indirect_value); + argument_list.appendAssumeCapacity(indirect_value); } }, else => |t| @panic(@tagName(t)), @@ -14086,7 +14149,11 @@ pub const Builder = struct { .call = .{ .callable = member_resolution.callable, .function_type = function_type_index, - .arguments = argument_list.slice(), + .arguments = b: { + const array = try context.arena.new_array(V, argument_list.len); + @memcpy(array, argument_list.slice()); + break :b array; + }, }, }); try builder.appendInstruction(unit, context, instruction); @@ -14404,7 +14471,8 @@ pub const Builder = struct { } } else { const count = slices_and_range_node.len; - var slices = UnpinnedArray(V){}; + var slices = try context.arena.new_array(V, slices_and_range_node.len); + slices.len = count - 1; const last_element_node_index = slices_and_range_node[count - 1]; const last_element_node = unit.getNode(last_element_node_index); @@ -14415,9 +14483,9 @@ pub const Builder = struct { end: V, }; - for (slices_and_range_node[0 .. count - 1]) |slice_or_range_node_index| { + for (slices_and_range_node[0 .. count - 1], 0..) 
|slice_or_range_node_index, i| { const slice = try builder.resolveRuntimeValue(unit, context, Type.Expect.none, slice_or_range_node_index, .right); - try slices.append(context.my_allocator, slice); + slices[i] = slice; } const loop_counter: LoopCounter = switch (last_element_node.id) { @@ -14429,11 +14497,11 @@ pub const Builder = struct { const stack_slot = try builder.emitLocalVariableDeclaration(unit, context, last_element_payload.token, .@"var", Type.usize, range_start, emit, null); // This is put up here so that the length is constant throughout the loop and we dont have to load the variable unnecessarily const range_end = switch (last_element_node.right) { - .null => switch (unit.types.get(slices.pointer[0].type).*) { + .null => switch (unit.types.get(slices[0].type).*) { .slice => b: { const len_extract_instruction = try unit.instructions.append(context.my_allocator, .{ .extract_value = .{ - .expression = slices.pointer[0], + .expression = slices[0], .index = 1, }, }); @@ -14475,7 +14543,9 @@ pub const Builder = struct { switch (unit.types.get(for_loop_value.type).*) { .slice => { - try slices.append(context.my_allocator, for_loop_value); + const index = slices.len; + slices.len += 1; + slices[index] = for_loop_value; const len_extract_value = try unit.instructions.append(context.my_allocator, .{ .extract_value = .{ @@ -14530,7 +14600,11 @@ pub const Builder = struct { }, .type = slice_type, }; - try slices.append(context.my_allocator, slice_value); + + const index = slices.len; + slices.len += 1; + slices[index] = slice_value; + break :blk .{ .stack_slot = stack_slot, .end = .{ @@ -14609,7 +14683,7 @@ pub const Builder = struct { const is_last_element_range = last_element_node.id == .range; const not_range_len = payloads.len - @intFromBool(is_last_element_range); - if (slices.length > 0) { + if (slices.len > 0) { const load_i = try unit.instructions.append(context.my_allocator, .{ .load = .{ .value = .{ @@ -14623,7 +14697,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, context, load_i); - for (payloads[0..not_range_len], slices.slice()) |payload_node_index, slice| { + for (payloads[0..not_range_len], slices) |payload_node_index, slice| { const pointer_extract_value = try unit.instructions.append(context.my_allocator, .{ .extract_value = .{ .expression = slice, @@ -14905,10 +14979,11 @@ pub const Builder = struct { const phi_index = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = expected_type, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); const phi = &unit.instructions.get(phi_index).phi; - try phi.addIncoming(context, v, builder.current_basic_block); + phi.addIncoming(v, builder.current_basic_block); const phi_block = try builder.newBasicBlock(unit, context); try builder.jump(unit, context, phi_block); @@ -14942,7 +15017,7 @@ pub const Builder = struct { const phi = &unit.instructions.get(phi_index).phi; const exit_block = catch_info.exit_block; - try phi.addIncoming(context, value, builder.current_basic_block); + phi.addIncoming(value, builder.current_basic_block); try builder.jump(unit, context, exit_block); builder.current_basic_block = exit_block; @@ -15052,7 +15127,7 @@ pub const Builder = struct { const taken_block = try builder.newBasicBlock(unit, context); const exit_block = try builder.newBasicBlock(unit, context); const not_taken_block = if (not_taken_node_index != .null) try builder.newBasicBlock(unit, context) else exit_block; - try 
builder.exit_blocks.append(context.my_allocator, exit_block); + builder.exit_blocks.appendAssumeCapacity(exit_block); try builder.branch(unit, context, condition, taken_block, not_taken_block); builder.current_basic_block = taken_block; @@ -15157,8 +15232,10 @@ pub const Builder = struct { try builder.appendInstruction(unit, context, br); unit.basic_blocks.get(builder.current_basic_block).terminated = true; - try unit.basic_blocks.get(taken_block).predecessors.append(context.my_allocator, builder.current_basic_block); - try unit.basic_blocks.get(non_taken_block).predecessors.append(context.my_allocator, builder.current_basic_block); + const taken_bb = unit.basic_blocks.get(taken_block); + const non_taken_bb = unit.basic_blocks.get(non_taken_block); + try taken_bb.add_predecessor(builder.current_basic_block); + try non_taken_bb.add_predecessor(builder.current_basic_block); } fn jump(builder: *Builder, unit: *Unit, context: *const Context, new_basic_block: BasicBlock.Index) !void { @@ -15172,7 +15249,8 @@ pub const Builder = struct { try builder.appendInstruction(unit, context, instruction); unit.basic_blocks.get(builder.current_basic_block).terminated = true; - try unit.basic_blocks.get(new_basic_block).predecessors.append(context.my_allocator, builder.current_basic_block); + const new_bb = unit.basic_blocks.get(new_basic_block); + try new_bb.add_predecessor(builder.current_basic_block); } fn resolveComptimeSwitch(builder: *Builder, unit: *Unit, context: *const Context, type_expect: Type.Expect, global_attributes: Debug.Declaration.Global.Attributes, node_index: Node.Index, maybe_global: ?*Debug.Declaration.Global) !V.Comptime { @@ -15186,8 +15264,8 @@ pub const Builder = struct { const enum_type = &unit.types.get(enum_field.parent).integer.kind.@"enum"; const typecheck_enum_result = try unit.typecheckSwitchEnums(context, enum_type, case_nodes); - const group_index = for (typecheck_enum_result.switch_case_groups.pointer[0..typecheck_enum_result.switch_case_groups.length], 0..) |switch_case_group, switch_case_group_index| { - break for (switch_case_group.pointer[0..switch_case_group.length]) |field_index| { + const group_index = for (typecheck_enum_result.switch_case_groups, 0..) |switch_case_group, switch_case_group_index| { + break for (switch_case_group) |field_index| { if (enum_field_index == field_index) { break switch_case_group_index; } @@ -15235,8 +15313,8 @@ pub const Builder = struct { const enum_type = &unit.types.get(enum_field.parent).integer.kind.@"enum"; const typecheck_enum_result = try unit.typecheckSwitchEnums(context, enum_type, case_nodes); - const group_index = for (typecheck_enum_result.switch_case_groups.pointer[0..typecheck_enum_result.switch_case_groups.length], 0..) |switch_case_group, switch_case_group_index| { - break for (switch_case_group.pointer[0..switch_case_group.length]) |field_index| { + const group_index = for (typecheck_enum_result.switch_case_groups, 0..) 
|switch_case_group, switch_case_group_index| { + break for (switch_case_group) |field_index| { if (enum_field_index == field_index) { break switch_case_group_index; } @@ -15299,6 +15377,7 @@ pub const Builder = struct { .instruction = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = type_index, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }), .block = try builder.newBasicBlock(unit, context), @@ -15308,11 +15387,13 @@ pub const Builder = struct { const before_switch_bb = builder.current_basic_block; const switch_exit_block = try builder.newBasicBlock(unit, context); + var stack_switch_cases = BoundedArray(Instruction.Switch.Case, 512){}; + for (case_nodes) |case_node_index| { builder.current_basic_block = before_switch_bb; const case_node = unit.getNode(case_node_index); assert(case_node.right != .null); - var conditions = UnpinnedArray(V.Comptime){}; + var conditions = BoundedArray(V.Comptime, 512){}; switch (case_node.left) { .null => {}, @@ -15321,7 +15402,6 @@ pub const Builder = struct { switch (condition_node.id) { .node_list => { const condition_nodes = unit.getNodeListFromNode(condition_node); - try conditions.ensure_capacity(context.my_allocator, @intCast(condition_nodes.len)); for (condition_nodes) |condition_node_index| { const cn = unit.getNode(condition_node_index); @@ -15343,7 +15423,7 @@ pub const Builder = struct { if (left_ch < right_ch) { while (left_ch <= right_ch) : (left_ch += 1) { - try conditions.append(context.my_allocator, .{ + conditions.appendAssumeCapacity(.{ .constant_int = .{ .value = left_ch, }, @@ -15356,28 +15436,30 @@ pub const Builder = struct { else => unreachable, } }, - else => try conditions.append(context.my_allocator, try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = condition_type }, .{}, condition_node_index, null, .right, &.{}, null, &.{})), + else => conditions.appendAssumeCapacity(try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = condition_type }, .{}, condition_node_index, null, .right, &.{}, null, &.{})), } } }, else => { const v = try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = condition_type }, .{}, case_node.left, null, .right, &.{}, null, &.{}); - try conditions.ensure_capacity(context.my_allocator, 1); - conditions.append_with_capacity(v); + conditions.appendAssumeCapacity(v); }, } }, } const case_block = try builder.newBasicBlock(unit, context); - try unit.basic_blocks.get(case_block).predecessors.append(context.my_allocator, before_switch_bb); + const case_bb = unit.basic_blocks.get(case_block); + try case_bb.add_predecessor(before_switch_bb); + builder.current_basic_block = case_block; + const v = try builder.resolveRuntimeValue(unit, context, type_expect, case_node.right, .right); if (phi_info) |phi| { if (!unit.basic_blocks.get(builder.current_basic_block).terminated) { const phi_instruction = &unit.instructions.get(phi.instruction).phi; - try phi_instruction.addIncoming(context, v, case_block); + phi_instruction.addIncoming(v, case_block); try builder.jump(unit, context, phi.block); } } else if (builder.current_basic_block != .null) { @@ -15393,26 +15475,32 @@ pub const Builder = struct { } } - if (conditions.length > 0) { + if (conditions.len > 0) { for (conditions.slice()) |condition| { const case = Instruction.Switch.Case{ .condition = condition, .basic_block = case_block, }; - try switch_instruction.cases.append(context.my_allocator, case); + 
stack_switch_cases.appendAssumeCapacity(case); } } else { + assert(switch_instruction.else_block == .null); switch_instruction.else_block = case_block; } } + const switch_cases = try context.arena.new_array(Instruction.Switch.Case, stack_switch_cases.len); + @memcpy(switch_cases, stack_switch_cases.slice()); + + switch_instruction.cases = switch_cases; + if (switch_instruction.else_block == .null) { switch_instruction.else_block = try builder.create_unreachable_block(unit, context); } if (phi_info) |phi| { const phi_instruction = &unit.instructions.get(phi.instruction).phi; - if (phi_instruction.values.length > 0) { + if (phi_instruction.values.len > 0) { builder.current_basic_block = phi.block; try builder.appendInstruction(unit, context, phi.instruction); @@ -15555,7 +15643,7 @@ pub const Builder = struct { } else switch (left_type.*) { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| blk: { - const field_index = for (enum_type.fields.slice()) |enum_field_index| { + const field_index = for (enum_type.fields) |enum_field_index| { const enum_field = unit.enum_fields.get(enum_field_index); if (enum_field.name == identifier_hash) { break enum_field_index; @@ -15749,7 +15837,7 @@ pub const Builder = struct { }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| { - const fields = struct_type.fields.slice(); + const fields = struct_type.fields; for (fields, 0..) |field_index, i| { const field = unit.struct_fields.get(field_index); @@ -15830,7 +15918,7 @@ pub const Builder = struct { }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| { - const fields = struct_type.fields.slice(); + const fields = struct_type.fields; for (fields, 0..) |field_index, i| { const field = unit.struct_fields.get(field_index); @@ -15898,7 +15986,7 @@ pub const Builder = struct { }, .integer => |*integer| switch (integer.kind) { .bitfield => |*bitfield| { - const fields = bitfield.fields.slice(); + const fields = bitfield.fields; for (fields, 0..) 
|field_index, i| { const field = unit.struct_fields.get(field_index); @@ -16336,23 +16424,24 @@ pub const Builder = struct { if (builder.return_block != .null) { if (builder.return_phi != .null) { const phi = &unit.instructions.get(builder.return_phi).phi; - try phi.addIncoming(context, return_value, builder.current_basic_block); + phi.addIncoming(return_value, builder.current_basic_block); } assert(builder.current_basic_block != builder.return_block); try builder.jump(unit, context, builder.return_block); - } else if (builder.exit_blocks.length > 0) { + } else if (builder.exit_blocks.len > 0) { builder.return_phi = try unit.instructions.append(context.my_allocator, .{ .phi = .{ .type = return_type_index, + .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); builder.return_block = try builder.newBasicBlock(unit, context); const phi = &unit.instructions.get(builder.return_phi).phi; - try phi.addIncoming(context, return_value, builder.current_basic_block); + phi.addIncoming(return_value, builder.current_basic_block); try builder.jump(unit, context, builder.return_block); } else { @@ -16666,14 +16755,14 @@ pub const Builder = struct { const struct_test_type = unit.types.get(test_type); const test_type_struct = unit.structs.get(struct_test_type.@"struct"); - const struct_fields = test_type_struct.kind.@"struct".fields.slice(); + const struct_fields = test_type_struct.kind.@"struct".fields; assert(struct_fields.len == 2); const first_field = unit.struct_fields.get(struct_fields[0]); // const second_field = unit.struct_fields.get(test_type_struct.fields.items[1]); - var list = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, unit.test_functions.length); - for (unit.test_functions.keys(), unit.test_functions.values()) |test_function_name_global, test_function_global| { - var fields = try UnpinnedArray(V.Comptime).initialize_with_capacity(context.my_allocator, 2); + var list = try context.arena.new_array(V.Comptime, unit.test_functions.length); + for (unit.test_functions.keys(), unit.test_functions.values(), 0..) 
|test_function_name_global, test_function_global, i| { + var fields = try context.arena.new_array(V.Comptime, 2); const name = unit.getIdentifier(test_function_name_global.initial_value.string_literal); const name_slice = try unit.constant_slices.append(context.my_allocator, .{ .array = test_function_name_global, @@ -16681,25 +16770,25 @@ pub const Builder = struct { .end = name.len, .type = first_field.type, }); - fields.append_with_capacity(.{ + fields[0] = .{ .constant_slice = name_slice, - }); - fields.append_with_capacity(.{ + }; + fields[1] = .{ .global = test_function_global, - }); + }; const constant_struct = try unit.constant_structs.append(context.my_allocator, .{ - .fields = fields.slice(), + .fields = fields, .type = test_type, }); - list.append_with_capacity(.{ + list[i] = .{ .constant_struct = constant_struct, - }); + }; } const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ .type = array_type, - .values = list.slice(), + .values = list, }); const array_name = "_anon_test_function_array"; @@ -16721,11 +16810,11 @@ pub const Builder = struct { .attributes = .{}, }); const test_function_array_global = unit.global_declarations.get(test_function_array_global_index); - try unit.data_to_emit.append(context.my_allocator, test_function_array_global); + _ = unit.data_to_emit.append(test_function_array_global); const constant_slice = try unit.constant_slices.append(context.my_allocator, .{ .array = test_function_array_global, .start = 0, - .end = list.length, + .end = list.len, .type = test_functions_global.declaration.type, }); @@ -16804,7 +16893,7 @@ pub const Builder = struct { pub const Enum = struct { scope: Debug.Scope.Global, - fields: UnpinnedArray(Enum.Field.Index) = .{}, + fields: []const Enum.Field.Index = &.{}, pub const Field = struct { value: usize, @@ -16862,7 +16951,7 @@ pub const Unit = struct { error_count: u32 = 0, code_to_emit: MyHashMap(Function.Definition.Index, *Debug.Declaration.Global) = .{}, - data_to_emit: UnpinnedArray(*Debug.Declaration.Global) = .{}, + data_to_emit: PinnedArray(*Debug.Declaration.Global), external_functions: MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, type_declarations: MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, test_functions: MyHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global) = .{}, @@ -17026,8 +17115,10 @@ pub const Unit = struct { try dumpInt(BasicBlock.unwrap(branch.not_taken), 10, false); try write(.ir, "]"); }, - .phi => |phi| { - for (phi.values.pointer[0..phi.values.length], phi.basic_blocks.pointer[0..phi.basic_blocks.length]) |value, bb| { + .phi => |*phi| { + for (phi.values.slice()) |v| { + const value = v.value; + const bb = v.basic_block; try write(.ir, "("); switch (value.value) { .@"comptime" => try write(.ir, "$comptime"), @@ -17073,23 +17164,22 @@ pub const Unit = struct { } fn typecheckSwitchEnums(unit: *Unit, context: *const Context, enum_type: *Enum, switch_case_node_list: []const Node.Index) !TypeCheckSwitchEnums { - var result = TypeCheckSwitchEnums{ - .switch_case_groups = try UnpinnedArray(UnpinnedArray(Enum.Field.Index)).initialize_with_capacity(context.my_allocator, @intCast(switch_case_node_list.len)), - }; + var else_switch_case_group_index: ?usize = null; + var switch_case_groups = try context.arena.new_array([]const Enum.Field.Index, switch_case_node_list.len); + switch_case_groups.len = 0; - var existing_enums = UnpinnedArray(Enum.Field.Index){}; - // _ = existing_enums; // autofix + var existing_enums = BoundedArray(Enum.Field.Index, 512){}; 
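        // Note on the rewrite above: `switch_case_groups` is carved out of the
        // compilation arena with room for every case node and is grown by bumping
        // `.len`, while `existing_enums` is a fixed 512-entry stack buffer used to
        // detect enum fields that appear in more than one case condition; both
        // replace the old allocator-backed UnpinnedArray buffers.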
for (switch_case_node_list, 0..) |switch_case_node_index, index| { const switch_case_node = unit.getNode(switch_case_node_index); switch (switch_case_node.left) { + .null => else_switch_case_group_index = index, else => { const switch_case_condition_node = unit.getNode(switch_case_node.left); - var switch_case_group = UnpinnedArray(Enum.Field.Index){}; - switch (switch_case_condition_node.id) { - .dot_literal => { + const switch_case_group = switch (switch_case_condition_node.id) { + .dot_literal => b: { if (try unit.typeCheckEnumLiteral(context, @enumFromInt(@intFromEnum(switch_case_condition_node.token) + 1), enum_type)) |enum_field_index| { for (existing_enums.slice()) |existing| { if (enum_field_index == existing) { @@ -17098,17 +17188,19 @@ pub const Unit = struct { } } - try switch_case_group.append(context.my_allocator, enum_field_index); - try existing_enums.append(context.my_allocator, enum_field_index); + var switch_case_group = try context.arena.new_array(Enum.Field.Index, 1); + switch_case_group[0] = enum_field_index; + existing_enums.appendAssumeCapacity(enum_field_index); + break :b switch_case_group; } else { unreachable; } }, - .node_list => { + .node_list => b: { const node_list = unit.getNodeListFromNode(switch_case_condition_node); - try switch_case_group.ensure_capacity(context.my_allocator, @intCast(node_list.len)); + var switch_case_group = try context.arena.new_array(Enum.Field.Index, node_list.len); - for (node_list) |case_condition_node_index| { + for (node_list, 0..) |case_condition_node_index, i| { const case_condition_node = unit.getNode(case_condition_node_index); switch (case_condition_node.id) { .dot_literal => { @@ -17120,8 +17212,8 @@ pub const Unit = struct { } } - try existing_enums.append(context.my_allocator, enum_field_index); - switch_case_group.append_with_capacity(enum_field_index); + existing_enums.appendAssumeCapacity(enum_field_index); + switch_case_group[i] = enum_field_index; } else { unreachable; } @@ -17129,25 +17221,31 @@ pub const Unit = struct { else => |t| @panic(@tagName(t)), } } + + break :b switch_case_group; }, else => |t| @panic(@tagName(t)), - } + }; - result.switch_case_groups.append_with_capacity(switch_case_group); - }, - .null => { - result.else_switch_case_group_index = index; + const i = switch_case_groups.len; + switch_case_groups.len += 1; + switch_case_groups[i] = switch_case_group; }, } } - return result; + assert(switch_case_groups.len + @intFromBool(else_switch_case_group_index != null) == switch_case_node_list.len); + + return TypeCheckSwitchEnums{ + .switch_case_groups = switch_case_groups, + .else_switch_case_group_index = else_switch_case_group_index, + }; } fn typeCheckEnumLiteral(unit: *Unit, context: *const Context, token_index: Token.Index, enum_type: *Enum) !?Enum.Field.Index { const enum_name = unit.getExpectedTokenBytes(token_index, .identifier); const enum_name_hash = try unit.processIdentifier(context, enum_name); - for (enum_type.fields.slice()) |enum_field_index| { + for (enum_type.fields) |enum_field_index| { const enum_field = unit.enum_fields.get(enum_field_index); if (enum_field.name == enum_name_hash) { return enum_field_index; @@ -17304,9 +17402,11 @@ pub const Unit = struct { const bytes = unit.getExpectedTokenBytes(token_index, .string_literal); // Eat double quotes const string_literal_bytes = bytes[1..][0 .. 
bytes.len - 2]; - var fixed_string = try UnpinnedArray(u8).initialize_with_capacity(context.my_allocator, @intCast(string_literal_bytes.len + 1)); var i: usize = 0; + var fixed_string = try context.arena.new_array(u8, string_literal_bytes.len + 1); + fixed_string.len = 0; + while (i < string_literal_bytes.len) : (i += 1) { const ch = string_literal_bytes[i]; switch (ch) { @@ -17314,17 +17414,27 @@ pub const Unit = struct { i += 1; const next_ch = string_literal_bytes[i]; switch (next_ch) { - 'n' => fixed_string.append_with_capacity('\n'), + 'n' => { + const index = fixed_string.len; + fixed_string.len += 1; + fixed_string[index] = '\n'; + }, else => unreachable, } }, - else => fixed_string.append_with_capacity(ch), + else => { + const index = fixed_string.len; + fixed_string.len += 1; + fixed_string[index] = ch; + }, } } - fixed_string.append_with_capacity(0); + const zero_index = fixed_string.len; + fixed_string.len += 1; + fixed_string[zero_index] = 0; - const string = fixed_string.slice()[0 .. fixed_string.length - 1 :0]; + const string = fixed_string[0 .. zero_index :0]; return string; } @@ -17602,10 +17712,11 @@ pub const Unit = struct { } if (!unit.descriptor.only_parse) { - var object_files = try UnpinnedArray(linker.Object).initialize_with_capacity(context.my_allocator, @intCast(unit.descriptor.c_source_files.len + 1)); - object_files.append_with_capacity(.{ + var object_files = try context.arena.new_array(linker.Object, unit.descriptor.c_source_files.len + 1); + object_files[0] = .{ .path = unit.descriptor.object_path, - }); + }; + object_files.len = 1; for (unit.descriptor.c_source_files) |c_source_file| { const dot_index = last_byte(c_source_file, '.') orelse unreachable; @@ -17619,9 +17730,11 @@ pub const Unit = struct { var arguments = [_][]const u8{ "-c", c_source_file, "-o", object_path, "-g", "-fno-stack-protector" }; try compileCSourceFile(context, &arguments, .c); - object_files.append_with_capacity(.{ + const index = object_files.len; + object_files.len += 1; + object_files[index] = .{ .path = object_path, - }); + }; } try unit.analyze(context); @@ -17630,7 +17743,7 @@ pub const Unit = struct { try linker.link(context, .{ .output_file_path = unit.descriptor.executable_path, - .objects = object_files.slice(), + .objects = object_files, .libraries = &.{}, .link_libc = unit.descriptor.link_libc, .link_libcpp = false, diff --git a/bootstrap/backend/llvm.zig b/bootstrap/backend/llvm.zig index f197892..f0960c7 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -1296,7 +1296,7 @@ pub const LLVM = struct { break :blk struct_type.toType(); }, .@"struct" => |*sema_struct_type| blk: { - for (sema_struct_type.fields.slice()) |sema_field_index| { + for (sema_struct_type.fields) |sema_field_index| { const sema_field = unit.struct_fields.get(sema_field_index); const llvm_type = try llvm.getType(unit, context, sema_field.type); type_buffer.appendAssumeCapacity(llvm_type); @@ -1561,7 +1561,7 @@ pub const LLVM = struct { const sema_type = unit.types.get(sema_type_index); const result = switch (sema_type.*) { .integer => |*integer| switch (integer.kind) { - .bitfield => |*bitfield| try llvm.getDebugStructType(unit, context, sema_type_index, &bitfield.scope.scope, bitfield.fields.slice(), name), + .bitfield => |*bitfield| try llvm.getDebugStructType(unit, context, sema_type_index, &bitfield.scope.scope, bitfield.fields, name), .materialized_int => b: { const dwarf_encoding: LLVM.DebugInfo.AttributeType = switch (integer.signedness) { .unsigned => .unsigned, @@ 
-1633,9 +1633,9 @@ pub const LLVM = struct { break :b boolean_type; }, .@"enum" => |*enum_type| b: { - var enumerators = try context.arena.new_array(*LLVM.DebugInfo.Type.Enumerator, enum_type.fields.length); + var enumerators = try context.arena.new_array(*LLVM.DebugInfo.Type.Enumerator, enum_type.fields.len); enumerators.len = 0; - for (enum_type.fields.slice()) |enum_field_index| { + for (enum_type.fields) |enum_field_index| { const enum_field = unit.enum_fields.get(enum_field_index); const enum_field_name = unit.getIdentifier(enum_field.name); @@ -1693,7 +1693,7 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), }, .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { - .@"struct" => |*sema_struct_type| try llvm.getDebugStructType(unit, context, sema_type_index, &sema_struct_type.scope.scope, sema_struct_type.fields.slice(), name), + .@"struct" => |*sema_struct_type| try llvm.getDebugStructType(unit, context, sema_type_index, &sema_struct_type.scope.scope, sema_struct_type.fields, name), else => |t| @panic(@tagName(t)), }, .pointer => |pointer| b: { @@ -2131,7 +2131,7 @@ pub const LLVM = struct { switch (sema_struct.kind) { .@"struct" => |*sema_struct_type| { - for (constant_struct.fields, sema_struct_type.fields.slice()) |field_value, field_index| { + for (constant_struct.fields, sema_struct_type.fields) |field_value, field_index| { const field = unit.struct_fields.get(field_index); const constant = try llvm.emitComptimeRightValue(unit, context, field_value, field.type); field_values.appendAssumeCapacity(constant); @@ -3091,7 +3091,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }, .phi => |phi| { const phi_type = try llvm.getType(unit, context, phi.type); - const reserved_value_count: c_uint = @intCast(phi.values.length); + const reserved_value_count: c_uint = @intCast(phi.values.len); const phi_name = "phi"; const phi_node = llvm.builder.createPhi(phi_type, reserved_value_count, phi_name, phi_name.len) orelse unreachable; @@ -3148,7 +3148,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo var basic_block_array = BoundedArray(*LLVM.Value.BasicBlock, 4096){}; var condition_array = BoundedArray(*LLVM.Value.Constant.Int, 4096){}; - for (switch_expression.cases.pointer[0..switch_expression.cases.length]) |case| { + for (switch_expression.cases) |case| { const constant_value = try llvm.emitComptimeRightValue(unit, context, case.condition, switch_expression.condition.type); const constant_int = constant_value.toInt() orelse unreachable; const block = if (llvm.llvm_block_map.get(case.basic_block)) |bb| bb else b: { @@ -3192,7 +3192,9 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo for (phis.keys(), phis.values()) |instruction_index, phi| { const instruction = unit.instructions.get(instruction_index); const sema_phi = &instruction.phi; - for (sema_phi.values.slice(), sema_phi.basic_blocks.slice()) |sema_value, sema_block| { + for (sema_phi.values.slice()) |v| { + const sema_value = v.value; + const sema_block = v.basic_block; assert(sema_value.type == sema_phi.type); const value_basic_block = llvm.llvm_block_map.get(sema_block).?; const value = llvm.llvm_value_map.get(sema_value) orelse try llvm.emitRightValue(unit, context, sema_value); diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 070b30c..993cb04 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -82,6 +82,44 @@ pub const Arena = struct{ } }; +pub fn 
DynamicBoundedArray(comptime T: type) type { + return struct{ + pointer: [*]T = @constCast((&[_]T{}).ptr), + length: u32 = 0, + capacity: u32 = 0, + + const Array = @This(); + + pub fn init(arena: *Arena, count: u32) !Array { + const array = try arena.new_array(T, count); + return Array{ + .pointer = array.ptr, + .length = 0, + .capacity = count, + }; + } + + pub fn append(array: *Array, item: T) void { + const index = array.length; + assert(index < array.capacity); + array.pointer[index] = item; + array.length += 1; + } + + pub fn append_slice(array: *Array, items: []const T) void { + const count: u32 = @intCast(items.len); + const index = array.length; + assert(index + count <= array.capacity); + @memcpy(array.pointer[index..][0..count], items); + array.length += count; + } + + pub fn slice(array: *Array) []T{ + return array.pointer[0..array.length]; + } + }; +} + const pinned_array_page_size = 2 * 1024 * 1024; const pinned_array_max_size = std.math.maxInt(u32) - pinned_array_page_size; const pinned_array_default_granularity = pinned_array_page_size; @@ -181,20 +219,21 @@ pub fn PinnedArray(comptime T: type) type { } pub fn reserve(size: u64) ![*]u8{ - const slice = switch (os) { - .linux, .macos => try std.posix.mmap(null, size, std.posix.PROT.NONE, .{ + return switch (os) { + .linux, .macos => (try std.posix.mmap(null, size, std.posix.PROT.NONE, .{ .ANONYMOUS = true, .TYPE = .PRIVATE, - }, -1, 0), + }, -1, 0)).ptr, + .windows => @ptrCast(try std.os.windows.VirtualAlloc(null, size, std.os.windows.MEM_RESERVE, std.os.windows.PAGE_READWRITE)), else => @compileError("OS not supported"), }; - return slice.ptr; } pub fn commit(bytes: [*]u8, size: u64) !void{ const slice = bytes[0..size]; return switch (os) { .linux, .macos => try std.posix.mprotect(@alignCast(slice), std.posix.PROT.WRITE | std.posix.PROT.READ), + .windows => _ = try std.os.windows.VirtualAlloc(bytes, size, std.os.windows.MEM_COMMIT, std.os.windows.PAGE_READWRITE), else => @compileError("OS not supported"), }; } From 5539f1e9043e701fe27485baa80b317919215405 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 18:43:26 -0600 Subject: [PATCH 08/14] Delete old hashmap implementation --- bootstrap/Compilation.zig | 238 +++++++++++++++++++++++-------------- bootstrap/backend/llvm.zig | 153 +++++++++++++----------- bootstrap/library.zig | 167 +++++++++++++++----------- 3 files changed, 331 insertions(+), 227 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index 0d6e8ae..1668e94 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -19,7 +19,7 @@ const PinnedArray = library.PinnedArray; const UnpinnedArray = library.UnpinnedArray; const BlockList = library.BlockList; const MyAllocator = library.MyAllocator; -const MyHashMap = library.MyHashMap; +const PinnedHashMap = library.PinnedHashMap; const span = library.span; const format_int = library.format_int; const my_hash = library.my_hash; @@ -130,6 +130,24 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .node_buffer = try PinnedArray(Node).init_with_default_granularity(), .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), + .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), + .identifiers = try PinnedHashMap(u32, []const 
u8).init(std.mem.page_size), + .string_literal_values = try PinnedHashMap(u32, [:0]const u8).init(std.mem.page_size), + .string_literal_globals = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), + .optionals = try PinnedHashMap(Type.Index, Type.Index).init(std.mem.page_size), + .pointers = try PinnedHashMap(Type.Pointer, Type.Index).init(std.mem.page_size), + .slices = try PinnedHashMap(Type.Slice, Type.Index).init(std.mem.page_size), + .arrays = try PinnedHashMap(Type.Array, Type.Index).init(std.mem.page_size), + .integers = try PinnedHashMap(Type.Integer, Type.Index).init(std.mem.page_size), + .error_unions = try PinnedHashMap(Type.Error.Union.Descriptor, Type.Index).init(std.mem.page_size), + .two_structs = try PinnedHashMap([2]Type.Index, Type.Index).init(std.mem.page_size), + .fields_array = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .name_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .external_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), + .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), }; try unit.compile(context); @@ -2941,7 +2959,7 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o if (i + 1 != arguments.len) { i += 1; - c_source_files.appendSliceAssumeCapacity( arguments[i..]); + c_source_files.appendSliceAssumeCapacity(arguments[i..]); i = arguments.len; } else { reportUnterminatedArgumentError(current_argument); @@ -3026,6 +3044,24 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o .node_buffer = try PinnedArray(Node).init_with_default_granularity(), .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), + .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), + .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), + .string_literal_values = try PinnedHashMap(u32, [:0]const u8).init(std.mem.page_size), + .string_literal_globals = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), + .optionals = try PinnedHashMap(Type.Index, Type.Index).init(std.mem.page_size), + .pointers = try PinnedHashMap(Type.Pointer, Type.Index).init(std.mem.page_size), + .slices = try PinnedHashMap(Type.Slice, Type.Index).init(std.mem.page_size), + .arrays = try PinnedHashMap(Type.Array, Type.Index).init(std.mem.page_size), + .integers = try PinnedHashMap(Type.Integer, Type.Index).init(std.mem.page_size), + .error_unions = try PinnedHashMap(Type.Error.Union.Descriptor, Type.Index).init(std.mem.page_size), + .two_structs = try PinnedHashMap([2]Type.Index, Type.Index).init(std.mem.page_size), + .fields_array = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .name_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .external_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + 
.type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), + .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), }; try unit.compile(context); @@ -3052,10 +3088,10 @@ pub const Package = struct { directory: Directory, /// Relative to the package main directory source_path: []const u8, - dependencies: MyHashMap([]const u8, *Package) = .{}, + dependencies: PinnedHashMap([]const u8, *Package), - fn addDependency(package: *Package, allocator: *MyAllocator, package_name: []const u8, new_dependency: *Package) !void { - try package.dependencies.put_no_clobber(allocator, package_name, new_dependency); + fn addDependency(package: *Package, package_name: []const u8, new_dependency: *Package) !void { + try package.dependencies.put_no_clobber(package_name, new_dependency); } }; @@ -3298,6 +3334,7 @@ const _usize: Type.Index = .u64; const _ssize: Type.Index = .s64; fn serialize_comptime_parameters(unit: *Unit, context: *const Context, original_declaration: *Debug.Declaration, parameters: []const V.Comptime) !u32 { + _ = context; // autofix var name = BoundedArray(u8, 4096){}; const original_name = unit.getIdentifier(original_declaration.name); name.appendSliceAssumeCapacity(original_name); @@ -3339,7 +3376,7 @@ fn serialize_comptime_parameters(unit: *Unit, context: *const Context, original_ const hash = my_hash(name.slice()); // Don't allocate memory if not necessary if (unit.identifiers.get(hash) == null) { - try unit.identifiers.put_no_clobber(context.my_allocator, hash, name.slice()); + try unit.identifiers.put_no_clobber(hash, name.slice()); } return hash; @@ -3364,7 +3401,7 @@ pub const Type = union(enum) { pub const Polymorphic = struct { parameters: []const Token.Index, - instantiations: MyHashMap(u32, *Debug.Declaration.Global) = .{}, + instantiations: PinnedHashMap(u32, *Debug.Declaration.Global), node: Node.Index, pub fn get_instantiation(polymorphic: *Polymorphic, types: []const V.Comptime) ?*Debug.Declaration.Global { @@ -3395,7 +3432,7 @@ pub const Type = union(enum) { const new_declaration = unit.global_declarations.get(new_declaration_index); const parameter_hash = hash(parameters); - try polymorphic.instantiations.put_no_clobber(context.my_allocator, parameter_hash, new_declaration); + try polymorphic.instantiations.put_no_clobber(parameter_hash, new_declaration); } fn hash(types: []const V.Comptime) u32 { @@ -3735,7 +3772,7 @@ pub const Instruction = union(enum) { pub const max_value_count = 32; - const Value = struct{ + const Value = struct { value: V, basic_block: BasicBlock.Index, }; @@ -4111,7 +4148,7 @@ pub fn joinPath(context: *const Context, a: []const u8, b: []const u8) ![]const pub const PolymorphicFunction = struct { parameters: []const ComptimeParameterDeclaration, - instantiations: MyHashMap(u32, *Debug.Declaration.Global) = .{}, + instantiations: PinnedHashMap(u32, *Debug.Declaration.Global), node: Node.Index, is_member_call: bool, @@ -4150,7 +4187,7 @@ pub const PolymorphicFunction = struct { const new_declaration = unit.global_declarations.get(new_declaration_index); const parameter_hash = hash(parameters); - try polymorphic_function.instantiations.put_no_clobber(context.my_allocator, parameter_hash, new_declaration); + try polymorphic_function.instantiations.put_no_clobber(parameter_hash, new_declaration); return new_declaration; } @@ -4298,7 +4335,7 
@@ pub const Debug = struct { }; pub const Scope = struct { - declarations: MyHashMap(u32, *Declaration) = .{}, + declarations: PinnedHashMap(u32, *Declaration), parent: ?*Scope = null, file: File.Index, line: u32, @@ -4314,7 +4351,7 @@ pub const Debug = struct { pub const Local = struct { scope: Scope, - local_declaration_map: MyHashMap(*Debug.Declaration.Local, Instruction.Index) = .{}, + local_declaration_map: PinnedHashMap(*Debug.Declaration.Local, Instruction.Index), }; pub const Global = struct { @@ -4324,8 +4361,8 @@ pub const Debug = struct { pub const Function = struct { scope: Scope, - argument_map: MyHashMap(*Debug.Declaration.Argument, Instruction.Index) = .{}, - // comptime_parameters: MyHashMap(*Debug.Declaration.Argument, + argument_map: PinnedHashMap(*Debug.Declaration.Argument, Instruction.Index), + // comptime_parameters: PinnedArray(*Debug.Declaration.Argument, }; fn lookupDeclaration(s: *Scope, name: u32, look_in_parent_scopes: bool) ?Lookup { @@ -4565,7 +4602,7 @@ pub const Builder = struct { const error_union_type_index = try unit.types.append(context.my_allocator, .{ .@"struct" = error_union_struct_index, }); - try unit.error_unions.put_no_clobber(context.my_allocator, error_union, error_union_type_index); + try unit.error_unions.put_no_clobber(error_union, error_union_type_index); return error_union_type_index; } @@ -4597,7 +4634,7 @@ pub const Builder = struct { } else { const string_name = try join_name(context, "__anon_str_", possible_id, 10); const identifier = try unit.processIdentifier(context, string_name); - try unit.string_literal_values.put_no_clobber(context.my_allocator, hash, string); + try unit.string_literal_values.put_no_clobber(hash, string); const string_global_index = try unit.global_declarations.append(context.my_allocator, .{ .declaration = .{ @@ -4637,7 +4674,7 @@ pub const Builder = struct { const string_global = unit.global_declarations.get(string_global_index); - try unit.string_literal_globals.put_no_clobber(context.my_allocator, hash, string_global); + try unit.string_literal_globals.put_no_clobber(hash, string_global); _ = unit.data_to_emit.append(string_global); @@ -4825,7 +4862,7 @@ pub const Builder = struct { .destination = destination, .source = v, }, - }); + }); try builder.appendInstruction(unit, context, store); const load = try unit.instructions.append(context.my_allocator, .{ @@ -4833,7 +4870,7 @@ pub const Builder = struct { .value = destination, .type = type_index, }, - }); + }); try builder.appendInstruction(unit, context, load); return V{ @@ -5177,7 +5214,9 @@ pub const Builder = struct { .kind = .function, .local = true, .level = builder.current_scope.level + 1, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, + .argument_map = try PinnedHashMap(*Debug.Declaration.Argument, Instruction.Index).init(std.mem.page_size), }, .type = function_type_index, .body = .null, @@ -5212,7 +5251,7 @@ pub const Builder = struct { comptime assert(@TypeOf(argument_declaration_index) == Debug.Declaration.Argument.Index); const argument = unit.argument_declarations.get(argument_declaration_index); - try builder.current_scope.declarations.put_no_clobber(context.my_allocator, argument_name_hash, &argument.declaration); + try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument.declaration); const entry_block = try builder.newBasicBlock(unit, context); const exit_block = try builder.newBasicBlock(unit, context); @@ -5327,8 +5366,8 @@ pub const Builder = struct { const global = 
unit.global_declarations.get(global_index); - try unit.code_to_emit.put_no_clobber(context.my_allocator, function_definition_index, global); - try unit.name_functions.put_no_clobber(context.my_allocator, type_index, global); + try unit.code_to_emit.put_no_clobber(function_definition_index, global); + try unit.name_functions.put_no_clobber(type_index, global); return global; } @@ -5383,7 +5422,7 @@ pub const Builder = struct { const global_declaration = unit.global_declarations.get(global_declaration_index); _ = unit.data_to_emit.append(global_declaration); - try unit.fields_array.put_no_clobber(context.my_allocator, container_type_index, global_declaration); + try unit.fields_array.put_no_clobber(container_type_index, global_declaration); return global_declaration; }, @@ -5807,13 +5846,13 @@ pub const Builder = struct { const function_definition_global = polymorphic_function.instantiations.values()[0]; assert(function_definition_global.initial_value == .function_definition); - try unit.code_to_emit.put_no_clobber(context.my_allocator, function_definition_global.initial_value.function_definition, function_definition_global); + try unit.code_to_emit.put_no_clobber(function_definition_global.initial_value.function_definition, function_definition_global); return function_definition_global; }, .function_definition => |function_definition_index| { switch (unit.getNode(declaration_node_index).id) { - .function_definition => try unit.code_to_emit.put_no_clobber(context.my_allocator, function_definition_index, global_declaration), + .function_definition => try unit.code_to_emit.put_no_clobber(function_definition_index, global_declaration), else => { const actual_function_declaration = unit.code_to_emit.get(function_definition_index).?; global_declaration.initial_value = .{ @@ -5824,7 +5863,7 @@ pub const Builder = struct { }, .function_declaration => |function_type| { switch (unit.getNode(declaration_node_index).id) { - .function_prototype => try unit.external_functions.put_no_clobber(context.my_allocator, function_type, global_declaration), + .function_prototype => try unit.external_functions.put_no_clobber(function_type, global_declaration), else => { const actual_function_declaration = unit.external_functions.get(function_type).?; global_declaration.initial_value = .{ @@ -5841,7 +5880,7 @@ pub const Builder = struct { unreachable; } }, - else => unit.type_declarations.put(context.my_allocator, type_index, global_declaration) catch { + else => unit.type_declarations.put(type_index, global_declaration) catch { assert(unit.type_declarations.get(type_index).? 
== global_declaration); }, } @@ -5856,7 +5895,7 @@ pub const Builder = struct { .polymorphic_function => |*polymorphic_function| { const instantiation_value = try builder.resolveComptimeValue(unit, context, Type.Expect.none, global_declaration.attributes, polymorphic_function.node, global_declaration, .right, new_parameters, maybe_member_value, polymorphic_argument_nodes); const instantiation_global = instantiation_value.global; - try unit.code_to_emit.put(context.my_allocator, instantiation_global.initial_value.function_definition, instantiation_global); + try unit.code_to_emit.put(instantiation_global.initial_value.function_definition, instantiation_global); return instantiation_global; }, @@ -7274,6 +7313,7 @@ pub const Builder = struct { .local = false, .level = builder.current_scope.level + 1, .parent = &unit.scope.scope, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, }, .id = std.math.maxInt(u32), @@ -7482,7 +7522,7 @@ pub const Builder = struct { }); const comptime_parameter = unit.global_declarations.get(comptime_parameter_index); - try builder.current_scope.declarations.put_no_clobber(context.my_allocator, name_hash, &comptime_parameter.declaration); + try builder.current_scope.declarations.put_no_clobber(name_hash, &comptime_parameter.declaration); }, else => |t| @panic(@tagName(t)), }, @@ -7591,7 +7631,7 @@ pub const Builder = struct { comptime assert(@TypeOf(argument_declaration_index) == Debug.Declaration.Argument.Index); const argument = unit.argument_declarations.get(argument_declaration_index); - try builder.current_scope.declarations.put_no_clobber(context.my_allocator, argument_name_hash, &argument.declaration); + try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument.declaration); } fn classify_argument_type_aarch64(builder: *Builder, unit: *Unit, context: *const Context, type_index: Type.Index) Function.AbiInfo { @@ -7928,7 +7968,7 @@ pub const Builder = struct { function_prototype.abi.return_type_abi = return_abi; const abi_infos = try context.arena.new_array(Function.AbiInfo, parameter_types_abi.len); @memcpy(abi_infos, parameter_types_abi.slice()); - function_prototype.abi.parameter_types_abi = abi_infos; + function_prototype.abi.parameter_types_abi = abi_infos; } const Class_SystemVx86_64 = enum { @@ -8482,6 +8522,7 @@ pub const Builder = struct { .level = builder.current_scope.level + 1, .local = false, .file = builder.current_file, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, }, .options = .{}, @@ -8572,7 +8613,7 @@ pub const Builder = struct { .attributes = .{}, }); const global_declaration = unit.global_declarations.get(global_declaration_index); - try struct_type.kind.@"struct".scope.scope.declarations.put_no_clobber(context.my_allocator, hash, &global_declaration.declaration); + try struct_type.kind.@"struct".scope.scope.declarations.put_no_clobber(hash, &global_declaration.declaration); } const polymorphic_type_index = switch (parameter_types.len > 0) { @@ -8585,6 +8626,7 @@ pub const Builder = struct { break :param heap_parameter_types; }, .node = container_node_index, + .instantiations = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), }, }); const polymorphic_type = &unit.types.get(polymorphic_type_index).polymorphic; @@ -8648,6 +8690,7 @@ pub const Builder = struct { .level = builder.current_scope.level + 1, .local = false, .file = builder.current_file, + .declarations = try PinnedHashMap(u32, 
*Debug.Declaration).init(std.mem.page_size), }, }, }, @@ -8694,6 +8737,7 @@ pub const Builder = struct { .level = builder.current_scope.level + 1, .local = false, .file = builder.current_file, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, }, }, @@ -8853,7 +8897,7 @@ pub const Builder = struct { }); const global_declaration = unit.global_declarations.get(global_declaration_index); - try builder.current_scope.declarations.put_no_clobber(context.my_allocator, identifier_hash, &global_declaration.declaration); + try builder.current_scope.declarations.put_no_clobber(identifier_hash, &global_declaration.declaration); }, else => unreachable, } @@ -9078,15 +9122,15 @@ pub const Builder = struct { const test_global = unit.global_declarations.get(test_global_index); - try scope.scope.declarations.put_no_clobber(context.my_allocator, name_hash, &test_global.declaration); + try scope.scope.declarations.put_no_clobber(name_hash, &test_global.declaration); - try unit.test_functions.put_no_clobber(context.my_allocator, test_name_global, test_global); + try unit.test_functions.put_no_clobber(test_name_global, test_global); - try unit.code_to_emit.put_no_clobber(context.my_allocator, comptime_value.function_definition, test_global); + try unit.code_to_emit.put_no_clobber(comptime_value.function_definition, test_global); } } - for (builder.current_scope.declarations.values()) |declaration|{ + for (builder.current_scope.declarations.values()) |declaration| { const global_declaration: *Debug.Declaration.Global = @fieldParentPtr("declaration", declaration); if (global_declaration.attributes.contains(.@"export")) { const result = try builder.referenceGlobalDeclaration(unit, context, &scope.scope, declaration, .{}, &.{}, null, &.{}); @@ -9175,7 +9219,9 @@ pub const Builder = struct { .local = true, .level = builder.current_scope.level + 1, .file = builder.current_file, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, + .argument_map = try PinnedHashMap(*Debug.Declaration.Argument, Instruction.Index).init(std.mem.page_size), }, .has_debug_info = true, }); @@ -9498,7 +9544,7 @@ pub const Builder = struct { // else => |t| @panic(@tagName(t)), }; - try function.scope.argument_map.put_no_clobber(context.my_allocator, argument_declaration, stack); + try function.scope.argument_map.put_no_clobber(argument_declaration, stack); const debug_declare_argument = try unit.instructions.append(context.my_allocator, .{ .debug_declare_argument = .{ @@ -9672,6 +9718,7 @@ pub const Builder = struct { .node = function_node_index, .parameters = comptime_parameter_declarations, .is_member_call = is_member_call, + .instantiations = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), }; _ = try polymorphic_function.add_instantiation(unit, context, comptime_parameter_instantiations, maybe_global orelse unreachable, current_function); return V.Comptime{ @@ -9909,6 +9956,7 @@ pub const Builder = struct { .local = false, .level = builder.current_scope.level + 1, .parent = builder.current_scope, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, }, .fields = try DynamicBoundedArray(Type.Error.Field.Index).init(context.arena, @intCast(nodes.len)), @@ -10868,7 +10916,7 @@ pub const Builder = struct { .expression = expression_to_slice, .index = 0, }, - }); + }); try builder.appendInstruction(unit, context, extract_pointer); const gep = try unit.instructions.append(context.my_allocator, .{ @@ -10879,7 
+10927,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_comptime_expression_slice"), .is_struct = false, }, - }); + }); try builder.appendInstruction(unit, context, gep); break :slice V{ @@ -10950,7 +10998,7 @@ pub const Builder = struct { .value = expression_to_slice, .type = pointer.type, }, - }); + }); try builder.appendInstruction(unit, context, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -10989,7 +11037,7 @@ pub const Builder = struct { .insert_value = .{ .expression = .{ .value = .{ - .@"comptime" = .@"undefined", + .@"comptime" = .undefined, }, .type = destination_type_index, }, @@ -11667,7 +11715,7 @@ pub const Builder = struct { .@"comptime" = .{ .string_literal = hash, }, - }, + }, .type = ty, }; }, @@ -14225,14 +14273,14 @@ pub const Builder = struct { const local_declaration = unit.local_declarations.get(declaration_index); assert(builder.current_scope.kind == .block); - try builder.current_scope.declarations.put_no_clobber(context.my_allocator, identifier_hash, &local_declaration.declaration); + try builder.current_scope.declarations.put_no_clobber(identifier_hash, &local_declaration.declaration); if (emit) { const stack = try builder.createStackVariable(unit, context, declaration_type, null); assert(builder.current_scope.kind == .block); const local_scope: *Debug.Scope.Local = @fieldParentPtr("scope", builder.current_scope); - try local_scope.local_declaration_map.put_no_clobber(context.my_allocator, local_declaration, stack); + try local_scope.local_declaration_map.put_no_clobber(local_declaration, stack); const debug_declare_local = try unit.instructions.append(context.my_allocator, .{ .debug_declare_local_variable = .{ @@ -14276,7 +14324,9 @@ pub const Builder = struct { .level = builder.current_scope.level + 1, .local = builder.current_scope.local, .file = builder.current_file, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), }, + .local_declaration_map = try PinnedHashMap(*Debug.Declaration.Local, Instruction.Index).init(std.mem.page_size), }, }); @@ -16323,7 +16373,7 @@ pub const Builder = struct { .value = result, .type = ti, }, - }); + }); try builder.appendInstruction(unit, context, zero_extend); return .{ @@ -16933,28 +16983,27 @@ pub const Unit = struct { constant_arrays: V.Comptime.ConstantArray.List = .{}, constant_slices: V.Comptime.ConstantSlice.List = .{}, error_fields: Type.Error.Field.List = .{}, - file_token_offsets: MyHashMap(Token.Range, Debug.File.Index) = .{}, - file_map: MyHashMap([]const u8, Debug.File.Index) = .{}, - identifiers: MyHashMap(u32, []const u8) = .{}, - string_literal_values: MyHashMap(u32, [:0]const u8) = .{}, - string_literal_globals: MyHashMap(u32, *Debug.Declaration.Global) = .{}, + file_token_offsets: PinnedHashMap(Token.Range, Debug.File.Index), + file_map: PinnedHashMap([]const u8, Debug.File.Index), + identifiers: PinnedHashMap(u32, []const u8), + string_literal_values: PinnedHashMap(u32, [:0]const u8), + string_literal_globals: PinnedHashMap(u32, *Debug.Declaration.Global), - optionals: MyHashMap(Type.Index, Type.Index) = .{}, - pointers: MyHashMap(Type.Pointer, Type.Index) = .{}, - slices: MyHashMap(Type.Slice, Type.Index) = .{}, - arrays: MyHashMap(Type.Array, Type.Index) = .{}, - integers: MyHashMap(Type.Integer, Type.Index) = .{}, - error_unions: MyHashMap(Type.Error.Union.Descriptor, Type.Index) = .{}, - two_structs: MyHashMap([2]Type.Index, Type.Index) = .{}, - fields_array: 
MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, - name_functions: MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, - error_count: u32 = 0, + optionals: PinnedHashMap(Type.Index, Type.Index), + pointers: PinnedHashMap(Type.Pointer, Type.Index), + slices: PinnedHashMap(Type.Slice, Type.Index), + arrays: PinnedHashMap(Type.Array, Type.Index), + integers: PinnedHashMap(Type.Integer, Type.Index), + error_unions: PinnedHashMap(Type.Error.Union.Descriptor, Type.Index), + two_structs: PinnedHashMap([2]Type.Index, Type.Index), + fields_array: PinnedHashMap(Type.Index, *Debug.Declaration.Global), + name_functions: PinnedHashMap(Type.Index, *Debug.Declaration.Global), - code_to_emit: MyHashMap(Function.Definition.Index, *Debug.Declaration.Global) = .{}, + external_functions: PinnedHashMap(Type.Index, *Debug.Declaration.Global), + type_declarations: PinnedHashMap(Type.Index, *Debug.Declaration.Global), + test_functions: PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global), + code_to_emit: PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global), data_to_emit: PinnedArray(*Debug.Declaration.Global), - external_functions: MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, - type_declarations: MyHashMap(Type.Index, *Debug.Declaration.Global) = .{}, - test_functions: MyHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global) = .{}, scope: Debug.Scope.Global = .{ .scope = .{ .file = .null, @@ -16963,6 +17012,13 @@ pub const Unit = struct { .column = 0, .level = 0, .local = false, + .declarations = .{ + .key_pointer = undefined, + .value_pointer = undefined, + .length = 0, + .granularity = 0, + .committed = 0, + }, }, }, root_package: *Package = undefined, @@ -16975,6 +17031,7 @@ pub const Unit = struct { discard_identifiers: usize = 0, anon_i: usize = 0, anon_arr: usize = 0, + error_count: u32 = 0, fn dumpInstruction(instruction_index: Instruction.Index) !void { try write(.ir, "%"); @@ -17309,7 +17366,7 @@ pub const Unit = struct { .@"struct" = optional_struct_index, }); - try unit.optionals.put_no_clobber(context.my_allocator, element_type, optional_type_index); + try unit.optionals.put_no_clobber(element_type, optional_type_index); return optional_type_index; } @@ -17322,7 +17379,7 @@ pub const Unit = struct { const type_index = try unit.types.append(context.my_allocator, .{ .pointer = pointer, }); - try unit.pointers.put_no_clobber(context.my_allocator, pointer, type_index); + try unit.pointers.put_no_clobber(pointer, type_index); return type_index; } @@ -17335,7 +17392,7 @@ pub const Unit = struct { const type_index = try unit.types.append(context.my_allocator, .{ .slice = slice, }); - try unit.slices.put_no_clobber(context.my_allocator, slice, type_index); + try unit.slices.put_no_clobber(slice, type_index); return type_index; } @@ -17349,7 +17406,7 @@ pub const Unit = struct { const array_type = try unit.types.append(context.my_allocator, .{ .array = array, }); - try unit.arrays.put_no_clobber(context.my_allocator, array, array_type); + try unit.arrays.put_no_clobber(array, array_type); return array_type; } @@ -17381,7 +17438,7 @@ pub const Unit = struct { const type_index = try unit.types.append(context.my_allocator, .{ .integer = integer, }); - try unit.integers.put_no_clobber(context.my_allocator, integer, type_index); + try unit.integers.put_no_clobber(integer, type_index); return type_index; } }, @@ -17391,9 +17448,10 @@ pub const Unit = struct { } fn processIdentifier(unit: *Unit, context: *const Context, string: []const u8) !u32 { + _ = 
context; // autofix const hash = my_hash(string); if (unit.identifiers.get_pointer(hash) == null) { - try unit.identifiers.put_no_clobber(context.my_allocator, hash, string); + try unit.identifiers.put_no_clobber(hash, string); } return hash; } @@ -17434,7 +17492,7 @@ pub const Unit = struct { fixed_string.len += 1; fixed_string[zero_index] = 0; - const string = fixed_string[0 .. zero_index :0]; + const string = fixed_string[0..zero_index :0]; return string; } @@ -17508,7 +17566,7 @@ pub const Unit = struct { file.lexer = try lexer.analyze(file.source_code, &unit.token_buffer); assert(file.status == .loaded_into_memory); file.status = .lexed; - try unit.file_token_offsets.put_no_clobber(context.my_allocator, .{ + try unit.file_token_offsets.put_no_clobber(.{ .start = file.lexer.offset, .count = file.lexer.count, }, file_index); @@ -17582,18 +17640,21 @@ pub const Unit = struct { const file_index = try unit.files.append(context.my_allocator, Debug.File{ .relative_path = relative_path, .package = package, - .scope = .{ .scope = .{ - .file = .null, - .kind = .file, - .line = 0, - .column = 0, - .local = false, - .level = 1, - } }, + .scope = .{ + .scope = .{ + .file = .null, + .kind = .file, + .line = 0, + .column = 0, + .local = false, + .level = 1, + .declarations = try PinnedHashMap(u32, *Debug.Declaration).init(std.mem.page_size), + }, + }, }); // logln(.compilation, .new_file, "Adding file #{}: {s}\n", .{ file_index, full_path }); - try unit.file_map.put_no_clobber(context.my_allocator, full_path, file_index); + try unit.file_map.put_no_clobber(full_path, file_index); return .{ .index = file_index, @@ -17642,6 +17703,7 @@ pub const Unit = struct { .path = main_package_absolute_directory_path, }, .source_path = try context.my_allocator.duplicate_bytes(std.fs.path.basename(unit.descriptor.main_package_path)), + .dependencies = try PinnedHashMap([]const u8, *Package).init(std.mem.page_size), }; break :blk result; }; @@ -17655,6 +17717,7 @@ pub const Unit = struct { .path = directory_path, }, .source_path = "test_runner.nat", + .dependencies = try PinnedHashMap([]const u8, *Package).init(std.mem.page_size), }; unit.main_package = main_package; @@ -17689,9 +17752,10 @@ pub const Unit = struct { .handle = try std.fs.openDirAbsolute(package_descriptor.directory_path, .{}), }, .source_path = try std.mem.concat(context.allocator, u8, &.{ package_descriptor.name, ".nat" }), + .dependencies = try PinnedHashMap([]const u8, *Package).init(std.mem.page_size), }; - try unit.root_package.addDependency(context.my_allocator, package_descriptor.name, package); + try unit.root_package.addDependency(package_descriptor.name, package); package_ptr.* = package; } @@ -17763,7 +17827,7 @@ pub const Unit = struct { .@"struct" = two_struct, }); - try unit.two_structs.put_no_clobber(context.my_allocator, types, type_index); + try unit.two_structs.put_no_clobber(types, type_index); return type_index; } @@ -17838,11 +17902,11 @@ pub const Token = struct { length: u32, id: Token.Id, - pub const Buffer = struct{ + pub const Buffer = struct { line_offsets: PinnedArray(u32) = .{}, tokens: PinnedArray(Token) = .{}, }; - + pub const Id = enum { keyword_unsigned_integer, keyword_signed_integer, diff --git a/bootstrap/backend/llvm.zig b/bootstrap/backend/llvm.zig index f0960c7..220eb91 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -6,10 +6,11 @@ const write = Compilation.write; // const log = Compilation.log; // const logln = Compilation.logln; const Module = Compilation.Module; -const 
data_structures = @import("../library.zig"); -const BoundedArray = data_structures.BoundedArray; -const MyHashMap = data_structures.MyHashMap; -const PinnedArray = data_structures.PinnedArray; +const library = @import("../library.zig"); +const BoundedArray = library.BoundedArray; +const PinnedHashMap = library.PinnedHashMap; +const PinnedArray = library.PinnedArray; +const format_int = library.format_int; pub const bindings = @import("llvm_bindings.zig"); @@ -28,23 +29,23 @@ pub const LLVM = struct { module: *LLVM.Module, builder: *LLVM.Builder, debug_info_builder: *LLVM.DebugInfo.Builder, - debug_info_file_map: MyHashMap(Compilation.Debug.File.Index, *LLVM.DebugInfo.File) = .{}, - debug_type_map: MyHashMap(Compilation.Type.Index, *LLVM.DebugInfo.Type) = .{}, - type_name_map: MyHashMap(Compilation.Type.Index, []const u8) = .{}, - type_map: MyHashMap(Compilation.Type.Index, *LLVM.Type) = .{}, - function_declaration_map: MyHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function) = .{}, - function_definition_map: MyHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function) = .{}, - llvm_instruction_map: MyHashMap(Compilation.Instruction.Index, *LLVM.Value) = .{}, - llvm_value_map: MyHashMap(Compilation.V, *LLVM.Value) = .{}, - llvm_block_map: MyHashMap(Compilation.BasicBlock.Index, *LLVM.Value.BasicBlock) = .{}, - llvm_external_functions: MyHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function) = .{}, - global_variable_map: MyHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.GlobalVariable) = .{}, - scope_map: MyHashMap(*Compilation.Debug.Scope, *LLVM.DebugInfo.Scope) = .{}, + debug_info_file_map: PinnedHashMap(Compilation.Debug.File.Index, *LLVM.DebugInfo.File), + debug_type_map: PinnedHashMap(Compilation.Type.Index, *LLVM.DebugInfo.Type), + type_name_map: PinnedHashMap(Compilation.Type.Index, []const u8), + type_map: PinnedHashMap(Compilation.Type.Index, *LLVM.Type), + function_declaration_map: PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function), + function_definition_map: PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function), + llvm_instruction_map: PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value), + llvm_value_map: PinnedHashMap(Compilation.V, *LLVM.Value), + llvm_block_map: PinnedHashMap(Compilation.BasicBlock.Index, *LLVM.Value.BasicBlock), + llvm_external_functions: PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function), + global_variable_map: PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.GlobalVariable), + scope_map: PinnedHashMap(*Compilation.Debug.Scope, *LLVM.DebugInfo.Scope), + argument_allocas: PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value), pointer_type: ?*LLVM.Type.Pointer = null, function: *LLVM.Value.Constant.Function = undefined, exit_block: *LLVM.Value.BasicBlock = undefined, sema_function: *Compilation.Debug.Declaration.Global = undefined, - argument_allocas: MyHashMap(Compilation.Instruction.Index, *LLVM.Value) = .{}, return_phi_node: ?*LLVM.Value.Instruction.PhiNode = null, scope: *LLVM.DebugInfo.Scope = undefined, file: *LLVM.DebugInfo.File = undefined, @@ -1333,7 +1334,7 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), }; - try llvm.type_map.put_no_clobber(context.my_allocator, type_index, llvm_type); + try llvm.type_map.put_no_clobber(type_index, llvm_type); return llvm_type; } @@ -1349,7 +1350,7 @@ pub const LLVM = struct { const filename 
= std.fs.path.basename(full_path); const directory = full_path[0 .. full_path.len - filename.len]; const debug_file = llvm.debug_info_builder.createFile(filename.ptr, filename.len, directory.ptr, directory.len) orelse unreachable; - try llvm.debug_info_file_map.put_no_clobber(context.my_allocator, sema_file_index, debug_file); + try llvm.debug_info_file_map.put_no_clobber(sema_file_index, debug_file); return debug_file; } } @@ -1360,7 +1361,7 @@ pub const LLVM = struct { } else { if (unit.type_declarations.get(sema_type_index)) |global_declaration| { const result = unit.getIdentifier(global_declaration.declaration.name); - try llvm.type_name_map.put_no_clobber(context.my_allocator, sema_type_index, result); + try llvm.type_name_map.put_no_clobber(sema_type_index, result); return result; } else { const sema_type = unit.types.get(sema_type_index); @@ -1369,7 +1370,7 @@ pub const LLVM = struct { .integer => |integer| switch (integer.kind) { .materialized_int => b: { var buffer: [65]u8 = undefined; - const format = data_structures.format_int(&buffer, integer.bit_count, 10, false); + const format = format_int(&buffer, integer.bit_count, 10, false); const slice_ptr = format.ptr - 1; const slice = slice_ptr[0 .. format.len + 1]; slice[0] = switch (integer.signedness) { @@ -1410,7 +1411,7 @@ pub const LLVM = struct { .array => |array| b: { name.appendAssumeCapacity('['); var buffer: [65]u8 = undefined; - const array_count = data_structures.format_int(&buffer, array.count, 10, false); + const array_count = format_int(&buffer, array.count, 10, false); name.appendSliceAssumeCapacity(array_count); name.appendAssumeCapacity(']'); const element_type_name = try llvm.renderTypeName(unit, context, array.type); @@ -1424,8 +1425,7 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), }; - - try llvm.type_name_map.put(context.my_allocator, sema_type_index, if (name.len > 0) b: { + try llvm.type_name_map.put(sema_type_index, if (name.len > 0) b: { const new_name = try context.arena.new_array(u8, name.len); @memcpy(new_name, result); break :b new_name; @@ -1533,7 +1533,7 @@ pub const LLVM = struct { .forward_declaration = null, }); - try llvm.debug_type_map.put_no_clobber(context.my_allocator, sema_type_index, struct_type.toType()); + try llvm.debug_type_map.put_no_clobber(sema_type_index, struct_type.toType()); var field_types = BoundedArray(*LLVM.DebugInfo.Type, 512){}; bit_size = 0; @@ -1842,7 +1842,7 @@ pub const LLVM = struct { else => |t| @panic(@tagName(t)), }; - try llvm.debug_type_map.put(context.my_allocator, sema_type_index, result); + try llvm.debug_type_map.put(sema_type_index, result); assert(@intFromPtr(result) != 0xaaaa_aaaa_aaaa_aaaa); return result; @@ -1863,7 +1863,7 @@ pub const LLVM = struct { if (gep.is_struct and gep.index.type != .u32) unreachable; const gep_name = unit.getIdentifier(gep.name); const get_element_pointer = llvm.builder.createGEP(base_type, pointer, indices.ptr, indices.len, gep_name.ptr, gep_name.len, in_bounds) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, get_element_pointer); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, get_element_pointer); return get_element_pointer; } @@ -2047,7 +2047,7 @@ pub const LLVM = struct { basic_block_node.* = .{ .data = basic_block_index, }; - try llvm.llvm_block_map.put_no_clobber(context.my_allocator, basic_block_index, basic_block); + try llvm.llvm_block_map.put_no_clobber(basic_block_index, basic_block); return basic_block_node; } @@ -2287,8 
+2287,8 @@ pub const LLVM = struct { }); switch (declaration.initial_value) { - .function_declaration => try llvm.function_declaration_map.put_no_clobber(context.my_allocator, declaration, function), - .function_definition => try llvm.function_definition_map.put_no_clobber(context.my_allocator, declaration, function), + .function_declaration => try llvm.function_declaration_map.put_no_clobber(declaration, function), + .function_definition => try llvm.function_definition_map.put_no_clobber(declaration, function), else => unreachable, } @@ -2360,7 +2360,7 @@ pub const LLVM = struct { const function_definition = unit.function_definitions.get(function_definition_index); const scope = subprogram.toLocalScope().toScope(); - try llvm.scope_map.put_no_clobber(context.my_allocator, &function_definition.scope.scope, scope); + try llvm.scope_map.put_no_clobber(&function_definition.scope.scope, scope); }, .function_declaration => {}, else => |t| @panic(@tagName(t)), @@ -2368,7 +2368,7 @@ pub const LLVM = struct { } switch (declaration.initial_value) { - .function_declaration => try llvm.llvm_external_functions.put_no_clobber(context.my_allocator, declaration, function), + .function_declaration => try llvm.llvm_external_functions.put_no_clobber(declaration, function), .function_definition => {}, else => |t| @panic(@tagName(t)), } @@ -2419,6 +2419,19 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .inreg = llvm_context.getAttributeFromEnum(.InReg, 0), .@"noalias" = llvm_context.getAttributeFromEnum(.NoAlias, 0), }, + .debug_info_file_map = try PinnedHashMap(Compilation.Debug.File.Index, *LLVM.DebugInfo.File).init(std.mem.page_size), + .debug_type_map = try PinnedHashMap(Compilation.Type.Index, *LLVM.DebugInfo.Type).init(std.mem.page_size), + .type_name_map = try PinnedHashMap(Compilation.Type.Index, []const u8).init(std.mem.page_size), + .type_map = try PinnedHashMap(Compilation.Type.Index, *LLVM.Type).init(std.mem.page_size), + .function_declaration_map = try PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function).init(std.mem.page_size), + .function_definition_map = try PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function).init(std.mem.page_size), + .llvm_instruction_map = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value).init(std.mem.page_size), + .llvm_value_map = try PinnedHashMap(Compilation.V, *LLVM.Value).init(std.mem.page_size), + .llvm_block_map = try PinnedHashMap(Compilation.BasicBlock.Index, *LLVM.Value.BasicBlock).init(std.mem.page_size), + .llvm_external_functions = try PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.Function).init(std.mem.page_size), + .global_variable_map = try PinnedHashMap(*Compilation.Debug.Declaration.Global, *LLVM.Value.Constant.GlobalVariable).init(std.mem.page_size), + .scope_map = try PinnedHashMap(*Compilation.Debug.Scope, *LLVM.DebugInfo.Scope).init(std.mem.page_size), + .argument_allocas = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value).init(std.mem.page_size), }; if (unit.descriptor.generate_debug_information) { @@ -2442,7 +2455,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const compile_unit = llvm.debug_info_builder.createCompileUnit(LLVM.DebugInfo.Language.c, debug_info_file, producer, producer.len, is_optimized, flags, flags.len, runtime_version, splitname, splitname.len, debug_info_kind, DWOId, split_debug_inlining, debug_info_for_profiling, name_table_kind, 
ranges_base_address, sysroot, sysroot.len, sdk, sdk.len) orelse unreachable; llvm.scope = compile_unit.toScope(); - try llvm.scope_map.put_no_clobber(context.my_allocator, &unit.scope.scope, llvm.scope); + try llvm.scope_map.put_no_clobber(&unit.scope.scope, llvm.scope); } for (unit.external_functions.values()) |external_function_declaration| { @@ -2469,7 +2482,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .string_literal => |hash| { const string_literal = unit.string_literal_values.get(hash).?; const global_variable = llvm.builder.createGlobalString(string_literal.ptr, string_literal.len, name.ptr, name.len, address_space, llvm.module) orelse unreachable; - try llvm.global_variable_map.put_no_clobber(context.my_allocator, global_declaration, global_variable); + try llvm.global_variable_map.put_no_clobber(global_declaration, global_variable); }, else => { const global_type = try llvm.getType(unit, context, global_declaration.declaration.type); @@ -2485,7 +2498,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const thread_local_mode = LLVM.ThreadLocalMode.not_thread_local; const externally_initialized = false; const global_variable = llvm.module.addGlobalVariable(global_type, constant, linkage, initializer, name.ptr, name.len, null, thread_local_mode, address_space, externally_initialized) orelse return LLVM.Value.Error.constant_int; - try llvm.global_variable_map.put_no_clobber(context.my_allocator, global_declaration, global_variable); + try llvm.global_variable_map.put_no_clobber(global_declaration, global_variable); }, } @@ -2513,7 +2526,11 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo global_variable.setInitializer(constant_initializer); } + var phis = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value.Instruction.PhiNode).init(0x1000); + for (llvm.function_definition_map.keys(), llvm.function_definition_map.values()) |function_declaration, function| { + phis.clear(); + const function_definition_index = function_declaration.getFunctionDefinitionIndex(); const function_definition = unit.function_definitions.get(function_definition_index); llvm.function = function; @@ -2528,15 +2545,13 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo llvm.scope = subprogram.toLocalScope().toScope(); } - var alloca_map = MyHashMap(Compilation.Instruction.Index, *LLVM.Value){}; + var alloca_map = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value).init(std.mem.page_size); var block_command_list = BasicBlockList{}; const entry_block_node = try llvm.createBasicBlock(context, function_definition.basic_blocks.pointer[0], "fn_entry"); block_command_list.append(entry_block_node); - var phis = MyHashMap(Compilation.Instruction.Index, *LLVM.Value.Instruction.PhiNode){}; - while (block_command_list.len != 0) { const block_node = block_command_list.first orelse unreachable; const basic_block_index = block_node.data; @@ -2557,7 +2572,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo assert(@intFromEnum(push_scope.old.kind) >= @intFromEnum(Compilation.Debug.Scope.Kind.function)); const lexical_block = llvm.debug_info_builder.createLexicalBlock(old_scope, llvm.file, push_scope.new.line + 1, push_scope.new.column + 1) orelse unreachable; - try llvm.scope_map.put_no_clobber(context.my_allocator, push_scope.new, lexical_block.toScope()); + try llvm.scope_map.put_no_clobber(push_scope.new, lexical_block.toScope()); 
llvm.scope = lexical_block.toScope(); } }, @@ -2616,7 +2631,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }, .number_literal => |literal| { var buffer: [65]u8 = undefined; - const number_literal = data_structures.format_int(&buffer, literal, 16, false); + const number_literal = format_int(&buffer, literal, 16, false); const slice_ptr = number_literal.ptr - 4; const literal_slice = slice_ptr[0 .. number_literal.len + 4]; literal_slice[0] = '$'; @@ -2632,7 +2647,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo } else { const value = try llvm.emitLeftValue(unit, context, sema_value); var buffer: [65]u8 = undefined; - const operand_number = data_structures.format_int(&buffer, operand_values.len, 16, false); + const operand_number = format_int(&buffer, operand_values.len, 16, false); const slice_ptr = operand_number.ptr - 2; const operand_slice = slice_ptr[0 .. operand_number.len + 2]; operand_slice[0] = '$'; @@ -2676,7 +2691,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const inline_assembly = LLVM.Value.InlineAssembly.get(function_type, &assembly_statements.buffer, assembly_statements.len, &constraints.buffer, constraints.len, has_side_effects, is_align_stack, dialect, can_throw) orelse return LLVM.Value.Error.inline_assembly; const call = llvm.builder.createCall(function_type, inline_assembly.toValue(), &operand_values.buffer, operand_values.len, "", "".len, null) orelse return LLVM.Value.Instruction.Error.call; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, call.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, call.toValue()); }, .stack_slot => |stack_slot| { // const stack_slot_type = unit.types.get(stack_slot.type); @@ -2685,8 +2700,8 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const type_alignment = unit.types.get(stack_slot.type).getAbiAlignment(unit); const alloca_array_size = null; const declaration_alloca = llvm.builder.createAlloca(declaration_type, address_space, alloca_array_size, "", "".len, type_alignment) orelse return LLVM.Value.Instruction.Error.alloca; - try alloca_map.put_no_clobber(context.my_allocator, instruction_index, declaration_alloca.toValue()); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, declaration_alloca.toValue()); + try alloca_map.put_no_clobber(instruction_index, declaration_alloca.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, declaration_alloca.toValue()); }, .store => |store| { const right = try llvm.emitRightValue(unit, context, store.source); @@ -2706,7 +2721,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const cast_type = LLVM.Value.Instruction.Cast.Type.int_to_pointer; const cast_name = @tagName(cast_type); const cast_instruction = llvm.builder.createCast(cast_type, value, value.getType(), cast_name.ptr, cast_name.len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, cast_instruction); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, cast_instruction); }, .array_bitcast_to_integer => unreachable, // TODO: Poke metadata @@ -2724,27 +2739,27 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .pointer_source_type_to_destination_type, .pointer_none_terminated_to_zero, => { - try 
llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, value); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, value); }, .sign_extend => { const sign_extend = llvm.builder.createCast(.sign_extend, value, dest_type, "sign_extend", "sign_extend".len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, sign_extend); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, sign_extend); }, .zero_extend => { const zero_extend = llvm.builder.createCast(.zero_extend, value, dest_type, "zero_extend", "zero_extend".len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, zero_extend); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, zero_extend); }, .bitcast => { const bitcast = llvm.builder.createCast(.bitcast, value, dest_type, "bitcast", "bitcast".len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, bitcast); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, bitcast); }, .pointer_to_int => { const pointer_to_int = llvm.builder.createCast(.pointer_to_int, value, dest_type, "pointer_to_int", "pointer_to_int".len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, pointer_to_int); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, pointer_to_int); }, .truncate => { const truncate = llvm.builder.createCast(.truncate, value, dest_type, "truncate", "truncate".len) orelse return LLVM.Value.Instruction.Error.cast; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, truncate); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, truncate); }, .error_union_type_int_to_pointer, .error_union_type_upcast, .error_union_type_downcast => unreachable, } @@ -2761,7 +2776,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }, else => |t| @panic(@tagName(t)), }; - try llvm.llvm_value_map.put_no_clobber(context.my_allocator, load.value, value); + try llvm.llvm_value_map.put_no_clobber(load.value, value); break :blk value; }; @@ -2771,7 +2786,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const value_type = try llvm.getType(unit, context, load.type); const is_volatile = false; const load_i = llvm.builder.createLoad(value_type, value, is_volatile, "", "".len, alignment) orelse return LLVM.Value.Instruction.Error.load; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, load_i.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, load_i.toValue()); }, .integer_binary_operation => |binary_operation| { assert(binary_operation.left.type == binary_operation.right.type); @@ -2810,7 +2825,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }, else => unreachable, }; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, instruction); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, instruction); }, .call => |sema_call| { var argument_buffer: [32]*LLVM.Value = undefined; @@ -2861,7 +2876,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo else => |t| @panic(@tagName(t)), }; - try 
llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, call.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, call.toValue()); try llvm.setCallOrFunctionAttributes(unit, context, function_prototype, .{ .call = call, @@ -2918,14 +2933,14 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }; const call_to_asm = llvm.builder.createCall(function_type, inline_asm.toValue(), syscall_arguments.ptr, syscall_arguments.len, "syscall", "syscall".len, null) orelse return LLVM.Value.Instruction.Error.call; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, call_to_asm.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, call_to_asm.toValue()); }, .@"unreachable" => { _ = llvm.builder.createUnreachable() orelse return LLVM.Value.Instruction.Error.@"unreachable"; }, .abi_argument => |argument_index| { const argument = llvm.function.getArgument(argument_index) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, argument.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, argument.toValue()); }, .debug_declare_argument => |debug_declare| { if (generate_debug_information) { @@ -3024,7 +3039,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const value = try llvm.emitRightValue(unit, context, insert_value.new_value); const indices = [1]c_uint{insert_value.index}; const instruction = llvm.builder.createInsertValue(aggregate, value, &indices, indices.len, "", "".len) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, instruction); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, instruction); }, .extract_value => |extract_value| { switch (unit.types.get(extract_value.expression.type).*) { @@ -3037,7 +3052,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo assert(!aggregate.getType().isPointer()); const indices = [1]c_uint{extract_value.index}; const instruction = llvm.builder.createExtractValue(aggregate, &indices, indices.len, "", "".len) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, instruction); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, instruction); }, .integer_compare => |integer_compare| { assert(integer_compare.left.type == integer_compare.right.type); @@ -3057,7 +3072,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo .signed_greater_equal => .sge, }; const icmp = llvm.builder.createICmp(comparison_id, left, right, "", "".len) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, icmp); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, icmp); }, .jump => |jump| { const target_block = if (llvm.llvm_block_map.get(jump.to)) |target_block| target_block else blk: { @@ -3070,7 +3085,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo }; const br = llvm.builder.createBranch(target_block) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, br.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, br.toValue()); }, .branch => |branch| { const taken_node = try llvm.createBasicBlock(context, branch.taken, "taken_block"); @@ -3087,7 +3102,7 @@ 
pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const branch_weights = null; const unpredictable = null; const br = llvm.builder.createConditionalBranch(condition, taken_block, not_taken_block, branch_weights, unpredictable) orelse unreachable; - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, br.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, br.toValue()); }, .phi => |phi| { const phi_type = try llvm.getType(unit, context, phi.type); @@ -3095,9 +3110,9 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const phi_name = "phi"; const phi_node = llvm.builder.createPhi(phi_type, reserved_value_count, phi_name, phi_name.len) orelse unreachable; - try phis.put_no_clobber(context.my_allocator, instruction_index, phi_node); + try phis.put_no_clobber(instruction_index, phi_node); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, phi_node.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, phi_node.toValue()); }, .umin => |umin| { const intrinsic_type = try llvm.getType(unit, context, umin.type); @@ -3106,7 +3121,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const right = try llvm.emitRightValue(unit, context, umin.right); const arguments = [_]*LLVM.Value{ left, right }; const intrinsic_call = try llvm.callIntrinsic("llvm.umin", ¶meter_types, &arguments); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, intrinsic_call); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, intrinsic_call); }, .get_element_pointer => { _ = try llvm.createGEP(unit, context, instruction_index); @@ -3115,7 +3130,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const parameter_types: []const *LLVM.Type = &.{}; const parameter_values: []const *LLVM.Value = &.{}; const intrinsic_call = try llvm.callIntrinsic("llvm.trap", parameter_types, parameter_values); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, intrinsic_call); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, intrinsic_call); }, .add_overflow => |add_overflow| { const intrinsic_type = try llvm.getType(unit, context, add_overflow.type); @@ -3124,7 +3139,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const right = try llvm.emitRightValue(unit, context, add_overflow.right); const arguments = [_]*LLVM.Value{ left, right }; const intrinsic_call = try llvm.callIntrinsic("llvm.sadd.with.overflow", ¶meter_types, &arguments); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, intrinsic_call); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, intrinsic_call); }, .trailing_zeroes => |v| { const intrinsic_type = try llvm.getType(unit, context, v.type); @@ -3133,7 +3148,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const is_poison = llvm.context.getConstantInt(1, 0, false) orelse unreachable; const arguments = [_]*LLVM.Value{ value, is_poison.toValue() }; const intrinsic_call = try llvm.callIntrinsic("llvm.cttz", ¶meter_types, &arguments); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, intrinsic_call); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, intrinsic_call); }, .@"switch" => |switch_expression| { const 
condition = try llvm.emitRightValue(unit, context, switch_expression.condition); @@ -3165,7 +3180,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo const branch_weights = null; const unpredictable = null; const switch_instruction = llvm.builder.createSwitch(condition, else_block, &condition_array.buffer, &basic_block_array.buffer, condition_array.len, branch_weights, unpredictable); - try llvm.llvm_instruction_map.put_no_clobber(context.my_allocator, instruction_index, switch_instruction.toValue()); + try llvm.llvm_instruction_map.put_no_clobber(instruction_index, switch_instruction.toValue()); }, .memcpy => |memcpy| { const destination = try llvm.emitLeftValue(unit, context, memcpy.destination); diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 993cb04..3a623a6 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -11,13 +11,13 @@ pub fn assert(ok: bool) void { pub const Allocator = std.mem.Allocator; pub const BoundedArray = std.BoundedArray; -pub const Arena = struct{ +pub const Arena = struct { position: u64, commit_position: u64, alignment: u64, size: u64, - pub const Temporary = struct{ + pub const Temporary = struct { arena: *Arena, position: u64, }; @@ -71,7 +71,7 @@ pub const Arena = struct{ } } - pub inline fn new(arena: *Arena, comptime T: type) !*T{ + pub inline fn new(arena: *Arena, comptime T: type) !*T { const result: *T = @ptrCast(@alignCast(try arena.allocate(@sizeOf(T)))); return result; } @@ -83,7 +83,7 @@ pub const Arena = struct{ }; pub fn DynamicBoundedArray(comptime T: type) type { - return struct{ + return struct { pointer: [*]T = @constCast((&[_]T{}).ptr), length: u32 = 0, capacity: u32 = 0, @@ -114,7 +114,7 @@ pub fn DynamicBoundedArray(comptime T: type) type { array.length += count; } - pub fn slice(array: *Array) []T{ + pub fn slice(array: *Array) []T { return array.pointer[0..array.length]; } }; @@ -123,24 +123,26 @@ pub fn DynamicBoundedArray(comptime T: type) type { const pinned_array_page_size = 2 * 1024 * 1024; const pinned_array_max_size = std.math.maxInt(u32) - pinned_array_page_size; const pinned_array_default_granularity = pinned_array_page_size; -/// This must be used with big arrays + +// This must be used with big arrays, which are not resizeable (can't be cleared) pub fn PinnedArray(comptime T: type) type { - return struct{ + return struct { pointer: [*]T = @constCast((&[_]T{}).ptr), length: u32 = 0, granularity: u32 = 0, - pub const Index = enum(u32){ + pub const Index = enum(u32) { null = 0xffff_ffff, _, }; const Array = @This(); - pub fn const_slice(array: *const Array) []const T{ + pub fn const_slice(array: *const Array) []const T { return array.pointer[0..array.length]; } - pub fn slice(array: *Array) []T{ + + pub fn slice(array: *Array) []T { return array.pointer[0..array.length]; } @@ -155,14 +157,15 @@ pub fn PinnedArray(comptime T: type) type { return array.get_unchecked(i); } - pub fn get_index(array: *Array, item: *T) Index{ + pub fn get_index(array: *Array, item: *T) Index { const many_item: [*]T = @ptrCast(item); const result = @intFromPtr(many_item) - @intFromPtr(array.pointer); assert(result < pinned_array_max_size); return @enumFromInt(@divExact(result, @sizeOf(T))); } - pub fn init(granularity: u32) !Array{ + pub fn init(granularity: u32) !Array { + assert(granularity & 0xfff == 0); const raw_ptr = try reserve(pinned_array_max_size); try commit(raw_ptr, granularity); return Array{ @@ -172,30 +175,30 @@ pub fn PinnedArray(comptime T: type) type { }; } - pub fn 
init_with_default_granularity() !Array{ + pub fn init_with_default_granularity() !Array { return try Array.init(pinned_array_default_granularity); } - pub fn append(array: *Array, item: T) *T { - if (((array.length + 1) * @sizeOf(T)) & (array.granularity - 1) == 0) { - const length: u64 = array.length; - assert((length + 1) * @sizeOf(T) <= pinned_array_max_size); + pub fn ensure_capacity(array: *Array, additional: u32) void { + const length = array.length; + const size = length * @sizeOf(T); + const granularity_aligned_size = align_forward(size, array.granularity); + const new_size = size + additional * @sizeOf(T); + if (granularity_aligned_size < new_size) { + assert((length + additional) * @sizeOf(T) <= pinned_array_max_size); + const new_granularity_aligned_size = align_forward(new_size, array.granularity); const ptr: [*]u8 = @ptrCast(array.pointer); - commit(ptr + ((length + 1) * @sizeOf(T)), array.granularity) catch unreachable; + commit(ptr + granularity_aligned_size, new_granularity_aligned_size - granularity_aligned_size) catch unreachable; } + } + pub fn append(array: *Array, item: T) *T { + array.ensure_capacity(1); return array.append_with_capacity(item); } pub fn append_slice(array: *Array, items: []const T) void { - const count: u32 = @intCast(items.len); - if (((array.length + count) * @sizeOf(T)) & (array.granularity - 1) == 0) { - const length: u64 = array.length; - assert((length + count) * @sizeOf(T) <= pinned_array_max_size); - const ptr: [*]u8 = @ptrCast(array.pointer); - commit(ptr + ((length + count) * @sizeOf(T)), array.granularity) catch unreachable; - } - + array.ensure_capacity(@intCast(items.len)); array.append_slice_with_capacity(items); } @@ -218,7 +221,7 @@ pub fn PinnedArray(comptime T: type) type { }; } -pub fn reserve(size: u64) ![*]u8{ +pub fn reserve(size: u64) ![*]u8 { return switch (os) { .linux, .macos => (try std.posix.mmap(null, size, std.posix.PROT.NONE, .{ .ANONYMOUS = true, @@ -229,7 +232,7 @@ pub fn reserve(size: u64) ![*]u8{ }; } -pub fn commit(bytes: [*]u8, size: u64) !void{ +pub fn commit(bytes: [*]u8, size: u64) !void { const slice = bytes[0..size]; return switch (os) { .linux, .macos => try std.posix.mprotect(@alignCast(slice), std.posix.PROT.WRITE | std.posix.PROT.READ), @@ -445,41 +448,37 @@ const MapResult = struct { capacity: IndexType, }; -fn ensure_capacity_hashmap(allocator: *MyAllocator, current_capacity: IndexType, desired_capacity: IndexType, key_pointer: [*]u8, value_pointer: [*]u8, length: IndexType, key_size: IndexType, key_alignment: u16, value_size: IndexType, value_alignment: u16) !MapResult { - var new_capacity = @max(current_capacity, initial_item_count); - while (new_capacity < desired_capacity) { - new_capacity *= factor; - } +const pinned_hash_map_page_size = 2 * 1024 * 1024; +const pinned_hash_map_max_size = std.math.maxInt(u32) - pinned_hash_map_page_size; +const pinned_hash_map_default_granularity = pinned_hash_map_page_size; - if (new_capacity > current_capacity) { - const old_key_slice = key_pointer[0 .. length * key_size]; - const old_value_slice = value_pointer[0 .. 
length * value_size]; - const new_key_slice = try allocator.reallocate(old_key_slice, new_capacity * key_size, key_alignment); - const new_value_slice = try allocator.reallocate(old_value_slice, new_capacity * value_size, value_alignment); - - return .{ - .key_pointer = new_key_slice.ptr, - .value_pointer = new_value_slice.ptr, - .capacity = new_capacity, - }; - } else { - return .{ - .capacity = current_capacity, - .key_pointer = key_pointer, - .value_pointer = value_pointer, - }; - } -} - -pub fn MyHashMap(comptime K: type, comptime V: type) type { - // const K = []const u8; +pub fn PinnedHashMap(comptime K: type, comptime V: type) type { return struct { - key_pointer: [*]K = undefined, - value_pointer: [*]V = undefined, - length: IndexType = 0, - capacity: IndexType = 0, + key_pointer: [*]K, + value_pointer: [*]V, + length: u32, + granularity: u32, + committed: u32, - pub fn get_pointer(map: *@This(), key: K) ?*V { + const Map = @This(); + + pub fn init(granularity: u32) !Map { + assert(granularity & 0xfff == 0); + const key_raw_pointer = try reserve(pinned_hash_map_max_size); + try commit(key_raw_pointer, granularity); + const value_raw_pointer = try reserve(pinned_hash_map_max_size); + try commit(value_raw_pointer, granularity); + + return Map{ + .key_pointer = @alignCast(@ptrCast(key_raw_pointer)), + .value_pointer = @alignCast(@ptrCast(value_raw_pointer)), + .length = 0, + .granularity = granularity, + .committed = 1, + }; + } + + pub fn get_pointer(map: *Map, key: K) ?*V { for (map.keys(), 0..) |k, i| { const is_equal = switch (@typeInfo(K)) { .Pointer => |pointer| switch (pointer.size) { @@ -506,20 +505,20 @@ pub fn MyHashMap(comptime K: type, comptime V: type) type { } } - pub fn put(map: *@This(), allocator: *MyAllocator, key: K, value: V) !void { - if (map.get_pointer(key)) |value_ptr| { - value_ptr.* = value; + pub fn put(map: *@This(), key: K, value: V) !void { + if (map.get_pointer(key)) |value_pointer| { + value_pointer.* = value; } else { const len = map.length; - try map.ensure_capacity(allocator, len + 1); + map.ensure_capacity(len + 1); map.put_at_with_capacity(len, key, value); } } - pub fn put_no_clobber(map: *@This(), allocator: *MyAllocator, key: K, value: V) !void { + pub fn put_no_clobber(map: *@This(), key: K, value: V) !void { assert(map.get_pointer(key) == null); const len = map.length; - try map.ensure_capacity(allocator, len + 1); + map.ensure_capacity(len + 1); map.put_at_with_capacity(len, key, value); } @@ -530,11 +529,33 @@ pub fn MyHashMap(comptime K: type, comptime V: type) type { map.value_pointer[index] = value; } - pub fn ensure_capacity(map: *@This(), allocator: *MyAllocator, desired_capacity: IndexType) !void { - const result = try ensure_capacity_hashmap(allocator, map.capacity, desired_capacity, @ptrCast(map.key_pointer), @ptrCast(map.value_pointer), map.length, @sizeOf(K), @alignOf(K), @sizeOf(V), @alignOf(V)); - map.capacity = result.capacity; - map.key_pointer = @ptrCast(@alignCast(result.key_pointer)); - map.value_pointer = @ptrCast(@alignCast(result.value_pointer)); + fn ensure_capacity(map: *Map, additional: u32) void { + const length = map.length; + assert((length + additional) * @sizeOf(K) <= pinned_array_max_size); + + { + const key_size = length * @sizeOf(K); + const key_granularity_aligned_size = align_forward(key_size, map.granularity); + const key_new_size = key_size + additional * @sizeOf(K); + + if (key_granularity_aligned_size < key_new_size) { + const new_key_granularity_aligned_size = align_forward(key_new_size, 
map.granularity); + const key_pointer: [*]u8 = @ptrCast(map.key_pointer); + commit(key_pointer + key_granularity_aligned_size, new_key_granularity_aligned_size - key_granularity_aligned_size) catch unreachable; + } + } + + { + const value_size = length * @sizeOf(V); + const value_granularity_aligned_size = align_forward(value_size, map.granularity); + const value_new_size = value_size + additional * @sizeOf(K); + + if (value_granularity_aligned_size < value_new_size) { + const new_value_granularity_aligned_size = align_forward(value_new_size, map.granularity); + const value_pointer: [*]u8 = @ptrCast(map.value_pointer); + commit(value_pointer + value_granularity_aligned_size, new_value_granularity_aligned_size - value_granularity_aligned_size) catch unreachable; + } + } } pub fn keys(map: *@This()) []K { @@ -544,6 +565,10 @@ pub fn MyHashMap(comptime K: type, comptime V: type) type { pub fn values(map: *@This()) []V { return map.value_pointer[0..map.length]; } + + pub fn clear(map: *Map) void { + map.length = 0; + } }; } From 104f8ef82725596731fcf65a3e75b67ae4f16db2 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 21:44:19 -0600 Subject: [PATCH 09/14] Better support Windows in the lexer --- bootstrap/frontend/lexer.zig | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/bootstrap/frontend/lexer.zig b/bootstrap/frontend/lexer.zig index 420b967..be805d2 100644 --- a/bootstrap/frontend/lexer.zig +++ b/bootstrap/frontend/lexer.zig @@ -496,6 +496,7 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { const start_ch = text[start_i]; switch (start_ch) { + '\r' => index += 1, '\n' => { index += 1; line_index += 1; @@ -583,7 +584,14 @@ pub fn analyze(text: []const u8, token_buffer: *Token.Buffer) !Result { else => unreachable, } }, - else => unreachable, + else => { + var ch_array : [64]u8 = undefined; + try Compilation.write(.panic, "TODO char: 0x"); + const ch_fmt = library.format_int(&ch_array, start_ch, 16, false); + try Compilation.write(.panic, ch_fmt); + try Compilation.write(.panic, "\n"); + std.posix.exit(0); + }, } } From 3ee62420a3bbc3a962664892427b1beba0de54d3 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 22:07:40 -0600 Subject: [PATCH 10/14] Remove direct use of UnpinnedArray in Compilation --- bootstrap/Compilation.zig | 476 +++++++++++++++++++------------------- bootstrap/library.zig | 10 + bootstrap/main.zig | 2 +- 3 files changed, 251 insertions(+), 237 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index 1668e94..f53efd4 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -16,7 +16,6 @@ const first_byte = library.first_byte; const first_slice = library.first_slice; const starts_with_slice = library.starts_with_slice; const PinnedArray = library.PinnedArray; -const UnpinnedArray = library.UnpinnedArray; const BlockList = library.BlockList; const MyAllocator = library.MyAllocator; const PinnedHashMap = library.PinnedHashMap; @@ -3966,7 +3965,7 @@ pub const Instruction = union(enum) { }; pub const BasicBlock = struct { - instructions: UnpinnedArray(Instruction.Index) = .{}, + instructions: PinnedArray(Instruction.Index), predecessors: PinnedArray(BasicBlock.Index) = .{ .pointer = undefined, .length = 0, @@ -4001,7 +4000,7 @@ pub const Function = struct { pub const Definition = struct { scope: Debug.Scope.Function, - basic_blocks: UnpinnedArray(BasicBlock.Index) = .{}, + basic_blocks: PinnedArray(BasicBlock.Index), // TODO: make 
this more efficient type: Type.Index, body: Debug.Block.Index, @@ -4772,7 +4771,7 @@ pub const Builder = struct { const inline_asm = try unit.instructions.append(context.my_allocator, .{ .inline_assembly = inline_assembly, }); - try builder.appendInstruction(unit, context, inline_asm); + try builder.appendInstruction(unit, inline_asm); return .{ .value = .{ @@ -4863,7 +4862,7 @@ pub const Builder = struct { .source = v, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); const load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -4871,7 +4870,7 @@ pub const Builder = struct { .type = type_index, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); return V{ .value = .{ .runtime = load }, @@ -4889,7 +4888,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); return .{ .value = .{ @@ -4953,7 +4952,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, syscall); + try builder.appendInstruction(unit, syscall); return .{ .value = .{ @@ -4988,7 +4987,7 @@ pub const Builder = struct { }, }; const min = try unit.instructions.append(context.my_allocator, instruction); - try builder.appendInstruction(unit, context, min); + try builder.appendInstruction(unit, min); return .{ .value = .{ @@ -5090,7 +5089,7 @@ pub const Builder = struct { .arguments = args, }, }); - try builder.appendInstruction(unit, context, call); + try builder.appendInstruction(unit, call); return V{ .value = .{ @@ -5147,7 +5146,7 @@ pub const Builder = struct { const trailing_zeroes = try unit.instructions.append(context.my_allocator, .{ .trailing_zeroes = argument, }); - try builder.appendInstruction(unit, context, trailing_zeroes); + try builder.appendInstruction(unit, trailing_zeroes); return V{ .type = argument.type, @@ -5162,7 +5161,7 @@ pub const Builder = struct { const leading_zeroes = try unit.instructions.append(context.my_allocator, .{ .leading_zeroes = argument, }); - try builder.appendInstruction(unit, context, leading_zeroes); + try builder.appendInstruction(unit, leading_zeroes); return V{ .type = argument.type, @@ -5221,6 +5220,7 @@ pub const Builder = struct { .type = function_type_index, .body = .null, .has_debug_info = false, + .basic_blocks = try PinnedArray(BasicBlock.Index).init(std.mem.page_size), }); const function_definition = unit.function_definitions.get(function_definition_index); @@ -5260,7 +5260,7 @@ pub const Builder = struct { const argument_instruction = try unit.instructions.append(context.my_allocator, .{ .abi_argument = 0, }); - try builder.appendInstruction(unit, context, argument_instruction); + try builder.appendInstruction(unit, argument_instruction); const switch_instruction_index = try unit.instructions.append(context.my_allocator, .{ .@"switch" = .{ .condition = .{ @@ -5272,7 +5272,7 @@ pub const Builder = struct { .block_type = return_type_index, }, }); - try builder.appendInstruction(unit, context, switch_instruction_index); + try builder.appendInstruction(unit, switch_instruction_index); const switch_instruction = &unit.instructions.get(switch_instruction_index).@"switch"; const phi_instruction_index = try unit.instructions.append(context.my_allocator, .{ @@ -5335,7 +5335,7 @@ pub const Builder = struct { switch_instruction.else_block = try builder.create_unreachable_block(unit, context); builder.current_basic_block = 
exit_block; - try builder.appendInstruction(unit, context, phi_instruction_index); + try builder.appendInstruction(unit, phi_instruction_index); const ret = try unit.instructions.append(context.my_allocator, .{ .ret = .{ @@ -5345,7 +5345,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, ret); + try builder.appendInstruction(unit, ret); const global_index = try unit.global_declarations.append(context.my_allocator, .{ .declaration = .{ @@ -5608,7 +5608,7 @@ pub const Builder = struct { .new = new_scope, }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); } } @@ -5629,7 +5629,7 @@ pub const Builder = struct { .new = new_scope, }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); } builder.setCurrentScope(new_scope); @@ -5731,7 +5731,8 @@ pub const Builder = struct { .column = debug_info.column, }, }); - try basic_block.instructions.append(context.my_allocator, instruction); + + _ = basic_block.instructions.append(instruction); builder.last_check_point = .{ .scope = builder.current_scope, @@ -5753,14 +5754,14 @@ pub const Builder = struct { const basic_block_index = function_definition.basic_blocks.slice()[0]; const basic_block = unit.basic_blocks.get(basic_block_index); - try basic_block.instructions.insert(context.my_allocator, function_definition.alloca_index, stack); + basic_block.instructions.insert(function_definition.alloca_index, stack); function_definition.alloca_index += 1; return stack; } - fn appendInstruction(builder: *Builder, unit: *Unit, context: *const Context, instruction_index: Instruction.Index) !void { + fn appendInstruction(builder: *Builder, unit: *Unit, instruction_index: Instruction.Index) !void { switch (unit.instructions.get(instruction_index).*) { .extract_value => |extract_value| switch (unit.types.get(extract_value.expression.type).*) { .pointer => unreachable, @@ -5773,11 +5774,11 @@ pub const Builder = struct { } const basic_block = unit.basic_blocks.get(builder.current_basic_block); if (!basic_block.terminated) { - try basic_block.instructions.append(context.my_allocator, instruction_index); + _ = basic_block.instructions.append(instruction_index); } else { const instruction = unit.instructions.get(instruction_index); assert(instruction.* == .pop_scope); - try basic_block.instructions.insert(context.my_allocator, basic_block.instructions.length - 1, instruction_index); + basic_block.instructions.insert(basic_block.instructions.length - 1, instruction_index); } } @@ -6370,7 +6371,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :blk .{ .value = .{ @@ -6413,7 +6414,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); break :b .{ .value = .{ @@ -6446,7 +6447,7 @@ pub const Builder = struct { .type = expected_type_index, }, }); - try builder.appendInstruction(unit, context, zero_extend); + try builder.appendInstruction(unit, zero_extend); return .{ .value = .{ @@ -6463,7 +6464,7 @@ pub const Builder = struct { .type = expected_type_index, }, }); - try builder.appendInstruction(unit, context, sign_extend); + try builder.appendInstruction(unit, sign_extend); return .{ .value = .{ .runtime = sign_extend, @@ -6480,7 +6481,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, 
context, cast_to_const); + try builder.appendInstruction(unit, cast_to_const); return .{ .value = .{ .runtime = cast_to_const, @@ -6496,7 +6497,7 @@ pub const Builder = struct { .type = expected_type_index, }, }); - try builder.appendInstruction(unit, context, cast_to_zero_termination); + try builder.appendInstruction(unit, cast_to_zero_termination); return .{ .value = .{ @@ -6514,7 +6515,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast_to_const); + try builder.appendInstruction(unit, cast_to_const); return .{ .value = .{ .runtime = cast_to_const, @@ -6531,7 +6532,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); return .{ .value = .{ .runtime = cast, @@ -6548,7 +6549,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); return .{ .value = .{ .runtime = cast, @@ -6621,7 +6622,7 @@ pub const Builder = struct { .new_value = v, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -6642,7 +6643,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); const value = V{ .value = .{ @@ -6673,7 +6674,7 @@ pub const Builder = struct { .new_value = v, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -6694,7 +6695,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); @@ -6722,7 +6723,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, support_store); + try builder.appendInstruction(unit, support_store); const support_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -6735,7 +6736,7 @@ pub const Builder = struct { .type = expected_type_index, }, }); - try builder.appendInstruction(unit, context, support_load); + try builder.appendInstruction(unit, support_load); return .{ .value = .{ .runtime = support_load, @@ -6754,7 +6755,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); return .{ .value = .{ .runtime = cast, @@ -6820,7 +6821,7 @@ pub const Builder = struct { .new_value = value, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -6841,7 +6842,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); const result = V{ .value = .{ @@ -6872,7 +6873,7 @@ pub const Builder = struct { .type = .u64, }, }); - try builder.appendInstruction(unit, context, zero_extend); + try builder.appendInstruction(unit, zero_extend); const or_value = try 
unit.instructions.append(context.my_allocator, .{ .integer_binary_operation = .{ @@ -6896,7 +6897,7 @@ pub const Builder = struct { .signedness = .unsigned, }, }); - try builder.appendInstruction(unit, context, or_value); + try builder.appendInstruction(unit, or_value); if (destination_error_union.union_for_error == destination_error_union.abi) { const error_union_builder = try unit.instructions.append(context.my_allocator, .{ @@ -6916,7 +6917,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -6937,7 +6938,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); return V{ .value = .{ @@ -6984,7 +6985,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, left_load); + try builder.appendInstruction(unit, left_load); switch (unit.types.get(expected_right_type).*) { .integer => |integer| switch (integer.kind) { @@ -7008,7 +7009,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); break :blk V{ .value = .{ @@ -7029,7 +7030,7 @@ pub const Builder = struct { .source = value_to_store, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); return .{ .value = .{ @@ -7045,8 +7046,10 @@ pub const Builder = struct { fn newBasicBlock(builder: *Builder, unit: *Unit, context: *const Context) !BasicBlock.Index { const function = unit.function_definitions.get(builder.current_function); - const basic_block = try unit.basic_blocks.append(context.my_allocator, .{}); - try function.basic_blocks.append(context.my_allocator, basic_block); + const basic_block = try unit.basic_blocks.append(context.my_allocator, .{ + .instructions = try PinnedArray(Instruction.Index).init(std.mem.page_size), + }); + _ = function.basic_blocks.append(basic_block); return basic_block; } @@ -9145,7 +9148,7 @@ pub const Builder = struct { const memcpy = try unit.instructions.append(context.my_allocator, .{ .memcpy = arguments, }); - try builder.appendInstruction(unit, context, memcpy); + try builder.appendInstruction(unit, memcpy); } fn emitIntegerCompare(builder: *Builder, unit: *Unit, context: *const Context, left_value: V, right_value: V, integer: Type.Integer, compare_node_id: Node.Id) anyerror!V { @@ -9177,7 +9180,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, compare); + try builder.appendInstruction(unit, compare); return .{ .value = .{ @@ -9224,6 +9227,7 @@ pub const Builder = struct { .argument_map = try PinnedHashMap(*Debug.Declaration.Argument, Instruction.Index).init(std.mem.page_size), }, .has_debug_info = true, + .basic_blocks = try PinnedArray(BasicBlock.Index).init(std.mem.page_size), }); defer builder.current_function = old_function; @@ -9265,7 +9269,7 @@ pub const Builder = struct { const return_pointer_argument = try unit.instructions.append(context.my_allocator, .{ .abi_argument = 0, }); - try builder.appendInstruction(unit, context, return_pointer_argument); + try builder.appendInstruction(unit, return_pointer_argument); function.return_pointer = return_pointer_argument; } @@ -9287,7 +9291,7 @@ pub const Builder = struct { .abi_argument = 
@intCast(argument_abi.indices[0] + argument_index), }); - try builder.appendInstruction(unit, context, argument_instruction); + try builder.appendInstruction(unit, argument_instruction); argument_abi_instructions[argument_index] = argument_instruction; } @@ -9343,7 +9347,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); break :b stack; }, @@ -9398,7 +9402,7 @@ pub const Builder = struct { .source = source, }, }); - try builder.appendInstruction(unit, context, first_store); + try builder.appendInstruction(unit, first_store); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -9418,7 +9422,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "direct_pair"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); destination = .{ .value = .{ @@ -9440,7 +9444,7 @@ pub const Builder = struct { .source = source, }, }); - try builder.appendInstruction(unit, context, second_store); + try builder.appendInstruction(unit, second_store); break :b stack; }, @@ -9487,7 +9491,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); } else { const coerced_alloca = try builder.createStackVariable(unit, context, coerced_type_index, null); const coerced_pointer_type = try unit.getPointerType(context, .{ @@ -9514,7 +9518,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); try builder.emitMemcpy(unit, context, .{ .destination = .{ @@ -9553,7 +9557,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, debug_declare_argument); + try builder.appendInstruction(unit, debug_declare_argument); runtime_parameter_count += 1; }, @@ -9578,7 +9582,7 @@ pub const Builder = struct { const old_block = builder.current_basic_block; builder.current_basic_block = builder.return_block; - try builder.appendInstruction(unit, context, builder.return_phi); + try builder.appendInstruction(unit, builder.return_phi); try builder.buildRet(unit, context, .{ .value = .{ .runtime = builder.return_phi, @@ -9629,7 +9633,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, insert); + try builder.appendInstruction(unit, insert); try builder.buildRet(unit, context, .{ .value = .{ @@ -9680,7 +9684,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, return_value); + try builder.appendInstruction(unit, return_value); phi.addIncoming(.{ .value = .{ @@ -10141,7 +10145,7 @@ pub const Builder = struct { .type = load_type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :right .{ .value = .{ @@ -10278,7 +10282,7 @@ pub const Builder = struct { .type = right_value.type, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); const new_left_value = V{ .value = .{ @@ -10540,7 +10544,7 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }; - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); break :block .{ .value = .{ @@ -10569,7 +10573,7 @@ pub const Builder = struct { .is_struct = false, }, }); - try builder.appendInstruction(unit, context, gep); + try 
builder.appendInstruction(unit, gep); const v = V{ .value = .{ .runtime = gep }, @@ -10767,7 +10771,7 @@ pub const Builder = struct { .index = 1, }, }); - try builder.appendInstruction(unit, context, extract_value); + try builder.appendInstruction(unit, extract_value); break :b .{ .value = .{ @@ -10810,7 +10814,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_end_gep"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -10829,7 +10833,7 @@ pub const Builder = struct { .type = Type.usize, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -10896,7 +10900,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, range_compute); + try builder.appendInstruction(unit, range_compute); break :b .{ .value = .{ @@ -10917,7 +10921,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, extract_pointer); + try builder.appendInstruction(unit, extract_pointer); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -10928,7 +10932,7 @@ pub const Builder = struct { .is_struct = false, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); break :slice V{ .value = .{ @@ -10957,7 +10961,7 @@ pub const Builder = struct { .type = slice.child_pointer_type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -10968,7 +10972,7 @@ pub const Builder = struct { .is_struct = false, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); break :slice V{ .value = .{ @@ -10999,7 +11003,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ .pointer = load, @@ -11009,7 +11013,7 @@ pub const Builder = struct { .is_struct = false, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); break :pointer V{ .value = .{ .runtime = gep, @@ -11045,7 +11049,7 @@ pub const Builder = struct { .new_value = pointer_value, }, }); - try builder.appendInstruction(unit, context, insert_pointer); + try builder.appendInstruction(unit, insert_pointer); const insert_length = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11059,7 +11063,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, insert_length); + try builder.appendInstruction(unit, insert_length); break :block V{ .value = .{ @@ -11085,7 +11089,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, extract_value); + try builder.appendInstruction(unit, extract_value); const pointer_type = slice.child_pointer_type; const pointer_gep = try unit.instructions.append(context.my_allocator, .{ @@ -11097,7 +11101,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_pointer_gep"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try 
builder.appendInstruction(unit, pointer_gep); const slice_builder = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11116,7 +11120,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11131,7 +11135,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11151,7 +11155,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "pointer_many_slice"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try builder.appendInstruction(unit, pointer_gep); const pointer_type = try unit.getPointerType(context, .{ .type = pointer.type, @@ -11186,7 +11190,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11200,7 +11204,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11221,7 +11225,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "array_slice"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try builder.appendInstruction(unit, pointer_gep); const pointer_type = try unit.getPointerType(context, .{ .type = array.type, @@ -11256,7 +11260,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11270,7 +11274,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11288,7 +11292,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const pointer_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -11299,7 +11303,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "double_many_pointer_slice"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try builder.appendInstruction(unit, pointer_gep); const pointer_type = try unit.getPointerType(context, .{ .type = child_pointer.type, @@ -11334,7 +11338,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11348,7 +11352,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11365,7 +11369,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const 
pointer_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -11376,7 +11380,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "double_array_slice"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try builder.appendInstruction(unit, pointer_gep); const pointer_type = try unit.getPointerType(context, .{ .type = array.type, @@ -11411,7 +11415,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11425,7 +11429,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11444,7 +11448,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const extract_pointer = try unit.instructions.append(context.my_allocator, .{ .extract_value = .{ @@ -11457,7 +11461,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, extract_pointer); + try builder.appendInstruction(unit, extract_pointer); const pointer_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -11468,7 +11472,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_ptr_gep"), }, }); - try builder.appendInstruction(unit, context, pointer_gep); + try builder.appendInstruction(unit, pointer_gep); const slice_type = pointer.type; @@ -11489,7 +11493,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11503,7 +11507,7 @@ pub const Builder = struct { .new_value = len_expression, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11532,7 +11536,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :b .{ .value = .{ .runtime = cast, @@ -11549,7 +11553,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :b V{ .value = .{ .runtime = cast, @@ -11605,7 +11609,7 @@ pub const Builder = struct { .new_value = global_string_pointer, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const len = unit.types.get(string_global.declaration.type).array.count; @@ -11631,7 +11635,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11665,7 +11669,7 @@ pub const Builder = struct { .type = type_index, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :blk .{ .value = .{ @@ -11775,7 +11779,7 @@ pub const Builder = struct { .type = slice.child_pointer_type, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); const slice_builder 
= try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ .expression = .{ @@ -11793,7 +11797,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11816,7 +11820,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11850,7 +11854,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11873,7 +11877,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11896,7 +11900,7 @@ pub const Builder = struct { .type = slice.child_pointer_type, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); const slice_builder = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11915,7 +11919,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, slice_builder); + try builder.appendInstruction(unit, slice_builder); const final_slice = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -11938,7 +11942,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_slice); + try builder.appendInstruction(unit, final_slice); break :blk .{ .value = .{ @@ -11967,7 +11971,7 @@ pub const Builder = struct { .value = v, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :blk .{ .value = .{ .runtime = cast, @@ -12054,7 +12058,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "indexed_array_gep"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_type = try unit.getPointerType(context, .{ .type = array.type, @@ -12079,7 +12083,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ .pointer = load, @@ -12089,7 +12093,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "indexed_many_pointer"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_type = try unit.getPointerType(context, .{ .type = child_pointer.type, @@ -12114,7 +12118,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -12125,7 +12129,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "indexed_pointer_array"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_type = try unit.getPointerType(context, .{ .type = array.type, @@ -12152,7 +12156,7 @@ pub const Builder = struct { .type = 
pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -12163,7 +12167,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "many_pointer_integer"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_type = try unit.getPointerType(context, .{ .type = child_pointer.type, @@ -12191,7 +12195,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const pointer_field_index = struct_type.fields[sliceable.pointer]; const pointer_field = unit.struct_fields.get(pointer_field_index); @@ -12244,7 +12248,7 @@ pub const Builder = struct { .type = unit.types.get(gep.type).pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :blk .{ .value = .{ @@ -12302,7 +12306,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, xor); + try builder.appendInstruction(unit, xor); break :blk .{ .value = .{ @@ -12402,7 +12406,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, sub); + try builder.appendInstruction(unit, sub); break :block .{ .value = .{ .runtime = sub, @@ -12508,7 +12512,7 @@ pub const Builder = struct { .id = .equal, }, }); - try builder.appendInstruction(unit, context, cmp); + try builder.appendInstruction(unit, cmp); const is_null_block = try builder.newBasicBlock(unit, context); const is_not_null_block = try builder.newBasicBlock(unit, context); try builder.branch(unit, context, cmp, is_null_block, is_not_null_block); @@ -12529,7 +12533,7 @@ pub const Builder = struct { .type = type_to_expect, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :block .{ .value = .{ @@ -12570,7 +12574,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, get_pointer); + try builder.appendInstruction(unit, get_pointer); const cmp = try unit.instructions.append(context.my_allocator, .{ .integer_compare = .{ .left = .{ @@ -12584,7 +12588,7 @@ pub const Builder = struct { .id = .equal, }, }); - try builder.appendInstruction(unit, context, cmp); + try builder.appendInstruction(unit, cmp); const is_null_block = try builder.newBasicBlock(unit, context); const is_not_null_block = try builder.newBasicBlock(unit, context); try builder.branch(unit, context, cmp, is_null_block, is_not_null_block); @@ -12616,7 +12620,7 @@ pub const Builder = struct { .type = type_to_expect, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); const unwrap = V{ .value = .{ @@ -12629,7 +12633,7 @@ pub const Builder = struct { builder.current_basic_block = phi_block; - try builder.appendInstruction(unit, context, phi_index); + try builder.appendInstruction(unit, phi_index); break :block V{ .value = .{ .runtime = phi_index }, @@ -12644,7 +12648,7 @@ pub const Builder = struct { .type = type_to_expect, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); break :block .{ .value = .{ @@ -12680,7 +12684,7 @@ pub const Builder = struct { .signedness = .unsigned, }, }); - try builder.appendInstruction(unit, context, not); + try 
builder.appendInstruction(unit, not); break :block V{ .type = value.type, @@ -12737,7 +12741,7 @@ pub const Builder = struct { .index = 1, }, }); - try builder.appendInstruction(unit, context, is_error); + try builder.appendInstruction(unit, is_error); const error_block = try builder.newBasicBlock(unit, context); const clean_block = try builder.newBasicBlock(unit, context); @@ -12758,7 +12762,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, extract_value); + try builder.appendInstruction(unit, extract_value); break :blk V{ .type = error_union.abi, @@ -12786,7 +12790,7 @@ pub const Builder = struct { .source = value, }, }); - try builder.appendInstruction(unit, context, try_store); + try builder.appendInstruction(unit, try_store); const union_for_error_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -12806,7 +12810,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "union_for_error_gep"), }, }); - try builder.appendInstruction(unit, context, union_for_error_gep); + try builder.appendInstruction(unit, union_for_error_gep); const error_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -12825,7 +12829,7 @@ pub const Builder = struct { .type = error_union.@"error", }, }); - try builder.appendInstruction(unit, context, error_load); + try builder.appendInstruction(unit, error_load); break :err V{ .value = .{ .runtime = error_load, @@ -12847,7 +12851,7 @@ pub const Builder = struct { .new_value = error_value, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -12868,7 +12872,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); break :final V{ .value = .{ @@ -12899,7 +12903,7 @@ pub const Builder = struct { .new_value = error_value, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -12920,7 +12924,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); @@ -12948,7 +12952,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, support_store); + try builder.appendInstruction(unit, support_store); const support_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -12961,7 +12965,7 @@ pub const Builder = struct { .type = return_type_index, }, }); - try builder.appendInstruction(unit, context, support_load); + try builder.appendInstruction(unit, support_load); break :final V{ .value = .{ @@ -12985,7 +12989,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, get_error); + try builder.appendInstruction(unit, get_error); break :b V{ .value = .{ .runtime = get_error, @@ -13012,7 +13016,7 @@ pub const Builder = struct { .source = value, }, }); - try builder.appendInstruction(unit, context, try_store); + try builder.appendInstruction(unit, try_store); const 
union_for_error_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -13032,7 +13036,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "union_for_error_gep"), }, }); - try builder.appendInstruction(unit, context, union_for_error_gep); + try builder.appendInstruction(unit, union_for_error_gep); const error_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -13051,7 +13055,7 @@ pub const Builder = struct { .type = error_union.@"error", }, }); - try builder.appendInstruction(unit, context, error_load); + try builder.appendInstruction(unit, error_load); break :err V{ .value = .{ .runtime = error_load, @@ -13107,7 +13111,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, result); + try builder.appendInstruction(unit, result); const v = V{ .value = .{ @@ -13217,7 +13221,7 @@ pub const Builder = struct { .type = type_index, }, }); - try builder.appendInstruction(unit, context, zero_extend); + try builder.appendInstruction(unit, zero_extend); var value = V{ .value = .{ .runtime = zero_extend, @@ -13240,7 +13244,7 @@ pub const Builder = struct { .type = type_index, }, }); - try builder.appendInstruction(unit, context, field_zero_extend); + try builder.appendInstruction(unit, field_zero_extend); const shift_left = try unit.instructions.append(context.my_allocator, .{ .integer_binary_operation = .{ @@ -13265,7 +13269,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, shift_left); + try builder.appendInstruction(unit, shift_left); const merge_or = try unit.instructions.append(context.my_allocator, .{ .integer_binary_operation = .{ @@ -13280,7 +13284,7 @@ pub const Builder = struct { .right = value, }, }); - try builder.appendInstruction(unit, context, merge_or); + try builder.appendInstruction(unit, merge_or); value = .{ .value = .{ @@ -13331,7 +13335,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, struct_initialization_instruction); + try builder.appendInstruction(unit, struct_initialization_instruction); struct_initialization.value = .{ .runtime = struct_initialization_instruction, @@ -13445,7 +13449,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, insert_value); + try builder.appendInstruction(unit, insert_value); array_builder.value = .{ .runtime = insert_value, @@ -13494,7 +13498,7 @@ pub const Builder = struct { .name = field.name, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const second_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -13513,7 +13517,7 @@ pub const Builder = struct { .type = field.type, }, }); - try builder.appendInstruction(unit, context, second_load); + try builder.appendInstruction(unit, second_load); return .{ .callable = .{ @@ -13564,7 +13568,7 @@ pub const Builder = struct { .type = first_argument_type_index, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); return .{ .member = .{ @@ -13678,7 +13682,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const member_resolution = try builder.end_up_resolving_member_call(unit, context, child_pointer.type, .{ .value = .{ @@ -13744,7 +13748,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try 
builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b .{ .callable = .{ @@ -13877,7 +13881,7 @@ pub const Builder = struct { .source = argument_value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); const target_type = unit.types.get(coerced_type_index); const target_alignment = target_type.getAbiAlignment(unit); @@ -13896,7 +13900,7 @@ pub const Builder = struct { .type = coerced_type_index, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); argument_list.appendAssumeCapacity(V{ .value = .{ @@ -13935,7 +13939,7 @@ pub const Builder = struct { .alignment = alignment, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); argument_list.appendAssumeCapacity(V{ .value = .{ @@ -13981,7 +13985,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, extract_0); + try builder.appendInstruction(unit, extract_0); argument_list.appendAssumeCapacity(.{ .value = .{ @@ -13996,7 +14000,7 @@ pub const Builder = struct { .index = 1, }, }); - try builder.appendInstruction(unit, context, extract_1); + try builder.appendInstruction(unit, extract_1); argument_list.appendAssumeCapacity(.{ .value = .{ @@ -14031,7 +14035,7 @@ pub const Builder = struct { .source = argument_value, }, }); - try builder.appendInstruction(unit, context, coerced_store); + try builder.appendInstruction(unit, coerced_store); break :b coerced_pointer; } else b: { @@ -14056,7 +14060,7 @@ pub const Builder = struct { .source = argument_value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); break :b argument_alloca; }; @@ -14078,7 +14082,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "direct_pair_gep0"), }, }); - try builder.appendInstruction(unit, context, gep0); + try builder.appendInstruction(unit, gep0); const load0 = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -14097,7 +14101,7 @@ pub const Builder = struct { .type = pair[0], }, }); - try builder.appendInstruction(unit, context, load0); + try builder.appendInstruction(unit, load0); const gep1 = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -14117,7 +14121,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "direct_pair_gep1"), }, }); - try builder.appendInstruction(unit, context, gep1); + try builder.appendInstruction(unit, gep1); const load1 = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -14136,7 +14140,7 @@ pub const Builder = struct { .type = pair[1], }, }); - try builder.appendInstruction(unit, context, load1); + try builder.appendInstruction(unit, load1); argument_list.appendAssumeCapacity(V{ .value = .{ @@ -14182,7 +14186,7 @@ pub const Builder = struct { .source = argument_value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); argument_list.appendAssumeCapacity(indirect_value); } @@ -14204,7 +14208,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); if (function_prototype.return_type == .noreturn) { try builder.buildTrap(unit, context); @@ -14218,7 +14222,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load); + try 
builder.appendInstruction(unit, load); return .{ .value = .{ .runtime = load, @@ -14289,7 +14293,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, debug_declare_local); + try builder.appendInstruction(unit, debug_declare_local); const store = try unit.instructions.append(context.my_allocator, .{ .store = .{ @@ -14303,7 +14307,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); return stack; } else { @@ -14555,7 +14559,7 @@ pub const Builder = struct { .index = 1, }, }); - try builder.appendInstruction(unit, context, len_extract_instruction); + try builder.appendInstruction(unit, len_extract_instruction); break :b V{ .value = .{ @@ -14603,7 +14607,7 @@ pub const Builder = struct { .index = 1, }, }); - try builder.appendInstruction(unit, context, len_extract_value); + try builder.appendInstruction(unit, len_extract_value); break :blk .{ .stack_slot = stack_slot, @@ -14704,7 +14708,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); const compare = try unit.instructions.append(context.my_allocator, .{ .integer_compare = .{ @@ -14719,7 +14723,7 @@ pub const Builder = struct { .id = .unsigned_less, }, }); - try builder.appendInstruction(unit, context, compare); + try builder.appendInstruction(unit, compare); const body_block = try builder.newBasicBlock(unit, context); const exit_block = try builder.newBasicBlock(unit, context); @@ -14745,7 +14749,7 @@ pub const Builder = struct { .type = Type.usize, }, }); - try builder.appendInstruction(unit, context, load_i); + try builder.appendInstruction(unit, load_i); for (payloads[0..not_range_len], slices) |payload_node_index, slice| { const pointer_extract_value = try unit.instructions.append(context.my_allocator, .{ @@ -14754,7 +14758,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, pointer_extract_value); + try builder.appendInstruction(unit, pointer_extract_value); const slice_type = unit.types.get(slice.type).slice; @@ -14772,7 +14776,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_for_payload"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const is_by_value = true; const init_instruction = switch (is_by_value) { @@ -14788,7 +14792,7 @@ pub const Builder = struct { .type = slice_type.child_type, }, }); - try builder.appendInstruction(unit, context, load_gep); + try builder.appendInstruction(unit, load_gep); break :vblk load_gep; }, false => gep, @@ -14829,7 +14833,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load_iterator); + try builder.appendInstruction(unit, load_iterator); const increment = try unit.instructions.append(context.my_allocator, .{ .integer_binary_operation = .{ @@ -14854,7 +14858,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, increment); + try builder.appendInstruction(unit, increment); const increment_store = try unit.instructions.append(context.my_allocator, .{ .store = .{ @@ -14873,7 +14877,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, increment_store); + try builder.appendInstruction(unit, increment_store); try builder.jump(unit, context, builder.loop_header_block); @@ -14977,7 +14981,7 @@ pub const Builder = struct { .index = 1, }, }); - try 
builder.appendInstruction(unit, context, is_error); + try builder.appendInstruction(unit, is_error); const error_block = try builder.newBasicBlock(unit, context); const clean_block = try builder.newBasicBlock(unit, context); try builder.branch(unit, context, is_error, error_block, clean_block); @@ -14996,7 +15000,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, error_extract_value); + try builder.appendInstruction(unit, error_extract_value); const error_value = V{ .value = .{ .runtime = error_extract_value, @@ -15052,7 +15056,7 @@ pub const Builder = struct { .index = 0, }, }); - try builder.appendInstruction(unit, context, no_error_extract_value); + try builder.appendInstruction(unit, no_error_extract_value); const value = V{ .value = .{ @@ -15072,7 +15076,7 @@ pub const Builder = struct { try builder.jump(unit, context, exit_block); builder.current_basic_block = exit_block; - try builder.appendInstruction(unit, context, phi_index); + try builder.appendInstruction(unit, phi_index); return .{ .value = .{ @@ -15119,7 +15123,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, pointer_value); + try builder.appendInstruction(unit, pointer_value); const condition = try unit.instructions.append(context.my_allocator, .{ .integer_compare = .{ @@ -15139,7 +15143,7 @@ pub const Builder = struct { .type = slice.child_pointer_type, }, }); - try builder.appendInstruction(unit, context, condition); + try builder.appendInstruction(unit, condition); try builder.resolveBranch(unit, context, Type.Expect{ .type = .void }, condition, arguments.taken_expression_node_index, arguments.not_taken_expression_node_index, payload_node.token, optional_expression); } else { unreachable; @@ -15160,7 +15164,7 @@ pub const Builder = struct { .type = optional_expression.type, }, }); - try builder.appendInstruction(unit, context, condition); + try builder.appendInstruction(unit, condition); try builder.resolveBranch(unit, context, Type.Expect{ .type = .void }, condition, arguments.taken_expression_node_index, arguments.not_taken_expression_node_index, payload_node.token, optional_expression); } else { unreachable; @@ -15211,7 +15215,7 @@ pub const Builder = struct { .type = not_null_slice, }, }); - try builder.appendInstruction(unit, context, unwrap); + try builder.appendInstruction(unit, unwrap); const emit = true; _ = try builder.emitLocalVariableDeclaration(unit, context, optional_payload_token, .@"const", not_null_slice, .{ @@ -15237,7 +15241,7 @@ pub const Builder = struct { .type = pointer_type, }, }); - try builder.appendInstruction(unit, context, unwrap); + try builder.appendInstruction(unit, unwrap); const emit = true; _ = try builder.emitLocalVariableDeclaration(unit, context, optional_payload_token, .@"const", pointer_type, .{ @@ -15279,7 +15283,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, br); + try builder.appendInstruction(unit, br); unit.basic_blocks.get(builder.current_basic_block).terminated = true; const taken_bb = unit.basic_blocks.get(taken_block); @@ -15296,7 +15300,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); unit.basic_blocks.get(builder.current_basic_block).terminated = true; const new_bb = unit.basic_blocks.get(new_basic_block); @@ -15418,7 +15422,7 @@ pub const Builder = struct { .block_type = type_index, }, }); - try builder.appendInstruction(unit, context, 
switch_instruction_index); + try builder.appendInstruction(unit, switch_instruction_index); const switch_instruction = &unit.instructions.get(switch_instruction_index).@"switch"; const phi_info: ?PhiInfo = switch (unit.types.get(type_index).*) { @@ -15552,7 +15556,7 @@ pub const Builder = struct { const phi_instruction = &unit.instructions.get(phi.instruction).phi; if (phi_instruction.values.len > 0) { builder.current_basic_block = phi.block; - try builder.appendInstruction(unit, context, phi.instruction); + try builder.appendInstruction(unit, phi.instruction); return V{ .value = .{ @@ -15680,7 +15684,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :v .{ .value = .{ .runtime = load, @@ -15832,7 +15836,7 @@ pub const Builder = struct { }), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_value = V{ .value = .{ @@ -15856,7 +15860,7 @@ pub const Builder = struct { .type = field_type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b .{ .value = .{ @@ -15898,7 +15902,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); // GEP because this is still a pointer const gep = try unit.instructions.append(context.my_allocator, .{ @@ -15919,7 +15923,7 @@ pub const Builder = struct { .name = field.name, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const mutability = child_pointer.mutability; const gep_pointer_type = try unit.getPointerType(context, .{ @@ -15945,7 +15949,7 @@ pub const Builder = struct { .type = field.type, }, }); - try builder.appendInstruction(unit, context, field_load); + try builder.appendInstruction(unit, field_load); break :right .{ .value = .{ @@ -15991,7 +15995,7 @@ pub const Builder = struct { .name = field.name, }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const gep_value = V{ .value = .{ @@ -16015,7 +16019,7 @@ pub const Builder = struct { }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -16049,7 +16053,7 @@ pub const Builder = struct { .type = pointer.type, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); var bit_offset: u32 = 0; for (fields[0..i]) |fi| { @@ -16084,7 +16088,7 @@ pub const Builder = struct { .signedness = integer.signedness, }, }); - try builder.appendInstruction(unit, context, shl); + try builder.appendInstruction(unit, shl); break :shl shl; }, @@ -16111,7 +16115,7 @@ pub const Builder = struct { .type = field.type, }, }); - try builder.appendInstruction(unit, context, truncate); + try builder.appendInstruction(unit, truncate); break :b V{ .value = .{ .runtime = truncate, @@ -16149,7 +16153,7 @@ pub const Builder = struct { .type = ti, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); return .{ .value = .{ @@ -16206,7 +16210,7 @@ pub const Builder = struct { .type = ti, }, }); - try builder.appendInstruction(unit, context, cast); + try builder.appendInstruction(unit, cast); return .{ .value = .{ @@ -16237,7 +16241,7 @@ pub const Builder = struct { .new_value = result, }, }); - try builder.appendInstruction(unit, 
context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -16258,7 +16262,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); return .{ .value = .{ @@ -16288,7 +16292,7 @@ pub const Builder = struct { .new_value = result, }, }); - try builder.appendInstruction(unit, context, error_union_builder); + try builder.appendInstruction(unit, error_union_builder); const final_error_union = try unit.instructions.append(context.my_allocator, .{ .insert_value = .{ @@ -16309,7 +16313,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, final_error_union); + try builder.appendInstruction(unit, final_error_union); const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); @@ -16337,7 +16341,7 @@ pub const Builder = struct { }, }, }); - try builder.appendInstruction(unit, context, support_store); + try builder.appendInstruction(unit, support_store); const support_load = try unit.instructions.append(context.my_allocator, .{ .load = .{ @@ -16350,7 +16354,7 @@ pub const Builder = struct { .type = ti, }, }); - try builder.appendInstruction(unit, context, support_load); + try builder.appendInstruction(unit, support_load); return .{ .value = .{ .runtime = support_load, @@ -16374,7 +16378,7 @@ pub const Builder = struct { .type = ti, }, }); - try builder.appendInstruction(unit, context, zero_extend); + try builder.appendInstruction(unit, zero_extend); return .{ .value = .{ @@ -16501,13 +16505,13 @@ pub const Builder = struct { fn buildUnreachable(builder: *Builder, unit: *Unit, context: *const Context) !void { const instruction = try unit.instructions.append(context.my_allocator, .@"unreachable"); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); unit.basic_blocks.get(builder.current_basic_block).terminated = true; } fn buildTrap(builder: *Builder, unit: *Unit, context: *const Context) !void { const instruction = try unit.instructions.append(context.my_allocator, .trap); - try builder.appendInstruction(unit, context, instruction); + try builder.appendInstruction(unit, instruction); try builder.buildUnreachable(unit, context); } @@ -16552,7 +16556,7 @@ pub const Builder = struct { .source = value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); const target_type = unit.types.get(struct_type_index); const target_size = target_type.getAbiSize(unit); @@ -16576,7 +16580,7 @@ pub const Builder = struct { .type = struct_type_index, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -16615,7 +16619,7 @@ pub const Builder = struct { .alignment = alignment, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -16639,7 +16643,7 @@ pub const Builder = struct { .source = value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); const void_value = V{ .value = .{ .@"comptime" = .void, @@ -16672,7 +16676,7 @@ pub const Builder = struct { .source = value, }, }); - try builder.appendInstruction(unit, context, store); + try builder.appendInstruction(unit, store); 
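
The hunks above and below all make the same mechanical change: Builder.appendInstruction drops its *const Context parameter, so every call site goes from appendInstruction(unit, context, x) to appendInstruction(unit, x). The sketch below only illustrates that call-site shape; Unit, Builder and Instruction here are made-up stand-ins, not the compiler's real definitions, and the rationale is an assumption (presumably the context was only threaded through for its allocator, which the new append path no longer needs).

const std = @import("std");

// Illustrative stand-ins, not the compiler's real types.
const Instruction = union(enum) { load: u32, store: u32 };

const Unit = struct {
    instructions: std.ArrayList(Instruction),
};

const Builder = struct {
    current_block: std.ArrayList(usize),

    // New shape: only the unit is threaded through, no `context`.
    fn appendInstruction(builder: *Builder, unit: *Unit, instruction_index: usize) !void {
        _ = unit; // the real builder reads instruction data back out of the unit
        try builder.current_block.append(instruction_index);
    }
};

pub fn main() !void {
    var gpa = std.heap.GeneralPurposeAllocator(.{}){};
    const allocator = gpa.allocator();

    var unit = Unit{ .instructions = std.ArrayList(Instruction).init(allocator) };
    var builder = Builder{ .current_block = std.ArrayList(usize).init(allocator) };

    try unit.instructions.append(.{ .store = 42 });
    const store_index = unit.instructions.items.len - 1;

    // Old call sites: try builder.appendInstruction(unit, context, store);
    // New call sites, as in the surrounding hunks:
    try builder.appendInstruction(&unit, store_index);
}

The remaining hunks of this patch continue below with the same substitution applied to loads, stores, GEPs, casts and branch instructions.
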
const target_type = unit.types.get(coerced_type_index); const target_alignment = target_type.getAbiAlignment(unit); @@ -16691,7 +16695,7 @@ pub const Builder = struct { .type = coerced_type_index, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -16730,7 +16734,7 @@ pub const Builder = struct { .alignment = alignment, }, }); - try builder.appendInstruction(unit, context, load); + try builder.appendInstruction(unit, load); break :b V{ .value = .{ @@ -16745,7 +16749,7 @@ pub const Builder = struct { const ret = try unit.instructions.append(context.my_allocator, .{ .ret = abi_value, }); - try builder.appendInstruction(unit, context, ret); + try builder.appendInstruction(unit, ret); unit.basic_blocks.get(builder.current_basic_block).terminated = true; } @@ -16892,7 +16896,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "slice_pointer_access"), }, }); - try builder.appendInstruction(unit, context, gep); + try builder.appendInstruction(unit, gep); const pointer_to_slice_pointer = try unit.getPointerType(context, .{ .type = sliceable_pointer_type_index, @@ -16913,7 +16917,7 @@ pub const Builder = struct { .type = sliceable_pointer_type_index, }, }); - try builder.appendInstruction(unit, context, pointer_load); + try builder.appendInstruction(unit, pointer_load); const slice_pointer_gep = try unit.instructions.append(context.my_allocator, .{ .get_element_pointer = .{ @@ -16924,7 +16928,7 @@ pub const Builder = struct { .name = try unit.processIdentifier(context, "indexed_slice_gep"), }, }); - try builder.appendInstruction(unit, context, slice_pointer_gep); + try builder.appendInstruction(unit, slice_pointer_gep); return .{ .value = .{ diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 3a623a6..0516ed2 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -218,6 +218,16 @@ pub fn PinnedArray(comptime T: type) type { array.length += count; @memcpy(array.pointer[index..][0..count], items); } + + pub fn insert(array: *@This(), index: u32, item: T) void { + assert(index < array.length); + array.ensure_capacity(1); + const src = array.slice()[index..]; + array.length += 1; + const dst = array.slice()[index + 1..]; + copy_backwards(T, dst, src); + array.slice()[index] = item; + } }; } diff --git a/bootstrap/main.zig b/bootstrap/main.zig index 846e3de..020b578 100644 --- a/bootstrap/main.zig +++ b/bootstrap/main.zig @@ -33,7 +33,7 @@ pub fn main() !void { // assert(arguments.len > 0); // const home_dir = std.posix.getenv("HOME") orelse unreachable; // const timestamp = std.time.milliTimestamp(); - // var argument_list = UnpinnedArray(u8){}; + // var argument_list = PinnedArray(u8){}; // for (arguments) |arg| { // argument_list.append_slice(context.my_allocator, arg) catch {}; // argument_list.append(context.my_allocator, ' ') catch {}; From 98aff391b1b93dd87b9aa1caa8e5c78fd5dbf2d0 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Thu, 25 Apr 2024 23:27:22 -0600 Subject: [PATCH 11/14] Remove UnpinnedArray last dependency (BlockList) --- bootstrap/Compilation.zig | 1322 +++++++++++++++++++----------------- bootstrap/backend/llvm.zig | 4 +- bootstrap/library.zig | 238 +------ bootstrap/linker/lld.zig | 2 +- 4 files changed, 726 insertions(+), 840 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index f53efd4..1ded94e 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -16,7 +16,7 @@ const first_byte = 
library.first_byte; const first_slice = library.first_slice; const starts_with_slice = library.starts_with_slice; const PinnedArray = library.PinnedArray; -const BlockList = library.BlockList; +const PinnedArrayAdvanced = library.PinnedArrayAdvanced; const MyAllocator = library.MyAllocator; const PinnedHashMap = library.PinnedHashMap; const span = library.span; @@ -126,9 +126,7 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .tokens = try PinnedArray(Token).init_with_default_granularity(), .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, - .node_buffer = try PinnedArray(Node).init_with_default_granularity(), - .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), - .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + // pinned hashmaps .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), @@ -147,6 +145,30 @@ pub fn compileBuildExecutable(context: *const Context, arguments: []const []cons .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // special pinned arrays + .types = try Type.List.init_with_default_granularity(), + // pinned arrays + .node_buffer = try PinnedArray(Node).init_with_default_granularity(), + .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), + .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + .files = try PinnedArray(Debug.File).init_with_default_granularity(), + .structs = try PinnedArray(Struct).init_with_default_granularity(), + .struct_fields = try PinnedArray(Struct.Field).init_with_default_granularity(), + .enum_fields = try PinnedArray(Enum.Field).init_with_default_granularity(), + .function_definitions = try PinnedArray(Function.Definition).init_with_default_granularity(), + .blocks = try PinnedArray(Debug.Block).init_with_default_granularity(), + .global_declarations = try PinnedArray(Debug.Declaration.Global).init_with_default_granularity(), + .local_declarations = try PinnedArray(Debug.Declaration.Local).init_with_default_granularity(), + .argument_declarations = try PinnedArray(Debug.Declaration.Argument).init_with_default_granularity(), + .assembly_instructions = try PinnedArray(InlineAssembly.Instruction).init_with_default_granularity(), + .function_prototypes = try PinnedArray(Function.Prototype).init_with_default_granularity(), + .inline_assembly = try PinnedArray(InlineAssembly).init_with_default_granularity(), + .instructions = try PinnedArray(Instruction).init_with_default_granularity(), + .basic_blocks = try PinnedArray(BasicBlock).init_with_default_granularity(), + .constant_structs = try PinnedArray(V.Comptime.ConstantStruct).init_with_default_granularity(), + .constant_arrays = try PinnedArray(V.Comptime.ConstantArray).init_with_default_granularity(), + .constant_slices = try PinnedArray(V.Comptime.ConstantSlice).init_with_default_granularity(), + .error_fields = try PinnedArray(Type.Error.Field).init_with_default_granularity(), }; try unit.compile(context); @@ -3012,37 
+3034,120 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o break :blk slice; }; + // const unit = try context.allocator.create(Unit); + // unit.* = .{ + // .descriptor = .{ + // .main_package_path = main_package_path, + // .executable_path = executable_path, + // .object_path = object_file_path, + // .only_parse = only_parse, + // .arch = arch, + // .os = os, + // .abi = abi, + // .optimization = optimization, + // .link_libc = switch (os) { + // .linux => link_libc, + // .macos => true, + // .windows => link_libc, + // // .windows => link_libc, + // // else => unreachable, + // }, + // .link_libcpp = false, + // .generate_debug_information = generate_debug_information, + // .name = executable_name, + // .is_test = options.is_test, + // .c_source_files = c_source_files.slice(), + // }, + // .token_buffer = Token.Buffer{ + // .tokens = try PinnedArray(Token).init_with_default_granularity(), + // .line_offsets = try PinnedArray(u32).init_with_default_granularity(), + // }, + // .node_buffer = try PinnedArray(Node).init_with_default_granularity(), + // .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), + // .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + // .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), + // .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), + // .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), + // .string_literal_values = try PinnedHashMap(u32, [:0]const u8).init(std.mem.page_size), + // .string_literal_globals = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), + // .optionals = try PinnedHashMap(Type.Index, Type.Index).init(std.mem.page_size), + // .pointers = try PinnedHashMap(Type.Pointer, Type.Index).init(std.mem.page_size), + // .slices = try PinnedHashMap(Type.Slice, Type.Index).init(std.mem.page_size), + // .arrays = try PinnedHashMap(Type.Array, Type.Index).init(std.mem.page_size), + // .integers = try PinnedHashMap(Type.Integer, Type.Index).init(std.mem.page_size), + // .error_unions = try PinnedHashMap(Type.Error.Union.Descriptor, Type.Index).init(std.mem.page_size), + // .two_structs = try PinnedHashMap([2]Type.Index, Type.Index).init(std.mem.page_size), + // .fields_array = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // .name_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // .external_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), + // .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // }; + + const unit = try createUnit(context, .{ + .main_package_path = main_package_path, + .object_path = object_file_path, + .executable_path = executable_path, + .only_parse = only_parse, + .arch = arch, + .os = os, + .abi = abi, + .optimization = optimization, + .link_libc = link_libc, + .generate_debug_information = generate_debug_information, + .name = executable_name, + .is_test = options.is_test, + .c_source_files = c_source_files.slice(), + }); + + try unit.compile(context); +} + +fn createUnit(context: 
*const Context, arguments: struct{ + main_package_path: []const u8, + executable_path: []const u8, + object_path: []const u8, + only_parse: bool, + arch: Arch, + os: Os, + abi: Abi, + optimization: Optimization, + link_libc: bool, + generate_debug_information: bool, + name: []const u8, + is_test: bool, + c_source_files: []const []const u8, +}) !*Unit{ const unit = try context.allocator.create(Unit); unit.* = .{ .descriptor = .{ - .main_package_path = main_package_path, - .executable_path = executable_path, - .object_path = object_file_path, - .only_parse = only_parse, - .arch = arch, - .os = os, - .abi = abi, - .optimization = optimization, - .link_libc = switch (os) { - .linux => link_libc, + .main_package_path = arguments.main_package_path, + .executable_path = arguments.executable_path, + .object_path = arguments.object_path, + .only_parse = arguments.only_parse, + .arch = arguments.arch, + .os = arguments.os, + .abi = arguments.abi, + .optimization = arguments.optimization, + .link_libc = switch (arguments.os) { + .linux => arguments.link_libc, .macos => true, - .windows => link_libc, + .windows => arguments.link_libc, // .windows => link_libc, // else => unreachable, }, .link_libcpp = false, - .generate_debug_information = generate_debug_information, - .name = executable_name, - .is_test = options.is_test, - .c_source_files = c_source_files.slice(), + .generate_debug_information = arguments.generate_debug_information, + .name = arguments.name, + .is_test = arguments.is_test, + .c_source_files = arguments.c_source_files, }, .token_buffer = Token.Buffer{ .tokens = try PinnedArray(Token).init_with_default_granularity(), .line_offsets = try PinnedArray(u32).init_with_default_granularity(), }, - .node_buffer = try PinnedArray(Node).init_with_default_granularity(), - .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), - .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + // pinned hashmaps .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), @@ -3061,9 +3166,33 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), + // special pinned arrays + .types = try Type.List.init_with_default_granularity(), + // pinned arrays + .node_buffer = try PinnedArray(Node).init_with_default_granularity(), + .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), + .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), + .files = try PinnedArray(Debug.File).init_with_default_granularity(), + .structs = try PinnedArray(Struct).init_with_default_granularity(), + .struct_fields = try PinnedArray(Struct.Field).init_with_default_granularity(), + .enum_fields = try PinnedArray(Enum.Field).init_with_default_granularity(), + .function_definitions = try PinnedArray(Function.Definition).init_with_default_granularity(), + .blocks = try PinnedArray(Debug.Block).init_with_default_granularity(), + .global_declarations = try 
PinnedArray(Debug.Declaration.Global).init_with_default_granularity(), + .local_declarations = try PinnedArray(Debug.Declaration.Local).init_with_default_granularity(), + .argument_declarations = try PinnedArray(Debug.Declaration.Argument).init_with_default_granularity(), + .assembly_instructions = try PinnedArray(InlineAssembly.Instruction).init_with_default_granularity(), + .function_prototypes = try PinnedArray(Function.Prototype).init_with_default_granularity(), + .inline_assembly = try PinnedArray(InlineAssembly).init_with_default_granularity(), + .instructions = try PinnedArray(Instruction).init_with_default_granularity(), + .basic_blocks = try PinnedArray(BasicBlock).init_with_default_granularity(), + .constant_structs = try PinnedArray(V.Comptime.ConstantStruct).init_with_default_granularity(), + .constant_arrays = try PinnedArray(V.Comptime.ConstantArray).init_with_default_granularity(), + .constant_slices = try PinnedArray(V.Comptime.ConstantSlice).init_with_default_granularity(), + .error_fields = try PinnedArray(Type.Error.Field).init_with_default_granularity(), }; - try unit.compile(context); + return unit; } fn realpathAlloc(allocator: Allocator, pathname: []const u8) ![]const u8 { @@ -3412,7 +3541,7 @@ pub const Type = union(enum) { pub fn add_instantiation(polymorphic: *Polymorphic, unit: *Unit, context: *const Context, parameters: []const V.Comptime, original_declaration: *Debug.Declaration.Global, type_index: Type.Index) !void { const name_hash = try serialize_comptime_parameters(unit, context, &original_declaration.declaration, parameters); - const new_declaration_index = try unit.global_declarations.append(context.my_allocator, .{ + const new_declaration = unit.global_declarations.append(.{ .declaration = .{ .scope = original_declaration.declaration.scope, .type = .type, @@ -3428,7 +3557,6 @@ pub const Type = union(enum) { .type_node_index = original_declaration.type_node_index, .attributes = original_declaration.attributes, }); - const new_declaration = unit.global_declarations.get(new_declaration_index); const parameter_hash = hash(parameters); try polymorphic.instantiations.put_no_clobber(parameter_hash, new_declaration); @@ -3496,30 +3624,28 @@ pub const Type = union(enum) { cast: Type.Index, }; - const Error = struct { + pub const Error = struct { fields: DynamicBoundedArray(Type.Error.Field.Index), scope: Debug.Scope.Global, id: u32, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(Type.Error).Index; - const Field = struct { + pub const Field = struct { name: u32, type: Type.Index, value: usize, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(Type.Error.Field).Index; }; - const Union = struct { + pub const Union = struct { @"error": Type.Index, type: Type.Index, alternate_type: Type.Index, alternate_index: bool, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + + pub const Index = PinnedArray(Type.Error.Union).Index; const Descriptor = struct { @"error": Type.Index, @@ -3704,8 +3830,8 @@ pub const Type = union(enum) { }); }; - pub const List = BlockList(@This(), Common); - pub usingnamespace List.Index; + pub const List = PinnedArrayAdvanced(Type, Common); + pub const Index = List.Index; }; pub const Instruction = union(enum) { @@ -3960,8 +4086,7 @@ pub const Instruction = union(enum) { type: Type.Index, }; - pub const List = BlockList(@This(), enum {}); - pub usingnamespace 
@This().List.Index; + pub const Index = PinnedArray(Instruction).Index; }; pub const BasicBlock = struct { @@ -3981,8 +4106,7 @@ pub const BasicBlock = struct { _ = basic_block.predecessors.append(predecessor); } - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(BasicBlock).Index; }; pub const ComptimeParameterDeclaration = struct { @@ -4008,8 +4132,7 @@ pub const Function = struct { alloca_index: u32 = 1, has_debug_info: bool, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(Definition).Index; }; pub const CallingConvention = enum { @@ -4029,8 +4152,7 @@ pub const Function = struct { naked: bool = false, }; - const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + const Index = PinnedArray(Prototype).Index; const Abi = struct { return_type: Type.Index = .null, @@ -4115,12 +4237,10 @@ pub const Struct = struct { type: Type.Index, default_value: ?V.Comptime, - const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(Struct.Field).Index; }; - const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(Struct).Index; }; pub const Context = struct { @@ -4167,7 +4287,7 @@ pub const PolymorphicFunction = struct { const function_definition = unit.function_definitions.get(function_definition_index); const type_index = function_definition.type; - const new_declaration_index = try unit.global_declarations.append(context.my_allocator, .{ + const new_declaration = unit.global_declarations.append(.{ .declaration = .{ .scope = original_declaration.declaration.scope, .type = type_index, @@ -4183,7 +4303,6 @@ pub const PolymorphicFunction = struct { .type_node_index = original_declaration.type_node_index, .attributes = original_declaration.attributes, }); - const new_declaration = unit.global_declarations.get(new_declaration_index); const parameter_hash = hash(parameters); try polymorphic_function.instantiations.put_no_clobber(parameter_hash, new_declaration); @@ -4235,24 +4354,21 @@ pub const V = struct { end: usize, type: Type.Index, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(ConstantSlice).Index; }; pub const ConstantArray = struct { values: []const V.Comptime, type: Type.Index, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const ConstantStruct = struct { fields: []const V.Comptime, type: Type.Index, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const ComptimeInt = struct { @@ -4310,8 +4426,7 @@ pub const Debug = struct { @"extern", }; - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; pub fn getFunctionDefinitionIndex(global: *Global) Function.Definition.Index { return global.initial_value.function_definition; @@ -4321,15 +4436,13 @@ pub const Debug = struct { pub const Local = struct { declaration: Declaration, init_value: V, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const Argument = struct { declaration: Declaration, index: u32, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace 
List.Index; + pub const Index = PinnedArray(@This()).Index; }; }; @@ -4406,8 +4519,7 @@ pub const Debug = struct { pub const Block = struct { scope: Scope.Local, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const File = struct { @@ -4420,8 +4532,7 @@ pub const Debug = struct { // value: Value.Index = .null, scope: Scope.Global, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(File).Index; pub const Status = enum { not_loaded, @@ -4507,6 +4618,7 @@ pub const Builder = struct { } fn getErrorUnionType(builder: *Builder, unit: *Unit, context: *const Context, error_union: Type.Error.Union.Descriptor) !Type.Index { + _ = context; // autofix _ = builder; // autofix if (unit.error_unions.get(error_union)) |type_index| { return type_index; @@ -4520,13 +4632,13 @@ pub const Builder = struct { const biggest_index = @intFromBool(is_type_smaller_or_equal); const biggest_type_index = types[biggest_index]; - const abi_struct_index = try unit.structs.append(context.my_allocator, .{ + const abi_struct_index = unit.structs.append_index(.{ .kind = .{ .raw_error_union = biggest_type_index, }, }); - const abi_type_index = try unit.types.append(context.my_allocator, .{ + const abi_type_index = unit.types.append_index(.{ .@"struct" = abi_struct_index, }); @@ -4541,13 +4653,13 @@ pub const Builder = struct { if (padding_bit_count == 0 and t.* == .integer) { error_union_for_error = abi_type_index; } else { - const padding_type = if (padding_bit_count != 0) try unit.getArrayType(context, .{ + const padding_type = if (padding_bit_count != 0) try unit.getArrayType(.{ .count = padding_bit_count, .type = .u1, .termination = .none, }) else .null; - const error_union_for_error_struct_index = try unit.structs.append(context.my_allocator, .{ + const error_union_for_error_struct_index = unit.structs.append_index(.{ .kind = .{ .abi_compatible_error_union = .{ .type = error_union.@"error", @@ -4555,7 +4667,7 @@ pub const Builder = struct { }, }, }); - error_union_for_error = try unit.types.append(context.my_allocator, .{ + error_union_for_error = unit.types.append_index(.{ .@"struct" = error_union_for_error_struct_index, }); } @@ -4565,13 +4677,13 @@ pub const Builder = struct { const padding_bit_count = e_bitsize - t_bitsize; assert(padding_bit_count != 0); if (padding_bit_count != e_bitsize) { - const padding_type = try unit.getArrayType(context, .{ + const padding_type = try unit.getArrayType(.{ .count = padding_bit_count, .type = .u1, .termination = .none, }); - const error_union_for_error_struct_index = try unit.structs.append(context.my_allocator, .{ + const error_union_for_error_struct_index = unit.structs.append_index(.{ .kind = .{ .abi_compatible_error_union = .{ .type = error_union.@"error", @@ -4586,7 +4698,7 @@ pub const Builder = struct { } } - const error_union_struct_index = try unit.structs.append(context.my_allocator, .{ + const error_union_struct_index = unit.structs.append_index(.{ .kind = .{ .error_union = .{ .@"error" = error_union.@"error", @@ -4598,7 +4710,7 @@ pub const Builder = struct { }, }); - const error_union_type_index = try unit.types.append(context.my_allocator, .{ + const error_union_type_index = unit.types.append_index(.{ .@"struct" = error_union_struct_index, }); try unit.error_unions.put_no_clobber(error_union, error_union_type_index); @@ -4635,18 +4747,18 @@ pub const Builder = struct { const identifier = try 
unit.processIdentifier(context, string_name); try unit.string_literal_values.put_no_clobber(hash, string); - const string_global_index = try unit.global_declarations.append(context.my_allocator, .{ + const string_global = unit.global_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = identifier, .type = blk: { const length = string.len; - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .type = .u8, .count = length, .termination = .zero, }); - const string_type = try unit.getPointerType(context, .{ + const string_type = try unit.getPointerType(.{ .type = array_type, .termination = .none, .mutability = .@"const", @@ -4671,8 +4783,6 @@ pub const Builder = struct { }), }); - const string_global = unit.global_declarations.get(string_global_index); - try unit.string_literal_globals.put_no_clobber(hash, string_global); _ = unit.data_to_emit.append(string_global); @@ -4754,7 +4864,7 @@ pub const Builder = struct { operands[index] = operand; } - const instruction_index = try unit.assembly_instructions.append(context.my_allocator, .{ + const instruction_index = unit.assembly_instructions.append_index(.{ .id = @intFromEnum(instruction), .operands = operands, }); @@ -4764,11 +4874,11 @@ pub const Builder = struct { instructions[index] = instruction_index; } - const inline_assembly = try unit.inline_assembly.append(context.my_allocator, .{ + const inline_assembly = unit.inline_assembly.append_index(.{ .instructions = instructions, }); - const inline_asm = try unit.instructions.append(context.my_allocator, .{ + const inline_asm = unit.instructions.append_index(.{ .inline_assembly = inline_assembly, }); try builder.appendInstruction(unit, inline_asm); @@ -4848,7 +4958,7 @@ pub const Builder = struct { const stack = try builder.createStackVariable(unit, context, type_index, null); const destination = V{ .value = .{ .runtime = stack }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = type_index, .many = false, .termination = .none, @@ -4856,7 +4966,7 @@ pub const Builder = struct { .nullable = false, }), }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = destination, .source = v, @@ -4864,7 +4974,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, store); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = destination, .type = type_index, @@ -4880,7 +4990,7 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }, else => { - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .cast = .{ .value = v, .type = type_expect.type, @@ -4946,7 +5056,7 @@ pub const Builder = struct { instruction_list[i] = argument_value; } - const syscall = try unit.instructions.append(context.my_allocator, .{ + const syscall = unit.instructions.append_index(.{ .syscall = .{ .arguments = instruction_list, }, @@ -4986,7 +5096,7 @@ pub const Builder = struct { .smin = min_descriptor, }, }; - const min = try unit.instructions.append(context.my_allocator, instruction); + const min = unit.instructions.append_index(instruction); try builder.appendInstruction(unit, min); return .{ @@ -5056,7 +5166,7 @@ pub const Builder = struct { .global = fields, }, }, - .type = try unit.getPointerType(context, .{ + .type = try 
unit.getPointerType(.{ .type = fields.declaration.type, .termination = .none, .mutability = .@"const", @@ -5075,7 +5185,7 @@ pub const Builder = struct { const name_function = try builder.get_name_function(unit, context, v.type); var args = try context.arena.new_array(V, 1); args[0] = v; - const call = try unit.instructions.append(context.my_allocator, .{ + const call = unit.instructions.append_index(.{ .call = .{ .callable = .{ .value = .{ @@ -5115,7 +5225,7 @@ pub const Builder = struct { .type => |type_index| switch (unit.types.get(type_index).*) { .slice => |slice| { assert(slice.child_type == .u8); - const constant_slice = try unit.constant_slices.append(context.my_allocator, .{ + const constant_slice = unit.constant_slices.append_index(.{ .array = string_literal, .start = 0, .end = enum_name.len, @@ -5143,7 +5253,7 @@ pub const Builder = struct { .trailing_zeroes => { assert(argument_node_list.len == 1); const argument = try builder.resolveRuntimeValue(unit, context, Type.Expect.none, argument_node_list[0], .right); - const trailing_zeroes = try unit.instructions.append(context.my_allocator, .{ + const trailing_zeroes = unit.instructions.append_index(.{ .trailing_zeroes = argument, }); try builder.appendInstruction(unit, trailing_zeroes); @@ -5158,7 +5268,7 @@ pub const Builder = struct { .leading_zeroes => { assert(argument_node_list.len == 1); const argument = try builder.resolveRuntimeValue(unit, context, Type.Expect.none, argument_node_list[0], .right); - const leading_zeroes = try unit.instructions.append(context.my_allocator, .{ + const leading_zeroes = unit.instructions.append_index(.{ .leading_zeroes = argument, }); try builder.appendInstruction(unit, leading_zeroes); @@ -5178,8 +5288,8 @@ pub const Builder = struct { if (unit.name_functions.get(type_index)) |result| return result else { var argument_types = try context.arena.new_array(Type.Index, 1); argument_types[0] = type_index; - const return_type_index = try unit.getSliceType(context, .{ - .child_pointer_type = try unit.getPointerType(context, .{ + const return_type_index = try unit.getSliceType(.{ + .child_pointer_type = try unit.getPointerType(.{ .type = .u8, // TODO: zero-terminate? 
.termination = .none, @@ -5193,7 +5303,7 @@ pub const Builder = struct { .mutability = .@"const", .nullable = false, }); - const function_prototype_index = try unit.function_prototypes.append(context.my_allocator, .{ + const function_prototype_index = unit.function_prototypes.append_index(.{ .argument_types = argument_types, .return_type = return_type_index, .abi = .{ @@ -5201,10 +5311,10 @@ pub const Builder = struct { .parameter_types = argument_types, }, }); - const function_type_index = try unit.types.append(context.my_allocator, .{ + const function_type_index = unit.types.append_index(.{ .function = function_prototype_index, }); - const function_definition_index = try unit.function_definitions.append(context.my_allocator, .{ + const function_definition_index = unit.function_definitions.append_index(.{ .scope = .{ .scope = .{ .file = builder.current_file, @@ -5236,7 +5346,7 @@ pub const Builder = struct { defer builder.current_basic_block = old_basic_block; const argument_name_hash = try unit.processIdentifier(context, "_enum_value_"); - const argument_declaration_index = try unit.argument_declarations.append(context.my_allocator, .{ + const argument_declaration = unit.argument_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = argument_name_hash, @@ -5248,20 +5358,18 @@ pub const Builder = struct { }, .index = 0, }); - comptime assert(@TypeOf(argument_declaration_index) == Debug.Declaration.Argument.Index); - const argument = unit.argument_declarations.get(argument_declaration_index); - try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument.declaration); + try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument_declaration.declaration); - const entry_block = try builder.newBasicBlock(unit, context); - const exit_block = try builder.newBasicBlock(unit, context); + const entry_block = try builder.newBasicBlock(unit); + const exit_block = try builder.newBasicBlock(unit); builder.current_basic_block = entry_block; - const argument_instruction = try unit.instructions.append(context.my_allocator, .{ + const argument_instruction = unit.instructions.append_index(.{ .abi_argument = 0, }); try builder.appendInstruction(unit, argument_instruction); - const switch_instruction_index = try unit.instructions.append(context.my_allocator, .{ + const switch_instruction_index = unit.instructions.append_index(.{ .@"switch" = .{ .condition = .{ .value = .{ @@ -5275,7 +5383,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, switch_instruction_index); const switch_instruction = &unit.instructions.get(switch_instruction_index).@"switch"; - const phi_instruction_index = try unit.instructions.append(context.my_allocator, .{ + const phi_instruction_index = unit.instructions.append_index(.{ .phi = .{ .type = return_type_index, .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), @@ -5290,7 +5398,7 @@ pub const Builder = struct { for (enum_type.fields, 0..) 
|enum_field_index, i| { builder.current_basic_block = entry_block; const enum_field = unit.enum_fields.get(enum_field_index); - const case_block = try builder.newBasicBlock(unit, context); + const case_block = try builder.newBasicBlock(unit); builder.current_basic_block = case_block; const identifier = unit.getIdentifier(enum_field.name); const identifier_z = try context.allocator.dupeZ(u8, identifier); @@ -5298,7 +5406,7 @@ pub const Builder = struct { .line = 0, .column = 0, }); - const slice = try unit.constant_slices.append(context.my_allocator, .{ + const slice = unit.constant_slices.append_index(.{ .array = string_literal, .start = 0, .end = identifier_z.len, @@ -5313,7 +5421,7 @@ pub const Builder = struct { .type = return_type_index, }; phi.addIncoming(v, builder.current_basic_block); - try builder.jump(unit, context, exit_block); + try builder.jump(unit, exit_block); const case = Instruction.Switch.Case{ .condition = .{ @@ -5337,7 +5445,7 @@ pub const Builder = struct { builder.current_basic_block = exit_block; try builder.appendInstruction(unit, phi_instruction_index); - const ret = try unit.instructions.append(context.my_allocator, .{ + const ret = unit.instructions.append_index(.{ .ret = .{ .type = return_type_index, .value = .{ @@ -5347,7 +5455,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, ret); - const global_index = try unit.global_declarations.append(context.my_allocator, .{ + const global = unit.global_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = try unit.processIdentifier(context, try std.fmt.allocPrint(context.allocator, "get_enum_name_{}", .{@intFromEnum(type_index)})), @@ -5364,8 +5472,6 @@ pub const Builder = struct { .attributes = .{}, }); - const global = unit.global_declarations.get(global_index); - try unit.code_to_emit.put_no_clobber(function_definition_index, global); try unit.name_functions.put_no_clobber(type_index, global); @@ -5381,7 +5487,7 @@ pub const Builder = struct { .integer => |*integer| switch (integer.kind) { .@"enum" => |*enum_type| { const enum_count = enum_type.fields.len; - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .type = container_type_index, .count = enum_count, .termination = .none, @@ -5392,7 +5498,7 @@ pub const Builder = struct { .enum_value = enum_field_index, }; } - const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ + const constant_array = unit.constant_arrays.append_index(.{ .values = fields, .type = array_type, }); @@ -5401,7 +5507,7 @@ pub const Builder = struct { const name = try join_name(context, "_field_array_", unit.fields_array.length, 10); const identifier = try unit.processIdentifier(context, name); - const global_declaration_index = try unit.global_declarations.append(context.my_allocator, .{ + const global_declaration = unit.global_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = identifier, @@ -5419,7 +5525,6 @@ pub const Builder = struct { .@"export", }), }); - const global_declaration = unit.global_declarations.get(global_declaration_index); _ = unit.data_to_emit.append(global_declaration); try unit.fields_array.put_no_clobber(container_type_index, global_declaration); @@ -5591,7 +5696,7 @@ pub const Builder = struct { } } - fn pushScope(builder: *Builder, unit: *Unit, context: *const Context, new_scope: *Debug.Scope) !void { + fn pushScope(builder: *Builder, unit: *Unit, new_scope: *Debug.Scope) !void { const old_scope = 
builder.current_scope; assert(@intFromEnum(old_scope.kind) <= @intFromEnum(new_scope.kind)); @@ -5602,7 +5707,7 @@ pub const Builder = struct { if (current_function.basic_blocks.length <= 1 or current_basic_block.instructions.length > 0 or current_basic_block.predecessors.length > 0) { assert(@intFromEnum(old_scope.kind) >= @intFromEnum(Debug.Scope.Kind.function)); - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .push_scope = .{ .old = old_scope, .new = new_scope, @@ -5616,14 +5721,14 @@ pub const Builder = struct { builder.setCurrentScope(new_scope); } - fn popScope(builder: *Builder, unit: *Unit, context: *const Context) !void { + fn popScope(builder: *Builder, unit: *Unit) !void { const old_scope = builder.current_scope; const new_scope = old_scope.parent.?; assert(@intFromEnum(old_scope.kind) >= @intFromEnum(new_scope.kind)); if (builder.current_basic_block != .null and (unit.function_definitions.get(builder.current_function).basic_blocks.length <= 1 or (unit.basic_blocks.get(builder.current_basic_block).instructions.length > 0 or unit.basic_blocks.get(builder.current_basic_block).predecessors.length > 0))) { - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .pop_scope = .{ .old = old_scope, .new = new_scope, @@ -5680,8 +5785,8 @@ pub const Builder = struct { builder.current_file = file_index; defer builder.current_file = previous_file; - try builder.pushScope(unit, context, &file.scope.scope); - defer builder.popScope(unit, context) catch unreachable; + try builder.pushScope(unit, &file.scope.scope); + defer builder.popScope(unit) catch unreachable; const main_node_index = file.parser.main_node_index; @@ -5716,7 +5821,7 @@ pub const Builder = struct { }; } - fn insertDebugCheckPoint(builder: *Builder, unit: *Unit, context: *const Context, token: Token.Index) !void { + fn insertDebugCheckPoint(builder: *Builder, unit: *Unit, token: Token.Index) !void { if (builder.generate_debug_info and builder.current_scope.local) { const basic_block = unit.basic_blocks.get(builder.current_basic_block); assert(!basic_block.terminated); @@ -5724,7 +5829,7 @@ pub const Builder = struct { const debug_info = builder.getTokenDebugInfo(unit, token); if (debug_info.line != builder.last_check_point.line or debug_info.column != builder.last_check_point.column or builder.current_scope != builder.last_check_point.scope) { - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .debug_checkpoint = .{ .scope = builder.current_scope, .line = debug_info.line, @@ -5744,7 +5849,8 @@ pub const Builder = struct { } fn createStackVariable(builder: *Builder, unit: *Unit, context: *const Context, type_index: Type.Index, alignment: ?u32) !Instruction.Index { - const stack = try unit.instructions.append(context.my_allocator, .{ + _ = context; // autofix + const stack = unit.instructions.append_index(.{ .stack_slot = .{ .type = type_index, .alignment = alignment, @@ -5984,6 +6090,7 @@ pub const Builder = struct { }; fn referenceArgumentDeclaration(builder: *Builder, unit: *Unit, context: *const Context, scope: *Debug.Scope, declaration: *Debug.Declaration) !V { + _ = context; // autofix _ = builder; // autofix assert(declaration.kind == .argument); assert(scope.kind == .function); @@ -5996,7 +6103,7 @@ pub const Builder = struct { .value = .{ .runtime = instruction_index, }, - .type = 
try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = declaration.type, .termination = .none, .mutability = .@"const", @@ -6007,6 +6114,7 @@ pub const Builder = struct { } fn referenceLocalDeclaration(builder: *Builder, unit: *Unit, context: *const Context, scope: *Debug.Scope, declaration: *Debug.Declaration) !V { + _ = context; // autofix _ = builder; // autofix assert(declaration.kind == .local); assert(scope.kind == .block); @@ -6018,7 +6126,7 @@ pub const Builder = struct { .value = .{ .runtime = instruction_index, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = declaration.type, .termination = .none, .mutability = declaration.mutability, @@ -6301,7 +6409,7 @@ pub const Builder = struct { .struct_type, => b: { const global = try builder.referenceGlobalDeclaration(unit, context, lookup.scope, lookup.declaration, global_attributes, new_parameters, null, &.{}); - const pointer_to_global = try unit.getPointerType(context, .{ + const pointer_to_global = try unit.getPointerType(.{ .type = global.declaration.type, .termination = switch (type_expect) { .none => .none, @@ -6357,7 +6465,7 @@ pub const Builder = struct { .type = global.declaration.type, }, .@"var" => blk: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -6407,7 +6515,7 @@ pub const Builder = struct { .runtime => switch (side) { .left => preliminary_result, .right => b: { - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .load = .{ .value = preliminary_result, .type = lookup.declaration.type, @@ -6440,7 +6548,7 @@ pub const Builder = struct { switch (typecheck_result) { .success => return v, .zero_extend => { - const zero_extend = try unit.instructions.append(context.my_allocator, .{ + const zero_extend = unit.instructions.append_index(.{ .cast = .{ .id = .zero_extend, .value = v, @@ -6457,7 +6565,7 @@ pub const Builder = struct { }; }, .sign_extend => { - const sign_extend = try unit.instructions.append(context.my_allocator, .{ + const sign_extend = unit.instructions.append_index(.{ .cast = .{ .id = .sign_extend, .value = v, @@ -6473,7 +6581,7 @@ pub const Builder = struct { }; }, .pointer_var_to_const => { - const cast_to_const = try unit.instructions.append(context.my_allocator, .{ + const cast_to_const = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_var_to_const, .value = v, @@ -6490,7 +6598,7 @@ pub const Builder = struct { }; }, .slice_coerce_to_zero_termination => { - const cast_to_zero_termination = try unit.instructions.append(context.my_allocator, .{ + const cast_to_zero_termination = unit.instructions.append_index(.{ .cast = .{ .id = .slice_coerce_to_zero_termination, .value = v, @@ -6507,7 +6615,7 @@ pub const Builder = struct { }; }, .slice_var_to_const => { - const cast_to_const = try unit.instructions.append(context.my_allocator, .{ + const cast_to_const = unit.instructions.append_index(.{ .cast = .{ .id = .slice_var_to_const, .value = v, @@ -6524,7 +6632,7 @@ pub const Builder = struct { }; }, .slice_to_nullable => { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_nullable, .value = v, @@ -6541,7 +6649,7 @@ pub const Builder = struct { }; }, .pointer_to_nullable => { - const cast = try unit.instructions.append(context.my_allocator, .{ + const 
cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_nullable, .value = v, @@ -6615,7 +6723,7 @@ pub const Builder = struct { }, .type = expected_type_index, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = undef, .index = 0, @@ -6624,7 +6732,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -6667,7 +6775,7 @@ pub const Builder = struct { .type = error_union.union_for_error, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = undef, .index = 0, @@ -6676,7 +6784,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -6699,7 +6807,7 @@ pub const Builder = struct { const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = error_union.union_for_error, .termination = .none, .mutability = .@"var", @@ -6707,7 +6815,7 @@ pub const Builder = struct { .nullable = false, }); - const support_store = try unit.instructions.append(context.my_allocator, .{ + const support_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -6725,7 +6833,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, support_store); - const support_load = try unit.instructions.append(context.my_allocator, .{ + const support_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -6747,7 +6855,7 @@ pub const Builder = struct { }, .type_to_error_union => return try builder.resolveTypeToErrorUnion(unit, context, expected_type_index, v), .slice_zero_to_no_termination => { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_zero_to_no_termination, .value = v, @@ -6775,7 +6883,7 @@ pub const Builder = struct { .array => |array| array.count, else => |t| @panic(@tagName(t)), }; - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .count = expected_array_descriptor.count orelse len, .termination = expected_array_descriptor.termination, .type = expected_array_descriptor.type, @@ -6800,6 +6908,7 @@ pub const Builder = struct { } fn resolveTypeToErrorUnion(builder: *Builder, unit: *Unit, context: *const Context, error_union_type_index: Type.Index, value: V) !V { + _ = context; // autofix const error_union_struct_index = unit.types.get(error_union_type_index).@"struct"; const error_union_struct = unit.structs.get(error_union_struct_index); const error_union = error_union_struct.kind.error_union; @@ -6814,7 +6923,7 @@ pub const Builder = struct { }, .type = error_union_type_index, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = 
unit.instructions.append_index(.{ .insert_value = .{ .expression = undef, .index = 0, @@ -6823,7 +6932,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -6857,6 +6966,7 @@ pub const Builder = struct { } fn resolveErrorToAllErrorUnion(builder: *Builder, unit: *Unit, context: *const Context, destination_type_index: Type.Index, error_value: V) !V { + _ = context; // autofix const error_value_type = unit.types.get(error_value.type); const error_type = error_value_type.integer.kind.@"error"; const destination_error_union_type = unit.types.get(destination_type_index); @@ -6866,7 +6976,7 @@ pub const Builder = struct { const error_id = error_type.id; const constant_shifted = @as(u64, error_id) << 32; - const zero_extend = try unit.instructions.append(context.my_allocator, .{ + const zero_extend = unit.instructions.append_index(.{ .cast = .{ .id = .zero_extend, .value = error_value, @@ -6875,7 +6985,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, zero_extend); - const or_value = try unit.instructions.append(context.my_allocator, .{ + const or_value = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = .{ .value = .{ @@ -6900,7 +7010,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, or_value); if (destination_error_union.union_for_error == destination_error_union.abi) { - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -6919,7 +7029,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -6978,7 +7088,7 @@ pub const Builder = struct { const value_to_store = switch (node.id) { .assign => right, else => blk: { - const left_load = try unit.instructions.append(context.my_allocator, .{ + const left_load = unit.instructions.append_index(.{ .load = .{ .value = left, .type = expected_right_type, @@ -6990,7 +7100,7 @@ pub const Builder = struct { switch (unit.types.get(expected_right_type).*) { .integer => |integer| switch (integer.kind) { .materialized_int => { - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = .{ .value = .{ @@ -7024,7 +7134,7 @@ pub const Builder = struct { } }, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = left, .source = value_to_store, @@ -7044,9 +7154,9 @@ pub const Builder = struct { } } - fn newBasicBlock(builder: *Builder, unit: *Unit, context: *const Context) !BasicBlock.Index { + fn newBasicBlock(builder: *Builder, unit: *Unit) !BasicBlock.Index { const function = unit.function_definitions.get(builder.current_function); - const basic_block = try unit.basic_blocks.append(context.my_allocator, .{ + const basic_block = unit.basic_blocks.append_index(.{ .instructions = try PinnedArray(Instruction.Index).init(std.mem.page_size), }); _ = function.basic_blocks.append(basic_block); @@ 
-7054,8 +7164,7 @@ pub const Builder = struct { return basic_block; } - fn resolveIntegerType(builder: *Builder, unit: *Unit, context: *const Context, node_index: Node.Index) anyerror!Type.Index { - _ = builder; // autofix + fn resolveIntegerType(unit: *Unit, node_index: Node.Index) anyerror!Type.Index { const node = unit.getNode(node_index); const result: Type.Index = switch (node.id) { .signed_integer_type, @@ -7068,7 +7177,7 @@ pub const Builder = struct { }); const number_chunk = token_bytes[1..]; - const type_index = try unit.getIntegerType(context, .{ + const type_index = try unit.getIntegerType(.{ .bit_count = try std.fmt.parseInt(u16, number_chunk, 10), .signedness = switch (node.id) { .signed_integer_type => .signed, @@ -7115,7 +7224,7 @@ pub const Builder = struct { const element_type_index = @as(usize, 1) + @intFromBool(attribute_node_list.len == 3); const element_type = try builder.resolveType(unit, context, attribute_node_list[element_type_index], &.{}); - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .count = len, .type = element_type, .termination = termination, @@ -7139,7 +7248,7 @@ pub const Builder = struct { .signed_integer_type, .unsigned_integer_type, => b: { - break :b try builder.resolveIntegerType(unit, context, node_index); + break :b try resolveIntegerType(unit, node_index); }, .pointer_type => b: { const attribute_node_list = unit.getNodeList(node.left); @@ -7186,7 +7295,7 @@ pub const Builder = struct { assert(element_type_index != .null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .mutability = mutability, .many = many, .type = element_type_index, @@ -7238,10 +7347,10 @@ pub const Builder = struct { const nullable = false; - const slice_type = try unit.getSliceType(context, .{ + const slice_type = try unit.getSliceType(.{ .mutability = mutability, .child_type = element_type_index, - .child_pointer_type = try unit.getPointerType(context, .{ + .child_pointer_type = try unit.getPointerType(.{ .type = element_type_index, .termination = termination, .mutability = mutability, @@ -7262,16 +7371,16 @@ pub const Builder = struct { var nullable_pointer = pointer; assert(!nullable_pointer.nullable); nullable_pointer.nullable = true; - break :b try unit.getPointerType(context, nullable_pointer); + break :b try unit.getPointerType(nullable_pointer); }, .slice => |slice| b: { var nullable_slice = slice; assert(!nullable_slice.nullable); nullable_slice.nullable = true; - break :b try unit.getSliceType(context, nullable_slice); + break :b try unit.getSliceType(nullable_slice); }, else => b: { - const optional_type = try unit.getOptionalType(context, element_type_index); + const optional_type = try unit.getOptionalType(element_type_index); break :b optional_type; }, }; @@ -7301,7 +7410,7 @@ pub const Builder = struct { break :blk unit.all_errors; } else { const token_debug_info = builder.getTokenDebugInfo(unit, node.token); - unit.all_errors = try unit.types.append(context.my_allocator, .{ + unit.all_errors = unit.types.append_index(.{ .integer = .{ .bit_count = 64, .signedness = .unsigned, @@ -7414,7 +7523,7 @@ pub const Builder = struct { const attribute_node_list = attribute_and_return_type_node_list[0 .. 
attribute_and_return_type_node_list.len - 1]; const return_type_node_index = attribute_and_return_type_node_list[attribute_and_return_type_node_list.len - 1]; - const function_prototype_index = try unit.function_prototypes.append(context.my_allocator, .{}); + const function_prototype_index = unit.function_prototypes.append_index(.{}); const function_prototype = unit.function_prototypes.get(function_prototype_index); var is_naked: bool = false; @@ -7509,7 +7618,7 @@ pub const Builder = struct { // std.debug.panic("Symbol with name '{s}' already declarared on scope", .{argument_name}); } - const comptime_parameter_index = try unit.global_declarations.append(context.my_allocator, .{ + const comptime_parameter = unit.global_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = name_hash, @@ -7523,8 +7632,6 @@ pub const Builder = struct { .type_node_index = argument_declaration_node.left, .attributes = .{}, }); - - const comptime_parameter = unit.global_declarations.get(comptime_parameter_index); try builder.current_scope.declarations.put_no_clobber(name_hash, &comptime_parameter.declaration); }, else => |t| @panic(@tagName(t)), @@ -7590,7 +7697,7 @@ pub const Builder = struct { try builder.resolveFunctionPrototypeAbi(unit, context, function_prototype); - const function_prototype_type_index = try unit.types.append(context.my_allocator, .{ + const function_prototype_type_index = unit.types.append_index(.{ .function = function_prototype_index, }); @@ -7619,7 +7726,7 @@ pub const Builder = struct { } const argument_token_debug_info = builder.getTokenDebugInfo(unit, argument_node.token); - const argument_declaration_index = try unit.argument_declarations.append(context.my_allocator, .{ + const argument_declaration = unit.argument_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = argument_name_hash, @@ -7631,14 +7738,11 @@ pub const Builder = struct { }, .index = @intCast(argument_index), }); - comptime assert(@TypeOf(argument_declaration_index) == Debug.Declaration.Argument.Index); - const argument = unit.argument_declarations.get(argument_declaration_index); - try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument.declaration); + try builder.current_scope.declarations.put_no_clobber(argument_name_hash, &argument_declaration.declaration); } - fn classify_argument_type_aarch64(builder: *Builder, unit: *Unit, context: *const Context, type_index: Type.Index) Function.AbiInfo { - _ = builder; + fn classify_argument_type_aarch64(unit: *Unit, type_index: Type.Index) Function.AbiInfo { if (type_index == .void or type_index == .noreturn) return Function.AbiInfo{ .kind = .ignore, }; @@ -7699,7 +7803,7 @@ pub const Builder = struct { } else { const m = aligned_size / alignment; if (m > 1) { - const array_type = unit.getArrayType(context, .{ + const array_type = unit.getArrayType(.{ .type = .u64, .count = m, .termination = .none, @@ -7720,7 +7824,7 @@ pub const Builder = struct { } else { const alignment = ty.getAbiAlignment(unit); assert(alignment > 0); - const pointer_type = unit.getPointerType(context, .{ + const pointer_type = unit.getPointerType(.{ .type = type_index, .termination = .none, .mutability = .@"var", @@ -7741,6 +7845,7 @@ pub const Builder = struct { } fn classify_return_type_aarch64(builder: *Builder, unit: *Unit, context: *const Context, type_index: Type.Index) Function.AbiInfo { + _ = context; // autofix _ = builder; if (type_index == .void or type_index == .noreturn) return Function.AbiInfo{ .kind = 
.ignore, @@ -7791,7 +7896,7 @@ pub const Builder = struct { if (size <= 8 and @import("builtin").cpu.arch.endian() == .little) { return .{ .kind = .{ - .direct_coerce = unit.getIntegerType(context, .{ + .direct_coerce = unit.getIntegerType(.{ .bit_count = @intCast(size * 8), .signedness = .unsigned, .kind = .materialized_int, @@ -7802,7 +7907,7 @@ pub const Builder = struct { const alignment = ty.getAbiAlignment(unit); const aligned_size: u16 = @intCast(align_forward(size, 8)); if (alignment < 16 and aligned_size == 16) { - const array_type = unit.getArrayType(context, .{ + const array_type = unit.getArrayType(.{ .count = 2, .type = .u64, .termination = .none, @@ -7813,7 +7918,7 @@ pub const Builder = struct { }, }; } else { - const integer_t = unit.getIntegerType(context, .{ + const integer_t = unit.getIntegerType(.{ .kind = .materialized_int, .bit_count = aligned_size * 8, .signedness = .unsigned, @@ -7828,7 +7933,7 @@ pub const Builder = struct { } else { const alignment = ty.getAbiAlignment(unit); assert(alignment > 0); - const pointer_type = unit.getPointerType(context, .{ + const pointer_type = unit.getPointerType(.{ .type = type_index, .termination = .none, .mutability = .@"var", @@ -7855,7 +7960,7 @@ pub const Builder = struct { var parameter_types_abi = BoundedArray(Function.AbiInfo, 512){}; const return_type_abi = builder.classify_return_type_aarch64(unit, context, function_prototype.return_type); for (function_prototype.argument_types) |argument_type_index| { - const abi_arg = builder.classify_argument_type_aarch64(unit, context, argument_type_index); + const abi_arg = classify_argument_type_aarch64(unit, argument_type_index); parameter_types_abi.appendAssumeCapacity(abi_arg); } @@ -7899,7 +8004,7 @@ pub const Builder = struct { var abi_parameter_types = BoundedArray(Type.Index, 512){}; const abi_return_type = switch (function_prototype.abi.return_type_abi.kind) { .ignore => function_prototype.return_type, - .direct_pair => |direct_pair| try unit.getTwoStruct(context, direct_pair), + .direct_pair => |direct_pair| try unit.getTwoStruct(direct_pair), .direct => function_prototype.return_type, .indirect => |indirect| b: { abi_parameter_types.appendAssumeCapacity(indirect.pointer); @@ -7959,7 +8064,7 @@ pub const Builder = struct { const parameter_classification = builder.classify_argument_type_systemv_x86_64(unit, context, parameter_type_index, available_registers.gp_registers); const parameter_abi = if (available_registers.sse_registers < parameter_classification.needed_registers.sse_registers or available_registers.gp_registers < parameter_classification.needed_registers.gp_registers) b: { - break :b indirect_result(unit, context, parameter_type_index, available_registers.gp_registers); + break :b indirect_result(unit, parameter_type_index, available_registers.gp_registers); } else b: { available_registers.gp_registers -= parameter_classification.needed_registers.gp_registers; available_registers.sse_registers -= parameter_classification.needed_registers.sse_registers; @@ -8029,7 +8134,7 @@ pub const Builder = struct { => result[current_index] = .integer, else => switch (integer.kind) { .comptime_int => unreachable, - else => return builder.classify_systemv_x86_64(unit, context, unit.getIntegerType(context, .{ + else => return builder.classify_systemv_x86_64(unit, context, unit.getIntegerType(.{ .bit_count = integer.bit_count, .signedness = integer.signedness, .kind = .materialized_int, @@ -8217,7 +8322,7 @@ pub const Builder = struct { return type_index; } }, - else => return 
builder.get_int_type_at_offset_system_v_x86_64(unit, context, unit.getIntegerType(context, .{ + else => return builder.get_int_type_at_offset_system_v_x86_64(unit, context, unit.getIntegerType(.{ .bit_count = integer.bit_count, .signedness = integer.signedness, .kind = .materialized_int, @@ -8248,7 +8353,7 @@ pub const Builder = struct { } else { const byte_count: u16 = @intCast(source_size - source_offset); const bit_count = byte_count * 8; - const integer_type = unit.getIntegerType(context, .{ + const integer_type = unit.getIntegerType(.{ .bit_count = bit_count, .kind = .materialized_int, .signedness = .unsigned, // TODO @@ -8295,7 +8400,7 @@ pub const Builder = struct { } }, .memory => return .{ - .abi = indirect_result(unit, context, type_index, free_gp_registers), + .abi = indirect_result(unit, type_index, free_gp_registers), .needed_registers = needed_registers, }, else => |t| @panic(@tagName(t)), @@ -8372,7 +8477,7 @@ pub const Builder = struct { } break :b result_type; }, - .memory => return indirect_return_result(unit, context, type_index), + .memory => return indirect_return_result(unit, type_index), else => |t| @panic(@tagName(t)), }; @@ -8409,7 +8514,7 @@ pub const Builder = struct { sse_registers: u32, }; - fn indirect_result(unit: *Unit, context: *const Context, type_index: Type.Index, free_gp_registers: u32) Function.AbiInfo { + fn indirect_result(unit: *Unit, type_index: Type.Index, free_gp_registers: u32) Function.AbiInfo { const ty = unit.types.get(type_index); const is_illegal_vector = false; if (!ty.is_aggregate() and !is_illegal_vector) { @@ -8429,7 +8534,7 @@ pub const Builder = struct { } } - const pointer_type = unit.getPointerType(context, .{ + const pointer_type = unit.getPointerType(.{ .type = type_index, .termination = .none, .mutability = .@"var", @@ -8468,10 +8573,10 @@ pub const Builder = struct { } } - fn indirect_return_result(unit: *Unit, context: *const Context, type_index: Type.Index) Function.AbiInfo { + fn indirect_return_result(unit: *Unit, type_index: Type.Index) Function.AbiInfo { const ty = unit.types.get(type_index); if (ty.is_aggregate()) { - const pointer_type = unit.getPointerType(context, .{ + const pointer_type = unit.getPointerType(.{ .type = type_index, .termination = .none, .mutability = .@"var", @@ -8511,7 +8616,7 @@ pub const Builder = struct { .@"struct" => b: { assert(container_node.id == .struct_type); - const struct_index = try unit.structs.append(context.my_allocator, .{ + const struct_index = unit.structs.append_index(.{ .kind = .{ .@"struct" = .{ .scope = .{ @@ -8585,7 +8690,7 @@ pub const Builder = struct { } } - const plain_type_index = try unit.types.append(context.my_allocator, .{ + const plain_type_index = unit.types.append_index(.{ .@"struct" = struct_index, }); @@ -8599,7 +8704,7 @@ pub const Builder = struct { const declaration_token_debug_info = builder.getTokenDebugInfo(unit, parameter_type_token); const identifier = unit.getExpectedTokenBytes(parameter_type_token, .identifier); const hash = try unit.processIdentifier(context, identifier); - const global_declaration_index = try unit.global_declarations.append(context.my_allocator, .{ + const global_declaration = unit.global_declarations.append(.{ .declaration = .{ .scope = &struct_type.kind.@"struct".scope.scope, .name = hash, @@ -8615,13 +8720,12 @@ pub const Builder = struct { .type_node_index = .null, .attributes = .{}, }); - const global_declaration = unit.global_declarations.get(global_declaration_index); try 
struct_type.kind.@"struct".scope.scope.declarations.put_no_clobber(hash, &global_declaration.declaration); } const polymorphic_type_index = switch (parameter_types.len > 0) { true => blk: { - const polymorphic_type_index = try unit.types.append(context.my_allocator, .{ + const polymorphic_type_index = unit.types.append_index(.{ .polymorphic = .{ .parameters = param: { const heap_parameter_types = try context.arena.new_array(Token.Index, parameter_types.len); @@ -8679,7 +8783,7 @@ pub const Builder = struct { }, }; - const type_index = try unit.types.append(context.my_allocator, .{ + const type_index = unit.types.append_index(.{ .integer = .{ .bit_count = integer.bit_count, .signedness = integer.signedness, @@ -8726,7 +8830,7 @@ pub const Builder = struct { }, }; - const bitfield_type_index = try unit.types.append(context.my_allocator, .{ + const bitfield_type_index = unit.types.append_index(.{ .integer = .{ .bit_count = integer.bit_count, .signedness = integer.signedness, @@ -8766,8 +8870,8 @@ pub const Builder = struct { }; } - try builder.pushScope(unit, context, &scope.scope); - defer builder.popScope(unit, context) catch unreachable; + try builder.pushScope(unit, &scope.scope); + defer builder.popScope(unit) catch unreachable; const count = blk: { var result: struct { @@ -8882,7 +8986,7 @@ pub const Builder = struct { else => unreachable, }; - const global_declaration_index = try unit.global_declarations.append(context.my_allocator, .{ + const global_declaration = unit.global_declarations.append(.{ .declaration = .{ .scope = &scope.scope, .name = identifier_hash, @@ -8899,7 +9003,6 @@ pub const Builder = struct { .attributes = attributes, }); - const global_declaration = unit.global_declarations.get(global_declaration_index); try builder.current_scope.declarations.put_no_clobber(identifier_hash, &global_declaration.declaration); }, else => unreachable, @@ -8949,7 +9052,7 @@ pub const Builder = struct { .@"enum" => { assert(field_node.id == .enum_field); - const integer_type = try unit.getIntegerType(context, .{ + const integer_type = try unit.getIntegerType(.{ .bit_count = ty.integer.bit_count, .signedness = ty.integer.signedness, .kind = .materialized_int, @@ -8964,7 +9067,7 @@ pub const Builder = struct { }, }; - const enum_field_index = try unit.enum_fields.append(context.my_allocator, .{ + const enum_field_index = unit.enum_fields.append_index(.{ .name = hash, .value = enum_value, .parent = data.plain, @@ -8997,7 +9100,7 @@ pub const Builder = struct { else => |default_value_node_index| try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = field_type }, .{}, default_value_node_index, null, .right, &.{}, null, &.{}), }; - const struct_field = try unit.struct_fields.append(context.my_allocator, .{ + const struct_field = unit.struct_fields.append_index(.{ .name = hash, .type = field_type, .default_value = field_default_value, @@ -9012,7 +9115,7 @@ pub const Builder = struct { else => |default_value_node_index| try builder.resolveComptimeValue(unit, context, Type.Expect{ .type = field_type }, .{}, default_value_node_index, null, .right, &.{}, null, &.{}), }; - const struct_field = try unit.struct_fields.append(context.my_allocator, .{ + const struct_field = unit.struct_fields.append_index(.{ .name = hash, .type = field_type, .default_value = field_default_value, @@ -9071,7 +9174,7 @@ pub const Builder = struct { }); // TODO: make test function prototypes unique - const function_prototype_index = try unit.function_prototypes.append(context.my_allocator, .{ + const 
function_prototype_index = unit.function_prototypes.append_index(.{ .argument_types = &.{}, .return_type = return_type, .abi = .{ @@ -9082,7 +9185,7 @@ pub const Builder = struct { }, .calling_convention = .auto, }); - const function_type = try unit.types.append(context.my_allocator, .{ + const function_type = unit.types.append_index(.{ .function = function_prototype_index, }); unit.test_function_type = function_type; @@ -9108,7 +9211,7 @@ pub const Builder = struct { const name_hash = test_name_global.initial_value.string_literal; - const test_global_index = try unit.global_declarations.append(context.my_allocator, .{ + const test_global = unit.global_declarations.append(.{ .declaration = .{ .scope = &scope.scope, .type = function_type, @@ -9123,12 +9226,8 @@ pub const Builder = struct { .attributes = .{}, }); - const test_global = unit.global_declarations.get(test_global_index); - try scope.scope.declarations.put_no_clobber(name_hash, &test_global.declaration); - try unit.test_functions.put_no_clobber(test_name_global, test_global); - try unit.code_to_emit.put_no_clobber(comptime_value.function_definition, test_global); } } @@ -9145,15 +9244,17 @@ pub const Builder = struct { } fn emitMemcpy(builder: *Builder, unit: *Unit, context: *const Context, arguments: Instruction.Memcpy) !void { - const memcpy = try unit.instructions.append(context.my_allocator, .{ + _ = context; // autofix + const memcpy = unit.instructions.append_index(.{ .memcpy = arguments, }); try builder.appendInstruction(unit, memcpy); } fn emitIntegerCompare(builder: *Builder, unit: *Unit, context: *const Context, left_value: V, right_value: V, integer: Type.Integer, compare_node_id: Node.Id) anyerror!V { + _ = context; // autofix assert(left_value.type == right_value.type); - const compare = try unit.instructions.append(context.my_allocator, .{ + const compare = unit.instructions.append_index(.{ .integer_compare = .{ .left = left_value, .right = right_value, @@ -9211,7 +9312,7 @@ pub const Builder = struct { const token_debug_info = builder.getTokenDebugInfo(unit, function_node.token); const old_function = builder.current_function; - builder.current_function = try unit.function_definitions.append(context.my_allocator, .{ + builder.current_function = unit.function_definitions.append_index(.{ .type = maybe_function_type_index, .body = .null, .scope = .{ @@ -9235,8 +9336,8 @@ pub const Builder = struct { const function = unit.function_definitions.get(builder.current_function); builder.last_check_point = .{}; - try builder.pushScope(unit, context, &function.scope.scope); - defer builder.popScope(unit, context) catch unreachable; + try builder.pushScope(unit, &function.scope.scope); + defer builder.popScope(unit) catch unreachable; var comptime_parameter_declarations: []const ComptimeParameterDeclaration = &.{}; var comptime_parameter_instantiations: []const V.Comptime = &.{}; @@ -9253,12 +9354,12 @@ pub const Builder = struct { break :b function_prototype_index; } else maybe_function_type_index; - const entry_basic_block = try builder.newBasicBlock(unit, context); + const entry_basic_block = try builder.newBasicBlock(unit); builder.current_basic_block = entry_basic_block; defer builder.current_basic_block = .null; const body_node = unit.getNode(body_node_index); - try builder.insertDebugCheckPoint(unit, context, body_node.token); + try builder.insertDebugCheckPoint(unit, body_node.token); const function_prototype_index = unit.types.get(function.type).function; const function_prototype = 
unit.function_prototypes.get(function_prototype_index); @@ -9266,7 +9367,7 @@ pub const Builder = struct { //function.has_polymorphic_parameters = function_prototype.comptime_parameter_instantiations.len > 0; if (function_prototype.abi.return_type_abi.kind == .indirect) { - const return_pointer_argument = try unit.instructions.append(context.my_allocator, .{ + const return_pointer_argument = unit.instructions.append_index(.{ .abi_argument = 0, }); try builder.appendInstruction(unit, return_pointer_argument); @@ -9287,7 +9388,7 @@ pub const Builder = struct { const argument_type_index = argument_types[runtime_parameter_count]; const argument_abi_count = argument_abi.indices[1] - argument_abi.indices[0]; for (0..argument_abi_count) |argument_index| { - const argument_instruction = try unit.instructions.append(context.my_allocator, .{ + const argument_instruction = unit.instructions.append_index(.{ .abi_argument = @intCast(argument_abi.indices[0] + argument_index), }); @@ -9322,7 +9423,7 @@ pub const Builder = struct { assert(argument_abi_count == 1); const stack = try builder.createStackVariable(unit, context, argument_type_index, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = argument_type_index, .termination = .none, .mutability = .@"var", @@ -9330,7 +9431,7 @@ pub const Builder = struct { .nullable = false, }); - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -9367,14 +9468,14 @@ pub const Builder = struct { const stack = try builder.createStackVariable(unit, context, argument_type_index, null); const pointer_types = [2]Type.Index{ - try unit.getPointerType(context, .{ + try unit.getPointerType(.{ .type = pair[0], .termination = .none, .mutability = .@"var", .many = false, .nullable = false, }), - try unit.getPointerType(context, .{ + try unit.getPointerType(.{ .type = pair[0], .termination = .none, .mutability = .@"var", @@ -9396,7 +9497,7 @@ pub const Builder = struct { }, .type = pair[0], }; - const first_store = try unit.instructions.append(context.my_allocator, .{ + const first_store = unit.instructions.append_index(.{ .store = .{ .destination = destination, .source = source, @@ -9404,7 +9505,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, first_store); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = stack, .base_type = pair[0], @@ -9438,7 +9539,7 @@ pub const Builder = struct { .type = pair[1], }; - const second_store = try unit.instructions.append(context.my_allocator, .{ + const second_store = unit.instructions.append_index(.{ .store = .{ .destination = destination, .source = source, @@ -9460,7 +9561,7 @@ pub const Builder = struct { const argument_alloca = try builder.createStackVariable(unit, context, argument_type_index, null); const coerced_type = unit.types.get(coerced_type_index); const coerced_size = coerced_type.getAbiSize(unit); - const argument_pointer_type = try unit.getPointerType(context, .{ + const argument_pointer_type = try unit.getPointerType(.{ .type = argument_type_index, .termination = .none, .mutability = .@"var", @@ -9474,7 +9575,7 @@ pub const Builder = struct { const is_vector = false; if (coerced_size <= argument_size and !is_vector) { - const store = try unit.instructions.append(context.my_allocator, .{ + const store = 
unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -9494,7 +9595,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, store); } else { const coerced_alloca = try builder.createStackVariable(unit, context, coerced_type_index, null); - const coerced_pointer_type = try unit.getPointerType(context, .{ + const coerced_pointer_type = try unit.getPointerType(.{ .type = coerced_type_index, .termination = .none, .mutability = .@"var", @@ -9502,7 +9603,7 @@ pub const Builder = struct { .nullable = false, }); - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -9550,7 +9651,7 @@ pub const Builder = struct { try function.scope.argument_map.put_no_clobber(argument_declaration, stack); - const debug_declare_argument = try unit.instructions.append(context.my_allocator, .{ + const debug_declare_argument = unit.instructions.append_index(.{ .debug_declare_argument = .{ .argument = argument_declaration, .stack = stack, @@ -9619,7 +9720,7 @@ pub const Builder = struct { }, .type = return_type_index, }; - const insert = try unit.instructions.append(context.my_allocator, .{ + const insert = unit.instructions.append_index(.{ .insert_value = .{ .expression = undefined_value, .index = 1, @@ -9665,7 +9766,7 @@ pub const Builder = struct { // TODO: is this correct? error_union.type == .noreturn) { - const return_value = try unit.instructions.append(context.my_allocator, .{ + const return_value = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -9693,7 +9794,7 @@ pub const Builder = struct { .type = return_type_index, }, builder.current_basic_block); - try builder.jump(unit, context, builder.return_block); + try builder.jump(unit, builder.return_block); } else { try unit.dumpFunctionDefinition(builder.current_function); unreachable; @@ -9945,7 +10046,7 @@ pub const Builder = struct { } const token_debug_info = builder.getTokenDebugInfo(unit, node.token); - const error_type_index = try unit.types.append(context.my_allocator, .{ + const error_type_index = unit.types.append_index(.{ .integer = .{ .signedness = .unsigned, .bit_count = 32, @@ -9976,7 +10077,7 @@ pub const Builder = struct { const field_node = unit.getNode(field_node_index); const identifier = unit.getExpectedTokenBytes(field_node.token, .identifier); const hash = try unit.processIdentifier(context, identifier); - const error_field_index = try unit.error_fields.append(context.my_allocator, .{ + const error_field_index = unit.error_fields.append_index(.{ .name = hash, .type = error_type_index, .value = index, @@ -10037,7 +10138,7 @@ pub const Builder = struct { .anonymous_empty_literal => switch (type_expect) { .type => |type_index| switch (unit.types.get(type_index).*) { .slice => { - const constant_slice = try unit.constant_slices.append(context.my_allocator, .{ + const constant_slice = unit.constant_slices.append_index(.{ .array = null, .start = 0, .end = 0, @@ -10075,7 +10176,7 @@ pub const Builder = struct { const pointer_type_expect = switch (type_expect) { .none => type_expect, .type => |type_index| b: { - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = type_index, .mutability = .@"const", .many = false, // TODO @@ -10105,7 +10206,7 @@ pub const Builder = struct { }; } - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .count = 
string_literal.len, .type = .u8, .termination = .none, @@ -10114,7 +10215,7 @@ pub const Builder = struct { return V{ .value = .{ .@"comptime" = .{ - .constant_array = try unit.constant_arrays.append(context.my_allocator, .{ + .constant_array = unit.constant_arrays.append_index(.{ .values = values, .type = array_type, }), @@ -10139,7 +10240,7 @@ pub const Builder = struct { else => unreachable, }; - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = pointer_like_value, .type = load_type, @@ -10275,7 +10376,7 @@ pub const Builder = struct { unreachable; } else { // Right nullable, then we cast the left side to optional - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_nullable, .value = left_value, @@ -10513,7 +10614,7 @@ pub const Builder = struct { .shift_right => .shift_right, }; - const i = try unit.instructions.append(context.my_allocator, .{ + const i = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = left_value, .right = right_value, @@ -10529,7 +10630,7 @@ pub const Builder = struct { .bit_or => .bit_or, else => |t| @panic(@tagName(t)), }; - const i = try unit.instructions.append(context.my_allocator, .{ + const i = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = left_value, .right = right_value, @@ -10564,7 +10665,7 @@ pub const Builder = struct { switch (binary_operation_id) { .add => switch (unit.types.get(right_value.type).*) { .integer => { - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .index = right_value, .pointer = left_value.value.runtime, @@ -10649,7 +10750,7 @@ pub const Builder = struct { }, .block => block: { const block = try builder.resolveBlock(unit, context, node_index); - const block_i = try unit.instructions.append(context.my_allocator, .{ + const block_i = unit.instructions.append_index(.{ .block = block, }); break :block .{ @@ -10729,12 +10830,12 @@ pub const Builder = struct { .type = if (pointer.nullable) type_index else blk: { var p = pointer; p.nullable = true; - const nullable_pointer = try unit.getPointerType(context, p); + const nullable_pointer = try unit.getPointerType(p); break :blk nullable_pointer; }, }, .slice => |slice| if (slice.nullable) b: { - const constant_slice = try unit.constant_slices.append(context.my_allocator, .{ + const constant_slice = unit.constant_slices.append_index(.{ .array = null, .start = 0, .end = 0, @@ -10765,7 +10866,7 @@ pub const Builder = struct { const range_end: V = switch (range_node.right) { .null => switch (unit.types.get(expression_to_slice.type).*) { .slice => b: { - const extract_value = try unit.instructions.append(context.my_allocator, .{ + const extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression_to_slice, .index = 1, @@ -10796,7 +10897,7 @@ pub const Builder = struct { .slice => |slice| b: { _ = slice; // autofix assert(!pointer.many); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = expression_to_slice.value.runtime, .is_struct = true, @@ -10816,13 +10917,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = 
unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = Type.usize, .termination = .none, .many = false, @@ -10891,7 +10992,7 @@ pub const Builder = struct { .type = Type.usize, }; } else { - const range_compute = try unit.instructions.append(context.my_allocator, .{ + const range_compute = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = range_end, .right = range_start, @@ -10915,7 +11016,7 @@ pub const Builder = struct { .@"comptime" => { const pointer_value = switch (unit.types.get(expression_to_slice.type).*) { .slice => |slice| slice: { - const extract_pointer = try unit.instructions.append(context.my_allocator, .{ + const extract_pointer = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression_to_slice, .index = 0, @@ -10923,7 +11024,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, extract_pointer); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = extract_pointer, .index = range_start, @@ -10938,8 +11039,8 @@ pub const Builder = struct { .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ - .type = try unit.getArrayType(context, .{ + .type = try unit.getPointerType(.{ + .type = try unit.getArrayType(.{ .type = slice.child_type, .count = len_expression.value.@"comptime".constant_int.value, .termination = slice.termination, @@ -10955,7 +11056,7 @@ pub const Builder = struct { true => unreachable, false => switch (unit.types.get(pointer.type).*) { .slice => |slice| slice: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = expression_to_slice, .type = slice.child_pointer_type, @@ -10963,7 +11064,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .index = range_start, @@ -10978,8 +11079,8 @@ pub const Builder = struct { .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ - .type = try unit.getArrayType(context, .{ + .type = try unit.getPointerType(.{ + .type = try unit.getArrayType(.{ .type = slice.child_type, .count = len_expression.value.@"comptime".constant_int.value, .termination = slice.termination, @@ -10997,14 +11098,14 @@ pub const Builder = struct { unreachable; } else switch (unit.types.get(child_pointer.type).*) { .array => |array| if (array.type == slice.child_type) pointer: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = expression_to_slice, .type = pointer.type, }, }); try builder.appendInstruction(unit, load); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .index = range_start, @@ -11037,7 +11138,7 @@ pub const Builder = struct { .type => |destination_type_index| switch (try builder.typecheck(unit, context, destination_type_index, pointer_value.type)) { .pointer_to_array_coerce_to_slice => switch (pointer_value.value) { .runtime => { - const insert_pointer = try unit.instructions.append(context.my_allocator, .{ + const insert_pointer = 
unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11051,7 +11152,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, insert_pointer); - const insert_length = try unit.instructions.append(context.my_allocator, .{ + const insert_length = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11083,7 +11184,7 @@ pub const Builder = struct { .runtime => { const slice_value: V = switch (unit.types.get(expression_to_slice.type).*) { .slice => |slice| blk: { - const extract_value = try unit.instructions.append(context.my_allocator, .{ + const extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression_to_slice, .index = 0, @@ -11092,7 +11193,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, extract_value); const pointer_type = slice.child_pointer_type; - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = extract_value, .is_struct = false, @@ -11103,7 +11204,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_gep); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11122,7 +11223,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11146,7 +11247,7 @@ pub const Builder = struct { }, .pointer => |pointer| switch (pointer.many) { true => blk: { - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = expression_to_slice.value.runtime, .is_struct = false, @@ -11157,7 +11258,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_gep); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = pointer.type, .termination = pointer.termination, .mutability = pointer.mutability, @@ -11165,7 +11266,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_type = try unit.getSliceType(context, .{ + const slice_type = try unit.getSliceType(.{ .child_type = pointer.type, .child_pointer_type = pointer_type, .mutability = pointer.mutability, @@ -11173,7 +11274,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11192,7 +11293,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11216,7 +11317,7 @@ pub const Builder = struct { false => switch (unit.types.get(pointer.type).*) { .array => |array| blk: { assert(!pointer.nullable); - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = expression_to_slice.value.runtime, .base_type = array.type, @@ -11227,7 
+11328,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_gep); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = array.type, .termination = array.termination, .mutability = pointer.mutability, @@ -11235,7 +11336,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_type = try unit.getSliceType(context, .{ + const slice_type = try unit.getSliceType(.{ .child_type = array.type, .child_pointer_type = pointer_type, .termination = array.termination, @@ -11243,7 +11344,7 @@ pub const Builder = struct { .nullable = pointer.nullable, }); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11262,7 +11363,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11286,7 +11387,7 @@ pub const Builder = struct { .pointer => |child_pointer| switch (child_pointer.many) { true => blk: { assert(!child_pointer.nullable); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = expression_to_slice, .type = pointer.type, @@ -11294,7 +11395,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = child_pointer.type, @@ -11305,7 +11406,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_gep); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = child_pointer.type, .termination = child_pointer.termination, .mutability = child_pointer.mutability, @@ -11313,7 +11414,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_type = try unit.getSliceType(context, .{ + const slice_type = try unit.getSliceType(.{ .child_type = child_pointer.type, .child_pointer_type = pointer_type, .termination = child_pointer.termination, @@ -11321,7 +11422,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11340,7 +11441,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11363,7 +11464,7 @@ pub const Builder = struct { }, false => switch (unit.types.get(child_pointer.type).*) { .array => |array| blk: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = expression_to_slice, .type = pointer.type, @@ -11371,7 +11472,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = 
array.type, @@ -11382,7 +11483,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_gep); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = array.type, .termination = array.termination, .mutability = child_pointer.mutability, @@ -11390,7 +11491,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_type = try unit.getSliceType(context, .{ + const slice_type = try unit.getSliceType(.{ .child_type = array.type, .child_pointer_type = pointer_type, .termination = array.termination, @@ -11398,7 +11499,7 @@ pub const Builder = struct { .nullable = false, }); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11417,7 +11518,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11442,7 +11543,7 @@ pub const Builder = struct { }, }, .slice => |slice| blk: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = expression_to_slice, .type = pointer.type, @@ -11450,7 +11551,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const extract_pointer = try unit.instructions.append(context.my_allocator, .{ + const extract_pointer = unit.instructions.append_index(.{ .extract_value = .{ .expression = .{ .value = .{ @@ -11463,7 +11564,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, extract_pointer); - const pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = extract_pointer, .base_type = slice.child_type, @@ -11476,7 +11577,7 @@ pub const Builder = struct { const slice_type = pointer.type; - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11495,7 +11596,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11528,7 +11629,7 @@ pub const Builder = struct { .success => slice_value, .type_to_error_union => try builder.resolveTypeToErrorUnion(unit, context, type_index, slice_value), .slice_to_nullable => b: { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_nullable, .value = slice_value, @@ -11545,7 +11646,7 @@ pub const Builder = struct { }; }, .slice_zero_to_no_termination => b: { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_zero_to_no_termination, .value = slice_value, @@ -11597,7 +11698,7 @@ pub const Builder = struct { .type = pointer_type, }; - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11613,7 +11714,7 @@ pub const 
Builder = struct { const len = unit.types.get(string_global.declaration.type).array.count; - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = V{ .value = .{ @@ -11648,14 +11749,14 @@ pub const Builder = struct { const string_global = try builder.processStringLiteralFromToken(unit, context, node.token); switch (pointer.many) { true => { - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = string_global.declaration.type, .termination = .none, .mutability = pointer.mutability, .many = false, .nullable = false, }); - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_array_to_pointer_to_many, .value = .{ @@ -11689,7 +11790,7 @@ pub const Builder = struct { .right => blk: { const string_literal = try unit.fixupStringLiteral(context, node.token); const hash = try unit.processIdentifier(context, string_literal); - const ty = try unit.getArrayType(context, .{ + const ty = try unit.getArrayType(.{ .type = .u8, .count = string_literal.len, .termination = .none, @@ -11709,7 +11810,7 @@ pub const Builder = struct { .none => none: { const string_literal = try unit.fixupStringLiteral(context, node.token); const hash = try unit.processIdentifier(context, string_literal); - const ty = try unit.getArrayType(context, .{ + const ty = try unit.getArrayType(.{ .type = .u8, .count = string_literal.len, .termination = .none, @@ -11759,7 +11860,7 @@ pub const Builder = struct { const emit = true; const stack_slot = try builder.emitLocalVariableDeclaration(unit, context, unit.getNode(node.left).token, .@"const", value_pointer.type, value_pointer, emit, name); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = value_pointer.type, .many = false, .nullable = false, @@ -11767,7 +11868,7 @@ pub const Builder = struct { .termination = .none, }); - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_array_to_pointer_to_many, .value = .{ @@ -11780,7 +11881,7 @@ pub const Builder = struct { }, }); try builder.appendInstruction(unit, cast); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11799,7 +11900,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11837,7 +11938,7 @@ pub const Builder = struct { unit.anon_arr += 1; const emit = true; const stack_slot = try builder.emitLocalVariableDeclaration(unit, context, unit.getNode(node.left).token, .@"const", value_pointer.type, value_pointer, emit, name); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11856,7 +11957,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ 
.expression = .{ .value = .{ @@ -11893,7 +11994,7 @@ pub const Builder = struct { .pointer => |pointer| { switch (unit.types.get(pointer.type).*) { .array => |array| { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_array_to_pointer_to_many, .value = value_pointer, @@ -11902,7 +12003,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, cast); - const slice_builder = try unit.instructions.append(context.my_allocator, .{ + const slice_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11921,7 +12022,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, slice_builder); - const final_slice = try unit.instructions.append(context.my_allocator, .{ + const final_slice = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -11964,7 +12065,7 @@ pub const Builder = struct { .pointer => |left_pointer| switch (unit.types.get(left_pointer.type).*) { .array => |array| { assert(array.type == pointer.type); - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_array_to_pointer_to_many, //.array_to_pointer, .type = type_index, @@ -12049,7 +12150,7 @@ pub const Builder = struct { false => switch (unit.types.get(pointer.type).*) { .slice => |slice| try builder.build_slice_indexed_access(unit, context, array_like_expression, pointer.type, slice.child_pointer_type, slice.child_type, slice.mutability, .{ .pointer = 0, .length = 1 }, index), .array => |array| b: { - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = array_like_expression.value.runtime, .base_type = array.type, @@ -12060,7 +12161,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const gep_type = try unit.getPointerType(context, .{ + const gep_type = try unit.getPointerType(.{ .type = array.type, .termination = .none, .mutability = pointer.mutability, @@ -12077,14 +12178,14 @@ pub const Builder = struct { }, .pointer => |child_pointer| switch (child_pointer.many) { true => b: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = array_like_expression, .type = pointer.type, }, }); try builder.appendInstruction(unit, load); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = child_pointer.type, @@ -12095,7 +12196,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const gep_type = try unit.getPointerType(context, .{ + const gep_type = try unit.getPointerType(.{ .type = child_pointer.type, .termination = child_pointer.termination, .mutability = child_pointer.mutability, @@ -12112,7 +12213,7 @@ pub const Builder = struct { }, false => switch (unit.types.get(child_pointer.type).*) { .array => |array| b: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = array_like_expression, .type = pointer.type, @@ -12120,7 +12221,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = 
unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = array.type, @@ -12131,7 +12232,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const gep_type = try unit.getPointerType(context, .{ + const gep_type = try unit.getPointerType(.{ .type = array.type, .termination = .none, .mutability = pointer.mutability, @@ -12150,7 +12251,7 @@ pub const Builder = struct { .materialized_int => b: { assert(child_pointer.many); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = array_like_expression, .type = pointer.type, @@ -12158,7 +12259,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = child_pointer.type, @@ -12169,7 +12270,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const gep_type = try unit.getPointerType(context, .{ + const gep_type = try unit.getPointerType(.{ .type = child_pointer.type, .termination = .none, .mutability = pointer.mutability, @@ -12189,7 +12290,7 @@ pub const Builder = struct { .@"struct" => |struct_index| switch (unit.structs.get(struct_index).kind) { .@"struct" => |*struct_type| b: { if (struct_type.options.sliceable) |sliceable| { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = array_like_expression, .type = pointer.type, @@ -12242,7 +12343,7 @@ pub const Builder = struct { switch (side) { .left => break :blk gep, .right => { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = gep, .type = unit.types.get(gep.type).pointer.type, @@ -12291,7 +12392,7 @@ pub const Builder = struct { const boolean = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = .bool }, node.left, .right); switch (boolean.value) { .runtime => { - const xor = try unit.instructions.append(context.my_allocator, .{ + const xor = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .bit_xor, .signedness = .unsigned, @@ -12383,7 +12484,7 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }, .runtime => { - const sub = try unit.instructions.append(context.my_allocator, .{ + const sub = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .sub, .left = .{ @@ -12492,7 +12593,7 @@ pub const Builder = struct { .none => b: { var p = pointer; p.nullable = false; - const non_null_pointer = try unit.getPointerType(context, p); + const non_null_pointer = try unit.getPointerType(p); break :b non_null_pointer; }, else => |t| @panic(@tagName(t)), @@ -12504,7 +12605,7 @@ pub const Builder = struct { .@"comptime" = .null_pointer, }, }; - const cmp = try unit.instructions.append(context.my_allocator, .{ + const cmp = unit.instructions.append_index(.{ .integer_compare = .{ .left = v, .right = null_pointer, @@ -12513,9 +12614,9 @@ pub const Builder = struct { }, }); try builder.appendInstruction(unit, cmp); - const is_null_block = try builder.newBasicBlock(unit, context); - const is_not_null_block = try builder.newBasicBlock(unit, context); - try builder.branch(unit, context, cmp, is_null_block, is_not_null_block); + const is_null_block = try builder.newBasicBlock(unit); + const is_not_null_block = try 
builder.newBasicBlock(unit); + try builder.branch(unit, cmp, is_null_block, is_not_null_block); builder.current_basic_block = is_null_block; @@ -12526,7 +12627,7 @@ pub const Builder = struct { unreachable; } else { builder.current_basic_block = is_not_null_block; - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_not_nullable, .value = v, @@ -12548,13 +12649,13 @@ pub const Builder = struct { .none => b: { var s = slice; s.nullable = false; - const non_null_slice = try unit.getSliceType(context, s); + const non_null_slice = try unit.getSliceType(s); break :b non_null_slice; }, .type => |type_index| b: { var s = slice; s.nullable = false; - const non_null_slice = try unit.getSliceType(context, s); + const non_null_slice = try unit.getSliceType(s); assert(non_null_slice == type_index); break :b non_null_slice; }, @@ -12568,14 +12669,14 @@ pub const Builder = struct { }, }; - const get_pointer = try unit.instructions.append(context.my_allocator, .{ + const get_pointer = unit.instructions.append_index(.{ .extract_value = .{ .expression = v, .index = 0, }, }); try builder.appendInstruction(unit, get_pointer); - const cmp = try unit.instructions.append(context.my_allocator, .{ + const cmp = unit.instructions.append_index(.{ .integer_compare = .{ .left = .{ .value = .{ @@ -12589,9 +12690,9 @@ pub const Builder = struct { }, }); try builder.appendInstruction(unit, cmp); - const is_null_block = try builder.newBasicBlock(unit, context); - const is_not_null_block = try builder.newBasicBlock(unit, context); - try builder.branch(unit, context, cmp, is_null_block, is_not_null_block); + const is_null_block = try builder.newBasicBlock(unit); + const is_not_null_block = try builder.newBasicBlock(unit); + try builder.branch(unit, cmp, is_null_block, is_not_null_block); builder.current_basic_block = is_null_block; @@ -12599,7 +12700,7 @@ pub const Builder = struct { const is_block_terminated = unit.basic_blocks.get(builder.current_basic_block).terminated; if (!is_block_terminated) { assert(else_expr.type == type_to_expect); - const phi_index = try unit.instructions.append(context.my_allocator, .{ + const phi_index = unit.instructions.append_index(.{ .phi = .{ .type = type_to_expect, .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), @@ -12608,12 +12709,12 @@ pub const Builder = struct { const phi = &unit.instructions.get(phi_index).phi; phi.addIncoming(else_expr, builder.current_basic_block); - const phi_block = try builder.newBasicBlock(unit, context); - try builder.jump(unit, context, phi_block); + const phi_block = try builder.newBasicBlock(unit); + try builder.jump(unit, phi_block); builder.current_basic_block = is_not_null_block; - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_not_null, .value = v, @@ -12629,7 +12730,7 @@ pub const Builder = struct { .type = type_to_expect, }; phi.addIncoming(unwrap, builder.current_basic_block); - try builder.jump(unit, context, phi_block); + try builder.jump(unit, phi_block); builder.current_basic_block = phi_block; @@ -12641,7 +12742,7 @@ pub const Builder = struct { }; } else { builder.current_basic_block = is_not_null_block; - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_not_null, .value = v, @@ -12667,7 +12768,7 @@ pub const 
Builder = struct { }, .one_complement => block: { const value = try builder.resolveRuntimeValue(unit, context, type_expect, node.left, .right); - const not = try unit.instructions.append(context.my_allocator, .{ + const not = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .bit_xor, .left = value, @@ -12694,7 +12795,7 @@ pub const Builder = struct { }; }, .break_expression => b: { - try builder.jump(unit, context, builder.loop_exit_block); + try builder.jump(unit, builder.loop_exit_block); break :b V{ .type = .noreturn, .value = .{ @@ -12735,17 +12836,17 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), } - const is_error = try unit.instructions.append(context.my_allocator, .{ + const is_error = unit.instructions.append_index(.{ .extract_value = .{ .expression = value, .index = 1, }, }); try builder.appendInstruction(unit, is_error); - const error_block = try builder.newBasicBlock(unit, context); - const clean_block = try builder.newBasicBlock(unit, context); + const error_block = try builder.newBasicBlock(unit); + const clean_block = try builder.newBasicBlock(unit); - try builder.branch(unit, context, is_error, error_block, clean_block); + try builder.branch(unit, is_error, error_block, clean_block); builder.current_basic_block = error_block; @@ -12756,7 +12857,7 @@ pub const Builder = struct { switch (try builder.typecheck(unit, context, return_error_union.@"error", error_union.@"error")) { .success => { const error_value = if (error_union.union_for_error == error_union.abi) blk: { - const extract_value = try unit.instructions.append(context.my_allocator, .{ + const extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = value, .index = 0, @@ -12773,13 +12874,13 @@ pub const Builder = struct { } else err: { const try_alloca = try builder.createStackVariable(unit, context, value.type, null); - const try_store = try unit.instructions.append(context.my_allocator, .{ + const try_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ .runtime = try_alloca, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = value.type, .termination = .none, .mutability = .@"var", @@ -12792,7 +12893,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, try_store); - const union_for_error_gep = try unit.instructions.append(context.my_allocator, .{ + const union_for_error_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = try_alloca, .base_type = error_union.union_for_error, @@ -12812,13 +12913,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, union_for_error_gep); - const error_load = try unit.instructions.append(context.my_allocator, .{ + const error_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = union_for_error_gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = error_union.@"error", .termination = .none, .mutability = .@"const", @@ -12839,7 +12940,7 @@ pub const Builder = struct { }; if (return_error_union.union_for_error == return_error_union.abi) { - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -12853,7 +12954,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try 
unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -12896,7 +12997,7 @@ pub const Builder = struct { .type = error_union.union_for_error, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = v, .index = 0, @@ -12905,7 +13006,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -12928,7 +13029,7 @@ pub const Builder = struct { const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = error_union.union_for_error, .termination = .none, .mutability = .@"var", @@ -12936,7 +13037,7 @@ pub const Builder = struct { .nullable = false, }); - const support_store = try unit.instructions.append(context.my_allocator, .{ + const support_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -12954,7 +13055,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, support_store); - const support_load = try unit.instructions.append(context.my_allocator, .{ + const support_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -12983,7 +13084,7 @@ pub const Builder = struct { _ = constant_shifted; // autofix const error_value = if (error_union.union_for_error == error_union.abi) b: { - const get_error = try unit.instructions.append(context.my_allocator, .{ + const get_error = unit.instructions.append_index(.{ .extract_value = .{ .expression = value, .index = 0, @@ -12999,13 +13100,13 @@ pub const Builder = struct { } else err: { const try_alloca = try builder.createStackVariable(unit, context, value.type, null); - const try_store = try unit.instructions.append(context.my_allocator, .{ + const try_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ .runtime = try_alloca, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = value.type, .termination = .none, .mutability = .@"var", @@ -13018,7 +13119,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, try_store); - const union_for_error_gep = try unit.instructions.append(context.my_allocator, .{ + const union_for_error_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = try_alloca, .base_type = error_union.union_for_error, @@ -13038,13 +13139,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, union_for_error_gep); - const error_load = try unit.instructions.append(context.my_allocator, .{ + const error_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = union_for_error_gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = error_union.@"error", .termination = .none, .mutability = .@"const", @@ -13084,14 +13185,14 @@ pub const Builder = struct { } else { assert(builder.return_phi == .null); assert(builder.return_block == .null); - const phi_index = try unit.instructions.append(context.my_allocator, .{ + const phi_index = 
unit.instructions.append_index(.{ .phi = .{ .type = return_type_index, .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); const phi = &unit.instructions.get(phi_index).phi; - const phi_block = try builder.newBasicBlock(unit, context); + const phi_block = try builder.newBasicBlock(unit); phi.addIncoming(final_error_union, builder.current_basic_block); // const old_block = builder.current_basic_block; @@ -13101,11 +13202,11 @@ pub const Builder = struct { } assert(builder.return_block != .null); - try builder.jump(unit, context, builder.return_block); + try builder.jump(unit, builder.return_block); builder.current_basic_block = clean_block; - const result = try unit.instructions.append(context.my_allocator, .{ + const result = unit.instructions.append_index(.{ .extract_value = .{ .expression = value, .index = 0, @@ -13214,7 +13315,7 @@ pub const Builder = struct { .type = type_index, }; } else { - const zero_extend = try unit.instructions.append(context.my_allocator, .{ + const zero_extend = unit.instructions.append_index(.{ .cast = .{ .id = .zero_extend, .value = list.pointer[0], @@ -13237,7 +13338,7 @@ pub const Builder = struct { const field_bit_size = field_type.getBitSize(unit); defer bit_offset += field_bit_size; - const field_zero_extend = try unit.instructions.append(context.my_allocator, .{ + const field_zero_extend = unit.instructions.append_index(.{ .cast = .{ .id = .zero_extend, .value = field, @@ -13246,7 +13347,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, field_zero_extend); - const shift_left = try unit.instructions.append(context.my_allocator, .{ + const shift_left = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .shift_left, .left = .{ @@ -13271,7 +13372,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, shift_left); - const merge_or = try unit.instructions.append(context.my_allocator, .{ + const merge_or = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .bit_or, .signedness = integer.signedness, @@ -13310,7 +13411,7 @@ pub const Builder = struct { return .{ .value = .{ .@"comptime" = .{ - .constant_struct = try unit.constant_structs.append(context.my_allocator, .{ + .constant_struct = unit.constant_structs.append_index(.{ .fields = comptime_list, .type = type_index, }), @@ -13327,7 +13428,7 @@ pub const Builder = struct { }; for (list.slice(), 0..) 
|field, index| { - const struct_initialization_instruction = try unit.instructions.append(context.my_allocator, .{ + const struct_initialization_instruction = unit.instructions.append_index(.{ .insert_value = .{ .expression = struct_initialization, .index = @intCast(index), @@ -13363,7 +13464,7 @@ pub const Builder = struct { .array => type_index, else => |t| @panic(@tagName(t)), }, - .array => |array| try unit.getArrayType(context, .{ + .array => |array| try unit.getArrayType(.{ .count = expression_element_count, .type = array.type, .termination = array.termination, @@ -13409,7 +13510,7 @@ pub const Builder = struct { } if (is_comptime) { - const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ + const constant_array = unit.constant_arrays.append_index(.{ .values = blk: { var ct_values = try context.arena.new_array(V.Comptime, values.length); @@ -13420,7 +13521,7 @@ pub const Builder = struct { break :blk ct_values; }, // TODO: avoid hash lookup - .type = try unit.getArrayType(context, array_type), + .type = try unit.getArrayType(array_type), }); const v = V{ .value = .{ @@ -13429,7 +13530,7 @@ pub const Builder = struct { }, }, // TODO: avoid hash lookup - .type = try unit.getArrayType(context, array_type), + .type = try unit.getArrayType(array_type), }; return v; } else { @@ -13441,7 +13542,7 @@ pub const Builder = struct { }; for (values.slice(), 0..) |value, index| { - const insert_value = try unit.instructions.append(context.my_allocator, .{ + const insert_value = unit.instructions.append_index(.{ .insert_value = .{ .expression = array_builder, .index = @intCast(index), @@ -13480,7 +13581,7 @@ pub const Builder = struct { assert(field_pointer_type.mutability == .@"const"); assert(!field_pointer_type.nullable); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = value.value.runtime, .base_type = type_index, @@ -13500,13 +13601,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const second_load = try unit.instructions.append(context.my_allocator, .{ + const second_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = field.type, .many = false, .nullable = false, @@ -13562,7 +13663,7 @@ pub const Builder = struct { .member = value, }; } else if (first_argument_type_index == type_index) { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = value, .type = first_argument_type_index, @@ -13676,7 +13777,7 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }, .pointer => |child_pointer| blk: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = field_access_left, .type = pointer.type, @@ -13742,7 +13843,7 @@ pub const Builder = struct { .pointer => |pointer| switch (unit.types.get(pointer.type).*) { .pointer => |child_pointer| switch (unit.types.get(child_pointer.type).*) { .function => b: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = result, .type = pointer.type, @@ -13860,7 +13961,7 @@ pub const Builder = struct { .direct_coerce => |coerced_type_index| if (coerced_type_index == argument_value.type) 
argument_list.appendAssumeCapacity(argument_value) else { const stack = try builder.createStackVariable(unit, context, argument_value.type, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = argument_value.type, .termination = .none, .mutability = .@"var", @@ -13875,7 +13976,7 @@ pub const Builder = struct { .type = pointer_type, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = argument_alloca, .source = argument_value, @@ -13894,7 +13995,7 @@ pub const Builder = struct { const source_is_scalable_vector_type = false; if (source_size >= target_size and !source_is_scalable_vector_type and !target_is_scalable_vector_type) { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = argument_alloca, .type = coerced_type_index, @@ -13911,7 +14012,7 @@ pub const Builder = struct { } else { const alignment = @max(target_alignment, source_alignment); const temporal = try builder.createStackVariable(unit, context, coerced_type_index, alignment); - const coerced_pointer_type = try unit.getPointerType(context, .{ + const coerced_pointer_type = try unit.getPointerType(.{ .type = coerced_type_index, .termination = .none, .mutability = .@"var", @@ -13932,7 +14033,7 @@ pub const Builder = struct { .size = source_size, .is_volatile = false, }); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = destination, .type = coerced_type_index, @@ -13950,7 +14051,7 @@ pub const Builder = struct { } }, .direct_pair => |pair| { - const struct_type_index = try unit.getTwoStruct(context, pair); + const struct_type_index = try unit.getTwoStruct(pair); const pair_struct_type = unit.types.get(struct_type_index); const are_similar = b: { if (struct_type_index == argument_type_index) { @@ -13979,7 +14080,7 @@ pub const Builder = struct { }; if (are_similar) { - const extract_0 = try unit.instructions.append(context.my_allocator, .{ + const extract_0 = unit.instructions.append_index(.{ .extract_value = .{ .expression = argument_value, .index = 0, @@ -13994,7 +14095,7 @@ pub const Builder = struct { .type = pair[0], }); - const extract_1 = try unit.instructions.append(context.my_allocator, .{ + const extract_1 = unit.instructions.append_index(.{ .extract_value = .{ .expression = argument_value, .index = 1, @@ -14016,7 +14117,7 @@ pub const Builder = struct { const alloca_value = if (argument_alignment < target_alignment) b: { const coerced_alloca = try builder.createStackVariable(unit, context, struct_type_index, null); - const coerced_pointer_type = try unit.getPointerType(context, .{ + const coerced_pointer_type = try unit.getPointerType(.{ .type = struct_type_index, .termination = .none, .mutability = .@"var", @@ -14029,7 +14130,7 @@ pub const Builder = struct { }, .type = coerced_pointer_type, }; - const coerced_store = try unit.instructions.append(context.my_allocator, .{ + const coerced_store = unit.instructions.append_index(.{ .store = .{ .destination = coerced_pointer, .source = argument_value, @@ -14039,7 +14140,7 @@ pub const Builder = struct { break :b coerced_pointer; } else b: { - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = argument_type_index, .termination = .none, .mutability = .@"var", @@ -14054,7 +14155,7 
@@ pub const Builder = struct { }, .type = pointer_type, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = argument_alloca, .source = argument_value, @@ -14064,7 +14165,7 @@ pub const Builder = struct { break :b argument_alloca; }; - const gep0 = try unit.instructions.append(context.my_allocator, .{ + const gep0 = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = alloca_value.value.runtime, .base_type = struct_type_index, @@ -14084,13 +14185,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep0); - const load0 = try unit.instructions.append(context.my_allocator, .{ + const load0 = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = gep0, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = pair[0], .termination = .none, .mutability = .@"var", @@ -14103,7 +14204,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, load0); - const gep1 = try unit.instructions.append(context.my_allocator, .{ + const gep1 = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = alloca_value.value.runtime, .base_type = struct_type_index, @@ -14123,13 +14224,13 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep1); - const load1 = try unit.instructions.append(context.my_allocator, .{ + const load1 = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ .runtime = gep1, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = pair[1], .termination = .none, .mutability = .@"var", @@ -14180,7 +14281,7 @@ pub const Builder = struct { }, .type = indirect.pointer, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = indirect_value, .source = argument_value, @@ -14197,7 +14298,7 @@ pub const Builder = struct { } } - const instruction = try unit.instructions.append(context.my_allocator, .{ + const instruction = unit.instructions.append_index(.{ .call = .{ .callable = member_resolution.callable, .function_type = function_type_index, @@ -14215,7 +14316,7 @@ pub const Builder = struct { } if (indirect_return) |v| { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = v, .type = function_prototype.return_type, @@ -14262,7 +14363,7 @@ pub const Builder = struct { //std.debug.panic("Identifier '{s}' already declarared on scope", .{identifier}); } - const declaration_index = try unit.local_declarations.append(context.my_allocator, .{ + const local_declaration = unit.local_declarations.append(.{ .declaration = .{ .scope = builder.current_scope, .name = identifier_hash, @@ -14275,7 +14376,6 @@ pub const Builder = struct { .init_value = initialization, }); - const local_declaration = unit.local_declarations.get(declaration_index); assert(builder.current_scope.kind == .block); try builder.current_scope.declarations.put_no_clobber(identifier_hash, &local_declaration.declaration); @@ -14286,7 +14386,7 @@ pub const Builder = struct { const local_scope: *Debug.Scope.Local = @fieldParentPtr("scope", builder.current_scope); try local_scope.local_declaration_map.put_no_clobber(local_declaration, stack); - const debug_declare_local = try unit.instructions.append(context.my_allocator, .{ + const debug_declare_local = 
unit.instructions.append_index(.{ .debug_declare_local_variable = .{ .variable = local_declaration, .stack = stack, @@ -14295,7 +14395,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, debug_declare_local); - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -14319,7 +14419,7 @@ pub const Builder = struct { const block_node = unit.getNode(node_index); assert(block_node.id == .block); const token_debug_info = builder.getTokenDebugInfo(unit, block_node.token); - const block_index = try unit.blocks.append(context.my_allocator, .{ + const block_index = unit.blocks.append_index(.{ .scope = .{ .scope = .{ .line = token_debug_info.line, @@ -14339,9 +14439,9 @@ pub const Builder = struct { assert(builder.current_scope.kind == .block or builder.current_scope.kind == .function); } - try builder.pushScope(unit, context, &block.scope.scope); + try builder.pushScope(unit, &block.scope.scope); defer { - builder.popScope(unit, context) catch unreachable; + builder.popScope(unit) catch unreachable; } const statement_node_list = unit.getNodeList(block_node.left); @@ -14349,7 +14449,7 @@ pub const Builder = struct { for (statement_node_list) |statement_node_index| { const statement_node = unit.getNode(statement_node_index); - try builder.insertDebugCheckPoint(unit, context, statement_node.token); + try builder.insertDebugCheckPoint(unit, statement_node.token); switch (statement_node.id) { .assign, .add_assign, .sub_assign, .div_assign, .or_assign => { @@ -14433,25 +14533,25 @@ pub const Builder = struct { const old_loop_header_block = builder.loop_header_block; defer builder.loop_header_block = old_loop_header_block; - builder.loop_header_block = try builder.newBasicBlock(unit, context); - try builder.jump(unit, context, builder.loop_header_block); + builder.loop_header_block = try builder.newBasicBlock(unit); + try builder.jump(unit, builder.loop_header_block); builder.current_basic_block = builder.loop_header_block; const condition = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = .bool }, statement_node.left, .right); - const body_block = try builder.newBasicBlock(unit, context); - const exit_block = try builder.newBasicBlock(unit, context); + const body_block = try builder.newBasicBlock(unit); + const exit_block = try builder.newBasicBlock(unit); const old_loop_exit_block = builder.loop_exit_block; defer builder.loop_exit_block = old_loop_exit_block; switch (condition.value) { .runtime => |condition_instruction| { - try builder.branch(unit, context, condition_instruction, body_block, exit_block); + try builder.branch(unit, condition_instruction, body_block, exit_block); }, .@"comptime" => |ct| switch (ct) { .bool => |boolean| switch (boolean) { true => { - try builder.jump(unit, context, body_block); + try builder.jump(unit, body_block); }, false => unreachable, }, @@ -14466,7 +14566,7 @@ pub const Builder = struct { const body_value = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = .void }, statement_node.right, .right); switch (unit.types.get(body_value.type).*) { .void => { - try builder.jump(unit, context, builder.loop_header_block); + try builder.jump(unit, builder.loop_header_block); }, .noreturn => {}, else => |t| @panic(@tagName(t)), @@ -14553,7 +14653,7 @@ pub const Builder = struct { const range_end = switch (last_element_node.right) { .null => switch (unit.types.get(slices[0].type).*) { .slice => b: { - const 
len_extract_instruction = try unit.instructions.append(context.my_allocator, .{ + const len_extract_instruction = unit.instructions.append_index(.{ .extract_value = .{ .expression = slices[0], .index = 1, @@ -14601,7 +14701,7 @@ pub const Builder = struct { slices.len += 1; slices[index] = for_loop_value; - const len_extract_value = try unit.instructions.append(context.my_allocator, .{ + const len_extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = for_loop_value, .index = 1, @@ -14621,8 +14721,8 @@ pub const Builder = struct { }, .pointer => |pointer| switch (unit.types.get(pointer.type).*) { .array => |array| { - const slice_type = try unit.getSliceType(context, .{ - .child_pointer_type = try unit.getPointerType(context, .{ + const slice_type = try unit.getSliceType(.{ + .child_pointer_type = try unit.getPointerType(.{ .type = array.type, .termination = pointer.termination, .mutability = pointer.mutability, @@ -14634,7 +14734,7 @@ pub const Builder = struct { .mutability = pointer.mutability, .nullable = pointer.nullable, }); - const slice = try unit.constant_slices.append(context.my_allocator, .{ + const slice = unit.constant_slices.append_index(.{ .array = switch (for_loop_value.value) { .@"comptime" => |ct| switch (ct) { .global => |global| global, @@ -14684,11 +14784,11 @@ pub const Builder = struct { const old_loop_header_block = builder.loop_header_block; defer builder.loop_header_block = old_loop_header_block; - builder.loop_header_block = try builder.newBasicBlock(unit, context); - try builder.jump(unit, context, builder.loop_header_block); + builder.loop_header_block = try builder.newBasicBlock(unit); + try builder.jump(unit, builder.loop_header_block); builder.current_basic_block = builder.loop_header_block; - const pointer_to_usize = try unit.getPointerType(context, .{ + const pointer_to_usize = try unit.getPointerType(.{ .type = Type.usize, .mutability = .@"const", .nullable = false, @@ -14696,7 +14796,7 @@ pub const Builder = struct { .termination = .none, }); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -14710,7 +14810,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, load); - const compare = try unit.instructions.append(context.my_allocator, .{ + const compare = unit.instructions.append_index(.{ .integer_compare = .{ .left = .{ .value = .{ @@ -14725,9 +14825,9 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, compare); - const body_block = try builder.newBasicBlock(unit, context); - const exit_block = try builder.newBasicBlock(unit, context); - try builder.branch(unit, context, compare, body_block, exit_block); + const body_block = try builder.newBasicBlock(unit); + const exit_block = try builder.newBasicBlock(unit); + try builder.branch(unit, compare, body_block, exit_block); builder.current_basic_block = body_block; const old_loop_exit_block = builder.loop_exit_block; @@ -14738,7 +14838,7 @@ pub const Builder = struct { const not_range_len = payloads.len - @intFromBool(is_last_element_range); if (slices.len > 0) { - const load_i = try unit.instructions.append(context.my_allocator, .{ + const load_i = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -14752,7 +14852,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, load_i); for (payloads[0..not_range_len], slices) |payload_node_index, slice| { - const pointer_extract_value = try 
unit.instructions.append(context.my_allocator, .{ + const pointer_extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = slice, .index = 0, @@ -14762,7 +14862,7 @@ pub const Builder = struct { const slice_type = unit.types.get(slice.type).slice; - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = pointer_extract_value, .base_type = slice_type.child_type, @@ -14781,7 +14881,7 @@ pub const Builder = struct { const is_by_value = true; const init_instruction = switch (is_by_value) { true => vblk: { - const load_gep = try unit.instructions.append(context.my_allocator, .{ + const load_gep = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -14821,7 +14921,7 @@ pub const Builder = struct { unreachable; } - const load_iterator = try unit.instructions.append(context.my_allocator, .{ + const load_iterator = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -14835,7 +14935,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, load_iterator); - const increment = try unit.instructions.append(context.my_allocator, .{ + const increment = unit.instructions.append_index(.{ .integer_binary_operation = .{ .left = .{ .value = .{ @@ -14860,7 +14960,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, increment); - const increment_store = try unit.instructions.append(context.my_allocator, .{ + const increment_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -14879,16 +14979,16 @@ pub const Builder = struct { try builder.appendInstruction(unit, increment_store); - try builder.jump(unit, context, builder.loop_header_block); + try builder.jump(unit, builder.loop_header_block); builder.current_basic_block = exit_block; } }, .break_expression => { - try builder.jump(unit, context, builder.loop_exit_block); + try builder.jump(unit, builder.loop_exit_block); }, .continue_expression => { - try builder.jump(unit, context, builder.loop_header_block); + try builder.jump(unit, builder.loop_header_block); }, .@"if" => { assert(statement_node.left != .null); @@ -14975,16 +15075,16 @@ pub const Builder = struct { } const catch_type_expect = Type.Expect{ .type = error_union.type }; - const is_error = try unit.instructions.append(context.my_allocator, .{ + const is_error = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression, .index = 1, }, }); try builder.appendInstruction(unit, is_error); - const error_block = try builder.newBasicBlock(unit, context); - const clean_block = try builder.newBasicBlock(unit, context); - try builder.branch(unit, context, is_error, error_block, clean_block); + const error_block = try builder.newBasicBlock(unit); + const clean_block = try builder.newBasicBlock(unit); + try builder.branch(unit, is_error, error_block, clean_block); builder.current_basic_block = error_block; @@ -14994,7 +15094,7 @@ pub const Builder = struct { const payload_node = unit.getNode(right_node.left); const emit = true; - const error_extract_value = try unit.instructions.append(context.my_allocator, .{ + const error_extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression, .index = 0, @@ -15030,7 +15130,7 @@ pub const Builder = struct { const maybe_catch_info: ?CatchInfo = if (!is_block_terminated) blk: { const expected_type = error_union.type; assert(v.type == expected_type); - const phi_index = try 
unit.instructions.append(context.my_allocator, .{ + const phi_index = unit.instructions.append_index(.{ .phi = .{ .type = expected_type, .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), @@ -15039,8 +15139,8 @@ pub const Builder = struct { const phi = &unit.instructions.get(phi_index).phi; phi.addIncoming(v, builder.current_basic_block); - const phi_block = try builder.newBasicBlock(unit, context); - try builder.jump(unit, context, phi_block); + const phi_block = try builder.newBasicBlock(unit); + try builder.jump(unit, phi_block); break :blk .{ .phi = phi_index, .exit_block = phi_block, @@ -15050,7 +15150,7 @@ pub const Builder = struct { assert(unit.basic_blocks.get(builder.current_basic_block).terminated); builder.current_basic_block = clean_block; - const no_error_extract_value = try unit.instructions.append(context.my_allocator, .{ + const no_error_extract_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = expression, .index = 0, @@ -15073,7 +15173,7 @@ pub const Builder = struct { phi.addIncoming(value, builder.current_basic_block); - try builder.jump(unit, context, exit_block); + try builder.jump(unit, exit_block); builder.current_basic_block = exit_block; try builder.appendInstruction(unit, phi_index); @@ -15116,7 +15216,7 @@ pub const Builder = struct { switch (unit.types.get(optional_expression.type).*) { .slice => |slice| { if (slice.nullable) { - const pointer_value = try unit.instructions.append(context.my_allocator, .{ + const pointer_value = unit.instructions.append_index(.{ .extract_value = .{ .expression = optional_expression, .index = 0, @@ -15125,7 +15225,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, pointer_value); - const condition = try unit.instructions.append(context.my_allocator, .{ + const condition = unit.instructions.append_index(.{ .integer_compare = .{ .id = .not_equal, .left = .{ @@ -15151,7 +15251,7 @@ pub const Builder = struct { }, .pointer => |pointer| { if (pointer.nullable) { - const condition = try unit.instructions.append(context.my_allocator, .{ + const condition = unit.instructions.append_index(.{ .integer_compare = .{ .id = .not_equal, .left = optional_expression, @@ -15178,11 +15278,11 @@ pub const Builder = struct { } fn resolveBranch(builder: *Builder, unit: *Unit, context: *const Context, type_expect: Type.Expect, condition: Instruction.Index, taken_node_index: Node.Index, not_taken_node_index: Node.Index, optional_payload_token: Token.Index, maybe_optional_value: ?V) !void { - const taken_block = try builder.newBasicBlock(unit, context); - const exit_block = try builder.newBasicBlock(unit, context); - const not_taken_block = if (not_taken_node_index != .null) try builder.newBasicBlock(unit, context) else exit_block; + const taken_block = try builder.newBasicBlock(unit); + const exit_block = try builder.newBasicBlock(unit); + const not_taken_block = if (not_taken_node_index != .null) try builder.newBasicBlock(unit) else exit_block; builder.exit_blocks.appendAssumeCapacity(exit_block); - try builder.branch(unit, context, condition, taken_block, not_taken_block); + try builder.branch(unit, condition, taken_block, not_taken_block); builder.current_basic_block = taken_block; @@ -15190,11 +15290,11 @@ pub const Builder = struct { assert(optional_payload_token != .null); switch (unit.types.get(optional_value.type).*) { .slice => |slice| { - const not_null_slice = try unit.getSliceType(context, .{ + const not_null_slice = try unit.getSliceType(.{ 
.child_pointer_type = blk: { const child_pointer_type = unit.types.get(slice.child_pointer_type).pointer; - break :blk try unit.getPointerType(context, .{ + break :blk try unit.getPointerType(.{ .type = child_pointer_type.type, .termination = child_pointer_type.termination, .mutability = child_pointer_type.mutability, @@ -15208,7 +15308,7 @@ pub const Builder = struct { .nullable = false, }); - const unwrap = try unit.instructions.append(context.my_allocator, .{ + const unwrap = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_not_null, .value = optional_value, @@ -15226,7 +15326,7 @@ pub const Builder = struct { }, emit, null); }, .pointer => |pointer| { - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = pointer.type, .termination = pointer.termination, .mutability = pointer.mutability, @@ -15234,7 +15334,7 @@ pub const Builder = struct { .nullable = false, }); - const unwrap = try unit.instructions.append(context.my_allocator, .{ + const unwrap = unit.instructions.append_index(.{ .cast = .{ .id = .slice_to_not_null, .value = optional_value, @@ -15257,14 +15357,14 @@ pub const Builder = struct { _ = try builder.resolveRuntimeValue(unit, context, type_expect, taken_node_index, .right); if (!unit.basic_blocks.get(builder.current_basic_block).terminated) { - try builder.jump(unit, context, exit_block); + try builder.jump(unit, exit_block); } if (not_taken_node_index != .null) { builder.current_basic_block = not_taken_block; _ = try builder.resolveRuntimeValue(unit, context, type_expect, not_taken_node_index, .right); if (!unit.basic_blocks.get(builder.current_basic_block).terminated) { - try builder.jump(unit, context, exit_block); + try builder.jump(unit, exit_block); } } @@ -15273,8 +15373,8 @@ pub const Builder = struct { } } - fn branch(builder: *Builder, unit: *Unit, context: *const Context, condition: Instruction.Index, taken_block: BasicBlock.Index, non_taken_block: BasicBlock.Index) !void { - const br = try unit.instructions.append(context.my_allocator, .{ + fn branch(builder: *Builder, unit: *Unit, condition: Instruction.Index, taken_block: BasicBlock.Index, non_taken_block: BasicBlock.Index) !void { + const br = unit.instructions.append_index(.{ .branch = .{ .condition = condition, .from = builder.current_basic_block, @@ -15292,8 +15392,8 @@ pub const Builder = struct { try non_taken_bb.add_predecessor(builder.current_basic_block); } - fn jump(builder: *Builder, unit: *Unit, context: *const Context, new_basic_block: BasicBlock.Index) !void { - const instruction = try unit.instructions.append(context.my_allocator, .{ + fn jump(builder: *Builder, unit: *Unit, new_basic_block: BasicBlock.Index) !void { + const instruction = unit.instructions.append_index(.{ .jump = .{ .from = builder.current_basic_block, .to = new_basic_block, @@ -15416,7 +15516,7 @@ pub const Builder = struct { else => |t| @panic(@tagName(t)), }; - const switch_instruction_index = try unit.instructions.append(context.my_allocator, .{ + const switch_instruction_index = unit.instructions.append_index(.{ .@"switch" = .{ .condition = expression_to_switch_on, .block_type = type_index, @@ -15428,18 +15528,18 @@ pub const Builder = struct { const phi_info: ?PhiInfo = switch (unit.types.get(type_index).*) { .void, .noreturn => null, else => PhiInfo{ - .instruction = try unit.instructions.append(context.my_allocator, .{ + .instruction = unit.instructions.append_index(.{ .phi = .{ .type = type_index, .values = try 
context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }), - .block = try builder.newBasicBlock(unit, context), + .block = try builder.newBasicBlock(unit), }, }; const before_switch_bb = builder.current_basic_block; - const switch_exit_block = try builder.newBasicBlock(unit, context); + const switch_exit_block = try builder.newBasicBlock(unit); var stack_switch_cases = BoundedArray(Instruction.Switch.Case, 512){}; @@ -15502,7 +15602,7 @@ pub const Builder = struct { }, } - const case_block = try builder.newBasicBlock(unit, context); + const case_block = try builder.newBasicBlock(unit); const case_bb = unit.basic_blocks.get(case_block); try case_bb.add_predecessor(before_switch_bb); @@ -15514,7 +15614,7 @@ pub const Builder = struct { if (!unit.basic_blocks.get(builder.current_basic_block).terminated) { const phi_instruction = &unit.instructions.get(phi.instruction).phi; phi_instruction.addIncoming(v, case_block); - try builder.jump(unit, context, phi.block); + try builder.jump(unit, phi.block); } } else if (builder.current_basic_block != .null) { const current_block = unit.basic_blocks.get(builder.current_basic_block); @@ -15522,7 +15622,7 @@ pub const Builder = struct { switch (v_ty.*) { .void => { assert(!current_block.terminated); - try builder.jump(unit, context, switch_exit_block); + try builder.jump(unit, switch_exit_block); }, .noreturn => {}, else => |t| @panic(@tagName(t)), @@ -15604,7 +15704,7 @@ pub const Builder = struct { } fn create_unreachable_block(builder: *Builder, unit: *Unit, context: *const Context) !BasicBlock.Index { - const block = try builder.newBasicBlock(unit, context); + const block = try builder.newBasicBlock(unit); const old_block = builder.current_basic_block; builder.current_basic_block = block; try builder.buildUnreachable(unit, context); @@ -15635,7 +15735,7 @@ pub const Builder = struct { const result: V = if (scope.lookupDeclaration(identifier_hash, look_in_parent_scopes)) |lookup| blk: { const global = try builder.referenceGlobalDeclaration(unit, context, lookup.scope, lookup.declaration, .{}, new_parameters, null, &.{}); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = global.declaration.type, .termination = .none, .mutability = .@"var", @@ -15670,7 +15770,7 @@ pub const Builder = struct { .type = global.declaration.type, }, .@"var" => v: { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -15815,7 +15915,7 @@ pub const Builder = struct { }; const field_index = @intFromEnum(slice_field); - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = left.value.runtime, .base_type = pointer.type, @@ -15842,7 +15942,7 @@ pub const Builder = struct { .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = Type.usize, .many = false, .nullable = false, @@ -15854,7 +15954,7 @@ pub const Builder = struct { switch (side) { .left => break :b gep_value, .right => { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = gep_value, .type = field_type, @@ -15896,7 +15996,7 @@ pub const Builder = struct { for (fields, 0..) 
|field_index, i| { const field = unit.struct_fields.get(field_index); if (field.name == identifier_hash) { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = left, .type = pointer.type, @@ -15905,7 +16005,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, load); // GEP because this is still a pointer - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = load, .base_type = child_pointer.type, @@ -15926,7 +16026,7 @@ pub const Builder = struct { try builder.appendInstruction(unit, gep); const mutability = child_pointer.mutability; - const gep_pointer_type = try unit.getPointerType(context, .{ + const gep_pointer_type = try unit.getPointerType(.{ .type = field.type, .termination = .none, .mutability = mutability, @@ -15943,7 +16043,7 @@ pub const Builder = struct { break :b switch (side) { .left => gep_value, .right => right: { - const field_load = try unit.instructions.append(context.my_allocator, .{ + const field_load = unit.instructions.append_index(.{ .load = .{ .value = gep_value, .type = field.type, @@ -15977,7 +16077,7 @@ pub const Builder = struct { for (fields, 0..) |field_index, i| { const field = unit.struct_fields.get(field_index); if (field.name == identifier_hash) { - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = left.value.runtime, .base_type = pointer.type, @@ -16001,7 +16101,7 @@ pub const Builder = struct { .value = .{ .runtime = gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = field.type, .mutability = .@"const", .nullable = false, @@ -16012,7 +16112,7 @@ pub const Builder = struct { switch (side) { .left => break :b gep_value, .right => { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = gep_value, .type = field.type, @@ -16047,7 +16147,7 @@ pub const Builder = struct { if (field.name == identifier_hash) { assert(side == .right); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = left, .type = pointer.type, @@ -16066,7 +16166,7 @@ pub const Builder = struct { const instruction_to_truncate = switch (bit_offset) { 0 => load, else => shl: { - const shl = try unit.instructions.append(context.my_allocator, .{ + const shl = unit.instructions.append_index(.{ .integer_binary_operation = .{ .id = .shift_right, .left = .{ @@ -16103,7 +16203,7 @@ pub const Builder = struct { unreachable; }, false => { - const truncate = try unit.instructions.append(context.my_allocator, .{ + const truncate = unit.instructions.append_index(.{ .cast = .{ .id = .truncate, .value = .{ @@ -16146,7 +16246,7 @@ pub const Builder = struct { switch (typecheck_result) { .success => return result, .pointer_var_to_const => { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_var_to_const, .value = result, @@ -16203,7 +16303,7 @@ pub const Builder = struct { } }, .pointer_to_nullable => { - const cast = try unit.instructions.append(context.my_allocator, .{ + const cast = unit.instructions.append_index(.{ .cast = .{ .id = .pointer_to_nullable, .value = result, @@ -16234,7 +16334,7 @@ pub const 
Builder = struct { .type = ti, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = v, .index = 0, @@ -16243,7 +16343,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -16285,7 +16385,7 @@ pub const Builder = struct { .type = error_union.union_for_error, }; - const error_union_builder = try unit.instructions.append(context.my_allocator, .{ + const error_union_builder = unit.instructions.append_index(.{ .insert_value = .{ .expression = v, .index = 0, @@ -16294,7 +16394,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, error_union_builder); - const final_error_union = try unit.instructions.append(context.my_allocator, .{ + const final_error_union = unit.instructions.append_index(.{ .insert_value = .{ .expression = .{ .value = .{ @@ -16317,7 +16417,7 @@ pub const Builder = struct { const support_alloca = try builder.createStackVariable(unit, context, error_union.union_for_error, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = error_union.union_for_error, .termination = .none, .mutability = .@"var", @@ -16325,7 +16425,7 @@ pub const Builder = struct { .nullable = false, }); - const support_store = try unit.instructions.append(context.my_allocator, .{ + const support_store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -16343,7 +16443,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, support_store); - const support_load = try unit.instructions.append(context.my_allocator, .{ + const support_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -16371,7 +16471,7 @@ pub const Builder = struct { .error_to_all_errors_error_union => return try builder.resolveErrorToAllErrorUnion(unit, context, ti, result), .type_to_error_union => return try builder.resolveTypeToErrorUnion(unit, context, ti, result), .zero_extend => { - const zero_extend = try unit.instructions.append(context.my_allocator, .{ + const zero_extend = unit.instructions.append_index(.{ .cast = .{ .id = .zero_extend, .value = result, @@ -16405,7 +16505,7 @@ pub const Builder = struct { const taken_expression_node_index = if_node.right; const not_taken_expression_node_index = node.right; assert(if_node.id == .@"if"); - try builder.insertDebugCheckPoint(unit, context, if_node.token); + try builder.insertDebugCheckPoint(unit, if_node.token); const condition = try builder.resolveRuntimeValue(unit, context, Type.Expect{ .type = .bool }, condition_node_index, .right); const result: V = switch (condition.value) { @@ -16456,7 +16556,7 @@ pub const Builder = struct { .bool = false, }, }; - const constant_struct = try unit.constant_structs.append(context.my_allocator, .{ + const constant_struct = unit.constant_structs.append_index(.{ .fields = fields, .type = return_type_index, }); @@ -16483,34 +16583,35 @@ pub const Builder = struct { assert(builder.current_basic_block != builder.return_block); - try builder.jump(unit, context, builder.return_block); + try builder.jump(unit, builder.return_block); } else if (builder.exit_blocks.len > 0) { - builder.return_phi = try 
unit.instructions.append(context.my_allocator, .{ + builder.return_phi = unit.instructions.append_index(.{ .phi = .{ .type = return_type_index, .values = try context.arena.new(BoundedArray(Instruction.Phi.Value, Instruction.Phi.max_value_count)), }, }); - builder.return_block = try builder.newBasicBlock(unit, context); + builder.return_block = try builder.newBasicBlock(unit); const phi = &unit.instructions.get(builder.return_phi).phi; phi.addIncoming(return_value, builder.current_basic_block); - try builder.jump(unit, context, builder.return_block); + try builder.jump(unit, builder.return_block); } else { try builder.buildRet(unit, context, return_value); } } fn buildUnreachable(builder: *Builder, unit: *Unit, context: *const Context) !void { - const instruction = try unit.instructions.append(context.my_allocator, .@"unreachable"); + _ = context; // autofix + const instruction = unit.instructions.append_index(.@"unreachable"); try builder.appendInstruction(unit, instruction); unit.basic_blocks.get(builder.current_basic_block).terminated = true; } fn buildTrap(builder: *Builder, unit: *Unit, context: *const Context) !void { - const instruction = try unit.instructions.append(context.my_allocator, .trap); + const instruction = unit.instructions.append_index(.trap); try builder.appendInstruction(unit, instruction); try builder.buildUnreachable(unit, context); @@ -16523,14 +16624,14 @@ pub const Builder = struct { const abi_value = switch (function_prototype.abi.return_type_abi.kind) { .direct, .ignore => value, .direct_pair => |pair| b: { - const struct_type_index = try unit.getTwoStruct(context, pair); + const struct_type_index = try unit.getTwoStruct(pair); assert(struct_type_index == function_prototype.abi.return_type); if (struct_type_index == value.type) { unreachable; } else { const stack = try builder.createStackVariable(unit, context, value.type, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = value.type, .termination = .none, .mutability = .@"var", @@ -16545,7 +16646,7 @@ pub const Builder = struct { .type = pointer_type, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -16569,7 +16670,7 @@ pub const Builder = struct { const source_is_scalable_vector_type = false; if (source_size >= target_size and !source_is_scalable_vector_type and !target_is_scalable_vector_type) { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -16591,7 +16692,7 @@ pub const Builder = struct { } else { const alignment = @max(target_alignment, source_alignment); const temporal = try builder.createStackVariable(unit, context, struct_type_index, alignment); - const coerced_pointer_type = try unit.getPointerType(context, .{ + const coerced_pointer_type = try unit.getPointerType(.{ .type = struct_type_index, .termination = .none, .mutability = .@"var", @@ -16612,7 +16713,7 @@ pub const Builder = struct { .size = source_size, .is_volatile = false, }); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = destination, .type = struct_type_index, @@ -16632,7 +16733,7 @@ pub const Builder = struct { }, .indirect => b: { assert(function_definition.return_pointer != .null); - const store = try unit.instructions.append(context.my_allocator, .{ + 
const store = unit.instructions.append_index(.{ .store = .{ .destination = .{ .value = .{ @@ -16655,7 +16756,7 @@ pub const Builder = struct { .direct_coerce => |coerced_type_index| if (coerced_type_index == value.type) value else b: { const stack = try builder.createStackVariable(unit, context, value.type, null); - const pointer_type = try unit.getPointerType(context, .{ + const pointer_type = try unit.getPointerType(.{ .type = value.type, .termination = .none, .mutability = .@"var", @@ -16670,7 +16771,7 @@ pub const Builder = struct { .type = pointer_type, }; - const store = try unit.instructions.append(context.my_allocator, .{ + const store = unit.instructions.append_index(.{ .store = .{ .destination = argument_alloca, .source = value, @@ -16689,7 +16790,7 @@ pub const Builder = struct { const source_is_scalable_vector_type = false; if (source_size >= target_size and !source_is_scalable_vector_type and !target_is_scalable_vector_type) { - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = argument_alloca, .type = coerced_type_index, @@ -16706,7 +16807,7 @@ pub const Builder = struct { } else { const alignment = @max(target_alignment, source_alignment); const temporal = try builder.createStackVariable(unit, context, coerced_type_index, alignment); - const coerced_pointer_type = try unit.getPointerType(context, .{ + const coerced_pointer_type = try unit.getPointerType(.{ .type = coerced_type_index, .termination = .none, .mutability = .@"var", @@ -16727,7 +16828,7 @@ pub const Builder = struct { .size = source_size, .is_volatile = false, }); - const load = try unit.instructions.append(context.my_allocator, .{ + const load = unit.instructions.append_index(.{ .load = .{ .value = destination, .type = coerced_type_index, @@ -16746,7 +16847,7 @@ pub const Builder = struct { }, else => |t| @panic(@tagName(t)), }; - const ret = try unit.instructions.append(context.my_allocator, .{ + const ret = unit.instructions.append_index(.{ .ret = abi_value, }); try builder.appendInstruction(unit, ret); @@ -16801,7 +16902,7 @@ pub const Builder = struct { assert(test_functions.kind == .global); const test_functions_global: *Debug.Declaration.Global = @fieldParentPtr("declaration", test_functions); assert(test_functions_global.declaration.mutability == .@"var"); - const array_type = try unit.getArrayType(context, .{ + const array_type = try unit.getArrayType(.{ .type = test_type, .count = unit.test_functions.values().len, .termination = .none, @@ -16818,7 +16919,7 @@ pub const Builder = struct { for (unit.test_functions.keys(), unit.test_functions.values(), 0..) 
|test_function_name_global, test_function_global, i| { var fields = try context.arena.new_array(V.Comptime, 2); const name = unit.getIdentifier(test_function_name_global.initial_value.string_literal); - const name_slice = try unit.constant_slices.append(context.my_allocator, .{ + const name_slice = unit.constant_slices.append_index(.{ .array = test_function_name_global, .start = 0, .end = name.len, @@ -16830,7 +16931,7 @@ pub const Builder = struct { fields[1] = .{ .global = test_function_global, }; - const constant_struct = try unit.constant_structs.append(context.my_allocator, .{ + const constant_struct = unit.constant_structs.append_index(.{ .fields = fields, .type = test_type, }); @@ -16840,14 +16941,14 @@ pub const Builder = struct { }; } - const constant_array = try unit.constant_arrays.append(context.my_allocator, .{ + const constant_array = unit.constant_arrays.append_index(.{ .type = array_type, .values = list, }); const array_name = "_anon_test_function_array"; const array_name_hash = try unit.processIdentifier(context, array_name); - const test_function_array_global_index = try unit.global_declarations.append(context.my_allocator, .{ + const test_function_array_global = unit.global_declarations.append(.{ .declaration = .{ .scope = test_functions_global.declaration.scope, .type = array_type, @@ -16863,9 +16964,8 @@ pub const Builder = struct { .type_node_index = .null, .attributes = .{}, }); - const test_function_array_global = unit.global_declarations.get(test_function_array_global_index); _ = unit.data_to_emit.append(test_function_array_global); - const constant_slice = try unit.constant_slices.append(context.my_allocator, .{ + const constant_slice = unit.constant_slices.append_index(.{ .array = test_function_array_global, .start = 0, .end = list.len, @@ -16878,7 +16978,7 @@ pub const Builder = struct { } fn build_slice_indexed_access(builder: *Builder, unit: *Unit, context: *const Context, array_like_expression: V, sliceable_type_index: Type.Index, sliceable_pointer_type_index: Type.Index, sliceable_child_type_index: Type.Index, mutability: Mutability, sliceable: Struct.Sliceable, index: V) !V { - const gep = try unit.instructions.append(context.my_allocator, .{ + const gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = array_like_expression.value.runtime, .base_type = sliceable_type_index, @@ -16898,7 +16998,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, gep); - const pointer_to_slice_pointer = try unit.getPointerType(context, .{ + const pointer_to_slice_pointer = try unit.getPointerType(.{ .type = sliceable_pointer_type_index, .mutability = mutability, .termination = .none, @@ -16906,7 +17006,7 @@ pub const Builder = struct { .nullable = false, }); - const pointer_load = try unit.instructions.append(context.my_allocator, .{ + const pointer_load = unit.instructions.append_index(.{ .load = .{ .value = .{ .value = .{ @@ -16919,7 +17019,7 @@ pub const Builder = struct { }); try builder.appendInstruction(unit, pointer_load); - const slice_pointer_gep = try unit.instructions.append(context.my_allocator, .{ + const slice_pointer_gep = unit.instructions.append_index(.{ .get_element_pointer = .{ .pointer = pointer_load, .base_type = sliceable_child_type_index, @@ -16934,7 +17034,7 @@ pub const Builder = struct { .value = .{ .runtime = slice_pointer_gep, }, - .type = try unit.getPointerType(context, .{ + .type = try unit.getPointerType(.{ .type = sliceable_child_type_index, .mutability = mutability, .many = false, @@ -16954,39 
+17054,35 @@ pub const Enum = struct { name: u32, parent: Type.Index, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(@This()).Index; }; - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const Unit = struct { node_buffer: PinnedArray(Node), token_buffer: Token.Buffer, node_lists: PinnedArray([]const Node.Index), - files: Debug.File.List = .{}, - types: Type.List = .{}, - structs: Struct.List = .{}, - // unions: Type.Union.List = .{}, - struct_fields: Struct.Field.List = .{}, - // enums: Enum.List = .{}, - enum_fields: Enum.Field.List = .{}, - function_definitions: Function.Definition.List = .{}, - blocks: Debug.Block.List = .{}, - global_declarations: Debug.Declaration.Global.List = .{}, - local_declarations: Debug.Declaration.Local.List = .{}, - argument_declarations: Debug.Declaration.Argument.List = .{}, - assembly_instructions: InlineAssembly.Instruction.List = .{}, - function_prototypes: Function.Prototype.List = .{}, - inline_assembly: InlineAssembly.List = .{}, - instructions: Instruction.List = .{}, - basic_blocks: BasicBlock.List = .{}, - constant_structs: V.Comptime.ConstantStruct.List = .{}, - constant_arrays: V.Comptime.ConstantArray.List = .{}, - constant_slices: V.Comptime.ConstantSlice.List = .{}, - error_fields: Type.Error.Field.List = .{}, + files: PinnedArray(Debug.File), + types: Type.List, + structs: PinnedArray(Struct), + struct_fields: PinnedArray(Struct.Field), + enum_fields: PinnedArray(Enum.Field), + function_definitions: PinnedArray(Function.Definition), + blocks: PinnedArray(Debug.Block), + global_declarations: PinnedArray(Debug.Declaration.Global), + local_declarations: PinnedArray(Debug.Declaration.Local), + argument_declarations: PinnedArray(Debug.Declaration.Argument), + assembly_instructions: PinnedArray(InlineAssembly.Instruction), + function_prototypes: PinnedArray(Function.Prototype), + inline_assembly: PinnedArray(InlineAssembly), + instructions: PinnedArray(Instruction), + basic_blocks: PinnedArray(BasicBlock), + constant_structs: PinnedArray(V.Comptime.ConstantStruct), + constant_arrays: PinnedArray(V.Comptime.ConstantArray), + constant_slices: PinnedArray(V.Comptime.ConstantSlice), + error_fields: PinnedArray(Type.Error.Field), file_token_offsets: PinnedHashMap(Token.Range, Debug.File.Index), file_map: PinnedHashMap([]const u8, Debug.File.Index), identifiers: PinnedHashMap(u32, []const u8), @@ -17039,7 +17135,7 @@ pub const Unit = struct { fn dumpInstruction(instruction_index: Instruction.Index) !void { try write(.ir, "%"); - try dumpInt(Instruction.unwrap(instruction_index), 10, false); + try dumpInt(@intFromEnum(instruction_index), 10, false); } fn dumpInt(value: u64, base: u8, signed: bool) !void { @@ -17050,7 +17146,7 @@ pub const Unit = struct { fn dumpBasicBlock(basic_block: BasicBlock.Index) !void { try write(.ir, "#"); - try dumpInt(BasicBlock.unwrap(basic_block), 10, false); + try dumpInt(@intFromEnum(basic_block), 10, false); } fn dumpFunctionDefinition(unit: *Unit, function_definition_index: Function.Definition.Index) !void { @@ -17165,15 +17261,15 @@ pub const Unit = struct { }, .jump => |jump| { try write(.ir, "[#"); - try dumpInt(BasicBlock.unwrap(jump.to), 10, false); + try dumpInt(@intFromEnum(jump.to), 10, false); try write(.ir, "]"); }, .branch => |branch| { try dumpInstruction(branch.condition); try write(.ir, ", ["); - try dumpInt(BasicBlock.unwrap(branch.taken), 10, 
false); + try dumpInt(@intFromEnum(branch.taken), 10, false); try write(.ir, ", "); - try dumpInt(BasicBlock.unwrap(branch.not_taken), 10, false); + try dumpInt(@intFromEnum(branch.not_taken), 10, false); try write(.ir, "]"); }, .phi => |*phi| { @@ -17356,17 +17452,17 @@ pub const Unit = struct { return bytes; } - fn getOptionalType(unit: *Unit, context: *const Context, element_type: Type.Index) !Type.Index { + fn getOptionalType(unit: *Unit, element_type: Type.Index) !Type.Index { if (unit.optionals.get(element_type)) |optional| { return optional; } else { - const optional_struct_index = try unit.structs.append(context.my_allocator, .{ + const optional_struct_index = unit.structs.append_index(.{ .kind = .{ .optional = element_type, }, }); - const optional_type_index = try unit.types.append(context.my_allocator, .{ + const optional_type_index = unit.types.append_index(.{ .@"struct" = optional_struct_index, }); @@ -17376,11 +17472,11 @@ pub const Unit = struct { } } - fn getPointerType(unit: *Unit, context: *const Context, pointer: Type.Pointer) !Type.Index { + fn getPointerType(unit: *Unit, pointer: Type.Pointer) !Type.Index { if (unit.pointers.get(pointer)) |existing_type_index| { return existing_type_index; } else { - const type_index = try unit.types.append(context.my_allocator, .{ + const type_index = unit.types.append_index(.{ .pointer = pointer, }); try unit.pointers.put_no_clobber(pointer, type_index); @@ -17389,11 +17485,11 @@ pub const Unit = struct { } } - fn getSliceType(unit: *Unit, context: *const Context, slice: Type.Slice) !Type.Index { + fn getSliceType(unit: *Unit, slice: Type.Slice) !Type.Index { if (unit.slices.get(slice)) |existing_type_index| { return existing_type_index; } else { - const type_index = try unit.types.append(context.my_allocator, .{ + const type_index = unit.types.append_index(.{ .slice = slice, }); try unit.slices.put_no_clobber(slice, type_index); @@ -17402,12 +17498,12 @@ pub const Unit = struct { } } - fn getArrayType(unit: *Unit, context: *const Context, array: Type.Array) !Type.Index { + fn getArrayType(unit: *Unit, array: Type.Array) !Type.Index { if (unit.arrays.get(array)) |array_type| { return array_type; } else { assert(array.count != 0); - const array_type = try unit.types.append(context.my_allocator, .{ + const array_type = unit.types.append_index(.{ .array = array, }); try unit.arrays.put_no_clobber(array, array_type); @@ -17416,7 +17512,7 @@ pub const Unit = struct { } } - pub fn getIntegerType(unit: *Unit, context: *const Context, integer: Type.Integer) !Type.Index { + pub fn getIntegerType(unit: *Unit, integer: Type.Integer) !Type.Index { // if (integer.bit_count > 64) unreachable; const existing_type_index: Type.Index = switch (integer.bit_count) { 8 => switch (integer.signedness) { @@ -17439,7 +17535,7 @@ pub const Unit = struct { if (unit.integers.get(integer)) |type_index| { return type_index; } else { - const type_index = try unit.types.append(context.my_allocator, .{ + const type_index = unit.types.append_index(.{ .integer = integer, }); try unit.integers.put_no_clobber(integer, type_index); @@ -17516,7 +17612,7 @@ pub const Unit = struct { inline for (@typeInfo(Type.Common).Enum.fields) |enum_field| { const e = @field(Type.Common, enum_field.name); const type_value = Type.Common.map.get(e); - _ = try unit.types.append(context.my_allocator, type_value); + _ = unit.types.append(type_value); } try builder.analyzePackage(unit, context, unit.root_package.dependencies.get("std").?); @@ -17531,7 +17627,7 @@ pub const Unit = struct { for 
(unit.code_to_emit.values()) |function_declaration| { const function_definition_index = function_declaration.initial_value.function_definition; try write(.ir, "\nFunction #"); - try dumpInt(Function.Definition.unwrap(function_definition_index), 16, false); + try dumpInt(@intFromEnum(function_definition_index), 16, false); try write(.ir, ": "); const function_name = unit.getIdentifier(function_declaration.declaration.name); try write(.ir, function_name); @@ -17585,7 +17681,7 @@ pub const Unit = struct { fn importPackage(unit: *Unit, context: *const Context, package: *Package) !ImportPackageResult { const full_path = try package.directory.handle.realpathAlloc(context.allocator, package.source_path); //try std.fs.path.resolve(context.allocator, &.{ package.directory.path, package.source_path }); // logln(.compilation, .import, "Import full path: {s}\n", .{full_path}); - const import_file = try unit.getFile(context, full_path, package.source_path, package); + const import_file = try unit.getFile(full_path, package.source_path, package); return .{ .file = import_file, @@ -17623,7 +17719,7 @@ pub const Unit = struct { const full_path = try joinPath(context, current_file.package.directory.path, import_file_relative_path); const file_relative_path = import_file_relative_path; const package = current_file.package; - const import_file = try unit.getFile(context, full_path, file_relative_path, package); + const import_file = try unit.getFile(full_path, file_relative_path, package); _ = @intFromPtr(unit.files.get(import_file.index).package); const result = ImportPackageResult{ @@ -17634,14 +17730,14 @@ pub const Unit = struct { return result; } - fn getFile(unit: *Unit, context: *const Context, full_path: []const u8, relative_path: []const u8, package: *Package) !ImportFileResult { + fn getFile(unit: *Unit, full_path: []const u8, relative_path: []const u8, package: *Package) !ImportFileResult { if (unit.file_map.get(full_path)) |file_index| { return .{ .index = file_index, .is_new = false, }; } else { - const file_index = try unit.files.append(context.my_allocator, Debug.File{ + const file_index = unit.files.append_index(Debug.File{ .relative_path = relative_path, .package = package, .scope = .{ @@ -17820,14 +17916,14 @@ pub const Unit = struct { } } - fn getTwoStruct(unit: *Unit, context: *const Context, types: [2]Type.Index) !Type.Index { + fn getTwoStruct(unit: *Unit, types: [2]Type.Index) !Type.Index { if (unit.two_structs.get(types)) |result| return result else { - const two_struct = try unit.structs.append(context.my_allocator, .{ + const two_struct = unit.structs.append_index(.{ .kind = .{ .two_struct = types, }, }); - const type_index = try unit.types.append(context.my_allocator, .{ + const type_index = unit.types.append_index(.{ .@"struct" = two_struct, }); @@ -18095,15 +18191,13 @@ pub const Token = struct { pub const InlineAssembly = struct { instructions: []const InlineAssembly.Instruction.Index, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace List.Index; + pub const Index = PinnedArray(@This()).Index; pub const Instruction = struct { id: u32, operands: []const Operand, - pub const List = BlockList(@This(), enum {}); - pub usingnamespace @This().List.Index; + pub const Index = PinnedArray(@This()).Index; }; pub const Operand = union(enum) { diff --git a/bootstrap/backend/llvm.zig b/bootstrap/backend/llvm.zig index 220eb91..0ab6b36 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -1649,7 +1649,7 @@ pub const LLVM = struct { const 
type_declaration = unit.type_declarations.get(sema_type_index).?; const file = try llvm.getDebugInfoFile(unit, context, type_declaration.declaration.scope.file); const bit_size = integer.bit_count; - const sema_backing_type = try unit.getIntegerType(context, .{ + const sema_backing_type = try unit.getIntegerType(.{ .kind = .materialized_int, .bit_count = integer.bit_count, .signedness = integer.signedness, @@ -1678,7 +1678,7 @@ pub const LLVM = struct { const type_declaration = unit.type_declarations.get(sema_type_index).?; const file = try llvm.getDebugInfoFile(unit, context, type_declaration.declaration.scope.file); const bit_size = integer.bit_count; - const sema_backing_type = try unit.getIntegerType(context, .{ + const sema_backing_type = try unit.getIntegerType(.{ .kind = .materialized_int, .bit_count = integer.bit_count, .signedness = integer.signedness, diff --git a/bootstrap/library.zig b/bootstrap/library.zig index 0516ed2..d414e25 100644 --- a/bootstrap/library.zig +++ b/bootstrap/library.zig @@ -126,12 +126,17 @@ const pinned_array_default_granularity = pinned_array_page_size; // This must be used with big arrays, which are not resizeable (can't be cleared) pub fn PinnedArray(comptime T: type) type { + return PinnedArrayAdvanced(T, null); +} + +// This must be used with big arrays, which are not resizeable (can't be cleared) +pub fn PinnedArrayAdvanced(comptime T: type, comptime MaybeIndex: ?type) type { return struct { pointer: [*]T = @constCast((&[_]T{}).ptr), length: u32 = 0, granularity: u32 = 0, - pub const Index = enum(u32) { + pub const Index = if (MaybeIndex) |I| getIndexForType(T, I) else enum(u32) { null = 0xffff_ffff, _, }; @@ -197,6 +202,10 @@ pub fn PinnedArray(comptime T: type) type { return array.append_with_capacity(item); } + pub fn append_index(array: *Array, item: T) Index { + return array.get_index(array.append(item)); + } + pub fn append_slice(array: *Array, items: []const T) void { array.ensure_capacity(@intCast(items.len)); array.append_slice_with_capacity(items); @@ -251,98 +260,16 @@ pub fn commit(bytes: [*]u8, size: u64) !void { }; } -pub fn BlockList(comptime T: type, comptime E: type) type { - const item_count = 64; - - return struct { - blocks: UnpinnedArray(*Block) = .{}, - len: usize = 0, - - const Block = BoundedArray(T, item_count); - const List = @This(); - - pub const Index = getIndexForType(T, E); - pub const ElementIndex = Index.Index; - - pub fn wrapSplit(block: usize, element: usize) ElementIndex { - return @enumFromInt(block * item_count + element); - } - - pub fn get(list: *List, index: ElementIndex) *T { - assert(index != .null); - const i: u32 = @intFromEnum(index); - const block_index = i / item_count; - const element_index = i % item_count; - assert(block_index < list.blocks.length); - const block = list.blocks.pointer[block_index]; - const block_slice = block.buffer[0..block.len]; - const element = &block_slice[element_index]; - return element; - } - - pub fn append(list: *List, allocator: *MyAllocator, element: T) !ElementIndex { - const result = try list.addOne(allocator); - list.get(result).* = element; - return result; - } - - pub fn addOne(list: *List, allocator: *MyAllocator) !ElementIndex { - const block_index = try list.getFreeBlock(allocator); - assert(block_index < list.blocks.length); - const block = list.blocks.pointer[block_index]; - const index = block.len; - _ = try block.addOne(); - list.len += 1; - return @enumFromInt(block_index * item_count + index); - } - - fn getFreeBlock(list: *List, allocator: *MyAllocator) 
!usize { - for (list.blocks.slice(), 0..) |block, i| { - block.ensureUnusedCapacity(1) catch continue; - return i; - } else { - const new_block = try allocator.allocate_one(Block); - new_block.* = .{}; - const block_index = list.blocks.length; - try list.blocks.append(allocator, new_block); - return block_index; - } - } - - pub fn indexOf(list: *List, elem: *const T) ElementIndex { - const address = @intFromPtr(elem); - for (list.blocks.items, 0..) |block, block_index| { - const base = @intFromPtr(&block.buffer[0]); - const top = base + @sizeOf(T) * item_count; - if (address >= base and address < top) { - const result: u32 = @intCast(block_index * item_count + @divExact(address - base, @sizeOf(T))); - return Index.wrap(result); - } - } - - @panic("not found"); - } - }; -} - pub fn getIndexForType(comptime T: type, comptime E: type) type { assert(@typeInfo(E) == .Enum); _ = T; - const MAX = std.math.maxInt(IndexType); + const MAX = std.math.maxInt(u32); const EnumField = std.builtin.Type.EnumField; comptime var fields: []const EnumField = &.{}; // comptime var enum_value: comptime_int = 0; fields = fields ++ @typeInfo(E).Enum.fields; - // for (names) |name| { - // fields = fields ++ [1]EnumField{.{ - // .name = name, - // .value = enum_value, - // }}; - // enum_value += 1; - // } - fields = fields ++ [1]EnumField{.{ .name = "null", .value = MAX, @@ -350,47 +277,17 @@ pub fn getIndexForType(comptime T: type, comptime E: type) type { const Result = @Type(.{ .Enum = .{ - .tag_type = IndexType, + .tag_type = u32, .fields = fields, .decls = &.{}, .is_exhaustive = false, }, }); - return struct { - pub const Index = Result; - - pub fn unwrap(this: Index) IndexType { - assert(this != .null); - return @intFromEnum(this); - } - - pub fn wrap(value: IndexType) Index { - assert(value < MAX); - return @enumFromInt(value); - } - - pub fn addInt(this: Index, value: IndexType) Index { - const this_int = @intFromEnum(this); - return @enumFromInt(this_int + value); - } - - pub fn subInt(this: Index, value: IndexType) IndexType { - const this_int = @intFromEnum(this); - return this_int - value; - } - - pub fn add(a: Index, b: Index) Index { - return @enumFromInt(@intFromEnum(a) + @intFromEnum(b)); - } - - pub fn sub(a: Index, b: Index) IndexType { - return @intFromEnum(a) - @intFromEnum(b); - } - }; + return Result; } -pub fn my_hash(bytes: []const u8) IndexType { +pub fn my_hash(bytes: []const u8) u32 { const fnv_offset = 14695981039346656037; const fnv_prime = 1099511628211; var result: u64 = fnv_offset; @@ -452,12 +349,6 @@ pub fn byte_equal_terminated(a: [*:0]const u8, b: [*:0]const u8) bool { return byte_equal(a_slice, b_slice); } -const MapResult = struct { - key_pointer: *anyopaque, - value_pointer: *anyopaque, - capacity: IndexType, -}; - const pinned_hash_map_page_size = 2 * 1024 * 1024; const pinned_hash_map_max_size = std.math.maxInt(u32) - pinned_hash_map_page_size; const pinned_hash_map_default_granularity = pinned_hash_map_page_size; @@ -532,7 +423,7 @@ pub fn PinnedHashMap(comptime K: type, comptime V: type) type { map.put_at_with_capacity(len, key, value); } - fn put_at_with_capacity(map: *@This(), index: IndexType, key: K, value: V) void { + fn put_at_with_capacity(map: *@This(), index: u32, key: K, value: V) void { map.length += 1; assert(index < map.length); map.key_pointer[index] = key; @@ -703,105 +594,6 @@ pub const PageAllocator = struct { } }; -pub const IndexType = if (@sizeOf(usize) >= 8) u32 else usize; - -const ArrayCapacity = struct { - pointer: *anyopaque, - capacity: IndexType, 
-}; - -fn ensure_capacity_array(allocator: *MyAllocator, current_capacity: IndexType, desired_capacity: IndexType, pointer: [*]u8, length: IndexType, element_size: IndexType, element_alignment: u16) !ArrayCapacity { - var new_capacity = @max(current_capacity, initial_item_count); - while (new_capacity < desired_capacity) { - new_capacity *= factor; - } - if (new_capacity > current_capacity) { - const old_byte_slice = pointer[0 .. length * element_size]; - const new_byte_capacity = new_capacity * element_size; - const new_slice = try allocator.reallocate(old_byte_slice, new_byte_capacity, element_alignment); - return .{ - .pointer = new_slice.ptr, - .capacity = new_capacity, - }; - } else { - return .{ - .pointer = pointer, - .capacity = current_capacity, - }; - } -} - -const initial_item_count = 16; -const factor = 2; - -pub fn UnpinnedArray(comptime T: type) type { - return struct { - pointer: [*]T = undefined, - length: IndexType = 0, - capacity: IndexType = 0, - - pub fn initialize_with_capacity(allocator: *MyAllocator, item_count: IndexType) !@This() { - var array = @This(){}; - try array.ensure_capacity(allocator, item_count); - return array; - } - - pub fn ensure_capacity(array: *@This(), allocator: *MyAllocator, desired_capacity: IndexType) !void { - const result = try ensure_capacity_array(allocator, array.capacity, desired_capacity, @ptrCast(array.pointer), array.length, @sizeOf(T), @alignOf(T)); - array.pointer = @ptrCast(@alignCast(result.pointer)); - array.capacity = result.capacity; - } - - pub fn append(array: *@This(), allocator: *MyAllocator, item: T) !void { - try array.ensure_capacity(allocator, array.length + 1); - array.append_with_capacity(item); - } - - pub fn append_slice(array: *@This(), allocator: *MyAllocator, items: []const T) !void { - try array.ensure_capacity(allocator, @intCast(array.length + items.len)); - @memcpy(array.pointer[array.length..][0..items.len], items); - array.length += @intCast(items.len); - } - - pub fn append_with_capacity(array: *@This(), item: T) void { - assert(array.length < array.capacity); - array.pointer[array.length] = item; - array.length += 1; - } - - pub fn slice(array: *@This()) []T { - return array.pointer[0..array.length]; - } - - pub fn insert(array: *@This(), allocator: *MyAllocator, index: IndexType, item: T) !void { - assert(index < array.length); - if (array.length + 1 <= array.capacity) { - const after_count = array.length - index; - copy_backwards(T, array.pointer[index + 1 ..][0..after_count], array.pointer[index..][0..after_count]); - } else { - const new_capacity = array.capacity * 2; - const new_slice = try allocator.allocate(new_capacity * @sizeOf(T), @alignOf(T)); - const new_typed_slice: []T = @as([*]T, @ptrCast(@alignCast(new_slice.ptr)))[0..new_capacity]; - @memcpy(new_typed_slice[0..index], array.pointer[0..index]); - const after_count = array.length - index; - @memcpy(new_typed_slice[index + 1 ..][0..after_count], array.pointer[index..][0..after_count]); - try allocator.free(@as([*]u8, @ptrCast(@alignCast(array.slice().ptr)))[0 .. 
array.capacity * @sizeOf(T)]); - array.pointer = new_typed_slice.ptr; - array.capacity = new_capacity; - } - - array.pointer[index] = item; - array.length += 1; - } - - pub fn pop(array: *@This()) T { - assert(array.length > 0); - array.length -= 1; - return array.pointer[array.length]; - } - }; -} - fn copy_backwards(comptime T: type, destination: []T, source: []const T) void { @setRuntimeSafety(false); assert(destination.len >= source.len); diff --git a/bootstrap/linker/lld.zig b/bootstrap/linker/lld.zig index 2b8bf95..3bcfd9b 100644 --- a/bootstrap/linker/lld.zig +++ b/bootstrap/linker/lld.zig @@ -34,7 +34,7 @@ pub fn link(context: *const Compilation.Context, options: linker.Options) !void switch (@import("builtin").os.tag) { .macos => { _ = argv.append("-dynamic"); - argv.append_slice(context.my_allocator, &.{ "-platform_version", "macos", "13.4.1", "13.3" }); + argv.append_slice(&.{ "-platform_version", "macos", "13.4.1", "13.3" }); _ = argv.append("-arch"); _ = argv.append(switch (@import("builtin").cpu.arch) { .aarch64 => "arm64", From b281ff9a881761bb2d8f857a0f1a6e17d1f691ab Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Fri, 26 Apr 2024 08:14:01 -0600 Subject: [PATCH 12/14] Fix segfault in ReleaseFast --- bootstrap/backend/llvm.zig | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bootstrap/backend/llvm.zig b/bootstrap/backend/llvm.zig index 0ab6b36..284b368 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -3255,7 +3255,9 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo } } - llvm.debug_info_builder.finalize(); + if (unit.descriptor.generate_debug_information) { + llvm.debug_info_builder.finalize(); + } var module_len: usize = 0; const module_ptr = llvm.module.toString(&module_len); From 8b8d1b720eae79a0b1d41d33710b05ff17405497 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Fri, 26 Apr 2024 08:18:17 -0600 Subject: [PATCH 13/14] Remove unit dead code --- bootstrap/Compilation.zig | 154 +++++++------------------------------- 1 file changed, 26 insertions(+), 128 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index 1ded94e..b2422a4 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -91,85 +91,34 @@ pub fn createContext(allocator: Allocator, my_allocator: *MyAllocator) !*const C pub fn compileBuildExecutable(context: *const Context, arguments: []const []const u8) !void { _ = arguments; // autofix - const unit = try context.my_allocator.allocate_one(Unit); - unit.* = .{ - .descriptor = .{ - .main_package_path = "build.nat", - .arch = switch (@import("builtin").cpu.arch) { - .x86_64 => .x86_64, - .aarch64 => .aarch64, - else => |t| @panic(@tagName(t)), - }, - .os = switch (@import("builtin").os.tag) { - .linux => .linux, - .macos => .macos, - else => |t| @panic(@tagName(t)), - }, - .abi = switch (@import("builtin").abi) { - .none => .none, - .gnu => .gnu, - .musl => .musl, - else => |t| @panic(@tagName(t)), - }, - .only_parse = false, - .executable_path = "nat/build", - .object_path = "nat/build.o", - .link_libc = @import("builtin").os.tag == .macos, - .link_libcpp = false, - .optimization = .none, - .generate_debug_information = true, - .name = "build", - .is_test = false, - .c_source_files = &.{}, + const unit = try createUnit(context, .{ + .main_package_path = "build.nat", + .object_path = "nat/build.o", + .executable_path = "nat/build", + .only_parse = false, + .arch = switch (@import("builtin").cpu.arch) { + .x86_64 => 
.x86_64, + .aarch64 => .aarch64, + else => @compileError("Architecture not supported"), }, - .token_buffer = Token.Buffer{ - .tokens = try PinnedArray(Token).init_with_default_granularity(), - .line_offsets = try PinnedArray(u32).init_with_default_granularity(), + .os = switch (@import("builtin").os.tag) { + .linux => .linux, + .macos => .macos, + else => |t| @panic(@tagName(t)), }, - // pinned hashmaps - .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), - .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), - .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), - .string_literal_values = try PinnedHashMap(u32, [:0]const u8).init(std.mem.page_size), - .string_literal_globals = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), - .optionals = try PinnedHashMap(Type.Index, Type.Index).init(std.mem.page_size), - .pointers = try PinnedHashMap(Type.Pointer, Type.Index).init(std.mem.page_size), - .slices = try PinnedHashMap(Type.Slice, Type.Index).init(std.mem.page_size), - .arrays = try PinnedHashMap(Type.Array, Type.Index).init(std.mem.page_size), - .integers = try PinnedHashMap(Type.Integer, Type.Index).init(std.mem.page_size), - .error_unions = try PinnedHashMap(Type.Error.Union.Descriptor, Type.Index).init(std.mem.page_size), - .two_structs = try PinnedHashMap([2]Type.Index, Type.Index).init(std.mem.page_size), - .fields_array = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - .name_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - .external_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), - .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // special pinned arrays - .types = try Type.List.init_with_default_granularity(), - // pinned arrays - .node_buffer = try PinnedArray(Node).init_with_default_granularity(), - .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), - .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), - .files = try PinnedArray(Debug.File).init_with_default_granularity(), - .structs = try PinnedArray(Struct).init_with_default_granularity(), - .struct_fields = try PinnedArray(Struct.Field).init_with_default_granularity(), - .enum_fields = try PinnedArray(Enum.Field).init_with_default_granularity(), - .function_definitions = try PinnedArray(Function.Definition).init_with_default_granularity(), - .blocks = try PinnedArray(Debug.Block).init_with_default_granularity(), - .global_declarations = try PinnedArray(Debug.Declaration.Global).init_with_default_granularity(), - .local_declarations = try PinnedArray(Debug.Declaration.Local).init_with_default_granularity(), - .argument_declarations = try PinnedArray(Debug.Declaration.Argument).init_with_default_granularity(), - .assembly_instructions = try PinnedArray(InlineAssembly.Instruction).init_with_default_granularity(), - .function_prototypes = try PinnedArray(Function.Prototype).init_with_default_granularity(), - .inline_assembly = try PinnedArray(InlineAssembly).init_with_default_granularity(), - .instructions = try 
PinnedArray(Instruction).init_with_default_granularity(), - .basic_blocks = try PinnedArray(BasicBlock).init_with_default_granularity(), - .constant_structs = try PinnedArray(V.Comptime.ConstantStruct).init_with_default_granularity(), - .constant_arrays = try PinnedArray(V.Comptime.ConstantArray).init_with_default_granularity(), - .constant_slices = try PinnedArray(V.Comptime.ConstantSlice).init_with_default_granularity(), - .error_fields = try PinnedArray(Type.Error.Field).init_with_default_granularity(), - }; + .abi = switch (@import("builtin").abi) { + .none => .none, + .gnu => .gnu, + .musl => .musl, + else => |t| @panic(@tagName(t)), + }, + .optimization = .none, + .link_libc = false, + .generate_debug_information = true, + .name = "build", + .is_test = false, + .c_source_files = &.{}, + }); try unit.compile(context); const argv: []const []const u8 = &.{ "nat/build", "-compiler_path", context.executable_absolute_path }; @@ -3034,57 +2983,6 @@ pub fn buildExecutable(context: *const Context, arguments: []const []const u8, o break :blk slice; }; - // const unit = try context.allocator.create(Unit); - // unit.* = .{ - // .descriptor = .{ - // .main_package_path = main_package_path, - // .executable_path = executable_path, - // .object_path = object_file_path, - // .only_parse = only_parse, - // .arch = arch, - // .os = os, - // .abi = abi, - // .optimization = optimization, - // .link_libc = switch (os) { - // .linux => link_libc, - // .macos => true, - // .windows => link_libc, - // // .windows => link_libc, - // // else => unreachable, - // }, - // .link_libcpp = false, - // .generate_debug_information = generate_debug_information, - // .name = executable_name, - // .is_test = options.is_test, - // .c_source_files = c_source_files.slice(), - // }, - // .token_buffer = Token.Buffer{ - // .tokens = try PinnedArray(Token).init_with_default_granularity(), - // .line_offsets = try PinnedArray(u32).init_with_default_granularity(), - // }, - // .node_buffer = try PinnedArray(Node).init_with_default_granularity(), - // .node_lists = try PinnedArray([]const Node.Index).init_with_default_granularity(), - // .data_to_emit = try PinnedArray(*Debug.Declaration.Global).init_with_default_granularity(), - // .file_token_offsets = try PinnedHashMap(Token.Range, Debug.File.Index).init(std.mem.page_size), - // .file_map = try PinnedHashMap([]const u8, Debug.File.Index).init(std.mem.page_size), - // .identifiers = try PinnedHashMap(u32, []const u8).init(std.mem.page_size), - // .string_literal_values = try PinnedHashMap(u32, [:0]const u8).init(std.mem.page_size), - // .string_literal_globals = try PinnedHashMap(u32, *Debug.Declaration.Global).init(std.mem.page_size), - // .optionals = try PinnedHashMap(Type.Index, Type.Index).init(std.mem.page_size), - // .pointers = try PinnedHashMap(Type.Pointer, Type.Index).init(std.mem.page_size), - // .slices = try PinnedHashMap(Type.Slice, Type.Index).init(std.mem.page_size), - // .arrays = try PinnedHashMap(Type.Array, Type.Index).init(std.mem.page_size), - // .integers = try PinnedHashMap(Type.Integer, Type.Index).init(std.mem.page_size), - // .error_unions = try PinnedHashMap(Type.Error.Union.Descriptor, Type.Index).init(std.mem.page_size), - // .two_structs = try PinnedHashMap([2]Type.Index, Type.Index).init(std.mem.page_size), - // .fields_array = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // .name_functions = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // .external_functions = try 
PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // .type_declarations = try PinnedHashMap(Type.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // .test_functions = try PinnedHashMap(*Debug.Declaration.Global, *Debug.Declaration.Global).init(std.mem.page_size), - // .code_to_emit = try PinnedHashMap(Function.Definition.Index, *Debug.Declaration.Global).init(std.mem.page_size), - // }; - const unit = try createUnit(context, .{ .main_package_path = main_package_path, .object_path = object_file_path, From ead0dbb55077d895ed479802fd31d03b98048438 Mon Sep 17 00:00:00 2001 From: David Gonzalez Martin Date: Fri, 26 Apr 2024 08:20:52 -0600 Subject: [PATCH 14/14] Fix MacOS build --- bootstrap/Compilation.zig | 2 +- bootstrap/backend/llvm.zig | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index b2422a4..bf1826c 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -575,7 +575,7 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8 target_triple_buffer.appendSliceAssumeCapacity("x86_64-"); }, .aarch64 => { - target_triple_buffer.appendSliceAssumeCapacityppend_slice("aarch64-"); + target_triple_buffer.appendSliceAssumeCapacity("aarch64-"); }, else => @compileError("Architecture not supported"), } diff --git a/bootstrap/backend/llvm.zig b/bootstrap/backend/llvm.zig index 284b368..4272289 100644 --- a/bootstrap/backend/llvm.zig +++ b/bootstrap/backend/llvm.zig @@ -2526,7 +2526,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo global_variable.setInitializer(constant_initializer); } - var phis = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value.Instruction.PhiNode).init(0x1000); + var phis = try PinnedHashMap(Compilation.Instruction.Index, *LLVM.Value.Instruction.PhiNode).init(std.mem.page_size); for (llvm.function_definition_map.keys(), llvm.function_definition_map.values()) |function_declaration, function| { phis.clear();