Get rid of UnpinnedArray in the parser

David Gonzalez Martin 2024-04-24 19:36:38 -06:00
parent cabe273670
commit 7bd4095cd7
3 changed files with 95 additions and 83 deletions
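The change replaces the parser's growable, allocator-backed UnpinnedArray with two cheaper schemes. In the first file (the Builder), element counts are known before the loops run, so the node lists become exact-size arrays handed out by the arena. In the second file (the parser's Analyzer), transient lists become Node.StackList, a fixed-capacity BoundedArray(Node.Index, 512) on the stack, and nodeList copies whatever survives into arena memory. The third file fixes Arena.new_array so it returns a correctly aligned, correctly sized slice. As orientation, here is a minimal sketch of the fixed-capacity list shape the new StackList relies on; it assumes library.BoundedArray looks like Zig's std.BoundedArray and is not the project's actual implementation:

const std = @import("std");

// Illustrative sketch only: a fixed-capacity list with the surface the diff uses
// (a `buffer` array, a `len` field, a failing `append`, and `slice()`).
pub fn BoundedArray(comptime T: type, comptime capacity: usize) type {
    return struct {
        buffer: [capacity]T = undefined,
        len: usize = 0,

        const Self = @This();

        pub fn append(self: *Self, item: T) !void {
            // The fixed cap is presumably what the diff's "TODO: this is dangerous" is about.
            if (self.len == capacity) return error.OutOfMemory;
            self.buffer[self.len] = item;
            self.len += 1;
        }

        pub fn slice(self: *Self) []T {
            return self.buffer[0..self.len];
        }
    };
}

test "bounded array sketch" {
    var list = BoundedArray(u32, 4){};
    try list.append(42);
    try std.testing.expectEqual(@as(usize, 1), list.slice().len);
}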


@@ -8671,10 +8671,14 @@ pub const Builder = struct {
             break :blk result;
         };
-        var declaration_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.declarations);
-        var field_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.fields);
-        var comptime_block_nodes = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.comptime_blocks);
-        var test_declarations = try UnpinnedArray(Node.Index).initialize_with_capacity(context.my_allocator, count.test_declarations);
+        var declaration_nodes = try context.arena.new_array(Node.Index, count.declarations);
+        var field_nodes = try context.arena.new_array(Node.Index, count.fields);
+        var comptime_block_nodes = try context.arena.new_array(Node.Index, count.comptime_blocks);
+        var test_declarations = try context.arena.new_array(Node.Index, count.test_declarations);
+        declaration_nodes.len = 0;
+        field_nodes.len = 0;
+        comptime_block_nodes.len = 0;
+        test_declarations.len = 0;
         for (container_nodes) |member_index| {
             const member_node = unit.getNode(member_index);
@@ -8685,12 +8689,14 @@ pub const Builder = struct {
                 .field => &field_nodes,
                 .test_declaration => &test_declarations,
             };
-            array_list.append_with_capacity(member_index);
+            const index = array_list.len;
+            array_list.len += 1;
+            array_list.*[index] = member_index;
         }
         var export_declarations = UnpinnedArray(*Debug.Declaration.Global){};
         if (count.declarations > 0) {
-            for (declaration_nodes.slice()) |declaration_node_index| {
+            for (declaration_nodes) |declaration_node_index| {
                 const declaration_node = unit.getNode(declaration_node_index);
                 switch (declaration_node.id) {
@@ -8786,7 +8792,7 @@ pub const Builder = struct {
         if (count.fields > 0) {
             const ty = unit.types.get(data.plain);
-            const field_count = field_nodes.length;
+            const field_count: u32 = @intCast(field_nodes.len);
             switch (container_type) {
                 .@"enum" => {
                     const integer_type = &ty.integer;
@@ -8794,7 +8800,7 @@ pub const Builder = struct {
                     try enum_type.fields.ensure_capacity(context.my_allocator, field_count);
                     if (integer_type.bit_count == 0) {
-                        integer_type.bit_count = @bitSizeOf(@TypeOf(field_nodes.length)) - @clz(field_nodes.length);
+                        integer_type.bit_count = @bitSizeOf(@TypeOf(field_nodes.len)) - @clz(field_nodes.len);
                     }
                     assert(integer_type.bit_count > 0);
                 },
@@ -8811,7 +8817,7 @@ pub const Builder = struct {
             var sliceable_length_index: ?u32 = null;
             var ignore_field_count: u8 = 0;
-            for (field_nodes.slice(), 0..) |field_node_index, index| {
+            for (field_nodes, 0..) |field_node_index, index| {
                 const field_node = unit.getNode(field_node_index);
                 const identifier = switch (unit.token_buffer.tokens.get(field_node.token).id) {
                     .identifier => unit.getExpectedTokenBytes(field_node.token, .identifier),
@@ -8920,7 +8926,7 @@ pub const Builder = struct {
             builder.emit_ir = false;
             defer builder.emit_ir = emit_ir;
-            for (comptime_block_nodes.slice()) |comptime_node_index| {
+            for (comptime_block_nodes) |comptime_node_index| {
                 const comptime_node = unit.getNode(comptime_node_index);
                 assert(comptime_node.id == .@"comptime");
@@ -8965,7 +8971,7 @@ pub const Builder = struct {
                 break :b function_type;
             };
-            for (test_declarations.slice()) |test_declaration_node_index| {
+            for (test_declarations) |test_declaration_node_index| {
                 const test_node = unit.getNode(test_declaration_node_index);
                 assert(test_node.id == .test_declaration);
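One detail worth calling out in the Builder hunks: new_array returns a slice whose memory covers the full count, the code immediately sets len to 0, and the member loop then re-grows the slice one element at a time (bump len, write at the old index). A Zig slice is just a pointer/length pair, so shrinking len hides elements without releasing the reserved memory; this replaces UnpinnedArray's append_with_capacity. A standalone sketch of the same pattern, with std.mem.Allocator standing in for the compiler's arena:

const std = @import("std");

// Sketch (not the compiler's code): reserve the full capacity once, shrink the
// slice view to zero, then push by extending the view back into reserved memory.
fn pushPreallocated(allocator: std.mem.Allocator, values: []const u32) ![]u32 {
    var items = try allocator.alloc(u32, values.len); // memory for every element
    items.len = 0; // empty view; the allocation itself is untouched
    for (values) |v| {
        const index = items.len;
        items.len += 1; // safe: never exceeds the reserved count
        items[index] = v;
    }
    return items;
}

test "push into preallocated slice" {
    const out = try pushPreallocated(std.testing.allocator, &.{ 1, 2, 3 });
    defer std.testing.allocator.free(out);
    try std.testing.expectEqual(@as(u32, 2), out[1]);
}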


@@ -6,10 +6,10 @@ const library = @import("../library.zig");
 const Arena = library.Arena;
 const byte_equal = library.byte_equal;
 const BlockList = library.BlockList;
+const BoundedArray = library.BoundedArray;
 const enumFromString = library.enumFromString;
 const PinnedArray = library.PinnedArray;
 const MyAllocator = library.MyAllocator;
-const UnpinnedArray = library.UnpinnedArray;
 
 const lexer = @import("lexer.zig");
@@ -66,6 +66,9 @@ pub const Node = struct {
     token: Token.Index,
     id: Id,
 
+    // TODO: this is dangerous
+    pub const StackList = BoundedArray(Node.Index, 512);
+
     pub const Index = PinnedArray(Node).Index;
 
     pub const Range = struct {
@@ -337,7 +340,8 @@ const Analyzer = struct {
         const attribute_node_index: Node.Index = if (analyzer.peekToken() == .operator_colon) b: {
             analyzer.consumeToken();
-            var list = UnpinnedArray(Node.Index){};
+            var stack_list = Node.StackList{};
             while (analyzer.peekToken() != .operator_assign) {
                 const identifier = try analyzer.expectToken(.identifier);
                 const identifier_name = analyzer.bytes(identifier);
@@ -359,7 +363,7 @@ const Analyzer = struct {
                     }
                 } else @panic(identifier_name);
-                try list.append(analyzer.my_allocator, attribute_node);
+                try stack_list.append(attribute_node);
                 switch (analyzer.peekToken()) {
                     .operator_assign => {},
@@ -368,7 +372,7 @@ const Analyzer = struct {
                 }
             }
-            break :b try analyzer.nodeList(list.slice());
+            break :b try analyzer.nodeList(&stack_list);
         } else .null;
         break :blk try analyzer.addNode(.{
@@ -425,7 +429,7 @@ const Analyzer = struct {
     fn functionPrototype(analyzer: *Analyzer) !Node.Index {
         const token = analyzer.token_i;
-        var attribute_and_return_type_node_list = UnpinnedArray(Node.Index){};
+        var attribute_and_return_type_node_list = Node.StackList{};
         while (analyzer.peekToken() != .operator_left_parenthesis) {
             const identifier = try analyzer.expectToken(.identifier);
@@ -458,7 +462,7 @@ const Analyzer = struct {
             }
         } else @panic(identifier_name);
-        try attribute_and_return_type_node_list.append(analyzer.my_allocator, attribute_node);
+        try attribute_and_return_type_node_list.append(attribute_node);
         if (analyzer.peekToken() == .operator_comma) analyzer.consumeToken();
     }
@@ -467,13 +471,13 @@ const Analyzer = struct {
         const arguments = try analyzer.argumentList(.operator_left_parenthesis, .operator_right_parenthesis);
         const return_type = try analyzer.typeExpression();
-        try attribute_and_return_type_node_list.append(analyzer.my_allocator, return_type);
+        try attribute_and_return_type_node_list.append(return_type);
         const function_prototype = try analyzer.addNode(.{
             .id = .function_prototype,
             .token = token,
             .left = arguments,
-            .right = try analyzer.nodeList(attribute_and_return_type_node_list.slice()),
+            .right = try analyzer.nodeList(&attribute_and_return_type_node_list),
         });
         return function_prototype;
@@ -484,7 +488,7 @@ const Analyzer = struct {
             _ = try analyzer.expectToken(start_token);
         }
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         while (analyzer.peekToken() != end_token) {
             const identifier_token = analyzer.token_i;
@@ -506,7 +510,7 @@ const Analyzer = struct {
                 analyzer.consumeToken();
             }
-            try list.append(analyzer.my_allocator, try analyzer.addNode(.{
+            try list.append(try analyzer.addNode(.{
                 .id = id,
                 .token = identifier_token,
                 .left = type_expression,
@@ -516,8 +520,8 @@ const Analyzer = struct {
         _ = try analyzer.expectToken(end_token);
-        if (list.length != 0) {
-            return try analyzer.nodeList(list.slice());
+        if (list.len != 0) {
+            return try analyzer.nodeList(&list);
         } else {
             return Node.Index.null;
         }
@@ -531,7 +535,7 @@ const Analyzer = struct {
     fn block(analyzer: *Analyzer) anyerror!Node.Index {
         const left_brace = try analyzer.expectToken(.operator_left_brace);
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         while (analyzer.peekToken() != .operator_right_brace) {
             const first_statement_token = analyzer.peekToken();
@@ -548,7 +552,7 @@ const Analyzer = struct {
                 => try analyzer.symbolDeclaration(),
             };
-            try list.append(analyzer.my_allocator, statement_index);
+            try list.append(statement_index);
         }
         _ = try analyzer.expectToken(.operator_right_brace);
@@ -556,7 +560,7 @@ const Analyzer = struct {
         return try analyzer.addNode(.{
             .id = .block,
             .token = left_brace,
-            .left = try analyzer.nodeList(list.slice()),
+            .left = try analyzer.nodeList(&list),
             .right = Node.Index.null,
         });
     }
@@ -594,7 +598,7 @@ const Analyzer = struct {
         // logln(.parser, .@"switch", "Parsed switch expression...", .{});
         _ = try analyzer.expectToken(.operator_left_brace);
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         while (analyzer.peekToken() != .operator_right_brace) {
             const case_token = analyzer.token_i;
@@ -605,7 +609,7 @@ const Analyzer = struct {
                     break :blk Node.Index.null;
                 },
                 else => blk: {
-                    var array_list = UnpinnedArray(Node.Index){};
+                    var array_list = Node.StackList{};
                     while (true) {
                         const token = analyzer.token_i;
                         const left = try analyzer.expression();
@@ -623,7 +627,7 @@ const Analyzer = struct {
                             else => left,
                         };
-                        try array_list.append(analyzer.my_allocator, switch_case_node);
+                        try array_list.append(switch_case_node);
                         switch (analyzer.peekToken()) {
                             .operator_comma => analyzer.consumeToken(),
@@ -632,10 +636,10 @@ const Analyzer = struct {
                         }
                     }
-                    break :blk switch (array_list.length) {
+                    break :blk switch (array_list.len) {
                         0 => unreachable,
-                        1 => array_list.pointer[0],
-                        else => try analyzer.nodeList(array_list.slice()),
+                        1 => array_list.buffer[0],
+                        else => try analyzer.nodeList(&array_list),
                     };
                 },
             };
@@ -656,7 +660,7 @@ const Analyzer = struct {
                 .right = expr,
             });
-            try list.append(analyzer.my_allocator, node);
+            try list.append(node);
         }
         _ = try analyzer.expectToken(.operator_right_brace);
@@ -665,7 +669,7 @@ const Analyzer = struct {
             .id = .@"switch",
             .token = switch_token,
            .left = switch_expression,
-            .right = try analyzer.nodeList(list.slice()),
+            .right = try analyzer.nodeList(&list),
         });
     }
@@ -741,7 +745,7 @@ const Analyzer = struct {
         const token = try analyzer.expectToken(.fixed_keyword_for);
         _ = try analyzer.expectToken(.operator_left_parenthesis);
-        var for_expression_list = UnpinnedArray(Node.Index){};
+        var for_expression_list = Node.StackList{};
         while (analyzer.peekToken() != .operator_right_parenthesis) {
             const expression_token = analyzer.token_i;
@@ -766,7 +770,7 @@ const Analyzer = struct {
                 else => |t| @panic(@tagName(t)),
             };
-            try for_expression_list.append(analyzer.my_allocator, node_index);
+            try for_expression_list.append(node_index);
             switch (analyzer.peekToken()) {
                 .operator_comma => analyzer.consumeToken(),
@@ -779,7 +783,7 @@ const Analyzer = struct {
         _ = try analyzer.expectToken(.operator_bar);
-        var payload_nodes = UnpinnedArray(Node.Index){};
+        var payload_nodes = Node.StackList{};
         while (analyzer.peekToken() != .operator_bar) {
             const payload_token = analyzer.token_i;
@@ -797,7 +801,7 @@ const Analyzer = struct {
                 else => |t| @panic(@tagName(t)),
             }
-            try payload_nodes.append(analyzer.my_allocator, try analyzer.addNode(.{
+            try payload_nodes.append(try analyzer.addNode(.{
                 .id = id,
                 .token = payload_token,
                 .left = Node.Index.null,
@@ -807,15 +811,15 @@ const Analyzer = struct {
         _ = try analyzer.expectToken(.operator_bar);
-        if (payload_nodes.length != for_expression_list.length) {
+        if (payload_nodes.len != for_expression_list.len) {
             unreachable;
         }
         const for_condition_node = try analyzer.addNode(.{
             .id = .for_condition,
             .token = token,
-            .left = try analyzer.nodeList(for_expression_list.slice()),
-            .right = try analyzer.nodeList(payload_nodes.slice()),
+            .left = try analyzer.nodeList(&for_expression_list),
+            .right = try analyzer.nodeList(&payload_nodes),
         });
         const true_expression = switch (analyzer.peekToken()) {
@@ -936,17 +940,17 @@ const Analyzer = struct {
             }
         } else @panic(intrinsic_name);
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         if (intrinsic_id == .@"asm") {
             const backtick = try analyzer.expectToken(.operator_backtick);
-            var instruction_list = UnpinnedArray(Node.Index){};
+            var instruction_list = Node.StackList{};
             while (analyzer.peekToken() != .operator_backtick) {
                 const instruction_token = analyzer.token_i;
                 const instruction_name = try analyzer.identifierNode();
-                var operand_list = UnpinnedArray(Node.Index){};
+                var operand_list = Node.StackList{};
                 while (analyzer.peekToken() != .operator_semicolon) {
                     const node = switch (analyzer.peekToken()) {
                         .identifier => try analyzer.addNode(.{
@@ -988,7 +992,7 @@ const Analyzer = struct {
                         .operator_semicolon => {},
                         else => |t| @panic(@tagName(t)),
                    }
-                    try operand_list.append(analyzer.my_allocator, node);
+                    try operand_list.append(node);
                 }
                 analyzer.consumeToken();
@@ -997,10 +1001,10 @@ const Analyzer = struct {
                     .id = .assembly_instruction,
                     .token = instruction_token,
                     .left = instruction_name,
-                    .right = try analyzer.nodeList(operand_list.slice()),
+                    .right = try analyzer.nodeList(&operand_list),
                 });
-                try instruction_list.append(analyzer.my_allocator, instruction);
+                try instruction_list.append(instruction);
             }
             _ = try analyzer.expectToken(.operator_backtick);
@@ -1009,15 +1013,15 @@ const Analyzer = struct {
             const assembly_block = try analyzer.addNode(.{
                 .id = .assembly_code_block,
                 .token = backtick,
-                .left = try analyzer.nodeList(instruction_list.slice()),
+                .left = try analyzer.nodeList(&instruction_list),
                 .right = .null,
             });
-            try list.append(analyzer.my_allocator, assembly_block);
+            try list.append(assembly_block);
             const intrinsic = try analyzer.addNode(.{
                 .id = .intrinsic,
                 .token = intrinsic_token,
-                .left = try analyzer.nodeList(list.slice()),
+                .left = try analyzer.nodeList(&list),
                 .right = @enumFromInt(@intFromEnum(intrinsic_id)),
             });
@@ -1025,7 +1029,7 @@ const Analyzer = struct {
         } else {
             while (analyzer.peekToken() != .operator_right_parenthesis) {
                 const parameter = try analyzer.expression();
-                try list.append(analyzer.my_allocator, parameter);
+                try list.append(parameter);
                 switch (analyzer.peekToken()) {
                     .operator_comma => analyzer.consumeToken(),
@@ -1041,7 +1045,7 @@ const Analyzer = struct {
             return try analyzer.addNode(.{
                 .id = .intrinsic,
                 .token = intrinsic_token,
-                .left = try analyzer.nodeList(list.slice()),
+                .left = try analyzer.nodeList(&list),
                 .right = @enumFromInt(@intFromEnum(intrinsic_id)),
             });
         }
@@ -1452,7 +1456,7 @@ const Analyzer = struct {
     fn pointerOrArrayTypeExpression(analyzer: *Analyzer, expected: PointerOrArrayTypeExpectedExpression) !Node.Index {
         const first = analyzer.token_i;
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         const expression_type: Node.Id = switch (expected) {
             .single_pointer_type => blk: {
@@ -1461,7 +1465,7 @@ const Analyzer = struct {
                 break :blk .pointer_type;
             },
             .many_pointer_type => blk: {
-                try list.append(analyzer.my_allocator, try analyzer.addNode(.{
+                try list.append(try analyzer.addNode(.{
                     .id = .many_pointer_expression,
                     .token = analyzer.token_i,
                     .left = Node.Index.null,
@@ -1471,7 +1475,7 @@ const Analyzer = struct {
                _ = try analyzer.expectToken(.operator_ampersand);
                switch (analyzer.peekToken()) {
                    .operator_right_bracket => {},
-                    .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()),
+                    .operator_colon => try list.append(try analyzer.parseTermination()),
                    else => |t| @panic(@tagName(t)),
                }
                _ = try analyzer.expectToken(.operator_right_bracket);
@@ -1486,17 +1490,17 @@ const Analyzer = struct {
                         break :blk .slice_type;
                     },
                     .operator_colon => {
-                        try list.append(analyzer.my_allocator, try analyzer.parseTermination());
+                        try list.append(try analyzer.parseTermination());
                         _ = try analyzer.expectToken(.operator_right_bracket);
                         break :blk .slice_type;
                     },
                     else => {
                         const length_expression = try analyzer.expression();
-                        try list.append(analyzer.my_allocator, length_expression);
+                        try list.append(length_expression);
                         switch (analyzer.peekToken()) {
                             .operator_right_bracket => {},
-                            .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()),
+                            .operator_colon => try list.append(try analyzer.parseTermination()),
                             else => |t| @panic(@tagName(t)),
                         }
@@ -1521,17 +1525,17 @@ const Analyzer = struct {
             analyzer.consumeTokens(@intFromBool(analyzer.peekToken() == .fixed_keyword_const));
             if (const_node != .null) {
-                try list.append(analyzer.my_allocator, const_node);
+                try list.append(const_node);
             }
         } else {
-            assert(list.length > 0);
+            assert(list.len > 0);
         }
         const type_expression = try analyzer.typeExpression();
         assert(type_expression != .null);
-        try list.append(analyzer.my_allocator, type_expression);
-        const node_list = try analyzer.nodeList(list.slice());
+        try list.append(type_expression);
+        const node_list = try analyzer.nodeList(&list);
         const node = Node{
             .id = expression_type,
@@ -1640,7 +1644,7 @@ const Analyzer = struct {
         const left_parenthesis = analyzer.token_i;
         analyzer.consumeToken();
-        var expression_list = UnpinnedArray(Node.Index){};
+        var expression_list = Node.StackList{};
         // logln(.parser, .suffix, "[DEPTH={}] Initializating suffix call-like expression", .{analyzer.suffix_depth});
         while (analyzer.peekToken() != .operator_right_parenthesis) {
             const current_token = analyzer.token_i;
@@ -1659,7 +1663,7 @@ const Analyzer = struct {
                 });
             }
-            try expression_list.append(analyzer.my_allocator, parameter);
+            try expression_list.append(parameter);
             switch (analyzer.peekToken()) {
                 .operator_right_parenthesis => {},
@@ -1679,7 +1683,7 @@ const Analyzer = struct {
                 .id = .call,
                 .token = left_parenthesis,
                 .left = result,
-                .right = try analyzer.nodeList(expression_list.slice()),
+                .right = try analyzer.nodeList(&expression_list),
             });
         } else {
             return result;
@@ -1693,7 +1697,7 @@ const Analyzer = struct {
     fn containerLiteral(analyzer: *Analyzer, type_node: Node.Index) anyerror!Node.Index {
         const token = try analyzer.expectToken(.operator_left_brace);
-        var list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         const InitializationType = enum {
             anonymous,
@@ -1722,7 +1726,7 @@ const Analyzer = struct {
                         .right = Node.Index.null,
                     });
-                    try list.append(analyzer.my_allocator, field_initialization);
+                    try list.append(field_initialization);
                     switch (analyzer.peekToken()) {
                         .operator_comma => analyzer.consumeToken(),
                         else => {},
@@ -1733,7 +1737,7 @@ const Analyzer = struct {
                     else => |t| @panic(@tagName(t)),
                 },
                 else => blk: {
-                    try list.append(analyzer.my_allocator, try analyzer.anonymousExpression());
+                    try list.append(try analyzer.anonymousExpression());
                     _ = try analyzer.expectToken(.operator_comma);
                     break :blk .anonymous;
                 },
@@ -1750,7 +1754,7 @@ const Analyzer = struct {
                         else => {},
                     }
-                    try list.append(analyzer.my_allocator, field_expression_initializer);
+                    try list.append(field_expression_initializer);
                     break :blk .anonymous;
                 },
                 else => |t| @panic(@tagName(t)),
@@ -1794,7 +1798,7 @@ const Analyzer = struct {
             },
             .token = token,
             .left = type_node,
-            .right = try analyzer.nodeList(list.slice()),
+            .right = try analyzer.nodeList(&list),
         });
     }
@@ -1830,10 +1834,10 @@ const Analyzer = struct {
         const parameters_node = if (analyzer.hasTokens() and analyzer.peekToken() == .operator_left_parenthesis) b: {
            analyzer.consumeToken();
-            var list = UnpinnedArray(Node.Index){};
+            var list = Node.StackList{};
             while (analyzer.peekToken() != .operator_right_parenthesis) {
                 const parameter_node = try analyzer.expression();
-                try list.append(analyzer.my_allocator, parameter_node);
+                try list.append(parameter_node);
                 switch (analyzer.peekToken()) {
                     .operator_comma => analyzer.consumeToken(),
                     else => {},
@@ -1842,11 +1846,11 @@ const Analyzer = struct {
             analyzer.consumeToken();
-            break :b try analyzer.nodeList(list.slice());
+            break :b try analyzer.nodeList(&list);
         } else Node.Index.null;
         if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_left_brace);
-        var node_list = UnpinnedArray(Node.Index){};
+        var list = Node.StackList{};
         while (analyzer.hasTokens() and analyzer.peekToken() != .operator_right_brace) {
             const first = analyzer.token_i;
@@ -1931,7 +1935,7 @@ const Analyzer = struct {
             // logln(.parser, .container_members, "Container member {s}", .{@tagName(member_node.id)});
             assert(member_node.id != .identifier);
-            try node_list.append(analyzer.my_allocator, member_node_index);
+            try list.append(member_node_index);
         }
         if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_right_brace);
@@ -1951,7 +1955,7 @@ const Analyzer = struct {
         return try analyzer.addNode(.{
             .id = node_id,
             .token = token_i,
-            .left = try analyzer.nodeList(node_list.slice()),
+            .left = try analyzer.nodeList(&list),
             .right = parameters_node,
         });
     }
@@ -2139,7 +2143,7 @@ const Analyzer = struct {
                 } else Node.Index.null;
                 _ = try analyzer.expectToken(.operator_left_brace);
-                var list = UnpinnedArray(Node.Index){};
+                var list = Node.StackList{};
                 while (analyzer.peekToken() != .operator_right_brace) {
                     const tok_i = analyzer.token_i;
@@ -2165,7 +2169,7 @@ const Analyzer = struct {
                         .right = value_associated,
                     });
-                    try list.append(analyzer.my_allocator, error_field_node);
+                    try list.append(error_field_node);
                 }
                 analyzer.consumeToken();
@@ -2173,7 +2177,7 @@ const Analyzer = struct {
                 break :blk try analyzer.addNode(.{
                     .id = .error_type,
                     .token = token_i,
-                    .left = try analyzer.nodeList(list.slice()),
+                    .left = try analyzer.nodeList(&list),
                     .right = backing_type,
                 });
             },
@@ -2357,9 +2361,11 @@ const Analyzer = struct {
         return node_index;
     }
 
-    fn nodeList(analyzer: *Analyzer, node_list: []const Node.Index) !Node.Index {
+    fn nodeList(analyzer: *Analyzer, stack_list: *Node.StackList) !Node.Index {
+        const heap_list = try analyzer.arena.new_array(Node.Index, stack_list.len);
+        @memcpy(heap_list, stack_list.slice());
         const index = analyzer.node_lists.length;
-        _ = analyzer.node_lists.append(node_list);
+        _ = analyzer.node_lists.append(heap_list);
         return try analyzer.addNode(.{
             .id = .node_list,
             .token = @enumFromInt(0),
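The last hunk above is the heart of the parser change: nodeList now receives a pointer to the caller's fixed-capacity stack list and copies the live elements into an arena-owned array before recording it in node_lists, since the stack buffer dies when the caller returns. An equivalent standalone sketch, with std.BoundedArray standing in for Node.StackList and a plain allocator for the arena:

const std = @import("std");

// Sketch of the copy-out step: the stack list's contents are persisted into
// allocator-owned (arena) memory so the stored slice outlives the caller's frame.
fn persist(allocator: std.mem.Allocator, stack_list: *std.BoundedArray(u32, 512)) ![]u32 {
    const heap_list = try allocator.alloc(u32, stack_list.len); // exact-size target
    @memcpy(heap_list, stack_list.slice());
    return heap_list;
}

test "persist copies the live elements" {
    var list = std.BoundedArray(u32, 512){};
    try list.append(7);
    const out = try persist(std.testing.allocator, &list);
    defer std.testing.allocator.free(out);
    try std.testing.expectEqual(@as(u32, 7), out[0]);
}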


@@ -77,8 +77,8 @@ pub const Arena = struct{
     }
 
     pub inline fn new_array(arena: *Arena, comptime T: type, count: usize) ![]T {
-        const result: [*]T = @ptrCast(try arena.allocate(@sizeOf(T) * count));
-        return result;
+        const result: [*]T = @ptrCast(@alignCast(try arena.allocate(@sizeOf(T) * count)));
+        return result[0..count];
     }
 };
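The Arena.new_array fix packs two corrections into one hunk: @alignCast re-asserts T's alignment on the pointer coming back from allocate (which, judging by the cast, hands out raw bytes), and result[0..count] turns the many-item pointer into the []T slice the signature promises, carrying the element count with it. A self-contained sketch of the corrected shape, assuming a [*]u8 source pointer:

const std = @import("std");

// Sketch of the fixed conversion: bytes -> aligned many-item pointer -> sized slice.
fn newArray(comptime T: type, bytes: [*]u8, count: usize) []T {
    const result: [*]T = @ptrCast(@alignCast(bytes));
    return result[0..count];
}

test "newArray builds a correctly sized view" {
    var storage: [4]u32 = .{ 1, 2, 3, 4 };
    const view = newArray(u32, @ptrCast(&storage), storage.len);
    try std.testing.expectEqual(@as(usize, 4), view.len);
    try std.testing.expectEqual(@as(u32, 3), view[2]);
}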