parent f0125f2141
commit 4538c395f9

(Two file diffs suppressed because they are too large.)
@@ -1,286 +0,0 @@
-const std = @import("std");
-const assert = std.debug.assert;
-
-pub const Allocator = std.mem.Allocator;
-pub const AutoArrayHashMap = std.AutoArrayHashMapUnmanaged;
-pub const ArrayList = std.ArrayListUnmanaged;
-pub const ArrayListAligned = std.ArrayListAlignedUnmanaged;
-pub const AutoHashMap = std.AutoHashMapUnmanaged;
-pub const BoundedArray = std.BoundedArray;
-pub const HashMap = std.HashMapUnmanaged;
-pub const StringHashMap = std.StringHashMapUnmanaged;
-pub const StringArrayHashMap = std.StringArrayHashMapUnmanaged;
-
-pub fn BlockList(comptime T: type, comptime E: type) type {
-    const item_count = 64;
-
-    return struct {
-        blocks: ArrayList(*Block) = .{},
-        len: usize = 0,
-
-        const Block = BoundedArray(T, item_count);
-        const List = @This();
-
-        pub const Index = getIndexForType(T, E);
-        pub const ElementIndex = Index.Index;
-
-        // pub const append = switch (list_type) {
-        //     .index => appendIndexed,
-        //     .pointer => appendPointer,
-        // };
-        // pub const addOne = switch (list_type) {
-        //     .index => addOneIndexed,
-        //     .pointer => addOnePointer,
-        // };
-
-        pub fn wrapSplit(block: usize, element: usize) ElementIndex {
-            return @enumFromInt(block * item_count + element);
-        }
-
-        pub fn get(list: *List, index: ElementIndex) *T {
-            assert(index != .null);
-            const i: u32 = @intFromEnum(index);
-            const block_index = i / item_count;
-            const element_index = i % item_count;
-            const block = list.blocks.items[block_index];
-            const block_slice = block.buffer[0..block.len];
-            const element = &block_slice[element_index];
-            return element;
-        }
-
-        pub fn append(list: *List, allocator: Allocator, element: T) !ElementIndex {
-            const result = try list.addOne(allocator);
-            list.get(result).* = element;
-            return result;
-        }
-
-        pub fn addOne(list: *List, allocator: Allocator) !ElementIndex {
-            const block_index = try list.getFreeBlock(allocator);
-            const block = list.blocks.items[block_index];
-            const index = block.len;
-            _ = try block.addOne();
-            return @enumFromInt(block_index * item_count + index);
-        }
-
-        fn getFreeBlock(list: *List, allocator: Allocator) !usize {
-            for (list.blocks.items, 0..) |block, i| {
-                block.ensureUnusedCapacity(1) catch continue;
-                return i;
-            } else {
-                const new_block = try allocator.create(Block);
-                new_block.* = .{};
-                const block_index = list.blocks.items.len;
-                try list.blocks.append(allocator, new_block);
-                return block_index;
-            }
-        }
-
-        pub fn indexOf(list: *List, elem: *const T) ElementIndex {
-            const address = @intFromPtr(elem);
-            for (list.blocks.items, 0..) |block, block_index| {
-                const base = @intFromPtr(&block.buffer[0]);
-                const top = base + @sizeOf(T) * item_count;
-                if (address >= base and address < top) {
-                    const result: u32 = @intCast(block_index * item_count + @divExact(address - base, @sizeOf(T)));
-                    return Index.wrap(result);
-                }
-            }
-
-            @panic("not found");
-        }
-    };
-}
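Note on the removed BlockList: elements live in separately allocated 64-slot blocks, so existing element pointers stay stable while the list grows, and a flat u32 index decomposes into (block, element) by division and remainder. A standalone sketch of just that index math, assuming the same item_count of 64 as above:

    const std = @import("std");

    test "a flat index splits into block and element" {
        const item_count = 64; // matches the removed BlockList
        const flat: u32 = 193; // lands in block 3, element 1
        try std.testing.expectEqual(@as(u32, 3), flat / item_count);
        try std.testing.expectEqual(@as(u32, 1), flat % item_count);
    }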
-
-pub fn getIndexForType(comptime T: type, comptime E: type) type {
-    assert(@typeInfo(E) == .Enum);
-    _ = T;
-    const IndexType = u32;
-    const MAX = std.math.maxInt(IndexType);
-
-    const EnumField = std.builtin.Type.EnumField;
-    comptime var fields: []const EnumField = &.{};
-    // comptime var enum_value: comptime_int = 0;
-    fields = fields ++ @typeInfo(E).Enum.fields;
-
-    // for (names) |name| {
-    //     fields = fields ++ [1]EnumField{.{
-    //         .name = name,
-    //         .value = enum_value,
-    //     }};
-    //     enum_value += 1;
-    // }
-
-    fields = fields ++ [1]EnumField{.{
-        .name = "null",
-        .value = MAX,
-    }};
-
-    const Result = @Type(.{
-        .Enum = .{
-            .tag_type = IndexType,
-            .fields = fields,
-            .decls = &.{},
-            .is_exhaustive = false,
-        },
-    });
-
-    return struct {
-        pub const Index = Result;
-
-        pub fn unwrap(this: Index) IndexType {
-            assert(this != .null);
-            return @intFromEnum(this);
-        }
-
-        pub fn wrap(value: IndexType) Index {
-            assert(value < MAX);
-            return @enumFromInt(value);
-        }
-
-        pub fn addInt(this: Index, value: IndexType) Index {
-            const this_int = @intFromEnum(this);
-            return @enumFromInt(this_int + value);
-        }
-
-        pub fn subInt(this: Index, value: IndexType) IndexType {
-            const this_int = @intFromEnum(this);
-            return this_int - value;
-        }
-
-        pub fn add(a: Index, b: Index) Index {
-            return @enumFromInt(@intFromEnum(a) + @intFromEnum(b));
-        }
-
-        pub fn sub(a: Index, b: Index) IndexType {
-            return @intFromEnum(a) - @intFromEnum(b);
-        }
-    };
-}
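getIndexForType reifies, at comptime, a non-exhaustive u32 enum carrying E's fields plus a `null` sentinel at maxInt(u32); asserting `value < MAX` in wrap keeps real indices from ever colliding with the sentinel. A hand-written equivalent of the assumed shape (sketch only; the generated type also splices in E's own tags before the sentinel):

    const std = @import("std");

    const Index = enum(u32) {
        null = std.math.maxInt(u32),
        _, // non-exhaustive: every other u32 is a valid wrapped index
    };

    test "the null sentinel never collides with wrapped values" {
        const i: Index = @enumFromInt(42);
        try std.testing.expect(i != .null);
        try std.testing.expectEqual(@as(u32, 42), @intFromEnum(i));
    }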
-
-pub const ListType = enum {
-    index,
-    pointer,
-};
-
-pub fn enumFromString(comptime E: type, string: []const u8) ?E {
-    return inline for (@typeInfo(E).Enum.fields) |enum_field| {
-        if (std.mem.eql(u8, string, enum_field.name)) {
-            break @field(E, enum_field.name);
-        }
-    } else null;
-}
-
-pub fn hash(string: []const u8) u32 {
-    const string_key: u32 = @truncate(std.hash.Wyhash.hash(0, string));
-    return string_key;
-}
-
-pub fn StringKeyMap(comptime Value: type) type {
-    return struct {
-        list: std.MultiArrayList(Data) = .{},
-
-        const Key = u32;
-        const Data = struct {
-            key: Key,
-            value: Value,
-        };
-
-        pub fn length(string_map: *@This()) usize {
-            return string_map.list.len;
-        }
-
-        fn hash(string: []const u8) Key {
-            const string_key: Key = @truncate(std.hash.Wyhash.hash(0, string));
-            return string_key;
-        }
-
-        pub fn getKey(string_map: *const @This(), string: []const u8) ?Key {
-            return if (string_map.getKeyPtr(string)) |key_ptr| key_ptr.* else null;
-        }
-
-        pub fn getKeyPtr(string_map: *const @This(), string_key: Key) ?*const Key {
-            for (string_map.list.items(.key)) |*key_ptr| {
-                if (key_ptr.* == string_key) {
-                    return key_ptr;
-                }
-            } else {
-                return null;
-            }
-        }
-
-        pub fn getValue(string_map: *const @This(), key: Key) ?Value {
-            if (string_map.getKeyPtr(key)) |key_ptr| {
-                const index = string_map.indexOfKey(key_ptr);
-                return string_map.list.items(.value)[index];
-            } else {
-                return null;
-            }
-        }
-
-        pub fn indexOfKey(string_map: *const @This(), key_ptr: *const Key) usize {
-            return @divExact(@intFromPtr(key_ptr) - @intFromPtr(string_map.list.items(.key).ptr), @sizeOf(Key));
-        }
-
-        const GOP = struct {
-            key: Key,
-            found_existing: bool,
-        };
-
-        pub fn getOrPut(string_map: *@This(), allocator: Allocator, string: []const u8, value: Value) !GOP {
-            const string_key: Key = @truncate(std.hash.Wyhash.hash(0, string));
-            for (string_map.list.items(.key)) |key| {
-                if (key == string_key) return .{
-                    .key = string_key,
-                    .found_existing = true,
-                };
-            } else {
-                try string_map.list.append(allocator, .{
-                    .key = string_key,
-                    .value = value,
-                });
-
-                return .{
-                    .key = string_key,
-                    .found_existing = false,
-                };
-            }
-        }
-    };
-}
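Note that StringKeyMap never stores the string itself: the key is the 64-bit Wyhash of the string truncated to 32 bits, so two distinct strings that collide in 32 bits would alias the same entry. A sketch of the key derivation as used above (seed 0, as in the original):

    const std = @import("std");

    // Same derivation as StringKeyMap.hash above; collisions are
    // possible in principle, the map simply accepts that risk.
    fn stringKey(string: []const u8) u32 {
        return @truncate(std.hash.Wyhash.hash(0, string));
    }

    test "equal strings derive equal 32-bit keys" {
        try std.testing.expectEqual(stringKey("main"), stringKey("main"));
        try std.testing.expect(stringKey("main") != stringKey("format")); // overwhelmingly likely, not guaranteed
    }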
-
-const page_size = std.mem.page_size;
-extern fn pthread_jit_write_protect_np(enabled: bool) void;
-
-pub fn mmap(size: usize, flags: packed struct {
-    executable: bool = false,
-}) ![]align(page_size) u8 {
-    return switch (@import("builtin").os.tag) {
-        .windows => blk: {
-            const windows = std.os.windows;
-            break :blk @as([*]align(page_size) u8, @ptrCast(@alignCast(try windows.VirtualAlloc(null, size, windows.MEM_COMMIT | windows.MEM_RESERVE, windows.PAGE_EXECUTE_READWRITE))))[0..size];
-        },
-        .linux, .macos => |os_tag| blk: {
-            const jit = switch (os_tag) {
-                .macos => 0x800,
-                .linux => 0,
-                else => unreachable,
-            };
-            const execute_flag: switch (os_tag) {
-                .linux => u32,
-                .macos => c_int,
-                else => unreachable,
-            } = if (flags.executable) std.os.PROT.EXEC else 0;
-            const protection_flags: u32 = @intCast(std.os.PROT.READ | std.os.PROT.WRITE | execute_flag);
-            const mmap_flags = std.os.MAP.ANONYMOUS | std.os.MAP.PRIVATE | jit;
-
-            const result = try std.os.mmap(null, size, protection_flags, mmap_flags, -1, 0);
-            if (@import("builtin").cpu.arch == .aarch64 and @import("builtin").os.tag == .macos) {
-                if (flags.executable) {
-                    pthread_jit_write_protect_np(false);
-                }
-            }
-
-            break :blk result;
-        },
-        else => @compileError("OS not supported"),
-    };
-}
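The mmap wrapper returns a page-aligned anonymous mapping on Windows, Linux, and macOS, optionally executable; on macOS it adds the MAP_JIT flag (0x800) and, on aarch64, disables JIT write protection via pthread_jit_write_protect_np. For reference, a hypothetical call site of the removed wrapper (assumes a POSIX target, where std.os.munmap applies, and the same Zig 0.11-era std.os API the file itself used):

    test "map and release one anonymous page" {
        const region = try mmap(page_size, .{});
        defer std.os.munmap(region); // POSIX-only; the Windows path would need VirtualFree
        region[0] = 0xAA; // the mapping is readable and writable
    }

The remaining hunks below modify the lexer and parser to use the replacement library.zig.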
@@ -3,11 +3,9 @@ const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
 const log = std.log;
 
-const equal = std.mem.eql;
-
-const data_structures = @import("../data_structures.zig");
-const ArrayList = data_structures.ArrayList;
+const data_structures = @import("../library.zig");
 const enumFromString = data_structures.enumFromString;
+const MyAllocator = data_structures.MyAllocator;
 
 const Compilation = @import("../Compilation.zig");
 const File = Compilation.File;
@@ -27,11 +25,6 @@ pub const Result = struct {
     count: u32,
     line_offset: u32,
     line_count: u32,
-    // ids: ArrayList(Token.Id) = .{},
-    // token_lines: ArrayList(u32) = .{},
-    // file_line_offsets: ArrayList(u32) = .{},
-    // token_offsets: ArrayList(u32) = .{},
-    // token_lengths: ArrayList(u32) = .{},
     time: u64 = 0,
 };
 
@@ -49,7 +42,7 @@ pub const Logger = enum {
     });
 };
 
-pub fn analyze(allocator: Allocator, text: []const u8, token_buffer: *Token.Buffer) !Result {
+pub fn analyze(allocator: *MyAllocator, text: []const u8, token_buffer: *Token.Buffer) !Result {
     assert(text.len <= std.math.maxInt(u32));
     const len: u32 = @intCast(text.len);
 
@@ -73,9 +66,9 @@ pub fn analyze(allocator: Allocator, text: []const u8, token_buffer: *Token.Buff
     var index: u32 = 0;
     var line_index: u32 = lexer.line_offset;
 
-    try token_buffer.tokens.ensureUnusedCapacity(allocator, text.len / 4);
+    try token_buffer.ensure_with_capacity(allocator, len / 4);
 
-    logln(.lexer, .end, "START LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset });
+    // logln(.lexer, .end, "START LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset });
 
     while (index < len) {
         const start_index = index;
@@ -116,7 +109,7 @@ pub fn analyze(allocator: Allocator, text: []const u8, token_buffer: *Token.Buff
                 const string = text[start_index..][0 .. index - start_index];
                 break :blk if (enumFromString(Compilation.FixedKeyword, string)) |fixed_keyword| switch (fixed_keyword) {
                     inline else => |comptime_fixed_keyword| @field(Token.Id, "fixed_keyword_" ++ @tagName(comptime_fixed_keyword)),
-                } else if (equal(u8, string, "_")) .discard else .identifier;
+                } else if (data_structures.byte_equal(string, "_")) .discard else .identifier;
             },
             '0'...'9' => blk: {
                 // Detect other non-decimal literals
@@ -404,25 +397,26 @@ pub fn analyze(allocator: Allocator, text: []const u8, token_buffer: *Token.Buff
                 break :blk .operator_dollar;
             },
             else => |ch| {
-                std.debug.panic("NI: '{c}'", .{ch});
+                const ch_arr = [1]u8{ch};
+                @panic(&ch_arr);
             },
         };
 
         const end_index = index;
        const token_length = end_index - start_index;
 
-        token_buffer.tokens.appendAssumeCapacity(.{
+        token_buffer.append_with_capacity(.{
            .id = token_id,
            .offset = start_index,
            .length = token_length,
            .line = line_index,
        });
-        const line_offset = token_buffer.line_offsets.items[line_index];
-        const column = start_index - line_offset;
-        logln(.lexer, .new_token, "T at line {}, column {}, byte offset {}, with length {} -line offset: {}- ({s})", .{ line_index, column, start_index, token_length, line_offset, @tagName(token_id) });
+        // const line_offset = token_buffer.line_offsets.pointer[line_index];
+        // const column = start_index - line_offset;
+        // logln(.lexer, .new_token, "T at line {}, column {}, byte offset {}, with length {} -line offset: {}- ({s})", .{ line_index, column, start_index, token_length, line_offset, @tagName(token_id) });
     }
 
-    logln(.lexer, .end, "END LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset });
+    // logln(.lexer, .end, "END LEXER - TOKEN OFFSET: {} - LINE OFFSET: {}", .{ Token.unwrap(lexer.offset), lexer.line_offset });
 
     lexer.count = Token.sub(token_buffer.getOffset(), lexer.offset);
     lexer.line_count = token_buffer.getLineOffset() - lexer.line_offset;
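Two mechanical substitutions recur throughout this commit: std.mem.eql(u8, a, b) becomes data_structures.byte_equal(a, b), and the std.mem.Allocator parameter becomes the project's *MyAllocator. The diff shows only call sites; a plausible minimal byte_equal consistent with how it is used here (an assumption — the real library.zig definition is not part of this diff):

    // Sketch: length check, then byte-wise comparison, mirroring
    // what the replaced std.mem.eql(u8, ...) calls did.
    pub fn byte_equal(a: []const u8, b: []const u8) bool {
        if (a.len != b.len) return false;
        for (a, b) |x, y| {
            if (x != y) return false;
        }
        return true;
    }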
@@ -1,19 +1,16 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
-const equal = std.mem.eql;
-const panic = std.debug.panic;
 
-const data_structures = @import("../data_structures.zig");
-const ArrayList = data_structures.ArrayList;
+const data_structures = @import("../library.zig");
+const UnpinnedArray = data_structures.UnpinnedArray;
 const BlockList = data_structures.BlockList;
 const enumFromString = data_structures.enumFromString;
-const HashMap = data_structures.HashMap;
 
 const lexer = @import("lexer.zig");
 
 const Compilation = @import("../Compilation.zig");
-const log = Compilation.log;
+// const log = Compilation.log;
 const logln = Compilation.logln;
 const Token = Compilation.Token;
 
@@ -204,9 +201,10 @@ const Analyzer = struct {
    token_i: Token.Index,
    token_buffer: *Token.Buffer,
    nodes: *Node.List,
-    node_lists: *ArrayList(ArrayList(Node.Index)),
+    node_lists: *UnpinnedArray(UnpinnedArray(Node.Index)),
    source_file: []const u8,
    allocator: Allocator,
+    my_allocator: *data_structures.MyAllocator,
    suffix_depth: usize = 0,
 
    fn expectToken(analyzer: *Analyzer, expected_token_id: Token.Id) !Token.Index {
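From this point on, every ArrayList(Node.Index) in the parser becomes an UnpinnedArray(Node.Index). Its definition is not shown in this diff; the call sites below use .length, .pointer[i], .slice(), and .append(my_allocator, item). A sketch of that inferred surface (names taken from the usage; the real type presumably also carries a growth path through MyAllocator, omitted here):

    const std = @import("std");

    pub fn UnpinnedArray(comptime T: type) type {
        return struct {
            pointer: [*]T = undefined,
            length: u32 = 0,
            capacity: u32 = 0, // assumed; append's reallocation is not shown in the diff

            pub fn slice(array: @This()) []T {
                return array.pointer[0..array.length];
            }
        };
    }

    test "slice views the first `length` elements" {
        var backing: [4]u32 = .{ 1, 2, 3, 4 };
        const array = UnpinnedArray(u32){ .pointer = &backing, .length = 3 };
        try std.testing.expectEqual(@as(usize, 3), array.slice().len);
        try std.testing.expectEqual(@as(u32, 3), array.slice()[2]);
    }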
@@ -218,9 +216,9 @@ const Analyzer = struct {
            const result = token_i;
            return result;
        } else {
-            const file_offset = analyzer.getTokenOffset(token_i);
-            const file_chunk = analyzer.source_file[file_offset..];
-            std.debug.print("Unexpected token {s} when expected {s}\n| |\n v \n```\n{s}\n```", .{ @tagName(token_id), @tagName(expected_token_id), file_chunk });
+            // const file_offset = analyzer.getTokenOffset(token_i);
+            // const file_chunk = analyzer.source_file[file_offset..];
+            // std.debug.print("Unexpected token {s} when expected {s}\n| |\n v \n```\n{s}\n```", .{ @tagName(token_id), @tagName(expected_token_id), file_chunk });
            @breakpoint();
            return error.unexpected_token;
        }
@@ -228,13 +226,16 @@ const Analyzer = struct {
 
    fn getTokenOffset(analyzer: *Analyzer, token_index: Token.Index) u32 {
        const index = Token.unwrap(token_index);
-        const offset = analyzer.token_buffer.tokens.items(.offset)[index];
+        assert(index < analyzer.token_buffer.length);
+        const offset = analyzer.token_buffer.offsets[index];
        return offset;
    }
 
    fn peekTokenAhead(analyzer: *Analyzer, ahead_offset: u32) Token.Id {
        const token_index = Token.addInt(analyzer.token_i, ahead_offset);
-        const token = analyzer.token_buffer.tokens.items(.id)[Token.unwrap(token_index)];
+        const index = Token.unwrap(token_index);
+        assert(index < analyzer.token_buffer.length);
+        const token = analyzer.token_buffer.ids[index];
        return token;
    }
 
@@ -258,24 +259,27 @@ const Analyzer = struct {
 
    fn consumeTokens(analyzer: *Analyzer, token_count: u32) void {
        assert(Token.unwrap(Token.addInt(analyzer.token_i, token_count)) <= analyzer.getTokenEnd());
-        log(.parser, .consume_token, "Consuming {} {s}: ", .{ token_count, if (token_count == 1) "token" else "tokens" });
+        // log(.parser, .consume_token, "Consuming {} {s}: ", .{ token_count, if (token_count == 1) "token" else "tokens" });
 
        for (0..token_count) |i_usize| {
            const i: u32 = @intCast(i_usize);
            const token_id = analyzer.peekTokenAhead(i);
+            _ = token_id; // autofix
            const token_index = Token.addInt(analyzer.token_i, i);
            const token_bytes = analyzer.bytes(token_index);
-            log(.parser, .consume_token, "{s}, '{s}'", .{ @tagName(token_id), token_bytes });
+            _ = token_bytes; // autofix
+            // log(.parser, .consume_token, "{s}, '{s}'", .{ @tagName(token_id), token_bytes });
        }
 
-        log(.parser, .consume_token, "\n", .{});
+        // log(.parser, .consume_token, "\n", .{});
        analyzer.token_i = Token.addInt(analyzer.token_i, token_count);
    }
 
    fn bytes(analyzer: *const Analyzer, token_index: Token.Index) []const u8 {
        const index = Token.unwrap(token_index);
-        const offset = analyzer.token_buffer.tokens.items(.offset)[index];
-        const len = analyzer.token_buffer.tokens.items(.length)[index];
+        assert(index < analyzer.token_buffer.length);
+        const offset = analyzer.token_buffer.offsets[index];
+        const len = analyzer.token_buffer.lengths[index];
        const slice = analyzer.source_file[offset..][0..len];
        return slice;
    }
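The token-buffer accessors above switch from MultiArrayList-style tokens.items(.offset)[i] to direct parallel arrays guarded by a shared length. A sketch of the implied struct-of-arrays layout (hypothetical reconstruction; Compilation.zig's real Token.Buffer is not part of this diff, and Id stands in for Token.Id):

    // One entry per token; all four arrays share `length`.
    pub fn TokenBuffer(comptime Id: type) type {
        return struct {
            ids: [*]Id = undefined,
            offsets: [*]u32 = undefined,
            lengths: [*]u32 = undefined,
            lines: [*]u32 = undefined, // implied by the lexer's `.line = line_index` append
            length: u32 = 0,
            capacity: u32 = 0,
        };
    }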
@@ -290,9 +294,10 @@ const Analyzer = struct {
        analyzer.consumeToken();
        const declaration_name_token = try analyzer.expectToken(.identifier);
        const declaration_name = analyzer.bytes(declaration_name_token);
-        logln(.parser, .symbol_declaration, "Starting parsing declaration \"{s}\"", .{declaration_name});
+        _ = declaration_name; // autofix
+        // logln(.parser, .symbol_declaration, "Starting parsing declaration \"{s}\"", .{declaration_name});
 
-        logln(.parser, .symbol_declaration, "Current token: {}", .{analyzer.peekToken()});
+        // logln(.parser, .symbol_declaration, "Current token: {}", .{analyzer.peekToken()});
 
        const metadata_node_index = switch (analyzer.peekToken()) {
            .operator_colon => blk: {
@@ -301,13 +306,13 @@ const Analyzer = struct {
                const attribute_node_index: Node.Index = if (analyzer.peekToken() == .operator_colon) b: {
                    analyzer.consumeToken();
 
-                    var list = ArrayList(Node.Index){};
+                    var list = UnpinnedArray(Node.Index){};
                    while (analyzer.peekToken() != .operator_assign) {
                        const identifier = try analyzer.expectToken(.identifier);
                        const identifier_name = analyzer.bytes(identifier);
 
                        const attribute_node = inline for (@typeInfo(Compilation.Debug.Declaration.Global.Attribute).Enum.fields) |enum_field| {
-                            if (equal(u8, identifier_name, enum_field.name)) {
+                            if (data_structures.byte_equal(identifier_name, enum_field.name)) {
                                const attribute = @field(Compilation.Debug.Declaration.Global.Attribute, enum_field.name);
                                const attribute_node = switch (attribute) {
                                    .@"export",
@@ -321,8 +326,8 @@ const Analyzer = struct {
                                };
                                break attribute_node;
                            }
-                        } else panic("Unknown symbol attribute: {s}", .{identifier_name});
+                        } else @panic(identifier_name);
-                        try list.append(analyzer.allocator, attribute_node);
+                        try list.append(analyzer.my_allocator, attribute_node);
 
                        switch (analyzer.peekToken()) {
                            .operator_assign => {},
@@ -362,7 +367,7 @@ const Analyzer = struct {
            .right = init_node_index,
        };
 
-        logln(.parser, .symbol_declaration, "Adding declaration \"{s}\" with init node of type: {s}", .{ declaration_name, @tagName(init_node.id) });
+        // logln(.parser, .symbol_declaration, "Adding declaration \"{s}\" with init node of type: {s}", .{ declaration_name, @tagName(init_node.id) });
 
        return try analyzer.addNode(declaration);
    }
@@ -388,14 +393,14 @@ const Analyzer = struct {
 
    fn functionPrototype(analyzer: *Analyzer) !Node.Index {
        const token = analyzer.token_i;
-        var attribute_and_return_type_node_list = ArrayList(Node.Index){};
+        var attribute_and_return_type_node_list = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != .operator_left_parenthesis) {
            const identifier = try analyzer.expectToken(.identifier);
            const identifier_name = analyzer.bytes(identifier);
 
            const attribute_node = inline for (@typeInfo(Compilation.Function.Attribute).Enum.fields) |enum_field| {
-                if (equal(u8, identifier_name, enum_field.name)) {
+                if (data_structures.byte_equal(identifier_name, enum_field.name)) {
                    const attribute = @field(Compilation.Function.Attribute, enum_field.name);
                    const attribute_node = switch (attribute) {
                        .naked,
@@ -409,9 +414,9 @@ const Analyzer = struct {
                    };
                    break attribute_node;
                }
-            } else panic("Unknown function attribute: {s}", .{identifier_name});
+            } else @panic(identifier_name);
 
-            try attribute_and_return_type_node_list.append(analyzer.allocator, attribute_node);
+            try attribute_and_return_type_node_list.append(analyzer.my_allocator, attribute_node);
 
            if (analyzer.peekToken() == .operator_comma) analyzer.consumeToken();
        }
@@ -420,7 +425,7 @@ const Analyzer = struct {
 
        const arguments = try analyzer.argumentList(.operator_left_parenthesis, .operator_right_parenthesis);
        const return_type = try analyzer.typeExpression();
-        try attribute_and_return_type_node_list.append(analyzer.allocator, return_type);
+        try attribute_and_return_type_node_list.append(analyzer.my_allocator, return_type);
 
        const function_prototype = try analyzer.addNode(.{
            .id = .function_prototype,
@@ -437,7 +442,7 @@ const Analyzer = struct {
            _ = try analyzer.expectToken(start_token);
        }
 
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != end_token) {
            const identifier = try analyzer.expectToken(.identifier);
@@ -448,7 +453,7 @@ const Analyzer = struct {
                analyzer.consumeToken();
            }
 
-            try list.append(analyzer.allocator, try analyzer.addNode(.{
+            try list.append(analyzer.my_allocator, try analyzer.addNode(.{
                .id = .argument_declaration,
                .token = identifier,
                .left = type_expression,
@@ -458,7 +463,7 @@ const Analyzer = struct {
 
        _ = try analyzer.expectToken(end_token);
 
-        if (list.items.len != 0) {
+        if (list.length != 0) {
            return try analyzer.nodeList(list);
        } else {
            return Node.Index.null;
@@ -473,11 +478,11 @@ const Analyzer = struct {
 
    fn block(analyzer: *Analyzer) anyerror!Node.Index {
        const left_brace = try analyzer.expectToken(.operator_left_brace);
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != .operator_right_brace) {
            const first_statement_token = analyzer.peekToken();
-            logln(.parser, .block, "First statement token: {s}", .{@tagName(first_statement_token)});
+            // logln(.parser, .block, "First statement token: {s}", .{@tagName(first_statement_token)});
            const statement_index = switch (first_statement_token) {
                else => try analyzer.assignExpressionStatement(),
                // .identifier => switch (analyzer.peekTokenAhead(1)) {
@@ -506,10 +511,10 @@ const Analyzer = struct {
                // else => |t| @panic(@tagName(t)),
            };
 
-            const node = analyzer.nodes.get(statement_index);
-            logln(.parser, .block, "Adding statement: {s}", .{@tagName(node.id)});
+            // const node = analyzer.nodes.get(statement_index);
+            // logln(.parser, .block, "Adding statement: {s}", .{@tagName(node.id)});
 
-            try list.append(analyzer.allocator, statement_index);
+            try list.append(analyzer.my_allocator, statement_index);
        }
 
        _ = try analyzer.expectToken(.operator_right_brace);
@@ -541,30 +546,30 @@ const Analyzer = struct {
    }
 
    fn switchExpression(analyzer: *Analyzer) anyerror!Node.Index {
-        logln(.parser, .@"switch", "Parsing switch...", .{});
+        // logln(.parser, .@"switch", "Parsing switch...", .{});
        const switch_token = analyzer.token_i;
        analyzer.consumeToken();
        _ = try analyzer.expectToken(.operator_left_parenthesis);
        const switch_expression = try analyzer.expression();
        _ = try analyzer.expectToken(.operator_right_parenthesis);
-        logln(.parser, .@"switch", "Parsed switch expression...", .{});
+        // logln(.parser, .@"switch", "Parsed switch expression...", .{});
        _ = try analyzer.expectToken(.operator_left_brace);
 
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != .operator_right_brace) {
            const case_token = analyzer.token_i;
-            logln(.parser, .@"switch", "Parsing switch case...", .{});
+            // logln(.parser, .@"switch", "Parsing switch case...", .{});
            const case_node = switch (analyzer.peekToken()) {
                .fixed_keyword_else => blk: {
                    analyzer.consumeToken();
                    break :blk Node.Index.null;
                },
                else => blk: {
-                    var array_list = ArrayList(Node.Index){};
+                    var array_list = UnpinnedArray(Node.Index){};
                    while (true) {
                        const switch_case_node = try analyzer.expression();
-                        try array_list.append(analyzer.allocator, switch_case_node);
+                        try array_list.append(analyzer.my_allocator, switch_case_node);
                        switch (analyzer.peekToken()) {
                            .operator_comma => analyzer.consumeToken(),
                            .operator_switch_case => break,
@@ -572,9 +577,9 @@ const Analyzer = struct {
                        }
                    }
 
-                    break :blk switch (array_list.items.len) {
+                    break :blk switch (array_list.length) {
                        0 => unreachable,
-                        1 => array_list.items[0],
+                        1 => array_list.pointer[0],
                        else => try analyzer.nodeList(array_list),
                    };
                },
@@ -596,7 +601,7 @@ const Analyzer = struct {
                .right = expr,
            });
 
-            try list.append(analyzer.allocator, node);
+            try list.append(analyzer.my_allocator, node);
        }
 
        _ = try analyzer.expectToken(.operator_right_brace);
@@ -670,7 +675,7 @@ const Analyzer = struct {
        const token = try analyzer.expectToken(.fixed_keyword_for);
        _ = try analyzer.expectToken(.operator_left_parenthesis);
 
-        var for_expression_list = ArrayList(Node.Index){};
+        var for_expression_list = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != .operator_right_parenthesis) {
            const expression_token = analyzer.token_i;
@@ -699,7 +704,7 @@ const Analyzer = struct {
                else => |t| @panic(@tagName(t)),
            };
 
-            try for_expression_list.append(analyzer.allocator, node_index);
+            try for_expression_list.append(analyzer.my_allocator, node_index);
 
            switch (analyzer.peekToken()) {
                .operator_comma => analyzer.consumeToken(),
@@ -712,7 +717,7 @@ const Analyzer = struct {
 
        _ = try analyzer.expectToken(.operator_bar);
 
-        var payload_nodes = ArrayList(Node.Index){};
+        var payload_nodes = UnpinnedArray(Node.Index){};
 
        while (analyzer.peekToken() != .operator_bar) {
            const payload_token = analyzer.token_i;
@@ -730,7 +735,7 @@ const Analyzer = struct {
                else => |t| @panic(@tagName(t)),
            }
 
-            try payload_nodes.append(analyzer.allocator, try analyzer.addNode(.{
+            try payload_nodes.append(analyzer.my_allocator, try analyzer.addNode(.{
                .id = id,
                .token = payload_token,
                .left = Node.Index.null,
@@ -740,7 +745,7 @@ const Analyzer = struct {
 
        _ = try analyzer.expectToken(.operator_bar);
 
-        if (payload_nodes.items.len != for_expression_list.items.len) {
+        if (payload_nodes.length != for_expression_list.length) {
            unreachable;
        }
 
@@ -804,7 +809,7 @@ const Analyzer = struct {
            .right = right,
        };
 
-        logln(.parser, .assign, "assign:\nleft: {}.\nright: {}", .{ node.left, node.right });
+        // logln(.parser, .assign, "assign:\nleft: {}.\nright: {}", .{ node.left, node.right });
        return try analyzer.addNode(node);
        // .operator_equal => .operator_assign,
        // .operator_add => switch (analyzer.peekTokenAhead(1)) {
@@ -872,19 +877,19 @@ const Analyzer = struct {
        const intrinsic_name = analyzer.bytes(intrinsic_token)[1..];
 
        const intrinsic_id = inline for (@typeInfo(Compilation.IntrinsicId).Enum.fields) |enum_field| {
-            if (equal(u8, enum_field.name, intrinsic_name)) {
+            if (data_structures.byte_equal(enum_field.name, intrinsic_name)) {
                break @field(Compilation.IntrinsicId, enum_field.name);
            }
        } else @panic(intrinsic_name);
 
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        if (intrinsic_id == .@"asm") {
            _ = try analyzer.expectToken(.operator_left_brace);
 
            while (analyzer.peekToken() != .operator_right_brace) {
                const instruction_token = try analyzer.expectToken(.identifier);
-                var operand_list = ArrayList(Node.Index){};
+                var operand_list = UnpinnedArray(Node.Index){};
 
                while (analyzer.peekToken() != .operator_semicolon) {
                    const asm_operand = try analyzer.parseAsmOperand();
@@ -893,12 +898,12 @@ const Analyzer = struct {
                        .operator_comma => analyzer.consumeToken(),
                        else => |t| @panic(@tagName(t)),
                    }
-                    try operand_list.append(analyzer.allocator, asm_operand);
+                    try operand_list.append(analyzer.my_allocator, asm_operand);
                }
 
                _ = try analyzer.expectToken(.operator_semicolon);
 
-                try list.append(analyzer.allocator, try analyzer.addNode(.{
+                try list.append(analyzer.my_allocator, try analyzer.addNode(.{
                    .id = .assembly_statement,
                    .token = instruction_token,
                    .left = try analyzer.nodeList(operand_list),
@@ -911,7 +916,7 @@ const Analyzer = struct {
        } else {
            while (analyzer.peekToken() != .operator_right_parenthesis) {
                const parameter = try analyzer.expression();
-                try list.append(analyzer.allocator, parameter);
+                try list.append(analyzer.my_allocator, parameter);
 
                switch (analyzer.peekToken()) {
                    .operator_comma => analyzer.consumeToken(),
@@ -1019,10 +1024,10 @@ const Analyzer = struct {
    fn expressionPrecedence(analyzer: *Analyzer, minimum_precedence: i32) !Node.Index {
        assert(minimum_precedence >= 0);
        var result = try analyzer.prefixExpression();
-        if (result != .null) {
-            const prefix_node = analyzer.nodes.get(result);
-            logln(.parser, .precedence, "Prefix: {s}", .{@tagName(prefix_node.id)});
-        }
+        // if (result != .null) {
+        //     const prefix_node = analyzer.nodes.get(result);
+        //     logln(.parser, .precedence, "Prefix: {s}", .{@tagName(prefix_node.id)});
+        // }
 
        var banned_precedence: i32 = -1;
 
@@ -1072,16 +1077,16 @@ const Analyzer = struct {
                else => |t| @panic(@tagName(t)),
            };
 
-            logln(.parser, .precedence, "Precedence operator: {s}", .{@tagName(operator)});
+            // logln(.parser, .precedence, "Precedence operator: {s}", .{@tagName(operator)});
 
            const precedence = operator_precedence.get(operator);
            if (precedence < minimum_precedence) {
-                logln(.parser, .precedence, "Breaking for minimum_precedence", .{});
+                // logln(.parser, .precedence, "Breaking for minimum_precedence", .{});
                break;
            }
 
            if (precedence < banned_precedence) {
-                logln(.parser, .precedence, "Breaking for banned_precedence", .{});
+                // logln(.parser, .precedence, "Breaking for banned_precedence", .{});
                break;
            }
 
@@ -1089,7 +1094,7 @@ const Analyzer = struct {
            analyzer.consumeToken();
 
            // TODO: fix this
-            logln(.parser, .precedence, "Going for right in expressionPrecedence with operator {s}", .{@tagName(operator)});
+            // logln(.parser, .precedence, "Going for right in expressionPrecedence with operator {s}", .{@tagName(operator)});
            const right = try analyzer.expressionPrecedence(precedence + 1);
 
            const node_id = operator_node_id.get(operator);
@@ -1182,7 +1187,8 @@ const Analyzer = struct {
            // todo:?
            .operator_left_brace => try analyzer.block(),
            .fixed_keyword_if => try analyzer.ifExpression(),
-            else => |id| std.debug.panic("WARN: By default, calling curlySuffixExpression with {s}", .{@tagName(id)}),
+            else => |id| @panic(@tagName(id)),
+            //else => |id| std.debug.panic("WARN: By default, calling curlySuffixExpression with {s}", .{@tagName(id)}),
        };
 
        return result;
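expressionPrecedence above is a precedence-climbing parser: parse a prefix expression, then keep folding binary operators whose precedence is at least the current minimum, recursing with precedence + 1 for the right-hand operand so tighter operators bind first. A self-contained toy version of the same technique over single-digit arithmetic (illustration only; independent of the parser's Node machinery):

    const std = @import("std");

    fn prec(op: u8) ?i32 {
        return switch (op) {
            '+', '-' => 10,
            '*', '/' => 20,
            else => null, // not a binary operator: stop climbing
        };
    }

    const Parser = struct {
        text: []const u8,
        i: usize = 0,

        fn primary(p: *Parser) i64 {
            const d = p.text[p.i] - '0';
            p.i += 1;
            return d;
        }

        fn expr(p: *Parser, min_prec: i32) i64 {
            var result = p.primary();
            while (p.i < p.text.len) {
                const op = p.text[p.i];
                const precedence = prec(op) orelse break;
                if (precedence < min_prec) break; // caller binds tighter
                p.i += 1;
                // recurse one level tighter for the right operand
                const right = p.expr(precedence + 1);
                result = switch (op) {
                    '+' => result + right,
                    '-' => result - right,
                    '*' => result * right,
                    '/' => @divTrunc(result, right),
                    else => unreachable,
                };
            }
            return result;
        }
    };

    test "precedence climbing folds tighter operators first" {
        var p = Parser{ .text = "1+2*3-4" };
        try std.testing.expectEqual(@as(i64, 3), p.expr(0));
    }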
@@ -1262,7 +1268,7 @@ const Analyzer = struct {
    fn pointerOrArrayTypeExpression(analyzer: *Analyzer, expected: PointerOrArrayTypeExpectedExpression) !Node.Index {
        const first = analyzer.token_i;
 
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        const expression_type: Node.Id = switch (expected) {
            .single_pointer_type => blk: {
@@ -1271,7 +1277,7 @@ const Analyzer = struct {
                break :blk .pointer_type;
            },
            .many_pointer_type => blk: {
-                try list.append(analyzer.allocator, try analyzer.addNode(.{
+                try list.append(analyzer.my_allocator, try analyzer.addNode(.{
                    .id = .many_pointer_expression,
                    .token = analyzer.token_i,
                    .left = Node.Index.null,
@@ -1281,7 +1287,7 @@ const Analyzer = struct {
                _ = try analyzer.expectToken(.operator_ampersand);
                switch (analyzer.peekToken()) {
                    .operator_right_bracket => {},
-                    .operator_colon => try list.append(analyzer.allocator, try analyzer.parseTermination()),
+                    .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()),
                    else => |t| @panic(@tagName(t)),
                }
                _ = try analyzer.expectToken(.operator_right_bracket);
@@ -1296,17 +1302,17 @@ const Analyzer = struct {
                        break :blk .slice_type;
                    },
                    .operator_colon => {
-                        try list.append(analyzer.allocator, try analyzer.parseTermination());
+                        try list.append(analyzer.my_allocator, try analyzer.parseTermination());
                        _ = try analyzer.expectToken(.operator_right_bracket);
                        break :blk .slice_type;
                    },
                    else => {
                        const length_expression = try analyzer.expression();
-                        try list.append(analyzer.allocator, length_expression);
+                        try list.append(analyzer.my_allocator, length_expression);
 
                        switch (analyzer.peekToken()) {
                            .operator_right_bracket => {},
-                            .operator_colon => try list.append(analyzer.allocator, try analyzer.parseTermination()),
+                            .operator_colon => try list.append(analyzer.my_allocator, try analyzer.parseTermination()),
                            else => |t| @panic(@tagName(t)),
                        }
 
@@ -1331,15 +1337,15 @@ const Analyzer = struct {
            analyzer.consumeTokens(@intFromBool(analyzer.peekToken() == .fixed_keyword_const));
 
            if (const_node != .null) {
-                try list.append(analyzer.allocator, const_node);
+                try list.append(analyzer.my_allocator, const_node);
            }
        } else {
-            assert(list.items.len > 0);
+            assert(list.length > 0);
        }
 
        const type_expression = try analyzer.typeExpression();
        assert(type_expression != .null);
-        try list.append(analyzer.allocator, type_expression);
+        try list.append(analyzer.my_allocator, type_expression);
 
        const node_list = try analyzer.nodeList(list);
 
@@ -1350,15 +1356,15 @@ const Analyzer = struct {
            .right = Node.Index.null,
        };
 
-        logln(.parser, .pointer_like_type_expression, "ARRAY START\n===========", .{});
-        for (list.items) |ni| {
-            const n = analyzer.nodes.get(ni);
-            logln(.parser, .pointer_like_type_expression, "{s} node element: {s}", .{ @tagName(expression_type), @tagName(n.id) });
-        }
-        logln(.parser, .pointer_like_type_expression, "ARRAY END\n=========", .{});
+        // logln(.parser, .pointer_like_type_expression, "ARRAY START\n===========", .{});
+        // for (list.slice()) |ni| {
+        //     const n = analyzer.nodes.get(ni);
+        //     logln(.parser, .pointer_like_type_expression, "{s} node element: {s}", .{ @tagName(expression_type), @tagName(n.id) });
+        // }
+        // logln(.parser, .pointer_like_type_expression, "ARRAY END\n=========", .{});
 
        const node_index = try analyzer.addNode(node);
-        logln(.parser, .pointer_like_type_expression, "Pointer end", .{});
+        // logln(.parser, .pointer_like_type_expression, "Pointer end", .{});
 
        switch (analyzer.peekToken()) {
            .operator_comma,
@@ -1449,8 +1455,8 @@ const Analyzer = struct {
        const left_parenthesis = analyzer.token_i;
        analyzer.consumeToken();
 
-        var expression_list = ArrayList(Node.Index){};
-        logln(.parser, .suffix, "[DEPTH={}] Initializating suffix call-like expression", .{analyzer.suffix_depth});
+        var expression_list = UnpinnedArray(Node.Index){};
+        // logln(.parser, .suffix, "[DEPTH={}] Initializating suffix call-like expression", .{analyzer.suffix_depth});
        while (analyzer.peekToken() != .operator_right_parenthesis) {
            const current_token = analyzer.token_i;
            // logln(.parser, .suffix, "[DEPTH={}] First token: {s}", .{ analyzer.suffix_depth, @tagName(analyzer.tokens[current_token].id) });
@@ -1468,19 +1474,19 @@ const Analyzer = struct {
                });
            }
 
-            try expression_list.append(analyzer.allocator, parameter);
+            try expression_list.append(analyzer.my_allocator, parameter);
 
            switch (analyzer.peekToken()) {
                .operator_right_parenthesis => {},
                .operator_comma => analyzer.consumeToken(),
                .operator_colon, .operator_right_brace, .operator_right_bracket => unreachable,
-                .operator_dot => panic("[DEPTH={}] Unexpected period", .{analyzer.suffix_depth}),
+                .operator_dot => @panic("Unexpected period"), //panic("[DEPTH={}] Unexpected period", .{analyzer.suffix_depth}),
                else => |t| @panic(@tagName(t)),
            }
        }
 
-        logln(.parser, .suffix, "[DEPTH={}] Ending suffix call-like expression", .{analyzer.suffix_depth});
-        logln(.parser, .suffix, "Callee node: {s}", .{@tagName(analyzer.nodes.get(result).id)});
+        // logln(.parser, .suffix, "[DEPTH={}] Ending suffix call-like expression", .{analyzer.suffix_depth});
+        // logln(.parser, .suffix, "Callee node: {s}", .{@tagName(analyzer.nodes.get(result).id)});
 
        _ = try analyzer.expectToken(.operator_right_parenthesis);
        // const is_comma = analyzer.tokens[analyzer.token_i].id == .comma;
@@ -1502,7 +1508,7 @@ const Analyzer = struct {
    fn containerLiteral(analyzer: *Analyzer, type_node: Node.Index) anyerror!Node.Index {
        const token = try analyzer.expectToken(.operator_left_brace);
 
-        var list = ArrayList(Node.Index){};
+        var list = UnpinnedArray(Node.Index){};
 
        const InitializationType = enum {
            anonymous,
@@ -1531,7 +1537,7 @@ const Analyzer = struct {
                        .right = Node.Index.null,
                    });
 
-                    try list.append(analyzer.allocator, field_initialization);
+                    try list.append(analyzer.my_allocator, field_initialization);
                    _ = try analyzer.expectToken(.operator_comma);
 
                    break :blk .container_field_names;
@@ -1539,7 +1545,7 @@ const Analyzer = struct {
                    else => |t| @panic(@tagName(t)),
                },
                else => blk: {
-                    try list.append(analyzer.allocator, try analyzer.anonymousExpression());
+                    try list.append(analyzer.my_allocator, try analyzer.anonymousExpression());
                    _ = try analyzer.expectToken(.operator_comma);
                    break :blk .anonymous;
                },
@@ -1556,7 +1562,7 @@ const Analyzer = struct {
                        else => {},
                    }
 
-                    try list.append(analyzer.allocator, field_expression_initializer);
+                    try list.append(analyzer.my_allocator, field_expression_initializer);
                    break :blk .anonymous;
                },
                else => |t| @panic(@tagName(t)),
@@ -1618,7 +1624,7 @@ const Analyzer = struct {
 
    fn processContainerType(analyzer: *Analyzer, maybe_token_id: ?Token.Id) !Node.Index {
        const token_i = if (maybe_token_id) |tid| try analyzer.expectToken(tid) else analyzer.token_i;
-        assert(Token.unwrap(analyzer.token_i) < analyzer.token_buffer.tokens.len);
+        assert(Token.unwrap(analyzer.token_i) < analyzer.token_buffer.length);
        const token_id = maybe_token_id orelse .fixed_keyword_struct;
        const container_type: Compilation.ContainerType = switch (token_id) {
            .fixed_keyword_struct => .@"struct",
@@ -1640,11 +1646,11 @@ const Analyzer = struct {
        } else Node.Index.null;
 
        if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_left_brace);
-        var node_list = ArrayList(Node.Index){};
+        var node_list = UnpinnedArray(Node.Index){};
 
        while (analyzer.hasTokens() and analyzer.peekToken() != .operator_right_brace) {
            const first = analyzer.token_i;
-            logln(.parser, .container_members, "First token for container member: {s}", .{@tagName(analyzer.peekToken())});
+            // logln(.parser, .container_members, "First token for container member: {s}", .{@tagName(analyzer.peekToken())});
 
            const member_node_index: Node.Index = switch (analyzer.peekToken()) {
                .fixed_keyword_comptime => switch (analyzer.peekTokenAhead(1)) {
@@ -1717,23 +1723,25 @@ const Analyzer = struct {
            };
 
            const member_node = analyzer.nodes.get(member_node_index);
-            logln(.parser, .container_members, "Container member {s}", .{@tagName(member_node.id)});
+            // logln(.parser, .container_members, "Container member {s}", .{@tagName(member_node.id)});
            assert(member_node.id != .identifier);
 
-            try node_list.append(analyzer.allocator, member_node_index);
+            try node_list.append(analyzer.my_allocator, member_node_index);
        }
 
        if (maybe_token_id) |_| _ = try analyzer.expectToken(.operator_right_brace);
 
-        for (node_list.items, 0..) |member_node_index, index| {
-            const member_node = analyzer.nodes.get(member_node_index);
-            if (member_node.id == .identifier) {
-                const token_offset = analyzer.getTokenOffset(member_node.token);
-                std.debug.print("Node index #{} (list index {}):\n```\n{s}\n```\n", .{ Node.unwrap(member_node_index), index, analyzer.source_file[token_offset..] });
-                // std.debug.print("ID: {s}\n", .{analyzer.bytes(member_node.token)});
-                unreachable;
-            }
-        }
+        // for (node_list.slice(), 0..) |member_node_index, index| {
+        //     _ = index; // autofix
+        //     const member_node = analyzer.nodes.get(member_node_index);
+        //     if (member_node.id == .identifier) {
+        //         const token_offset = analyzer.getTokenOffset(member_node.token);
+        //         _ = token_offset; // autofix
+        //         // std.debug.print("Node index #{} (list index {}):\n```\n{s}\n```\n", .{ Node.unwrap(member_node_index), index, analyzer.source_file[token_offset..] });
+        //         // std.debug.print("ID: {s}\n", .{analyzer.bytes(member_node.token)});
+        //         unreachable;
+        //     }
+        // }
 
        return try analyzer.addNode(.{
            .id = node_id,
@@ -1893,11 +1901,11 @@ const Analyzer = struct {
        analyzer.consumeToken();
        if (analyzer.peekToken() == .operator_left_brace) {
            analyzer.consumeToken();
-            var list = ArrayList(Node.Index){};
+            var list = UnpinnedArray(Node.Index){};
 
            while (analyzer.peekToken() != .operator_right_brace) {
|
while (analyzer.peekToken() != .operator_right_brace) {
|
||||||
const identifier = try analyzer.identifierNode();
|
const identifier = try analyzer.identifierNode();
|
||||||
try list.append(analyzer.allocator, identifier);
|
try list.append(analyzer.my_allocator, identifier);
|
||||||
const comma = try analyzer.expectToken(.operator_comma);
|
const comma = try analyzer.expectToken(.operator_comma);
|
||||||
_ = comma; // autofix
|
_ = comma; // autofix
|
||||||
}
|
}
|
||||||
@ -1917,7 +1925,7 @@ const Analyzer = struct {
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
else => |t| switch (t) {
|
else => |t| switch (t) {
|
||||||
.identifier => std.debug.panic("{s}: {s}", .{ @tagName(t), analyzer.bytes(token_i) }),
|
.identifier => @panic(analyzer.bytes(token_i)),
|
||||||
else => @panic(@tagName(t)),
|
else => @panic(@tagName(t)),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@ -2039,7 +2047,7 @@ const Analyzer = struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn addNode(analyzer: *Analyzer, node: Node) !Node.Index {
|
fn addNode(analyzer: *Analyzer, node: Node) !Node.Index {
|
||||||
const node_index = try analyzer.nodes.append(analyzer.allocator, node);
|
const node_index = try analyzer.nodes.append(analyzer.my_allocator, node);
|
||||||
// logln(.parser, .node_creation, "Adding node #{} {s} to file #{} (left: {}, right: {})", .{ Node.unwrap(node_index), @tagName(node.id), File.unwrap(analyzer.file_index), switch (node.left) {
|
// logln(.parser, .node_creation, "Adding node #{} {s} to file #{} (left: {}, right: {})", .{ Node.unwrap(node_index), @tagName(node.id), File.unwrap(analyzer.file_index), switch (node.left) {
|
||||||
// .null => 0xffff_ffff,
|
// .null => 0xffff_ffff,
|
||||||
// else => Node.unwrap(node.left),
|
// else => Node.unwrap(node.left),
|
||||||
@ -2062,9 +2070,9 @@ const Analyzer = struct {
|
|||||||
return node_index;
|
return node_index;
|
||||||
}
|
}
|
||||||
|
|
||||||
fn nodeList(analyzer: *Analyzer, node_list: ArrayList(Node.Index)) !Node.Index {
|
fn nodeList(analyzer: *Analyzer, node_list: UnpinnedArray(Node.Index)) !Node.Index {
|
||||||
const index = analyzer.node_lists.items.len;
|
const index = analyzer.node_lists.length;
|
||||||
try analyzer.node_lists.append(analyzer.allocator, node_list);
|
try analyzer.node_lists.append(analyzer.my_allocator, node_list);
|
||||||
return try analyzer.addNode(.{
|
return try analyzer.addNode(.{
|
||||||
.id = .node_list,
|
.id = .node_list,
|
||||||
.token = Token.wrap(0),
|
.token = Token.wrap(0),
|
||||||
@ -2087,7 +2095,7 @@ const Analyzer = struct {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// Here it is assumed that left brace is consumed
|
// Here it is assumed that left brace is consumed
|
||||||
pub fn analyze(allocator: Allocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *Node.List, node_lists: *ArrayList(ArrayList(Node.Index))) !Result {
|
pub fn analyze(allocator: Allocator, my_allocator: *data_structures.MyAllocator, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *Node.List, node_lists: *UnpinnedArray(UnpinnedArray(Node.Index))) !Result {
|
||||||
const start = std.time.Instant.now() catch unreachable;
|
const start = std.time.Instant.now() catch unreachable;
|
||||||
var analyzer = Analyzer{
|
var analyzer = Analyzer{
|
||||||
.lexer = lexer_result,
|
.lexer = lexer_result,
|
||||||
@ -2096,6 +2104,7 @@ pub fn analyze(allocator: Allocator, lexer_result: lexer.Result, source_file: []
|
|||||||
// .file_index = file_index,
|
// .file_index = file_index,
|
||||||
.token_i = lexer_result.offset,
|
.token_i = lexer_result.offset,
|
||||||
.allocator = allocator,
|
.allocator = allocator,
|
||||||
|
.my_allocator = my_allocator,
|
||||||
.nodes = node_list,
|
.nodes = node_list,
|
||||||
.node_lists = node_lists,
|
.node_lists = node_lists,
|
||||||
};
|
};
|
||||||
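The hunks above are one mechanical pattern: every std.ArrayListUnmanaged in the parser becomes an UnpinnedArray, and every mutating call now threads the custom *MyAllocator instead of the std allocator. A minimal sketch of the resulting call shape, using only the declarations this commit adds in bootstrap/library.zig below (the wrapper function and its values are illustrative, not part of the commit):

    const library = @import("library.zig");

    fn collectExample(my_allocator: *library.MyAllocator) !void {
        // UnpinnedArray is unmanaged: the allocator travels with each mutating call.
        var list = library.UnpinnedArray(u32){};
        try list.append(my_allocator, 42);
        try list.append_slice(my_allocator, &.{ 1, 2, 3 });
        for (list.slice()) |item| {
            _ = item; // iterate over the live elements
        }
    }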
bootstrap/library.zig (new file, 720 lines)
@@ -0,0 +1,720 @@
+const std = @import("std");
+const builtin = @import("builtin");
+const os = builtin.os.tag;
+const arch = builtin.cpu.arch;
+
+pub fn assert(ok: bool) void {
+    if (!ok) unreachable;
+}
+
+pub const Allocator = std.mem.Allocator;
+pub const BoundedArray = std.BoundedArray;
+
+pub fn BlockList(comptime T: type, comptime E: type) type {
+    const item_count = 64;
+
+    return struct {
+        blocks: UnpinnedArray(*Block) = .{},
+        len: usize = 0,
+
+        const Block = BoundedArray(T, item_count);
+        const List = @This();
+
+        pub const Index = getIndexForType(T, E);
+        pub const ElementIndex = Index.Index;
+
+        pub fn wrapSplit(block: usize, element: usize) ElementIndex {
+            return @enumFromInt(block * item_count + element);
+        }
+
+        pub fn get(list: *List, index: ElementIndex) *T {
+            assert(index != .null);
+            const i: u32 = @intFromEnum(index);
+            const block_index = i / item_count;
+            const element_index = i % item_count;
+            assert(block_index < list.blocks.length);
+            const block = list.blocks.pointer[block_index];
+            const block_slice = block.buffer[0..block.len];
+            const element = &block_slice[element_index];
+            return element;
+        }
+
+        pub fn append(list: *List, allocator: *MyAllocator, element: T) !ElementIndex {
+            const result = try list.addOne(allocator);
+            list.get(result).* = element;
+            return result;
+        }
+
+        pub fn addOne(list: *List, allocator: *MyAllocator) !ElementIndex {
+            const block_index = try list.getFreeBlock(allocator);
+            assert(block_index < list.blocks.length);
+            const block = list.blocks.pointer[block_index];
+            const index = block.len;
+            _ = try block.addOne();
+            return @enumFromInt(block_index * item_count + index);
+        }
+
+        fn getFreeBlock(list: *List, allocator: *MyAllocator) !usize {
+            for (list.blocks.slice(), 0..) |block, i| {
+                block.ensureUnusedCapacity(1) catch continue;
+                return i;
+            } else {
+                const new_block = try allocator.allocate_one(Block);
+                new_block.* = .{};
+                const block_index = list.blocks.length;
+                try list.blocks.append(allocator, new_block);
+                return block_index;
+            }
+        }
+
+        pub fn indexOf(list: *List, elem: *const T) ElementIndex {
+            const address = @intFromPtr(elem);
+            for (list.blocks.items, 0..) |block, block_index| {
+                const base = @intFromPtr(&block.buffer[0]);
+                const top = base + @sizeOf(T) * item_count;
+                if (address >= base and address < top) {
+                    const result: u32 = @intCast(block_index * item_count + @divExact(address - base, @sizeOf(T)));
+                    return Index.wrap(result);
+                }
+            }
+
+            @panic("not found");
+        }
+    };
+}
+
+pub fn getIndexForType(comptime T: type, comptime E: type) type {
+    assert(@typeInfo(E) == .Enum);
+    _ = T;
+    const MAX = std.math.maxInt(IndexType);
+
+    const EnumField = std.builtin.Type.EnumField;
+    comptime var fields: []const EnumField = &.{};
+    // comptime var enum_value: comptime_int = 0;
+    fields = fields ++ @typeInfo(E).Enum.fields;
+
+    // for (names) |name| {
+    //     fields = fields ++ [1]EnumField{.{
+    //         .name = name,
+    //         .value = enum_value,
+    //     }};
+    //     enum_value += 1;
+    // }
+
+    fields = fields ++ [1]EnumField{.{
+        .name = "null",
+        .value = MAX,
+    }};
+
+    const Result = @Type(.{
+        .Enum = .{
+            .tag_type = IndexType,
+            .fields = fields,
+            .decls = &.{},
+            .is_exhaustive = false,
+        },
+    });
+
+    return struct {
+        pub const Index = Result;
+
+        pub fn unwrap(this: Index) IndexType {
+            assert(this != .null);
+            return @intFromEnum(this);
+        }
+
+        pub fn wrap(value: IndexType) Index {
+            assert(value < MAX);
+            return @enumFromInt(value);
+        }
+
+        pub fn addInt(this: Index, value: IndexType) Index {
+            const this_int = @intFromEnum(this);
+            return @enumFromInt(this_int + value);
+        }
+
+        pub fn subInt(this: Index, value: IndexType) IndexType {
+            const this_int = @intFromEnum(this);
+            return this_int - value;
+        }
+
+        pub fn add(a: Index, b: Index) Index {
+            return @enumFromInt(@intFromEnum(a) + @intFromEnum(b));
+        }
+
+        pub fn sub(a: Index, b: Index) IndexType {
+            return @intFromEnum(a) - @intFromEnum(b);
+        }
+    };
+}
+
+pub fn my_hash(bytes: []const u8) IndexType {
+    const fnv_offset = 14695981039346656037;
+    const fnv_prime = 1099511628211;
+    var result: u64 = fnv_offset;
+
+    for (bytes) |byte| {
+        result ^= byte;
+        result *%= fnv_prime;
+    }
+
+    return @truncate(result);
+}
+
+fn CopyPtrAttrs(
+    comptime source: type,
+    comptime size: std.builtin.Type.Pointer.Size,
+    comptime child: type,
+) type {
+    const info = @typeInfo(source).Pointer;
+    return @Type(.{
+        .Pointer = .{
+            .size = size,
+            .is_const = info.is_const,
+            .is_volatile = info.is_volatile,
+            .is_allowzero = info.is_allowzero,
+            .alignment = info.alignment,
+            .address_space = info.address_space,
+            .child = child,
+            .sentinel = null,
+        },
+    });
+}
+
+fn AsBytesReturnType(comptime P: type) type {
+    const size = @sizeOf(std.meta.Child(P));
+    return CopyPtrAttrs(P, .One, [size]u8);
+}
+
+/// Given a pointer to a single item, returns a slice of the underlying bytes, preserving pointer attributes.
+pub fn asBytes(ptr: anytype) AsBytesReturnType(@TypeOf(ptr)) {
+    return @ptrCast(@alignCast(ptr));
+}
+
+pub fn byte_equal(a: []const u8, b: []const u8) bool {
+    if (a.len != b.len) return false;
+
+    if (a.len != b.len) return false;
+    if (a.len == 0 or a.ptr == b.ptr) return true;
+
+    for (a, b) |byte_a, byte_b| {
+        if (byte_a != byte_b) return false;
+    }
+
+    return true;
+}
+
+const MapResult = struct {
+    key_pointer: *anyopaque,
+    value_pointer: *anyopaque,
+    capacity: IndexType,
+};
+
+fn ensure_capacity_hashmap(allocator: *MyAllocator, current_capacity: IndexType, desired_capacity: IndexType, key_pointer: [*]u8, value_pointer: [*]u8, length: IndexType, key_size: IndexType, key_alignment: u16, value_size: IndexType, value_alignment: u16) !MapResult {
+    var new_capacity = @max(current_capacity, initial_item_count);
+    while (new_capacity < desired_capacity) {
+        new_capacity *= factor;
+    }
+
+    if (new_capacity > current_capacity) {
+        const old_key_slice = key_pointer[0 .. length * key_size];
+        const old_value_slice = value_pointer[0 .. length * value_size];
+        const new_key_slice = try allocator.reallocate(old_key_slice, new_capacity * key_size, key_alignment);
+        const new_value_slice = try allocator.reallocate(old_value_slice, new_capacity * value_size, value_alignment);
+
+        return .{
+            .key_pointer = new_key_slice.ptr,
+            .value_pointer = new_value_slice.ptr,
+            .capacity = new_capacity,
+        };
+    } else {
+        return .{
+            .capacity = current_capacity,
+            .key_pointer = key_pointer,
+            .value_pointer = value_pointer,
+        };
+    }
+}
+
+pub fn MyHashMap(comptime K: type, comptime V: type) type {
+    // const K = []const u8;
+    return struct {
+        key_pointer: [*]K = undefined,
+        value_pointer: [*]V = undefined,
+        length: IndexType = 0,
+        capacity: IndexType = 0,
+
+        pub fn get_pointer(map: *@This(), key: K) ?*V {
+            for (map.keys(), 0..) |k, i| {
+                const is_equal = switch (@typeInfo(K)) {
+                    .Pointer => |pointer| switch (pointer.size) {
+                        .Slice => byte_equal(k, key),
+                        else => k == key,
+                    },
+                    .Struct => equal(k, key),
+                    else => k == key,
+                };
+
+                if (is_equal) {
+                    return &map.value_pointer[i];
+                }
+            }
+
+            return null;
+        }
+
+        pub fn get(map: *@This(), key: K) ?V {
+            if (map.get_pointer(key)) |p| {
+                return p.*;
+            } else {
+                return null;
+            }
+        }
+
+        pub fn put(map: *@This(), allocator: *MyAllocator, key: K, value: V) !void {
+            if (map.get_pointer(key)) |value_ptr| {
+                value_ptr.* = value;
+            } else {
+                const len = map.length;
+                try map.ensure_capacity(allocator, len + 1);
+                map.put_at_with_capacity(len, key, value);
+            }
+        }
+
+        pub fn put_no_clobber(map: *@This(), allocator: *MyAllocator, key: K, value: V) !void {
+            assert(map.get_pointer(key) == null);
+            const len = map.length;
+            try map.ensure_capacity(allocator, len + 1);
+            map.put_at_with_capacity(len, key, value);
+        }
+
+        fn put_at_with_capacity(map: *@This(), index: IndexType, key: K, value: V) void {
+            map.length += 1;
+            assert(index < map.length);
+            map.key_pointer[index] = key;
+            map.value_pointer[index] = value;
+        }
+
+        pub fn ensure_capacity(map: *@This(), allocator: *MyAllocator, desired_capacity: IndexType) !void {
+            const result = try ensure_capacity_hashmap(allocator, map.capacity, desired_capacity, @ptrCast(map.key_pointer), @ptrCast(map.value_pointer), map.length, @sizeOf(K), @alignOf(K), @sizeOf(V), @alignOf(V));
+            map.capacity = result.capacity;
+            map.key_pointer = @ptrCast(@alignCast(result.key_pointer));
+            map.value_pointer = @ptrCast(@alignCast(result.value_pointer));
+        }
+
+        pub fn keys(map: *@This()) []K {
+            return map.key_pointer[0..map.length];
+        }
+
+        pub fn values(map: *@This()) []V {
+            return map.value_pointer[0..map.length];
+        }
+    };
+}
+
+pub const ListType = enum {
+    index,
+    pointer,
+};
+
+pub fn enumFromString(comptime E: type, string: []const u8) ?E {
+    return inline for (@typeInfo(E).Enum.fields) |enum_field| {
+        if (byte_equal(string, enum_field.name)) {
+            break @field(E, enum_field.name);
+        }
+    } else null;
+}
+
+const page_size = std.mem.page_size;
+extern fn pthread_jit_write_protect_np(enabled: bool) void;
+
+pub fn allocate_virtual_memory(size: usize, flags: packed struct {
+    executable: bool = false,
+}) ![]align(page_size) u8 {
+    return switch (os) {
+        .windows => blk: {
+            const windows = std.os.windows;
+            break :blk @as([*]align(page_size) u8, @ptrCast(@alignCast(try windows.VirtualAlloc(null, size, windows.MEM_COMMIT | windows.MEM_RESERVE, windows.PAGE_EXECUTE_READWRITE))))[0..size];
+        },
+        .linux, .macos => |os_tag| blk: {
+            const jit = switch (os_tag) {
+                .macos => 0x800,
+                .linux => 0,
+                else => @compileError("OS not supported"),
+            };
+            _ = jit; // autofix
+            const execute_flag: switch (os_tag) {
+                .linux => u32,
+                .macos => c_int,
+                else => @compileError("OS not supported"),
+            } = if (flags.executable) std.os.PROT.EXEC else 0;
+            const protection_flags: u32 = @intCast(std.os.PROT.READ | std.os.PROT.WRITE | execute_flag);
+
+            const result = try std.os.mmap(null, size, protection_flags, .{
+                .TYPE = .PRIVATE,
+                .ANONYMOUS = true,
+            }, -1, 0);
+            if (arch == .aarch64 and os == .macos) {
+                if (flags.executable) {
+                    pthread_jit_write_protect_np(false);
+                }
+            }
+
+            break :blk result;
+        },
+        else => @compileError("OS not supported"),
+    };
+}
+
+pub fn free_virtual_memory(slice: []const align(0x1000) u8) void {
+    switch (os) {
+        .windows => {
+            std.os.windows.VirtualFree(slice.ptr, slice.len, std.os.windows.MEM_RELEASE);
+        },
+        else => {
+            std.os.munmap(slice);
+        },
+    }
+}
+
+pub const MyAllocator = struct {
+    handler: *const fn (allocator: *MyAllocator, old_ptr: ?[*]u8, old_size: usize, new_size: usize, alignment: u16) Error![*]u8,
+
+    pub fn allocate_one(allocator: *MyAllocator, comptime T: type) !*T {
+        const slice = try allocator.allocate(@sizeOf(T), @alignOf(T));
+        assert(slice.len == @sizeOf(T));
+        return @ptrCast(@alignCast(&slice.ptr[0]));
+    }
+
+    pub fn allocate(allocator: *MyAllocator, size: usize, alignment: u16) ![]u8 {
+        const ptr = try allocator.handler(allocator, null, 0, size, alignment);
+        return ptr[0..size];
+    }
+
+    pub fn free(allocator: *MyAllocator, bytes: []u8) !void {
+        _ = try allocator.handler(allocator, bytes.ptr, bytes.len, 0, 0);
+    }
+
+    pub fn reallocate(allocator: *MyAllocator, bytes: []u8, size: usize, alignment: u16) ![]u8 {
+        const new_ptr = try allocator.handler(allocator, bytes.ptr, bytes.len, size, alignment);
+        return new_ptr[0..size];
+    }
+
+    pub fn duplicate_bytes(allocator: *MyAllocator, bytes: []const u8) ![]u8 {
+        const slice = try allocator.allocate(bytes.len, 0);
+        @memcpy(slice, bytes);
+        return slice;
+    }
+
+    const Error = error{
+        allocation_failed,
+    };
+};
+
+pub const PageAllocator = struct {
+    allocator: MyAllocator = .{ .handler = handler },
+
+    fn handler(allocator: *MyAllocator, maybe_old_ptr: ?[*]u8, old_size: usize, new_size: usize, alignment: u16) MyAllocator.Error![*]u8 {
+        _ = allocator; // autofix
+        _ = alignment; // autofix
+        const maybe_new_slice: ?[]u8 = if (new_size > 0) allocate_virtual_memory(new_size, .{}) catch return MyAllocator.Error.allocation_failed else null;
+
+        if (maybe_old_ptr) |old_ptr| {
+            const old_slice = old_ptr[0..old_size];
+            if (maybe_new_slice) |new_slice| {
+                @memcpy(new_slice[0..old_size], old_slice);
+                free_virtual_memory(@ptrCast(@alignCast(old_slice)));
+                return new_slice.ptr;
+            } else {
+                return old_slice.ptr;
+            }
+        } else {
+            return (maybe_new_slice orelse unreachable).ptr;
+        }
+    }
+};
+
+pub const IndexType = if (@sizeOf(usize) >= 8) u32 else usize;
+
+const ArrayCapacity = struct {
+    pointer: *anyopaque,
+    capacity: IndexType,
+};
+
+fn ensure_capacity_array(allocator: *MyAllocator, current_capacity: IndexType, desired_capacity: IndexType, pointer: [*]u8, length: IndexType, element_size: IndexType, element_alignment: u16) !ArrayCapacity {
+    var new_capacity = @max(current_capacity, initial_item_count);
+    while (new_capacity < desired_capacity) {
+        new_capacity *= factor;
+    }
+    if (new_capacity > current_capacity) {
+        const old_byte_slice = pointer[0 .. length * element_size];
+        const new_byte_capacity = new_capacity * element_size;
+        const new_slice = try allocator.reallocate(old_byte_slice, new_byte_capacity, element_alignment);
+        return .{
+            .pointer = new_slice.ptr,
+            .capacity = new_capacity,
+        };
+    } else {
+        return .{
+            .pointer = pointer,
+            .capacity = current_capacity,
+        };
+    }
+}
+
+const initial_item_count = 16;
+const factor = 2;
+
+pub fn UnpinnedArray(comptime T: type) type {
+
+    return struct {
+        pointer: [*]T = undefined,
+        length: IndexType = 0,
+        capacity: IndexType = 0,
+
+        pub fn initialize_with_capacity(allocator: *MyAllocator, item_count: IndexType) !@This() {
+            var array = @This(){};
+            try array.ensure_capacity(allocator, item_count);
+            return array;
+        }
+
+        pub fn ensure_capacity(array: *@This(), allocator: *MyAllocator, desired_capacity: IndexType) !void {
+            const result = try ensure_capacity_array(allocator, array.capacity, desired_capacity, @ptrCast(array.pointer), array.length, @sizeOf(T), @alignOf(T));
+            array.pointer = @ptrCast(@alignCast(result.pointer));
+            array.capacity = result.capacity;
+        }
+
+        pub fn append(array: *@This(), allocator: *MyAllocator, item: T) !void {
+            try array.ensure_capacity(allocator, array.length + 1);
+            array.append_with_capacity(item);
+        }
+
+        pub fn append_slice(array: *@This(), allocator: *MyAllocator, items: []const T) !void {
+            try array.ensure_capacity(allocator, @intCast(array.length + items.len));
+            @memcpy(array.pointer[array.length..][0..items.len], items);
+            array.length += @intCast(items.len);
+        }
+
+        pub fn append_with_capacity(array: *@This(), item: T) void {
+            assert(array.length < array.capacity);
+            array.pointer[array.length] = item;
+            array.length += 1;
+        }
+
+        pub fn slice(array: *@This()) []T {
+            return array.pointer[0..array.length];
+        }
+
+        pub fn insert(array: *@This(), allocator: *MyAllocator, index: IndexType, item: T) !void {
+            assert(index < array.length);
+            if (array.length + 1 >= array.capacity) {
+                const after_count = array.length - index;
+                copy_backwards(T, array.pointer[index + 1 ..][0..after_count], array.pointer[index..][0..after_count]);
+            } else {
+                const new_capacity = array.capacity * 2;
+                const new_slice = try allocator.allocate(new_capacity * @sizeOf(T), @alignOf(T));
+                const new_typed_slice: []T = @as([*]T, @ptrCast(@alignCast(new_slice.ptr)))[0..new_capacity];
+                @memcpy(new_typed_slice[0..index], array.pointer[0..index]);
+                const after_count = array.length - index;
+                @memcpy(new_typed_slice[index + 1 ..][0..after_count], array.pointer[index..][0..after_count]);
+                try allocator.free(@as([*]u8, @ptrCast(@alignCast(array.slice().ptr)))[0 .. array.capacity * @sizeOf(T)]);
+                array.pointer = new_typed_slice.ptr;
+                array.capacity = new_capacity;
+            }
+
+            array.pointer[index] = item;
+            array.length += 1;
+        }
+
+        pub fn pop(array: *@This()) T {
+            assert(array.length > 0);
+            array.length -= 1;
+            return array.pointer[array.length];
+        }
+    };
+}
+
+fn copy_backwards(comptime T: type, destination: []T, source: []const T) void {
+    @setRuntimeSafety(false);
+    assert(destination.len >= source.len);
+    var i = source.len;
+    while (i > 0) {
+        i -= 1;
+        destination[i] = source[i];
+    }
+}
+
+test {
+    var page_allocator = PageAllocator{};
+    const allocator = &page_allocator.allocator;
+    var foo = UnpinnedArray(u32){};
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+    try foo.append(allocator, 1);
+}
+
+pub fn equal(a: anytype, b: @TypeOf(a)) bool {
+    const T = @TypeOf(a);
+
+    switch (@typeInfo(T)) {
+        .Struct => |info| {
+            inline for (info.fields) |field_info| {
+                if (!equal(@field(a, field_info.name), @field(b, field_info.name))) return false;
+            }
+            return true;
+        },
+        .ErrorUnion => {
+            if (a) |a_p| {
+                if (b) |b_p| return equal(a_p, b_p) else |_| return false;
+            } else |a_e| {
+                if (b) |_| return false else |b_e| return a_e == b_e;
+            }
+        },
+        .Union => |info| {
+            if (info.tag_type) |UnionTag| {
+                const tag_a = activeTag(a);
+                const tag_b = activeTag(b);
+                if (tag_a != tag_b) return false;
+
+                inline for (info.fields) |field_info| {
+                    if (@field(UnionTag, field_info.name) == tag_a) {
+                        return equal(@field(a, field_info.name), @field(b, field_info.name));
+                    }
+                }
+                return false;
+            }
+
+            @compileError("cannot compare untagged union type " ++ @typeName(T));
+        },
+        .Array => {
+            if (a.len != b.len) return false;
+            for (a, 0..) |e, i|
+                if (!equal(e, b[i])) return false;
+            return true;
+        },
+        .Vector => |info| {
+            var i: usize = 0;
+            while (i < info.len) : (i += 1) {
+                if (!equal(a[i], b[i])) return false;
+            }
+            return true;
+        },
+        .Pointer => |info| {
+            return switch (info.size) {
+                .One, .Many, .C => a == b,
+                .Slice => a.ptr == b.ptr and a.len == b.len,
+            };
+        },
+        .Optional => {
+            if (a == null and b == null) return true;
+            if (a == null or b == null) return false;
+            return equal(a.?, b.?);
+        },
+        else => return a == b,
+    }
+}
+
+pub fn Tag(comptime T: type) type {
+    return switch (@typeInfo(T)) {
+        .Enum => |info| info.tag_type,
+        .Union => |info| info.tag_type orelse @compileError(@typeName(T) ++ " has no tag type"),
+        else => @compileError("expected enum or union type, found '" ++ @typeName(T) ++ "'"),
+    };
+}
+
+/// Returns the active tag of a tagged union
+pub fn activeTag(u: anytype) Tag(@TypeOf(u)) {
+    const T = @TypeOf(u);
+    return @as(Tag(T), u);
+}
+
+pub fn missingCase(e: anytype) noreturn {
+    @panic(@tagName(e));
+}
+// Converts values in the range [0, 100) to a string.
+fn digits2(value: usize) [2]u8 {
+    return ("0001020304050607080910111213141516171819" ++
+        "2021222324252627282930313233343536373839" ++
+        "4041424344454647484950515253545556575859" ++
+        "6061626364656667686970717273747576777879" ++
+        "8081828384858687888990919293949596979899")[value * 2 ..][0..2].*;
+}
+
+pub fn digit_to_char(digit: u8) u8 {
+    return switch (digit) {
+        0...9 => digit + '0',
+        10...35 => digit + ((@as(u8, 'a')) - 10),
+        else => unreachable,
+    };
+}
+
+pub fn format_int(buffer: []u8, value: u64, base: u8, signed: bool) []u8 {
+    assert(base >= 2);
+
+    var a: u64 = value;
+    var index: usize = buffer.len;
+
+    if (base == 10) {
+        while (a >= 100) : (a = @divTrunc(a, 100)) {
+            index -= 2;
+            buffer[index..][0..2].* = digits2(@as(usize, @intCast(a % 100)));
+        }
+
+        if (a < 10) {
+            index -= 1;
+            buffer[index] = '0' + @as(u8, @intCast(a));
+        } else {
+            index -= 2;
+            buffer[index..][0..2].* = digits2(@as(usize, @intCast(a)));
+        }
+    } else {
+        while (true) {
+            const digit = a % base;
+            index -= 1;
+            buffer[index] = digit_to_char(@as(u8, @intCast(digit)));
+            a /= base;
+            if (a == 0) break;
+        }
+    }
+
+    if (signed) {
+        index -= 1;
+        buffer[index] = '-';
+    }
+
+    return buffer[index..];
+}
+
+pub fn span(ptr: [*:0]const u8) [:0]const u8 {
+    var len: usize = 0;
+    while (ptr[len] != 0) {
+        len += 1;
+    }
+    return ptr[0..len :0];
+}
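A short usage sketch of the new library, assuming only the declarations above (the test name and values are illustrative, not part of the commit):

    const library = @import("library.zig");

    test "MyHashMap put/get against PageAllocator" {
        var page_allocator = library.PageAllocator{};
        const allocator = &page_allocator.allocator;

        var map = library.MyHashMap(u32, []const u8){};
        try map.put(allocator, 1, "one");
        try map.put(allocator, 1, "uno"); // put overwrites an existing key in place
        library.assert(library.byte_equal(map.get(1).?, "uno"));
        library.assert(map.get(2) == null);
    }

Note that get_pointer resolves keys by a linear scan over keys(), so lookups are O(n); my_hash (FNV-1a folded into IndexType) is defined above but not yet wired into MyHashMap.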
@@ -1,21 +1,44 @@
 const std = @import("std");
 const Allocator = std.mem.Allocator;
-const equal = std.mem.eql;

 const Compilation = @import("Compilation.zig");
 pub const panic = Compilation.panic;

+const library = @import("library.zig");
+const byte_equal = library.byte_equal;
+const MyAllocator = library.MyAllocator;
+const PageAllocator = library.PageAllocator;
+
 const env_detecting_libc_paths = "NATIVITY_IS_DETECTING_LIBC_PATHS";

+test {
+    _ = library;
+}
+
 fn todo() noreturn {
     @setCold(true);
     @panic("TODO");
 }

-pub fn main() !void {
+var my_allocator = PageAllocator{};
+pub export fn main(c_argc: c_int, c_argv: [*][*:0]c_char, c_envp: [*:null]?[*:0]c_char) callconv(.C) c_int {
+    _ = c_envp; // autofix
+    const argument_count: usize = @intCast(c_argc);
+    const argument_values: [*][*:0]u8 = @ptrCast(c_argv);
+    const arguments = argument_values[0..argument_count];
+    if (entry_point(arguments)) |_| {
+        return 0;
+    } else |err| {
+        const error_name: []const u8 = @errorName(err);
+        _ = error_name; // autofix
+        return 1;
+    }
+}
+
+pub fn entry_point(arguments: [][*:0]u8) !void {
     var arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
     const allocator = arena_allocator.allocator();
-    const arguments = try std.process.argsAlloc(allocator);
+    // const arguments = try std.process.argsAlloc(allocator);

     if (arguments.len <= 1) {
         return error.InvalidInput;
@@ -25,32 +48,30 @@ pub fn main() !void {
         todo();
     }

-    const command = arguments[1];
+    const command = library.span(arguments[1]);
     const command_arguments = arguments[2..];

-    if (equal(u8, command, "build")) {
-        const context = try Compilation.createContext(allocator);
+    const context = try Compilation.createContext(allocator, &my_allocator.allocator);
+    if (byte_equal(command, "build")) {
         try Compilation.compileBuildExecutable(context, command_arguments);
-    } else if (equal(u8, command, "clang") or equal(u8, command, "-cc1") or equal(u8, command, "-cc1as")) {
+    } else if (byte_equal(command, "clang") or byte_equal(command, "-cc1") or byte_equal(command, "-cc1as")) {
         // const exit_code = try clangMain(allocator, arguments);
         // std.process.exit(exit_code);
-    } else if (equal(u8, command, "cc")) {
+    } else if (byte_equal(command, "cc")) {
         // TODO: transform our arguments to Clang and invoke it
         todo();
-    } else if (equal(u8, command, "c++")) {
+    } else if (byte_equal(command, "c++")) {
         // TODO: transform our arguments to Clang and invoke it
         todo();
-    } else if (equal(u8, command, "exe")) {
-        const context = try Compilation.createContext(allocator);
+    } else if (byte_equal(command, "exe")) {
         try Compilation.buildExecutable(context, command_arguments, .{
             .is_test = false,
         });
-    } else if (equal(u8, command, "lib")) {
+    } else if (byte_equal(command, "lib")) {
         todo();
-    } else if (equal(u8, command, "obj")) {
+    } else if (byte_equal(command, "obj")) {
         todo();
-    } else if (equal(u8, command, "test")) {
-        const context = try Compilation.createContext(allocator);
+    } else if (byte_equal(command, "test")) {
         try Compilation.buildExecutable(context, command_arguments, .{
             .is_test = true,
         });
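The new entry point above follows the usual C-ABI pattern: accept argc/argv, reslice them into a Zig slice, and collapse the error union returned by entry_point into a process exit code. A reduced sketch of the same shape, where c_main and run are hypothetical stand-ins for the real functions:

    pub export fn c_main(c_argc: c_int, c_argv: [*][*:0]c_char) callconv(.C) c_int {
        const arguments = @as([*][*:0]u8, @ptrCast(c_argv))[0..@intCast(c_argc)];
        run(arguments) catch return 1; // any Zig error becomes a nonzero exit code
        return 0;
    }

    fn run(arguments: [][*:0]u8) !void {
        if (arguments.len <= 1) return error.InvalidInput;
    }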
build.zig (25 lines changed)
@@ -27,6 +27,7 @@ pub fn build(b: *std.Build) !void {
     compiler.formatted_panics = print_stack_trace;
     compiler.root_module.unwind_tables = print_stack_trace;
     compiler.root_module.omit_frame_pointer = false;
+    compiler.root_module.error_tracing = false;
     compiler.want_lto = false;

     compiler.linkLibC();
@@ -414,6 +415,23 @@ pub fn build(b: *std.Build) !void {
         debug_command.addArgs(args);
         test_command.addArgs(args);
     }
+    //
+    // const tests = b.addTest(.{
+    //     .name = "nat_test",
+    //     .root_source_file = .{ .path = "bootstrap/main.zig" },
+    //     .target = target,
+    //     .optimize = optimization,
+    // });
+    // tests.root_module.addOptions("configuration", compiler_options);
+    // tests.formatted_panics = print_stack_trace;
+    // tests.root_module.unwind_tables = print_stack_trace;
+    // tests.root_module.omit_frame_pointer = false;
+    // tests.want_lto = false;
+    //
+    //
+    // const run_tests = b.addRunArtifact(tests);
+    // b.installArtifact(tests);
+    // run_tests.step.dependOn(b.getInstallStep());

     const run_step = b.step("run", "Test the Nativity compiler");
     run_step.dependOn(&run_command.step);
@@ -421,4 +439,11 @@ pub fn build(b: *std.Build) !void {
     debug_step.dependOn(&debug_command.step);
     const test_step = b.step("test", "Test the Nativity compiler");
     test_step.dependOn(&test_command.step);
+
+    // const test_lib = b.step("test_lib", "Test the Nativity Zig library");
+    // test_lib.dependOn(&run_tests.step);
+
+    const test_all = b.step("test_all", "Test all");
+    // test_all.dependOn(&run_tests.step);
+    test_all.dependOn(&test_command.step);
 }
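For orientation on the step wiring above: b.step only declares a named node in the build graph, and it does work only through whatever is attached via dependOn, which is why test_all simply forwards to the existing test command for now. A minimal sketch of the same idiom with hypothetical names, assuming a Zig build API of this era:

    pub fn build(b: *std.Build) void {
        const exe = b.addExecutable(.{
            .name = "demo",
            .root_source_file = .{ .path = "src/main.zig" },
            .target = b.host,
        });
        const run = b.addRunArtifact(exe);
        const all = b.step("all", "Build and run the demo");
        all.dependOn(&run.step); // "zig build all" now triggers the run artifact
    }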
@@ -95,7 +95,8 @@ pub fn main() !void {
     try request.wait();

     if (request.response.status != .ok) {
-        std.debug.panic("Status: {s} when fetching TAR {s}", .{@tagName(request.response.status), url});
+        @panic("Failure when fetching TAR");
+        //std.debug.panic("Status: {s} when fetching TAR {s}", .{@tagName(request.response.status), url});
     }

     var decompression = try std.compress.xz.decompress(allocator, request.reader());