Merge pull request #8 from birth-software/main-function

main function
David 2023-10-02 13:25:51 -06:00 committed by GitHub
commit 7977bccf64
12 changed files with 2182 additions and 977 deletions

View File

@ -3,6 +3,7 @@ comptime {
}
const _start = fn () noreturn {
_ = #syscall(231, 0);
const result = #import("main").main();
_ = #syscall(231, result);
unreachable;
};

View File

@ -64,35 +64,57 @@ pub const Struct = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Type = union(enum) {
void,
noreturn,
bool,
integer: Integer,
integer: Type.Integer,
@"struct": Struct.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
};
pub const Allocation = List.Allocation;
pub const Integer = struct {
pub const Integer = struct {
bit_count: u16,
signedness: Signedness,
pub const Signedness = enum(u1) {
unsigned = 0,
signed = 1,
};
pub fn getSize(integer: Type.Integer) u64 {
return integer.bit_count / @bitSizeOf(u8) + @intFromBool(integer.bit_count % @bitSizeOf(u8) != 0);
}
};
pub fn getSize(type_info: Type) u64 {
return switch (type_info) {
.integer => |integer| integer.getSize(),
else => |t| @panic(@tagName(t)),
};
}
pub fn getAlignment(type_info: Type) u64 {
return switch (type_info) {
.integer => |integer| @min(16, integer.getSize()),
else => |t| @panic(@tagName(t)),
};
}
};
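(Editor's note, not part of the diff: the rounding in Type.Integer.getSize above is plain ceil-division from bits to bytes. A minimal hedged sketch; the helper name roundedByteSize is illustrative only.)

const std = @import("std");

// Sketch of the rounding used by getSize: whole bytes, rounding up.
fn roundedByteSize(bit_count: u16) u64 {
    return bit_count / @bitSizeOf(u8) + @intFromBool(bit_count % @bitSizeOf(u8) != 0);
}

test "integer sizes round up to whole bytes" {
    try std.testing.expectEqual(@as(u64, 1), roundedByteSize(1)); // u1  -> 1 byte
    try std.testing.expectEqual(@as(u64, 2), roundedByteSize(16)); // u16 -> 2 bytes
    try std.testing.expectEqual(@as(u64, 3), roundedByteSize(17)); // u17 -> 3 bytes
}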
/// A scope holds declarations and references its parent scope and owning file
pub const Scope = struct {
parent: Scope.Index,
type: Type.Index = Type.Index.invalid,
declarations: AutoHashMap(u32, Declaration.Index) = .{},
parent: Scope.Index,
file: File.Index,
type: Type.Index = Type.Index.invalid,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const ScopeType = enum(u1) {
@ -113,6 +135,7 @@ pub const Declaration = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Function = struct {
@ -133,6 +156,7 @@ pub const Function = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Block = struct {
@ -140,6 +164,7 @@ pub const Block = struct {
reaches_end: bool,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Field = struct {
@ -147,6 +172,7 @@ pub const Field = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Loop = struct {
@ -156,6 +182,7 @@ pub const Loop = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
const Runtime = struct {
@ -172,6 +199,7 @@ pub const Assignment = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Syscall = struct {
@ -185,11 +213,36 @@ pub const Syscall = struct {
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Call = struct {
value: Value.Index,
arguments: ArgumentList.Index,
type: Type.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const ArgumentList = struct {
array: ArrayList(Value.Index),
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Return = struct {
value: Value.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Value = union(enum) {
unresolved: Unresolved,
declaration: Declaration.Index,
declaration_reference: Declaration.Index,
void,
bool: bool,
undefined,
@ -200,11 +253,15 @@ pub const Value = union(enum) {
runtime: Runtime,
assign: Assignment.Index,
type: Type.Index,
integer: u64,
integer: Integer,
syscall: Syscall.Index,
call: Call.Index,
argument_list: ArgumentList,
@"return": Return.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
pub fn isComptime(value: Value) bool {
return switch (value) {
@ -213,12 +270,17 @@ pub const Value = union(enum) {
};
}
pub fn getType(value: *Value) !void {
switch (value.*) {
pub fn getType(value: *Value, module: *Module) Type.Index {
return switch (value.*) {
.call => |call_index| module.calls.get(call_index).type,
else => |t| @panic(@tagName(t)),
};
}
unreachable;
}
};
pub const Integer = struct {
value: u64,
type: Type.Integer,
};
pub const Module = struct {
@ -238,23 +300,27 @@ pub const Module = struct {
loops: BlockList(Loop) = .{},
assignments: BlockList(Assignment) = .{},
syscalls: BlockList(Syscall) = .{},
calls: BlockList(Call) = .{},
argument_list: BlockList(ArgumentList) = .{},
returns: BlockList(Return) = .{},
entry_point: ?u32 = null,
pub const Descriptor = struct {
main_package_path: []const u8,
};
const ImportFileResult = struct {
file: *File,
ptr: *File,
index: File.Index,
is_new: bool,
};
const ImportPackageResult = struct {
file: *File,
is_new: bool,
file: ImportFileResult,
is_package: bool,
};
pub fn importFile(module: *Module, allocator: Allocator, current_file: *File, import_name: []const u8) !ImportPackageResult {
pub fn importFile(module: *Module, allocator: Allocator, current_file_index: File.Index, import_name: []const u8) !ImportPackageResult {
print("import: '{s}'\n", .{import_name});
if (equal(u8, import_name, "std")) {
return module.importPackage(allocator, module.main_package.dependencies.get("std").?);
@ -268,6 +334,7 @@ pub const Module = struct {
return module.importPackage(allocator, module.main_package);
}
const current_file = module.files.get(current_file_index);
if (current_file.package.dependencies.get(import_name)) |package| {
return module.importPackage(allocator, package);
}
@ -279,55 +346,73 @@ pub const Module = struct {
const full_path = try std.fs.path.join(allocator, &.{ current_file.package.directory.path, import_name });
const file_relative_path = std.fs.path.basename(full_path);
const package = current_file.package;
const import = try module.getFile(allocator, full_path, file_relative_path, package);
const import_file = try module.getFile(allocator, full_path, file_relative_path, package);
try import.file.addFileReference(allocator, current_file);
try import_file.ptr.addFileReference(allocator, current_file);
const result = ImportPackageResult{
.file = import.file,
.is_new = import.is_new,
.file = import_file,
.is_package = false,
};
return result;
}
fn lookupDeclaration(module: *Module, hashed: u32) !noreturn {
_ = hashed;
_ = module;
while (true) {}
}
fn getFile(module: *Module, allocator: Allocator, full_path: []const u8, relative_path: []const u8, package: *Package) !ImportFileResult {
const path_lookup = try module.import_table.getOrPut(allocator, full_path);
const file: *File = switch (path_lookup.found_existing) {
true => path_lookup.value_ptr.*,
const file, const index = switch (path_lookup.found_existing) {
true => blk: {
const result = path_lookup.value_ptr.*;
const index = module.files.indexOf(result);
break :blk .{
result,
index,
};
},
false => blk: {
const new_file_index = try module.files.append(allocator, File{
const file_allocation = try module.files.append(allocator, File{
.relative_path = relative_path,
.package = package,
});
const file = module.files.get(new_file_index);
path_lookup.value_ptr.* = file;
break :blk file;
std.debug.print("Adding file #{}: {s}\n", .{ file_allocation.index.uniqueInteger(), full_path });
path_lookup.value_ptr.* = file_allocation.ptr;
// break :blk file;
break :blk .{
file_allocation.ptr,
file_allocation.index,
};
},
};
return .{
.file = file,
.ptr = file,
.index = index,
.is_new = !path_lookup.found_existing,
};
}
pub fn importPackage(module: *Module, allocator: Allocator, package: *Package) !ImportPackageResult {
const full_path = try std.fs.path.resolve(allocator, &.{ package.directory.path, package.source_path });
const import = try module.getFile(allocator, full_path, package.source_path, package);
try import.file.addPackageReference(allocator, package);
const import_file = try module.getFile(allocator, full_path, package.source_path, package);
try import_file.ptr.addPackageReference(allocator, package);
return .{
.file = import.file,
.is_new = import.is_new,
.file = import_file,
.is_package = true,
};
}
pub fn generateAbstractSyntaxTreeForFile(module: *Module, allocator: Allocator, file: *File) !void {
_ = module;
const source_file = try file.package.directory.handle.openFile(file.relative_path, .{});
const source_file = file.package.directory.handle.openFile(file.relative_path, .{}) catch |err| {
std.debug.panic("Can't find file {s} in directory {s} for error {s}", .{ file.relative_path, file.package.directory.path, @errorName(err) });
};
const file_size = try source_file.getEndPos();
var file_buffer = try allocator.alloc(u8, file_size);
@ -426,14 +511,11 @@ pub fn compileModule(compilation: *Compilation, descriptor: Module.Descriptor) !
try module.generateAbstractSyntaxTreeForFile(compilation.base_allocator, import);
}
const main_declaration = try semantic_analyzer.initialize(compilation, module, packages[0]);
const main_declaration = try semantic_analyzer.initialize(compilation, module, packages[0], .{ .block = 0, .index = 0 });
var ir = try intermediate_representation.initialize(compilation, module, packages[0], main_declaration);
switch (@import("builtin").cpu.arch) {
.x86_64 => |arch| try emit.get(arch).initialize(compilation.base_allocator, &ir),
else => {},
}
try emit.get(.x86_64).initialize(compilation.base_allocator, &ir);
}
fn generateAST() !void {}
@ -465,6 +547,9 @@ pub const File = struct {
relative_path: []const u8,
package: *Package,
pub const List = BlockList(@This());
pub const Index = List.Index;
const Status = enum {
not_loaded,
loaded_into_memory,
@ -484,15 +569,6 @@ pub const File = struct {
try file.file_references.append(allocator, affected);
}
pub fn fromRelativePath(allocator: Allocator, file_relative_path: []const u8) *File {
const file_content = try std.fs.cwd().readFileAlloc(allocator, file_relative_path, std.math.maxInt(usize));
_ = file_content;
const file = try allocator.create(File);
file.* = File{};
return file;
}
fn lex(file: *File, allocator: Allocator) !void {
assert(file.status == .loaded_into_memory);
file.lexical_analyzer_result = try lexical_analyzer.analyze(allocator, file.source_code);

View File

@ -27,7 +27,7 @@ pub const Result = struct {
},
entry_point: u32 = 0,
fn create() !Result {
pub fn create() !Result {
return Result{
.sections = .{
.text = .{ .content = try mmap(page_size, .{ .executable = true }) },
@ -46,13 +46,18 @@ pub const Result = struct {
break :blk @as([*]align(0x1000) u8, @ptrCast(@alignCast(try windows.VirtualAlloc(null, size, windows.MEM_COMMIT | windows.MEM_RESERVE, windows.PAGE_EXECUTE_READWRITE))))[0..size];
},
.linux, .macos => |os_tag| blk: {
const jit = switch (os_tag) {
.macos => 0x800,
.linux => 0,
else => unreachable,
};
const execute_flag: switch (os_tag) {
.linux => u32,
.macos => c_int,
else => unreachable,
} = if (flags.executable) std.os.PROT.EXEC else 0;
const protection_flags: u32 = @intCast(std.os.PROT.READ | std.os.PROT.WRITE | execute_flag);
const mmap_flags = std.os.MAP.ANONYMOUS | std.os.MAP.PRIVATE;
const mmap_flags = std.os.MAP.ANONYMOUS | std.os.MAP.PRIVATE | jit;
break :blk std.os.mmap(null, size, protection_flags, mmap_flags, -1, 0);
},
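(Editor's note, not part of the diff: a hedged sketch of the flag selection introduced here; 0x800 is MAP_JIT on Darwin, and the jitFlag helper name is illustrative only.)

const std = @import("std");

// On macOS, executable anonymous mappings need MAP_JIT (0x800); Linux needs no extra bit.
fn jitFlag(comptime os_tag: std.Target.Os.Tag) u32 {
    return switch (os_tag) {
        .macos => 0x800,
        .linux => 0,
        else => @compileError("unsupported OS"),
    };
}

test "jit flag selection" {
    try std.testing.expectEqual(@as(u32, 0x800), jitFlag(.macos));
    try std.testing.expectEqual(@as(u32, 0), jitFlag(.linux));
}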
@ -77,14 +82,6 @@ pub const Result = struct {
image.sections.text.index += 1;
}
// fn appendOnlyOpcodeSkipInstructionBytes(image: *Result, instruction: Instruction) void {
// const instruction_descriptor = instruction_descriptors.get(instruction);
// assert(instruction_descriptor.opcode_byte_count == instruction_descriptor.operand_offset);
// image.appendCode(instruction_descriptor.getOpcode());
//
// image.sections.text.index += instruction_descriptor.size - instruction_descriptor.opcode_byte_count;
// }
fn getEntryPoint(image: *const Result, comptime FunctionType: type) *const FunctionType {
comptime {
assert(@typeInfo(FunctionType) == .Fn);
@ -102,16 +99,14 @@ pub fn InstructionSelector(comptime Instruction: type) type {
pub const Function = struct {
instructions: ArrayList(Instruction) = .{},
block_byte_counts: ArrayList(u16),
block_offsets: ArrayList(u32),
relocations: ArrayList(u32) = .{},
block_map: AutoHashMap(ir.BasicBlock.Index, u32) = .{},
byte_count: u32 = 0,
block_byte_count: u16 = 0,
pub fn selectInstruction(function: *Function, allocator: Allocator, instruction: Instruction) !void {
pub fn addInstruction(function: *Function, allocator: Allocator, instruction: Instruction) !u32 {
const index = function.instructions.items.len;
try function.instructions.append(allocator, instruction);
function.block_byte_count += Instruction.descriptors.get(instruction).size;
return @intCast(index);
}
};
@ -124,81 +119,13 @@ pub fn get(comptime arch: std.Target.Cpu.Arch) type {
.x86_64 => @import("x86_64.zig"),
else => @compileError("Architecture not supported"),
};
const Instruction = backend.Instruction;
return struct {
pub fn initialize(allocator: Allocator, intermediate: *ir.Result) !void {
var result = try Result.create();
var function_iterator = intermediate.functions.iterator();
const IS = InstructionSelector(Instruction);
var instruction_selector = IS{
.functions = try ArrayList(IS.Function).initCapacity(allocator, intermediate.functions.len),
.allocator = allocator,
};
while (function_iterator.next()) |ir_function| {
const function = instruction_selector.functions.addOneAssumeCapacity();
function.* = .{
.block_byte_counts = try ArrayList(u16).initCapacity(allocator, ir_function.blocks.items.len),
.block_offsets = try ArrayList(u32).initCapacity(allocator, ir_function.blocks.items.len),
};
try function.block_map.ensureTotalCapacity(allocator, @intCast(ir_function.blocks.items.len));
for (ir_function.blocks.items, 0..) |block_index, index| {
function.block_map.putAssumeCapacity(block_index, @intCast(index));
}
for (ir_function.blocks.items) |block_index| {
const block = intermediate.blocks.get(block_index);
function.block_offsets.appendAssumeCapacity(function.byte_count);
function.block_byte_count = 0;
for (block.instructions.items) |instruction_index| {
const instruction = intermediate.instructions.get(instruction_index).*;
try backend.selectInstruction(&instruction_selector, function, intermediate, instruction);
}
function.block_byte_counts.appendAssumeCapacity(function.block_byte_count);
function.byte_count += function.block_byte_count;
}
}
for (instruction_selector.functions.items) |function| {
for (function.instructions.items) |instruction| backend.emitInstruction(&result, instruction, intermediate);
}
// for (instruction_selector.functions.items) |function| {
// var fix_size: bool = false;
// _ = fix_size;
// for (function.relocations.items) |instruction_index| {
// const instruction = function.instructions.items[instruction_index];
// const relative = instruction.jmp_rel_8;
// const source_block = relative.source;
// const destination_block = relative.destination;
// const source_offset = function.block_offsets.items[source_block];
// const destination_offset = function.block_offsets.items[destination_block];
// std.debug.print("Source offset: {}. Destination: {}\n", .{ source_offset, destination_offset });
// const instruction_descriptor = instruction_descriptors.get(relative.instruction);
// const instruction_offset = source_offset + relative.block_offset;
// const really_source_offset = instruction_offset + instruction_descriptor.size;
// const displacement = @as(i64, destination_offset) - @as(i64, really_source_offset);
//
// const operands = instruction_descriptor.getOperands();
// switch (operands.len) {
// 1 => switch (operands[0].size) {
// @sizeOf(u8) => {
// if (displacement >= std.math.minInt(i8) and displacement <= std.math.maxInt(i8)) {
// const writer_index = instruction_offset + instruction_descriptor.operand_offset;
// std.debug.print("Instruction offset: {}. Operand offset: {}. Writer index: {}. displacement: {}\n", .{ instruction_offset, instruction_descriptor.operand_offset, writer_index, displacement });
// result.sections.text.content[writer_index] = @bitCast(@as(i8, @intCast(displacement)));
// } else {
// unreachable;
// }
// },
// else => unreachable,
// },
// else => unreachable,
// }
// }
// }
std.debug.print("Entry point: {}\n", .{intermediate.entry_point});
var mir = try backend.MIR.generate(allocator, intermediate);
try mir.allocateRegisters(allocator, intermediate);
const result = try mir.encode(intermediate);
const text_section = result.sections.text.content[0..result.sections.text.index];
for (text_section) |byte| {

View File

@ -10,15 +10,22 @@ const Package = Compilation.Package;
const data_structures = @import("../data_structures.zig");
const ArrayList = data_structures.ArrayList;
const BlockList = data_structures.BlockList;
const AutoArrayHashMap = data_structures.AutoArrayHashMap;
const AutoHashMap = data_structures.AutoHashMap;
pub const Result = struct {
functions: BlockList(Function) = .{},
blocks: BlockList(BasicBlock) = .{},
calls: BlockList(Call) = .{},
functions: BlockList(Function) = .{},
instructions: BlockList(Instruction) = .{},
jumps: BlockList(Jump) = .{},
values: BlockList(Value) = .{},
syscalls: BlockList(Syscall) = .{},
loads: BlockList(Load) = .{},
phis: BlockList(Phi) = .{},
stores: BlockList(Store) = .{},
syscalls: BlockList(Syscall) = .{},
values: BlockList(Value) = .{},
stack_references: BlockList(StackReference) = .{},
entry_point: u32 = 0,
};
pub fn initialize(compilation: *Compilation, module: *Module, package: *Package, main_file: Compilation.Type.Index) !Result {
@ -32,10 +39,16 @@ pub fn initialize(compilation: *Compilation, module: *Module, package: *Package,
.module = module,
};
while (function_iterator.next()) |sema_function| {
print("\nFunction: {}\n", .{sema_function});
builder.ir.entry_point = module.entry_point orelse unreachable;
try builder.function(sema_function);
while (function_iterator.next()) |sema_function| {
const function_index = try builder.buildFunction(sema_function);
try builder.optimizeFunction(function_index);
}
var ir_function_iterator = builder.ir.functions.iterator();
while (ir_function_iterator.nextPointer()) |function| {
print("\n{}\n", .{function});
}
return builder.ir;
@ -61,11 +74,14 @@ pub const BasicBlock = struct {
};
pub const Instruction = union(enum) {
call: Call.Index,
jump: Jump.Index,
load: Load.Index,
phi: Phi.Index,
ret: Ret,
syscall: Syscall.Index,
ret: Value.Index,
store: Store.Index,
syscall: Value.Index,
copy: Value.Index,
@"unreachable",
pub const List = BlockList(@This());
@ -73,15 +89,14 @@ pub const Instruction = union(enum) {
};
const Phi = struct {
foo: u32 = 0,
value: Value.Index,
jump: Jump.Index,
block: BasicBlock.Index,
next: Phi.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
};
const Ret = struct {
value: Instruction.Index,
};
pub const Jump = struct {
source: BasicBlock.Index,
destination: BasicBlock.Index,
@ -102,9 +117,37 @@ const Load = struct {
pub const Index = List.Index;
};
const Store = struct {
source: Value.Index,
destination: Value.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
};
pub const StackReference = struct {
size: u64,
alignment: u64,
offset: u64,
pub const List = BlockList(@This());
pub const Index = List.Index;
};
pub const Call = struct {
function: Function.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
};
pub const Value = union(enum) {
integer: Integer,
integer: Compilation.Integer,
load: Load.Index,
call: Call.Index,
stack_reference: StackReference.Index,
phi: Phi.Index,
instruction: Instruction.Index,
syscall: Syscall.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
@ -112,75 +155,301 @@ pub const Value = union(enum) {
return switch (value) {
.integer => false,
.load => true,
.call => true,
.stack_reference => true,
.phi => unreachable,
.instruction => unreachable,
.syscall => unreachable,
};
}
};
const Integer = struct {
value: u64,
sign: bool,
};
const Function = struct {
pub const Function = struct {
blocks: ArrayList(BasicBlock.Index) = .{},
stack_map: AutoHashMap(Compilation.Declaration.Index, Value.Index) = .{},
current_basic_block: BasicBlock.Index = BasicBlock.Index.invalid,
return_phi_node: Instruction.Index = Instruction.Index.invalid,
return_phi_block: BasicBlock.Index = BasicBlock.Index.invalid,
ir: *Result,
current_stack_offset: usize = 0,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub fn format(function: *const Function, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
try writer.writeAll("Function:\n");
for (function.blocks.items, 0..) |block_index, function_block_index| {
try writer.print("#{}:\n", .{function_block_index});
const block = function.ir.blocks.get(block_index);
for (block.instructions.items, 0..) |instruction_index, block_instruction_index| {
try writer.print("%{}: ", .{block_instruction_index});
const instruction = function.ir.instructions.get(instruction_index).*;
try writer.print("{s}", .{@tagName(instruction)});
try writer.writeByte('\n');
}
try writer.writeByte('\n');
}
_ = options;
_ = fmt;
}
};
pub const Builder = struct {
allocator: Allocator,
ir: Result = .{},
module: *Module,
current_basic_block: BasicBlock.Index = BasicBlock.Index.invalid,
current_function_index: Function.Index = Function.Index.invalid,
fn function(builder: *Builder, sema_function: Compilation.Function) !void {
builder.current_function_index = try builder.ir.functions.append(builder.allocator, .{});
fn currentFunction(builder: *Builder) *Function {
return builder.ir.functions.get(builder.current_function_index);
}
fn buildFunction(builder: *Builder, sema_function: Compilation.Function) !Function.Index {
const function_allocation = try builder.ir.functions.append(builder.allocator, .{
.ir = &builder.ir,
});
builder.current_function_index = function_allocation.index;
const function = function_allocation.ptr;
// TODO: arguments
builder.current_basic_block = try builder.newBlock();
function.current_basic_block = try builder.newBlock();
const return_type = builder.module.types.get(builder.module.function_prototypes.get(sema_function.prototype).return_type);
const is_noreturn = return_type.* == .noreturn;
if (!is_noreturn) {
const exit_block = try builder.newBlock();
const phi = try builder.appendToBlock(exit_block, .{
const phi_instruction = try builder.appendToBlock(exit_block, .{
.phi = Phi.Index.invalid,
});
// phi.ptr.* = .{
// .value = Value.Index.invalid,
// .jump = Jump.Index.invalid,
// .block = exit_block,
// .next = Phi.Index.invalid,
// };
const ret = try builder.appendToBlock(exit_block, .{
.ret = .{
.value = phi,
},
.ret = (try builder.ir.values.append(builder.allocator, .{
.instruction = phi_instruction,
})).index,
});
_ = ret;
function.return_phi_node = phi_instruction;
function.return_phi_block = exit_block;
}
const sema_block = sema_function.getBodyBlock(builder.module);
try builder.block(sema_block, .{ .emit_exit_block = !is_noreturn });
try builder.dumpFunction(std.io.getStdErr().writer(), builder.current_function_index);
builder.currentFunction().current_stack_offset = std.mem.alignForward(usize, builder.currentFunction().current_stack_offset, 0x10);
return builder.current_function_index;
}
fn dumpFunction(builder: *Builder, writer: anytype, index: Function.Index) !void {
const f = builder.ir.functions.get(index);
try writer.writeAll("Hello world!\n");
print("Function blocks: {}\n", .{f.blocks.items.len});
var function_instruction_index: usize = 0;
for (f.blocks.items, 0..) |block_index, function_block_index| {
print("#{}:\n", .{function_block_index});
const function_block = builder.ir.blocks.get(block_index);
for (function_block.instructions.items) |instruction_index| {
const BlockSearcher = struct {
to_visit: ArrayList(BasicBlock.Index) = .{},
visited: AutoArrayHashMap(BasicBlock.Index, void) = .{},
};
fn findReachableBlocks(builder: *Builder, first: BasicBlock.Index) ![]const BasicBlock.Index {
var searcher = BlockSearcher{};
try searcher.to_visit.append(builder.allocator, first);
try searcher.visited.put(builder.allocator, first, {});
while (searcher.to_visit.items.len > 0) {
const block_index = searcher.to_visit.swapRemove(0);
const block_to_visit = builder.ir.blocks.get(block_index);
const last_instruction_index = block_to_visit.instructions.items[block_to_visit.instructions.items.len - 1];
const last_instruction = builder.ir.instructions.get(last_instruction_index);
switch (last_instruction.*) {
.jump => |jump_index| {
const ir_jump = builder.ir.jumps.get(jump_index);
assert(ir_jump.source.eq(block_index));
const new_block = ir_jump.destination;
if (searcher.visited.get(new_block) == null) {
try searcher.to_visit.append(builder.allocator, new_block);
try searcher.visited.put(builder.allocator, new_block, {});
}
},
.@"unreachable", .ret => {},
else => |t| @panic(@tagName(t)),
}
}
return searcher.visited.keys();
}
fn optimizeFunction(builder: *Builder, function_index: Function.Index) !void {
const function = builder.ir.functions.get(function_index);
const reachable_blocks = try builder.findReachableBlocks(function.blocks.items[0]);
var did_something = true;
while (did_something) {
did_something = false;
for (reachable_blocks) |basic_block_index| {
const basic_block = builder.ir.blocks.get(basic_block_index);
for (basic_block.instructions.items) |instruction_index| {
did_something = did_something or try builder.removeUnreachablePhis(reachable_blocks, instruction_index);
did_something = did_something or try builder.removeTrivialPhis(instruction_index);
const copy = try builder.removeCopyReferences(instruction_index);
did_something = did_something or copy;
}
}
}
var instructions_to_delete = ArrayList(u32){};
for (reachable_blocks) |basic_block_index| {
instructions_to_delete.clearRetainingCapacity();
const basic_block = builder.ir.blocks.get(basic_block_index);
for (basic_block.instructions.items, 0..) |instruction_index, index| {
const instruction = builder.ir.instructions.get(instruction_index);
print("%{}: {}\n", .{ function_instruction_index, instruction });
function_instruction_index += 1;
switch (instruction.*) {
.copy => try instructions_to_delete.append(builder.allocator, @intCast(index)),
else => {},
}
}
print("\n", .{});
var deleted_instruction_count: usize = 0;
for (instructions_to_delete.items) |instruction_to_delete| {
_ = basic_block.instructions.orderedRemove(instruction_to_delete - deleted_instruction_count);
}
}
}
fn removeUnreachablePhis(builder: *Builder, reachable_blocks: []const BasicBlock.Index, instruction_index: Instruction.Index) !bool {
const instruction = builder.ir.instructions.get(instruction_index);
return switch (instruction.*) {
.phi => blk: {
var did_something = false;
var head = &instruction.phi;
next: while (head.valid) {
const phi = builder.ir.phis.get(head.*);
const phi_jump = builder.ir.jumps.get(phi.jump);
assert(phi_jump.source.valid);
for (reachable_blocks) |block_index| {
if (phi_jump.source.eq(block_index)) {
head = &phi.next;
continue :next;
}
}
head.* = phi.next;
did_something = true;
}
break :blk did_something;
},
else => false,
};
}
fn removeTrivialPhis(builder: *Builder, instruction_index: Instruction.Index) !bool {
const instruction = builder.ir.instructions.get(instruction_index);
return switch (instruction.*) {
.phi => |phi_index| blk: {
const trivial_phi: ?Value.Index = trivial_blk: {
var only_value = Value.Index.invalid;
var it = phi_index;
while (it.valid) {
const phi = builder.ir.phis.get(it);
const phi_value = builder.ir.values.get(phi.value);
if (phi_value.* == .phi) unreachable;
// TODO: undefined
if (only_value.valid) {
if (!only_value.eq(phi.value)) {
break :trivial_blk null;
}
} else {
only_value = phi.value;
}
it = phi.next;
}
break :trivial_blk only_value;
};
if (trivial_phi) |trivial_value| {
if (trivial_value.valid) {
// Option to delete
const delete = false;
if (delete) {
unreachable;
} else {
instruction.* = .{
.copy = trivial_value,
};
}
} else {
unreachable;
}
}
break :blk instruction.* != .phi;
},
else => false,
};
}
fn removeCopyReferences(builder: *Builder, instruction_index: Instruction.Index) !bool {
const instruction = builder.ir.instructions.get(instruction_index);
return switch (instruction.*) {
.copy => false,
else => {
var did_something = false;
const operands: []const *Value.Index = switch (instruction.*) {
.jump, .@"unreachable" => &.{},
.ret => &.{&instruction.ret},
// TODO: arguments
.call => blk: {
var list = ArrayList(*Value.Index){};
break :blk list.items;
},
.store => |store_index| blk: {
const store_instr = builder.ir.stores.get(store_index);
break :blk &.{ &store_instr.source, &store_instr.destination };
},
.syscall => |syscall_value_index| blk: {
const syscall_value = builder.ir.values.get(syscall_value_index);
const syscall = builder.ir.syscalls.get(syscall_value.syscall);
var list = ArrayList(*Value.Index){};
try list.ensureTotalCapacity(builder.allocator, syscall.arguments.items.len);
for (syscall.arguments.items) |*arg| {
list.appendAssumeCapacity(arg);
}
break :blk list.items;
},
else => |t| @panic(@tagName(t)),
};
for (operands) |operand_value_index| {
const operand_value = builder.ir.values.get(operand_value_index.*);
switch (operand_value.*) {
.instruction => |operand_instruction_index| {
const operand_instruction = builder.ir.instructions.get(operand_instruction_index);
switch (operand_instruction.*) {
.copy => |copy_value| {
operand_value_index.* = copy_value;
did_something = true;
},
else => |t| @panic(@tagName(t)),
}
},
.integer, .stack_reference, .call => {},
else => |t| @panic(@tagName(t)),
}
}
return did_something;
},
};
}
fn blockInsideBasicBlock(builder: *Builder, sema_block: *Compilation.Block, block_index: BasicBlock.Index) !BasicBlock.Index {
builder.current_basic_block = block_index;
const current_function = builder.currentFunction();
current_function.current_basic_block = block_index;
try builder.block(sema_block, .{});
return builder.current_basic_block;
return current_function.current_basic_block;
}
const BlockOptions = packed struct {
@ -203,7 +472,7 @@ pub const Builder = struct {
else => |t| @panic(@tagName(t)),
};
const original_block = builder.current_basic_block;
const original_block = builder.currentFunction().current_basic_block;
const jump_to_loop = try builder.append(.{
.jump = undefined,
});
@ -221,7 +490,7 @@ pub const Builder = struct {
});
const sema_body_block = builder.module.blocks.get(sema_loop_body.block);
builder.current_basic_block = try builder.blockInsideBasicBlock(sema_body_block, loop_body_block);
builder.currentFunction().current_basic_block = try builder.blockInsideBasicBlock(sema_body_block, loop_body_block);
if (loop_prologue_block.valid) {
builder.ir.blocks.get(loop_prologue_block).seal();
}
@ -229,20 +498,20 @@ pub const Builder = struct {
if (sema_body_block.reaches_end) {
_ = try builder.append(.{
.jump = try builder.jump(.{
.source = builder.current_basic_block,
.source = builder.currentFunction().current_basic_block,
.destination = loop_head_block,
}),
});
}
builder.ir.blocks.get(builder.current_basic_block).filled = true;
builder.ir.blocks.get(builder.currentFunction().current_basic_block).filled = true;
builder.ir.blocks.get(loop_body_block).seal();
if (!loop_head_block.eq(loop_body_block)) {
unreachable;
}
if (loop_prologue_block.valid) {
builder.current_basic_block = loop_prologue_block;
builder.currentFunction().current_basic_block = loop_prologue_block;
}
},
.syscall => |syscall_index| {
@ -257,80 +526,216 @@ pub const Builder = struct {
for (sema_syscall.getArguments()) |sema_syscall_argument| {
assert(sema_syscall_argument.valid);
const argument_value_index = try builder.emitValue(sema_syscall_argument);
var argument_value_index = try builder.emitValue(sema_syscall_argument);
arguments.appendAssumeCapacity(argument_value_index);
}
// TODO: undo this mess
_ = try builder.append(.{
.syscall = try builder.ir.syscalls.append(builder.allocator, .{
.syscall = (try builder.ir.values.append(builder.allocator, .{
.syscall = (try builder.ir.syscalls.append(builder.allocator, .{
.arguments = arguments,
}),
})).index,
})).index,
});
},
.@"unreachable" => _ = try builder.append(.{
.@"unreachable" = {},
}),
.@"return" => |sema_ret_index| {
const sema_ret = builder.module.returns.get(sema_ret_index);
const return_value = try builder.emitValue(sema_ret.value);
const phi_instruction = builder.ir.instructions.get(builder.currentFunction().return_phi_node);
const phi = switch (phi_instruction.phi.valid) {
true => unreachable,
false => (try builder.ir.phis.append(builder.allocator, std.mem.zeroes(Phi))).ptr,
}; //builder.ir.phis.get(phi_instruction.phi);
const exit_jump = try builder.jump(.{
.source = builder.currentFunction().current_basic_block,
.destination = switch (phi_instruction.phi.valid) {
true => phi.block,
false => builder.currentFunction().return_phi_block,
},
});
print("Previous phi: {}\n", .{phi_instruction.phi});
phi_instruction.phi = (try builder.ir.phis.append(builder.allocator, .{
.value = return_value,
.jump = exit_jump,
.next = phi_instruction.phi,
.block = phi.block,
})).index;
_ = try builder.append(.{
.jump = exit_jump,
});
},
.declaration => |sema_declaration_index| {
const sema_declaration = builder.module.declarations.get(sema_declaration_index);
assert(sema_declaration.scope_type == .local);
const sema_init_value = builder.module.values.get(sema_declaration.init_value);
const declaration_type = builder.module.types.get(sema_init_value.getType(builder.module));
const size = declaration_type.getSize();
const alignment = declaration_type.getAlignment();
const stack_offset = switch (size > 0) {
true => builder.allocateStack(size, alignment),
false => 0,
};
var value_index = try builder.emitValue(sema_declaration.init_value);
const value = builder.ir.values.get(value_index);
print("Value: {}\n", .{value.*});
value_index = switch (value.isInMemory()) {
false => try builder.load(value_index),
true => value_index,
};
if (stack_offset > 0) {
_ = try builder.store(.{
.source = value_index,
.destination = try builder.stackReference(stack_offset, declaration_type.*, sema_declaration_index),
});
}
},
else => |t| @panic(@tagName(t)),
}
}
}
fn stackReference(builder: *Builder, stack_offset: u64, t: Compilation.Type, sema_declaration: Compilation.Declaration.Index) !Value.Index {
const stack_reference_allocation = try builder.ir.stack_references.append(builder.allocator, .{
.offset = stack_offset,
.size = t.getSize(),
.alignment = t.getAlignment(),
});
const value_allocation = try builder.ir.values.append(builder.allocator, .{
.stack_reference = stack_reference_allocation.index,
});
try builder.currentFunction().stack_map.put(builder.allocator, sema_declaration, value_allocation.index);
return value_allocation.index;
}
fn store(builder: *Builder, descriptor: Store) !void {
const store_allocation = try builder.ir.stores.append(builder.allocator, descriptor);
_ = try builder.append(.{
.store = store_allocation.index,
});
}
fn allocateStack(builder: *Builder, size: u64, alignment: u64) u64 {
builder.currentFunction().current_stack_offset = std.mem.alignForward(u64, builder.currentFunction().current_stack_offset, alignment);
builder.currentFunction().current_stack_offset += size;
return builder.currentFunction().current_stack_offset;
}
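(Editor's note, not part of the diff: the stack slot placement in allocateStack above is an aligned bump; a hedged standalone sketch, with an illustrative helper name.)

const std = @import("std");

// Align the running offset up, then grow it by the slot size,
// mirroring allocateStack above.
fn bumpStackOffset(offset: u64, size: u64, alignment: u64) u64 {
    const aligned = std.mem.alignForward(u64, offset, alignment);
    return aligned + size;
}

test "aligned stack bump" {
    try std.testing.expectEqual(@as(u64, 8), bumpStackOffset(0, 8, 8));
    try std.testing.expectEqual(@as(u64, 16), bumpStackOffset(9, 4, 4)); // 9 aligns to 12, plus 4
}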
fn load(builder: *Builder, value_index: Value.Index) !Value.Index {
print("Doing load!\n", .{});
const load_index = try builder.ir.loads.append(builder.allocator, .{
const load_allocation = try builder.ir.loads.append(builder.allocator, .{
.value = value_index,
});
const instruction_index = try builder.append(.{
.load = load_index,
.load = load_allocation.index,
});
_ = instruction_index;
const result = try builder.ir.values.append(builder.allocator, .{
.load = load_index,
.load = load_allocation.index,
});
return result;
return result.index;
}
fn emitValue(builder: *Builder, sema_value_index: Compilation.Value.Index) !Value.Index {
const sema_value = builder.module.values.get(sema_value_index).*;
return switch (sema_value) {
// TODO
.integer => |integer| try builder.ir.values.append(builder.allocator, .{
.integer = .{
.value = integer,
.sign = false,
.integer => |integer| (try builder.ir.values.append(builder.allocator, .{
.integer = integer,
})).index,
.call => |sema_call_index| {
const sema_call = builder.module.calls.get(sema_call_index);
const argument_list_index = sema_call.arguments;
if (argument_list_index.valid) {
unreachable;
}
const call_index = try builder.call(.{
.function = switch (builder.module.values.get(sema_call.value).*) {
.function => |function_index| .{
.index = function_index.index,
.block = function_index.block,
},
else => |t| @panic(@tagName(t)),
},
});
_ = try builder.append(.{
.call = call_index,
});
const value_allocation = try builder.ir.values.append(builder.allocator, .{
.call = call_index,
});
return value_allocation.index;
},
.declaration_reference => |sema_declaration_index| {
const sema_declaration = builder.module.declarations.get(sema_declaration_index);
const sema_init_value = builder.module.values.get(sema_declaration.init_value);
const init_type = sema_init_value.getType(builder.module);
_ = init_type;
switch (sema_declaration.scope_type) {
.local => {
const stack_reference = builder.currentFunction().stack_map.get(sema_declaration_index).?;
return stack_reference;
},
.global => unreachable,
}
// switch (sema_declaration.*) {
// else => |t| @panic(@tagName(t)),
// }
},
}),
else => |t| @panic(@tagName(t)),
};
}
fn jump(builder: *Builder, jump_descriptor: Jump) !Jump.Index {
const destination_block = builder.ir.blocks.get(jump_descriptor.destination);
fn call(builder: *Builder, descriptor: Call) !Call.Index {
const call_allocation = try builder.ir.calls.append(builder.allocator, descriptor);
return call_allocation.index;
}
fn jump(builder: *Builder, descriptor: Jump) !Jump.Index {
const destination_block = builder.ir.blocks.get(descriptor.destination);
assert(!destination_block.sealed);
return try builder.ir.jumps.append(builder.allocator, jump_descriptor);
const jump_allocation = try builder.ir.jumps.append(builder.allocator, descriptor);
return jump_allocation.index;
}
fn append(builder: *Builder, instruction: Instruction) !Instruction.Index {
assert(builder.current_basic_block.valid);
return builder.appendToBlock(builder.current_basic_block, instruction);
assert(builder.current_function_index.valid);
const current_function = builder.currentFunction();
assert(current_function.current_basic_block.valid);
return builder.appendToBlock(current_function.current_basic_block, instruction);
}
fn appendToBlock(builder: *Builder, block_index: BasicBlock.Index, instruction: Instruction) !Instruction.Index {
const instruction_index = try builder.ir.instructions.append(builder.allocator, instruction);
try builder.ir.blocks.get(block_index).instructions.append(builder.allocator, instruction_index);
if (instruction == .phi) {
print("Adding phi: {}\n", .{instruction});
}
const instruction_allocation = try builder.ir.instructions.append(builder.allocator, instruction);
try builder.ir.blocks.get(block_index).instructions.append(builder.allocator, instruction_allocation.index);
return instruction_index;
return instruction_allocation.index;
}
fn newBlock(builder: *Builder) !BasicBlock.Index {
const new_block_index = try builder.ir.blocks.append(builder.allocator, .{});
const new_block_allocation = try builder.ir.blocks.append(builder.allocator, .{});
const current_function = builder.ir.functions.get(builder.current_function_index);
const function_block_index = current_function.blocks.items.len;
try current_function.blocks.append(builder.allocator, new_block_index);
try current_function.blocks.append(builder.allocator, new_block_allocation.index);
print("Adding block: {}\n", .{function_block_index});
return new_block_index;
return new_block_allocation.index;
}
};

File diff suppressed because it is too large.

View File

@ -2,6 +2,7 @@ const std = @import("std");
const assert = std.debug.assert;
pub const Allocator = std.mem.Allocator;
pub const AutoArrayHashMap = std.AutoArrayHashMapUnmanaged;
pub const ArrayList = std.ArrayListUnmanaged;
pub const AutoHashMap = std.AutoHashMapUnmanaged;
pub const HashMap = std.HashMapUnmanaged;
@ -36,8 +37,8 @@ pub fn BlockList(comptime T: type) type {
const List = @This();
pub const Index = packed struct(u32) {
block: u24,
index: u6,
block: u24,
_reserved: bool = false,
valid: bool = true,
@ -50,6 +51,11 @@ pub fn BlockList(comptime T: type) type {
pub fn eq(index: Index, other: Index) bool {
return @as(u32, @bitCast(index)) == @as(u32, @bitCast(other));
}
pub fn uniqueInteger(index: Index) u32 {
assert(index.valid);
return @as(u30, @truncate(@as(u32, @bitCast(index))));
}
};
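(Editor's note, not part of the diff: a hedged sketch of the reordered packed Index layout and uniqueInteger above. Packed struct fields start at the least significant bit, so index fills bits 0-5, block bits 6-29, and the truncation drops the two flag bits.)

const std = @import("std");

const Index = packed struct(u32) {
    index: u6,
    block: u24,
    _reserved: bool = false,
    valid: bool = true,
};

test "uniqueInteger packs block above index" {
    const idx = Index{ .index = 3, .block = 2 };
    const unique = @as(u30, @truncate(@as(u32, @bitCast(idx))));
    try std.testing.expectEqual(@as(u32, 3 | (2 << 6)), unique);
}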
pub const Iterator = struct {
@ -81,6 +87,11 @@ pub fn BlockList(comptime T: type) type {
}
};
pub const Allocation = struct {
ptr: *T,
index: Index,
};
pub fn iterator(list: *const List) Iterator {
return .{
.block_index = 0,
@ -94,33 +105,50 @@ pub fn BlockList(comptime T: type) type {
return &list.blocks.items[index.block].items[index.index];
}
pub fn append(list: *List, allocator: Allocator, element: T) !Index {
pub fn append(list: *List, allocator: Allocator, element: T) !Allocation {
const result = try list.addOne(allocator);
result.ptr.* = element;
return result;
}
pub fn addOne(list: *List, allocator: Allocator) !Allocation {
try list.ensureCapacity(allocator, list.len + 1);
const max_allocation = list.blocks.items.len * item_count;
if (list.len < max_allocation) {
// Follow the guess
if (list.blocks.items[list.first_block].allocateIndex()) |index| {
list.blocks.items[list.first_block].items[index] = element;
list.len += 1;
return .{
const result = switch (list.len < max_allocation) {
true => blk: {
const block = &list.blocks.items[list.first_block];
if (block.allocateIndex()) |index| {
const ptr = &block.items[index];
break :blk Allocation{
.ptr = ptr,
.index = .{
.index = index,
.block = @intCast(list.first_block),
},
};
} else |_| {
@panic("TODO");
}
} else {
},
false => blk: {
const block_index = list.blocks.items.len;
const new_block = list.blocks.addOneAssumeCapacity();
new_block.* = .{};
const index = new_block.allocateIndex() catch unreachable;
new_block.items[index] = element;
list.len += 1;
return .{
const ptr = &new_block.items[index];
break :blk Allocation{
.ptr = ptr,
.index = .{
.index = index,
.block = @intCast(block_index),
},
};
}
},
};
list.len += 1;
return result;
}
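(Editor's note, not part of the diff: a hedged usage sketch of the Allocation-returning append introduced above. It assumes this file's BlockList is in scope; the Node element type and values are made up for illustration.)

const std = @import("std");

const Node = struct { value: u32 };

// Assumes BlockList from this file is in scope.
fn example(allocator: std.mem.Allocator) !void {
    var list = BlockList(Node){};
    const allocation = try list.append(allocator, .{ .value = 42 });
    // `ptr` is stable storage inside a block; `index` can fetch it again later.
    allocation.ptr.value += 1;
    std.debug.assert(list.get(allocation.index).value == 43);
}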
pub fn ensureCapacity(list: *List, allocator: Allocator, new_capacity: usize) !void {
@ -131,6 +159,24 @@ pub fn BlockList(comptime T: type) type {
}
}
pub fn indexOf(list: *List, elem: *T) Index {
const address = @intFromPtr(elem);
std.debug.print("Items: {}. Block count: {}\n", .{ list.len, list.blocks.items.len });
for (list.blocks.items, 0..) |*block, block_index| {
const base = @intFromPtr(&block.items[0]);
const top = base + @sizeOf(T) * item_count;
std.debug.print("Bitset: {}. address: 0x{x}. Base: 0x{x}. Top: 0x{x}\n", .{ block.bitset, address, base, top });
if (address >= base and address < top) {
return .{
.block = @intCast(block_index),
.index = @intCast(@divExact(address - base, @sizeOf(T))),
};
}
}
@panic("not found");
}
test "Bitset index allocation" {
const expect = std.testing.expect;
var block = Block{};

View File

@ -34,6 +34,9 @@ pub const Token = packed struct(u64) {
fixed_keyword_false = 0x0d,
fixed_keyword_fn = 0x0e,
fixed_keyword_unreachable = 0x0f,
fixed_keyword_return = 0x10,
keyword_unsigned_integer = 0x1f,
keyword_signed_integer = 0x20,
bang = '!', // 0x21
hash = '#', // 0x23
dollar_sign = '$', // 0x24
@ -82,6 +85,7 @@ pub const FixedKeyword = enum {
false,
@"fn",
@"unreachable",
@"return",
};
pub const Result = struct {
@ -109,8 +113,9 @@ pub fn analyze(allocator: Allocator, text: []const u8) !Result {
break;
}
const identifier = text[start_index..][0 .. index - start_index];
std.debug.print("Identifier: {s}\n", .{identifier});
// const identifier = text[start_index..][0 .. index - start_index];
// _ = identifier;
// std.debug.print("Identifier: {s}\n", .{identifier});
if (start_character == 'u' or start_character == 's') {
var index_integer = start_index + 1;
@ -119,7 +124,13 @@ pub fn analyze(allocator: Allocator, text: []const u8) !Result {
}
if (index_integer == index) {
unreachable;
const id: Token.Id = switch (start_character) {
'u' => .keyword_unsigned_integer,
's' => .keyword_signed_integer,
else => unreachable,
};
break :blk id;
}
}
@ -127,7 +138,7 @@ pub fn analyze(allocator: Allocator, text: []const u8) !Result {
inline else => |comptime_fixed_keyword| @field(Token.Id, "fixed_keyword_" ++ @tagName(comptime_fixed_keyword)),
} else .identifier;
},
'(', ')', '{', '}', '-', '=', ';', '#', '@', ',' => |operator| blk: {
'(', ')', '{', '}', '-', '=', ';', '#', '@', ',', '.' => |operator| blk: {
index += 1;
break :blk @enumFromInt(operator);
},

View File

@ -7,6 +7,7 @@ const File = Compilation.File;
const Module = Compilation.Module;
const Package = Compilation.Package;
const ArgumentList = Compilation.ArgumentList;
const Assignment = Compilation.Assignment;
const Block = Compilation.Block;
const Declaration = Compilation.Declaration;
@ -14,6 +15,7 @@ const Field = Compilation.Field;
const Function = Compilation.Function;
const Loop = Compilation.Loop;
const Scope = Compilation.Scope;
const ScopeType = Compilation.ScopeType;
const Struct = Compilation.Struct;
const Type = Compilation.Type;
const Value = Compilation.Value;
@ -33,35 +35,67 @@ const HashMap = data_structures.AutoHashMap;
const print = std.debug.print;
const Analyzer = struct {
source_code: []const u8,
nodes: []const Node,
tokens: []const Token,
file: *File,
allocator: Allocator,
module: *Module,
current_file: File.Index,
fn lazyGlobalDeclaration(analyzer: *Analyzer, node_index: Node.Index) void {
print("Global: {}", .{analyzer.nodes[node_index.unwrap()]});
fn getSourceFile(analyzer: *Analyzer, scope_index: Scope.Index) []const u8 {
const scope = analyzer.module.scopes.get(scope_index);
const file = analyzer.module.files.get(scope.file);
return file.source_code;
}
fn comptimeBlock(analyzer: *Analyzer, scope: *Scope, node_index: Node.Index) !Value.Index {
const comptime_node = analyzer.nodes[node_index.unwrap()];
fn getNode(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) Node {
const scope = analyzer.module.scopes.get(scope_index);
const file = analyzer.module.files.get(scope.file);
const result = file.syntactic_analyzer_result.nodes.items[node_index.unwrap()];
return result;
}
const comptime_block = try analyzer.block(scope, .{ .none = {} }, comptime_node.left);
return try analyzer.module.values.append(analyzer.allocator, .{
fn getToken(analyzer: *Analyzer, scope_index: Scope.Index, token_index: Token.Index) Token {
const scope = analyzer.module.scopes.get(scope_index);
const file = analyzer.module.files.get(scope.file);
const result = file.lexical_analyzer_result.tokens.items[token_index];
return result;
}
fn getNodeList(analyzer: *Analyzer, scope_index: Scope.Index, list_index: u32) ArrayList(Node.Index) {
const scope = analyzer.module.scopes.get(scope_index);
const file = analyzer.module.files.get(scope.file);
return file.syntactic_analyzer_result.node_lists.items[list_index];
}
fn comptimeBlock(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !Value.Index {
const comptime_node = analyzer.getNode(scope_index, node_index);
const comptime_block = try analyzer.block(scope_index, .{ .none = {} }, comptime_node.left);
const value_allocation = try analyzer.module.values.append(analyzer.allocator, .{
.block = comptime_block,
});
return value_allocation.index;
}
fn assign(analyzer: *Analyzer, scope: *Scope, node_index: Node.Index) !Assignment.Index {
_ = node_index;
_ = scope;
_ = analyzer;
fn unresolved(analyzer: *Analyzer, node_index: Node.Index) !Value.Allocation {
const value_allocation = try analyzer.module.values.addOne(analyzer.allocator);
value_allocation.ptr.* = .{
.unresolved = .{
.node_index = node_index,
},
};
return value_allocation;
}
fn block(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, node_index: Node.Index) anyerror!Block.Index {
fn unresolvedAllocate(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) !Value.Allocation {
const new = try analyzer.unresolved(node_index);
try analyzer.resolveNode(new.ptr, scope_index, expect_type, node_index);
return new;
}
fn block(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) anyerror!Block.Index {
var reaches_end = true;
const block_node = analyzer.nodes[node_index.unwrap()];
const block_node = analyzer.getNode(scope_index, node_index);
var statement_nodes = ArrayList(Node.Index){};
switch (block_node.id) {
.block_one, .comptime_block_one => {
@ -72,12 +106,13 @@ const Analyzer = struct {
try statement_nodes.append(analyzer.allocator, block_node.left);
try statement_nodes.append(analyzer.allocator, block_node.right);
},
.block, .comptime_block => statement_nodes = analyzer.getNodeList(scope_index, block_node.left.unwrap()),
else => |t| @panic(@tagName(t)),
}
const is_comptime = switch (block_node.id) {
.comptime_block_zero, .comptime_block_one, .comptime_block_two => true,
.block_zero, .block_one, .block_two => false,
.comptime_block, .comptime_block_zero, .comptime_block_one, .comptime_block_two => true,
.block, .block_zero, .block_one, .block_two => false,
else => |t| @panic(@tagName(t)),
};
print("Is comptime: {}\n", .{is_comptime});
@ -89,7 +124,7 @@ const Analyzer = struct {
unreachable;
}
const statement_node = analyzer.nodes[statement_node_index.unwrap()];
const statement_node = analyzer.getNode(scope_index, statement_node_index);
const statement_value = switch (statement_node.id) {
inline .assign, .simple_while => |statement_id| blk: {
const specific_value_index = switch (statement_id) {
@ -99,17 +134,23 @@ const Analyzer = struct {
switch (statement_node.left.valid) {
// In an assignment, the node being invalid means a discarding underscore, like this: ```_ = result```
false => {
const right = try analyzer.expression(scope, ExpectType.none, statement_node.right);
try statements.append(analyzer.allocator, right);
const right_value_allocation = try analyzer.module.values.addOne(analyzer.allocator);
right_value_allocation.ptr.* = .{
.unresolved = .{
.node_index = statement_node.right,
},
};
try analyzer.resolveNode(right_value_allocation.ptr, scope_index, ExpectType.none, statement_node.right);
// switch (right_value_allocation.ptr.*) {
// else => |t| std.debug.print("\n\n\n\n\nASSIGN RIGHT: {s}\n\n\n\n", .{@tagName(t)}),
// }
try statements.append(analyzer.allocator, right_value_allocation.index);
continue;
},
true => {
const left_node = analyzer.nodes[statement_node.left.unwrap()];
print("left node index: {}. Left node: {}\n", .{ statement_node.left, left_node });
// const id = analyzer.tokenIdentifier(.token);
// print("id: {s}\n", .{id});
const left = try analyzer.expression(scope, ExpectType.none, statement_node.left);
_ = left;
// const left = try analyzer.expression(scope_index, ExpectType.none, statement_node.left);
// if (analyzer.module.values.get(left).isComptime() and analyzer.module.values.get(right).isComptime()) {
// unreachable;
@ -125,20 +166,18 @@ const Analyzer = struct {
}
},
.simple_while => statement: {
const loop_index = try analyzer.module.loops.append(analyzer.allocator, .{
const loop_allocation = try analyzer.module.loops.append(analyzer.allocator, .{
.condition = Value.Index.invalid,
.body = Value.Index.invalid,
.breaks = false,
});
const loop_structure = analyzer.module.loops.get(loop_index);
const while_condition = try analyzer.expression(scope, ExpectType.boolean, statement_node.left);
const while_body = try analyzer.expression(scope, expect_type, statement_node.right);
loop_structure.condition = while_condition;
loop_structure.body = while_body;
loop_allocation.ptr.condition = (try analyzer.unresolvedAllocate(scope_index, ExpectType.boolean, statement_node.left)).index;
loop_allocation.ptr.body = (try analyzer.unresolvedAllocate(scope_index, ExpectType.none, statement_node.right)).index;
reaches_end = loop_structure.breaks or while_condition.valid;
// TODO: bool true
reaches_end = loop_allocation.ptr.breaks or unreachable;
break :statement loop_index;
break :statement loop_allocation.index;
},
else => unreachable,
};
@ -147,62 +186,87 @@ const Analyzer = struct {
.simple_while => "loop",
else => unreachable,
}, specific_value_index);
const value_index = try analyzer.module.values.append(analyzer.allocator, value);
break :blk value_index;
const value_allocation = try analyzer.module.values.append(analyzer.allocator, value);
break :blk value_allocation.index;
},
.@"unreachable" => blk: {
reaches_end = false;
break :blk Values.@"unreachable".getIndex();
},
.simple_variable_declaration => (try analyzer.module.values.append(analyzer.allocator, .{
.declaration = try analyzer.symbolDeclaration(scope_index, statement_node_index, .local),
})).index,
.@"return" => blk: {
reaches_end = false;
const return_expression: Value.Index = switch (statement_node_index.valid) {
// TODO: expect type
true => ret: {
const return_value_allocation = try analyzer.module.values.addOne(analyzer.allocator);
return_value_allocation.ptr.* = .{
.unresolved = .{
.node_index = statement_node.left,
},
};
try analyzer.resolveNode(return_value_allocation.ptr, scope_index, expect_type, statement_node.left);
break :ret return_value_allocation.index;
},
false => @panic("TODO: ret void"),
};
const return_value_allocation = try analyzer.module.returns.append(analyzer.allocator, .{
.value = return_expression,
});
const return_expression_value_allocation = try analyzer.module.values.append(analyzer.allocator, .{
.@"return" = return_value_allocation.index,
});
break :blk return_expression_value_allocation.index;
},
else => |t| @panic(@tagName(t)),
};
try statements.append(analyzer.allocator, statement_value);
}
return try analyzer.module.blocks.append(analyzer.allocator, .{
const block_allocation = try analyzer.module.blocks.append(analyzer.allocator, .{
.statements = statements,
.reaches_end = reaches_end,
});
return block_allocation.index;
}
fn whileExpression(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, node: Node) !Loop.Index {
_ = node;
_ = expect_type;
_ = scope;
_ = analyzer;
}
fn resolve(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, value: *Value) !void {
const node_index = switch (value.*) {
.unresolved => |unresolved| unresolved.node_index,
else => |t| @panic(@tagName(t)),
};
value.* = try analyzer.resolveNode(scope, expect_type, node_index);
}
fn doIdentifier(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, node: Node) !Value.Index {
assert(node.id == .identifier);
const identifier_hash = try analyzer.identifierFromToken(node.token);
fn doIdentifier(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_token: Token.Index, node_scope_index: Scope.Index) !Value.Index {
const identifier_hash = try analyzer.identifierFromToken(node_scope_index, node_token);
const scope = analyzer.module.scopes.get(scope_index);
// TODO: search in upper scopes too
const identifier_scope_lookup = try scope.declarations.getOrPut(analyzer.allocator, identifier_hash);
if (identifier_scope_lookup.found_existing) {
const declaration_index = identifier_scope_lookup.value_ptr.*;
const declaration = analyzer.module.declarations.get(declaration_index);
const init_value = analyzer.module.values.get(declaration.init_value);
try analyzer.resolve(scope, expect_type, init_value);
if (init_value.* != .runtime and declaration.mutability == .@"const") {
print("Declaration found: {}\n", .{init_value});
switch (init_value.*) {
.unresolved => |ur| try analyzer.resolveNode(init_value, scope_index, expect_type, ur.node_index),
else => {},
}
if (init_value.isComptime() and declaration.mutability == .@"const") {
return declaration.init_value;
} else {
unreachable;
const ref_allocation = try analyzer.module.values.append(analyzer.allocator, .{
.declaration_reference = declaration_index,
});
return ref_allocation.index;
}
} else {
@panic("TODO: not found");
std.debug.panic("Identifier not found in scope #{} of file #{} referenced by scope #{} of file #{}: {s}", .{ scope_index.uniqueInteger(), scope.file.uniqueInteger(), node_scope_index.uniqueInteger(), analyzer.module.scopes.get(node_scope_index).file.uniqueInteger(), tokenBytes(analyzer.getToken(scope_index, node_token), analyzer.getSourceFile(scope_index)) });
}
}
fn getArguments(analyzer: *Analyzer, node_index: Node.Index) !ArrayList(Node.Index) {
fn getArguments(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !ArrayList(Node.Index) {
var arguments = ArrayList(Node.Index){};
const node = analyzer.nodes[node_index.unwrap()];
const node = analyzer.getNode(scope_index, node_index);
switch (node.id) {
.compiler_intrinsic_two => {
try arguments.append(analyzer.allocator, node.left);
@ -214,48 +278,76 @@ const Analyzer = struct {
return arguments;
}
fn resolveNode(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, node_index: Node.Index) anyerror!Value {
const node = analyzer.nodes[node_index.unwrap()];
return switch (node.id) {
.identifier => unreachable,
fn resolveNode(analyzer: *Analyzer, value: *Value, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) anyerror!void {
const node = analyzer.getNode(scope_index, node_index);
print("Resolving node #{}: {}\n", .{ node_index.uniqueInteger(), node });
assert(value.* == .unresolved);
value.* = switch (node.id) {
.identifier => blk: {
const value_index = try analyzer.doIdentifier(scope_index, expect_type, node.token, scope_index);
const value_ref = analyzer.module.values.get(value_index);
break :blk value_ref.*;
},
.keyword_true => {
switch (expect_type) {
.none => {},
.type_index => |expected_type| {
if (@as(u32, @bitCast(type_boolean)) != @as(u32, @bitCast(expected_type))) {
@panic("TODO: compile error");
}
},
else => unreachable,
}
// TODO
unreachable;
// break :blk Values.getIndex(.bool_true);
},
.compiler_intrinsic_one, .compiler_intrinsic_two => blk: {
const intrinsic_name = analyzer.tokenIdentifier(node.token + 1);
const intrinsic_name = analyzer.tokenIdentifier(scope_index, node.token + 1);
const intrinsic = data_structures.enumFromString(Intrinsic, intrinsic_name) orelse unreachable;
print("Intrinsic: {s}\n", .{@tagName(intrinsic)});
switch (intrinsic) {
.import => {
assert(node.id == .compiler_intrinsic_one);
const import_argument = analyzer.nodes[node.left.unwrap()];
const import_argument = analyzer.getNode(scope_index, node.left);
switch (import_argument.id) {
.string_literal => {
const import_name = analyzer.tokenStringLiteral(import_argument.token);
const imported_file = try analyzer.module.importFile(analyzer.allocator, analyzer.file, import_name);
const import_name = analyzer.tokenStringLiteral(scope_index, import_argument.token);
const import_file = try analyzer.module.importFile(analyzer.allocator, analyzer.current_file, import_name);
if (imported_file.is_new) {
if (import_file.file.is_new) {
// TODO: fix error
try analyzer.module.generateAbstractSyntaxTreeForFile(analyzer.allocator, imported_file.file);
try analyzer.module.generateAbstractSyntaxTreeForFile(analyzer.allocator, import_file.file.ptr);
} else {
unreachable;
}
break :blk .{
.type = try analyzeFile(analyzer.allocator, analyzer.module, imported_file.file),
.type = try analyzeFile(value, analyzer.allocator, analyzer.module, import_file.file.ptr, import_file.file.index),
};
},
else => unreachable,
}
},
.syscall => {
var argument_nodes = try analyzer.getArguments(node_index);
var argument_nodes = try analyzer.getArguments(scope_index, node_index);
print("Argument count: {}\n", .{argument_nodes.items.len});
if (argument_nodes.items.len > 0 and argument_nodes.items.len <= 6 + 1) {
const number = try analyzer.expression(scope, ExpectType.none, argument_nodes.items[0]);
const number_allocation = try analyzer.unresolvedAllocate(scope_index, .{
.flexible_integer = .{
.byte_count = 8,
},
}, argument_nodes.items[0]);
const number = number_allocation.index;
assert(number.valid);
var arguments = std.mem.zeroes([6]Value.Index);
for (argument_nodes.items[1..], 0..) |argument_node_index, argument_index| {
const argument = try analyzer.expression(scope, ExpectType.none, argument_node_index);
print("Index: {}. Argument: {}\n", .{ argument_index, argument });
arguments[argument_index] = argument;
const argument_allocation = try analyzer.unresolvedAllocate(scope_index, ExpectType.none, argument_node_index);
arguments[argument_index] = argument_allocation.index;
}
// TODO: typecheck for usize
@ -264,11 +356,11 @@ const Analyzer = struct {
}
break :blk .{
.syscall = try analyzer.module.syscalls.append(analyzer.allocator, .{
.syscall = (try analyzer.module.syscalls.append(analyzer.allocator, .{
.number = number,
.arguments = arguments,
.argument_count = @intCast(argument_nodes.items.len - 1),
}),
})).index,
};
} else {
unreachable;
@ -278,55 +370,100 @@ const Analyzer = struct {
unreachable;
},
.function_definition => blk: {
const function_prototype_index = try analyzer.functionPrototype(node.left);
const function_prototype_index = try analyzer.functionPrototype(scope_index, node.left);
const function_body = try analyzer.block(scope, .{
const function_body = try analyzer.block(scope_index, .{
.type_index = analyzer.functionPrototypeReturnType(function_prototype_index),
}, node.right);
const function_index = try analyzer.module.functions.append(analyzer.allocator, .{
const function_allocation = try analyzer.module.functions.append(analyzer.allocator, .{
.prototype = function_prototype_index,
.body = function_body,
});
break :blk .{
.function = function_index,
.function = function_allocation.index,
};
},
.keyword_true => unreachable,
.simple_while => unreachable,
.block_zero, .block_one => blk: {
const block_index = try analyzer.block(scope, expect_type, node_index);
const block_index = try analyzer.block(scope_index, expect_type, node_index);
break :blk .{
.block = block_index,
};
},
.number_literal => switch (std.zig.parseNumberLiteral(analyzer.tokenBytes(analyzer.tokens[node.token]))) {
.int => |integer| .{
.integer = integer,
.number_literal => switch (std.zig.parseNumberLiteral(analyzer.numberBytes(scope_index, node.token))) {
.int => |integer| blk: {
assert(expect_type != .none);
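// The expected type determines how the literal is materialized: a flexible integer of N bytes becomes an unsigned N*8-bit integer (byte_count << 3), while a concrete type index must already refer to an integer type.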
const int_type = switch (expect_type) {
.flexible_integer => |flexible_integer_type| Compilation.Type.Integer{
.bit_count = flexible_integer_type.byte_count << 3,
.signedness = .unsigned,
},
.type_index => |type_index| a: {
const type_info = analyzer.module.types.get(type_index);
break :a switch (type_info.*) {
.integer => |int| int,
else => |t| @panic(@tagName(t)),
};
},
else => |t| @panic(@tagName(t)),
};
}
fn expression(analyzer: *Analyzer, scope: *Scope, expect_type: ExpectType, node_index: Node.Index) !Value.Index {
const node = analyzer.nodes[node_index.unwrap()];
return switch (node.id) {
.identifier => analyzer.doIdentifier(scope, expect_type, node),
.keyword_true => blk: {
switch (expect_type) {
.none => {},
.type_index => |expected_type| {
if (@as(u32, @bitCast(type_boolean)) != @as(u32, @bitCast(expected_type))) {
@panic("TODO: compile error");
}
break :blk .{
.integer = .{
.value = integer,
.type = int_type,
},
}
break :blk Values.getIndex(.bool_true);
};
},
else => try analyzer.module.values.append(analyzer.allocator, try analyzer.resolveNode(scope, expect_type, node_index)),
else => |t| @panic(@tagName(t)),
},
.call_one => blk: {
const this_value_node_index = node.left;
const this_value_allocation = try analyzer.unresolvedAllocate(scope_index, ExpectType.none, this_value_node_index);
const value_type = switch (this_value_allocation.ptr.*) {
.function => |function_index| analyzer.module.function_prototypes.get(analyzer.module.functions.get(function_index).prototype).return_type,
else => |t| @panic(@tagName(t)),
};
const call_allocation = try analyzer.module.calls.append(analyzer.allocator, .{
.value = this_value_allocation.index,
.arguments = ArgumentList.Index.invalid,
.type = value_type,
});
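// Argument lists are not handled yet (ArgumentList.Index.invalid); the call's type is taken from the callee prototype's return type.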
break :blk .{
.call = call_allocation.index,
};
},
.field_access => blk: {
const left_allocation = try analyzer.unresolvedAllocate(scope_index, ExpectType.none, node.left);
const identifier = analyzer.tokenIdentifier(scope_index, node.right.value);
_ = identifier;
switch (left_allocation.ptr.*) {
.type => |type_index| {
const left_type = analyzer.module.types.get(type_index);
switch (left_type.*) {
.@"struct" => |struct_index| {
const struct_type = analyzer.module.structs.get(struct_index);
const right_index = try analyzer.doIdentifier(struct_type.scope, ExpectType.none, node.right.value, scope_index);
const right_value = analyzer.module.values.get(right_index);
switch (right_value.*) {
.function => break :blk right_value.*,
else => unreachable,
}
print("Right: {}\n", .{right_value});
// struct_scope.declarations.get(identifier);
unreachable;
},
else => |t| @panic(@tagName(t)),
}
unreachable;
},
else => |t| @panic(@tagName(t)),
}
unreachable;
},
else => |t| @panic(@tagName(t)),
};
}
@ -335,92 +472,99 @@ const Analyzer = struct {
return function_prototype.return_type;
}
fn functionPrototype(analyzer: *Analyzer, node_index: Node.Index) !Function.Prototype.Index {
const node = analyzer.nodes[node_index.unwrap()];
fn functionPrototype(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !Function.Prototype.Index {
const node = analyzer.getNode(scope_index, node_index);
switch (node.id) {
.simple_function_prototype => {
const arguments: ?[]const Field.Index = blk: {
const argument_node = analyzer.nodes[node.left.get() orelse break :blk null];
if (node.left.get() == null) break :blk null;
const argument_node = analyzer.getNode(scope_index, node.left);
switch (argument_node.id) {
else => |t| @panic(@tagName(t)),
}
};
const return_type_node = analyzer.nodes[node.right.unwrap()];
const return_type_node = analyzer.getNode(scope_index, node.right);
const return_type: Type.Index = switch (return_type_node.id) {
.identifier => {
unreachable;
},
.keyword_noreturn => .{ .block = 0, .index = FixedTypeKeyword.offset + @intFromEnum(FixedTypeKeyword.noreturn) },
inline .signed_integer_type, .unsigned_integer_type => |int_type_signedness| blk: {
const bit_count: u16 = @intCast(return_type_node.left.value);
print("Bit count: {}\n", .{bit_count});
break :blk switch (bit_count) {
inline 8, 16, 32, 64 => |hardware_bit_count| Type.Index{
.block = 0,
.index = @ctz(hardware_bit_count) - @ctz(@as(u8, 8)) + switch (int_type_signedness) {
.signed_integer_type => HardwareSignedIntegerType,
.unsigned_integer_type => HardwareUnsignedIntegerType,
else => unreachable,
}.offset,
},
else => unreachable,
};
},
else => |t| @panic(@tagName(t)),
};
return try analyzer.module.function_prototypes.append(analyzer.allocator, .{
const function_prototype_allocation = try analyzer.module.function_prototypes.append(analyzer.allocator, .{
.arguments = arguments,
.return_type = return_type,
});
return function_prototype_allocation.index;
},
else => |t| @panic(@tagName(t)),
}
}
fn analyzeDeclaration(analyzer: *Analyzer, scope: *Scope, declaration: *Declaration) !Value.Index {
_ = declaration;
_ = scope;
_ = analyzer;
// switch (declaration.*) {
// .unresolved => |node_index| {
// const declaration_node = analyzer.nodes[node_index.unwrap()];
// return switch (declaration_node.id) {
// .simple_variable_declaration => blk: {
// const expect_type = switch (declaration_node.left.valid) {
// true => unreachable,
// false => @unionInit(ExpectType, "none", {}),
// };
//
// const initialization_expression = try analyzer.expression(scope, expect_type, declaration_node.right);
// const value = analyzer.module.values.get(initialization_expression);
// if (value.is_comptime and value.is_const) {
// break :blk initialization_expression;
// }
//
// unreachable;
// },
// else => |t| @panic(@tagName(t)),
// };
// },
// .struct_type => unreachable,
// }
fn structType(analyzer: *Analyzer, value: *Value, parent_scope_index: Scope.Index, index: Node.Index, file_index: File.Index) !Type.Index {
var node_buffer: [2]Node.Index = undefined;
// The file index is passed in explicitly because this might be the first file being analyzed, before any scope exists for it
const file = analyzer.module.files.get(file_index);
const node = file.syntactic_analyzer_result.nodes.items[index.unwrap()];
const nodes = switch (node.id) {
.main_one => blk: {
node_buffer[0] = node.left;
break :blk node_buffer[0..1];
},
.main_two => blk: {
node_buffer[0] = node.left;
node_buffer[1] = node.right;
break :blk &node_buffer;
},
else => |t| @panic(@tagName(t)),
};
@panic("TODO: analyzeDeclaration");
}
fn structType(analyzer: *Analyzer, parent_scope: Scope.Index, container_declaration: syntactic_analyzer.ContainerDeclaration, index: Node.Index) !Type.Index {
_ = index;
const new_scope = try analyzer.allocateScope(.{ .parent = parent_scope });
if (nodes.len > 0) {
const new_scope = try analyzer.allocateScope(.{
.parent = parent_scope_index,
.file = file_index,
});
const scope = new_scope.ptr;
const scope_index = new_scope.index;
const is_file = !parent_scope.valid;
const is_file = !parent_scope_index.valid;
assert(is_file);
const struct_index = try analyzer.module.structs.append(analyzer.allocator, .{
const struct_allocation = try analyzer.module.structs.append(analyzer.allocator, .{
.scope = new_scope.index,
});
const struct_type = analyzer.module.structs.get(struct_index);
const type_index = try analyzer.module.types.append(analyzer.allocator, .{
.@"struct" = struct_index,
const type_allocation = try analyzer.module.types.append(analyzer.allocator, .{
.@"struct" = struct_allocation.index,
});
scope.type = type_index;
_ = struct_type;
assert(container_declaration.members.len > 0);
scope.type = type_allocation.index;
value.* = .{
.type = type_allocation.index,
};
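// Scope and struct type reference each other: scope.type lets identifier lookups recover the container type, and the struct's scope is what field_access resolution later walks to find member declarations.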
const count = blk: {
var result: struct {
fields: u32 = 0,
declarations: u32 = 0,
} = .{};
for (container_declaration.members) |member_index| {
const member = analyzer.nodes[member_index.unwrap()];
for (nodes) |member_index| {
const member = analyzer.getNode(scope_index, member_index);
const member_type = getContainerMemberType(member.id);
switch (member_type) {
@ -434,8 +578,8 @@ const Analyzer = struct {
var declaration_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.declarations);
var field_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.fields);
for (container_declaration.members) |member_index| {
const member = analyzer.nodes[member_index.unwrap()];
for (nodes) |member_index| {
const member = analyzer.getNode(scope_index, member_index);
const member_type = getContainerMemberType(member.id);
const array_list = switch (member_type) {
.declaration => &declaration_nodes,
@ -445,27 +589,59 @@ const Analyzer = struct {
}
for (declaration_nodes.items) |declaration_node_index| {
const declaration_node = analyzer.nodes[declaration_node_index.unwrap()];
const declaration_node = analyzer.getNode(scope_index, declaration_node_index);
switch (declaration_node.id) {
.@"comptime" => {},
.simple_variable_declaration => {
const mutability: Compilation.Mutability = switch (analyzer.tokens[declaration_node.token].id) {
.simple_variable_declaration => _ = try analyzer.symbolDeclaration(scope_index, declaration_node_index, .global),
else => unreachable,
}
}
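// Declarations are registered in the first pass above so that the comptime blocks analyzed in the second pass below can reference symbols regardless of their order in the file.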
// TODO: consider iterating over scope declarations instead?
for (declaration_nodes.items) |declaration_node_index| {
const declaration_node = analyzer.getNode(scope_index, declaration_node_index);
switch (declaration_node.id) {
.@"comptime" => _ = try analyzer.comptimeBlock(scope_index, declaration_node_index),
.simple_variable_declaration => {},
else => |t| @panic(@tagName(t)),
}
}
for (field_nodes.items) |field_index| {
const field_node = analyzer.getNode(scope_index, field_index);
_ = field_node;
@panic("TODO: fields");
}
return type_allocation.index;
} else {
return Type.Index.invalid;
}
}
fn symbolDeclaration(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index, scope_type: ScopeType) !Declaration.Index {
const declaration_node = analyzer.getNode(scope_index, node_index);
assert(declaration_node.id == .simple_variable_declaration);
assert(!declaration_node.left.valid);
const mutability: Compilation.Mutability = switch (analyzer.getToken(scope_index, declaration_node.token).id) {
.fixed_keyword_const => .@"const",
.fixed_keyword_var => .@"var",
else => |t| @panic(@tagName(t)),
};
const expected_identifier_token_index = declaration_node.token + 1;
const expected_identifier_token = analyzer.tokens[expected_identifier_token_index];
const expected_identifier_token = analyzer.getToken(scope_index, expected_identifier_token_index);
if (expected_identifier_token.id != .identifier) {
print("Error: found: {}", .{expected_identifier_token.id});
@panic("Expected identifier");
}
// TODO: Check if it is a keyword
const identifier_index = try analyzer.identifierFromToken(expected_identifier_token_index);
const identifier_index = try analyzer.identifierFromToken(scope_index, expected_identifier_token_index);
const declaration_name = analyzer.tokenIdentifier(expected_identifier_token_index);
const declaration_name = analyzer.tokenIdentifier(scope_index, expected_identifier_token_index);
// Check if the symbol name is already occupied in the same scope
const scope = analyzer.module.scopes.get(scope_index);
const scope_lookup = try scope.declarations.getOrPut(analyzer.allocator, identifier_index);
if (scope_lookup.found_existing) {
std.debug.panic("Existing name in lookup: {s}", .{declaration_name});
@ -477,42 +653,22 @@ const Analyzer = struct {
while (upper_scope_index.valid) {
@panic("TODO: upper scope");
}
assert(declaration_node.right.valid);
const container_declaration_index = try analyzer.module.declarations.append(analyzer.allocator, .{
const declaration_allocation = try analyzer.module.declarations.append(analyzer.allocator, .{
.name = declaration_name,
.scope_type = .global,
.scope_type = scope_type,
.mutability = mutability,
.init_value = try analyzer.module.values.append(analyzer.allocator, .{
.init_value = (try analyzer.module.values.append(analyzer.allocator, .{
.unresolved = .{
.node_index = declaration_node.right,
},
}),
})).index,
});
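// The initializer is stored as an unresolved node here and only resolved later, when the declaration is referenced (see doIdentifier).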
scope_lookup.value_ptr.* = container_declaration_index;
},
else => unreachable,
}
}
scope_lookup.value_ptr.* = declaration_allocation.index;
// TODO: consider iterating over scope declarations instead?
for (declaration_nodes.items) |declaration_node_index| {
const declaration_node = analyzer.nodes[declaration_node_index.unwrap()];
switch (declaration_node.id) {
.@"comptime" => _ = try analyzer.comptimeBlock(scope, declaration_node_index),
.simple_variable_declaration => {},
else => |t| @panic(@tagName(t)),
}
}
for (field_nodes.items) |field_index| {
const field_node = analyzer.nodes[field_index.unwrap()];
_ = field_node;
@panic("TODO: fields");
}
return type_index;
return declaration_allocation.index;
}
const MemberType = enum {
@ -528,8 +684,8 @@ const Analyzer = struct {
};
}
fn identifierFromToken(analyzer: *Analyzer, token_index: Token.Index) !u32 {
const identifier = analyzer.tokenIdentifier(token_index);
fn identifierFromToken(analyzer: *Analyzer, scope_index: Scope.Index, token_index: Token.Index) !u32 {
const identifier = analyzer.tokenIdentifier(scope_index, token_index);
const key: u32 = @truncate(std.hash.Wyhash.hash(0, identifier));
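// Identifiers are interned by truncating a 64-bit Wyhash to 32 bits; this key is what the scope declaration maps and the module string table are indexed by.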
const lookup_result = try analyzer.module.string_table.getOrPut(analyzer.allocator, key);
@ -541,46 +697,47 @@ const Analyzer = struct {
}
}
fn tokenIdentifier(analyzer: *Analyzer, token_index: Token.Index) []const u8 {
const token = analyzer.tokens[token_index];
fn tokenIdentifier(analyzer: *Analyzer, scope_index: Scope.Index, token_index: Token.Index) []const u8 {
const token = analyzer.getToken(scope_index, token_index);
assert(token.id == .identifier);
const identifier = analyzer.tokenBytes(token);
const source_file = analyzer.getSourceFile(scope_index);
const identifier = tokenBytes(token, source_file);
return identifier;
}
fn tokenBytes(analyzer: *Analyzer, token: Token) []const u8 {
return analyzer.source_code[token.start..][0..token.len];
fn tokenBytes(token: Token, source_code: []const u8) []const u8 {
return source_code[token.start..][0..token.len];
}
fn tokenStringLiteral(analyzer: *Analyzer, token_index: Token.Index) []const u8 {
const token = analyzer.tokens[token_index];
fn numberBytes(analyzer: *Analyzer, scope_index: Scope.Index, token_index: Token.Index) []const u8 {
const token = analyzer.getToken(scope_index, token_index);
assert(token.id == .number_literal);
const source_file = analyzer.getSourceFile(scope_index);
const bytes = tokenBytes(token, source_file);
return bytes;
}
fn tokenStringLiteral(analyzer: *Analyzer, scope_index: Scope.Index, token_index: Token.Index) []const u8 {
const token = analyzer.getToken(scope_index, token_index);
assert(token.id == .string_literal);
const source_file = analyzer.getSourceFile(scope_index);
// Eat double quotes
const string_literal = analyzer.tokenBytes(token)[1..][0 .. token.len - 2];
const string_literal = tokenBytes(token, source_file)[1..][0 .. token.len - 2];
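// token.len counts both quotes, so dropping the first byte and keeping token.len - 2 bytes strips them.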
return string_literal;
}
const ScopeAllocation = struct {
ptr: *Scope,
index: Scope.Index,
};
fn allocateScope(analyzer: *Analyzer, scope_value: Scope) !ScopeAllocation {
const scope_index = try analyzer.module.scopes.append(analyzer.allocator, scope_value);
const scope = analyzer.module.scopes.get(scope_index);
return .{
.ptr = scope,
.index = scope_index,
};
fn allocateScope(analyzer: *Analyzer, scope_value: Scope) !Scope.Allocation {
return analyzer.module.scopes.append(analyzer.allocator, scope_value);
}
};
const ExpectType = union(enum) {
none,
type_index: Type.Index,
flexible_integer: FlexibleInteger,
pub const none = ExpectType{
.none = {},
@ -588,6 +745,11 @@ const ExpectType = union(enum) {
pub const boolean = ExpectType{
.type_index = type_boolean,
};
const FlexibleInteger = struct {
byte_count: u8,
sign: ?bool = null,
};
};
const type_boolean = Type.Index{
@ -647,7 +809,8 @@ const HardwareSignedIntegerType = enum {
const offset = HardwareUnsignedIntegerType.offset + @typeInfo(HardwareUnsignedIntegerType).Enum.fields.len;
};
pub fn initialize(compilation: *Compilation, module: *Module, package: *Package) !Type.Index {
pub fn initialize(compilation: *Compilation, module: *Module, package: *Package, file_index: File.Index) !Type.Index {
_ = file_index;
inline for (@typeInfo(FixedTypeKeyword).Enum.fields) |enum_field| {
_ = try module.types.append(compilation.base_allocator, @unionInit(Type, enum_field.name, {}));
}
@ -692,58 +855,56 @@ pub fn initialize(compilation: *Compilation, module: *Module, package: *Package)
.@"unreachable" = {},
});
return analyzeExistingPackage(compilation, module, package);
const value_allocation = try module.values.append(compilation.base_allocator, .{
.unresolved = .{
.node_index = .{ .value = 0 },
},
});
const result = analyzeExistingPackage(value_allocation.ptr, compilation, module, package);
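// After analysis, the module entry point is the function bound to the _start declaration; compilation panics if no such declaration exists.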
var decl_iterator = module.declarations.iterator();
while (decl_iterator.nextPointer()) |decl| {
if (equal(u8, decl.name, "_start")) {
const value = module.values.get(decl.init_value);
module.entry_point = switch (value.*) {
.function => |function_index| function_index.uniqueInteger(),
else => |t| @panic(@tagName(t)),
};
break;
}
} else {
@panic("Entry point not found");
}
return result;
}
pub fn analyzeExistingPackage(compilation: *Compilation, module: *Module, package: *Package) !Type.Index {
pub fn analyzeExistingPackage(value: *Value, compilation: *Compilation, module: *Module, package: *Package) !Type.Index {
const package_import = try module.importPackage(compilation.base_allocator, package);
assert(!package_import.is_new);
const package_file = package_import.file;
assert(!package_import.file.is_new);
const package_file = package_import.file.ptr;
const file_index = package_import.file.index;
return try analyzeFile(compilation.base_allocator, module, package_file);
return try analyzeFile(value, compilation.base_allocator, module, package_file, file_index);
}
pub fn analyzeFile(allocator: Allocator, module: *Module, file: *File) !Type.Index {
pub fn analyzeFile(value: *Value, allocator: Allocator, module: *Module, file: *File, file_index: File.Index) !Type.Index {
assert(value.* == .unresolved);
assert(file.status == .parsed);
var analyzer = Analyzer{
.source_code = file.source_code,
.nodes = file.syntactic_analyzer_result.nodes.items,
.tokens = file.lexical_analyzer_result.tokens.items,
.file = file,
.current_file = file_index,
.allocator = allocator,
.module = module,
};
const result = try analyzer.structType(Scope.Index.invalid, try mainNodeToContainerDeclaration(allocator, file), .{ .value = 0 });
var buffer = [2]Node.Index{
Node.Index.invalid,
Node.Index.invalid,
};
_ = buffer;
const result = try analyzer.structType(value, Scope.Index.invalid, .{ .value = 0 }, file_index);
return result;
}
fn mainNodeToContainerDeclaration(allocator: Allocator, file: *File) !ContainerDeclaration {
const main_node = getNode(file, 0);
var list_buffer: [2]Node.Index = undefined;
const left_node = getNode(file, main_node.left.value);
const node_list: []const Node.Index = blk: {
if (left_node.id != .node_list) {
const len = @as(u2, @intFromBool(main_node.left.valid)) + @as(u2, @intFromBool(main_node.right.valid)) - @as(u2, @intFromBool(main_node.left.valid and main_node.right.valid and main_node.left.value == main_node.right.value));
assert(len > 0);
list_buffer[0] = main_node.left;
list_buffer[1] = main_node.right;
break :blk list_buffer[0..len];
} else {
@panic("TODO: get list");
}
};
const owned_node_list = try allocator.alloc(Node.Index, node_list.len);
@memcpy(owned_node_list, node_list);
// Deal properly with this allocation
return .{
.members = owned_node_list,
};
}
fn getNode(file: *const File, index: u32) *Node {
return &file.syntactic_analyzer_result.nodes.items[index];
}

View File

@ -14,6 +14,7 @@ const Token = lexical_analyzer.Token;
pub const Result = struct {
nodes: ArrayList(Node),
node_lists: ArrayList(Node.List),
time: u64,
};
@ -47,6 +48,11 @@ pub const Node = packed struct(u128) {
assert(index.valid);
return index.value;
}
pub fn uniqueInteger(index: Index) u32 {
assert(index.valid);
return index.value;
}
};
pub const Range = struct {
@ -81,6 +87,15 @@ pub const Node = packed struct(u128) {
comptime_block_two = 23,
block_two = 24,
@"unreachable" = 25,
field_access = 26,
call_one = 27,
comptime_block = 28,
block = 29,
unsigned_integer_type = 30,
signed_integer_type = 31,
main_one = 32,
main_two = 33,
main_zero = 34,
};
};
@ -109,10 +124,37 @@ const Analyzer = struct {
}
}
fn getIdentifier(analyzer: *const Analyzer, token: Token) []const u8 {
assert(token.id == .identifier);
const identifier = analyzer.file[token.start..][0..token.len];
return identifier;
fn bytes(analyzer: *const Analyzer, token_index: Token.Index) []const u8 {
const token = analyzer.tokens[token_index];
return analyzer.file[token.start..][0..token.len];
}
fn symbolDeclaration(analyzer: *Analyzer) !Node.Index {
const first = analyzer.token_i;
assert(analyzer.tokens[first].id == .fixed_keyword_var or analyzer.tokens[first].id == .fixed_keyword_const);
analyzer.token_i += 1;
_ = try analyzer.expectToken(.identifier);
// TODO: type
_ = try analyzer.expectToken(.equal);
const init_node = try analyzer.expression();
_ = try analyzer.expectToken(.semicolon);
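// Grammar accepted so far: ('const' | 'var') identifier '=' expression ';' (no explicit type annotation yet).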
// TODO:
const type_node = Node.Index.invalid;
const declaration = Node{
.id = .simple_variable_declaration,
.token = first,
.left = type_node,
.right = init_node,
};
const declaration_init_node = analyzer.nodes.items[init_node.unwrap()];
std.debug.print("Declaration init node: {}\n", .{declaration_init_node});
return analyzer.addNode(declaration);
}
fn containerMembers(analyzer: *Analyzer) !Members {
@ -121,58 +163,26 @@ const Analyzer = struct {
while (analyzer.token_i < analyzer.tokens.len) {
const first = analyzer.token_i;
const member_node: Node = switch (analyzer.tokens[first].id) {
const member_node_index: Node.Index = switch (analyzer.tokens[first].id) {
.fixed_keyword_comptime => switch (analyzer.tokens[analyzer.token_i + 1].id) {
.left_brace => blk: {
analyzer.token_i += 1;
const comptime_block = try analyzer.block(.{ .is_comptime = true });
break :blk .{
break :blk try analyzer.addNode(.{
.id = .@"comptime",
.token = first,
.left = comptime_block,
.right = Node.Index.invalid,
};
});
},
else => |foo| std.debug.panic("NI: {s}", .{@tagName(foo)}),
else => |foo| @panic(@tagName(foo)),
},
.fixed_keyword_const, .fixed_keyword_var => blk: {
analyzer.token_i += 1;
_ = try analyzer.expectToken(.identifier);
// TODO: type
_ = try analyzer.expectToken(.equal);
// TODO: do this in a function
const init_node = try analyzer.expression();
// const init_node = switch (analyzer.tokens[analyzer.token_i].id) {
// .identifier => unreachable,
// .hash => try analyzer.compilerIntrinsic(),
// .left_parenthesis => try analyzer.function(),
// else => |t| std.debug.panic("NI: {s}", .{@tagName(t)}),
// };
_ = try analyzer.expectToken(.semicolon);
// TODO:
const type_node = Node.Index.invalid;
const top_level_decl = .{
.id = .simple_variable_declaration,
.token = first,
.left = type_node,
.right = init_node,
.fixed_keyword_const, .fixed_keyword_var => try analyzer.symbolDeclaration(),
else => |t| @panic(@tagName(t)),
};
break :blk top_level_decl;
},
.identifier => {
unreachable;
},
else => |t| std.debug.panic("NI: {s}", .{@tagName(t)}),
};
const member_index = try analyzer.addNode(member_node);
try analyzer.temporal_node_heap.append(analyzer.allocator, member_index);
try analyzer.temporal_node_heap.append(analyzer.allocator, member_node_index);
}
const members_array = analyzer.temporal_node_heap.items[node_heap_top..];
@ -263,10 +273,12 @@ const Analyzer = struct {
},
else => try analyzer.assignExpressionStatement(),
},
.fixed_keyword_unreachable => try analyzer.assignExpressionStatement(),
.fixed_keyword_unreachable, .fixed_keyword_return => try analyzer.assignExpressionStatement(),
.fixed_keyword_while => try analyzer.whileStatement(options),
else => unreachable,
.fixed_keyword_const, .fixed_keyword_var => try analyzer.symbolDeclaration(),
else => |t| @panic(@tagName(t)),
};
try analyzer.temporal_node_heap.append(analyzer.allocator, statement_index);
}
@ -301,7 +313,15 @@ const Analyzer = struct {
.left = statement_array[0],
.right = statement_array[1],
},
else => |len| std.debug.panic("len: {}", .{len}),
else => .{
.id = switch (options.is_comptime) {
true => .comptime_block,
false => .block,
},
.token = left_brace,
.left = try analyzer.nodeList(statement_array),
.right = Node.Index.invalid,
},
};
return analyzer.addNode(node);
}
@ -329,7 +349,7 @@ const Analyzer = struct {
const expression_id: Node.Id = switch (analyzer.tokens[analyzer.token_i].id) {
.semicolon => return expr,
.equal => .assign,
else => unreachable,
else => |t| @panic(@tagName(t)),
};
const node = Node{
@ -398,8 +418,8 @@ const Analyzer = struct {
while (analyzer.token_i < analyzer.tokens.len) {
const precedence: i32 = switch (analyzer.tokens[analyzer.token_i].id) {
.equal, .semicolon, .right_parenthesis, .right_brace, .comma => -1,
else => |foo| std.debug.panic("Foo: ({s}) {}", .{ @tagName(foo), foo }),
.equal, .semicolon, .right_parenthesis, .right_brace, .comma, .period => -1,
else => |t| @panic(@tagName(t)),
};
if (precedence < minimum_precedence) {
@ -446,6 +466,16 @@ const Analyzer = struct {
},
.string_literal, .number_literal, .fixed_keyword_true, .fixed_keyword_false, .hash, .fixed_keyword_unreachable => try analyzer.curlySuffixExpression(),
.fixed_keyword_fn => analyzer.function(),
.fixed_keyword_return => try analyzer.addNode(.{
.id = .@"return",
.token = blk: {
const token = analyzer.token_i;
analyzer.token_i += 1;
break :blk token;
},
.left = try analyzer.expression(),
.right = Node.Index.invalid,
}),
// todo:?
// .left_brace => try analyzer.block(),
else => |id| {
@ -492,14 +522,8 @@ const Analyzer = struct {
fn typeExpression(analyzer: *Analyzer) !Node.Index {
return switch (analyzer.tokens[analyzer.token_i].id) {
.identifier, .fixed_keyword_noreturn, .fixed_keyword_true, .fixed_keyword_false, .hash => try analyzer.errorUnionExpression(),
else => |id| blk: {
log.warn("By default, calling errorUnionExpression with {s}", .{@tagName(id)});
const result = try analyzer.errorUnionExpression();
break :blk result;
},
.identifier, .fixed_keyword_noreturn, .fixed_keyword_true, .fixed_keyword_false, .hash, .string_literal, .number_literal, .fixed_keyword_unreachable, .keyword_unsigned_integer, .keyword_signed_integer => try analyzer.errorUnionExpression(),
else => |id| @panic(@tagName(id)),
};
}
@ -516,14 +540,17 @@ const Analyzer = struct {
var result = try analyzer.primaryTypeExpression();
while (true) {
if (analyzer.suffixOperator()) |_| {
unreachable;
const suffix_operator = try analyzer.suffixOperator(result);
if (suffix_operator.valid) {
result = suffix_operator;
} else {
if (analyzer.tokens[analyzer.token_i].id == .left_parenthesis) {
const left_parenthesis = analyzer.token_i;
analyzer.token_i += 1;
var expression_list = ArrayList(Node.Index){};
while (analyzer.tokens[analyzer.token_i].id != .right_parenthesis) {
std.debug.print("Loop\n", .{});
const parameter = try analyzer.expression();
try expression_list.append(analyzer.allocator, parameter);
analyzer.token_i += @intFromBool(switch (analyzer.tokens[analyzer.token_i].id) {
@ -534,7 +561,16 @@ const Analyzer = struct {
}
_ = try analyzer.expectToken(.right_parenthesis);
@panic("TODO");
// const is_comma = analyzer.tokens[analyzer.token_i].id == .comma;
return analyzer.addNode(switch (expression_list.items.len) {
0 => .{
.id = .call_one,
.token = left_parenthesis,
.left = result,
.right = Node.Index.invalid,
},
else => |len| std.debug.panic("len: {}", .{len}),
});
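// Only zero-argument calls are lowered to call_one for now; calls with arguments hit the panic above.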
} else {
return result;
}
@ -569,8 +605,8 @@ const Analyzer = struct {
.identifier => switch (analyzer.tokens[token_i + 1].id) {
.colon => unreachable,
else => blk: {
const identifier = analyzer.getIdentifier(token);
std.debug.print("identifier: {s}\n", .{identifier});
const identifier = analyzer.bytes(token_i);
// std.debug.print("identifier: {s}\n", .{identifier});
analyzer.token_i += 1;
if (equal(u8, identifier, "_")) {
break :blk Node.Index.invalid;
@ -594,20 +630,55 @@ const Analyzer = struct {
.right = Node.Index.invalid,
}),
.hash => analyzer.compilerIntrinsic(),
.keyword_unsigned_integer, .keyword_signed_integer => |signedness| try analyzer.addNode(.{
.id = switch (signedness) {
.keyword_unsigned_integer => .unsigned_integer_type,
.keyword_signed_integer => .signed_integer_type,
else => unreachable,
},
.token = blk: {
analyzer.token_i += 1;
break :blk token_i;
},
.left = @bitCast(@as(u32, std.fmt.parseInt(u16, analyzer.bytes(token_i)[1..], 10) catch unreachable)),
.right = Node.Index.invalid,
}),
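// The prong above parses the bit width from token text such as "u32" or "s16" (skipping the leading letter) and stores the raw width via @bitCast in the node's left index.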
else => |foo| {
switch (foo) {
.identifier => std.debug.panic("{s}: {s}", .{ @tagName(foo), analyzer.getIdentifier(analyzer.tokens[token_i]) }),
else => std.debug.panic("{s}", .{@tagName(foo)}),
.identifier => std.debug.panic("{s}: {s}", .{ @tagName(foo), analyzer.bytes(token_i) }),
else => @panic(@tagName(foo)),
}
},
};
}
// TODO:
fn suffixOperator(analyzer: *Analyzer) ?bool {
_ = analyzer;
return null;
fn suffixOperator(analyzer: *Analyzer, left: Node.Index) !Node.Index {
const token = analyzer.tokens[analyzer.token_i];
return switch (token.id) {
.left_bracket => unreachable,
.period => switch (analyzer.tokens[analyzer.token_i + 1].id) {
.identifier => analyzer.addNode(.{
.id = .field_access,
.token = blk: {
const main_token = analyzer.token_i;
analyzer.token_i += 1;
break :blk main_token;
},
.left = left,
.right = blk: {
// TODO: ???
const right_token = analyzer.token_i;
analyzer.token_i += 1;
const result: Node.Index = @bitCast(right_token);
std.debug.print("WARNING: rhs has node index {} but it's token #{}\n", .{ result, right_token });
break :blk result;
},
}),
else => |t| @panic(@tagName(t)),
},
else => Node.Index.invalid,
};
}
fn addNode(analyzer: *Analyzer, node: Node) !Node.Index {
@ -618,27 +689,23 @@ const Analyzer = struct {
.value = @intCast(index),
};
}
fn nodeList(analyzer: *Analyzer, input: []const Node.Index) !Node.Index {
const index = analyzer.node_lists.items.len;
var new_node_list = try ArrayList(Node.Index).initCapacity(analyzer.allocator, input.len);
try new_node_list.appendSlice(analyzer.allocator, input);
try analyzer.node_lists.append(analyzer.allocator, new_node_list);
return .{
.value = @intCast(index),
};
}
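// The returned Node.Index refers into node_lists rather than nodes; a block with more than two statements stores it in its left slot.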
};
const Members = struct {
len: usize,
left: Node.Index,
right: Node.Index,
pub fn toRange(members: Members) Node.Range {
return switch (members.len) {
0 => unreachable,
1 => .{
.start = members.left.value,
.end = members.left.value,
},
2 => .{
.start = members.left.value,
.end = members.right.value,
},
else => unreachable,
};
}
};
pub fn analyze(allocator: Allocator, tokens: []const Token, file: []const u8) !Result {
@ -657,10 +724,22 @@ pub fn analyze(allocator: Allocator, tokens: []const Token, file: []const u8) !R
assert(node_index.value == 0);
assert(node_index.valid);
const members = try analyzer.containerMembers();
const member_range = members.toRange();
analyzer.nodes.items[0].left = .{ .value = @intCast(member_range.start) };
analyzer.nodes.items[0].right = .{ .value = @intCast(member_range.end) };
switch (members.len) {
0 => unreachable,
1 => {
analyzer.nodes.items[0].id = .main_one;
analyzer.nodes.items[0].left = members.left;
},
2 => {
analyzer.nodes.items[0].id = .main_two;
analyzer.nodes.items[0].left = members.left;
analyzer.nodes.items[0].right = members.right;
},
else => unreachable,
}
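// The root node (index 0) becomes main_one or main_two depending on the number of top-level members; files with more than two members are not handled yet.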
const end = std.time.Instant.now() catch unreachable;
@ -668,6 +747,7 @@ pub fn analyze(allocator: Allocator, tokens: []const Token, file: []const u8) !R
return .{
.nodes = analyzer.nodes,
.node_lists = analyzer.node_lists,
.time = end.since(start),
};
}

View File

@ -1,8 +1,6 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
pub const first = "src/test/main.b";
pub fn readFile(allocator: Allocator, file_relative_path: []const u8) ![]const u8 {
const file = try std.fs.cwd().readFileAlloc(allocator, file_relative_path, std.math.maxInt(usize));
return file;

View File

@ -5,7 +5,7 @@ const assert = std.debug.assert;
const Compilation = @import("Compilation.zig");
pub const seed = std.math.maxInt(u64);
const default_src_file = "src/test/main.b";
const default_src_file = "src/test/main.nat";
pub fn main() !void {
try singleCompilation(default_src_file);

View File

@ -1,3 +1,3 @@
const main = fn() i32 {
const main = fn() s32 {
return 0;
};