Add custom logger
parent aefabd6108
commit b24bd8221e
@@ -3,7 +3,6 @@ const Compilation = @This();
const std = @import("std");
const assert = std.debug.assert;
const equal = std.mem.eql;
const print = std.debug.print;

const Allocator = std.mem.Allocator;

@@ -40,7 +39,104 @@ build_directory: std.fs.Dir,
const cache_dir_name = "cache";
const installation_dir_name = "installation";

pub fn init(allocator: Allocator) !*Compilation {
const ArgumentParsingError = error{
    main_package_path_not_specified,
};

fn reportUnterminatedArgumentError(string: []const u8) noreturn {
    std.debug.panic("Unterminated argument: {s}", .{string});
}

fn parseArguments(allocator: Allocator) !Compilation.Module.Descriptor {
    const arguments = (try std.process.argsAlloc(allocator))[1..];

    var maybe_executable_path: ?[]const u8 = null;
    var maybe_main_package_path: ?[]const u8 = null;
    var target_triplet: []const u8 = "x86_64-linux-gnu";

    var i: usize = 0;
    while (i < arguments.len) : (i += 1) {
        const current_argument = arguments[i];
        if (equal(u8, current_argument, "-o")) {
            if (i + 1 != arguments.len) {
                maybe_executable_path = arguments[i + 1];
                assert(maybe_executable_path.?.len != 0);
                i += 1;
            } else {
                reportUnterminatedArgumentError(current_argument);
            }
        } else if (equal(u8, current_argument, "-target")) {
            if (i + 1 != arguments.len) {
                target_triplet = arguments[i + 1];
                i += 1;
            } else {
                reportUnterminatedArgumentError(current_argument);
            }
        } else if (equal(u8, current_argument, "-log")) {
            if (i + 1 != arguments.len) {
                i += 1;

                var log_argument_iterator = std.mem.splitScalar(u8, arguments[i], ',');

                while (log_argument_iterator.next()) |log_argument| {
                    var log_argument_splitter = std.mem.splitScalar(u8, log_argument, '.');
                    const log_scope_candidate = log_argument_splitter.next() orelse unreachable;
                    var recognized_scope = false;

                    inline for (@typeInfo(LoggerScope).Enum.fields) |logger_scope_enum_field| {
                        const log_scope = @field(LoggerScope, logger_scope_enum_field.name);

                        if (equal(u8, @tagName(log_scope), log_scope_candidate)) {
                            const LogScope = getLoggerScopeType(log_scope);

                            if (log_argument_splitter.next()) |particular_log_candidate| {
                                var recognized_particular = false;
                                inline for (@typeInfo(LogScope.Logger).Enum.fields) |particular_log_field| {
                                    const particular_log = @field(LogScope.Logger, particular_log_field.name);

                                    if (equal(u8, particular_log_candidate, @tagName(particular_log))) {
                                        LogScope.Logger.bitset.setPresent(particular_log, true);
                                        recognized_particular = true;
                                    }
                                } else if (!recognized_particular) std.debug.panic("Unrecognized particular log \"{s}\" in scope {s}", .{ particular_log_candidate, @tagName(log_scope) });
                            } else {
                                unreachable;
                            }

                            logger_bitset.setPresent(log_scope, true);

                            recognized_scope = true;
                        }
                    } else if (!recognized_scope) std.debug.panic("Unrecognized log scope: {s}", .{log_scope_candidate});
                }
            } else {
                reportUnterminatedArgumentError(current_argument);
            }
        } else {
            maybe_main_package_path = current_argument;
        }
    }

    const main_package_path = maybe_main_package_path orelse return error.main_package_path_not_specified;

    const executable_path = maybe_executable_path orelse blk: {
        const executable_name = std.fs.path.basename(main_package_path[0 .. main_package_path.len - "/main.nat".len]);
        assert(executable_name.len > 0);
        const result = try std.mem.concat(allocator, u8, &.{ "nat/", executable_name });
        break :blk result;
    };

    const cross_target = try std.zig.CrossTarget.parse(.{ .arch_os_abi = target_triplet });
    const target = cross_target.toTarget();

    return .{
        .main_package_path = main_package_path,
        .executable_path = executable_path,
        .target = target,
    };
}
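
// Usage sketch (illustrative; the binary name `nat` and the example paths are
// assumptions, not part of this commit). Each `-log` entry takes the form
// `scope.log`, so a run such as
//
//   nat src/main.nat -target x86_64-linux-gnu -log compilation.import,sema.type
//
// sets the `compilation` and `sema` bits in `logger_bitset` plus the matching
// bits in each scope's own Logger bitset; every other logln call stays silent.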

pub fn init(allocator: Allocator) !void {
    const compilation: *Compilation = try allocator.create(Compilation);

    const self_exe_path = try std.fs.selfExePathAlloc(allocator);
@@ -56,7 +152,9 @@ pub fn init(allocator: Allocator) !*Compilation {
    try compilation.build_directory.makePath(cache_dir_name);
    try compilation.build_directory.makePath(installation_dir_name);

    return compilation;
    const compilation_descriptor = try parseArguments(allocator);

    try compilation.compileModule(compilation_descriptor);
}

pub const Struct = struct {
@@ -617,7 +715,7 @@ pub const Module = struct {
    };

    pub fn importFile(module: *Module, allocator: Allocator, current_file_index: File.Index, import_name: []const u8) !ImportPackageResult {
        print("import: '{s}'\n", .{import_name});
        logln(.compilation, .import, "import: '{s}'\n", .{import_name});
        if (equal(u8, import_name, "std")) {
            return module.importPackage(allocator, module.main_package.dependencies.get("std").?);
        }
@@ -672,7 +770,7 @@ pub const Module = struct {
            .relative_path = relative_path,
            .package = package,
        });
        std.debug.print("Adding file #{}: {s}\n", .{ file_allocation.index.uniqueInteger(), full_path });
        logln(.compilation, .new_file, "Adding file #{}: {s}\n", .{ file_allocation.index.uniqueInteger(), full_path });
        path_lookup.value_ptr.* = file_allocation.ptr;
        // break :blk file;
        break :blk .{
@@ -691,7 +789,7 @@ pub const Module = struct {

    pub fn importPackage(module: *Module, allocator: Allocator, package: *Package) !ImportPackageResult {
        const full_path = try std.fs.path.resolve(allocator, &.{ package.directory.path, package.source_path });
        print("Import full path: {s}\n", .{full_path});
        logln(.compilation, .import, "Import full path: {s}\n", .{full_path});
        const import_file = try module.getFile(allocator, full_path, package.source_path, package);
        try import_file.ptr.addPackageReference(allocator, package);

@@ -719,9 +817,7 @@ pub const Module = struct {
        file.status = .loaded_into_memory;

        try file.lex(allocator, file_index);
        print("Start of parsing file #{}\n", .{file_index.uniqueInteger()});
        try file.parse(allocator, file_index);
        print("End of parsing file #{}\n", .{file_index.uniqueInteger()});
    }

    fn getString(map: *StringKeyMap([]const u8), key: u32) ?[]const u8 {
@@ -1020,3 +1116,57 @@ pub const File = struct {
        file.status = .parsed;
    }
};

pub fn panic(message: []const u8, stack_trace: ?*std.builtin.StackTrace, return_address: ?usize) noreturn {
    std.builtin.default_panic(message, stack_trace, return_address);
}
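
// Note (inference, not stated in the commit): a `pub fn panic` exported from the
// root source file overrides Zig's default panic handler; forwarding to
// std.builtin.default_panic keeps the stock behavior while giving the compiler
// a single place to hook panics later.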

const LoggerScope = enum {
    compilation,
    lexer,
    parser,
    sema,
    ir,
    codegen,
};

const Logger = enum {
    import,
    new_file,
    arguments,
    var bitset = std.EnumSet(Logger).initEmpty();
};

fn getLoggerScopeType(comptime logger_scope: LoggerScope) type {
    comptime {
        return switch (logger_scope) {
            .compilation => @This(),
            .lexer => lexical_analyzer,
            .parser => syntactic_analyzer,
            .sema => semantic_analyzer,
            .ir => intermediate_representation,
            .codegen => emit,
        };
    }
}
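
// Explanatory note on the comptime dispatch above (not part of the diff):
// because the switch runs at comptime, getLoggerScopeType(.lexer) is the
// `lexical_analyzer` namespace itself, so
//
//   getLoggerScopeType(.lexer).Logger == lexical_analyzer.Logger
//
// which is how a `-log lexer.main` selector reaches the `main` field declared
// in lexical_analyzer.zig.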

var logger_bitset = std.EnumSet(LoggerScope).initEmpty();

var writer = std.io.getStdErr().writer();

fn shouldLog(comptime logger_scope: LoggerScope, logger: getLoggerScopeType(logger_scope).Logger) bool {
    return logger_bitset.contains(logger_scope) and getLoggerScopeType(logger_scope).Logger.bitset.contains(logger);
}

pub fn logln(comptime logger_scope: LoggerScope, logger: getLoggerScopeType(logger_scope).Logger, comptime format: []const u8, arguments: anytype) void {
    if (shouldLog(logger_scope, logger)) {
        log(logger_scope, logger, format, arguments);
        writer.writeByte('\n') catch unreachable;
    }
}

pub fn log(comptime logger_scope: LoggerScope, logger: getLoggerScopeType(logger_scope).Logger, comptime format: []const u8, arguments: anytype) void {
    if (shouldLog(logger_scope, logger)) {
        std.fmt.format(writer, format, arguments) catch unreachable;
    }
}
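
// Call-site sketch, mirroring how the rest of this commit uses the API: each
// message carries its scope and that scope's Logger tag, e.g.
//
//   logln(.sema, .type, "Bit count: {}", .{bit_count});
//
// shouldLog requires both the scope bit in `logger_bitset` and the tag bit in
// the scope's own `Logger.bitset`, so a message prints only when both halves of
// its `-log scope.tag` selector were enabled on the command line.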

@@ -105,10 +105,15 @@ pub fn InstructionSelector(comptime Instruction: type) type {
    };
}

const x86_64 = @import("x86_64.zig");
const aarch64 = @import("aarch64.zig");

pub const Logger = x86_64.Logger;

pub fn get(comptime arch: std.Target.Cpu.Arch) type {
    const backend = switch (arch) {
        .x86_64 => @import("x86_64.zig"),
        .aarch64 => @import("aarch64.zig"),
        .x86_64 => x86_64,
        .aarch64 => aarch64,
        else => {},
    };

@@ -138,10 +143,8 @@ pub fn get(comptime arch: std.Target.Cpu.Arch) type {
    // switch (@import("builtin").os.tag) {
    //     .linux => switch (@import("builtin").cpu.arch == arch) {
    //         true => {
    //             std.debug.print("Executing...\n", .{});
    //             const entryPoint = result.getEntryPoint(fn () callconv(.SysV) noreturn);
    //             entryPoint();
    //             std.debug.print("This should not print...\n", .{});
    //         },
    //         false => {},
    //     },

@@ -1,9 +1,10 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const print = std.debug.print;

const Compilation = @import("../Compilation.zig");
const log = Compilation.log;
const logln = Compilation.logln;
const Module = Compilation.Module;
const Package = Compilation.Package;

@@ -14,6 +15,13 @@ const AutoArrayHashMap = data_structures.AutoArrayHashMap;
const AutoHashMap = data_structures.AutoHashMap;
const StringKeyMap = data_structures.StringKeyMap;

pub const Logger = enum {
    function,
    phi_removal,

    pub var bitset = std.EnumSet(Logger).initEmpty();
};

pub const Result = struct {
    blocks: BlockList(BasicBlock) = .{},
    calls: BlockList(Call) = .{},
@@ -398,6 +406,7 @@ pub const Builder = struct {
        };

        const function_decl_name = builder.ir.getFunctionName(function_declaration_allocation.index);
        _ = function_decl_name;

        if (sema_prototype.arguments) |sema_arguments| {
            try function_declaration.arguments.ensureTotalCapacity(builder.allocator, @intCast(sema_arguments.len));
@@ -429,9 +438,6 @@ pub const Builder = struct {

        const return_type = builder.module.types.get(sema_prototype.return_type);
        const is_noreturn = return_type.* == .noreturn;
        if (std.mem.eql(u8, function_decl_name, "print")) {
            print("WTDASDAS", .{});
        }

        if (!is_noreturn) {
            const exit_block = try builder.newBlock();
@@ -545,7 +551,7 @@ pub const Builder = struct {

    fn optimizeFunction(builder: *Builder, function: *Function) !void {
        // HACK
        print("\n[BEFORE OPTIMIZE]:\n{}", .{function});
        logln(.ir, .function, "\n[BEFORE OPTIMIZE]:\n{}", .{function});
        var reachable_blocks = try builder.findReachableBlocks(function.blocks.items[0]);
        var did_something = true;

@@ -616,7 +622,7 @@ pub const Builder = struct {
            }
        }

        print("[AFTER OPTIMIZE]:\n{}", .{function});
        logln(.ir, .function, "[AFTER OPTIMIZE]:\n{}", .{function});
    }

    fn removeUnreachablePhis(builder: *Builder, reachable_blocks: []const BasicBlock.Index, instruction_index: Instruction.Index) !bool {
@@ -686,7 +692,7 @@ pub const Builder = struct {
                    };
                }
            } else {
                print("TODO: maybe this phi removal is wrong?", .{});
                logln(.ir, .phi_removal, "TODO: maybe this phi removal is wrong?", .{});
                instruction.* = .{
                    .copy = trivial_value,
                };
@@ -957,7 +963,7 @@ pub const Builder = struct {
            },
            .declaration => |sema_declaration_index| {
                const sema_declaration = builder.module.declarations.get(sema_declaration_index);
                print("Name: {s}\n", .{builder.module.getName(sema_declaration.name).?});
                //logln("Name: {s}\n", .{builder.module.getName(sema_declaration.name).?});
                assert(sema_declaration.scope_type == .local);
                const declaration_type = builder.module.types.get(sema_declaration.type);
                switch (declaration_type.*) {

@@ -1,7 +1,7 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const print = std.debug.print;
const panic = std.debug.panic;
const emit = @import("emit.zig");
const ir = @import("intermediate_representation.zig");

@@ -12,8 +12,32 @@ const ArrayList = data_structures.ArrayList;
const AutoArrayHashMap = data_structures.AutoArrayHashMap;
const BlockList = data_structures.BlockList;

const log = Compilation.log;
const logln = Compilation.logln;

const x86_64 = @This();

pub const Logger = enum {
    register_allocation_new_instructions,
    instruction_selection_block,
    instruction_selection_ir_function,
    instruction_selection_new_instruction,
    instruction_selection_cache_flush,
    instruction_selection_mir_function,
    register_allocation_block,
    register_allocation_problematic_hint,
    register_allocation_assignment,
    register_allocation_reload,
    register_allocation_function_before,
    register_allocation_new_instruction,
    register_allocation_new_instruction_function_before,
    register_allocation_instruction_avoid_copy,
    register_allocation_function_after,
    register_allocation_operand_list_verification,

    pub var bitset = std.EnumSet(Logger).initEmpty();
};
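
// Selection sketch (assumption: the compiler binary is invoked as `nat`): these
// tags sit under the `codegen` scope, because Compilation.getLoggerScopeType
// maps `.codegen` to the emit backend, which re-exports this enum as
// `emit.Logger`. A single pass can therefore be traced with e.g.
//
//   nat main.nat -log codegen.register_allocation_assignment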

const Register = struct {
    list: List = .{},
    index: Index,
@@ -1008,7 +1032,6 @@ const InstructionSelection = struct {
    fn loadRegisterFromStackSlot(instruction_selection: *InstructionSelection, mir: *MIR, insert_before_instruction_index: usize, destination_register: Register.Physical, frame_index: u32, register_class: Register.Class, virtual_register: Register.Virtual.Index) !void {
        _ = virtual_register;
        const stack_object = instruction_selection.stack_objects.items[frame_index];
        print("Stack object size: {}\n", .{stack_object.size});
        switch (@divExact(stack_object.size, 8)) {
            @sizeOf(u64) => {
                switch (register_class) {
@@ -1043,7 +1066,7 @@ const InstructionSelection = struct {
                        destination_operand,
                        source_operand,
                    });
                    print("Inserting instruction at index {}", .{insert_before_instruction_index});
                    logln(.codegen, .register_allocation_new_instructions, "Inserting instruction at index {}", .{insert_before_instruction_index});
                    try mir.blocks.get(instruction_selection.current_block).instructions.insert(mir.allocator, insert_before_instruction_index, instruction_index);
                },
                else => |t| @panic(@tagName(t)),
@@ -1081,12 +1104,12 @@ const InstructionSelection = struct {
                        destination_operand,
                        source_operand,
                    });
                    print("Inserting instruction at index {}\n", .{insert_before_instruction_index});
                    logln(.codegen, .register_allocation_new_instructions, "Inserting instruction at index {}\n", .{insert_before_instruction_index});
                    try mir.blocks.get(instruction_selection.current_block).instructions.insert(mir.allocator, insert_before_instruction_index, instruction_index);
                },
                else => |t| @panic(@tagName(t)),
            },
            else => std.debug.panic("Stack object size: {}\n", .{stack_object.size}),
            else => panic("Stack object size: {} bits\n", .{stack_object.size}),
        }
    }

@@ -1420,7 +1443,6 @@ const InstructionSelection = struct {

            // TODO: addLiveIn MachineBasicBlock ? unreachable;
        }
        print("After livein: {}\n", .{instruction_selection.function});
    }
};

@@ -2001,16 +2023,15 @@ pub const MIR = struct {
    instruction_selections: ArrayList(InstructionSelection) = .{},
    virtual_registers: BlockList(Register.Virtual) = .{},

    pub fn selectInstructions(allocator: Allocator, intermediate: *ir.Result, target: std.Target) !MIR {
        print("\n[INSTRUCTION SELECTION]\n\n", .{});
        var mir_stack = MIR{
    pub fn selectInstructions(allocator: Allocator, intermediate: *ir.Result, target: std.Target) !*MIR {
        logln(.codegen, .instruction_selection_block, "\n[INSTRUCTION SELECTION]\n\n", .{});
        const mir = try allocator.create(MIR);
        mir.* = .{
            .allocator = allocator,
            .ir = intermediate,
            .target = target,
        };

        const mir = &mir_stack;

        try mir.blocks.ensureCapacity(allocator, intermediate.blocks.len);
        try mir.functions.ensureCapacity(allocator, intermediate.function_definitions.len);
        try mir.instruction_selections.ensureUnusedCapacity(allocator, intermediate.function_definitions.len);
@@ -2019,7 +2040,7 @@ pub const MIR = struct {

        while (function_definition_iterator.nextPointer()) |ir_function| {
            const fn_name = mir.ir.getFunctionName(ir_function.declaration);
            print("=========\n{}=========\n", .{ir_function});
            logln(.codegen, .instruction_selection_ir_function, "=========\n{}=========\n", .{ir_function});

            const instruction_selection = mir.instruction_selections.addOneAssumeCapacity();
            const function_allocation = try mir.functions.addOne(mir.allocator);
@@ -2065,7 +2086,6 @@ pub const MIR = struct {

            try instruction_selection.lowerArguments(mir, ir_function);

            print("Block count: {}\n", .{function.blocks.items.len});
            var block_i: usize = function.blocks.items.len;

            while (block_i > 0) {
@@ -2077,7 +2097,6 @@ pub const MIR = struct {
                const ir_block = mir.ir.blocks.get(ir_block_index);

                var instruction_i: usize = ir_block.instructions.items.len;
                print("Instruction count: {}\n", .{instruction_i});

                var folded_load = false;

@@ -2089,7 +2108,7 @@ pub const MIR = struct {

                    instruction_selection.local_value_map.clearRetainingCapacity();

                    print("Instruction #{}\n", .{instruction_i});
                    logln(.codegen, .instruction_selection_new_instruction, "Instruction #{}\n", .{instruction_i});

                    switch (ir_instruction.*) {
                        .ret => |ir_ret_index| {
@@ -2545,7 +2564,7 @@ pub const MIR = struct {

                const instruction_index = instruction_selection.instruction_cache.items[i];
                const instruction = mir.instructions.get(instruction_index);
                print("Inserting instruction #{} ({s}) into index {} (instruction count: {})\n", .{ instruction_index.uniqueInteger(), @tagName(instruction.id), block.current_stack_index, block.instructions.items.len });
                logln(.codegen, .instruction_selection_cache_flush, "Inserting instruction #{} ({s}) into index {} (instruction count: {})\n", .{ instruction_index.uniqueInteger(), @tagName(instruction.id), block.current_stack_index, block.instructions.items.len });
                try block.instructions.insert(mir.allocator, block.current_stack_index, instruction_index);
            }

@@ -2555,10 +2574,10 @@ pub const MIR = struct {

            try instruction_selection.emitLiveInCopies(mir, function.blocks.items[0]);

            print("=========\n{}=========\n", .{function});
            logln(.codegen, .instruction_selection_mir_function, "=========\n{}=========\n", .{function});
        }

        return mir_stack;
        return mir;
    }
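
    // Design note (inference, not stated in the commit): the MIR is now
    // heap-allocated and returned as *MIR, so pointers taken into it while it
    // is being built stay valid after selectInstructions returns, which the old
    // stack-local `mir_stack` could not guarantee. A caller would then hold the
    // pointer directly, e.g.
    //
    //   const mir = try MIR.selectInstructions(allocator, intermediate, target);
    //   try mir.allocateRegisters();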

    fn getNextInstructionIndex(mir: *MIR, instruction_index: Instruction.Index) usize {
@@ -2783,8 +2802,8 @@ pub const MIR = struct {
                    // TODO: asserts
                    const assert_result = !operand.flags.isKill() or live_register.last_use.eq(instruction_index);
                    if (assert_result) {
                        print("Existing live register at instruction #{}: {}\n", .{ instruction_index.uniqueInteger(), live_register });
                        print("Function until now: {}\n", .{instruction_selection.function});
                        // logln("Existing live register at instruction #{}: {}\n", .{ instruction_index.uniqueInteger(), live_register });
                        // logln("Function until now: {}\n", .{instruction_selection.function});
                        assert(assert_result);
                    }
                },
@@ -2860,13 +2879,13 @@ pub const MIR = struct {
                        register_allocator.assignVirtualToPhysicalRegister(live_register, physical_register);
                        return;
                    } else {
                        print("Second hint {s} not free\n", .{@tagName(physical_register)});
                        logln(.codegen, .register_allocation_problematic_hint, "Second hint {s} not free\n", .{@tagName(physical_register)});
                    }
                } else {
                    unreachable;
                }
            } else {
                print("Can't take hint for VR{} for instruction #{}\n", .{ virtual_register.uniqueInteger(), instruction_index.uniqueInteger() });
                logln(.codegen, .register_allocation_problematic_hint, "Can't take hint for VR{} for instruction #{}\n", .{ virtual_register.uniqueInteger(), instruction_index.uniqueInteger() });
            }

            const register_class_members = registers_by_class.get(register_class);
@@ -2878,7 +2897,7 @@ pub const MIR = struct {
            // for (register_class_members) |candidate_register| {
            //     print("{s}, ", .{@tagName(candidate_register)});
            // }
            print("\n", .{});
            // print("\n", .{});
            for (register_class_members) |candidate_register| {
                if (register_allocator.isRegisterUsedInInstruction(candidate_register, look_at_physical_register_uses)) continue;
                const spill_cost = register_allocator.computeSpillCost(candidate_register);
@@ -3008,7 +3027,7 @@ pub const MIR = struct {
                        return register;
                    }

                    print("Missed oportunity for register allocation tracing copy chain for VR{}\n", .{virtual_register_index.uniqueInteger()});
                    logln(.codegen, .register_allocation_problematic_hint, "Missed oportunity for register allocation tracing copy chain for VR{}\n", .{virtual_register_index.uniqueInteger()});
                },
                else => |t| @panic(@tagName(t)),
            }
@@ -3029,7 +3048,7 @@ pub const MIR = struct {
                .virtual = virtual_register,
            });

            print("Assigning V{} to {s}\n", .{ virtual_register.uniqueInteger(), @tagName(register) });
            logln(.codegen, .register_allocation_assignment, "Assigning V{} to {s}\n", .{ virtual_register.uniqueInteger(), @tagName(register) });
            // TODO: debug info
        }

@@ -3065,7 +3084,7 @@ pub const MIR = struct {
        fn reload(register_allocator: *RegisterAllocator, mir: *MIR, instruction_selection: *InstructionSelection, before_index: usize, virtual_register: Register.Virtual.Index, physical_register: Register.Physical) !void {
            const frame_index = try register_allocator.getStackSpaceFor(mir, instruction_selection, virtual_register);
            const register_class = mir.virtual_registers.get(virtual_register).register_class;
            print("Frame index: {}\n", .{frame_index});
            logln(.codegen, .register_allocation_reload, "Frame index: {}\n", .{frame_index});

            try instruction_selection.loadRegisterFromStackSlot(mir, before_index, physical_register, frame_index, register_class, virtual_register);
        }
@@ -3302,7 +3321,7 @@ pub const MIR = struct {
    }

    pub fn allocateRegisters(mir: *MIR) !void {
        print("\n[REGISTER ALLOCATION]\n\n", .{});
        logln(.codegen, .register_allocation_block, "[REGISTER ALLOCATION]\n\n", .{});
        const function_count = mir.functions.len;
        var function_iterator = mir.functions.iterator();
        const register_count = @typeInfo(Register.Physical).Enum.fields.len;
@@ -3313,7 +3332,7 @@ pub const MIR = struct {
        for (0..function_count) |function_index| {
            const function = function_iterator.nextPointer().?;
            const instruction_selection = &mir.instruction_selections.items[function_index];
            print("Allocating registers for {}\n", .{function});
            logln(.codegen, .register_allocation_function_before, "Allocating registers for {}\n", .{function});

            var block_i: usize = function.blocks.items.len;
            var register_allocator = try RegisterAllocator.init(mir, instruction_selection);
@@ -3331,8 +3350,8 @@ pub const MIR = struct {

                    const instruction_index = block.instructions.items[instruction_i];
                    const instruction = mir.instructions.get(instruction_index);
                    print("===============\nInstruction {} (#{})\n", .{ instruction_i, instruction_index.uniqueInteger() });
                    print("{}\n", .{function});
                    logln(.codegen, .register_allocation_new_instruction, "===============\nInstruction {} (#{})\n", .{ instruction_i, instruction_index.uniqueInteger() });
                    logln(.codegen, .register_allocation_new_instruction_function_before, "{}\n", .{function});

                    register_allocator.used_in_instruction = RegisterBitset.initEmpty();

@@ -3456,7 +3475,7 @@ pub const MIR = struct {

                        if (std.meta.eql(dst_register, src_register)) {
                            try register_allocator.coalesced.append(mir.allocator, instruction_index);
                            print("Avoiding copy...\n", .{});
                            logln(.codegen, .register_allocation_instruction_avoid_copy, "Avoiding copy...\n", .{});
                        }
                    }
                }
@@ -3471,7 +3490,7 @@ pub const MIR = struct {
                } else unreachable;
            }

            print("{}\n============\n", .{function});
            logln(.codegen, .register_allocation_function_after, "{}\n============\n", .{function});
        }
    }

@@ -3726,7 +3745,7 @@ pub const MIR = struct {
        const instruction_index = operand.parent;
        assert(instruction_index.valid);
        const instruction = mir.instructions.get(instruction_index);
        print("Verifying instruction #{}, operand #{}\n", .{ instruction_index.uniqueInteger(), mir.operands.indexOf(operand).uniqueInteger() });
        logln(.codegen, .register_allocation_operand_list_verification, "Verifying instruction #{}, operand #{}\n", .{ instruction_index.uniqueInteger(), mir.operands.indexOf(operand).uniqueInteger() });
        _ = instruction;
        assert(operand.u == .register);
        assert(operand.u.register.index == .virtual and operand.u.register.index.virtual.eq(register));
@@ -3853,7 +3872,8 @@ pub const MIR = struct {

        if (instruction == .copy) {
            const i = instruction_allocation.ptr.*;
            print("Built copy: DST: {}. SRC: {}\n", .{ mir.operands.get(i.operands.items[0]).u.register.index, mir.operands.get(i.operands.items[1]).u.register.index });
            _ = i;
            // print("Built copy: DST: {}. SRC: {}\n", .{ mir.operands.get(i.operands.items[0]).u.register.index, mir.operands.get(i.operands.items[1]).u.register.index });
        }

        return instruction_allocation.index;

@@ -11,6 +11,7 @@ const enumFromString = data_structures.enumFromString;

const Compilation = @import("../Compilation.zig");
const File = Compilation.File;
const logln = Compilation.logln;
const fs = @import("../fs.zig");

pub const Token = packed struct(u64) {
@@ -112,6 +113,12 @@ pub const Result = struct {
    time: u64,
};

pub const Logger = enum {
    main,

    pub var bitset = std.EnumSet(Logger).initEmpty();
};
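
// Effect sketch (assumption: the compiler binary is invoked as `nat`): passing
// `-log lexer.main` sets the `main` bit here, which enables the per-token dump
// that analyze() below now routes through logln instead of an unconditional
// std.debug.print.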

pub fn analyze(allocator: Allocator, text: []const u8, file_index: File.Index) !Result {
    _ = file_index;
    const time_start = std.time.Instant.now() catch unreachable;
@@ -134,8 +141,7 @@ pub fn analyze(allocator: Allocator, text: []const u8, file_index: File.Index) !
            }

            // const identifier = text[start_index..][0 .. index - start_index];
            // _ = identifier;
            // std.debug.print("Identifier: {s}\n", .{identifier});
            // logln("Identifier: {s}", .{identifier});

            if (start_character == 'u' or start_character == 's') {
                var index_integer = start_index + 1;
@@ -205,11 +211,8 @@ pub fn analyze(allocator: Allocator, text: []const u8, file_index: File.Index) !
        });
    }

    const should_log = true;
    if (should_log) {
        for (tokens.items, 0..) |token, i| {
            std.debug.print("#{} {s}\n", .{ i, @tagName(token.id) });
        }
        logln(.lexer, .main, "#{} {s}\n", .{ i, @tagName(token.id) });
    }

    const time_end = std.time.Instant.now() catch unreachable;

@@ -2,6 +2,7 @@ const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const equal = std.mem.eql;
const panic = std.debug.panic;
const Compilation = @import("../Compilation.zig");
const File = Compilation.File;
const Module = Compilation.Module;
@@ -23,6 +24,23 @@ const Struct = Compilation.Struct;
const Type = Compilation.Type;
const Value = Compilation.Value;

const log = Compilation.log;
const logln = Compilation.logln;

pub const Logger = enum {
    type,
    identifier,
    symbol_declaration,
    scope_node,
    node,
    typecheck,
    @"switch",
    block,
    call,

    pub var bitset = std.EnumSet(Logger).initEmpty();
};

const lexical_analyzer = @import("lexical_analyzer.zig");
const Token = lexical_analyzer.Token;

@@ -35,8 +53,6 @@ const data_structures = @import("../data_structures.zig");
const ArrayList = data_structures.ArrayList;
const HashMap = data_structures.AutoHashMap;

const print = std.debug.print;

const Analyzer = struct {
    allocator: Allocator,
    module: *Module,
@@ -52,7 +68,7 @@ const Analyzer = struct {
        const scope = analyzer.module.scopes.get(scope_index);
        const file = analyzer.module.files.get(scope.file);
        const result = &file.syntactic_analyzer_result.nodes.items[node_index.unwrap()];
        print("Fetching node #{} (0x{x}) from scope #{} from file #{} with id: {s}\n", .{ node_index.uniqueInteger(), @intFromPtr(result), scope_index.uniqueInteger(), scope.file.uniqueInteger(), @tagName(result.id) });
        logln(.sema, .scope_node, "Fetching node #{} (0x{x}) from scope #{} from file #{} with id: {s}\n", .{ node_index.uniqueInteger(), @intFromPtr(result), scope_index.uniqueInteger(), scope.file.uniqueInteger(), @tagName(result.id) });
        return result.*;
    }

@@ -116,7 +132,7 @@ const Analyzer = struct {
    }

    fn block(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) anyerror!Block.Index {
        print("Resolving block from scope #{} in file #{}\n", .{ scope_index.uniqueInteger(), analyzer.module.scopes.get(scope_index).file.uniqueInteger() });
        logln(.sema, .block, "Resolving block from scope #{} in file #{}\n", .{ scope_index.uniqueInteger(), analyzer.module.scopes.get(scope_index).file.uniqueInteger() });
        var reaches_end = true;
        const block_node = analyzer.getScopeNode(scope_index, node_index);
        var statement_nodes = ArrayList(Node.Index){};
@@ -138,7 +154,8 @@ const Analyzer = struct {
            .block, .block_zero, .block_one, .block_two => false,
            else => |t| @panic(@tagName(t)),
        };
        print("Is comptime: {}\n", .{is_comptime});

        logln(.sema, .block, "Is comptime: {}\n", .{is_comptime});

        var statements = ArrayList(Value.Index){};

@@ -213,7 +230,7 @@ const Analyzer = struct {

    fn processCall(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !Call.Index {
        const node = analyzer.getScopeNode(scope_index, node_index);
        print("Node index: {}. Left index: {}\n", .{ node_index.uniqueInteger(), node.left.uniqueInteger() });
        logln(.sema, .call, "Node index: {}. Left index: {}\n", .{ node_index.uniqueInteger(), node.left.uniqueInteger() });
        assert(!node.left.invalid);
        const left_value_index = switch (!node.left.invalid) {
            true => blk: {
@@ -247,7 +264,7 @@ const Analyzer = struct {
                const function = analyzer.module.functions.get(function_index);
                const function_prototype = analyzer.module.function_prototypes.get(analyzer.module.types.get(function.prototype).function);
                const argument_declarations = function_prototype.arguments.?;
                print("Argument declaration count: {}. Argument node list count: {}\n", .{ argument_declarations.len, call_argument_node_list.len });
                logln(.sema, .call, "Argument declaration count: {}. Argument node list count: {}\n", .{ argument_declarations.len, call_argument_node_list.len });
                var argument_array = ArrayList(Value.Index){};
                if (argument_declarations.len == call_argument_node_list.len) {
                    for (argument_declarations, call_argument_node_list) |argument_declaration_index, argument_node_index| {
@@ -276,7 +293,7 @@ const Analyzer = struct {

                    break :b argument_array;
                } else {
                    std.debug.panic("Function call has argument count mismatch: call has {}, function declaration has {}\n", .{ call_argument_node_list.len, argument_declarations.len });
                    panic("Function call has argument count mismatch: call has {}, function declaration has {}\n", .{ call_argument_node_list.len, argument_declarations.len });
                }
            },
            else => |t| @panic(@tagName(t)),
@@ -306,7 +323,8 @@ const Analyzer = struct {
            if (enum_field.name == enum_name_hash) {
                return enum_name_hash;
            }
            print("Existing \"{s}\" != current \"{s}\"\n", .{ existing, enum_name });

            logln(.sema, .typecheck, "Existing enum field \"{s}\" != enum literal \"{s}\"\n", .{ existing, enum_name });
        } else {
            return null;
        }
@@ -411,7 +429,7 @@ const Analyzer = struct {
            unreachable;
        };

        print("Index: {}\n", .{group_index});
        logln(.sema, .@"switch", "Index: {}\n", .{group_index});

        const true_switch_case_node = analyzer.getScopeNode(scope_index, switch_case_node_list[group_index]);
        var result = Value{
@@ -448,7 +466,7 @@ const Analyzer = struct {
            },
            false => {
                // const id = analyzer.tokenIdentifier(.token);
                // print("id: {s}\n", .{id});
                // logln("id: {s}\n", .{id});
                // const left = try analyzer.expression(scope_index, ExpectType.none, statement_node.left);

                // if (analyzer.module.values.get(left).isComptime() and analyzer.module.values.get(right).isComptime()) {
@@ -518,18 +536,9 @@ const Analyzer = struct {

    fn doIdentifier(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_token: Token.Index, node_scope_index: Scope.Index) !Value.Index {
        const identifier = analyzer.tokenIdentifier(node_scope_index, node_token);
        print("Referencing identifier: \"{s}\"\n", .{identifier});
        logln(.sema, .identifier, "Referencing identifier: \"{s}\"\n", .{identifier});
        const identifier_hash = try analyzer.processIdentifier(identifier);

        if (equal(u8, identifier, "print")) {
            print("WTF\n", .{});
        }

        if (equal(u8, identifier, "windows")) {
            print("WTF\n", .{});
            unreachable;
        }

        if (analyzer.lookupDeclarationInCurrentAndParentScopes(scope_index, identifier_hash)) |lookup| {
            const declaration_index = lookup.declaration;
            const declaration = analyzer.module.declarations.get(declaration_index);
@@ -538,7 +547,7 @@ const Analyzer = struct {
            const typecheck_result = switch (declaration.init_value.invalid) {
                false => blk: {
                    const init_value = analyzer.module.values.get(declaration.init_value);
                    print("Declaration found: {}\n", .{init_value});
                    logln(.sema, .identifier, "Declaration found: {}\n", .{init_value});
                    const is_unresolved = init_value.* == .unresolved;
                    switch (is_unresolved) {
                        true => {
@@ -554,8 +563,8 @@ const Analyzer = struct {
                        false => {},
                    }

                    print("Declaration resolved as: {}\n", .{init_value});
                    print("Declaration mutability: {s}. Is comptime: {}\n", .{ @tagName(declaration.mutability), init_value.isComptime() });
                    logln(.sema, .identifier, "Declaration resolved as: {}\n", .{init_value});
                    logln(.sema, .identifier, "Declaration mutability: {s}. Is comptime: {}\n", .{ @tagName(declaration.mutability), init_value.isComptime() });

                    const typecheck_result = try analyzer.typeCheck(expect_type, declaration.type);

@@ -613,7 +622,7 @@ const Analyzer = struct {
            };
        } else {
            const scope = analyzer.module.scopes.get(scope_index);
            std.debug.panic("Identifier \"{s}\" not found in scope #{} of file #{} referenced by scope #{} of file #{}: {s}", .{ identifier, scope_index.uniqueInteger(), scope.file.uniqueInteger(), node_scope_index.uniqueInteger(), analyzer.module.scopes.get(node_scope_index).file.uniqueInteger(), tokenBytes(analyzer.getScopeToken(scope_index, node_token), analyzer.getScopeSourceFile(scope_index)) });
            panic("Identifier \"{s}\" not found in scope #{} of file #{} referenced by scope #{} of file #{}: {s}", .{ identifier, scope_index.uniqueInteger(), scope.file.uniqueInteger(), node_scope_index.uniqueInteger(), analyzer.module.scopes.get(node_scope_index).file.uniqueInteger(), tokenBytes(analyzer.getScopeToken(scope_index, node_token), analyzer.getScopeSourceFile(scope_index)) });
        }
    }

@@ -641,7 +650,7 @@ const Analyzer = struct {

    fn resolveNode(analyzer: *Analyzer, value: *Value, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) anyerror!void {
        const node = analyzer.getScopeNode(scope_index, node_index);
        print("Resolving node #{} in scope #{} from file #{}: {}\n", .{ node_index.uniqueInteger(), scope_index.uniqueInteger(), analyzer.module.scopes.get(scope_index).file.uniqueInteger(), node });
        logln(.sema, .node, "Resolving node #{} in scope #{} from file #{}: {}\n", .{ node_index.uniqueInteger(), scope_index.uniqueInteger(), analyzer.module.scopes.get(scope_index).file.uniqueInteger(), node });

        assert(value.* == .unresolved);

@@ -669,8 +678,8 @@ const Analyzer = struct {
            },
            .compiler_intrinsic_one, .compiler_intrinsic_two, .compiler_intrinsic => blk: {
                const intrinsic_name = analyzer.tokenIdentifier(scope_index, node.token + 1);
                print("Intrinsic: {s}\n", .{intrinsic_name});
                const intrinsic = data_structures.enumFromString(Intrinsic, intrinsic_name) orelse std.debug.panic("Unknown intrinsic: {s}\n", .{intrinsic_name});
                logln(.sema, .node, "Intrinsic: {s}\n", .{intrinsic_name});
                const intrinsic = data_structures.enumFromString(Intrinsic, intrinsic_name) orelse panic("Unknown intrinsic: {s}\n", .{intrinsic_name});
                switch (intrinsic) {
                    .import => {
                        assert(node.id == .compiler_intrinsic_one);
@@ -679,7 +688,7 @@ const Analyzer = struct {
                            .string_literal => {
                                const import_name = analyzer.tokenStringLiteral(scope_index, import_argument.token);
                                const import_file = try analyzer.module.importFile(analyzer.allocator, analyzer.current_file, import_name);
                                print("Importing \"{s}\"...\n", .{import_name});
                                logln(.sema, .node, "Importing \"{s}\"...\n", .{import_name});

                                const result = .{
                                    .type = switch (import_file.file.is_new) {
@@ -687,7 +696,7 @@ const Analyzer = struct {
                                        const new_file_index = import_file.file.index;
                                        try analyzer.module.generateAbstractSyntaxTreeForFile(analyzer.allocator, new_file_index);
                                        const analyze_result = try analyzeFile(value, analyzer.allocator, analyzer.module, new_file_index);
                                        print("Done analyzing {s}!\n", .{import_name});
                                        logln(.sema, .node, "Done analyzing {s}!\n", .{import_name});
                                        break :true_block analyze_result;
                                    },
                                    false => false_block: {
@@ -705,7 +714,7 @@ const Analyzer = struct {
                    },
                    .syscall => {
                        var argument_nodes = try analyzer.getArguments(scope_index, node_index);
                        print("Argument count: {}\n", .{argument_nodes.items.len});
                        logln(.sema, .node, "Argument count: {}\n", .{argument_nodes.items.len});
                        if (argument_nodes.items.len > 0 and argument_nodes.items.len <= 6 + 1) {
                            const argument_expect_type = .{
                                .flexible_integer = .{
@@ -741,7 +750,7 @@ const Analyzer = struct {
                        assert(node.id == .compiler_intrinsic_one);
                        const message_node = analyzer.getScopeNode(scope_index, node.left);
                        switch (message_node.id) {
                            .string_literal => std.debug.panic("error: {s}", .{analyzer.tokenStringLiteral(scope_index, message_node.token)}),
                            .string_literal => panic("error: {s}", .{analyzer.tokenStringLiteral(scope_index, message_node.token)}),
                            else => |t| @panic(@tagName(t)),
                        }
                        unreachable;
@@ -833,11 +842,12 @@ const Analyzer = struct {
                .call = try analyzer.processCall(scope_index, node_index),
            },
            .field_access => blk: {
                print("left alocation...\n", .{});
                logln(.sema, .node, "left alocation...\n", .{});
                const identifier = analyzer.tokenIdentifier(scope_index, node.right.value);
                print("Field access identifier for RHS: \"{s}\"\n", .{identifier});
                logln(.sema, .node, "Field access identifier for RHS: \"{s}\"\n", .{identifier});
                analyzer.debugNode(scope_index, node_index);
                const left_allocation = try analyzer.unresolvedAllocate(scope_index, ExpectType.none, node.left);

                switch (left_allocation.ptr.*) {
                    .type => |type_index| {
                        if (!type_index.invalid) {
@@ -852,13 +862,13 @@ const Analyzer = struct {
                            .declaration_reference => |declaration_reference| {
                                const declaration = analyzer.module.declarations.get(declaration_reference.value);
                                const declaration_name = analyzer.module.getName(declaration.name).?;
                                print("Decl ref: {s}\n", .{declaration_name});
                                print("TODO: maybe this should not be runtime", .{});
                                logln(.sema, .node, "Decl ref: {s}\n", .{declaration_name});
                                logln(.sema, .node, "TODO: maybe this should not be runtime", .{});
                                unreachable;
                            },
                            else => |t| @panic(@tagName(t)),
                        }
                        print("Right: {}\n", .{right_value});
                        logln(.sema, .node, "Right: {}\n", .{right_value});
                        // struct_scope.declarations.get(identifier);

                        unreachable;
@@ -877,7 +887,7 @@ const Analyzer = struct {
                        };
                        const enum_field = analyzer.module.enum_fields.get(result);
                        const enum_field_name = analyzer.module.getName(enum_field.name).?;
                        print("Enum field name resolution: {s}\n", .{enum_field_name});
                        logln(.sema, .node, "Enum field name resolution: {s}\n", .{enum_field_name});
                        break :blk .{
                            .enum_field = result,
                        };
@@ -886,7 +896,7 @@ const Analyzer = struct {
                    }
                    unreachable;
                } else {
                    std.debug.panic("Identifier \"{s}\" not found. Type empty", .{identifier});
                    panic("Identifier \"{s}\" not found. Type empty", .{identifier});
                }
            },
            .declaration_reference => |declaration_reference| {
@@ -929,7 +939,7 @@ const Analyzer = struct {
        for (field_node_list.items) |field_node_index| {
            const field_node = analyzer.getScopeNode(scope_index, field_node_index);
            const identifier = analyzer.tokenIdentifier(scope_index, field_node.token);
            print("Enum field: {s}\n", .{identifier});
            logln(.sema, .node, "Enum field: {s}\n", .{identifier});
            assert(field_node.left.invalid);

            const enum_hash_name = try analyzer.processIdentifier(identifier);
@@ -966,7 +976,7 @@ const Analyzer = struct {
        const node = analyzer.getScopeNode(scope_index, node_index);
        const source_file = analyzer.getScopeSourceFile(scope_index);
        const token = analyzer.getScopeToken(scope_index, node.token);
        print("Debugging node {s}:\n\n```\n{s}\n```\n", .{ @tagName(node.id), source_file[token.start..] });
        logln(.sema, .node, "Debugging node {s}:\n\n```\n{s}\n```\n", .{ @tagName(node.id), source_file[token.start..] });
    }

    fn processStringLiteral(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !u32 {
@@ -989,7 +999,7 @@ const Analyzer = struct {
                const token = analyzer.getScopeToken(scope_index, type_node.token);
                const source_file = analyzer.getScopeSourceFile(scope_index);
                const identifier = tokenBytes(token, source_file);
                print("Identifier: \"{s}\"\n", .{identifier});
                logln(.sema, .type, "Identifier: \"{s}\"", .{identifier});
                const resolved_value_index = try analyzer.doIdentifier(scope_index, ExpectType.type, type_node.token, scope_index);
                const resolved_value = analyzer.module.values.get(resolved_value_index);
                break :blk switch (resolved_value.*) {
@@ -1000,7 +1010,7 @@ const Analyzer = struct {
            .keyword_noreturn => Type.noreturn,
            inline .signed_integer_type, .unsigned_integer_type => |int_type_signedness| blk: {
                const bit_count: u16 = @intCast(type_node.left.value);
                print("Bit count: {}\n", .{bit_count});
                logln(.sema, .type, "Bit count: {}", .{bit_count});
                break :blk switch (bit_count) {
                    inline 8, 16, 32, 64 => |hardware_bit_count| Type.Integer.getIndex(.{
                        .bit_count = hardware_bit_count,
@@ -1053,7 +1063,7 @@ const Analyzer = struct {
            true => null,
            false => blk: {
                const argument_list_node = analyzer.getScopeNode(scope_index, arguments_node_index);
                // print("Function prototype argument list node: {}\n", .{function_prototype_node.left.uniqueInteger()});
                // logln("Function prototype argument list node: {}\n", .{function_prototype_node.left.uniqueInteger()});
                const argument_node_list = switch (argument_list_node.id) {
                    .node_list => analyzer.getScopeNodeList(scope_index, argument_list_node),
                    else => |t| @panic(@tagName(t)),
@@ -1248,7 +1258,7 @@ const Analyzer = struct {

        if (analyzer.lookupDeclarationInCurrentAndParentScopes(scope_index, identifier_index)) |lookup| {
            const declaration_name = analyzer.tokenIdentifier(lookup.scope, identifier_token);
            std.debug.panic("Existing name in lookup: {s}", .{declaration_name});
            panic("Existing name in lookup: {s}", .{declaration_name});
        }

        // Check if the symbol name is already occupied in the same scope
@@ -1287,7 +1297,7 @@ const Analyzer = struct {
        const expected_identifier_token_index = declaration_node.token + 1;
        const expected_identifier_token = analyzer.getScopeToken(scope_index, expected_identifier_token_index);
        if (expected_identifier_token.id != .identifier) {
            print("Error: found: {}", .{expected_identifier_token.id});
            logln(.sema, .symbol_declaration, "Error: found: {}", .{expected_identifier_token.id});
            @panic("Expected identifier");
        }
        // TODO: Check if it is a keyword
@@ -1483,7 +1493,7 @@ pub fn initialize(compilation: *Compilation, module: *Module, package: *Package,
        const value = module.values.get(decl.init_value);
        module.entry_point = switch (value.*) {
            .function => |function_index| function_index.uniqueInteger(),
            .unresolved => std.debug.panic("Unresolved declaration: {s}\n", .{declaration_name}),
            .unresolved => panic("Unresolved declaration: {s}\n", .{declaration_name}),
            else => |t| @panic(@tagName(t)),
        };
        break;

@@ -2,7 +2,6 @@ const std = @import("std");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const equal = std.mem.eql;
const log = std.log;

const data_structures = @import("../data_structures.zig");
const ArrayList = data_structures.ArrayList;
@@ -14,6 +13,8 @@ const Token = lexical_analyzer.Token;

const Compilation = @import("../Compilation.zig");
const File = Compilation.File;
const log = Compilation.log;
const logln = Compilation.logln;

pub const Result = struct {
    nodes: ArrayList(Node),
@@ -25,6 +26,21 @@ pub const Options = packed struct {
    is_comptime: bool,
};

pub const Logger = enum {
    token_errors,
    symbol_declaration,
    node_creation,
    main_node,
    container_members,
    block,
    assign,
    suffix,
    precedence,
    @"switch",

    pub var bitset = std.EnumSet(Logger).initEmpty();
};

// TODO: pack it to be more efficient
pub const Node = packed struct(u128) {
    token: u32,
@@ -153,7 +169,7 @@ const Analyzer = struct {
            const result = token_i;
            return result;
        } else {
            std.debug.print("Unexpected token {s} when expected {s}\n", .{ @tagName(token.id), @tagName(token_id) });
            logln(.parser, .token_errors, "Unexpected token {s} when expected {s}\n", .{ @tagName(token.id), @tagName(token_id) });
            return error.unexpected_token;
        }
    }
@@ -169,9 +185,9 @@ const Analyzer = struct {
        analyzer.token_i += 1;
        const declaration_name_token = try analyzer.expectToken(.identifier);
        const declaration_name = analyzer.bytes(declaration_name_token);
        std.debug.print("Starting parsing declaration \"{s}\"\n", .{declaration_name});
        logln(.parser, .symbol_declaration, "Starting parsing declaration \"{s}\"", .{declaration_name});

        std.debug.print("Current token: {}\n", .{analyzer.tokens[analyzer.token_i].id});
        logln(.parser, .symbol_declaration, "Current token: {}", .{analyzer.tokens[analyzer.token_i].id});

        const type_node_index = switch (analyzer.tokens[analyzer.token_i].id) {
            .colon => blk: {
@@ -199,37 +215,37 @@ const Analyzer = struct {
            .right = init_node_index,
        };

        std.debug.print("Adding declaration \"{s}\" with init node of type: {s}\n", .{ declaration_name, @tagName(init_node.id) });
        logln(.parser, .symbol_declaration, "Adding declaration \"{s}\" with init node of type: {s}", .{ declaration_name, @tagName(init_node.id) });
        // if (analyzer.token_i < analyzer.tokens.len) {
        //     const first_token = analyzer.tokens[first];
        //     const last_token = analyzer.tokens[analyzer.token_i];
        //     const declaration_source_start = first_token.start;
        //     const declaration_source_end = last_token.start;
        //
        //     std.debug.print("[ALL]\n", .{});
        //     std.debug.print("Source file ({} bytes) :\n```\n{s}\n```\n", .{ analyzer.source_file.len, analyzer.source_file });
        //     logln("[ALL]\n", .{});
        //     logln("Source file ({} bytes) :\n```\n{s}\n```\n", .{ analyzer.source_file.len, analyzer.source_file });
        //
        //     std.debug.print("[BEFORE]\n", .{});
        //     logln("[BEFORE]\n", .{});
        //
        //     std.debug.print("Tokens before the declaration: ", .{});
        //     logln("Tokens before the declaration: ", .{});
        //     for (analyzer.tokens[0..first]) |t| {
        //         std.debug.print("{s} ", .{@tagName(t.id)});
        //         logln("{s} ", .{@tagName(t.id)});
        //     }
        //     std.debug.print("\n", .{});
        //     std.debug.print("Source before the declaration:\n```\n{s}\n```\n", .{analyzer.source_file[0..analyzer.tokens[first].start]});
        //     std.debug.print("[DECLARATION]\n", .{});
        //     logln("\n", .{});
        //     logln("Source before the declaration:\n```\n{s}\n```\n", .{analyzer.source_file[0..analyzer.tokens[first].start]});
        //     logln("[DECLARATION]\n", .{});
        //
        //     std.debug.print("First token: {}\n", .{first_token});
        //     std.debug.print("Last token: {}\n", .{last_token});
        //     logln("First token: {}\n", .{first_token});
        //     logln("Last token: {}\n", .{last_token});
        //
        //     std.debug.print("Tokens including declaration ([{}-{}])", .{ first, analyzer.token_i });
        //     logln("Tokens including declaration ([{}-{}])", .{ first, analyzer.token_i });
        //     for (analyzer.tokens[first..][0 .. analyzer.token_i - first]) |t| {
        //         std.debug.print("{s} ", .{@tagName(t.id)});
        //         logln("{s} ", .{@tagName(t.id)});
        //     }
        //     std.debug.print("\n", .{});
        //     logln("\n", .{});
        //
        //     std.debug.print("Source for the declaration:\n```\n{s}\n```\n", .{analyzer.source_file[declaration_source_start..declaration_source_end]});
        //     std.debug.print("[AFTER]\n", .{});
        //     logln("Source for the declaration:\n```\n{s}\n```\n", .{analyzer.source_file[declaration_source_start..declaration_source_end]});
        //     logln("[AFTER]\n", .{});
        //
        //     // TODO
        //     // print("Tokens for file #{}\n", .{analyzer.
@ -245,7 +261,7 @@ const Analyzer = struct {
|
||||
|
||||
while (analyzer.token_i < analyzer.tokens.len) {
|
||||
const first = analyzer.token_i;
|
||||
std.debug.print("First token for container member: {s}\n", .{@tagName(analyzer.tokens[first].id)});
|
||||
logln(.parser, .container_members, "First token for container member: {s}", .{@tagName(analyzer.tokens[first].id)});
|
||||
const member_node_index: Node.Index = switch (analyzer.tokens[first].id) {
|
||||
.fixed_keyword_comptime => switch (analyzer.tokens[analyzer.token_i + 1].id) {
|
||||
.left_brace => blk: {
|
||||
@ -265,7 +281,7 @@ const Analyzer = struct {
|
||||
else => |t| @panic(@tagName(t)),
|
||||
};
|
||||
|
||||
std.debug.print("Container member {s}\n", .{@tagName(analyzer.nodes.items[member_node_index.unwrap()].id)});
|
||||
logln(.parser, .container_members, "Container member {s}", .{@tagName(analyzer.nodes.items[member_node_index.unwrap()].id)});
|
||||
|
||||
try analyzer.temporal_node_heap.append(analyzer.allocator, member_node_index);
|
||||
}
|
||||
@ -373,7 +389,7 @@ const Analyzer = struct {
|
||||
const type_expression = try analyzer.typeExpression();
|
||||
// const type_expression_node = analyzer.nodes.items[type_expression.unwrap()];
|
||||
// _ = type_expression_node;
|
||||
// std.debug.print("Type expression node: {}\n", .{type_expression_node});
|
||||
// logln("Type expression node: {}\n", .{type_expression_node});
|
||||
foo = true;
|
||||
|
||||
if (analyzer.tokens[analyzer.token_i].id == .comma) {
|
||||
@ -410,7 +426,7 @@ const Analyzer = struct {
|
||||
|
||||
while (analyzer.tokens[analyzer.token_i].id != .right_brace) {
|
||||
const first_statement_token = analyzer.tokens[analyzer.token_i];
|
||||
std.debug.print("First statement token: {s}\n", .{@tagName(first_statement_token.id)});
|
||||
logln(.parser, .block, "First statement token: {s}\n", .{@tagName(first_statement_token.id)});
|
||||
const statement_index = switch (first_statement_token.id) {
|
||||
.identifier => switch (analyzer.tokens[analyzer.token_i + 1].id) {
|
||||
.colon => {
|
||||
@ -428,7 +444,7 @@ const Analyzer = struct {
|
||||
};
|
||||
|
||||
const node = analyzer.nodes.items[statement_index.unwrap()];
|
||||
std.debug.print("Adding statement: {s}\n", .{@tagName(node.id)});
|
||||
logln(.parser, .block, "Adding statement: {s}\n", .{@tagName(node.id)});
|
||||
|
||||
try analyzer.temporal_node_heap.append(analyzer.allocator, statement_index);
|
||||
}
|
||||
@ -497,20 +513,20 @@ const Analyzer = struct {
}

fn switchExpression(analyzer: *Analyzer) anyerror!Node.Index {
std.debug.print("Parsing switch...\n", .{});
logln(.parser, .@"switch", "Parsing switch...\n", .{});
const switch_token = analyzer.token_i;
analyzer.token_i += 1;
_ = try analyzer.expectToken(.left_parenthesis);
const switch_expression = try analyzer.expression();
_ = try analyzer.expectToken(.right_parenthesis);
std.debug.print("Parsed switch expression...\n", .{});
logln(.parser, .@"switch", "Parsed switch expression...\n", .{});
_ = try analyzer.expectToken(.left_brace);

var list = Node.List{};

while (analyzer.tokens[analyzer.token_i].id != .right_brace) {
const case_token = analyzer.token_i;
std.debug.print("Parsing switch case...\n", .{});
logln(.parser, .@"switch", "Parsing switch case...\n", .{});
const case_node = switch (analyzer.tokens[case_token].id) {
.fixed_keyword_else => blk: {
analyzer.token_i += 1;
@ -621,7 +637,8 @@ const Analyzer = struct {
.left = expr,
.right = try analyzer.expression(),
};
std.debug.print("assign:\nleft: {}.\nright: {}\n", .{ node.left, node.right });

logln(.parser, .assign, "assign:\nleft: {}.\nright: {}\n", .{ node.left, node.right });
return try analyzer.addNode(node);
}

@ -679,14 +696,14 @@ const Analyzer = struct {
var result = try analyzer.prefixExpression();
if (!result.invalid) {
const prefix_node = analyzer.nodes.items[result.unwrap()];
std.debug.print("Prefix: {}\n", .{prefix_node.id});
logln(.parser, .precedence, "Prefix: {}\n", .{prefix_node.id});
}

var banned_precedence: i32 = -1;

while (analyzer.token_i < analyzer.tokens.len) {
const token = analyzer.tokens[analyzer.token_i];
// std.debug.print("Looping in expression precedence with token {}\n", .{token});
// logln("Looping in expression precedence with token {}\n", .{token});
const precedence: i32 = switch (token.id) {
.equal, .semicolon, .right_parenthesis, .right_brace, .comma, .period, .fixed_keyword_const, .fixed_keyword_var => -1,
.bang => switch (analyzer.tokens[analyzer.token_i + 1].id) {
@ -695,19 +712,19 @@ const Analyzer = struct {
},
else => |t| {
const start = token.start;
std.debug.print("Source file:\n```\n{s}\n```\n", .{analyzer.source_file[start..]});
logln(.parser, .precedence, "Source file:\n```\n{s}\n```\n", .{analyzer.source_file[start..]});
@panic(@tagName(t));
},
};
std.debug.print("Precedence: {} ({s}) (file #{})\n", .{ precedence, @tagName(token.id), analyzer.file_index.uniqueInteger() });
logln(.parser, .precedence, "Precedence: {} ({s}) (file #{})\n", .{ precedence, @tagName(token.id), analyzer.file_index.uniqueInteger() });

if (precedence < minimum_precedence) {
std.debug.print("Breaking for minimum_precedence\n", .{});
logln(.parser, .precedence, "Breaking for minimum_precedence\n", .{});
break;
}

if (precedence == banned_precedence) {
std.debug.print("Breaking for banned precedence\n", .{});
logln(.parser, .precedence, "Breaking for banned precedence\n", .{});
break;
}

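The conversions above are inconsistent about trailing newlines: the container_members call sites drop the "\n" from the format string, while the block, @"switch", assign, and precedence call sites keep it. Whichever newline policy logln actually implements, one of the two groups is off by a blank line. Assuming logln terminates the line itself, the intended pattern would be:

// Before: unconditional output, explicit trailing newline.
std.debug.print("Breaking for banned precedence\n", .{});
// After: gated behind the parser/precedence logger; the trailing "\n" is
// dropped because logln (by assumption) appends the newline itself.
logln(.parser, .precedence, "Breaking for banned precedence", .{});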
@ -747,7 +764,7 @@ const Analyzer = struct {

fn prefixExpression(analyzer: *Analyzer) !Node.Index {
const token = analyzer.token_i;
// std.debug.print("Prefix...\n", .{});
// logln("Prefix...\n", .{});
const node_id: Node.Id = switch (analyzer.tokens[token].id) {
else => |pref| {
_ = pref;
@ -792,10 +809,7 @@ const Analyzer = struct {
}),
// todo:?
.left_brace => try analyzer.block(.{ .is_comptime = false }),
else => |id| {
log.warn("By default, calling curlySuffixExpression with {s}", .{@tagName(id)});
unreachable;
},
else => |id| std.debug.panic("WARN: By default, calling curlySuffixExpression with {s}", .{@tagName(id)}),
};

return result;
@ -916,13 +930,13 @@ const Analyzer = struct {
var expression_list = ArrayList(Node.Index){};
while (analyzer.tokens[analyzer.token_i].id != .right_parenthesis) {
const current_token = analyzer.tokens[analyzer.token_i];
std.debug.print("Current token: {s}\n", .{@tagName(current_token.id)});
logln(.parser, .suffix, "Current token: {s}\n", .{@tagName(current_token.id)});
const parameter = try analyzer.expression();
try expression_list.append(analyzer.allocator, parameter);
const parameter_node = analyzer.nodes.items[parameter.unwrap()];
std.debug.print("Parameter node: {s}\n", .{@tagName(parameter_node.id)});
logln(.parser, .suffix, "Parameter node: {s}\n", .{@tagName(parameter_node.id)});
const next_token = analyzer.tokens[analyzer.token_i];
std.debug.print("next token: {s}\n", .{@tagName(next_token.id)});
logln(.parser, .suffix, "next token: {s}\n", .{@tagName(next_token.id)});
analyzer.token_i += @intFromBool(switch (next_token.id) {
.comma => true,
.colon, .right_brace, .right_bracket => unreachable,
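The analyzer.token_i += @intFromBool(...) line above is a branchless idiom: the cursor advances by exactly one token when the separator is a comma and stays put otherwise. A self-contained illustration (the test and its values are invented for this example):

const std = @import("std");

test "cursor advances only on comma" {
    var token_i: usize = 10;
    token_i += @intFromBool(true); // a comma: advance past it
    token_i += @intFromBool(false); // e.g. right_parenthesis: stay put
    try std.testing.expectEqual(@as(usize, 11), token_i);
}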
@ -988,7 +1002,7 @@ const Analyzer = struct {
.colon => unreachable,
else => blk: {
const identifier = analyzer.bytes(token_i);
// std.debug.print("identifier: {s}\n", .{identifier});
// logln("identifier: {s}\n", .{identifier});
analyzer.token_i += 1;
if (equal(u8, identifier, "_")) {
break :blk Node.Index.invalid;
@ -1122,7 +1136,7 @@ const Analyzer = struct {
const right_token = analyzer.token_i;
analyzer.token_i += 1;
const result: Node.Index = @bitCast(right_token);
std.debug.print("WARNING: rhs has node index {} but it's token #{}\n", .{ result, right_token });
logln(.parser, .suffix, "WARNING: rhs has node index {} but it's token #{}\n", .{ result, right_token });
break :blk result;
},
}),
@ -1135,13 +1149,10 @@ const Analyzer = struct {
fn addNode(analyzer: *Analyzer, node: Node) !Node.Index {
const index = analyzer.nodes.items.len;
try analyzer.nodes.append(analyzer.allocator, node);
std.debug.print("Adding node #{} (0x{x}) {s} to file #{}\n", .{ index, @intFromPtr(&analyzer.nodes.items[index]), @tagName(node.id), analyzer.file_index.uniqueInteger() });
logln(.parser, .node_creation, "Adding node #{} (0x{x}) {s} to file #{}\n", .{ index, @intFromPtr(&analyzer.nodes.items[index]), @tagName(node.id), analyzer.file_index.uniqueInteger() });
// if (node.id == .identifier) {
// std.debug.print("Node identifier: {s}\n", .{analyzer.bytes(node.token)});
// logln("Node identifier: {s}\n", .{analyzer.bytes(node.token)});
// }
if (node.id == .call) {
std.debug.print("Call two: {}\n", .{node});
}
return Node.Index{
.value = @intCast(index),
};
@ -1185,9 +1196,9 @@ pub fn analyze(allocator: Allocator, tokens: []const Token, source_file: []const
assert(node_index.value == 0);
assert(!node_index.invalid);

std.debug.print("Start Parsing file root members\n", .{});
logln(.parser, .main_node, "Start Parsing file root members\n", .{});
const members = try analyzer.containerMembers();
std.debug.print("End Parsing file root members\n", .{});
logln(.parser, .main_node, "End Parsing file root members\n", .{});

switch (members.len) {
0 => analyzer.nodes.items[0].id = .main_zero,
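Taken together, the call sites converted in this file enumerate the particular logs of the .parser scope: container_members, block, @"switch", assign, precedence, suffix, node_creation, and main_node. The enum itself is declared outside this diff; a sketch of the shape the calls imply, where the embedded EnumSet is an assumption consistent with how the -log flag toggles individual logs:

pub const Logger = enum {
    container_members,
    block,
    @"switch",
    assign,
    precedence,
    suffix,
    node_creation,
    main_node,

    // Assumption: one bit per particular log, flipped during -log parsing.
    pub var bitset = std.EnumSet(Logger).initEmpty();
};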
src/main.zig
@ -1,73 +1,12 @@
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const assert = std.debug.assert;
|
||||
const equal = std.mem.eql;
|
||||
|
||||
const Compilation = @import("Compilation.zig");
|
||||
|
||||
pub const seed = std.math.maxInt(u64);
|
||||
const default_src_file = "src/test/main.nat";
|
||||
pub const panic = Compilation.panic;
|
||||
|
||||
pub fn main() !void {
|
||||
const allocator = std.heap.page_allocator;
|
||||
const compilation_descriptor = try parseArguments(allocator);
|
||||
const compilation = try Compilation.init(allocator);
|
||||
|
||||
try compilation.compileModule(compilation_descriptor);
|
||||
}
|
||||
|
||||
const ArgumentParsingError = error{
|
||||
main_package_path_not_specified,
|
||||
};
|
||||
|
||||
fn parseArguments(allocator: Allocator) !Compilation.Module.Descriptor {
|
||||
const arguments = (try std.process.argsAlloc(allocator))[1..];
|
||||
|
||||
var maybe_executable_path: ?[]const u8 = null;
|
||||
var maybe_main_package_path: ?[]const u8 = null;
|
||||
var target_triplet: []const u8 = "x86_64-linux-gnu";
|
||||
|
||||
var i: usize = 0;
|
||||
while (i < arguments.len) : (i += 1) {
|
||||
const current_argument = arguments[i];
|
||||
if (equal(u8, current_argument, "-o")) {
|
||||
if (i <= arguments.len) {
|
||||
maybe_executable_path = arguments[i + 1];
|
||||
assert(maybe_executable_path.?.len != 0);
|
||||
i += 1;
|
||||
} else {
|
||||
unreachable;
|
||||
}
|
||||
} else if (equal(u8, current_argument, "-target")) {
|
||||
if (i <= arguments.len) {
|
||||
target_triplet = arguments[i + 1];
|
||||
i += 1;
|
||||
} else {
|
||||
unreachable;
|
||||
}
|
||||
} else {
|
||||
maybe_main_package_path = current_argument;
|
||||
}
|
||||
}
|
||||
|
||||
const main_package_path = maybe_main_package_path orelse return error.main_package_path_not_specified;
|
||||
|
||||
const executable_path = maybe_executable_path orelse blk: {
|
||||
const executable_name = std.fs.path.basename(main_package_path[0 .. main_package_path.len - "/main.nat".len]);
|
||||
assert(executable_name.len > 0);
|
||||
const result = try std.mem.concat(allocator, u8, &.{ "nat/", executable_name });
|
||||
break :blk result;
|
||||
};
|
||||
|
||||
const cross_target = try std.zig.CrossTarget.parse(.{ .arch_os_abi = target_triplet });
|
||||
const target = cross_target.toTarget();
|
||||
std.debug.print("Target: {}\n", .{target});
|
||||
|
||||
return .{
|
||||
.main_package_path = main_package_path,
|
||||
.executable_path = executable_path,
|
||||
.target = target,
|
||||
};
|
||||
try Compilation.init(allocator);
|
||||
}
|
||||
|
||||
test {
|
||||