diff --git a/bootstrap/Compilation.zig b/bootstrap/Compilation.zig index c63775c..3aa88d5 100644 --- a/bootstrap/Compilation.zig +++ b/bootstrap/Compilation.zig @@ -214,12 +214,12 @@ pub const ContainerInitialization = struct { pub const Enum = struct { scope: Scope.Index, fields: ArrayList(Enum.Field.Index) = .{}, - type: Type.Index, + backing_type: Type.Index, pub const Field = struct { name: u32, value: Value.Index, - parent: Enum.Index, + parent: Type.Index, pub const List = BlockList(@This()); pub const Index = Enum.Field.List.Index; @@ -816,7 +816,7 @@ pub const Value = union(enum) { .declaration_reference => |declaration_reference| declaration_reference.type, .string_literal => |string_literal| string_literal.type, .type => Type.type, - .enum_field => |enum_field_index| module.types.enums.get(module.types.enum_fields.get(enum_field_index).parent).type, + .enum_field => |enum_field_index| module.types.enum_fields.get(enum_field_index).parent, .function_definition => |function_index| module.types.function_definitions.get(function_index).prototype, .function_declaration => |function_index| module.types.function_declarations.get(function_index).prototype, .binary_operation => |binary_operation| module.values.binary_operations.get(binary_operation).type, @@ -1237,7 +1237,10 @@ pub fn compileModule(compilation: *Compilation, descriptor: Module.Descriptor) ! 
} } -fn generateAST() !void {} +pub const ContainerType = enum { + @"struct", + @"enum", +}; pub const Directory = struct { handle: std.fs.Dir, diff --git a/bootstrap/backend/c_transpiler.zig b/bootstrap/backend/c_transpiler.zig index b6ad7e3..7371899 100644 --- a/bootstrap/backend/c_transpiler.zig +++ b/bootstrap/backend/c_transpiler.zig @@ -35,6 +35,7 @@ pub const TranslationUnit = struct { function_set: AutoArrayHashMap(Compilation.Function.Index, []const u8) = .{}, slice_type_set: AutoArrayHashMap(Compilation.Type.Index, []const u8) = .{}, array_type_set: AutoArrayHashMap(Compilation.Type.Index, []const u8) = .{}, + enum_type_set: AutoArrayHashMap(Compilation.Type.Index, []const u8) = .{}, declaration_set: AutoArrayHashMap(Compilation.Declaration.Index, []const u8) = .{}, const SyscallBitset = std.StaticBitSet(6); @@ -76,15 +77,15 @@ pub const TranslationUnit = struct { } fn writeFunctionDefinition(unit: *TranslationUnit, module: *Module, allocator: Allocator, function_definition_index: Compilation.Function.Index) ![]const u8 { - const gop = try unit.function_set.getOrPut(allocator, function_definition_index); - - if (!gop.found_existing) { + if (unit.function_set.getIndex(function_definition_index)) |index| { + return unit.function_set.values()[index]; + } else { const function_definition = module.types.function_definitions.get(function_definition_index); const function_prototype_type = function_definition.prototype; const function_prototype = module.types.function_prototypes.get(module.types.array.get(function_prototype_type).function); const function_name = try unit.writeFunctionHeader(module, &unit.function_declarations, allocator, function_definition_index); - gop.value_ptr.* = function_name; + try unit.function_set.putNoClobber(allocator, function_definition_index, function_name); _ = try unit.writeFunctionHeader(module, &unit.function_definitions, allocator, function_definition_index); try unit.function_declarations.appendSlice(allocator, ";\n\n"); @@ 
-92,9 +93,9 @@ pub const TranslationUnit = struct { try unit.function_definitions.append(allocator, ' '); try unit.writeBlock(module, &unit.function_definitions, allocator, function_definition.body, function_prototype.return_type, 0); try unit.function_definitions.append(allocator, '\n'); - } - return gop.value_ptr.*; + return function_name; + } } fn writeDeclaration(unit: *TranslationUnit, module: *Module, list: *ArrayList(u8), allocator: Allocator, declaration_index: Compilation.Declaration.Index, indentation: usize) !void { @@ -324,9 +325,9 @@ pub const TranslationUnit = struct { } fn renderDeclarationName(unit: *TranslationUnit, module: *Module, allocator: Allocator, declaration_index: Compilation.Declaration.Index, mangle: bool) anyerror![]const u8 { - const gop = try unit.declaration_set.getOrPut(allocator, declaration_index); - - if (!gop.found_existing) { + if (unit.declaration_set.getIndex(declaration_index)) |index| { + return unit.declaration_set.values()[index]; + } else { const declaration = module.values.declarations.get(declaration_index); const base_declaration_name = module.getName(declaration.name).?; var list = ArrayList(u8){}; @@ -362,7 +363,7 @@ pub const TranslationUnit = struct { // TODO: enhance declaration name rendering with file scope name // const scope = declaration.scope; - gop.value_ptr.* = list.items; + try unit.declaration_set.putNoClobber(allocator, declaration_index, list.items); switch (declaration.scope_type) { .global => switch (module.types.array.get(declaration.type).*) { @@ -378,12 +379,9 @@ pub const TranslationUnit = struct { }, .local => {}, } + + return list.items; } - - assert(@intFromPtr(gop.value_ptr.*.ptr) != 0xaaaa_aaaa_aaaa_aaaa); - logln(.c, .g, "Rendering name: {s}", .{gop.value_ptr.*}); - - return gop.value_ptr.*; } fn writeFunctionPrototype(unit: *TranslationUnit, module: *Module, list: *ArrayList(u8), allocator: Allocator, function_prototype_index: Compilation.Function.Prototype.Index, name: []const u8) !void 
{ @@ -472,6 +470,10 @@ pub const TranslationUnit = struct { try list.appendSlice(allocator, name); }, .any => @panic("Internal compiler error: 'any' made it to the backend"), + .@"enum" => { + const name = try unit.cacheEnumType(module, allocator, type_index); + try list.appendSlice(allocator, name); + }, else => |t| @panic(@tagName(t)), } } @@ -557,56 +559,110 @@ pub const TranslationUnit = struct { fn cacheStructType(unit: *TranslationUnit, module: *Module, allocator: Allocator, type_index: Compilation.Type.Index) ![]const u8 { const t = module.types.array.get(type_index); assert(t.* == .@"struct"); - const gop = try unit.struct_type_set.getOrPut(allocator, type_index); - - if (!gop.found_existing) { - const struct_type = module.types.structs.get(t.@"struct"); + if (unit.struct_type_set.getIndex(type_index)) |index| { + return unit.struct_type_set.values()[index]; + } else { const type_name = try unit.renderTypeName(module, allocator, type_index); - gop.value_ptr.* = type_name; - // Forward declare the struct - { - try unit.type_forward_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_forward_declarations.appendSlice(allocator, type_name); - try unit.type_forward_declarations.append(allocator, ' '); - try unit.type_forward_declarations.appendSlice(allocator, type_name); - try unit.type_forward_declarations.appendSlice(allocator, ";\n"); - } + logln(.c, .g, "Registering struct {s}: #{}", .{ type_name, type_index.uniqueInteger() }); + try unit.struct_type_set.putNoClobber(allocator, type_index, type_name); + try unit.forwardDeclareContainerType(allocator, .@"struct", type_name); + + const struct_type = module.types.structs.get(t.@"struct"); // Actually declare the struct { - try unit.type_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_declarations.appendSlice(allocator, type_name); - try unit.type_declarations.appendSlice(allocator, " {\n"); + var list = ArrayList(u8){}; + try list.appendSlice(allocator, "typedef 
struct "); + try list.appendSlice(allocator, type_name); + try list.appendSlice(allocator, " {\n"); for (struct_type.fields.items) |struct_field_index| { - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); + try list.appendNTimes(allocator, ' ', margin_width); const struct_field = module.types.container_fields.get(struct_field_index); const struct_field_name = module.getName(struct_field.name).?; switch (struct_type.backing_type.invalid) { false => { - try unit.writeType(module, &unit.type_declarations, allocator, struct_type.backing_type); - try unit.type_declarations.append(allocator, ' '); - try unit.type_declarations.appendSlice(allocator, struct_field_name); - try unit.type_declarations.appendSlice(allocator, " : "); - try unit.type_declarations.writer(allocator).print("{}", .{module.types.array.get(struct_field.type).getBitSize()}); + try unit.writeType(module, &list, allocator, struct_type.backing_type); + try list.append(allocator, ' '); + try list.appendSlice(allocator, struct_field_name); + try list.appendSlice(allocator, " : "); + try list.writer(allocator).print("{}", .{module.types.array.get(struct_field.type).getBitSize()}); }, - true => try unit.writeCDeclaration(module, &unit.type_declarations, allocator, struct_field_name, struct_field.type), + true => try unit.writeCDeclaration(module, &list, allocator, struct_field_name, struct_field.type), } - try unit.type_declarations.appendSlice(allocator, ";\n"); + try list.appendSlice(allocator, ";\n"); } - try unit.type_declarations.appendSlice(allocator, "} "); - try unit.type_declarations.appendSlice(allocator, type_name); - try unit.type_declarations.appendSlice(allocator, ";\n\n"); + try list.appendSlice(allocator, "} "); + try list.appendSlice(allocator, type_name); + try list.appendSlice(allocator, ";\n\n"); + + try unit.type_declarations.appendSlice(allocator, list.items); } + + return type_name; } + } - assert(@intFromPtr(gop.value_ptr.*.ptr) != 0xaaaa_aaaa_aaaa_aaaa); + fn 
forwardDeclareContainerType(unit: *TranslationUnit, allocator: Allocator, container_type: Compilation.ContainerType, type_name: []const u8) !void { + try unit.type_forward_declarations.appendSlice(allocator, "typedef "); + try unit.type_forward_declarations.appendSlice(allocator, @tagName(container_type)); + try unit.type_forward_declarations.append(allocator, ' '); + try unit.type_forward_declarations.appendSlice(allocator, type_name); + try unit.type_forward_declarations.append(allocator, ' '); + try unit.type_forward_declarations.appendSlice(allocator, type_name); + try unit.type_forward_declarations.appendSlice(allocator, ";\n"); + } - return gop.value_ptr.* + fn cacheEnumType(unit: *TranslationUnit, module: *Module, allocator: Allocator, type_index: Compilation.Type.Index) ![]const u8 { + if (unit.enum_type_set.getIndex(type_index)) |index| { + return unit.enum_type_set.values()[index]; + } else { + const type_name = try unit.renderTypeName(module, allocator, type_index); + logln(.c, .g, "Registering enum {s}: #{}", .{ type_name, type_index.uniqueInteger() }); + try unit.enum_type_set.putNoClobber(allocator, type_index, type_name); + + try unit.forwardDeclareContainerType(allocator, .@"enum", type_name); + + const t = module.types.array.get(type_index); + const enum_type = module.types.enums.get(t.@"enum"); + + var list = ArrayList(u8){}; + + try list.appendSlice(allocator, "typedef enum "); + try list.appendSlice(allocator, type_name); + try list.appendSlice(allocator, " {\n"); + + for (enum_type.fields.items) |enum_field_index| { + try list.appendNTimes(allocator, ' ', margin_width); + + const enum_field = module.types.enum_fields.get(enum_field_index); + const enum_field_name = module.getName(enum_field.name).?; + try list.appendSlice(allocator, type_name); + try list.append(allocator, '_'); + try list.appendSlice(allocator, enum_field_name); + + try list.appendSlice(allocator, " = "); + + try unit.writeValue(module, &list, allocator,
Compilation.Type.Index.invalid, 0, .{ + .value_index = enum_field.value, + .type_index = Compilation.Type.usize, + }); + + try list.appendSlice(allocator, ",\n"); + } + + try list.appendSlice(allocator, "} "); + try list.appendSlice(allocator, type_name); + try list.appendSlice(allocator, ";\n\n"); + + try unit.type_declarations.appendSlice(allocator, list.items); + + return type_name; + } } fn cacheOptionalType(unit: *TranslationUnit, module: *Module, allocator: Allocator, type_index: Compilation.Type.Index) ![]const u8 { @@ -614,121 +670,125 @@ pub const TranslationUnit = struct { assert(optional_type.* == .optional); const optional = optional_type.optional; - const gop = try unit.optional_type_set.getOrPut(allocator, optional.element_type); - - if (!gop.found_existing) { - var type_name = ArrayList(u8){}; + if (unit.optional_type_set.getIndex(optional.element_type)) |index| { + return unit.optional_type_set.values()[index]; + } else { const optional_element_type = module.types.array.get(optional.element_type); switch (optional_element_type.*) { .pointer => { + var type_name = ArrayList(u8){}; try unit.writeType(module, &type_name, allocator, optional.element_type); + try unit.optional_type_set.putNoClobber(allocator, optional.element_type, type_name.items); + return type_name.items; }, else => { + var type_name = ArrayList(u8){}; try type_name.appendSlice(allocator, "Optional_"); try unit.writeType(module, &type_name, allocator, optional.element_type); + logln(.c, .g, "Registering optional {s}: #{}", .{ type_name.items, type_index.uniqueInteger() }); + try unit.optional_type_set.putNoClobber(allocator, optional.element_type, type_name.items); - if (!gop.found_existing) { - try unit.type_forward_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); - try unit.type_forward_declarations.append(allocator, ' '); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); 
- try unit.type_forward_declarations.appendSlice(allocator, ";\n"); + try unit.forwardDeclareContainerType(allocator, .@"struct", type_name.items); - try unit.type_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, " {\n"); + var list = ArrayList(u8){}; - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); - try unit.writeCDeclaration(module, &unit.type_declarations, allocator, "value", optional.element_type); - try unit.type_declarations.appendSlice(allocator, ";\n"); + try list.appendSlice(allocator, "typedef struct "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, " {\n"); - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); - try unit.writeCDeclaration(module, &unit.type_declarations, allocator, "is_null", Compilation.Type.boolean); - try unit.type_declarations.appendSlice(allocator, ";\n"); + try list.appendNTimes(allocator, ' ', margin_width); + try unit.writeCDeclaration(module, &list, allocator, "value", optional.element_type); + try list.appendSlice(allocator, ";\n"); - try unit.type_declarations.appendSlice(allocator, "} "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, ";\n\n"); - } + try list.appendNTimes(allocator, ' ', margin_width); + try unit.writeCDeclaration(module, &list, allocator, "is_null", Compilation.Type.boolean); + try list.appendSlice(allocator, ";\n"); + + try list.appendSlice(allocator, "} "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, ";\n\n"); + + try unit.type_declarations.appendSlice(allocator, list.items); + + return type_name.items; }, } - gop.value_ptr.* = type_name.items; } - - return gop.value_ptr.*; } fn cacheSliceType(unit: *TranslationUnit, module: *Module, allocator: Allocator, type_index: 
Compilation.Type.Index) ![]const u8 { const slice = module.types.array.get(type_index).slice; - const gop = try unit.slice_type_set.getOrPut(allocator, slice.element_type); - - if (!gop.found_existing) { + if (unit.slice_type_set.getIndex(slice.element_type)) |index| { + return unit.slice_type_set.values()[index]; + } else { var type_name = ArrayList(u8){}; try type_name.appendSlice(allocator, "Slice_"); try unit.writeType(module, &type_name, allocator, slice.element_type); - gop.value_ptr.* = type_name.items; + logln(.c, .g, "Registering slice {s}: #{}", .{ type_name.items, type_index.uniqueInteger() }); + try unit.slice_type_set.putNoClobber(allocator, slice.element_type, type_name.items); - try unit.type_forward_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); - try unit.type_forward_declarations.append(allocator, ' '); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); - try unit.type_forward_declarations.appendSlice(allocator, ";\n"); + try unit.forwardDeclareContainerType(allocator, .@"struct", type_name.items); - try unit.type_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, " {\n"); + var list = ArrayList(u8){}; - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); - try unit.writeType(module, &unit.type_declarations, allocator, slice.element_type); - try unit.type_declarations.appendSlice(allocator, "* ptr;\n"); + try list.appendSlice(allocator, "typedef struct "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, " {\n"); - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); - try unit.type_declarations.appendSlice(allocator, "usize len;\n"); + try list.appendNTimes(allocator, ' ', margin_width); + try unit.writeType(module, &list, allocator, 
slice.element_type); + try list.appendSlice(allocator, "* ptr;\n"); - try unit.type_declarations.appendSlice(allocator, "} "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, ";\n\n"); + try list.appendNTimes(allocator, ' ', margin_width); + try list.appendSlice(allocator, "usize len;\n"); + + try list.appendSlice(allocator, "} "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, ";\n\n"); + + try unit.type_declarations.appendSlice(allocator, list.items); + + return type_name.items; } - - return gop.value_ptr.*; } fn cacheArrayType(unit: *TranslationUnit, module: *Module, allocator: Allocator, type_index: Compilation.Type.Index) ![]const u8 { const array = module.types.array.get(type_index).array; - const gop = try unit.array_type_set.getOrPut(allocator, array.element_type); - - if (!gop.found_existing) { + if (unit.array_type_set.getIndex(array.element_type)) |index| { + return unit.array_type_set.values()[index]; + } else { var type_name = ArrayList(u8){}; try type_name.appendSlice(allocator, "Array_"); try unit.writeType(module, &type_name, allocator, array.element_type); try type_name.writer(allocator).print("_{}", .{array.element_count}); - gop.value_ptr.* = type_name.items; + logln(.c, .g, "Registering array {s}: #{}", .{ type_name.items, type_index.uniqueInteger() }); + try unit.array_type_set.putNoClobber(allocator, array.element_type, type_name.items); - try unit.type_forward_declarations.appendSlice(allocator, "typedef struct "); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); - try unit.type_forward_declarations.append(allocator, ' '); - try unit.type_forward_declarations.appendSlice(allocator, type_name.items); - try unit.type_forward_declarations.appendSlice(allocator, ";\n"); + try unit.forwardDeclareContainerType(allocator, .@"struct", type_name.items); - try unit.type_declarations.appendSlice(allocator, 
"typedef struct "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, " {\n"); + var list = ArrayList(u8){}; - try unit.type_declarations.appendNTimes(allocator, ' ', margin_width); - try unit.writeType(module, &unit.type_declarations, allocator, array.element_type); - try unit.type_declarations.appendSlice(allocator, " value\n"); + try list.appendSlice(allocator, "typedef struct "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, " {\n"); - try unit.type_declarations.writer(allocator).print("[{}];\n", .{array.element_count}); + try list.appendNTimes(allocator, ' ', margin_width); + try unit.writeType(module, &list, allocator, array.element_type); + try list.appendSlice(allocator, " value\n"); - try unit.type_declarations.appendSlice(allocator, "} "); - try unit.type_declarations.appendSlice(allocator, type_name.items); - try unit.type_declarations.appendSlice(allocator, ";\n\n"); + try list.writer(allocator).print("[{}];\n", .{array.element_count}); + + try list.appendSlice(allocator, "} "); + try list.appendSlice(allocator, type_name.items); + try list.appendSlice(allocator, ";\n\n"); + + try unit.type_declarations.appendSlice(allocator, list.items); + + return type_name.items; } - - return gop.value_ptr.*; } fn writeSyscall(unit: *TranslationUnit, module: *Module, list: *ArrayList(u8), allocator: Allocator, syscall_index: Compilation.Syscall.Index, function_return_type: Compilation.Type.Index, indentation: usize) !void { @@ -883,6 +943,7 @@ pub const TranslationUnit = struct { const type_index = arguments.type_index; _ = type_index; const value = module.values.array.get(value_index); + logln(.c, .g, "Generating C code for {s}", .{@tagName(value.*)}); switch (value.*) { .declaration => |declaration_index| { try unit.writeDeclaration(module, list, allocator, declaration_index, indentation); @@ -1135,24 +1196,35 @@ pub const TranslationUnit = struct { } }, 
.slice => { + try list.append(allocator, '('); switch (slice.range.end.invalid) { false => { - try list.append(allocator, '('); try unit.writeValue(module, list, allocator, function_return_type, indentation + 1, .{ .value_index = slice.range.end, .type_index = Compilation.Type.Index.invalid, }); - try list.appendSlice(allocator, ") - ("); - try unit.writeValue(module, list, allocator, function_return_type, indentation + 1, .{ - .value_index = slice.range.start, - .type_index = Compilation.Type.Index.invalid, - }); - try list.appendSlice(allocator, ")\n"); }, true => { - unreachable; + switch (sliceable_type.*) { + .slice => { + try list.append(allocator, '('); + try unit.writeValue(module, list, allocator, function_return_type, indentation + 1, .{ + .value_index = slice.sliceable, + .type_index = Compilation.Type.Index.invalid, + }); + try list.appendSlice(allocator, ").len"); + }, + else => |t| @panic(@tagName(t)), + } }, } + + try list.appendSlice(allocator, ") - ("); + try unit.writeValue(module, list, allocator, function_return_type, indentation + 1, .{ + .value_index = slice.range.start, + .type_index = Compilation.Type.Index.invalid, + }); + try list.appendSlice(allocator, ")\n"); }, else => |t| @panic(@tagName(t)), } @@ -1288,18 +1360,33 @@ pub const TranslationUnit = struct { .value_index = array_coerce_to_slice.value, .type_index = Compilation.Type.Index.invalid, }); - try list.appendSlice(allocator, ".value,\n"); + switch (module.values.array.get(array_coerce_to_slice.value).*) { + .string_literal => {}, + else => try list.appendSlice(allocator, ".value"), + } + try list.appendSlice(allocator, ",\n"); try list.appendNTimes(allocator, ' ', indentation * margin_width); const array_value = module.values.array.get(array_coerce_to_slice.value); const array_type = module.types.array.get(array_value.getType(module)); const array_length = switch (array_type.*) { .array => |array| array.element_count, + .pointer => |pointer| switch 
(module.types.array.get(pointer.element_type).*) { + .array => |array| array.element_count, + else => |t| @panic(@tagName(t)), + }, else => |t| @panic(@tagName(t)), }; try list.writer(allocator).print(".len = {},\n", .{array_length}); try list.appendNTimes(allocator, ' ', indentation * margin_width); try list.append(allocator, '}'); }, + .enum_field => |enum_field_index| { + const enum_field = module.types.enum_fields.get(enum_field_index); + try unit.writeType(module, list, allocator, enum_field.parent); + try list.append(allocator, '_'); + const enum_field_name = module.getName(enum_field.name).?; + try list.appendSlice(allocator, enum_field_name); + }, else => |t| @panic(@tagName(t)), } } diff --git a/bootstrap/frontend/semantic_analyzer.zig b/bootstrap/frontend/semantic_analyzer.zig index 3f86ea7..40854ec 100644 --- a/bootstrap/frontend/semantic_analyzer.zig +++ b/bootstrap/frontend/semantic_analyzer.zig @@ -47,7 +47,7 @@ pub const Logger = enum { address_of, pub var bitset = std.EnumSet(Logger).initMany(&.{ - // .type, + .type, .identifier, // .symbol_declaration, // .scope_node, @@ -196,6 +196,8 @@ const Analyzer = struct { .token = block_node.token, }); + logln(.sema, .type, "Creating block scope #{}. 
Parent: #{}", .{ scope_index.uniqueInteger(), parent_scope_index.uniqueInteger() }); + const block_index = try analyzer.module.values.blocks.append(analyzer.allocator, .{ .statements = ArrayList(Value.Index){}, .reaches_end = true, @@ -356,6 +358,13 @@ const Analyzer = struct { }); break :blk value_index; }, + .simple_while => blk: { + const loop_index = try analyzer.whileLoop(scope_index, expect_type, statement_node_index); + const value_index = try analyzer.module.values.array.append(analyzer.allocator, .{ + .loop = loop_index, + }); + break :blk value_index; + }, else => |t| @panic(@tagName(t)), }; @@ -474,7 +483,20 @@ const Analyzer = struct { break :blk address_of_index; }, }, - else => method_object, + else => switch (analyzer.module.types.array.get(method_object_type).*) { + .pointer => blk: { + const unary_index = try analyzer.module.values.unary_operations.append(analyzer.allocator, .{ + .id = .pointer_dereference, + .value = method_object, + .type = first_argument_type, + }); + const pointer_dereference_index = try analyzer.module.values.array.append(analyzer.allocator, .{ + .unary_operation = unary_index, + }); + break :blk pointer_dereference_index; + }, + else => method_object, + }, }; argument_array.appendAssumeCapacity(method_object_argument); @@ -533,9 +555,21 @@ const Analyzer = struct { const result = try analyzer.typeCheck(ExpectType{ .type_index = argument_declaration.type, }, call_site_type); - assert(result == .success); - argument_array.appendAssumeCapacity(call_argument_value_index); + argument_array.appendAssumeCapacity(switch (result) { + .array_coerce_to_slice => blk: { + const array_coerce_to_slice = try analyzer.module.values.casts.append(analyzer.allocator, .{ + .value = call_argument_value_index, + .type = argument_declaration.type, + }); + const coertion_value = try analyzer.module.values.array.append(analyzer.allocator, .{ + .array_coerce_to_slice = array_coerce_to_slice, + }); + break :blk coertion_value; + }, + else => |t| 
@panic(@tagName(t)), + .success => call_argument_value_index, + }); } } else { panic("{s} call has argument count mismatch: call has {}, function declaration has {}", .{ switch (method_object.invalid) { @@ -550,7 +584,6 @@ const Analyzer = struct { fn processCall(analyzer: *Analyzer, scope_index: Scope.Index, node_index: Node.Index) !Call.Index { const node = analyzer.getScopeNode(scope_index, node_index); - logln(.sema, .call, "Node index: {}. Left index: {}", .{ node_index.uniqueInteger(), node.left.uniqueInteger() }); assert(!node.left.invalid); var is_field_access = false; @@ -703,7 +736,8 @@ const Analyzer = struct { switch (analyzer.module.values.array.get(switch_expression_value_index).*) { .enum_field => |e_field_index| { const e_field = analyzer.module.types.enum_fields.get(e_field_index); - const enum_type = analyzer.module.types.enums.get(e_field.parent); + const enum_type_general = analyzer.module.types.array.get(e_field.parent); + const enum_type = analyzer.module.types.enums.get(enum_type_general.@"enum"); const enum_field_name = analyzer.module.getName(e_field.name); _ = enum_field_name; @@ -807,7 +841,10 @@ const Analyzer = struct { }; const range_start_index = try analyzer.unresolvedAllocate(scope_index, expect_type, range_node.left); - const range_end_index = try analyzer.unresolvedAllocate(scope_index, expect_type, range_node.right); + const range_end_index = switch (range_node.right.invalid) { + true => Value.Index.invalid, + false => try analyzer.unresolvedAllocate(scope_index, expect_type, range_node.right), + }; return Range{ .start = range_start_index, @@ -825,6 +862,30 @@ const Analyzer = struct { return maybe_payload_name; } + fn whileLoop(analyzer: *Analyzer, parent_scope_index: Scope.Index, expect_type: ExpectType, while_node_index: Node.Index) !Loop.Index { + _ = expect_type; + const while_loop_node = analyzer.getScopeNode(parent_scope_index, while_node_index); + assert(while_loop_node.id == .simple_while); + // TODO: complete + const 
scope_index = parent_scope_index; + const condition_index = try analyzer.unresolvedAllocate(scope_index, ExpectType.boolean, while_loop_node.left); + const body_index = try analyzer.unresolvedAllocate(scope_index, ExpectType.boolean, while_loop_node.right); + const reaches_end = switch (analyzer.module.values.array.get(body_index).*) { + .block => |block_index| analyzer.module.values.blocks.get(block_index).reaches_end, + else => |t| @panic(@tagName(t)), + }; + + const loop_index = try analyzer.module.values.loops.append(analyzer.allocator, .{ + .pre = Value.Index.invalid, + .condition = condition_index, + .body = body_index, + .post = Value.Index.invalid, + .reaches_end = reaches_end, + }); + + return loop_index; + } + fn forLoop(analyzer: *Analyzer, parent_scope_index: Scope.Index, expect_type: ExpectType, for_node_index: Node.Index) !Loop.Index { const for_loop_node = analyzer.getScopeNode(parent_scope_index, for_node_index); assert(for_loop_node.id == .for_loop); @@ -835,6 +896,8 @@ const Analyzer = struct { .parent = parent_scope_index, }); + logln(.sema, .type, "Creating for loop scope #{}. 
Parent: #{}", .{ scope_index.uniqueInteger(), parent_scope_index.uniqueInteger() }); + const for_condition_node = analyzer.getScopeNode(scope_index, for_loop_node.left); assert(for_condition_node.id == .for_condition); @@ -866,7 +929,7 @@ const Analyzer = struct { .type = Type.boolean, .left = try analyzer.doIdentifierString(scope_index, ExpectType{ .type_index = Type.usize, - }, payload_name), + }, payload_name, scope_index), .right = for_range.end, }); @@ -1242,11 +1305,15 @@ const Analyzer = struct { return null; } - fn doIdentifierString(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, identifier: []const u8) !Value.Index { - logln(.sema, .identifier, "Referencing identifier: \"{s}\"", .{identifier}); + fn doIdentifierString(analyzer: *Analyzer, from_scope_index: Scope.Index, expect_type: ExpectType, identifier: []const u8, in_scope_index: Scope.Index) !Value.Index { + logln(.sema, .identifier, "Referencing identifier: \"{s}\" from scope #{} in scope #{}", .{ identifier, from_scope_index.uniqueInteger(), in_scope_index.uniqueInteger() }); const identifier_hash = try analyzer.processIdentifier(identifier); - if (analyzer.lookupDeclarationInCurrentAndParentScopes(scope_index, identifier_hash)) |lookup| { + // if (equal(u8, identifier, "write")) { + // @breakpoint(); + // } + + if (analyzer.lookupDeclarationInCurrentAndParentScopes(from_scope_index, identifier_hash)) |lookup| { const declaration_index = lookup.declaration; const declaration = analyzer.module.values.declarations.get(declaration_index); @@ -1286,9 +1353,9 @@ const Analyzer = struct { // logln(.sema, .identifier, "Declaration mutability: {s}. 
Is comptime: {}", .{ @tagName(declaration.mutability), init_value.isComptime(analyzer.module) }); assert(!declaration.type.invalid); - logln(.sema, .identifier, "About to typecheck identifier: \"{s}\"", .{identifier}); + // logln(.sema, .identifier, "About to typecheck identifier: \"{s}\"", .{identifier}); const typecheck_result = try analyzer.typeCheck(expect_type, declaration.type); - logln(.sema, .identifier, "Done typecheck identifier: \"{s}\"", .{identifier}); + // logln(.sema, .identifier, "Done typecheck identifier: \"{s}\"", .{identifier}); assert(!declaration.type.eq(pointer_to_any_type)); assert(!declaration.type.eq(optional_pointer_to_any_type)); @@ -1388,13 +1455,31 @@ const Analyzer = struct { }, }; } else { - panic("Identifier \"{s}\" not found in scope", .{identifier}); + logln(.sema, .type, "Identifier \"{s}\" not found as a declaration from scope #{} referenced in scope #{}", .{ identifier, from_scope_index.uniqueInteger(), in_scope_index.uniqueInteger() }); + const from_scope = analyzer.module.values.scopes.get(from_scope_index); + const scope_type = analyzer.module.types.array.get(from_scope.type); + switch (scope_type.*) { + .@"struct" => |struct_index| { + const struct_type = analyzer.module.types.structs.get(struct_index); + for (struct_type.fields.items) |struct_field_index| { + const struct_field = analyzer.module.types.container_fields.get(struct_field_index); + if (struct_field.name == identifier_hash) { + unreachable; + } + } else { + unreachable; + } + }, + else => |t| @panic(@tagName(t)), + } + + unreachable; } } fn doIdentifier(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_token: Token.Index, node_scope_index: Scope.Index) !Value.Index { const identifier = analyzer.tokenIdentifier(node_scope_index, node_token); - return try analyzer.doIdentifierString(scope_index, expect_type, identifier); + return try analyzer.doIdentifierString(scope_index, expect_type, identifier, node_scope_index); } fn 
resolveInteger(analyzer: *Analyzer, scope_index: Scope.Index, value_index: Value.Index) usize { @@ -1450,6 +1535,8 @@ const Analyzer = struct { .token = node.token, }); + logln(.sema, .type, "Creating function scope #{}. Parent #{}", .{ function_scope_index.uniqueInteger(), scope_index.uniqueInteger() }); + const function_prototype_index = try analyzer.functionPrototype(function_scope_index, node.left); const function_prototype = analyzer.module.types.function_prototypes.get(function_prototype_index); assert(!function_prototype.attributes.@"extern"); @@ -1457,7 +1544,6 @@ const Analyzer = struct { const expected_type = ExpectType{ .type_index = analyzer.functionPrototypeReturnType(function_prototype_index), }; - logln(.sema, .fn_return_type, "Return type: #{}", .{expected_type.type_index.uniqueInteger()}); const function_body = try analyzer.block(function_scope_index, expected_type, node.right); const prototype_type_index = try analyzer.module.types.array.append(analyzer.allocator, .{ @@ -1552,10 +1638,15 @@ const Analyzer = struct { const right_value = analyzer.module.values.array.get(right_index); switch (right_value.*) { - .function_definition, .type, .enum_field => break :blk right_value.*, - .declaration_reference => break :blk right_value.*, + .function_definition, + .type, + .enum_field, + .declaration_reference, + .integer, + => break :blk right_value.*, else => |t| @panic(@tagName(t)), } + // logln(.sema, .node, "Right: {}", .{right_value}); // struct_scope.declarations.get(identifier); @@ -1571,8 +1662,26 @@ const Analyzer = struct { break enum_field_index; } } else { - @panic("No enum found"); + const right_index = try analyzer.doIdentifier(enum_type.scope, ExpectType.none, node.right.value, scope_index); + const right_value = analyzer.module.values.array.get(right_index); + + switch (right_value.*) { + .function_definition, + .type, + .enum_field, + .declaration_reference, + .integer, + => break :blk right_value.*, + else => |t| @panic(@tagName(t)), + } 
+ // + + logln(.sema, .node, "Right: {}", .{right_value}); + // struct_scope.declarations.get(identifier); + + unreachable; }; + const enum_field = analyzer.module.types.enum_fields.get(result); const enum_field_name = analyzer.module.getName(enum_field.name).?; logln(.sema, .node, "Enum field name resolution: {s}", .{enum_field_name}); @@ -1734,38 +1843,10 @@ const Analyzer = struct { .node_list => analyzer.getScopeNodeList(scope_index, list_node), else => |t| @panic(@tagName(t)), }; - - var field_list = try ArrayList(Enum.Field.Index).initCapacity(analyzer.allocator, field_node_list.items.len); - const enum_index = try analyzer.module.types.enums.addOne(analyzer.allocator); - const type_index = try analyzer.module.types.array.append(analyzer.allocator, .{ - .@"enum" = enum_index, - }); - - for (field_node_list.items) |field_node_index| { - const field_node = analyzer.getScopeNode(scope_index, field_node_index); - const identifier = analyzer.tokenIdentifier(scope_index, field_node.token); - logln(.sema, .node, "Enum field: {s}", .{identifier}); - assert(field_node.left.invalid); - - const enum_hash_name = try analyzer.processIdentifier(identifier); - - const enum_field_index = try analyzer.module.types.enum_fields.append(analyzer.allocator, .{ - .name = enum_hash_name, - .value = Value.Index.invalid, - .parent = enum_index, - }); - - field_list.appendAssumeCapacity(enum_field_index); - } - - analyzer.module.types.enums.get(enum_index).* = .{ - .scope = Scope.Index.invalid, - .fields = field_list, - .type = type_index, - }; - + const file = analyzer.module.values.scopes.get(scope_index).file; + const enum_type = try analyzer.processContainerType(value_index, scope_index, field_node_list.items, file, node_index, .@"enum"); break :blk .{ - .type = type_index, + .type = enum_type, }; }, .assign => try analyzer.processAssignment(scope_index, node_index), @@ -1806,7 +1887,7 @@ const Analyzer = struct { const left_node = analyzer.getScopeNode(scope_index, node.left); 
const nodes = analyzer.getScopeNodeList(scope_index, left_node); const scope = analyzer.module.values.scopes.get(scope_index); - const struct_type = try analyzer.structType(value_index, scope_index, nodes.items, scope.file, node_index); + const struct_type = try analyzer.processContainerType(value_index, scope_index, nodes.items, scope.file, node_index, .@"struct"); break :blk .{ .type = struct_type, }; @@ -1951,13 +2032,17 @@ const Analyzer = struct { .slice => |slice| slice.element_type, else => |t| @panic(@tagName(t)), }; - + const is_const = switch (analyzer.module.types.array.get(expression_to_slice_type).*) { + .pointer => |pointer| pointer.@"const", + .slice => |slice| slice.@"const", + else => |t| @panic(@tagName(t)), + }; const slice_index = try analyzer.module.values.slices.append(analyzer.allocator, .{ .sliceable = expression_to_slice_index, .range = try analyzer.range(scope_index, node.right), .type = try analyzer.getSliceType(.{ .element_type = element_type, - .@"const" = true, + .@"const" = is_const, }), }); @@ -2026,6 +2111,8 @@ const Analyzer = struct { for (struct_type.fields.items) |struct_field_index| { const struct_field = analyzer.module.types.container_fields.get(struct_field_index); + const struct_field_name = analyzer.module.getName(struct_field.name).?; + logln(.sema, .type, "struct field name in container literal: {s}", .{struct_field_name}); var value_index = Value.Index.invalid; @@ -2050,9 +2137,17 @@ const Analyzer = struct { if (value_index.invalid) { if (!struct_field.default_value.invalid) { - value_index = struct_field.default_value; + const default_value: Value.Index = switch (analyzer.module.values.array.get(struct_field.default_value).*) { + .unresolved => |unresolved| blk: { + try analyzer.resolveNode(struct_field.default_value, struct_type.scope, ExpectType{ + .type_index = struct_field.type, + }, unresolved.node_index); + break :blk (&struct_field.default_value).*; + }, + else => struct_field.default_value, + }; + value_index 
= default_value; } else { - const struct_field_name = analyzer.module.getName(struct_field.name).?; std.debug.panic("Field \"{s}\" forgotten in struct initialization", .{struct_field_name}); } } @@ -2159,10 +2254,10 @@ const Analyzer = struct { const type_node = analyzer.getScopeNode(scope_index, node_index); const type_index: Type.Index = switch (type_node.id) { .identifier => blk: { - const token = analyzer.getScopeToken(scope_index, type_node.token); - const source_file = analyzer.getScopeSourceFile(scope_index); - const identifier = tokenBytes(token, source_file); - logln(.sema, .type, "Identifier: \"{s}\"", .{identifier}); + // const token = analyzer.getScopeToken(scope_index, type_node.token); + // const source_file = analyzer.getScopeSourceFile(scope_index); + // const identifier = tokenBytes(token, source_file); + // logln(.sema, .type, "Identifier: \"{s}\"", .{identifier}); const resolved_value_index = try analyzer.doIdentifier(scope_index, ExpectType.type, type_node.token, scope_index); const resolved_value = analyzer.module.values.array.get(resolved_value_index); break :blk switch (resolved_value.*) { @@ -2173,7 +2268,6 @@ const Analyzer = struct { .keyword_noreturn => Type.noreturn, inline .signed_integer_type, .unsigned_integer_type => |int_type_signedness| blk: { const bit_count: u16 = @intCast(type_node.left.value); - logln(.sema, .type, "Bit count: {}", .{bit_count}); break :blk switch (bit_count) { inline 8, 16, 32, 64 => |hardware_bit_count| Type.Integer.getIndex(.{ .bit_count = hardware_bit_count, @@ -2250,12 +2344,19 @@ const Analyzer = struct { .@"const" = is_const, }); }, - .slice_type => blk: { + .slice_type, + .const_slice_type, + => blk: { const element_type = try resolveType(analyzer, .{ .scope_index = scope_index, .node_index = type_node.right, }); - const is_const = false; + + const is_const = switch (type_node.id) { + .slice_type => false, + .const_slice_type => true, + else => unreachable, + }; break :blk try analyzer.getSliceType(.{ 
.element_type = element_type, @@ -2443,7 +2544,7 @@ const Analyzer = struct { while (expression_iterator.next()) |expression_name| { const result = switch (before_expression.invalid) { - true => try analyzer.doIdentifierString(scope_index, ExpectType.type, expression_name), + true => try analyzer.doIdentifierString(scope_index, ExpectType.type, expression_name, scope_index), false => blk: { const expression_name_hash = try analyzer.processIdentifier(expression_name); switch (analyzer.module.values.array.get(before_expression).*) { @@ -2503,149 +2604,219 @@ const Analyzer = struct { return before_expression; } - fn structType(analyzer: *Analyzer, value_index: Value.Index, parent_scope_index: Scope.Index, struct_nodes: []const Node.Index, struct_file_index: File.Index, struct_node_index: Node.Index) !Type.Index { - const struct_node = analyzer.getFileNode(struct_file_index, struct_node_index); - if (struct_nodes.len > 0) { - const scope_index = try analyzer.module.values.scopes.append(analyzer.allocator, .{ - .parent = parent_scope_index, - .file = struct_file_index, - .token = struct_node.token, - }); - - const is_file = parent_scope_index.invalid; - const backing_type = blk: { - if (!is_file) { - if (analyzer.getScopeToken(parent_scope_index, struct_node.token + 1).id == .left_parenthesis) { - const backing_type_token = analyzer.getScopeToken(parent_scope_index, struct_node.token + 2); - const source_file = analyzer.getScopeSourceFile(parent_scope_index); - const token_bytes = tokenBytes(backing_type_token, source_file); - - break :blk switch (backing_type_token.id) { - .keyword_unsigned_integer => if (equal(u8, token_bytes, "u8")) Type.u8 else if (equal(u8, token_bytes, "u16")) Type.u16 else if (equal(u8, token_bytes, "u32")) Type.u32 else if (equal(u8, token_bytes, "u64")) Type.u64 else if (equal(u8, token_bytes, "usize")) Type.usize else unreachable, - else => |t| @panic(@tagName(t)), - }; - } - } - - break :blk Type.Index.invalid; - }; - - const struct_index = 
try analyzer.module.types.structs.append(analyzer.allocator, .{ - .scope = scope_index, - .backing_type = backing_type, - }); - - const struct_type_index = try analyzer.module.types.array.append(analyzer.allocator, .{ - .@"struct" = struct_index, - }); - - if (is_file) { - const file = analyzer.module.values.files.get(struct_file_index); - file.type = struct_type_index; - } - - analyzer.module.values.scopes.get(scope_index).type = struct_type_index; - analyzer.module.values.array.get(value_index).* = .{ - .type = struct_type_index, - }; - - if (!analyzer.current_declaration.invalid) { - const current_declaration = analyzer.module.values.declarations.get(analyzer.current_declaration); - assert(current_declaration.type.invalid); - current_declaration.type = Type.type; - } - - const count = blk: { - var result: struct { - fields: u32 = 0, - declarations: u32 = 0, - } = .{}; - for (struct_nodes) |member_index| { - const member = analyzer.getFileNode(struct_file_index, member_index); - const member_type = getContainerMemberType(member.id); - - switch (member_type) { - .declaration => result.declarations += 1, - .field => result.fields += 1, - } - } - break :blk result; - }; - - var declaration_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.declarations); - var field_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.fields); - - for (struct_nodes) |member_index| { - const member = analyzer.getFileNode(struct_file_index, member_index); - const member_type = getContainerMemberType(member.id); - const array_list = switch (member_type) { - .declaration => &declaration_nodes, - .field => &field_nodes, - }; - array_list.appendAssumeCapacity(member_index); - } - - for (declaration_nodes.items) |declaration_node_index| { - const declaration_node = analyzer.getFileNode(struct_file_index, declaration_node_index); - switch (declaration_node.id) { - .@"comptime" => {}, - .simple_symbol_declaration => _ = try 
analyzer.symbolDeclaration(scope_index, declaration_node_index, .global), - else => unreachable, - } - } - - // TODO: consider iterating over scope declarations instead? - for (declaration_nodes.items) |declaration_node_index| { - const declaration_node = analyzer.getFileNode(struct_file_index, declaration_node_index); - switch (declaration_node.id) { - .@"comptime" => _ = try analyzer.comptimeBlock(scope_index, declaration_node_index), - .simple_symbol_declaration => {}, - else => |t| @panic(@tagName(t)), - } - } - - analyzer.module.types.structs.get(struct_index).fields = try ArrayList(Compilation.ContainerField.Index).initCapacity(analyzer.allocator, field_nodes.items.len); - - if (field_nodes.items.len > 0) { - // This is done in order for the names inside fields not to collision with the declaration ones - const field_scope_index = try analyzer.module.values.scopes.append(analyzer.allocator, .{ - .token = analyzer.getScopeNode(scope_index, field_nodes.items[0]).token, - .file = struct_file_index, - .parent = scope_index, - }); - - for (field_nodes.items) |field_index| { - const field_node = analyzer.getFileNode(struct_file_index, field_index); - const identifier = analyzer.tokenIdentifier(field_scope_index, field_node.token); - const identifier_index = try analyzer.processIdentifier(identifier); - const type_index = try analyzer.resolveType(.{ - .scope_index = field_scope_index, - .node_index = field_node.left, - .allow_non_primitive_size = !backing_type.invalid, - }); - - const default_value = if (field_node.right.invalid) Value.Index.invalid else blk: { - const index = try analyzer.unresolvedAllocate(field_scope_index, ExpectType{ - .type_index = type_index, - }, field_node.right); - break :blk index; - }; - - const container_field_index = try analyzer.module.types.container_fields.append(analyzer.allocator, .{ - .name = identifier_index, - .type = type_index, - .default_value = default_value, - .parent = struct_type_index, - }); - - 
analyzer.module.types.structs.get(struct_index).fields.appendAssumeCapacity(container_field_index); - } - } - - return struct_type_index; - } else { - return Type.Index.invalid; + fn processContainerType(analyzer: *Analyzer, value_index: Value.Index, parent_scope_index: Scope.Index, container_nodes: []const Node.Index, file_index: File.Index, container_node_index: Node.Index, comptime container_type: Compilation.ContainerType) !Type.Index { + const container_node = analyzer.getFileNode(file_index, container_node_index); + switch (container_type) { + .@"struct" => assert(container_node.id == .struct_type), + .@"enum" => assert(container_node.id == .enum_type), } + const scope_index = try analyzer.module.values.scopes.append(analyzer.allocator, .{ + .parent = parent_scope_index, + .file = file_index, + .token = container_node.token, + }); + logln(.sema, .type, "Creating container scope #{}. Parent: #{}", .{ + scope_index.uniqueInteger(), switch (parent_scope_index.invalid) { + true => 0xffff_ffff, + false => parent_scope_index.uniqueInteger(), + }, + }); + const is_file = parent_scope_index.invalid; + const backing_type = blk: { + if (!is_file) { + if (analyzer.getScopeToken(parent_scope_index, container_node.token + 1).id == .left_parenthesis) { + const backing_type_token = analyzer.getScopeToken(parent_scope_index, container_node.token + 2); + const source_file = analyzer.getScopeSourceFile(parent_scope_index); + const token_bytes = tokenBytes(backing_type_token, source_file); + + break :blk switch (backing_type_token.id) { + .keyword_unsigned_integer => if (equal(u8, token_bytes, "u8")) Type.u8 else if (equal(u8, token_bytes, "u16")) Type.u16 else if (equal(u8, token_bytes, "u32")) Type.u32 else if (equal(u8, token_bytes, "u64")) Type.u64 else if (equal(u8, token_bytes, "usize")) Type.usize else unreachable, + else => |t| @panic(@tagName(t)), + }; + } + } + + break :blk Type.Index.invalid; + }; + + const container_descriptor = .{ + .scope = scope_index, + 
.backing_type = backing_type, + }; + const container_type_descriptor = switch (container_type) { + .@"struct" => blk: { + const struct_index = try analyzer.module.types.structs.append(analyzer.allocator, container_descriptor); + break :blk Type{ + .@"struct" = struct_index, + }; + }, + .@"enum" => blk: { + const enum_index = try analyzer.module.types.enums.append(analyzer.allocator, container_descriptor); + break :blk Type{ + .@"enum" = enum_index, + }; + }, + }; + + const container_type_index = try analyzer.module.types.array.append(analyzer.allocator, container_type_descriptor); + if (is_file) { + const file = analyzer.module.values.files.get(file_index); + file.type = container_type_index; + } + + analyzer.module.values.scopes.get(scope_index).type = container_type_index; + analyzer.module.values.array.get(value_index).* = .{ + .type = container_type_index, + }; + + if (!analyzer.current_declaration.invalid) { + const current_declaration = analyzer.module.values.declarations.get(analyzer.current_declaration); + assert(current_declaration.type.invalid); + current_declaration.type = Type.type; + } + + const count = blk: { + var result: struct { + fields: u32 = 0, + declarations: u32 = 0, + } = .{}; + + for (container_nodes) |member_index| { + const member = analyzer.getFileNode(file_index, member_index); + switch (container_type) { + .@"struct" => assert(member.id != .enum_field), + .@"enum" => assert(member.id != .container_field), + } + const member_type = getContainerMemberType(member.id); + + switch (member_type) { + .declaration => result.declarations += 1, + .field => result.fields += 1, + } + } + + break :blk result; + }; + + var declaration_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.declarations); + var field_nodes = try ArrayList(Node.Index).initCapacity(analyzer.allocator, count.fields); + + for (container_nodes) |member_index| { + const member = analyzer.getFileNode(file_index, member_index); + const member_type = 
getContainerMemberType(member.id); + const array_list = switch (member_type) { + .declaration => &declaration_nodes, + .field => &field_nodes, + }; + array_list.appendAssumeCapacity(member_index); + } + + for (declaration_nodes.items) |declaration_node_index| { + const declaration_node = analyzer.getFileNode(file_index, declaration_node_index); + switch (declaration_node.id) { + .@"comptime" => {}, + .simple_symbol_declaration => _ = try analyzer.symbolDeclaration(scope_index, declaration_node_index, .global), + else => unreachable, + } + } + + if (field_nodes.items.len > 0) { + // This is done in order for the names inside fields not to collision with the declaration ones + const field_scope_index = try analyzer.module.values.scopes.append(analyzer.allocator, .{ + .token = analyzer.getScopeNode(scope_index, field_nodes.items[0]).token, + .file = file_index, + .parent = scope_index, + }); + + logln(.sema, .type, "Creating container field scope #{}. Parent: #{}", .{ field_scope_index.uniqueInteger(), scope_index.uniqueInteger() }); + + switch (container_type) { + .@"struct" => { + { + const struct_type_general = analyzer.module.types.array.get(container_type_index); + const struct_type = analyzer.module.types.structs.get(struct_type_general.@"struct"); + struct_type.fields = try ArrayList(Compilation.ContainerField.Index).initCapacity(analyzer.allocator, field_nodes.items.len); + } + + for (field_nodes.items) |field_index| { + const field_node = analyzer.getFileNode(file_index, field_index); + const identifier = analyzer.tokenIdentifier(field_scope_index, field_node.token); + const file_path = analyzer.module.values.files.get(file_index).relative_path; + logln(.sema, .type, "Field node index for '{s}' in file {s}", .{ identifier, file_path }); + const identifier_index = try analyzer.processIdentifier(identifier); + const type_index = try analyzer.resolveType(.{ + .scope_index = field_scope_index, + .node_index = field_node.left, + .allow_non_primitive_size = 
!backing_type.invalid, + }); + + const default_value = if (field_node.right.invalid) Value.Index.invalid else try analyzer.module.values.array.append(analyzer.allocator, .{ + .unresolved = .{ + .node_index = field_node.right, + }, + }); + + const container_field_index = try analyzer.module.types.container_fields.append(analyzer.allocator, .{ + .name = identifier_index, + .type = type_index, + .default_value = default_value, + .parent = container_type_index, + }); + + { + const struct_type_general = analyzer.module.types.array.get(container_type_index); + const struct_type = analyzer.module.types.structs.get(struct_type_general.@"struct"); + struct_type.fields.appendAssumeCapacity(container_field_index); + } + } + }, + .@"enum" => { + { + const enum_type_general = analyzer.module.types.array.get(container_type_index); + const enum_type = analyzer.module.types.enums.get(enum_type_general.@"enum"); + enum_type.fields = try ArrayList(Compilation.Enum.Field.Index).initCapacity(analyzer.allocator, field_nodes.items.len); + } + + for (field_nodes.items) |field_node_index| { + const field_node = analyzer.getScopeNode(scope_index, field_node_index); + assert(field_node.id == .enum_field); + + const identifier = analyzer.tokenIdentifier(scope_index, field_node.token); + logln(.sema, .node, "Enum field: {s}", .{identifier}); + const enum_value = switch (field_node.left.invalid) { + false => try analyzer.unresolvedAllocate(scope_index, ExpectType{ + .type_index = Type.usize, + }, field_node.left), + true => Value.Index.invalid, + }; + + const enum_hash_name = try analyzer.processIdentifier(identifier); + + const enum_field_index = try analyzer.module.types.enum_fields.append(analyzer.allocator, .{ + .name = enum_hash_name, + .value = enum_value, + .parent = container_type_index, + }); + + const enum_type_general = analyzer.module.types.array.get(container_type_index); + const enum_type = analyzer.module.types.enums.get(enum_type_general.@"enum"); + 
enum_type.fields.appendAssumeCapacity(enum_field_index); + } + }, + } + } + + // TODO: consider iterating over scope declarations instead? + for (declaration_nodes.items) |declaration_node_index| { + const declaration_node = analyzer.getFileNode(file_index, declaration_node_index); + switch (declaration_node.id) { + .@"comptime" => _ = try analyzer.comptimeBlock(scope_index, declaration_node_index), + .simple_symbol_declaration => {}, + else => |t| @panic(@tagName(t)), + } + } + + return container_type_index; } fn declarationCommon(analyzer: *Analyzer, scope_index: Scope.Index, scope_type: ScopeType, mutability: Compilation.Mutability, name: []const u8, type_index: Type.Index, init_value: Value.Index, argument_index: ?u32) !Declaration.Index { @@ -2679,6 +2850,7 @@ const Analyzer = struct { assert(declaration_node.id == .simple_symbol_declaration); const expected_identifier_token_index = declaration_node.token + 1; const identifier = analyzer.tokenIdentifier(scope_index, expected_identifier_token_index); + logln(.sema, .type, "Analyzing '{s}' declaration in {s} scope #{}", .{ identifier, @tagName(scope_type), scope_index.uniqueInteger() }); const expect_type = switch (declaration_node.left.invalid) { false => switch (scope_type) { @@ -2740,9 +2912,12 @@ const Analyzer = struct { fn getContainerMemberType(member_id: Node.Id) MemberType { return switch (member_id) { - .@"comptime" => .declaration, - .simple_symbol_declaration => .declaration, - .container_field => .field, + .@"comptime", + .simple_symbol_declaration, + => .declaration, + .enum_field, + .container_field, + => .field, else => |t| @panic(@tagName(t)), }; } @@ -2824,29 +2999,46 @@ const Analyzer = struct { .type_index => |type_index| blk: { if (source.eq(type_index)) { unreachable; - } + } else { + const destination_type = analyzer.module.types.array.get(type_index); + const source_type = analyzer.module.types.array.get(source); - const destination_type = analyzer.module.types.array.get(type_index); - 
const source_type = analyzer.module.types.array.get(source); - break :blk switch (source_type.*) { - .integer => |integer| switch (destination_type.*) { - .optional => |optional| switch (analyzer.module.types.array.get(optional.element_type).*) { - .pointer => if (integer.bit_count == 64) .success else unreachable, - else => |t| @panic(@tagName(t)), - }, - .integer => .success, - .pointer => .success, - else => |t| @panic(@tagName(t)), - }, - .pointer => switch (destination_type.*) { - .optional => |destination_optional| switch (analyzer.module.types.array.get(destination_optional.element_type).*) { + break :blk switch (source_type.*) { + .integer => |integer| switch (destination_type.*) { + .optional => |optional| switch (analyzer.module.types.array.get(optional.element_type).*) { + .pointer => if (integer.bit_count == 64) .success else unreachable, + else => |t| @panic(@tagName(t)), + }, + .integer => .success, .pointer => .success, else => |t| @panic(@tagName(t)), }, + .pointer => switch (destination_type.*) { + .optional => |destination_optional| switch (analyzer.module.types.array.get(destination_optional.element_type).*) { + .pointer => .success, + else => |t| @panic(@tagName(t)), + }, + else => .success, + }, + .@"enum" => |enum_type_descriptor| switch (destination_type.*) { + .integer => |integer| { + _ = integer; + const enum_type = analyzer.module.types.enums.get(enum_type_descriptor); + if (!enum_type.backing_type.invalid) { + if (enum_type.backing_type.eq(type_index)) { + unreachable; + } else { + unreachable; + } + } else { + return .success; + } + }, + else => |t| @panic(@tagName(t)), + }, else => |t| @panic(@tagName(t)), - }, - else => |t| @panic(@tagName(t)), - }; + }; + } }, else => unreachable, }; @@ -3046,6 +3238,42 @@ const Analyzer = struct { }, else => |t| @panic(@tagName(t)), }, + .slice => |destination_slice| switch (source_type.*) { + .slice => |source_slice| { + if (destination_slice.@"const" or destination_slice.@"const" == 
source_slice.@"const") { + if (destination_slice.element_type.eq(source_slice.element_type)) { + return .success; + } else { + unreachable; + } + } else { + @panic("Const mismatch"); + } + }, + .pointer => |source_pointer| { + const source_pointer_element_type = analyzer.module.types.array.get(source_pointer.element_type); + switch (source_pointer_element_type.*) { + .array => |array| { + logln(.sema, .type, "Destination slice: {}", .{destination_slice}); + if (array.element_type.eq(Type.u8)) { + if (array.element_type.eq(destination_slice.element_type)) { + if (destination_slice.@"const") { + if (destination_slice.@"const" == source_pointer.@"const") { + if (source_pointer.many) { + return .array_coerce_to_slice; + } + } + } + } + } + }, + else => |t| @panic(@tagName(t)), + } + // + unreachable; + }, + else => |t| @panic(@tagName(t)), + }, else => |t| @panic(@tagName(t)), } }, @@ -3329,6 +3557,6 @@ pub fn analyzeFile(value_index: Value.Index, allocator: Allocator, module: *Modu const node = file.syntactic_analyzer_result.nodes.items[0]; const node_list_node = analyzer.getFileNode(file_index, node.left); const nodes = analyzer.getFileNodeList(file_index, node_list_node); - const result = try analyzer.structType(value_index, Scope.Index.invalid, nodes.items, file_index, .{ .value = 0 }); + const result = try analyzer.processContainerType(value_index, Scope.Index.invalid, nodes.items, file_index, .{ .value = 0 }, .@"struct"); return result; } diff --git a/bootstrap/frontend/syntactic_analyzer.zig b/bootstrap/frontend/syntactic_analyzer.zig index ab34821..f7884f7 100644 --- a/bootstrap/frontend/syntactic_analyzer.zig +++ b/bootstrap/frontend/syntactic_analyzer.zig @@ -115,6 +115,7 @@ pub const Node = struct { unsigned_integer_type, signed_integer_type, slice_type, + const_slice_type, array_type, argument_declaration, compiler_intrinsic, @@ -962,6 +963,7 @@ const Analyzer = struct { const next_token_index = analyzer.token_i + 1; if (next_token_index < 
analyzer.tokens.len) { const next_token_id = analyzer.tokens[next_token_index].id; + const next_to_next_token_id = analyzer.tokens[next_token_index + 1].id; break :blk switch (token.id) { .equal => switch (next_token_id) { .equal => .compare_equal, @@ -1002,8 +1004,12 @@ const Analyzer = struct { else => .divide, }, .less => switch (next_token_id) { - .less => .shift_left, - else => unreachable, + .less => switch (next_to_next_token_id) { + .equal => unreachable, + else => .shift_left, + }, + .equal => .compare_less_or_equal, + else => .compare_less_than, }, .greater => switch (next_token_id) { .greater => .shift_right, @@ -1284,10 +1290,14 @@ const Analyzer = struct { _ = try analyzer.expectToken(.right_bracket); - switch (length_expression.invalid) { - true => analyzer.token_i += @intFromBool(analyzer.tokens[analyzer.token_i].id == .fixed_keyword_const), - false => {}, - } + const is_const = switch (length_expression.invalid) { + true => blk: { + const is_constant = analyzer.tokens[analyzer.token_i].id == .fixed_keyword_const; + analyzer.token_i += @intFromBool(is_constant); + break :blk is_constant; + }, + false => false, + }; const type_expression = try analyzer.typeExpression(); const node = switch (length_expression.invalid) { @@ -1298,7 +1308,10 @@ const Analyzer = struct { .right = type_expression, }, true => Node{ // TODO: modifiers - .id = .slice_type, + .id = switch (is_const) { + true => .const_slice_type, + false => .slice_type, + }, .token = left_bracket, .left = Node.Index.invalid, .right = type_expression, @@ -1551,35 +1564,15 @@ const Analyzer = struct { }, .fixed_keyword_enum => blk: { analyzer.token_i += 1; + _ = try analyzer.expectToken(.left_brace); - - var enum_field_list = Node.List{}; - while (analyzer.tokens[analyzer.token_i].id != .right_brace) { - const enum_name_token = try analyzer.expectToken(.identifier); - const value_associated = switch (analyzer.tokens[analyzer.token_i].id) { - .comma => comma: { - analyzer.token_i += 1; - break 
:comma Node.Index.invalid; - }, - else => |t| @panic(@tagName(t)), - }; - - const enum_field_node = try analyzer.addNode(.{ - .id = .enum_field, - .token = enum_name_token, - .left = value_associated, - .right = Node.Index.invalid, - }); - - try enum_field_list.append(analyzer.allocator, enum_field_node); - } - - analyzer.token_i += 1; + const node_list = try analyzer.containerMembers(.@"enum"); + _ = try analyzer.expectToken(.right_brace); break :blk try analyzer.addNode(.{ .id = .enum_type, .token = token_i, - .left = try analyzer.nodeList(enum_field_list), + .left = try analyzer.nodeList(node_list), .right = Node.Index.invalid, }); }, @@ -1594,7 +1587,7 @@ const Analyzer = struct { } _ = try analyzer.expectToken(.left_brace); - const node_list = try analyzer.containerMembers(); + const node_list = try analyzer.containerMembers(.@"struct"); _ = try analyzer.expectToken(.right_brace); break :blk try analyzer.addNode(.{ @@ -1633,7 +1626,7 @@ const Analyzer = struct { if (analyzer.tokens[analyzer.token_i].id == .period and analyzer.token_i + 1 < analyzer.tokens.len and analyzer.tokens[analyzer.token_i + 1].id == .period) { analyzer.token_i += 2; const range_end_expression = switch (analyzer.tokens[analyzer.token_i].id) { - .right_bracket => unreachable, + .right_bracket => Node.Index.invalid, else => try analyzer.expression(), }; @@ -1736,11 +1729,12 @@ const Analyzer = struct { }); } - fn containerMembers(analyzer: *Analyzer) !ArrayList(Node.Index) { + fn containerMembers(analyzer: *Analyzer, comptime container_type: Compilation.ContainerType) !ArrayList(Node.Index) { var list = ArrayList(Node.Index){}; while (analyzer.token_i < analyzer.tokens.len and analyzer.tokens[analyzer.token_i].id != .right_brace) { const first = analyzer.token_i; logln(.parser, .container_members, "First token for container member: {s}", .{@tagName(analyzer.tokens[first].id)}); + const member_node_index: Node.Index = switch (analyzer.tokens[first].id) { .fixed_keyword_comptime => switch 
(analyzer.tokens[analyzer.token_i + 1].id) { .left_brace => blk: { @@ -1758,25 +1752,53 @@ }, .identifier => blk: { analyzer.token_i += 1; - _ = try analyzer.expectToken(.colon); + switch (container_type) { + .@"struct" => { + _ = try analyzer.expectToken(.colon); - const field_type = try analyzer.typeExpression(); + const field_type = try analyzer.typeExpression(); - const field_default_node = if (analyzer.tokens[analyzer.token_i].id == .equal) b: { - analyzer.token_i += 1; - break :b try analyzer.expression(); - } else Node.Index.invalid; + const field_default_node = if (analyzer.tokens[analyzer.token_i].id == .equal) b: { + analyzer.token_i += 1; + const default_index = try analyzer.expression(); + const default_node = analyzer.nodes.items[default_index.unwrap()]; + _ = default_node; + assert(default_node.id != .node_list); + break :b default_index; + } else Node.Index.invalid; - _ = try analyzer.expectToken(.comma); + _ = try analyzer.expectToken(.comma); - const field_node = try analyzer.addNode(.{ - .id = .container_field, - .token = first, - .left = field_type, - .right = field_default_node, - }); + const field_node = try analyzer.addNode(.{ + .id = .container_field, + .token = first, + .left = field_type, + .right = field_default_node, + }); - break :blk field_node; + break :blk field_node; + }, + .@"enum" => { + const value_associated = switch (analyzer.tokens[analyzer.token_i].id) { + .comma => Node.Index.invalid, + else => value: { + analyzer.token_i += 1; + break :value try analyzer.expression(); + }, + }; + + _ = try analyzer.expectToken(.comma); + + const enum_field_node = try analyzer.addNode(.{ + .id = .enum_field, + .token = first, + .left = value_associated, + .right = Node.Index.invalid, + }); + + break :blk enum_field_node; + }, + } }, .fixed_keyword_const, .fixed_keyword_var => try analyzer.symbolDeclaration(), else => |t| @panic(@tagName(t)), @@ -1808,7 +1830,7 @@ pub fn analyze(allocator: Allocator, tokens: []const Token,
source_file: []const .allocator = allocator, }; const node_index = try analyzer.addNode(.{ - .id = .main, + .id = .struct_type, .token = 0, .left = Node.Index.invalid, .right = Node.Index.invalid, @@ -1817,12 +1839,11 @@ pub fn analyze(allocator: Allocator, tokens: []const Token, source_file: []const assert(node_index.value == 0); assert(!node_index.invalid); - const members = try analyzer.containerMembers(); + const members = try analyzer.containerMembers(.@"struct"); assert(analyzer.token_i == analyzer.tokens.len); const node_list = try analyzer.nodeList(members); - analyzer.nodes.items[0].id = .main; analyzer.nodes.items[0].left = node_list; const end = std.time.Instant.now() catch unreachable; diff --git a/build.zig b/build.zig index 9968dd0..0991ac6 100644 --- a/build.zig +++ b/build.zig @@ -17,14 +17,16 @@ pub fn build(b: *std.Build) !void { exe.unwind_tables = false; exe.omit_frame_pointer = false; - b.installArtifact(exe); + const install_exe = b.addInstallArtifact(exe, .{}); + b.getInstallStep().dependOn(&install_exe.step); b.installDirectory(.{ .source_dir = std.Build.LazyPath.relative("lib"), .install_dir = .bin, .install_subdir = "lib", }); - const run_command = b.addRunArtifact(exe); + const compiler_exe_path = b.fmt("zig-out/bin/{s}", .{install_exe.dest_sub_path}); + const run_command = b.addSystemCommand(&.{compiler_exe_path}); run_command.step.dependOn(b.getInstallStep()); const debug_command = switch (@import("builtin").os.tag) { @@ -34,13 +36,12 @@ pub fn build(b: *std.Build) !void { result.addArg("-ex=r"); result.addArgs(&.{ "-ex", "up" }); result.addArg("--args"); - result.addArtifactArg(exe); break :blk result; }, .windows => blk: { const result = b.addSystemCommand(&.{"remedybg"}); result.addArg("-g"); - result.addArtifactArg(exe); + result.addArg(compiler_exe_path); break :blk result; }, @@ -48,11 +49,13 @@ pub fn build(b: *std.Build) !void { // not tested const result = b.addSystemCommand(&.{"lldb"}); result.addArg("--"); - 
result.addArtifactArg(exe); + result.addArg(compiler_exe_path); break :blk result; }, else => @compileError("OS not supported"), }; + debug_command.step.dependOn(b.getInstallStep()); + debug_command.addArg(compiler_exe_path); if (b.args) |args| { run_command.addArgs(args); diff --git a/ci.sh b/ci.sh index e4a5c75..c323f60 100755 --- a/ci.sh +++ b/ci.sh @@ -1,7 +1,6 @@ #!/usr/bin/env bash -echo "Testing Nativity with Zig" -echo "Compiling Nativity with Zig" +echo -e "\e[90mCompiling Nativity with Zig...\e[0m" nativity_use_llvm=true zig build -Duse_llvm=$nativity_use_llvm failed_test_count=0 @@ -51,10 +50,11 @@ do done printf "\n" -echo "[SUMMARY]" -echo "=========" +echo -e "\e[35m[SUMMARY]\e[0m" +echo -e "\e[35m=========\e[0m" echo -e "Ran $total_test_count compilations (\e[32m$passed_compilation_count\e[0m succeeded, \e[31m$failed_compilation_count\e[0m failed)." echo -e "Ran $ran_test_count tests (\e[32m $passed_test_count\e[0m passed, \e[31m$failed_test_count\e[0m failed)." +echo -e "\e[35m=========\e[0m" if [[ "$failed_compilation_count" != "0" ]]; then printf $"\nFailed compilations:\n" diff --git a/lib/std/os.nat b/lib/std/os.nat index 98ad789..014c85b 100644 --- a/lib/std/os.nat +++ b/lib/std/os.nat @@ -7,31 +7,6 @@ const system = switch (current) { .windows => windows, }; -const write = fn (file_descriptor: FileDescriptor, bytes_ptr: [&]const u8, bytes_len: usize) ssize { - switch (current) { - .linux => return #syscall(1, file_descriptor, #cast(bytes_ptr), bytes_len), - .macos => return macos.write(file_descriptor, #cast(bytes_ptr), bytes_len), - .windows => { - var written_bytes: u32 = 0; - if (windows.WriteFile(file_descriptor, bytes_ptr, bytes_len, written_bytes.&, false) != 0) { - return written_bytes; - } else { - unreachable; - } - }, - } -} - -const FileDescriptor = system.FileDescriptor; - -const print = fn(bytes_ptr: [&]const u8, bytes_len: usize) void { - const file_descriptor = switch (current) { - .linux, .macos => 2, - .windows => 
windows.GetStdHandle(windows.STD_OUTPUT_HANDLE), - }; - - _ = write(file_descriptor, bytes_ptr, bytes_len); -} const exit = fn(exit_code: s32) noreturn { switch (current) { @@ -106,12 +81,12 @@ const max_path_byte_count = switch (current) { else => #error("OS not supported"), }; -const currentExecutablePath = fn(allocator: &Allocator) ?[]u8 { +const current_executable_path = fn(allocator: &Allocator) ?[]u8 { switch (current) { .linux => { var buffer: [max_path_byte_count]u8 = undefined; if (readlink(file_path = "/proc/self/exe", buffer = buffer.&)) |bytes| { - if (allocator.duplicateBytes(bytes)) |result| { + if (allocator.duplicate_bytes(bytes)) |result| { return result; } else { return null; @@ -124,6 +99,42 @@ const currentExecutablePath = fn(allocator: &Allocator) ?[]u8 { } } +const FileDescriptor = struct{ + handle: system.FileDescriptor, + + const write = fn (file_descriptor: FileDescriptor, bytes: []const u8) ?usize { + switch (current) { + .linux => { + const raw_result = #syscall(1, file_descriptor.handle, #cast(bytes.ptr), bytes.len); + if (linux.unwrapSyscall(syscall_result = raw_result)) |byte_count| { + return byte_count; + } else { + return null; + } + }, + else => #error("OS not supported"), + } + } +}; + +const StdFileDescriptor = enum { + stdin = 0, + stdout = 1, + stderr = 2, + + const get = fn(descriptor: StdFileDescriptor) FileDescriptor{ + switch (current) { + .linux, .macos => { + return FileDescriptor{ + .handle = #cast(descriptor), + }; + }, + else => #error("OS not supported"), + } + } +}; + + const linux = #import("os/linux.nat"); const macos = #import("os/macos.nat"); const windows = #import("os/windows.nat"); diff --git a/lib/std/os/linux.nat b/lib/std/os/linux.nat index 2bcf15f..946aa66 100644 --- a/lib/std/os/linux.nat +++ b/lib/std/os/linux.nat @@ -1,5 +1,9 @@ const std = #import("std"); +const stdin: FileDescriptor = 0; +const stdout: FileDescriptor = 1; +const stderr: FileDescriptor = 2; + const FileDescriptor = s32; const 
ProtectionFlags = struct(u32) { diff --git a/lib/std/std.nat b/lib/std/std.nat index 270db68..f4c52d6 100644 --- a/lib/std/std.nat +++ b/lib/std/std.nat @@ -4,7 +4,6 @@ comptime { const builtin = #import("builtin.nat"); const os = #import("os.nat"); -const print = os.print; const start = #import("start.nat"); const assert = fn(ok: bool) void { @@ -13,6 +12,14 @@ const assert = fn(ok: bool) void { } } +const print = fn(bytes: []const u8) void { + const file_descriptor = os.StdFileDescriptor.get(descriptor = .stdout); + const file_writer = FileWriter{ + .descriptor = file_descriptor, + }; + _ = file_writer.writeAll(bytes); +} + const Allocator = struct { handler: &const fn(allocator: &Allocator, old_ptr: ?[&]const u8, old_size: usize, new_size: usize, alignment: u16) ?[&]u8, @@ -32,9 +39,9 @@ const Allocator = struct { } } - const duplicateBytes = fn (allocator: &Allocator, bytes: []const u8) ?[]u8 { + const duplicate_bytes = fn (allocator: &Allocator, bytes: []const u8) ?[]u8 { if (allocator.allocate(size = bytes.len, alignment = 0)) |result| { - copyBytes(destination = result, source = bytes); + copy_bytes(destination = result, source = bytes); return result; } else { return null; @@ -89,12 +96,38 @@ const PageAllocator = struct{ } } - const getAllocator = fn(page_allocator: &PageAllocator) &Allocator { + const get_allocator = fn(page_allocator: &PageAllocator) &Allocator { return page_allocator.allocator.&; } }; -const copyBytes = fn(destination: []u8, source: []const u8) void { +const Writer = struct{ + callback: &const fn(writer: &Writer, bytes: []const u8) ?usize, +}; + +const FileWriter = struct{ + descriptor: os.FileDescriptor, + + const write = fn(file_writer: FileWriter, bytes: []const u8) ?usize { + return file_writer.descriptor.write(bytes); + } + + const writeAll = fn(file_writer: FileWriter, bytes: []const u8) bool { + var bytes_written: usize = 0; + + while (bytes_written < bytes.len) { + if (file_writer.write(bytes = bytes[bytes_written..])) 
|iteration_written_byte_count| { + bytes_written += iteration_written_byte_count; + } else { + return false; + } + } + + return bytes_written == bytes.len; + } +}; + +const copy_bytes = fn(destination: []u8, source: []const u8) void { assert(ok = destination.len == source.len); for (0..destination.len) |i| { destination[i] = source[i]; diff --git a/src/main.nat b/src/main.nat index f5c63bd..1f168cb 100644 --- a/src/main.nat +++ b/src/main.nat @@ -4,16 +4,16 @@ const main = fn() s32 { const size = 0x1000; if (std.page_allocator.allocate(size, alignment = 12)) |result| { result[0] = 0; - std.print(bytes_ptr = "Allocation succeeded. Freeing...\n", bytes_len = 33); + std.print(bytes = "Allocation succeeded. Freeing...\n"); if (std.page_allocator.free(bytes_ptr = result.ptr, bytes_len = result.len)) { - std.print(bytes_ptr = "Memory freed successfully\n", bytes_len = 26); + std.print(bytes = "Memory freed successfully\n"); return 0; } else { - std.print(bytes_ptr = "Memory freed with errors\n", bytes_len = 25); + std.print("Memory freed with errors\n"); return 1; } } else { - std.print(bytes_ptr = "Allocation failed!\n", bytes_len = 19); + std.print(bytes = "Allocation failed!\n"); return 1; } } diff --git a/test/hello_world/main.nat b/test/hello_world/main.nat index dc3444f..6392359 100644 --- a/test/hello_world/main.nat +++ b/test/hello_world/main.nat @@ -1,6 +1,6 @@ const std = #import("std"); const main = fn() s32 { - std.print(bytes_ptr = "Hello world!\n", bytes_len = 13); + std.print(bytes = "Hello world!\n"); return 0; } diff --git a/test/self_exe_path/main.nat b/test/self_exe_path/main.nat index 4ed822d..03d4703 100644 --- a/test/self_exe_path/main.nat +++ b/test/self_exe_path/main.nat @@ -2,12 +2,12 @@ const std = #import("std"); const print = std.print; const main = fn () s32 { - if (std.os.currentExecutablePath(allocator = std.page_allocator.allocator.&)) |result| { - print(bytes_ptr = result.ptr, bytes_len = result.len); - print(bytes_ptr = "\n", bytes_len = 
1); + if (std.os.current_executable_path(allocator = std.page_allocator.allocator.&)) |bytes| { + print(bytes); + print(bytes = "\n"); return 0; } else { - print(bytes_ptr = "Failed\n", bytes_len = 7); + print(bytes = "Failed\n"); return 1; } } diff --git a/test/virtual_memory/main.nat b/test/virtual_memory/main.nat index f5c63bd..387486a 100644 --- a/test/virtual_memory/main.nat +++ b/test/virtual_memory/main.nat @@ -4,16 +4,16 @@ const main = fn() s32 { const size = 0x1000; if (std.page_allocator.allocate(size, alignment = 12)) |result| { result[0] = 0; - std.print(bytes_ptr = "Allocation succeeded. Freeing...\n", bytes_len = 33); + std.print(bytes = "Allocation succeeded. Freeing...\n"); if (std.page_allocator.free(bytes_ptr = result.ptr, bytes_len = result.len)) { - std.print(bytes_ptr = "Memory freed successfully\n", bytes_len = 26); + std.print(bytes = "Memory freed successfully\n"); return 0; } else { - std.print(bytes_ptr = "Memory freed with errors\n", bytes_len = 25); + std.print(bytes = "Memory freed with errors\n"); return 1; } } else { - std.print(bytes_ptr = "Allocation failed!\n", bytes_len = 19); + std.print(bytes = "Allocation failed!\n"); return 1; } }