Delete custom allocator

David Gonzalez Martin 2024-04-26 12:00:41 -06:00
parent 984835ed7b
commit 30fff3b5c4
5 changed files with 17 additions and 161 deletions

View File

@@ -68,14 +68,13 @@ const Optimization = enum {
aggressively_optimize_for_size,
};
-pub fn createContext(allocator: Allocator, my_allocator: *MyAllocator) !*const Context {
+pub fn createContext(allocator: Allocator) !*const Context {
const context: *Context = try allocator.create(Context);
const self_exe_path = try std.fs.selfExePathAlloc(allocator);
const self_exe_dir_path = std.fs.path.dirname(self_exe_path).?;
context.* = .{
.allocator = allocator,
-.my_allocator = my_allocator,
.cwd_absolute_path = try realpathAlloc(allocator, "."),
.executable_absolute_path = self_exe_path,
.directory_absolute_path = self_exe_dir_path,
@@ -490,25 +489,6 @@ pub fn compileCSourceFile(context: *const Context, arguments: []const []const u8
} else if (starts_with_slice(argument, "-m")) {
cc_argv.appendAssumeCapacity(argument);
} else {
-// const debug_args = true;
-// if (debug_args) {
-// const home_dir = switch (@import("builtin").os.tag) {
-// .linux, .macos => std.posix.getenv("HOME") orelse unreachable,
-// .windows => try std.process.getEnvVarOwned(context.allocator, "USERPROFILE"),
-// else => @compileError("OS not supported"),
-// };
-// var list = PinnedArray(u8){};
-// for (arguments) |arg| {
-// list.append_slice(context.my_allocator, arg);
-// list.append(context.my_allocator, ' ');
-// }
-// list.append(context.my_allocator, '\n');
-// list.append_slice(context.my_allocator, "Unhandled argument: ");
-// list.append_slice(context.my_allocator, argument);
-// list.append(context.my_allocator, '\n');
-//
-// std.fs.cwd().writeFile(try std.fmt.allocPrint(context.allocator, "{s}/dev/nativity/nat/unhandled_arg_{}", .{ home_dir, std.time.milliTimestamp() }), list.slice());
-// }
try write(.panic, "unhandled argument: '");
try write(.panic, argument);
try write(.panic, "'\n");
@@ -4143,7 +4123,6 @@ pub const Struct = struct {
pub const Context = struct {
allocator: Allocator,
-my_allocator: *MyAllocator,
arena: *Arena,
cwd_absolute_path: []const u8,
directory_absolute_path: []const u8,
@@ -4632,7 +4611,7 @@ pub const Builder = struct {
const new_slice = ptr[0 .. slice.len + name.len];
@memcpy(new_slice[0..name.len], name);
buffer[len] = 0;
-return @ptrCast(try context.my_allocator.duplicate_bytes(new_slice));
+return @ptrCast(try context.arena.duplicate_bytes(new_slice));
}
fn processStringLiteralFromStringAndDebugInfo(builder: *Builder, unit: *Unit, context: *const Context, string: [:0]const u8, debug_info: TokenDebugInfo) !*Debug.Declaration.Global {
@@ -17500,7 +17479,7 @@ pub const Unit = struct {
}
pub fn analyze(unit: *Unit, context: *const Context) !void {
-const builder = try context.my_allocator.allocate_one(Builder);
+const builder = try context.arena.new(Builder);
builder.* = .{
.generate_debug_info = unit.descriptor.generate_debug_information,
.emit_ir = true,
@@ -17569,9 +17548,7 @@
.count = file.lexer.count,
}, file_index);
-// logln(.parser, .file, "[START PARSING FILE #{} {s}]", .{ file_index, file.package.source_path });
-file.parser = try parser.analyze(context.allocator, context.my_allocator, context.arena, file.lexer, file.source_code, &unit.token_buffer, &unit.node_buffer, &unit.node_lists);
-// logln(.parser, .file, "[END PARSING FILE #{} {s}]", .{ file_index, file.package.source_path });
+file.parser = try parser.analyze(context.arena, file.lexer, file.source_code, &unit.token_buffer, &unit.node_buffer, &unit.node_lists);
assert(file.status == .lexed);
file.status = .parsed;
}
@@ -17690,7 +17667,7 @@ pub const Unit = struct {
}
const main_package = blk: {
-const result = try context.my_allocator.allocate_one(Package);
+const result = try context.arena.new(Package);
const main_package_absolute_directory_path = b: {
const relative_path = if (std.fs.path.dirname(unit.descriptor.main_package_path)) |dirname| dirname else ".";
break :b try context.pathFromCwd(relative_path);
@@ -17700,7 +17677,7 @@ pub const Unit = struct {
.handle = try std.fs.openDirAbsolute(main_package_absolute_directory_path, .{}),
.path = main_package_absolute_directory_path,
},
-.source_path = try context.my_allocator.duplicate_bytes(std.fs.path.basename(unit.descriptor.main_package_path)),
+.source_path = try context.arena.duplicate_bytes(std.fs.path.basename(unit.descriptor.main_package_path)),
.dependencies = try PinnedHashMap([]const u8, *Package).init(std.mem.page_size),
};
break :blk result;

View File

@@ -1378,7 +1378,7 @@ pub const LLVM = struct {
.unsigned => 'u',
};
-break :b try context.my_allocator.duplicate_bytes(slice);
+break :b try context.arena.duplicate_bytes(slice);
},
.bool => "bool",
else => |t| @panic(@tagName(t)),
@@ -2638,7 +2638,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo
literal_slice[1] = '$';
literal_slice[2] = '0';
literal_slice[3] = 'x';
-assembly_statements.appendSliceAssumeCapacity(try context.my_allocator.duplicate_bytes(literal_slice));
+assembly_statements.appendSliceAssumeCapacity(try context.arena.duplicate_bytes(literal_slice));
},
.value => |sema_value| {
if (llvm.llvm_value_map.get(sema_value)) |v| {
@@ -2657,7 +2657,7 @@ pub fn codegen(unit: *Compilation.Unit, context: *const Compilation.Context) !vo
new_buffer[operand_slice.len] = ':';
new_buffer[operand_slice.len + 1] = 'P';
new_buffer[operand_slice.len + 2] = '}';
-const new_slice = try context.my_allocator.duplicate_bytes(new_buffer[0 .. operand_slice.len + 3]);
+const new_slice = try context.arena.duplicate_bytes(new_buffer[0 .. operand_slice.len + 3]);
assembly_statements.appendSliceAssumeCapacity(new_slice);
operand_values.appendAssumeCapacity(value);
const value_type = value.getType();

View File

@@ -1,5 +1,4 @@
const std = @import("std");
-const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const library = @import("../library.zig");
@@ -9,7 +8,6 @@ const BlockList = library.BlockList;
const BoundedArray = library.BoundedArray;
const enumFromString = library.enumFromString;
const PinnedArray = library.PinnedArray;
-const MyAllocator = library.MyAllocator;
const lexer = @import("lexer.zig");
@@ -235,8 +233,6 @@ const Analyzer = struct {
nodes: *PinnedArray(Node),
node_lists: *PinnedArray([]const Node.Index),
source_file: []const u8,
-allocator: Allocator,
-my_allocator: *MyAllocator,
arena: *Arena,
suffix_depth: usize = 0,
@@ -2381,7 +2377,7 @@ const Analyzer = struct {
};
// Here it is assumed that left brace is consumed
-pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, arena: *Arena, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *PinnedArray([]const Node.Index)) !Result {
+pub fn analyze(arena: *Arena, lexer_result: lexer.Result, source_file: []const u8, token_buffer: *Token.Buffer, node_list: *PinnedArray(Node), node_lists: *PinnedArray([]const Node.Index)) !Result {
const start = std.time.Instant.now() catch unreachable;
var analyzer = Analyzer{
.lexer = lexer_result,
@@ -2389,8 +2385,6 @@ pub fn analyze(allocator: Allocator, my_allocator: *MyAllocator, arena: *Arena,
.source_file = source_file,
// .file_index = file_index,
.token_i = lexer_result.offset,
-.allocator = allocator,
-.my_allocator = my_allocator,
.nodes = node_list,
.node_lists = node_lists,
.arena = arena,

View File

@@ -80,6 +80,12 @@ pub const Arena = struct {
const result: [*]T = @ptrCast(@alignCast(try arena.allocate(@sizeOf(T) * count)));
return result[0..count];
}
+pub fn duplicate_bytes(arena: *Arena, bytes: []const u8) ![]u8 {
+const slice = try arena.new_array(u8, bytes.len);
+@memcpy(slice, bytes);
+return slice;
+}
};
pub fn DynamicBoundedArray(comptime T: type) type {
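The new Arena.duplicate_bytes mirrors the deleted MyAllocator.duplicate_bytes, so call sites only swap the receiver (compare the my_allocator/arena pairs in the other hunks). A minimal standalone sketch of the same copy-into-an-arena pattern, built on std.heap.ArenaAllocator rather than the project's Arena type (duplicateBytes and the "main.nat" literal are illustrative, not project API):

const std = @import("std");

// Sketch only: copy bytes into an arena so the duplicate lives until the arena is freed,
// the same pattern as Arena.duplicate_bytes in the hunk above.
fn duplicateBytes(arena: std.mem.Allocator, bytes: []const u8) ![]u8 {
    const slice = try arena.alloc(u8, bytes.len);
    @memcpy(slice, bytes);
    return slice;
}

pub fn main() !void {
    var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator);
    defer arena_state.deinit(); // releases every duplicate at once
    const copy = try duplicateBytes(arena_state.allocator(), "main.nat");
    std.debug.print("{s}\n", .{copy});
}

Duplicates allocated this way live until the arena is freed, so no per-allocation free or reallocate path is needed.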
@@ -488,112 +494,6 @@ pub fn enumFromString(comptime E: type, string: []const u8) ?E {
-extern fn pthread_jit_write_protect_np(enabled: bool) void;
-pub fn allocate_virtual_memory(size: usize, flags: packed struct {
-executable: bool = false,
-}) ![]align(page_size) u8 {
-return switch (os) {
-.windows => blk: {
-const windows = std.os.windows;
-break :blk @as([*]align(page_size) u8, @ptrCast(@alignCast(try windows.VirtualAlloc(null, size, windows.MEM_COMMIT | windows.MEM_RESERVE, windows.PAGE_EXECUTE_READWRITE))))[0..size];
-},
-.linux, .macos => |os_tag| blk: {
-const jit = switch (os_tag) {
-.macos => 0x800,
-.linux => 0,
-else => @compileError("OS not supported"),
-};
-_ = jit; // autofix
-const execute_flag: switch (os_tag) {
-.linux => u32,
-.macos => c_int,
-else => @compileError("OS not supported"),
-} = if (flags.executable) std.posix.PROT.EXEC else 0;
-const protection_flags: u32 = @intCast(std.posix.PROT.READ | std.posix.PROT.WRITE | execute_flag);
-const result = try std.posix.mmap(null, size, protection_flags, .{
-.TYPE = .PRIVATE,
-.ANONYMOUS = true,
-}, -1, 0);
-if (arch == .aarch64 and os == .macos) {
-if (flags.executable) {
-pthread_jit_write_protect_np(false);
-}
-}
-break :blk result;
-},
-else => @compileError("OS not supported"),
-};
-}
-pub fn free_virtual_memory(slice: []align(page_size) const u8) void {
-switch (os) {
-.windows => {
-std.os.windows.VirtualFree(@constCast(@ptrCast(slice.ptr)), slice.len, std.os.windows.MEM_RELEASE);
-},
-else => {
-std.posix.munmap(slice);
-},
-}
-}
-pub const MyAllocator = struct {
-handler: *const fn (allocator: *MyAllocator, old_ptr: ?[*]u8, old_size: usize, new_size: usize, alignment: u16) Error![*]u8,
-pub fn allocate_one(allocator: *MyAllocator, comptime T: type) !*T {
-const slice = try allocator.allocate(@sizeOf(T), @alignOf(T));
-assert(slice.len == @sizeOf(T));
-return @ptrCast(@alignCast(&slice.ptr[0]));
-}
-pub fn allocate(allocator: *MyAllocator, size: usize, alignment: u16) ![]u8 {
-const ptr = try allocator.handler(allocator, null, 0, size, alignment);
-return ptr[0..size];
-}
-pub fn free(allocator: *MyAllocator, bytes: []u8) !void {
-_ = try allocator.handler(allocator, bytes.ptr, bytes.len, 0, 0);
-}
-pub fn reallocate(allocator: *MyAllocator, bytes: []u8, size: usize, alignment: u16) ![]u8 {
-const new_ptr = try allocator.handler(allocator, bytes.ptr, bytes.len, size, alignment);
-return new_ptr[0..size];
-}
-pub fn duplicate_bytes(allocator: *MyAllocator, bytes: []const u8) ![]u8 {
-const slice = try allocator.allocate(bytes.len, 0);
-@memcpy(slice, bytes);
-return slice;
-}
-const Error = error{
-allocation_failed,
-};
-};
-pub const PageAllocator = struct {
-allocator: MyAllocator = .{ .handler = handler },
-fn handler(allocator: *MyAllocator, maybe_old_ptr: ?[*]u8, old_size: usize, new_size: usize, alignment: u16) MyAllocator.Error![*]u8 {
-_ = allocator; // autofix
-_ = alignment; // autofix
-const maybe_new_slice: ?[]u8 = if (new_size > 0) allocate_virtual_memory(new_size, .{}) catch return MyAllocator.Error.allocation_failed else null;
-if (maybe_old_ptr) |old_ptr| {
-const old_slice = old_ptr[0..old_size];
-if (maybe_new_slice) |new_slice| {
-@memcpy(new_slice[0..old_size], old_slice);
-free_virtual_memory(@ptrCast(@alignCast(old_slice)));
-return new_slice.ptr;
-} else {
-return old_slice.ptr;
-}
-} else {
-return (maybe_new_slice orelse unreachable).ptr;
-}
-}
-};
fn copy_backwards(comptime T: type, destination: []T, source: []const T) void {
@setRuntimeSafety(false);
assert(destination.len >= source.len);

View File

@@ -21,26 +21,11 @@ fn todo() noreturn {
@panic("TODO");
}
-var my_allocator = PageAllocator{};
pub fn main() !void {
var arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
const allocator = arena_allocator.allocator();
const arguments: []const []const u8 = try std.process.argsAlloc(allocator);
-const context = try Compilation.createContext(allocator, &my_allocator.allocator);
-// const debug_args = false;
-// if (debug_args and @import("builtin").os.tag != .windows) {
-// assert(arguments.len > 0);
-// const home_dir = std.posix.getenv("HOME") orelse unreachable;
-// const timestamp = std.time.milliTimestamp();
-// var argument_list = PinnedArray(u8){};
-// for (arguments) |arg| {
-// argument_list.append_slice(context.my_allocator, arg) catch {};
-// argument_list.append(context.my_allocator, ' ') catch {};
-// }
-// argument_list.append(context.my_allocator, '\n') catch {};
-// std.fs.cwd().writeFile(std.fmt.allocPrint(std.heap.page_allocator, "{s}/dev/nativity/nat/invocation_log_{}", .{ home_dir, timestamp }) catch unreachable, argument_list.slice()) catch {};
-// }
+const context = try Compilation.createContext(allocator);
if (arguments.len <= 1) {
return error.InvalidInput;