Compare commits
1 commit
main...polymorphi

Author | SHA1 | Date
---|---|---
 | fab147eacc |
@@ -241,12 +241,9 @@ const Parser = struct{
                while (parser.i < file.len) : (parser.i += 1) {
                    const is_line_feed = file[parser.i] == '\n';
                    if (is_line_feed) {
                        parser.line += 1;
                        break;
                    }
                }

                if (parser.i == file.len) break;
            }
        }
    }
@@ -258,6 +255,65 @@ const Parser = struct{
        column: u32,
    };

    fn parse_polymorphic_arguments(parser: *Parser, thread: *Thread, file: *File, scope: *Scope) []const *Type{
        const src = file.source_code;

        parser.expect_character(src, polymorphic_start_token);

        var polymorphic_parameters = PinnedArray(*Type){};

        while (true) {
            parser.skip_space(src);

            if (src[parser.i] == polymorphic_end_token) {
                break;
            }

            const line = parser.get_debug_line();
            const column = parser.get_debug_column();

            parser.expect_character(src, '$');

            const name = parser.parse_identifier(thread, src);
            const polymorphic_name = thread.polymorphic_names.append(.{
                .type = .{
                    .sema = .{
                        .id = .polymorphic_name,
                        .thread = thread.get_index(),
                        .resolved = false,
                    },
                    .size = 0,
                    .bit_size = 0,
                    .alignment = 1,
                },
                .type_declaration = .{
                    .declaration = .{
                        .id = .type,
                        .name = name,
                        .line = line,
                        .column = column,
                        .scope = scope,
                    },
                    .id = .polymorphic_name,
                },
                .index = polymorphic_parameters.length,
            });

            switch (src[parser.i]) {
                ',' => parser.i += 1,
                polymorphic_end_token => {},
                else => fail(),
            }

            _ = polymorphic_parameters.append(&polymorphic_name.type);
            _ = scope.declarations.put_no_clobber(name, &polymorphic_name.type_declaration.declaration);
        }

        parser.i += 1;

        return polymorphic_parameters.slice();
    }

    fn parse_field(parser: *Parser, thread: *Thread, file: *File, scope: *Scope) ?ParseFieldData{
        const src = file.source_code;
        parser.skip_space(src);
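Note on the surface syntax: the parameter list consumed by `parse_polymorphic_arguments` is delimited by `polymorphic_start_token` and `polymorphic_end_token` (both the `'` character, defined later in this diff), each parameter begins with `$`, and parameters are separated by commas per the `','` case in the switch above. A minimal sketch in the language under test: the single-parameter form comes from the tests added by this commit, while the two-parameter struct is a hypothetical extrapolation, not something this commit tests.

```
// Single polymorphic parameter, as in the tests added by this commit.
struct SimplePolymorphic'$T' {
    member: T,
}

// Hypothetical two-parameter declaration, assuming the ',' separator handled above.
struct Pair'$A, $B' {
    first: A,
    second: B,
}
```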
@@ -725,14 +781,14 @@ const Parser = struct{
        const polymorphic_struct = declaration.get_payload(.polymorphic_struct);
        parser.skip_space(src);
        // Expect parameters for the struct (all polymorphic structs have polymorphic parameters)
        parser.expect_character(src, '[');
        parser.expect_character(src, polymorphic_start_token);

        var instantiation_types = PinnedArray(*Type){};

        while (true) {
            parser.skip_space(src);

            if (src[parser.i] == ']') {
            if (src[parser.i] == polymorphic_end_token) {
                break;
            }

@@ -742,7 +798,7 @@ const Parser = struct{

            switch (src[parser.i]) {
                ',' => parser.i += 1,
                ']' => {},
                polymorphic_end_token => {},
                else => fail(),
            }

@@ -766,7 +822,7 @@ const Parser = struct{

        var struct_name = PinnedArray(u8){};
        _ = struct_name.append_slice(struct_polymorphic_name);
        _ = struct_name.append('[');
        _ = struct_name.append(polymorphic_start_token);

        for (instantiation_types.slice()) |ty| {
            _ = struct_name.append_slice(ty.get_name());
@@ -774,7 +830,7 @@ const Parser = struct{
        }

        struct_name.length -= 2;
        _ = struct_name.append(']');
        _ = struct_name.append(polymorphic_end_token);

        const struct_name_hash = intern_identifier(&thread.identifiers, struct_name.slice());

@@ -3010,7 +3066,6 @@ const Type = struct {
    alignment: u32,

    const Id = enum(u8){
        unresolved,
        void,
        noreturn,
        integer,
@@ -3118,7 +3173,6 @@ const Type = struct {
    };

    const id_to_type_map = std.EnumArray(Id, type).init(.{
        .unresolved = void,
        .void = void,
        .noreturn = void,
        .integer = Integer,
@@ -3231,7 +3285,6 @@ const Type = struct {

    fn is_aggregate(ty: *Type) bool {
        return switch (ty.sema.id) {
            .unresolved => unreachable,
            .array => unreachable,
            .function => unreachable,
            .void,
@@ -3394,6 +3447,7 @@ const Scope = struct {
        function,
        local,
        polymorphic_struct,
        polymorphic_function,
    };
};

@@ -3412,7 +3466,6 @@ const ArgumentDeclaration = struct {

const TypeDeclaration = struct{
    declaration: Declaration,
    parent: *Type,
    id: Id,

    const Id = enum{
@@ -3520,12 +3573,18 @@ const Declaration = struct {
    }
};

const PolymorphicFunction = struct{
    declaration: GlobalDeclaration,
    scope: Function.Scope,
    polymorphic_parameters: []const *Type,
};

const Function = struct{
    declaration: Function.Declaration,
    entry_block: *BasicBlock,
    stack_slots: PinnedArray(*LocalSymbol) = .{},
    scope: Function.Scope,
    arguments: PinnedArray(*ArgumentSymbol) = .{},
    stack_slots: PinnedArray(*LocalSymbol) = .{},
    arguments: []const *ArgumentSymbol,
    entry_block: *BasicBlock,

    const Attributes = struct{
    };
@@ -4159,6 +4218,7 @@ const Thread = struct{
    bitfields: PinnedArray(Type.Bitfield) = .{},
    cloned_types: PinnedHashMap(*Type, *Type) = .{},
    polymorphic_names: PinnedArray(Type.PolymorphicName) = .{},
    polymorphic_functions: PinnedArray(PolymorphicFunction) = .{},
    constant_strings: PinnedHashMap(u32, String) = .{},
    global_strings: PinnedHashMap(u32, String) = .{},
    string_buffer: PinnedArray(u8) = .{},
@@ -5278,6 +5338,8 @@ const brace_open = '{';
const brace_close = '}';

const pointer_token = '*';
const polymorphic_start_token = '\'';
const polymorphic_end_token = '\'';

const cache_line_size = switch (builtin.os.tag) {
    .macos => 128,
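Since `polymorphic_start_token` and `polymorphic_end_token` are both the single-quote character, declarations and instantiations are wrapped in the same delimiter on each side. A short illustration based on the updated polymorphic_struct test in this diff:

```
// Declaration: parameters introduced with $ inside '...'.
struct SimplePolymorphic'$T' {
    member: T,
}

// Instantiation: concrete types inside the same '...' delimiters.
>s: SimplePolymorphic's32' = {
    .member = 0,
};
```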
@@ -5651,7 +5713,7 @@ fn worker_thread(thread_index: u32, cpu_count: *u32) void {
    var last_block = basic_block_node;

    if (emit_allocas) {
        for (nat_function.arguments.slice(), nat_function.declaration.get_function_type().abi.argument_types_abi) |argument, abi| {
        for (nat_function.arguments, nat_function.declaration.get_function_type().abi.argument_types_abi) |argument, abi| {
            _ = abi; // autofix
            switch (argument.instruction.id) {
                .argument_storage => {
@@ -8782,6 +8844,81 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
        function_declaration_data.global_symbol.attributes.@"export" = true;
    }

    const has_polymorphic_parameters = src[parser.i] == polymorphic_start_token;
    if (has_polymorphic_parameters) {
        const polymorphic_function = thread.polymorphic_functions.append(.{
            .declaration = function_declaration_data.global_symbol.global_declaration,
            .scope = .{
                .scope = .{
                    .parent = &file.scope.scope,
                    .line = declaration_line + 1,
                    .column = declaration_column + 1,
                    .file = file.get_index(),
                    .id = .polymorphic_function,
                },
            },
            .polymorphic_parameters = &.{},
        });
        _ = file.scope.scope.declarations.put_no_clobber(polymorphic_function.declaration.declaration.name, &polymorphic_function.declaration.declaration);
        polymorphic_function.polymorphic_parameters = parser.parse_polymorphic_arguments(thread, file, &polymorphic_function.scope.scope);

        parser.skip_space(src);
        parser.expect_character(src, '(');
        while (true) {
            parser.skip_space(src);
            if (src[parser.i] == ')') {
                break;
            }

            _ = parser.parse_identifier(thread, src);
            parser.skip_space(src);
            parser.expect_character(src, ':');
            parser.skip_space(src);
            _ = parser.parse_type_expression(thread, file, &polymorphic_function.scope.scope);

            parser.skip_space(src);

            switch (src[parser.i]) {
                ',' => parser.i += 1,
                ')' => {},
                else => fail(),
            }
        }

        parser.i += 1;

        parser.skip_space(src);

        _ = parser.parse_type_expression(thread, file, &polymorphic_function.scope.scope);

        parser.skip_space(src);

        parser.expect_character(src, brace_open);

        // var open_brace_count: usize = 1;
        // while (parser.i < src.len) {
        //     if (open_brace_count == 0) {
        //         break;
        //     }
        //     const is_open_brace = src[parser.i] == brace_open;
        //     const is_close_brace = src[parser.i] == brace_close;
        //     const is_comment = src[parser.i] == '/' and Parser.get_next_ch_safe(src, parser.i) == '/';
        //     const is_new_line = src[parser.i] == '\n';
        //     open_brace_count += @intFromBool(is_open_brace);
        //     open_brace_count -= @intFromBool(is_close_brace);
        //     if (is_comment) {
        //         while (src[parser.i] != '\n') {
        //             parser.i += 1;
        //         }
        //     } else if (is_new_line) {
        //     } else {
        //         parser.i += 1;
        //     }
        // }

        // parser.parse_polymorphic_arguments(thread, file, scope);
        unreachable;
    } else {
        parser.expect_character(src, '(');

        const ArgumentData = struct{
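For orientation, the declaration shape this branch walks through is: the quoted polymorphic parameter list, then the ordinary parameter list, then the return type, then the opening brace; the body itself is not analyzed yet in this commit (the path above reaches `unreachable` after `brace_open`, with the body-skipping loop still commented out). A sketch in the language under test, modeled on the new polymorphic_function test; the two-argument form is an assumption based on the `','` case in the argument loop, not something this commit tests.

```
// Hypothetical two-argument polymorphic function; the single-argument
// identity function is what the new test file below actually exercises.
fn pick'$T'(a: T, b: T) T {
    return a;
}
```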
@@ -9266,7 +9403,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                };
            },
        } else {
            unreachable;
            fail();
        };

        const function_type = thread.function_types.append(.{
@@ -9296,7 +9433,6 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
            fail();
        }


        const function = thread.functions.add_one();
        const entry_block = create_basic_block(thread);
        function.* = .{
@@ -9311,6 +9447,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                },
            },
            .entry_block = entry_block,
            .arguments = &.{},
        };
        _ = file.scope.scope.declarations.put_no_clobber(function.declaration.global_symbol.global_declaration.declaration.name, &function.declaration.global_symbol.global_declaration.declaration);
        var analyzer = Analyzer{
@@ -9340,6 +9477,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
        }

        if (original_arguments.length > 0) {
            var arguments = PinnedArray(*ArgumentSymbol){};
            // var runtime_parameter_count: u64 = 0;
            for (original_arguments.const_slice(), function_abi.argument_types_abi, 0..) |argument, argument_abi, argument_index| {
                if (analyzer.current_scope.declarations.get(argument.name) != null) {
@@ -9359,6 +9497,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                        .index = @intCast(argument_index),
                        .indirect_argument = argument_abi.indices[0],
                    });
                    _ = arguments.append(argument_symbol);
                    argument_symbol.instruction.id = .abi_indirect_argument;
                    break :blk argument_symbol;
                } else blk: {
@@ -9400,6 +9539,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                        .column = argument.column,
                        .index = @intCast(argument_index),
                    });
                    _ = arguments.append(argument_symbol);
                    _ = emit_store(&analyzer, thread, .{
                        .destination = &argument_symbol.instruction.value,
                        .source = &argument_abi_instructions.slice()[0].value,
@@ -9420,6 +9560,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                        .column = argument.column,
                        .index = @intCast(argument_index),
                    });
                    _ = arguments.append(argument_symbol);

                    switch (argument.type.sema.id) {
                        .@"struct" => {
@@ -9485,6 +9626,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                        .column = argument.column,
                        .index = @intCast(argument_index),
                    });
                    _ = arguments.append(argument_symbol);

                    _ = emit_store(&analyzer, thread, .{
                        .destination = &argument_symbol.instruction.value,
@@ -9533,6 +9675,8 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                    }
                }
            }

            function.arguments = arguments.slice();
        }

        const result = analyze_local_block(thread, &analyzer, &parser, file);
@@ -9581,6 +9725,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                },
                else => fail_message("Unexpected character to close function declaration"),
            }
        }
    } else {
        fail();
    }
@@ -9658,9 +9803,8 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {

        parser.skip_space(src);

        if (src[parser.i] == '[') {
            parser.i += 1;

            const has_polymorphic_parameters = src[parser.i] == polymorphic_start_token;
            if (has_polymorphic_parameters) {
                const polymorphic_struct = thread.polymorphic_structs.append(.{
                    .type = .{
                        .sema = .{
@@ -9691,57 +9835,7 @@ pub fn analyze_file(thread: *Thread, file_index: u32) void {
                });
                _ = file.scope.scope.declarations.put_no_clobber(struct_name, &polymorphic_struct.declaration);

                var struct_parameters = PinnedArray(*Type){};

                while (true) {
                    parser.skip_space(src);

                    if (src[parser.i] == ']') {
                        break;
                    }

                    const line = parser.get_debug_line();
                    const column = parser.get_debug_column();
                    parser.i += 1;
                    const name = parser.parse_identifier(thread, src);
                    const polymorphic_name = thread.polymorphic_names.append(.{
                        .type = .{
                            .sema = .{
                                .id = .polymorphic_name,
                                .thread = thread.get_index(),
                                .resolved = false,
                            },
                            .size = 0,
                            .bit_size = 0,
                            .alignment = 1,
                        },
                        .type_declaration = .{
                            .declaration = .{
                                .id = .type,
                                .name = name,
                                .line = line,
                                .column = column,
                                .scope = &polymorphic_struct.scope,
                            },
                            .parent = &polymorphic_struct.type,
                            .id = .polymorphic_name,
                        },
                        .index = struct_parameters.length,
                    });

                    switch (src[parser.i]) {
                        ',' => parser.i += 1,
                        ']' => {},
                        else => fail(),
                    }

                    _ = struct_parameters.append(&polymorphic_name.type);
                    _ = polymorphic_struct.scope.declarations.put_no_clobber(name, &polymorphic_name.type_declaration.declaration);
                }

                parser.i += 1;

                polymorphic_struct.parameters = struct_parameters.slice();
                polymorphic_struct.parameters = parser.parse_polymorphic_arguments(thread, file, &polymorphic_struct.scope);

                parser.skip_space(src);

@@ -10079,7 +10173,6 @@ fn emit_argument_symbol(analyzer: *Analyzer, thread: *Thread, args: struct{
        .column = args.column,
    }),
    });
    _ = analyzer.current_function.arguments.append(argument_symbol);

    return argument_symbol;
}
@@ -1,9 +1,9 @@
struct SimplePolymorphic[$T] {
struct SimplePolymorphic'$T' {
    member: T,
}

fn[cc(.c)] main[export]() s32 {
    >s: SimplePolymorphic[s32] = {
    >s: SimplePolymorphic's32' = {
        .member = 0,
    };
    return s.member;
test/standalone/polymorphic_function/main.nat (new file, +7)
@@ -0,0 +1,7 @@
fn polymorphic'$T'(arg: T) T {
    return arg;
}

fn[cc(.c)] main[export]() s32 {
    return polymorphic's32'(0);
}