implement add and sub

David Gonzalez Martin 2023-11-11 11:57:11 -06:00
parent be143a8415
commit 35657715e9
8 changed files with 1415 additions and 696 deletions

ci.sh

@@ -1,12 +1,44 @@
#!/usr/bin/env bash
set -xe
zig build test -Dall --summary all
echo "Testing Nativity with Zig"
zig build test -Dall -Doptimize=ReleaseSafe --summary all
echo "Compiling Nativity with Zig"
time zig build -Doptimize=ReleaseSafe
failed_test_count=0
passed_test_count=0
test_directory_name=test
test_directory=$test_directory_name/*
total_test_count=$(ls 2>/dev/null -Ubad1 -- test/* | wc -l)
ran_test_count=0
test_i=1
for dir in test/*
for dir in $test_directory
do
zig build run -- $dir/main.nat
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
nat/${dir##*/}
MY_TESTNAME=${dir##*/}
zig build run -Doptimize=ReleaseSafe -- $dir/main.nat
if [[ "$?" == "0" ]]; then
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
nat/$MY_TESTNAME
if [[ "$?" == "0" ]]; then
passed_test_count=$(($passed_test_count + 1))
result="PASSED"
else
failed_test_count=$(($failed_test_count + 1))
result="FAILED"
fi
echo "[$test_i/$total_test_count] [$result] $MY_TESTNAME"
ran_test_count=$(($ran_test_count + 1))
fi
else
echo "$MY_TESTNAME failed to compile"
fi
test_i=$(($test_i + 1))
done
echo "Ran $ran_test_count tests ($passed_test_count passed, $failed_test_count failed)."
if [[ $failed_test_count == "0" ]]; then
true
else
false
fi


@@ -536,6 +536,22 @@ pub const Cast = struct {
pub const Allocation = List.Allocation;
};
pub const BinaryOperation = struct {
left: Value.Index,
right: Value.Index,
type: Type.Index,
id: Id,
pub const List = BlockList(@This());
pub const Index = List.Index;
pub const Allocation = List.Allocation;
const Id = enum {
add,
sub,
};
};
pub const CallingConvention = enum {
system_v,
};
@@ -565,6 +581,7 @@ pub const Value = union(enum) {
extern_function: Function.Prototype.Index,
sign_extend: Cast.Index,
zero_extend: Cast.Index,
binary_operation: BinaryOperation.Index,
pub const List = BlockList(@This());
pub const Index = List.Index;
@@ -585,6 +602,7 @@ pub const Value = union(enum) {
.bool, .void, .undefined, .function, .type, .enum_field => true,
.integer => |integer| integer.type.eq(Type.comptime_int),
.call => false,
.binary_operation => false,
else => |t| @panic(@tagName(t)),
};
}
@@ -598,6 +616,7 @@ pub const Value = union(enum) {
.type => Type.type,
.enum_field => |enum_field_index| module.enums.get(module.enum_fields.get(enum_field_index).parent).type,
.function => |function_index| module.functions.get(function_index).prototype,
.binary_operation => |binary_operation| module.binary_operations.get(binary_operation).type,
else => |t| @panic(@tagName(t)),
};
@@ -693,6 +712,7 @@ pub const Module = struct {
function_name_map: data_structures.AutoArrayHashMap(Function.Index, u32) = .{},
arrays: BlockList(Array) = .{},
casts: BlockList(Cast) = .{},
binary_operations: BlockList(BinaryOperation) = .{},
string_literal_types: data_structures.AutoArrayHashMap(u32, Type.Index) = .{},
array_types: data_structures.AutoArrayHashMap(Array, Type.Index) = .{},
entry_point: Function.Index = Function.Index.invalid,
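Taken together, the hunks above thread a new binary_operation value through the module: the BinaryOperation node stores its operands as value indices plus a result type and an id (add or sub), the Value union gains a matching case, and Module gets a pool for the nodes so getType can read the result type back out. A much-reduced sketch of that shape, using a plain array instead of the project's BlockList (all names and numbers below are illustrative, not the compiler's own):

    const std = @import("std");

    const TypeIndex = u32;

    const BinaryOperation = struct {
        left: u32, // index of the left operand value
        right: u32, // index of the right operand value
        type: TypeIndex, // result type, shared by both operands
        id: enum { add, sub },
    };

    const Module = struct {
        // Stand-in for `binary_operations: BlockList(BinaryOperation)`.
        binary_operations: []const BinaryOperation,
    };

    const Value = union(enum) {
        integer: i64,
        binary_operation: u32, // index into module.binary_operations

        fn getType(value: Value, module: *const Module) TypeIndex {
            return switch (value) {
                .integer => 0, // pretend type #0 is comptime_int
                .binary_operation => |index| module.binary_operations[index].type,
            };
        }
    };

    pub fn main() void {
        const module = Module{ .binary_operations = &.{
            .{ .left = 0, .right = 1, .type = 7, .id = .add },
        } };
        const value = Value{ .binary_operation = 0 };
        std.debug.print("result type: #{}\n", .{value.getType(&module)});
    }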
@@ -1170,7 +1190,7 @@ pub fn log(comptime logger_scope: LoggerScope, logger: getLoggerScopeType(logger
pub fn panic(message: []const u8, stack_trace: ?*std.builtin.StackTrace, return_address: ?usize) noreturn {
const print_stack_trace = true;
switch (print_stack_trace) {
true => std.builtin.default_panic(message, stack_trace, return_address),
true => @call(.always_inline, std.builtin.default_panic, .{ message, stack_trace, return_address }),
false => {
writer.writeAll("\nPANIC: ") catch {};
writer.writeAll(message) catch {};
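The only behavioral change in this hunk is that the default panic handler is now reached through @call with an explicit CallModifier rather than a plain call. A minimal standalone illustration of that builtin (the add function below is made up for the example):

    const std = @import("std");

    fn add(a: i32, b: i32) i32 {
        return a + b;
    }

    pub fn main() void {
        // @call takes a std.builtin.CallModifier first; .always_inline forces the
        // callee to be inlined at this call site.
        const result = @call(.always_inline, add, .{ 1, 2 });
        std.debug.print("{}\n", .{result});
    }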

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -164,7 +164,7 @@ pub fn analyze(allocator: Allocator, text: []const u8, file_index: File.Index) !
inline else => |comptime_fixed_keyword| @field(Token.Id, "fixed_keyword_" ++ @tagName(comptime_fixed_keyword)),
} else .identifier;
},
'(', ')', '{', '}', '[', ']', '-', '=', ';', '#', '@', ',', '.', ':', '>', '<', '*', '!' => |operator| blk: {
'(', ')', '{', '}', '[', ']', '=', ';', '#', '@', ',', '.', ':', '>', '<', '!', '+', '-', '*', '\\', '/' => |operator| blk: {
index += 1;
break :blk @enumFromInt(operator);
},
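The lexer hunk above extends the single-character operator set with '+', '\' and '/' (reordering '-' and '*'), and keeps converting the matched character directly with @enumFromInt; presumably Token.Id assigns these operator tags their ASCII code as the enum value. A hypothetical miniature of that trick:

    const std = @import("std");

    // If each operator tag carries its ASCII code as the enum value, the scanned
    // character can be turned into a token id directly with @enumFromInt.
    const TokenId = enum(u8) {
        plus = '+',
        minus = '-',
        asterisk = '*',
        slash = '/',
    };

    pub fn main() void {
        const character: u8 = '+';
        const id: TokenId = @enumFromInt(character);
        std.debug.print("{s}\n", .{@tagName(id)}); // prints "plus"
    }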


@@ -38,7 +38,17 @@ pub const Logger = enum {
block,
call,
pub var bitset = std.EnumSet(Logger).initEmpty();
pub var bitset = std.EnumSet(Logger).initMany(&.{
.type,
.identifier,
.symbol_declaration,
.scope_node,
.node,
.typecheck,
.@"switch",
.block,
.call,
});
};
const lexical_analyzer = @import("lexical_analyzer.zig");
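The logger change only swaps the default from an empty std.EnumSet to one with every scope explicitly enabled via initMany. For reference, a tiny standalone illustration of that container (the Logger enum below is a made-up subset):

    const std = @import("std");

    const Logger = enum { type, block, call };

    pub fn main() void {
        // initMany builds the set from a slice of tags; contains and insert
        // query and modify membership.
        var enabled = std.EnumSet(Logger).initMany(&.{ .type, .call });
        std.debug.print("block enabled: {}\n", .{enabled.contains(.block)});
        enabled.insert(.block);
        std.debug.print("block enabled: {}\n", .{enabled.contains(.block)});
    }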
@@ -145,7 +155,7 @@ const Analyzer = struct {
try statement_nodes.append(analyzer.allocator, block_node.left);
try statement_nodes.append(analyzer.allocator, block_node.right);
},
.block, .comptime_block => unreachable, //statement_nodes = analyzer.getNodeList(scope_index, block_node.left.unwrap()),
.block, .comptime_block => statement_nodes = analyzer.getScopeNodeList(scope_index, analyzer.getScopeNode(scope_index, block_node.left)),
else => |t| @panic(@tagName(t)),
}
@@ -512,6 +522,33 @@ const Analyzer = struct {
};
}
fn processBinaryOperation(analyzer: *Analyzer, scope_index: Scope.Index, expect_type: ExpectType, node_index: Node.Index) !Value {
const node = analyzer.getScopeNode(scope_index, node_index);
const left_allocation = try analyzer.unresolvedAllocate(scope_index, expect_type, node.left);
const right_allocation = try analyzer.unresolvedAllocate(scope_index, expect_type, node.right);
const left_type = left_allocation.ptr.getType(analyzer.module);
const right_type = right_allocation.ptr.getType(analyzer.module);
if (!left_type.eq(right_type)) {
unreachable;
}
const binary_operation = try analyzer.module.binary_operations.append(analyzer.allocator, .{
.left = left_allocation.index,
.right = right_allocation.index,
.type = left_type,
.id = switch (node.id) {
.add => .add,
.sub => .sub,
else => |t| @panic(@tagName(t)),
},
});
return .{
.binary_operation = binary_operation.index,
};
}
const DeclarationLookup = struct {
declaration: Declaration.Index,
scope: Scope.Index,
@@ -968,6 +1005,9 @@ const Analyzer = struct {
.type = try analyzer.resolveType(scope_index, node_index),
},
.@"return" => try analyzer.processReturn(scope_index, expect_type, node_index),
.add,
.sub,
=> try analyzer.processBinaryOperation(scope_index, expect_type, node_index),
else => |t| @panic(@tagName(t)),
};
}
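With the dispatch above, .add and .sub nodes now funnel into processBinaryOperation, which resolves both operands against the same expected type, takes the result type from the left operand, treats a type mismatch as unreachable for now, and appends the node to the module pool. A hypothetical, stripped-down version of that flow outside the compiler:

    const std = @import("std");

    const TypeIndex = u32;
    const ValueIndex = u32;

    const BinaryOperationId = enum { add, sub };

    const BinaryOperation = struct {
        left: ValueIndex,
        right: ValueIndex,
        type: TypeIndex,
        id: BinaryOperationId,
    };

    fn processBinaryOperation(
        pool: *std.ArrayList(BinaryOperation),
        left: ValueIndex,
        left_type: TypeIndex,
        right: ValueIndex,
        right_type: TypeIndex,
        id: BinaryOperationId,
    ) !u32 {
        // Mirrors the analyzer: operand types must already match; the result
        // type is simply the shared operand type.
        if (left_type != right_type) unreachable;
        const index: u32 = @intCast(pool.items.len);
        try pool.append(.{ .left = left, .right = right, .type = left_type, .id = id });
        return index;
    }

    pub fn main() !void {
        var pool = std.ArrayList(BinaryOperation).init(std.heap.page_allocator);
        defer pool.deinit();
        const index = try processBinaryOperation(&pool, 0, 7, 1, 7, .add);
        std.debug.print("op #{}: {s}, type #{}\n", .{ index, @tagName(pool.items[index].id), pool.items[index].type });
    }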


@@ -141,6 +141,8 @@ pub const Node = packed struct(u128) {
enum_field = 58,
extern_qualifier = 59,
function_prototype = 60,
add = 61,
sub = 62,
};
};
@@ -692,6 +694,33 @@ const Analyzer = struct {
return try analyzer.expressionPrecedence(0);
}
const PrecedenceOperator = enum {
compare_equal,
compare_not_equal,
add,
sub,
};
const operator_precedence = std.EnumArray(PrecedenceOperator, i32).init(.{
.compare_equal = 30,
.compare_not_equal = 30,
.add = 60,
.sub = 60,
});
const operator_associativity = std.EnumArray(PrecedenceOperator, Associativity).init(.{
.compare_equal = .none,
.compare_not_equal = .none,
.add = .left,
.sub = .left,
});
const operator_node_id = std.EnumArray(PrecedenceOperator, Node.Id).init(.{
.compare_equal = .compare_equal,
.compare_not_equal = .compare_not_equal,
.add = .add,
.sub = .sub,
});
fn expressionPrecedence(analyzer: *Analyzer, minimum_precedence: i32) !Node.Index {
var result = try analyzer.prefixExpression();
if (!result.invalid) {
@@ -704,55 +733,75 @@ const Analyzer = struct {
while (analyzer.token_i < analyzer.tokens.len) {
const token = analyzer.tokens[analyzer.token_i];
// logln("Looping in expression precedence with token {}\n", .{token});
const precedence: i32 = switch (token.id) {
.equal, .semicolon, .right_parenthesis, .right_brace, .comma, .period, .fixed_keyword_const, .fixed_keyword_var => -1,
.bang => switch (analyzer.tokens[analyzer.token_i + 1].id) {
.equal => 30,
else => unreachable,
},
else => |t| {
const start = token.start;
logln(.parser, .precedence, "Source file:\n```\n{s}\n```\n", .{analyzer.source_file[start..]});
@panic(@tagName(t));
const operator: PrecedenceOperator = switch (token.id) {
.equal, .semicolon, .right_parenthesis, .right_brace, .comma, .period, .fixed_keyword_const, .fixed_keyword_var => break,
else => blk: {
const next_token_index = analyzer.token_i + 1;
if (next_token_index < analyzer.tokens.len) {
const next_token_id = analyzer.tokens[next_token_index].id;
break :blk switch (token.id) {
.equal => switch (next_token_id) {
.equal => .compare_equal,
else => break,
},
.bang => switch (next_token_id) {
.equal => .compare_not_equal,
else => unreachable,
},
.plus => switch (next_token_id) {
.plus => unreachable,
.equal => unreachable,
else => .add,
},
.minus => switch (next_token_id) {
.minus => unreachable,
.equal => unreachable,
else => .sub,
},
else => |t| @panic(@tagName(t)),
};
} else {
unreachable;
}
},
};
logln(.parser, .precedence, "Precedence: {} ({s}) (file #{})\n", .{ precedence, @tagName(token.id), analyzer.file_index.uniqueInteger() });
const precedence = operator_precedence.get(operator);
if (precedence < minimum_precedence) {
logln(.parser, .precedence, "Breaking for minimum_precedence\n", .{});
break;
}
if (precedence == banned_precedence) {
logln(.parser, .precedence, "Breaking for banned precedence\n", .{});
if (precedence < banned_precedence) {
logln(.parser, .precedence, "Breaking for banned_precedence\n", .{});
break;
}
const operator_token = analyzer.token_i;
const is_bang_equal = analyzer.tokens[operator_token].id == .bang and analyzer.tokens[operator_token + 1].id == .equal;
analyzer.token_i += @as(u32, 1) + @intFromBool(is_bang_equal);
const extra_token = switch (operator) {
.add,
.sub,
=> false,
.compare_equal,
.compare_not_equal,
=> true,
// else => |t| @panic(@tagName(t)),
};
analyzer.token_i += @as(u32, 1) + @intFromBool(extra_token);
// TODO: fix this
const right = try analyzer.expressionPrecedence(precedence + 1);
const operation_id: Node.Id = switch (is_bang_equal) {
true => .compare_not_equal,
false => switch (analyzer.tokens[operator_token].id) {
else => |t| @panic(@tagName(t)),
},
};
const node_id = operator_node_id.get(operator);
result = try analyzer.addNode(.{
.id = operation_id,
.id = node_id,
.token = operator_token,
.left = result,
.right = right,
});
const associativity: Associativity = switch (operation_id) {
.compare_equal, .compare_not_equal, .compare_less_than, .compare_greater_than, .compare_less_or_equal, .compare_greater_or_equal => .none,
else => .left,
};
const associativity = operator_associativity.get(operator);
if (associativity == .none) {
banned_precedence = precedence;
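The rewritten loop is conventional precedence climbing: map the token (plus one token of lookahead) to an operator, stop when its precedence drops below the minimum or below the banned precedence set after a non-associative operator, consume the operator token(s), parse the right-hand side at precedence + 1, and fold the result into a left-leaning node. A toy, self-contained version of the same control flow over single-digit numbers and +/-, with none of the compiler's token or node machinery:

    const std = @import("std");

    const Parser = struct {
        tokens: []const u8,
        i: usize = 0,

        fn primary(p: *Parser) i64 {
            const c = p.tokens[p.i];
            p.i += 1;
            return c - '0';
        }

        fn expression(p: *Parser, minimum_precedence: i32) i64 {
            var result = p.primary();
            while (p.i < p.tokens.len) {
                const op = p.tokens[p.i];
                const precedence: i32 = switch (op) {
                    '+', '-' => 60, // mirrors .add/.sub = 60 in the table above
                    else => break,
                };
                if (precedence < minimum_precedence) break;
                p.i += 1;
                // Left associativity: the right-hand side must bind strictly tighter.
                const right = p.expression(precedence + 1);
                result = switch (op) {
                    '+' => result + right,
                    '-' => result - right,
                    else => unreachable,
                };
            }
            return result;
        }
    };

    pub fn main() void {
        var parser = Parser{ .tokens = "3-1+2" };
        std.debug.print("{}\n", .{parser.expression(0)}); // prints 4
    }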

test/add_sub/main.nat Normal file

@@ -0,0 +1,8 @@
const main = fn() s32 {
const a: s32 = 1;
const b: s32 = 2;
const c: s32 = a + b;
const d: s32 = 3;
const e: s32 = d - c;
return e;
}
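For reference, the new test is pure constant arithmetic: c = 1 + 2 = 3 and e = 3 - 3 = 0, so main returns 0 and the binary exits with status 0, which the ci.sh loop above counts as a pass.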