This commit is contained in:
caandt 2025-05-29 23:22:30 -05:00
parent b7ad73cdb6
commit 19bdba61e2
3 changed files with 118 additions and 4 deletions

113
src/frontend/parse.zig Normal file
View file

@@ -0,0 +1,113 @@
const std = @import("std");
const token = @import("token.zig");
const Token = token.Token;
const TokenKind = token.TokenKind;
const op_kind = token.op_kind;
/// Expression tree node. Child nodes are referenced by pointer; the
/// `Parser` that built the tree owns the allocations.
pub const Expr = union(enum) {
    binop: struct { lhs: *const Expr, rhs: *const Expr, op: Token },
    unop: struct { expr: *const Expr, op: Token },
    atom: Token,

    /// Evaluate the expression over the source text `src` (tokens carry
    /// byte offsets into it). Unknown operators and unparsable numbers
    /// evaluate to 0 rather than producing an error.
    pub fn eval(self: *const Expr, src: []const u8) i64 {
        return switch (self.*) {
            .atom => |t| switch (t.kind) {
                // Base 0 lets parseInt accept 0x/0o/0b prefixes as well
                // as plain decimal.
                .number => std.fmt.parseInt(i64, src[t.start..t.end], 0) catch 0,
                else => 0,
            },
            .binop => |b| {
                const x = b.lhs.eval(src);
                const y = b.rhs.eval(src);
                return switch (b.op.kind) {
                    op_kind("+") => x + y,
                    op_kind("-") => x - y,
                    op_kind("*") => x * y,
                    // Guard division by zero and minInt/-1, which would
                    // otherwise panic in safe builds (UB in ReleaseFast);
                    // fall back to 0, consistent with the other error cases.
                    op_kind("/") => if (y == 0 or (x == std.math.minInt(i64) and y == -1)) 0 else @divTrunc(x, y),
                    else => 0,
                };
            },
            .unop => |u| {
                const x = u.expr.eval(src);
                return switch (u.op.kind) {
                    op_kind("+") => x,
                    // NOTE(review): negating minInt(i64) still overflows
                    // here; left as-is since literals that large cannot be
                    // parsed into range anyway — confirm if inputs change.
                    op_kind("-") => -x,
                    else => 0,
                };
            },
        };
    }
};
// Errors the parser can surface: a token with no registered handler,
// malformed input (e.g. missing ')'), or node-allocation failure.
const Error = error{ InvalidOp, SyntaxError, OutOfMemory };
// Table entry for tokens that may BEGIN an expression (atoms, '(', unary ops).
// `precedence` is forwarded to the handler as its parse threshold.
const PrefixHandler = struct { precedence: u32, parse: *const fn (*Parser, u32, Token) Error!*const Expr };
// Table entry for tokens that may CONTINUE an expression (binary operators);
// the already-parsed left operand is passed through.
const InfixHandler = struct { precedence: u32, parse: *const fn (*Parser, u32, *const Expr, Token) Error!*const Expr };
/// Prefix handler for leaf tokens (names and numbers): wraps the token
/// in an `atom` node.
fn prefix_atom(parser: *Parser, _: u32, tok: Token) Error!*const Expr {
    const leaf = Expr{ .atom = tok };
    return parser.make_expr(leaf);
}
/// Prefix handler for '(': parses the grouped subexpression and requires
/// a matching ')', otherwise fails with `SyntaxError`.
fn prefix_paren(parser: *Parser, _: u32, _: Token) Error!*const Expr {
    const inner = try parser.parse(0);
    const closing = parser.tokenizer.next();
    return if (closing.kind == op_kind(")")) inner else Error.SyntaxError;
}
/// Prefix handler for unary operators: the operand is parsed at the
/// operator's own precedence, so it binds tighter than weaker infix ops.
fn prefix_unop(parser: *Parser, precedence: u32, tok: Token) Error!*const Expr {
    const operand = try parser.parse(precedence);
    const node = Expr{ .unop = .{ .expr = operand, .op = tok } };
    return parser.make_expr(node);
}
/// Infix handler for binary operators: `lhs` is already parsed; the right
/// operand is parsed at the operator's precedence (left-associative, since
/// the driving loop only continues on strictly greater precedence).
fn infix_binop(parser: *Parser, precedence: u32, lhs: *const Expr, tok: Token) Error!*const Expr {
    const rhs = try parser.parse(precedence);
    const node = Expr{ .binop = .{ .lhs = lhs, .rhs = rhs, .op = tok } };
    return parser.make_expr(node);
}
/// Pratt (precedence-climbing) expression parser. Operator behavior is
/// table-driven: `prefixes` maps token kinds that may start an expression,
/// `infixes` maps token kinds that may continue one.
pub const Parser = struct {
    tokenizer: token.Tokenizer,
    allocator: std.mem.Allocator,
    prefixes: std.AutoHashMap(TokenKind, PrefixHandler),
    infixes: std.AutoHashMap(TokenKind, InfixHandler),

    /// Parse an expression whose operators all bind tighter than
    /// `precedence`. Call with 0 to parse a complete expression.
    /// Returns `InvalidOp` when the next token cannot start an expression.
    pub fn parse(self: *Parser, precedence: u32) Error!*const Expr {
        const tok = self.tokenizer.next();
        const prefix = self.prefixes.get(tok.kind) orelse return Error.InvalidOp;
        var left = try prefix.parse(self, prefix.precedence, tok);
        // Keep absorbing infix operators while they bind tighter than the
        // caller's threshold; strict `>` makes operators left-associative.
        var infix: InfixHandler = undefined;
        while (w: {
            infix = self.infixes.get(self.tokenizer.peek().kind) orelse break :w false;
            break :w infix.precedence > precedence;
        }) {
            left = try infix.parse(self, infix.precedence, left, self.tokenizer.next());
        }
        return left;
    }

    /// Allocate an `Expr` node with the parser's allocator.
    /// NOTE(review): nodes are never individually freed (`free` releases
    /// only the tables); pass an arena allocator to `init`, or free the
    /// tree externally, to avoid leaking nodes.
    fn make_expr(self: *Parser, expr: Error!Expr) Error!*const Expr {
        const ptr = try self.allocator.create(Expr);
        ptr.* = try expr;
        return ptr;
    }

    // Register `op` as a prefix (unary) operator at the given precedence.
    fn register_unop(self: *Parser, op: []const u8, precedence: u32) !void {
        try self.prefixes.put(op_kind(op), .{ .precedence = precedence, .parse = prefix_unop });
    }

    // Register `op` as an infix (binary) operator at the given precedence.
    fn register_binop(self: *Parser, op: []const u8, precedence: u32) !void {
        try self.infixes.put(op_kind(op), .{ .precedence = precedence, .parse = infix_binop });
    }

    /// Build a parser with the default operator table:
    /// unary +/- (4) binds tighter than * and / (3), which bind tighter
    /// than binary + and - (2).
    pub fn init(tokenizer: token.Tokenizer, allocator: std.mem.Allocator) !Parser {
        var p = Parser{
            .tokenizer = tokenizer,
            .allocator = allocator,
            .prefixes = std.AutoHashMap(TokenKind, PrefixHandler).init(allocator),
            .infixes = std.AutoHashMap(TokenKind, InfixHandler).init(allocator),
        };
        // Without this, both maps leak if any registration below fails.
        errdefer p.free();
        try p.prefixes.put(TokenKind.name, .{ .precedence = 0, .parse = prefix_atom });
        try p.prefixes.put(TokenKind.number, .{ .precedence = 0, .parse = prefix_atom });
        try p.prefixes.put(op_kind("("), .{ .precedence = 0, .parse = prefix_paren });
        try p.register_unop("+", 4);
        try p.register_unop("-", 4);
        try p.register_binop("+", 2);
        try p.register_binop("-", 2);
        try p.register_binop("*", 3);
        try p.register_binop("/", 3);
        return p;
    }

    /// Release the operator tables. `deinit` is the terminal release
    /// (unlike `clearAndFree`, which keeps the maps usable for reuse).
    /// Expr nodes allocated by `make_expr` are NOT freed here — see the
    /// note on `make_expr`.
    pub fn free(self: *Parser) void {
        self.prefixes.deinit();
        self.infixes.deinit();
    }
};

View file

@@ -18,10 +18,10 @@ pub fn main() !void {
const content = try file.readToEndAlloc(allocator, std.math.maxInt(usize));
defer allocator.free(content);
var t = lib.token.Tokenizer.init(content);
while (!t.at_end()) {
std.debug.print("{}\n", .{t.next()});
}
const t = lib.token.Tokenizer.init(content);
var p = try lib.parse.Parser.init(t, allocator);
defer p.free();
std.debug.print("{}\n", .{(try p.parse(0)).eval(content)});
}
test {

View file

@@ -1,5 +1,6 @@
const std = @import("std");
pub const token = @import("frontend/token.zig");
pub const parse = @import("frontend/parse.zig");
test {
std.testing.refAllDecls(@This());