diff --git a/src/frontend/parse.zig b/src/frontend/parse.zig
index dd64075..1509947 100644
--- a/src/frontend/parse.zig
+++ b/src/frontend/parse.zig
@@ -54,18 +54,18 @@ fn prefix_atom(parser: *Parser, _: u32, tok: Token) Error!*const Expr {
     return try parser.make_expr(.{ .atom = tok });
 }
 fn prefix_paren(parser: *Parser, _: u32, _: Token) Error!*const Expr {
-    const expr = try parser.parse(0);
+    const expr = try parser.parse_expr(0);
     if (parser.tokenizer.next().kind != op_kind(")")) {
         return Error.SyntaxError;
     }
     return expr;
 }
 fn prefix_unop(parser: *Parser, precedence: u32, tok: Token) Error!*const Expr {
-    const expr = try parser.parse(precedence);
+    const expr = try parser.parse_expr(precedence);
     return try parser.make_expr(.{ .unop = .{ .expr = expr, .op = tok } });
 }
 fn infix_binop(parser: *Parser, precedence: u32, lhs: *const Expr, tok: Token) Error!*const Expr {
-    const rhs = try parser.parse(precedence);
+    const rhs = try parser.parse_expr(precedence);
     return try parser.make_expr(.{ .binop = .{ .lhs = lhs, .rhs = rhs, .op = tok } });
 }
 
@@ -73,7 +73,11 @@ pub const Parser = struct {
     tokenizer: token.Tokenizer,
     allocator: std.mem.Allocator,
     ops: std.AutoHashMap(TokenKind, Operator),
-    pub fn parse(self: *Parser, precedence: u32) Error!*const Expr {
+    pub fn parse(self: *Parser, src: []const u8) Error!*const Expr {
+        self.tokenizer = token.Tokenizer.init(src, self.ops);
+        return self.parse_expr(0);
+    }
+    fn parse_expr(self: *Parser, precedence: u32) Error!*const Expr {
         const tok = self.tokenizer.next();
         const prefix = self.ops.get(tok.kind) orelse return Error.InvalidOp;
         var left = try prefix.parse_prefix(self, prefix.precedence, tok);
@@ -99,13 +103,14 @@ pub const Parser = struct {
         const op_p = try self.ops.getOrPutValue(op_kind(op), .{ .precedence = precedence });
         op_p.value_ptr.parse_infix = infix_binop;
     }
-    pub fn init(src: []const u8, allocator: std.mem.Allocator) !Parser {
+    pub fn init(allocator: std.mem.Allocator) !Parser {
         const ops = std.AutoHashMap(TokenKind, Operator).init(allocator);
-        var p = Parser{ .tokenizer = token.Tokenizer.init(src, ops), .allocator = allocator, .ops = ops };
+        var p = Parser{ .tokenizer = undefined, .allocator = allocator, .ops = ops };
 
         try p.ops.put(TokenKind.name, .{ .parse_prefix = prefix_atom });
         try p.ops.put(TokenKind.number, .{ .parse_prefix = prefix_atom });
         try p.ops.put(op_kind("("), .{ .parse_prefix = prefix_paren });
+        try p.ops.put(op_kind(")"), .{});
 
         try p.register_unop("+", 4);
         try p.register_unop("-", 4);
diff --git a/src/main.zig b/src/main.zig
index 03d8979..39939e7 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -18,9 +18,9 @@ pub fn main() !void {
     const content = try file.readToEndAlloc(allocator, std.math.maxInt(usize));
     defer allocator.free(content);
 
-    var p = try lib.parse.Parser.init(content, allocator);
+    var p = try lib.parse.Parser.init(allocator);
     defer p.free();
-    std.debug.print("{}\n", .{(try p.parse(0)).eval(content)});
+    std.debug.print("{}\n", .{(try p.parse(content)).eval(content)});
 }
 
 test {