ast: sub, mul, div

This commit is contained in:
janis 2025-10-30 00:22:24 +01:00
parent 838c96f04f
commit 099d774634
Signed by: janis
SSH key fingerprint: SHA256:bB1qbbqmDXZNT0KKD5c2Dfjg53JGhj7B3CFcLIzSqq8
3 changed files with 87 additions and 16 deletions

View file

@@ -20,6 +20,12 @@ section .rdata
TYPE_STR equ 5
;; end-consts
PRECEDENCE_ADD equ 90
PRECEDENCE_SUB equ 90
PRECEDENCE_MUL equ 100
PRECEDENCE_DIV equ 100
PRECEDENCE_REM equ 100
section .text
extern vec_init_with
extern vec_push
@@ -323,7 +329,8 @@ parse_binary_expr:
sub rsp, 64
; lexeme: Lexeme [32..56]
; right: u64 [24..32]
; precedence: u8 [17..18] ; our_precedence: u8 [18..19]
; upper_precedence: u8 [17..18]
; operator: u8 [16..17]
; left: u64 [8..16]
; rdi: *mut Ast [0..8]
@@ -339,22 +346,49 @@ parse_binary_expr:
lea rdi, [rsp + 32] ; lexeme
call peek_lexeme
mov rax, [rsp + 32]
mov dil, [rsp + 17]
cmp al, dil ; our_precedence <= upper_precedence
jle .done ; also covers some non-binary operator tokens
cmp al, TOKEN_PLUS
je .plus je .add
cmp al, TOKEN_MINUS
je .sub
cmp al, TOKEN_STAR
je .mul
cmp al, TOKEN_SLASH
je .div
jmp .done
.plus: .add:
mov dil, TOKEN_PLUS
call unwrap_token
mov byte [rsp + 16], TOKEN_PLUS
mov byte [rsp + 18], PRECEDENCE_ADD
jmp .right
.sub:
mov dil, TOKEN_MINUS
call unwrap_token
mov byte [rsp + 16], TOKEN_MINUS
mov byte [rsp + 18], PRECEDENCE_SUB
jmp .right
.mul:
mov dil, TOKEN_STAR
call unwrap_token
mov byte [rsp + 16], TOKEN_STAR
mov byte [rsp + 18], PRECEDENCE_MUL
jmp .right
.div:
mov dil, TOKEN_SLASH
call unwrap_token
mov byte [rsp + 16], TOKEN_SLASH
mov byte [rsp + 18], PRECEDENCE_DIV
jmp .right
.right:
mov dil, [rsp + 17]
mov al, [rsp + 18] ; our_precedence
cmp al, dil ; our_precedence <= upper_precedence
jle .done
mov rdi, [rsp] ; Ast
mov sil, [rsp + 16] mov sil, [rsp + 18]
call parse_binary_expr
mov [rsp + 24], rax ; right

View file

@@ -37,6 +37,15 @@ fn main() {
print_ast(b"fn main() -> void { return (1 + (2)); }", |ast| unsafe {
parse_func(ast);
});
print_ast(
b"fn main() -> void { return (1 + (2 * 3)) / 4; }",
|ast| unsafe {
parse_func(ast);
},
);
print_ast(b"fn main() -> void { return 1 + 2 * 3; }", |ast| unsafe {
parse_func(ast);
});
}
impl std::fmt::Display for AstNode {

View file

@@ -176,7 +176,7 @@ fn main() {
assert_eq!(expect_token(2).into_option(), None);
assert_eq!(expect_token(4).into_option().unwrap().as_str(), "fn");
assert_eq!(unwrap_token(31).as_str(), "my-function"); assert_eq!(unwrap_token(TOKEN_IDENT).as_str(), "my-function");
eprint!("Initializing tokeniser.. ");
tokeniser_init(c"tests/tokens/comment.l".as_ptr());
@@ -185,7 +185,7 @@ fn main() {
assert_eq!(
&collect_tokens()[..],
&[
Lexeme(34, ""), Lexeme(TOKEN_COMMENT, ""),
Lexeme(4, ""),
Lexeme(TOKEN_IDENT, "my-function"),
Lexeme(19, ""),
@@ -193,7 +193,7 @@ fn main() {
Lexeme(12, ""),
Lexeme(11, ""),
Lexeme(21, ""),
Lexeme(34, ""), Lexeme(TOKEN_COMMENT, ""),
Lexeme(5, ""),
Lexeme(10, ""),
Lexeme(23, ""),
@@ -223,11 +223,14 @@ fn main() {
assert_eq!(
&collect_tokens()[..],
&[
Lexeme(TOKEN_COMMENT, "\"this is a string\""), Lexeme(TOKEN_STRING, "\"this is a string\""),
Lexeme(TOKEN_COMMENT, "\"another\nstring\nspanning multiple\n lines\""), Lexeme(
Lexeme(TOKEN_COMMENT, "\"string with a \\\"quoted\\\" word\""), TOKEN_STRING,
Lexeme(TOKEN_COMMENT, "\"a\""), "\"another\nstring\nspanning multiple\n lines\""
Lexeme(TOKEN_COMMENT, "\"\"") ),
Lexeme(TOKEN_STRING, "\"string with a \\\"quoted\\\" word\""),
Lexeme(TOKEN_STRING, "\"a\""),
Lexeme(TOKEN_STRING, "\"\"")
],
);
@@ -238,7 +241,11 @@ fn main() {
assert_eq!(
&collect_tokens()[..],
&[Lexeme(TOKEN_NUMBER, "3"), Lexeme(16, "+"), Lexeme(TOKEN_NUMBER, "4")], &[
Lexeme(TOKEN_NUMBER, "3"),
Lexeme(16, "+"),
Lexeme(TOKEN_NUMBER, "4")
],
);
eprint!("Initializing tokeniser.. ");
@@ -265,6 +272,27 @@ fn main() {
],
);
eprint!("Initializing tokeniser.. ");
let src = b"(b / d + c) * 42;";
tokeniser_init_buf(src.as_ptr(), src.len());
eprintln!("ok.");
assert_eq!(
&collect_tokens()[..],
&[
Lexeme(19, "("),
Lexeme(33, "b"),
Lexeme(31, "/"),
Lexeme(33, "d"),
Lexeme(16, "+"),
Lexeme(33, "c"),
Lexeme(18, ")"),
Lexeme(32, "*"),
Lexeme(34, "42"),
Lexeme(23, ";")
],
);
eprintln!("Finished tokenising.");
}
}