diff --git a/lang/src/codegen.asm b/lang/src/codegen.asm
index fc55532..4d6e4ec 100644
--- a/lang/src/codegen.asm
+++ b/lang/src/codegen.asm
@@ -971,6 +971,244 @@ codegen_block:
     pop rbp
     ret
 
+;; rdi: ctx
+;; rsi: &function_ctx
+;; rdx: *AstCallExpr
+;; define-fn: fn codegen_call_expr(ctx: *const CodegenCtx, function_ctx: &FunctionCtx, call_expr: *const AstCallExpr) -> (u64, bool)
+codegen_call_expr:
+    push rbp
+    mov rbp, rsp
+    push rbx
+    push r15
+    push r14
+
+    ; scratch         [80..112]
+    ; callee_operand  [64..80]
+    ; param_operands  [24..64]
+    ; *AstCallExpr    [16..24]
+    ; function_ctx    [8..16]
+    ; ctx             [0..8]
+    sub rsp, 112
+    mov [rsp], rdi              ; ctx
+    mov [rsp + 8], rsi          ; &function_ctx
+    mov [rsp + 16], rdx         ; *AstCallExpr
+    mov rbx, rdx                ; copy of *AstCallExpr
+
+    ; initialize param_operands
+    mov rcx, [rbx + 16]         ; initial capacity = AstCallExpr.args_len
+    lea rdi, [rsp + 24]         ; param_operands
+    mov rsi, 16                 ; size_of::<Operand>()
+    mov rdx, 0                  ; drop = None
+    call vec_init_with
+
+    ; codegen callee
+    mov rdx, [rbx + 0]          ; AstCallExpr.callee
+    mov rdi, [rsp]              ; ctx
+    mov rsi, [rsp + 8]          ; &function_ctx
+    call codegen_expr
+    mov [rsp + 64], rax         ; callee operand
+    mov [rsp + 72], rdx
+
+    ; If the callee is in a param-register, move it to the stack
+    cmp byte [rax], OPERAND_REGISTER
+    jne .arg_loop_init
+    mov rdx, 0b00000011_00111100 ; bitmask of the argument registers
+    movzx rcx, byte [rax + 1]   ; callee register
+    bt rdx, rcx
+    jnc .arg_loop_init
+
+    ; allocate stack slot
+    mov rdi, [rsp + 8]          ; &function_ctx
+    movzx rsi, word [rax + 2]   ; Operand.width
+    call codegen_allocate_stack_value
+    mov [rsp + 80], rax         ; new stack operand
+    mov [rsp + 88], rdx
+
+    ; move from register to stack
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 80]         ; dst
+    lea rdx, [rsp + 64]         ; src
+    call codegen_move_dst_src
+    ; free original callee operand
+    mov rdi, [rsp + 8]          ; &function_ctx
+    lea rsi, [rsp + 64]         ; callee operand
+    call codegen_free_operand
+
+    mov rax, [rsp + 80]         ; callee operand = new stack operand
+    mov rdx, [rsp + 88]
+    mov [rsp + 64], rax
+    mov [rsp + 72], rdx
+
+.arg_loop_init:
+    mov r15, [rbx + 16]         ; AstCallExpr.args_len
+    xor r14, r14                ; arg index
+.arg_loop:
+    cmp r14, r15
+    jge .arg_loop_done
+    mov rdx, [rbx + 8]          ; AstCallExpr.args
+    lea rsi, [rdx + r14 * 8]
+    mov rdx, [rsi]              ; arg ast index
+    mov rdi, [rsp]              ; ctx
+    mov rsi, [rsp + 8]          ; &function_ctx
+    call codegen_expr
+    mov [rsp + 80], rax         ; param operand
+    mov [rsp + 88], rdx
+
+    ; store param operand
+    ; if the operand is in a register, move it to the stack
+    ; we already know that these are values
+
+    cmp byte [rax], OPERAND_REGISTER
+    jne .store_param
+    ; allocate stack slot
+    mov rdi, [rsp + 8]          ; &function_ctx
+    movzx rsi, word [rax + 2]   ; Operand.width
+    call codegen_allocate_stack_value
+    mov [rsp + 96], rax         ; new stack operand
+    mov [rsp + 104], rdx
+
+    ; move from register to stack
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 96]         ; dst
+    lea rdx, [rsp + 80]         ; src
+    call codegen_move_dst_src
+
+    ; free original param operand
+    mov rdi, [rsp + 8]          ; &function_ctx
+    lea rsi, [rsp + 80]         ; param operand
+    call codegen_free_operand
+    mov rax, [rsp + 96]         ; param operand = new stack operand
+    mov rdx, [rsp + 104]
+    mov [rsp + 80], rax
+    mov [rsp + 88], rdx
+
+.store_param:
+    ; push param operand to param_operands
+    lea rdi, [rsp + 24]         ; param_operands
+    lea rsi, [rsp + 80]         ; &param operand
+    call vec_push
+
+    inc r14
+    jmp .arg_loop
+.arg_loop_done:
+
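+    ; Emit the call sequence itself. The steps below assume the System V
+    ; x86-64 convention the generated code targets: spill the volatile
+    ; registers currently in use, load the arguments into their argument
+    ; registers, align rsp to 16 bytes, emit the call, then undo it all
+    ; in reverse order.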
+    ; push volatile registers
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 8]          ; &function_ctx
+    mov rdx, 0                  ; pop = false
+    call codegen_push_pop_used_registers
+
+    ; move params to the appropriate registers/stack slots
+    ; reverse order to allow freeing the allocated stack slots
+    mov r15, [rbx + 16]         ; AstCallExpr.args_len
+    cmp r15, 6
+    jg .panic                   ; more than 6 params not supported yet
+.param_move_loop:
+    dec r15
+    cmp r15, 0
+    jl .param_move_done
+
+    mov rdi, r15                ; param index
+    call codegen_arg_to_operand
+    mov [rsp + 80], rax         ; target operand
+    mov [rsp + 88], rdx
+
+    lea rdi, [rsp + 24]         ; param_operands
+    mov rsi, r15
+    call vec_get
+    mov r14, rax                ; source operand
+
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 80]         ; dst
+    mov rdx, r14                ; src
+    call codegen_move_dst_src
+    ; free param operand
+    mov rdi, [rsp + 8]          ; &function_ctx
+    mov rsi, r14                ; param operand
+    call codegen_free_operand
+
+    jmp .param_move_loop
+.param_move_done:
+
+    ; align stack
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rel ALIGN_STACK]
+    mov rdx, ALIGN_STACK_LEN
+    call vec_extend
+    ; mov rax, rsp
+    ; sub rsp, 8
+    ; and rsp, -16
+    ; mov [rsp], rax
+
+    ; call callee
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rel CALL_]
+    mov rdx, CALL_LEN
+    call vec_extend
+
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 64]         ; callee operand
+    call codegen_write_operand
+
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rel NEWLINE]
+    call vec_push
+
+    ; de-align stack
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rel RESTORE_STACK]
+    mov rdx, RESTORE_STACK_LEN
+    call vec_extend
+
+    ; free callee operand
+    mov rdi, [rsp + 8]          ; &function_ctx
+    lea rsi, [rsp + 64]         ; callee operand
+    call codegen_free_operand
+
+    ; pop volatile registers
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 8]          ; &function_ctx
+    mov rdx, 1                  ; pop = true
+    call codegen_push_pop_used_registers
+
+    ; allocate return value operand
+    mov rdi, [rsp + 8]          ; &function_ctx
+    mov rsi, 8                  ; size_of::<u64>()
+    call codegen_allocate_value
+    mov [rsp + 80], rax         ; return value operand
+    mov [rsp + 88], rdx
+
+    ; mov dst, rax
+    mov rdi, [rsp]              ; ctx
+    lea rdi, [rdi + 8]          ; &ctx.text
+    lea rsi, [rsp + 80]         ; dst
+    lea rdx, [rel OPERAND_RAX]  ; src
+    call codegen_move_dst_src
+
+    ; return the return value operand
+    mov rax, [rsp + 80]
+    mov rdx, [rsp + 88]
+
+    add rsp, 112
+    pop r14
+    pop r15
+    pop rbx
+    pop rbp
+    ret
+.panic:
+    call panic
+
 ;; rdi: ctx
 ;; rsi: &function_ctx
 ;; rdx: expr index
@@ -1022,8 +1260,17 @@ codegen_expr:
     je .address_of
     cmp bl, AST_IF
     je .if_expr
+    cmp bl, AST_CALL
+    je .call_expr
     jmp .panic
 
+.call_expr:
+    mov rdx, [rax + 8]          ; AstNode.data = *AstCallExpr
+    mov rdi, [rsp]              ; ctx
+    mov rsi, [rsp + 8]          ; &function_ctx
+    call codegen_call_expr
+    jmp .done
+
 .if_expr:
     mov rbx, [rax + 8]          ; AstNode.data = *AstIfExpr
     mov [rsp + 16], rax         ; scratch = AstNode
@@ -2545,4 +2792,12 @@ section .rdata
     JE_B_len equ $ - JE_B
     JMP_PHI dq "jmp .PHI"
     JMP_PHI_len equ $ - JMP_PHI
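+    ; Snippets spliced into the generated text by codegen_call_expr.
+    ; ALIGN_STACK saves the caller's rsp just below a freshly 16-byte-aligned
+    ; rsp; RESTORE_STACK ("pop rsp") recovers it after the emitted call.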
+    ALIGN_STACK db "mov rax, rsp", 10, "sub rsp, 8", 10, "and rsp, -16", 10, "mov [rsp], rax", 10
+    ALIGN_STACK_LEN equ $ - ALIGN_STACK
+    RESTORE_STACK db "pop rsp", 10
+    RESTORE_STACK_LEN equ $ - RESTORE_STACK
+    CALL_ db "call "
+    CALL_LEN equ $ - CALL_
+    NEWLINE db 10
+
diff --git a/lang/tests/codegen.rs b/lang/tests/codegen.rs
index 54b52a2..5cc00c8 100644
--- a/lang/tests/codegen.rs
+++ b/lang/tests/codegen.rs
@@ -99,15 +99,26 @@ fn main() {
 //     |ast| unsafe { parse_func(ast) },
 // );
 
+// print_ast(
+//     b"fn main(a: u32) -> void {
+// let x: u32 = 10;
+// if (a == 42) {
+// x = 7;
+// } else {
+// x = 8;
+// }
+// return x;
+// }",
+//     |ast| unsafe { parse_func(ast) },
+// );
+
 print_ast(
     b"fn main(a: u32) -> void {
-let x: u32 = 10;
 if (a == 42) {
-x = 7;
+    return main(7);
 } else {
-x = 8;
+    return a;
 }
-return x;
 }",
     |ast| unsafe { parse_func(ast) },
 );
diff --git a/lang/tests/shared/defs.rs b/lang/tests/shared/defs.rs
index a56928f..fb5b9c6 100644
--- a/lang/tests/shared/defs.rs
+++ b/lang/tests/shared/defs.rs
@@ -29,6 +29,7 @@ unsafe extern "C" {
     pub unsafe fn codegen_free_operand(ctx: *mut FunctionCtx, operand: *const Operand) -> ();
     pub unsafe fn codegen_function(ast: *const CodegenCtx, func_idx: u64) -> ();
     pub unsafe fn codegen_push_pop_used_registers(text: *mut Vec<u8>, function_ctx: &FunctionCtx, pop: bool) -> u8;
+    pub unsafe fn codegen_call_expr(ctx: *const CodegenCtx, function_ctx: &FunctionCtx, call_expr: *const AstCallExpr) -> (u64, bool);
     pub unsafe fn codegen_expr(ctx: *const CodegenCtx, function_ctx: &FunctionCtx, expr_idx: u64) -> (u64, bool);
     pub unsafe fn codegen_binary_op_rm64_rm64(function_ctx: *mut FunctionCtx, text: *mut BlobVec, lhs: *const Operand, rhs: *const Operand, op: [u8; 8], discard_result: bool) -> *const Operand;
     pub unsafe fn vec_insert_many(vec: *mut BlobVec, index: usize, data: *const u8, count: usize);