diff --git a/src/code_gen/aarch64/aarch64_codegen.rs b/src/code_gen/aarch64/aarch64_codegen.rs index b78d2b1..c155f6b 100644 --- a/src/code_gen/aarch64/aarch64_codegen.rs +++ b/src/code_gen/aarch64/aarch64_codegen.rs @@ -202,8 +202,6 @@ impl<'aarch64> CodeGen for Aarch64CodeGen<'aarch64> { return 0xFFFFFFFF; } - // let func_stack_size: usize = func_info.stack_size - // Function preamble println!(".global _{}\n_{}:", func_name, func_name); println!("sub sp, sp, {}", func_info.stack_size); @@ -623,7 +621,10 @@ impl<'aarch64> Aarch64CodeGen<'aarch64> { fn dump_global_with_alignment(symbol: &Symbol) { let def_val: String = if let Some(dv) = &symbol.default_value { dv.to_string() - } else { "0".to_string() }; + } + else { + "0".to_string() + }; match symbol.lit_type { LitTypeVariant::I32 => println!("{}: .align 4\n\t.word {}", symbol.name, def_val), LitTypeVariant::U8 => println!("{}:\t.byte {}", symbol.name, def_val), @@ -675,4 +676,81 @@ impl<'aarch64> Aarch64CodeGen<'aarch64> { }; Ok(result) } +} + +#[cfg(test)] +mod tests { + use core::f64; + use std::cell::RefCell; + + use crate::{code_gen::RegManager, types::LitType}; + + use super::Aarch64CodeGen; + + fn create_reg_mgr() -> RegManager { + let rm: RegManager = RegManager::new( + { + let mut regs: Vec<String> = vec![]; + for i in 0..=28 { + regs.push(format!("x{}", i)); + } + regs + }, + { + let mut regs: Vec<String> = vec![]; + for i in 0..=7 { + regs.push(format!("x{}", i)); + } + regs + } + ); + rm + } + + #[test] + fn test_gen_int_value_load_code_i64() { + // let rm = create_reg_mgr(); + // let cg = Aarch64CodeGen::new(RefCell::new(rm)); + let value = LitType::I64(0x123456789ABCDEF0); + let result = Aarch64CodeGen::gen_int_value_load_code(&value, "x0").unwrap(); + let expected = + "movz x0, 0x1234, lsl #48\n\ + movk x0, 0x5678, lsl #32\n\ + movk x0, 0x9abc, lsl #16\n\ + movk x0, 0xdef0"; + assert_eq!(result, expected); + } + + #[test] + fn test_gen_int_value_load_code_i32() { + let value = LitType::I32(0x12345678); + let 
result = Aarch64CodeGen::gen_int_value_load_code(&value, "x0").unwrap(); + let expected = + "movz x0, 0x1234, lsl #16\n\ + movk x0, 0x5678"; + assert_eq!(result, expected); + } + + #[test] + fn test_gen_int_value_load_code_i16() { + let value = LitType::I16(0x1234); + let result = Aarch64CodeGen::gen_int_value_load_code(&value, "w0").unwrap(); + let expected = "movz w0, 0x1234"; + assert_eq!(result, expected); + } + + #[test] + fn test_gen_int_value_load_code_u8() { + let value = LitType::U8(0x12); + let result = Aarch64CodeGen::gen_int_value_load_code(&value, "w0").unwrap(); + let expected = "movz w0, 0x12"; + assert_eq!(result, expected); + } + + #[test] + fn test_gen_int_value_load_code_unsupported_type() { + let value = LitType::F64(f64::consts::PI); + let result = Aarch64CodeGen::gen_int_value_load_code(&value, "x0"); + assert!(result.is_err()); // The function should return an error for unsupported types + } } \ No newline at end of file diff --git a/src/parser/parser_impl.rs b/src/parser/parser_impl.rs index cf8962b..6a4d5ef 100644 --- a/src/parser/parser_impl.rs +++ b/src/parser/parser_impl.rs @@ -1327,4 +1327,6 @@ impl<'parser> Parser<'parser> { // REWRITE ALL THE TEST CASES #[cfg(test)] -mod tests {} +mod tests { + +} \ No newline at end of file diff --git a/src/tokenizer/tokenizer_impl.rs b/src/tokenizer/tokenizer_impl.rs index 84af0ab..4a96b26 100644 --- a/src/tokenizer/tokenizer_impl.rs +++ b/src/tokenizer/tokenizer_impl.rs @@ -411,7 +411,6 @@ impl Tokenizer { } } -/* tests #[cfg(test)] mod tests { use super::*; @@ -419,23 +418,22 @@ mod tests { #[test] fn test_int_var_decl_tokenization() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("global integer a; a = 2323;"); - assert!(tokens.len() == 9); + let tokens: Vec<Token> = tok.tokenize(Rc::new("let a: integer = 23;".to_string())); + assert!(tokens.len() == 8); assert_eq!(tokens[0].kind, TokenKind::KW_LET); - assert_eq!(tokens[1].kind, TokenKind::KW_INT); - 
assert_eq!(tokens[2].kind, TokenKind::T_IDENTIFIER); - assert_eq!(tokens[3].kind, TokenKind::T_SEMICOLON); - assert_eq!(tokens[4].kind, TokenKind::T_IDENTIFIER); - assert_eq!(tokens[5].kind, TokenKind::T_EQUAL); - assert_eq!(tokens[6].kind, TokenKind::T_INT_NUM); - assert_eq!(tokens[7].kind, TokenKind::T_SEMICOLON); - assert_eq!(tokens[8].kind, TokenKind::T_EOF); + assert_eq!(tokens[1].kind, TokenKind::T_IDENTIFIER); + assert_eq!(tokens[2].kind, TokenKind::T_COLON); + assert_eq!(tokens[3].kind, TokenKind::KW_INT); + assert_eq!(tokens[4].kind, TokenKind::T_EQUAL); + assert_eq!(tokens[5].kind, TokenKind::T_INT_NUM); + assert_eq!(tokens[6].kind, TokenKind::T_SEMICOLON); + assert_eq!(tokens[7].kind, TokenKind::T_EOF); } #[test] fn test_should_report_invalid_numeric_value_error3() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize(".9999"); + let tokens: Vec<Token> = tok.tokenize(Rc::new(".9999".to_string())); assert_eq!(tokens[0].kind, TokenKind::T_DOT); assert_eq!(tokens[1].kind, TokenKind::T_INT_NUM); } @@ -443,57 +441,68 @@ mod tests { #[test] fn test_int_var_decl_len_correct() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("let a = 43343;"); + let tokens: Vec<Token> = tok.tokenize(Rc::new("let a = 43343;".to_string())); assert!(tokens.len() == 6); - assert_eq!(tokens[3].lexeme.len(), 6); + assert_eq!(tokens[3].lexeme.len(), 5); } #[test] fn test_float_var_decl_len_correct() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("let a = 34.343"); - assert!(tokens.len() == 6); - assert_eq!(tokens[3].lexeme, "4334.34"); - assert_eq!(tokens[3].lexeme.len(), 7); + let tokens: Vec<Token> = tok.tokenize(Rc::new("let a = 34.343".to_string())); + assert!(tokens.len() == 5); + assert_eq!(tokens[3].lexeme, "34.343"); + assert_eq!(tokens[3].lexeme.len(), 6); } #[test] #[should_panic] fn test_float_var_decl_len_correct2() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("let a = 3443.44ff"); 
+ let tokens: Vec<Token> = tok.tokenize(Rc::new("let a = 3443.44ff".to_string())); assert!(tokens.len() == 6); } #[test] fn test_char_ptr_var_decl_tokenization() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("let name = \"ram\";"); - assert!(tokens.len() == 7); - assert_eq!(tokens[0].kind, TokenKind::KW_CHAR); - assert_eq!(tokens[1].kind, TokenKind::T_STAR); - assert_eq!(tokens[2].kind, TokenKind::T_IDENTIFIER); - assert_eq!(tokens[3].kind, TokenKind::T_EQUAL); - assert_eq!(tokens[4].kind, TokenKind::T_STRING); - assert_eq!(tokens[5].kind, TokenKind::T_SEMICOLON); - assert_eq!(tokens[6].kind, TokenKind::T_EOF); - assert_eq!(tokens[2].lexeme, "name"); // give identifier - assert_eq!(tokens[4].lexeme, "ram"); // give string + let tokens: Vec<Token> = tok.tokenize(Rc::new("let name = \"ram\";".to_string())); + assert!(tokens.len() == 6); + assert_eq!(tokens[0].kind, TokenKind::KW_LET); + assert_eq!(tokens[1].kind, TokenKind::T_IDENTIFIER); + assert_eq!(tokens[2].kind, TokenKind::T_EQUAL); + assert_eq!(tokens[3].kind, TokenKind::T_STRING); + assert_eq!(tokens[4].kind, TokenKind::T_SEMICOLON); + assert_eq!(tokens[5].kind, TokenKind::T_EOF); + assert_eq!(tokens[1].lexeme, "name"); // give identifier + assert_eq!(tokens[3].lexeme, "ram"); // give string } #[test] fn test_func_decl_tokenization() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("def main() { return 0; }"); - assert!(tokens.len() == 11); + let tokens: Vec<Token> = tok.tokenize(Rc::new("def main() -> void { return 0; }".to_string())); + assert!(tokens.len() == 12); assert_eq!(tokens[1].kind, TokenKind::T_IDENTIFIER); assert_eq!(tokens[1].lexeme, "main"); + assert_eq!(tokens[7].lexeme, "return"); + } + + #[test] + fn test_empty_func_decl_tokenization() { + let mut tok: Tokenizer = Tokenizer::new(); + let tokens: Vec<Token> = tok.tokenize(Rc::new("def main() -> void { }".to_string())); + assert!(tokens.len() == 9); + assert_eq!(tokens[1].kind, TokenKind::T_IDENTIFIER); + 
assert_eq!(tokens[1].lexeme, "main"); + assert_eq!(tokens[6].lexeme, "{"); + assert_eq!(tokens[7].lexeme, "}"); } #[test] fn test_empty_source() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize(""); + let tokens: Vec<Token> = tok.tokenize(Rc::new("".to_string())); assert_eq!(tokens.len(), 1); // only T_EOF is present assert_eq!(tokens[0].kind, TokenKind::T_EOF); // only T_EOF is present } @@ -501,18 +510,17 @@ mod tests { #[test] fn test_only_whitespace_source() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("  "); + let tokens: Vec<Token> = tok.tokenize(Rc::new("  ".to_string())); assert_eq!(tokens.len(), 1); // only T_EOF is present assert_eq!(tokens[0].kind, TokenKind::T_EOF); // only EOF is present } #[test] - fn test_while_if_else_statement() { + fn test_if_else_statement() { let mut tok: Tokenizer = Tokenizer::new(); - let tokens: Vec<Token> = tok.tokenize("if (4 > 5) { } else { }"); + let tokens: Vec<Token> = tok.tokenize(Rc::new("if (4 > 5) { } else { }".to_string())); assert_eq!(tokens.len(), 12); // including T_EOF assert_eq!(tokens[0].kind, TokenKind::KW_IF); assert_eq!(tokens[8].kind, TokenKind::KW_ELSE); } -} - */ \ No newline at end of file +} \ No newline at end of file