diff --git a/examples/quicksort.zr b/examples/quicksort.zr
index c556a7f..aa7d9e5 100644
--- a/examples/quicksort.zr
+++ b/examples/quicksort.zr
@@ -22,7 +22,9 @@ func partition[arr: Array, low: I64, high: I64] : I64
     return i + 1
 
 func main[] : I64
-    let arr: Array = [340, 252, 352, 117, 650, 652, 322, 175, 714, 268, 725, 664]
+    let arr: Array = []
+    for i in 0..10
+        Array.push(arr, Math.abs(Math.urandom() % 1000))
 
     for i in 0..Array.size(arr)
         print_i64(arr[i])
diff --git a/src/codegen_x86_64.rs b/src/codegen_x86_64.rs
index 6849cd0..4369d88 100644
--- a/src/codegen_x86_64.rs
+++ b/src/codegen_x86_64.rs
@@ -84,7 +84,6 @@ impl CodegenX86_64 {
     pub fn get_output(&self) -> String {
         format!(
             "section .data
-S0 db \"assertion failed on line %d\",10,0
 {}{}",
             self.data_section, self.output
         )
@@ -150,34 +149,11 @@ Bit.rshift:
     sar rax, cl
     ret
 
-section .text.String.nth
-String.nth:
-    movzx rax, byte [rdi + rsi]
-    ret
-
 section .text.String.set
 String.set:
     mov [rdi + rsi], dl
     ret
 
-section .text.OS.time
-OS.time:
-    push rbx
-    sub rsp, 16
-    mov rbx, rsp
-    mov rdi, rbx
-    xor esi, esi
-    call gettimeofday
-    imul rcx, qword [rbx], 1000
-    mov rax, qword [rbx+8]
-    mov esi, 1000
-    cqo
-    idiv rsi
-    add rax, rcx
-    add rsp, 16
-    pop rbx
-    ret
-
 section .text.OS.listdir
 OS.listdir:
     push r14
@@ -373,18 +349,6 @@ Array.free:
                 emit!(&mut self.output, "    pop rbp");
                 emit!(&mut self.output, "    ret");
             }
-            Stmt::Assert { keyword, value } => {
-                self.compile_expr(env, value)?;
-                let skip_label = self.label();
-                emit!(&mut self.output, "    test rax, rax");
-                emit!(&mut self.output, "    jne {}", skip_label);
-                emit!(&mut self.output, "    mov rdi, S0");
-                emit!(&mut self.output, "    mov rsi, {}", keyword.loc.line);
-                emit!(&mut self.output, "    call printf");
-                emit!(&mut self.output, "    mov rdi, 1");
-                emit!(&mut self.output, "    call exit");
-                emit!(&mut self.output, "{}:", skip_label);
-            }
             Stmt::For {
                 var,
                 start,
diff --git a/src/main.rs b/src/main.rs
index 65e3359..6495cb1 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -29,6 +29,17 @@ fn compile_file_to(
     Ok(())
 }
 
+fn run_command(cmd: String) {
+    if !Command::new("sh")
+        .args(["-c", &cmd])
+        .status()
+        .unwrap()
+        .success()
+    {
+        process::exit(1);
+    }
+}
+
 fn compile_file(args: Args) -> Result<(), ZernError> {
     let source = match fs::read_to_string(&args.path) {
         Ok(x) => x,
@@ -46,41 +57,17 @@ fn compile_file(args: Args) -> Result<(), ZernError> {
     compile_file_to(&mut codegen, filename, source)?;
 
     if !args.output_asm {
-        if fs::write(format!("{}.s", args.out), codegen.get_output()).is_err() {
-            eprintln!("\x1b[91mERROR\x1b[0m: failed to write to {}.s", args.out);
-            process::exit(1);
-        }
+        fs::write(format!("{}.s", args.out), codegen.get_output()).unwrap();
 
-        if !Command::new("sh")
-            .args([
-                "-c",
-                &format!("nasm -f elf64 -o {}.o {}.s", args.out, args.out),
-            ])
-            .status()
-            .unwrap()
-            .success()
-        {
-            process::exit(1);
-        }
+        run_command(format!("nasm -f elf64 -o {}.o {}.s", args.out, args.out));
 
         // TODO: drop libc entirely
-        if !Command::new("sh")
-            .args([
-                "-c",
-                &format!(
-                    "./musl-1.2.4/root/bin/musl-gcc -static -o {} {}.o -flto -Wl,--gc-sections {}",
-                    args.out, args.out, args.cflags
-                ),
-            ])
-            .status()
-            .unwrap()
-            .success()
-        {
-            process::exit(1);
-        }
-    } else if fs::write(&args.out, codegen.get_output()).is_err() {
-        eprintln!("\x1b[91mERROR\x1b[0m: failed to write to {}", args.out);
-        process::exit(1);
+        run_command(format!(
+            "./musl-1.2.4/root/bin/musl-gcc -static -o {} {}.o -flto -Wl,--gc-sections {}",
+            args.out, args.out, args.cflags
+        ));
+    } else {
+        fs::write(&args.out, codegen.get_output()).unwrap();
     }
 
     Ok(())
diff --git a/src/parser.rs b/src/parser.rs
index 365435b..b400878 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -37,10 +37,6 @@ pub enum Stmt {
         body: Box<Stmt>,
     },
     Return(Expr),
-    Assert {
-        keyword: Token,
-        value: Expr,
-    },
 }
 
 #[derive(Debug, Clone)]
@@ -197,11 +193,6 @@
             self.for_statement()
         } else if self.match_token(&[TokenType::KeywordReturn]) {
             Ok(Stmt::Return(self.expression()?))
-        } else if self.match_token(&[TokenType::KeywordAssert]) {
-            Ok(Stmt::Assert {
-                keyword: self.previous().clone(),
-                value: self.expression()?,
-            })
         } else {
             Ok(Stmt::Expression(self.expression()?))
         }
@@ -490,7 +481,8 @@ impl Parser {
     fn consume(&mut self, token_type: TokenType, message: &str) -> Result<Token, ZernError> {
         if self.check(&token_type) {
-            Ok(self.advance().clone())
+            self.current += 1;
+            Ok(self.previous().clone())
         } else {
             error!(self.previous().loc, format!("{}", message))
         }
@@ -499,7 +491,7 @@ impl Parser {
     fn match_token(&mut self, token_types: &[TokenType]) -> bool {
         for x in token_types {
             if self.check(x) {
-                self.advance();
+                self.current += 1;
                 return true;
             }
         }
@@ -514,13 +506,6 @@ impl Parser {
         }
     }
 
-    fn advance(&mut self) -> &Token {
-        if !self.eof() {
-            self.current += 1;
-        }
-        self.previous()
-    }
-
     fn peek(&self) -> &Token {
         &self.tokens[self.current]
     }
diff --git a/src/std.zr b/src/std.zr
index 4e4c3c0..3f877c6 100644
--- a/src/std.zr
+++ b/src/std.zr
@@ -8,6 +8,9 @@ func print[x: String] : I64
 func print_i64[x: I64] : I64
     printf("%ld\n", x)
 
+func String.nth[s: String, n: I64] : U8
+    return deref(s + n)
+
 func String.is_whitespace[c: U8] : Bool
     return c == ' ' || c == 10 || c == 13 || c == 9
 
@@ -167,6 +170,14 @@ func Math.urandom[]: I64
 func Array.new[] : Array
     return calloc(1, 24)
 
+func OS.time[] : I64
+    let tv: Ptr = malloc(16)
+    gettimeofday(tv, 0)
+    let seconds: I64 = deref(tv)
+    let microseconds: I64 = deref(tv+8)
+    free(tv)
+    return seconds * 1000 + microseconds / 1000
+
 func Crypto.hex_encode[s: String] : String
     let hex_chars: String = "0123456789abcdef"
     let s_len: I64 = strlen(s)
@@ -293,7 +304,7 @@ func Crypto.base64_decode[s: String] : String
         s4 = String.find(chars, String.nth(s, i+3))
         i = i + 4
 
-        let triple: U8 = Bit.lshift(s1, 18) || Bit.lshift(s2, 12) || Bit.lshift(s3, 6) || s4
+        let triple: I64 = Bit.lshift(s1, 18) || Bit.lshift(s2, 12) || Bit.lshift(s3, 6) || s4
 
         String.set(out, j, Bit.rshift(triple, 16) && 255)
         j = j + 1
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 87792f6..b3dd527 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -43,7 +43,6 @@ pub enum TokenType {
     KeywordIn,
     KeywordFunc,
     KeywordReturn,
-    KeywordAssert,
 
     Indent,
     Dedent,
@@ -336,7 +335,6 @@ impl Tokenizer {
             "in" => TokenType::KeywordIn,
            "func" => TokenType::KeywordFunc,
             "return" => TokenType::KeywordReturn,
-            "assert" => TokenType::KeywordAssert,
             "true" => TokenType::True,
             "false" => TokenType::False,
             _ => TokenType::Identifier,