This commit is contained in:
2025-05-29 18:28:25 +02:00
parent 0fbc964d37
commit 47412dda11
3 changed files with 198 additions and 9 deletions

View File

@@ -1,16 +1,29 @@
mod parser;
mod tokenizer;
use std::{env, error::Error, fs};
use std::{env, error::Error, fs, process};
/// Compiles the source file at `path`: reads it, tokenizes, parses, and
/// pretty-prints the resulting expression tree.
///
/// Returns an error if the file cannot be read, or if tokenizing or
/// parsing fails — so the caller (main) can report it and exit non-zero.
fn compile_file(path: String) -> Result<(), Box<dyn Error>> {
    // Borrow for the read so `path` can still be moved into the tokenizer
    // below without cloning.
    let source = fs::read_to_string(&path)?;
    // TODO: basename
    let tokenizer = tokenizer::Tokenizer::new(path, source);
    let tokens = tokenizer.tokenize()?;
    let parser = parser::Parser::new(tokens);
    // Propagate parse errors (previously the Result itself was Debug-printed
    // and the function returned Ok, so parse failures still exited 0).
    let expr = parser.parse()?;
    println!("{:#?}", expr);
    Ok(())
}
fn main() -> Result<(), Box<dyn Error>> {
let mut args = env::args();
let path = args.nth(1).unwrap();
let source = fs::read_to_string(path.clone())?;
// TODO: basename
let tokenizer = tokenizer::Tokenizer::new(path, source);
println!("{:#?}", tokenizer.tokenize()?);
if let Err(err) = compile_file(path) {
eprintln!("{}", err);
process::exit(1);
}
Ok(())
}

176
src/parser.rs Normal file
View File

@@ -0,0 +1,176 @@
use crate::tokenizer::{MotError, Token, TokenType, error};
/// An expression node in the abstract syntax tree produced by the parser.
#[derive(Debug, Clone)]
pub enum Expr {
    /// An infix operation: `left op right` (e.g. `1 + 2`, `a == b`).
    Binary {
        left: Box<Expr>,
        op: Token,
        right: Box<Expr>,
    },
    /// A parenthesized sub-expression: `( expr )`.
    Grouping(Box<Expr>),
    /// A literal value; carries the number or string token itself.
    Literal(Token),
    /// A prefix operation: `op right` (e.g. `-x`, `!flag`).
    Unary {
        op: Token,
        right: Box<Expr>,
    },
}
/// A recursive-descent parser over a token stream.
pub struct Parser {
    // All tokens of the compilation unit; `eof()`/`peek()` rely on the
    // stream ending with a TokenType::Eof sentinel (peek indexes
    // unconditionally), which the tokenizer presumably appends — confirm.
    tokens: Vec<Token>,
    // Index into `tokens` of the next token to consume.
    current: usize,
}
impl Parser {
    /// Creates a parser over an already-tokenized input.
    pub fn new(tokens: Vec<Token>) -> Parser {
        Parser { tokens, current: 0 }
    }

    /// Parses the token stream into a single expression tree.
    ///
    /// Consumes the parser. Any tokens after the first complete expression
    /// are currently ignored.
    pub fn parse(mut self) -> Result<Expr, MotError> {
        self.expression()
    }

    // TODO: synchronization after parse error

    // Grammar, lowest to highest precedence (each rule is one method):
    //   expression -> equality
    //   equality   -> comparison ( ("==" | "!=") comparison )*
    //   comparison -> term ( (">" | ">=" | "<=" | "<") term )*
    //   term       -> factor ( ("+" | "-" | "^") factor )*
    //   factor     -> unary ( ("*" | "/" | "%") unary )*
    //   unary      -> ("!" | "-") unary | primary
    //   primary    -> NUMBER | STRING | "(" expression ")"

    fn expression(&mut self) -> Result<Expr, MotError> {
        self.equality()
    }

    /// Parses a left-associative chain of binary operators: one `operand`,
    /// then zero or more `(op operand)` pairs for any op in `ops`.
    /// Shared by all four binary-precedence levels below.
    fn binary_left_assoc(
        &mut self,
        operand: fn(&mut Self) -> Result<Expr, MotError>,
        ops: &[TokenType],
    ) -> Result<Expr, MotError> {
        let mut expr = operand(self)?;
        while self.match_token(ops) {
            let op = self.previous().clone();
            let right = operand(self)?;
            expr = Expr::Binary {
                left: Box::new(expr),
                op,
                right: Box::new(right),
            };
        }
        Ok(expr)
    }

    fn equality(&mut self) -> Result<Expr, MotError> {
        self.binary_left_assoc(
            Self::comparison,
            &[TokenType::DoubleEqual, TokenType::NotEqual],
        )
    }

    fn comparison(&mut self) -> Result<Expr, MotError> {
        self.binary_left_assoc(
            Self::term,
            &[
                TokenType::Greater,
                TokenType::GreaterEqual,
                TokenType::LessEqual,
                TokenType::Less,
            ],
        )
    }

    fn term(&mut self) -> Result<Expr, MotError> {
        self.binary_left_assoc(
            Self::factor,
            &[TokenType::Plus, TokenType::Minus, TokenType::Xor],
        )
    }

    fn factor(&mut self) -> Result<Expr, MotError> {
        self.binary_left_assoc(
            Self::unary,
            &[TokenType::Star, TokenType::Slash, TokenType::Mod],
        )
    }

    fn unary(&mut self) -> Result<Expr, MotError> {
        if self.match_token(&[TokenType::Bang, TokenType::Minus]) {
            let op = self.previous().clone();
            // Right-associative: `--x` parses as -(-x).
            let right = self.unary()?;
            return Ok(Expr::Unary {
                op,
                right: Box::new(right),
            });
        }
        self.primary()
    }

    fn primary(&mut self) -> Result<Expr, MotError> {
        if self.match_token(&[TokenType::Number, TokenType::String]) {
            Ok(Expr::Literal(self.previous().clone()))
        } else if self.match_token(&[TokenType::LeftParen]) {
            let expr = self.expression()?;
            self.consume(TokenType::RightParen, "expected ')' after expression")?;
            Ok(Expr::Grouping(Box::new(expr)))
        } else {
            error!(self.peek().loc, "expected expression")
        }
    }

    /// Consumes and returns the current token if it has `token_type`;
    /// otherwise reports `message` as a parse error.
    fn consume(&mut self, token_type: TokenType, message: &str) -> Result<Token, MotError> {
        if self.check(&token_type) {
            Ok(self.advance().clone())
        } else {
            // NOTE(review): the error points at the *previous* token's
            // location, not the unexpected one (self.peek().loc) — confirm
            // that is intended. The format!("{}", ..) looks redundant but the
            // error! macro may require a format expression; left as-is.
            error!(self.previous().loc, format!("{}", message))
        }
    }

    /// If the current token matches any of `token_types`, consumes it and
    /// returns true; otherwise leaves the position unchanged.
    fn match_token(&mut self, token_types: &[TokenType]) -> bool {
        if token_types.iter().any(|t| self.check(t)) {
            self.advance();
            true
        } else {
            false
        }
    }

    /// True when the current token has `token_type`. Never matches at EOF.
    fn check(&self, token_type: &TokenType) -> bool {
        !self.eof() && self.peek().token_type == *token_type
    }

    /// Consumes the current token (no-op at EOF) and returns the token
    /// that was just consumed.
    fn advance(&mut self) -> &Token {
        if !self.eof() {
            self.current += 1;
        }
        self.previous()
    }

    /// The next token to be consumed. Panics if the Eof sentinel is missing.
    fn peek(&self) -> &Token {
        &self.tokens[self.current]
    }

    /// The most recently consumed token. Panics if nothing was consumed yet.
    fn previous(&self) -> &Token {
        &self.tokens[self.current - 1]
    }

    fn eof(&self) -> bool {
        self.peek().token_type == TokenType::Eof
    }
}

View File

@@ -183,7 +183,7 @@ impl Tokenizer {
while !self.eof() && self.peek() != '"' {
if self.peek() == '\n' {
self.loc.line += 1;
self.loc.column = 0;
self.loc.column = 1;
}
self.advance();
}
@@ -198,10 +198,10 @@ impl Tokenizer {
' ' | '\t' | '\r' => {}
'\n' => {
self.loc.line += 1;
self.loc.column = 0;
self.loc.column = 1;
}
'0'..='9' => self.scan_number(),
'A'..='z' => self.scan_identifier(),
'A'..='Z' | 'a'..='z' | '_' => self.scan_identifier(),
_ => return error!(self.loc, "unexpected character"),
}
Ok(())