basic global constants
@@ -7,12 +7,14 @@ use crate::{
 pub struct Analyzer {
     pub functions: HashMap<String, i32>,
+    pub constants: HashMap<String, u64>,
 }
 
 impl Analyzer {
     pub fn new() -> Analyzer {
         Analyzer {
             functions: HashMap::new(),
+            constants: HashMap::new(),
         }
     }
@@ -44,6 +46,18 @@ impl Analyzer {
             } => {
                 self.analyze_expr(initializer)?;
             }
+            Stmt::Const { name, value } => {
+                if self.constants.contains_key(&name.lexeme)
+                    || self.functions.contains_key(&name.lexeme)
+                {
+                    return error!(
+                        name.loc,
+                        format!("tried to redefine constant '{}'", name.lexeme)
+                    );
+                }
+                self.constants
+                    .insert(name.lexeme.clone(), value.lexeme.parse().unwrap());
+            }
             Stmt::Block(statements) => {
                 for stmt in statements {
                     self.analyze_stmt(stmt)?;
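
A note on the value parsing above: the analyzer stores every constant as a u64 taken straight from the literal's lexeme, and the unwrap() turns any unparsable literal into a panic rather than a compile error. A minimal standalone sketch (not part of the commit) of what that parse accepts:

fn main() {
    // `value.lexeme.parse::<u64>()` succeeds only for non-negative integer
    // literals that fit in 64 bits...
    assert_eq!("42".parse::<u64>().unwrap(), 42);
    // ...and fails otherwise, which the analyzer's unwrap() would turn
    // into a panic instead of a ZernError:
    assert!("-1".parse::<u64>().is_err());
    assert!("18446744073709551616".parse::<u64>().is_err()); // 2^64 overflows
}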
@@ -176,6 +176,9 @@ _builtin_environ:
                 let offset = env.define_var(name.lexeme.clone(), var_type);
                 emit!(&mut self.output, " mov QWORD [rbp-{}], rax", offset);
             }
+            Stmt::Const { name: _, value: _ } => {
+                // handled in the analyzer
+            }
             Stmt::Block(statements) => {
                 env.push_scope();
                 for stmt in statements {
@@ -445,18 +448,29 @@ _builtin_environ:
                 }
             }
             Expr::Variable(name) => {
-                // TODO: move to analyzer
-                let var = match env.get_var(&name.lexeme) {
-                    Some(x) => x,
-                    None => {
-                        return error!(name.loc, format!("undefined variable: {}", &name.lexeme));
-                    }
-                };
-                emit!(
-                    &mut self.output,
-                    " mov rax, QWORD [rbp-{}]",
-                    var.stack_offset,
-                );
+                if self.analyzer.constants.contains_key(&name.lexeme) {
+                    emit!(
+                        &mut self.output,
+                        " mov rax, {}",
+                        self.analyzer.constants[&name.lexeme]
+                    );
+                } else {
+                    // TODO: move to analyzer
+                    let var = match env.get_var(&name.lexeme) {
+                        Some(x) => x,
+                        None => {
+                            return error!(
+                                name.loc,
+                                format!("undefined variable: {}", &name.lexeme)
+                            );
+                        }
+                    };
+                    emit!(
+                        &mut self.output,
+                        " mov rax, QWORD [rbp-{}]",
+                        var.stack_offset,
+                    );
+                }
             }
             Expr::Assign { name, value } => {
                 self.compile_expr(env, *value)?;
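
Since the constants map is checked before the local-variable lookup, a constant now shadows any same-named local at its use site; the analyzer's redefinition check guards constants against functions and other constants, but not against locals. A hypothetical standalone sketch of that selection order, with a made-up resolve() and plain HashMaps standing in for the real Analyzer and Env:

use std::collections::HashMap;

// resolve() mirrors the branch in the codegen above: constants win and
// become an immediate load; locals fall back to a stack-slot read.
fn resolve(constants: &HashMap<String, u64>, locals: &HashMap<String, usize>, name: &str) -> Option<String> {
    if let Some(value) = constants.get(name) {
        Some(format!(" mov rax, {}", value))
    } else {
        locals.get(name).map(|offset| format!(" mov rax, QWORD [rbp-{}]", offset))
    }
}

fn main() {
    let constants = HashMap::from([("ANSWER".to_string(), 42u64)]);
    let locals = HashMap::from([("ANSWER".to_string(), 8usize)]);
    // the constant shadows the like-named local:
    assert_eq!(resolve(&constants, &locals, "ANSWER").unwrap(), " mov rax, 42");
    assert_eq!(resolve(&constants, &locals, "missing"), None);
}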
@@ -33,6 +33,7 @@ fn compile_file_to(
     }
 
     for stmt in statements {
+        // top level statements are all function/const/extern declarations so a new env for each
        codegen.compile_stmt(&mut codegen_x86_64::Env::new(), stmt)?;
     }
     Ok(())
@@ -14,6 +14,10 @@ pub enum Stmt {
         var_type: Option<Token>,
         initializer: Expr,
     },
+    Const {
+        name: Token,
+        value: Token,
+    },
     Block(Vec<Stmt>),
     If {
         condition: Expr,
@@ -115,6 +119,9 @@ impl Parser {
         if self.match_token(&[TokenType::KeywordExtern]) {
             return self.extern_declaration();
         }
+        if self.match_token(&[TokenType::KeywordConst]) {
+            return self.const_declaration();
+        }
         return error!(
             self.peek().loc,
             "statements not allowed outside function body"
@@ -192,6 +199,13 @@ impl Parser {
         })
     }
 
+    fn const_declaration(&mut self) -> Result<Stmt, ZernError> {
+        let name = self.consume(TokenType::Identifier, "expected const name")?;
+        self.consume(TokenType::Equal, "expected '=' after const name")?;
+        let value = self.consume(TokenType::Number, "expected a number after '='")?;
+        Ok(Stmt::Const { name, value })
+    }
+
     fn extern_declaration(&mut self) -> Result<Stmt, ZernError> {
         Ok(Stmt::Extern(
             self.consume(TokenType::Identifier, "expected extern name")?,
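
The grammar this adds is intentionally tiny: const <identifier> = <number>, top level only, with no type annotation and only a literal number on the right-hand side. A hypothetical sketch of the same consume sequence against a minimal token stream, with a made-up consume() standing in for Parser::consume:

#[derive(Debug, Clone, Copy, PartialEq)]
enum TokenType { KeywordConst, Identifier, Equal, Number }

// Advance only when the next token matches, like Parser::consume
// (minus source locations and the real error type).
fn consume(tokens: &mut std::slice::Iter<TokenType>, want: TokenType, msg: &str) -> Result<TokenType, String> {
    match tokens.next() {
        Some(&t) if t == want => Ok(t),
        _ => Err(msg.to_string()),
    }
}

fn main() -> Result<(), String> {
    // Token stream for a source line like `const ANSWER = 42`
    // (the surface spelling is inferred from the parser, not from the commit).
    let tokens = [TokenType::KeywordConst, TokenType::Identifier, TokenType::Equal, TokenType::Number];
    let mut it = tokens.iter();
    consume(&mut it, TokenType::KeywordConst, "expected 'const'")?;
    consume(&mut it, TokenType::Identifier, "expected const name")?;
    consume(&mut it, TokenType::Equal, "expected '=' after const name")?;
    consume(&mut it, TokenType::Number, "expected a number after '='")?;
    Ok(())
}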
@@ -39,6 +39,7 @@ pub enum TokenType {
     False,
 
     KeywordLet,
+    KeywordConst,
     KeywordIf,
     KeywordElse,
     KeywordWhile,
@@ -336,6 +337,7 @@ impl Tokenizer {
         let lexeme: String = self.source[self.start..self.current].iter().collect();
         self.add_token(match lexeme.as_str() {
             "let" => TokenType::KeywordLet,
+            "const" => TokenType::KeywordConst,
             "if" => TokenType::KeywordIf,
             "else" => TokenType::KeywordElse,
             "while" => TokenType::KeywordWhile,