
chore: fix warnings

clippy-fix
Garrit Franke 3 years ago
parent commit ebd34541b1
  1. src/command/build.rs (4)
  2. src/command/run.rs (2)
  3. src/generator/c.rs (24)
  4. src/generator/js.rs (8)
  5. src/generator/llvm.rs (16)
  6. src/generator/mod.rs (4)
  7. src/generator/x86.rs (38)
  8. src/lexer/cursor.rs (3)
  9. src/lexer/mod.rs (8)
  10. src/lexer/tests.rs (408)
  11. src/parser/infer.rs (22)
  12. src/parser/mod.rs (4)
  13. src/parser/node_type.rs (5)
  14. src/parser/parser.rs (14)
  15. src/parser/rules.rs (42)

src/command/build.rs (4)

@@ -20,9 +20,9 @@ use crate::Lib;
 use std::fs::File;
 use std::io::Read;
 use std::io::Write;
-use std::path::PathBuf;
+use std::path::Path;

-pub fn build(in_file: &PathBuf, out_file: &PathBuf) -> Result<(), String> {
+pub fn build(in_file: &Path, out_file: &Path) -> Result<(), String> {
     let mut file = File::open(in_file).expect("Could not open file");
     let mut contents = String::new();

src/command/run.rs (2)

@@ -14,7 +14,7 @@ pub fn run(in_file: PathBuf) -> Result<(), String> {
     build::build(&in_file, &intermediate_out_file_path)?;
     let out_file = out_dir.join("out");
     if cfg!(feature = "backend_c") {
-        let comp = Command::new("/usr/bin/cc")
+        Command::new("/usr/bin/cc")
            .arg(&intermediate_out_file_path)
            .arg("-o")
            .arg(&out_file)

src/generator/c.rs (24)

@@ -32,15 +32,11 @@ impl Generator for CGenerator {
             code += &format!("{};\n", &generate_function_signature(func.clone()));
         }
-        let funcs: String = prog
-            .func
-            .into_iter()
-            .map(|f| generate_function(f))
-            .collect();
+        let funcs: String = prog.func.into_iter().map(generate_function).collect();
         code += &funcs;
-        return code;
+        code
     }
 }
@@ -89,7 +85,7 @@ fn generate_function_signature(func: Function) -> String {
     format!("{T} {N}({A})", T = t, N = func.name, A = arguments)
 }
-fn generate_block(block: Vec<Statement>, scope: Vec<Variable>) -> String {
+fn generate_block(block: Vec<Statement>, _scope: Vec<Variable>) -> String {
     let mut generated = String::from("{\n");
     for statement in block {
@@ -112,7 +108,7 @@ fn generate_statement(statement: Statement) -> String {
         Statement::Assign(name, state) => generate_assign(*name, *state),
         Statement::Block(statements, scope) => generate_block(statements, scope),
         Statement::While(expr, body) => generate_while_loop(expr, *body),
-        Statement::For(ident, expr, body) => todo!(),
+        Statement::For(_ident, _expr, _body) => todo!(),
         Statement::Continue => todo!(),
         Statement::Break => todo!(),
     };
@@ -121,7 +117,7 @@ fn generate_statement(statement: Statement) -> String {
 }
 fn generate_expression(expr: Expression) -> String {
-    let st = match expr {
+    match expr {
         Expression::Int(val) => val.to_string(),
         Expression::Variable(val) | Expression::Str(val) => val,
         Expression::Bool(b) => b.to_string(),
@@ -129,9 +125,7 @@ fn generate_expression(expr: Expression) -> String {
         Expression::Array(els) => generate_array(els),
         Expression::ArrayAccess(name, expr) => generate_array_access(name, *expr),
         Expression::BinOp(left, op, right) => generate_bin_op(*left, op, *right),
-    };
-    format!("{}", st)
+    }
 }
 fn generate_while_loop(expr: Expression, body: Statement) -> String {
@@ -201,7 +195,7 @@ fn generate_declare(var: Variable, val: Option<Expression>) -> String {
         Some(expr) => format!(
             "{} {} = {};",
             generate_type(Either::Left(var.to_owned())),
-            var.to_owned().name,
+            var.name,
             generate_expression(expr)
         ),
         None => format!(
@@ -246,6 +240,10 @@ fn generate_bin_op(left: Expression, op: BinOp, right: Expression) -> String {
         BinOp::GreaterThanOrEqual => ">=",
         BinOp::LessThan => "<",
         BinOp::LessThanOrEqual => "<=",
+        BinOp::AddAssign => "+=",
+        BinOp::SubtractAssign => "-=",
+        BinOp::MultiplyAssign => "*=",
+        BinOp::DivideAssign => "/=",
         BinOp::Modulus => "%",
         BinOp::Multiplication => "*",
         BinOp::NotEqual => "!=",

src/generator/js.rs (8)

@@ -26,17 +26,13 @@ impl Generator for JsGenerator {
             crate::Builtins::get("builtin.js").expect("Could not locate builtin functions");
         code += std::str::from_utf8(raw_builtins.as_ref())
             .expect("Unable to interpret builtin functions");
-        let funcs: String = prog
-            .func
-            .into_iter()
-            .map(|f| generate_function(f))
-            .collect();
+        let funcs: String = prog.func.into_iter().map(generate_function).collect();
         code += &funcs;
         code += "main();";
-        return code;
+        code
     }
 }

src/generator/llvm.rs (16)

@@ -1,6 +1,5 @@
 use crate::generator::Generator;
 use crate::parser::node_type::*;
-use inkwell::builder::Builder;
 use inkwell::context::Context;
 use inkwell::module::Module;
 use inkwell::types::*;
@@ -14,10 +13,7 @@ impl<'ctx> Generator for LLVMGenerator<'ctx> {
     fn generate(prog: Program) -> String {
         let ctx = Context::create();
         let module = ctx.create_module("main");
-        let mut generator = LLVMGenerator {
-            ctx: &ctx,
-            module: module,
-        };
+        let mut generator = LLVMGenerator { ctx: &ctx, module };
         for func in prog.func {
             generator.generate_function(func);
         }
@@ -38,7 +34,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
             None => panic!("Function argument has no type"),
         })
         .collect();
-        return arg_types;
+        arg_types
     }
     fn generate_function(&mut self, func: Function) {
@@ -57,7 +53,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
     fn generate_statement(&mut self, statement: Statement) {
         match statement {
-            Statement::Block(statements, scope) => {
+            Statement::Block(statements, _) => {
                 for s in statements {
                     self.generate_statement(s);
                 }
@@ -67,9 +63,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
         };
     }
-    fn generate_expression(&mut self, expr: Expression) {
-        match expr {
-            _ => todo!(),
-        }
+    fn generate_expression(&mut self, _expr: Expression) {
+        todo!()
     }
 }

src/generator/mod.rs (4)

@@ -29,10 +29,12 @@ pub trait Generator {
     fn generate(prog: Program) -> String;
 }
+// Since we're using multiple features,
+// "unreachable" statements are okay
+#[allow(unreachable_code)]
 pub fn generate(prog: Program) -> String {
     #[cfg(feature = "backend_llvm")]
     return llvm::LLVMGenerator::generate(prog);
     #[cfg(feature = "backend_c")]
     return c::CGenerator::generate(prog);

src/generator/x86.rs (38)

@@ -20,6 +20,8 @@ struct Assembly {
     asm: Vec<String>,
 }
+// We don't need "From", so we can ignore the lint here
+#[allow(clippy::from_over_into)]
 impl Into<String> for Assembly {
     fn into(self) -> String {
         self.build()
@@ -55,20 +57,18 @@ impl X86Generator {
     fn gen_program(&mut self, prog: Program) -> Assembly {
         let mut asm = Assembly::new();
-        match prog {
-            Program { func, globals } => {
+        let Program { func, globals } = prog;
         asm.add(".intel_syntax noprefix");
         asm.add(".text");
         for f in func {
             asm.add(self.gen_function(f));
         }
         asm.add(".data");
         for g in globals {
             asm.add(format!("_{0}: .word 0", g));
         }
-            }
-        };
         asm
     }
@@ -77,13 +77,9 @@ impl X86Generator {
         let mut asm = Assembly::new();
         let has_return: bool = match &func.body {
-            Statement::Block(statements, _) => statements.iter().any(|s| {
-                if let Statement::Return(_) = *s {
-                    true
-                } else {
-                    false
-                }
-            }),
+            Statement::Block(statements, _) => statements
+                .iter()
+                .any(|s| matches!(*s, Statement::Return(_))),
             _ => panic!("Function body should be of type Block"),
         };

src/lexer/cursor.rs (3)

@@ -84,8 +84,7 @@ impl<'a> Cursor<'a> {
     }
     pub(crate) fn pos(&self) -> Position {
-        let p = self.pos.clone();
-        p
+        *self.pos
     }
     /// Moves to the next character.

src/lexer/mod.rs (8)

@@ -181,12 +181,12 @@ pub fn is_whitespace(c: char) -> bool {
 /// True if `c` is valid as a first character of an identifier.
 pub fn is_id_start(c: char) -> bool {
-    ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_'
+    ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || c == '_'
 }
 /// True if `c` is valid as a non-first character of an identifier.
 pub fn is_id_continue(c: char) -> bool {
-    ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '_'
+    ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || ('0'..='9').contains(&c) || c == '_'
 }
 impl Cursor<'_> {
@@ -305,9 +305,7 @@ impl Cursor<'_> {
         // Cut the original tokens to the length of the token
         raw.truncate(len);
         let position = self.pos();
-        let token = Token::new(token_kind, len, raw, position);
-        token
+        Token::new(token_kind, len, raw, position)
     }
     /// Eats symbols while predicate returns true or until the end of file is reached.

src/lexer/tests.rs (408)

@@ -13,226 +13,222 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-#[cfg(test)]
-mod tests {
-    use crate::lexer::*;
+use crate::lexer::*;

 #[test]
 fn test_basic_tokenizing() {
     let raw = tokenize("1 = 2");
     let mut tokens = raw.into_iter();

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Literal(Value::Int),
             raw: "1".to_owned(),
             pos: Position {
                 raw: 0,
                 line: 1,
                 offset: 0
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Whitespace,
             raw: " ".to_owned(),
             pos: Position {
                 raw: 1,
                 line: 1,
                 offset: 1
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Assign,
             raw: "=".to_owned(),
             pos: Position {
                 raw: 2,
                 line: 1,
                 offset: 2
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Whitespace,
             raw: " ".to_owned(),
             pos: Position {
                 raw: 3,
                 line: 1,
                 offset: 3
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Literal(Value::Int),
             raw: "2".to_owned(),
             pos: Position {
                 raw: 4,
                 line: 1,
                 offset: 4
             }
         }
     );
 }

 #[test]
 fn test_tokenizing_without_whitespace() {
     let mut tokens = tokenize("1=2").into_iter();

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Literal(Value::Int),
             raw: "1".to_owned(),
             pos: Position {
                 raw: 0,
                 line: 1,
                 offset: 0
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Assign,
             raw: "=".to_owned(),
             pos: Position {
                 raw: 1,
                 line: 1,
                 offset: 1
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 1,
             kind: TokenKind::Literal(Value::Int),
             raw: "2".to_owned(),
             pos: Position {
                 raw: 2,
                 line: 1,
                 offset: 2
             }
         }
     );
 }

 #[test]
 fn test_booleans() {
     let mut tokens = tokenize("true false").into_iter();

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 4,
             kind: TokenKind::Keyword(Keyword::Boolean),
             raw: "true".to_owned(),
             pos: Position {
                 raw: 3,
                 line: 1,
                 offset: 3
             }
         }
     );

     assert_eq!(
         tokens.nth(1).unwrap(),
         Token {
             len: 5,
             kind: TokenKind::Keyword(Keyword::Boolean),
             raw: "false".to_owned(),
             pos: Position {
                 raw: 9,
                 line: 1,
                 offset: 9
             }
         }
     );
 }

 #[test]
 fn test_functions() {
     let mut tokens = tokenize("fn fib() {}").into_iter();

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 2,
             kind: TokenKind::Keyword(Keyword::Function),
             raw: "fn".to_owned(),
             pos: Position {
                 raw: 1,
                 line: 1,
                 offset: 1
             }
         }
     );
 }

 #[test]
 fn test_comments() {
     let mut tokens = tokenize(
         "// foo
 fn fib() {}
 ",
     )
     .into_iter()
     .filter(|t| {
         t.kind != TokenKind::Whitespace
             && t.kind != TokenKind::Tab
             && t.kind != TokenKind::CarriageReturn
     });

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 6,
             kind: TokenKind::Comment,
             raw: "// foo".to_owned(),
             pos: Position {
                 raw: 5,
                 line: 1,
                 offset: 5
             }
         }
     );

     assert_eq!(
-        tokens.nth(0).unwrap(),
+        tokens.next().unwrap(),
         Token {
             len: 2,
             kind: TokenKind::Keyword(Keyword::Function),
             raw: "fn".to_owned(),
             pos: Position {
                 raw: 8,
                 line: 2,
                 offset: 2
             }
         }
     );
 }
-}

src/parser/infer.rs (22)

@@ -4,30 +4,26 @@ use super::node_type::*;
 ///
 /// TODO: Global symbol table is passed around randomly.
 /// This could probably be cleaned up.
-pub(super) fn infer(program: &mut Program) -> Result<(), String> {
+pub(super) fn infer(program: &mut Program) {
     let table = &program.get_symbol_table();
     // TODO: Fix aweful nesting
     for func in &mut program.func {
         if let Statement::Block(statements, _) = &mut func.body {
             for statement in statements {
-                match statement {
-                    Statement::Declare(var, expr) => {
-                        if let None = &var.ty {
-                            if let Some(e) = expr {
-                                var.ty = infer_expression(&e, table);
-                                #[cfg(debug_assertions)]
-                                if let None = var.ty {
-                                    println!("Type of {} could not be infered: {:?}", &var.name, e);
-                                }
-                            }
-                        }
-                    }
-                    _ => {}
+                if let Statement::Declare(var, expr) = statement {
+                    if var.ty.is_none() {
+                        if let Some(e) = expr {
+                            var.ty = infer_expression(&e, table);
+                            #[cfg(debug_assertions)]
+                            if var.ty.is_none() {
+                                println!("Type of {} could not be infered: {:?}", &var.name, e);
+                            }
+                        }
+                    }
                 }
             }
         }
     }
-    Ok(())
 }
 /// Function table is needed to infer possible function calls
@@ -42,7 +38,7 @@ fn infer_expression(expr: &Expression, table: &SymbolTable) -> Option<Type> {
     }
 }
-fn infer_array(elements: &Vec<Expression>, table: &SymbolTable) -> Option<Type> {
+fn infer_array(elements: &[Expression], table: &SymbolTable) -> Option<Type> {
     let types: Vec<Option<Type>> = elements
         .iter()
         .map(|el| infer_expression(el, table))

src/parser/mod.rs (4)

@@ -15,7 +15,9 @@ mod infer;
 * limitations under the License.
 */
 pub mod node_type;
-pub mod parser;
+// TODO: Resolve this lint by renaming the module
+#[allow(clippy::module_inception)]
+mod parser;
 mod rules;
 use crate::lexer::Token;
 use node_type::Program;

src/parser/node_type.rs (5)

@@ -173,7 +173,10 @@ impl TryFrom<TokenKind> for BinOp {
             TokenKind::MinusEqual => Ok(BinOp::SubtractAssign),
             TokenKind::StarEqual => Ok(BinOp::MultiplyAssign),
             TokenKind::SlashEqual => Ok(BinOp::DivideAssign),
-            other => Err(format!("Token {:?} cannot be converted into a BinOp", other).into()),
+            other => Err(format!(
+                "Token {:?} cannot be converted into a BinOp",
+                other
+            )),
         }
     }
 }

src/parser/parser.rs (14)

@@ -1,5 +1,3 @@
-use crate::lexer::Keyword;
-use crate::lexer::{Token, TokenKind};
 /**
 * Copyright 2020 Garrit Franke
 *
@@ -15,6 +13,8 @@ use crate::lexer::{Token, TokenKind};
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+use crate::lexer::Keyword;
+use crate::lexer::{Token, TokenKind};
 use crate::parser::infer::infer;
 use crate::parser::node_type::*;
 use crate::util::string_util::highlight_position_in_file;
@@ -41,14 +41,14 @@ impl Parser {
             peeked: vec![],
             current: None,
             prev: None,
-            raw: raw,
+            raw,
         }
     }
     pub fn parse(&mut self) -> Result<Program, String> {
         let mut program = self.parse_program()?;
         // infer types
-        infer(&mut program)?;
+        infer(&mut program);
         Ok(program)
     }
@@ -71,12 +71,6 @@ impl Parser {
         Ok(token)
     }
-    pub(super) fn drop(&mut self, count: usize) {
-        for _ in 0..count {
-            let _ = self.next();
-        }
-    }
     pub(super) fn push(&mut self, token: Token) {
         self.peeked.push(token);
     }

src/parser/rules.rs (42)

@@ -21,7 +21,7 @@ use crate::lexer::{TokenKind, Value};
 use std::convert::TryFrom;
 impl Parser {
-    pub(super) fn parse_program(&mut self) -> Result<Program, String> {
+    pub fn parse_program(&mut self) -> Result<Program, String> {
         let mut functions = Vec::new();
         let globals = Vec::new();
@@ -31,7 +31,7 @@ impl Parser {
         Ok(Program {
             func: functions,
-            globals: globals,
+            globals,
         })
     }
@@ -42,7 +42,7 @@ impl Parser {
         let mut scope = vec![];
         // Parse statements until a curly brace is encountered
-        while let Err(_) = self.peek_token(TokenKind::CurlyBracesClose) {
+        while self.peek_token(TokenKind::CurlyBracesClose).is_err() {
             let statement = self.parse_statement()?;
             // If the current statement is a variable declaration,
@@ -81,16 +81,16 @@ impl Parser {
         let body = self.parse_block()?;
         Ok(Function {
-            name: name,
-            arguments: arguments,
-            body: body,
+            name,
+            arguments,
+            body,
             ret_type: ty,
         })
     }
     fn parse_arguments(&mut self) -> Result<Vec<Variable>, String> {
         let mut args = Vec::new();
-        while let Err(_) = self.peek_token(TokenKind::BraceClose) {
+        while self.peek_token(TokenKind::BraceClose).is_err() {
             let next = self.next()?;
             match next.kind {
                 TokenKind::Comma => {
@@ -98,7 +98,7 @@ impl Parser {
                 }
                 TokenKind::Identifier(name) => {
                     args.push(Variable {
-                        name: name,
+                        name,
                         ty: Some(self.parse_type()?),
                     });
                 }
@@ -116,7 +116,7 @@ impl Parser {
             TokenKind::Identifier(_) => Type::try_from(self.next()?.raw),
             _ => Err("Expected type".into()),
         }?;
-        if let Ok(_) = self.peek_token(TokenKind::SquareBraceOpen) {
+        if self.peek_token(TokenKind::SquareBraceOpen).is_ok() {
             self.match_token(TokenKind::SquareBraceOpen)?;
             self.match_token(TokenKind::SquareBraceClose)?;
             Ok(Type::Array(Box::new(typ)))
@@ -127,7 +127,7 @@ impl Parser {
     fn parse_statement(&mut self) -> Result<Statement, String> {
         let token = self.peek()?;
-        let state = match &token.kind {
+        match &token.kind {
             TokenKind::Keyword(Keyword::Let) => self.parse_declare(),
             TokenKind::Keyword(Keyword::Return) => self.parse_return(),
             TokenKind::Keyword(Keyword::If) => self.parse_conditional_statement(),
@@ -138,13 +138,13 @@ impl Parser {
             TokenKind::Identifier(_) => {
                 let ident = self.match_identifier()?;
-                if let Ok(_) = self.peek_token(TokenKind::BraceOpen) {
+                if self.peek_token(TokenKind::BraceOpen).is_ok() {
                     let state = self.parse_function_call(Some(ident))?;
                     Ok(Statement::Exp(state))
-                } else if let Ok(_) = self.peek_token(TokenKind::Assign) {
+                } else if self.peek_token(TokenKind::Assign).is_ok() {
                     let state = self.parse_assignent(Some(Expression::Variable(ident)))?;
                     Ok(state)
-                } else if let Ok(_) = self.peek_token(TokenKind::SquareBraceOpen) {
+                } else if self.peek_token(TokenKind::SquareBraceOpen).is_ok() {
                     let expr = self.parse_array_access(Some(ident))?;
                     let next = self.peek()?;
@@ -152,19 +152,19 @@ impl Parser {
                         TokenKind::Assign => self.parse_assignent(Some(expr)),
                         _ => Ok(Statement::Exp(expr)),
                     }
-                } else if let Ok(_) = BinOp::try_from(self.peek()?.kind) {
-                    let expr = Expression::Variable(ident.into());
+                } else if BinOp::try_from(self.peek()?.kind).is_ok() {
+                    // Parse Binary operation
+                    let expr = Expression::Variable(ident);
                     let state = Statement::Exp(self.parse_bin_op(Some(expr))?);
                     Ok(state)
                 } else {
-                    let state = Statement::Exp(Expression::Variable(ident.into()));
+                    let state = Statement::Exp(Expression::Variable(ident));
                     Ok(state)
                 }
             }
             TokenKind::Literal(_) => Ok(Statement::Exp(self.parse_expression()?)),
-            _ => return Err(self.make_error(TokenKind::Unknown, token)),
-        };
-        state
+            _ => Err(self.make_error(TokenKind::Unknown, token)),
+        }
     }
     /// Parses a function call from tokens.
@@ -398,7 +398,7 @@ impl Parser {
         let left = match lhs {
             Some(lhs) => lhs,
             None => {
-                let prev = self.prev().ok_or_else(|| "Expected Token")?;
+                let prev = self.prev().ok_or("Expected token")?;
                 match &prev.kind {
                     TokenKind::Identifier(_) | TokenKind::Literal(_) | TokenKind::Keyword(_) => {
                         Ok(Expression::try_from(prev)?)
@@ -411,7 +411,7 @@ impl Parser {
         let op = self.match_operator()?;
         Ok(Expression::BinOp(
-            Box::from(Expression::try_from(left).map_err(|e| e.to_string())?),
+            Box::from(left),
             op,
             Box::from(self.parse_expression()?),
         ))
