
chore: fix warnings

clippy-fix
Garrit Franke, 3 years ago
commit ebd34541b1
15 changed files:

  1. src/command/build.rs (4 lines changed)
  2. src/command/run.rs (2 lines changed)
  3. src/generator/c.rs (24 lines changed)
  4. src/generator/js.rs (8 lines changed)
  5. src/generator/llvm.rs (16 lines changed)
  6. src/generator/mod.rs (4 lines changed)
  7. src/generator/x86.rs (38 lines changed)
  8. src/lexer/cursor.rs (3 lines changed)
  9. src/lexer/mod.rs (8 lines changed)
  10. src/lexer/tests.rs (408 lines changed)
  11. src/parser/infer.rs (22 lines changed)
  12. src/parser/mod.rs (4 lines changed)
  13. src/parser/node_type.rs (5 lines changed)
  14. src/parser/parser.rs (14 lines changed)
  15. src/parser/rules.rs (42 lines changed)

src/command/build.rs (4 lines changed)

@@ -20,9 +20,9 @@ use crate::Lib;
use std::fs::File;
use std::io::Read;
use std::io::Write;
use std::path::PathBuf;
use std::path::Path;
pub fn build(in_file: &PathBuf, out_file: &PathBuf) -> Result<(), String> {
pub fn build(in_file: &Path, out_file: &Path) -> Result<(), String> {
let mut file = File::open(in_file).expect("Could not open file");
let mut contents = String::new();
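Clippy's `ptr_arg` lint is what drives the `&PathBuf` → `&Path` signature change above: a borrowed `&Path` accepts more callers and avoids implying ownership. A minimal standalone sketch of the pattern (the function name and file path are made up, not taken from the repository):

```rust
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};

// Borrowing &Path instead of &PathBuf lets callers pass &PathBuf, &Path,
// or Path::new("...") alike; &PathBuf coerces to &Path via Deref.
fn read_source(path: &Path) -> std::io::Result<String> {
    let mut contents = String::new();
    File::open(path)?.read_to_string(&mut contents)?;
    Ok(contents)
}

fn main() {
    let owned: PathBuf = PathBuf::from("main.sb"); // hypothetical input file
    let _ = read_source(&owned);
}
```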

src/command/run.rs (2 lines changed)

@@ -14,7 +14,7 @@ pub fn run(in_file: PathBuf) -> Result<(), String> {
build::build(&in_file, &intermediate_out_file_path)?;
let out_file = out_dir.join("out");
if cfg!(feature = "backend_c") {
let comp = Command::new("/usr/bin/cc")
Command::new("/usr/bin/cc")
.arg(&intermediate_out_file_path)
.arg("-o")
.arg(&out_file)

src/generator/c.rs (24 lines changed)

@@ -32,15 +32,11 @@ impl Generator for CGenerator {
code += &format!("{};\n", &generate_function_signature(func.clone()));
}
let funcs: String = prog
.func
.into_iter()
.map(|f| generate_function(f))
.collect();
let funcs: String = prog.func.into_iter().map(generate_function).collect();
code += &funcs;
return code;
code
}
}
@@ -89,7 +85,7 @@ fn generate_function_signature(func: Function) -> String {
format!("{T} {N}({A})", T = t, N = func.name, A = arguments)
}
fn generate_block(block: Vec<Statement>, scope: Vec<Variable>) -> String {
fn generate_block(block: Vec<Statement>, _scope: Vec<Variable>) -> String {
let mut generated = String::from("{\n");
for statement in block {
@@ -112,7 +108,7 @@ fn generate_statement(statement: Statement) -> String {
Statement::Assign(name, state) => generate_assign(*name, *state),
Statement::Block(statements, scope) => generate_block(statements, scope),
Statement::While(expr, body) => generate_while_loop(expr, *body),
Statement::For(ident, expr, body) => todo!(),
Statement::For(_ident, _expr, _body) => todo!(),
Statement::Continue => todo!(),
Statement::Break => todo!(),
};
@@ -121,7 +117,7 @@ fn generate_statement(statement: Statement) -> String {
}
fn generate_expression(expr: Expression) -> String {
let st = match expr {
match expr {
Expression::Int(val) => val.to_string(),
Expression::Variable(val) | Expression::Str(val) => val,
Expression::Bool(b) => b.to_string(),
@@ -129,9 +125,7 @@ fn generate_expression(expr: Expression) -> String {
Expression::Array(els) => generate_array(els),
Expression::ArrayAccess(name, expr) => generate_array_access(name, *expr),
Expression::BinOp(left, op, right) => generate_bin_op(*left, op, *right),
};
format!("{}", st)
}
}
fn generate_while_loop(expr: Expression, body: Statement) -> String {
@@ -201,7 +195,7 @@ fn generate_declare(var: Variable, val: Option<Expression>) -> String {
Some(expr) => format!(
"{} {} = {};",
generate_type(Either::Left(var.to_owned())),
var.to_owned().name,
var.name,
generate_expression(expr)
),
None => format!(
@@ -246,6 +240,10 @@ fn generate_bin_op(left: Expression, op: BinOp, right: Expression) -> String {
BinOp::GreaterThanOrEqual => ">=",
BinOp::LessThan => "<",
BinOp::LessThanOrEqual => "<=",
BinOp::AddAssign => "+=",
BinOp::SubtractAssign => "-=",
BinOp::MultiplyAssign => "*=",
BinOp::DivideAssign => "/=",
BinOp::Modulus => "%",
BinOp::Multiplication => "*",
BinOp::NotEqual => "!=",
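Two of the lints fixed in this file are `redundant_closure` (pass `generate_function` to `map` directly instead of wrapping it in a closure) and `needless_return` (end the function with a trailing expression). A tiny self-contained illustration with invented names:

```rust
// needless_return: the trailing expression replaces `return x * 2;`.
fn double(x: i32) -> i32 {
    x * 2
}

fn main() {
    let nums = vec![1, 2, 3];
    // redundant_closure: `.map(|n| double(n))` becomes `.map(double)`.
    let doubled: Vec<i32> = nums.into_iter().map(double).collect();
    assert_eq!(doubled, vec![2, 4, 6]);
}
```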

src/generator/js.rs (8 lines changed)

@@ -26,17 +26,13 @@ impl Generator for JsGenerator {
crate::Builtins::get("builtin.js").expect("Could not locate builtin functions");
code += std::str::from_utf8(raw_builtins.as_ref())
.expect("Unable to interpret builtin functions");
let funcs: String = prog
.func
.into_iter()
.map(|f| generate_function(f))
.collect();
let funcs: String = prog.func.into_iter().map(generate_function).collect();
code += &funcs;
code += "main();";
return code;
code
}
}

src/generator/llvm.rs (16 lines changed)

@@ -1,6 +1,5 @@
use crate::generator::Generator;
use crate::parser::node_type::*;
use inkwell::builder::Builder;
use inkwell::context::Context;
use inkwell::module::Module;
use inkwell::types::*;
@@ -14,10 +13,7 @@ impl<'ctx> Generator for LLVMGenerator<'ctx> {
fn generate(prog: Program) -> String {
let ctx = Context::create();
let module = ctx.create_module("main");
let mut generator = LLVMGenerator {
ctx: &ctx,
module: module,
};
let mut generator = LLVMGenerator { ctx: &ctx, module };
for func in prog.func {
generator.generate_function(func);
}
@@ -38,7 +34,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
None => panic!("Function argument has no type"),
})
.collect();
return arg_types;
arg_types
}
fn generate_function(&mut self, func: Function) {
@@ -57,7 +53,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
fn generate_statement(&mut self, statement: Statement) {
match statement {
Statement::Block(statements, scope) => {
Statement::Block(statements, _) => {
for s in statements {
self.generate_statement(s);
}
@@ -67,9 +63,7 @@ impl<'ctx> LLVMGenerator<'ctx> {
};
}
fn generate_expression(&mut self, expr: Expression) {
match expr {
_ => todo!(),
}
fn generate_expression(&mut self, _expr: Expression) {
todo!()
}
}

src/generator/mod.rs (4 lines changed)

@@ -29,10 +29,12 @@ pub trait Generator {
fn generate(prog: Program) -> String;
}
// Since we're using multiple features,
// "unreachable" statements are okay
#[allow(unreachable_code)]
pub fn generate(prog: Program) -> String {
#[cfg(feature = "backend_llvm")]
return llvm::LLVMGenerator::generate(prog);
#[cfg(feature = "backend_c")]
return c::CGenerator::generate(prog);

src/generator/x86.rs (38 lines changed)

@@ -20,6 +20,8 @@ struct Assembly {
asm: Vec<String>,
}
// We don't need "From", so we can ignore the lint here
#[allow(clippy::from_over_into)]
impl Into<String> for Assembly {
fn into(self) -> String {
self.build()
@@ -55,20 +57,18 @@ impl X86Generator {
fn gen_program(&mut self, prog: Program) -> Assembly {
let mut asm = Assembly::new();
match prog {
Program { func, globals } => {
asm.add(".intel_syntax noprefix");
asm.add(".text");
for f in func {
asm.add(self.gen_function(f));
}
asm.add(".data");
for g in globals {
asm.add(format!("_{0}: .word 0", g));
}
}
};
let Program { func, globals } = prog;
asm.add(".intel_syntax noprefix");
asm.add(".text");
for f in func {
asm.add(self.gen_function(f));
}
asm.add(".data");
for g in globals {
asm.add(format!("_{0}: .word 0", g));
}
asm
}
@@ -77,13 +77,9 @@ impl X86Generator {
let mut asm = Assembly::new();
let has_return: bool = match &func.body {
Statement::Block(statements, _) => statements.iter().any(|s| {
if let Statement::Return(_) = *s {
true
} else {
false
}
}),
Statement::Block(statements, _) => statements
.iter()
.any(|s| matches!(*s, Statement::Return(_))),
_ => panic!("Function body should be of type Block"),
};
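The `has_return` rewrite above is clippy's `match_like_matches_macro` suggestion: an `if let … { true } else { false }` closure collapses into `matches!`. A minimal sketch using a simplified `Statement` enum (not the repository's actual type):

```rust
enum Statement {
    Return(i64),
    Expr(i64),
}

// matches! expands to a match that yields true for the listed pattern.
fn has_return(statements: &[Statement]) -> bool {
    statements.iter().any(|s| matches!(s, Statement::Return(_)))
}

fn main() {
    let body = vec![Statement::Expr(1), Statement::Return(0)];
    assert!(has_return(&body));
    assert!(!has_return(&[Statement::Expr(2)]));
}
```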

src/lexer/cursor.rs (3 lines changed)

@@ -84,8 +84,7 @@ impl<'a> Cursor<'a> {
}
pub(crate) fn pos(&self) -> Position {
let p = self.pos.clone();
p
*self.pos
}
/// Moves to the next character.

src/lexer/mod.rs (8 lines changed)

@@ -181,12 +181,12 @@ pub fn is_whitespace(c: char) -> bool {
/// True if `c` is valid as a first character of an identifier.
pub fn is_id_start(c: char) -> bool {
('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_'
('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || c == '_'
}
/// True if `c` is valid as a non-first character of an identifier.
pub fn is_id_continue(c: char) -> bool {
('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '_'
('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || ('0'..='9').contains(&c) || c == '_'
}
impl Cursor<'_> {
@@ -305,9 +305,7 @@ impl Cursor<'_> {
// Cut the original tokens to the length of the token
raw.truncate(len);
let position = self.pos();
let token = Token::new(token_kind, len, raw, position);
token
Token::new(token_kind, len, raw, position)
}
/// Eats symbols while predicate returns true or until the end of file is reached.
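The identifier predicates now use `RangeInclusive::contains`, which is what clippy's `manual_range_contains` suggests for chained comparisons. The two spellings are equivalent, as this quick check shows:

```rust
fn is_id_start_manual(c: char) -> bool {
    ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '_'
}

fn is_id_start_ranges(c: char) -> bool {
    ('a'..='z').contains(&c) || ('A'..='Z').contains(&c) || c == '_'
}

fn main() {
    // Spot-check a few characters, including one outside ASCII.
    for c in "aZ_9 é".chars() {
        assert_eq!(is_id_start_manual(c), is_id_start_ranges(c));
    }
}
```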

src/lexer/tests.rs (408 lines changed)

@@ -13,226 +13,222 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#[cfg(test)]
mod tests {
use crate::lexer::*;
#[test]
fn test_basic_tokenizing() {
let raw = tokenize("1 = 2");
let mut tokens = raw.into_iter();
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "1".to_owned(),
pos: Position {
raw: 0,
line: 1,
offset: 0
}
use crate::lexer::*;
#[test]
fn test_basic_tokenizing() {
let raw = tokenize("1 = 2");
let mut tokens = raw.into_iter();
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "1".to_owned(),
pos: Position {
raw: 0,
line: 1,
offset: 0
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Whitespace,
raw: " ".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Whitespace,
raw: " ".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Assign,
raw: "=".to_owned(),
pos: Position {
raw: 2,
line: 1,
offset: 2
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Assign,
raw: "=".to_owned(),
pos: Position {
raw: 2,
line: 1,
offset: 2
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Whitespace,
raw: " ".to_owned(),
pos: Position {
raw: 3,
line: 1,
offset: 3
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Whitespace,
raw: " ".to_owned(),
pos: Position {
raw: 3,
line: 1,
offset: 3
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "2".to_owned(),
pos: Position {
raw: 4,
line: 1,
offset: 4
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "2".to_owned(),
pos: Position {
raw: 4,
line: 1,
offset: 4
}
);
}
#[test]
fn test_tokenizing_without_whitespace() {
let mut tokens = tokenize("1=2").into_iter();
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "1".to_owned(),
pos: Position {
raw: 0,
line: 1,
offset: 0
}
}
);
}
#[test]
fn test_tokenizing_without_whitespace() {
let mut tokens = tokenize("1=2").into_iter();
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "1".to_owned(),
pos: Position {
raw: 0,
line: 1,
offset: 0
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Assign,
raw: "=".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Assign,
raw: "=".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "2".to_owned(),
pos: Position {
raw: 2,
line: 1,
offset: 2
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 1,
kind: TokenKind::Literal(Value::Int),
raw: "2".to_owned(),
pos: Position {
raw: 2,
line: 1,
offset: 2
}
);
}
#[test]
fn test_booleans() {
let mut tokens = tokenize("true false").into_iter();
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 4,
kind: TokenKind::Keyword(Keyword::Boolean),
raw: "true".to_owned(),
pos: Position {
raw: 3,
line: 1,
offset: 3
}
}
);
}
#[test]
fn test_booleans() {
let mut tokens = tokenize("true false").into_iter();
assert_eq!(
tokens.next().unwrap(),
Token {
len: 4,
kind: TokenKind::Keyword(Keyword::Boolean),
raw: "true".to_owned(),
pos: Position {
raw: 3,
line: 1,
offset: 3
}
);
assert_eq!(
tokens.nth(1).unwrap(),
Token {
len: 5,
kind: TokenKind::Keyword(Keyword::Boolean),
raw: "false".to_owned(),
pos: Position {
raw: 9,
line: 1,
offset: 9
}
}
);
assert_eq!(
tokens.nth(1).unwrap(),
Token {
len: 5,
kind: TokenKind::Keyword(Keyword::Boolean),
raw: "false".to_owned(),
pos: Position {
raw: 9,
line: 1,
offset: 9
}
);
}
#[test]
fn test_functions() {
let mut tokens = tokenize("fn fib() {}").into_iter();
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 2,
kind: TokenKind::Keyword(Keyword::Function),
raw: "fn".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
}
);
}
#[test]
fn test_functions() {
let mut tokens = tokenize("fn fib() {}").into_iter();
assert_eq!(
tokens.next().unwrap(),
Token {
len: 2,
kind: TokenKind::Keyword(Keyword::Function),
raw: "fn".to_owned(),
pos: Position {
raw: 1,
line: 1,
offset: 1
}
);
}
}
);
}
#[test]
fn test_comments() {
let mut tokens = tokenize(
"// foo
#[test]
fn test_comments() {
let mut tokens = tokenize(
"// foo
fn fib() {}
",
)
.into_iter()
.filter(|t| {
t.kind != TokenKind::Whitespace
&& t.kind != TokenKind::Tab
&& t.kind != TokenKind::CarriageReturn
});
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 6,
kind: TokenKind::Comment,
raw: "// foo".to_owned(),
pos: Position {
raw: 5,
line: 1,
offset: 5
}
)
.into_iter()
.filter(|t| {
t.kind != TokenKind::Whitespace
&& t.kind != TokenKind::Tab
&& t.kind != TokenKind::CarriageReturn
});
assert_eq!(
tokens.next().unwrap(),
Token {
len: 6,
kind: TokenKind::Comment,
raw: "// foo".to_owned(),
pos: Position {
raw: 5,
line: 1,
offset: 5
}
);
assert_eq!(
tokens.nth(0).unwrap(),
Token {
len: 2,
kind: TokenKind::Keyword(Keyword::Function),
raw: "fn".to_owned(),
pos: Position {
raw: 8,
line: 2,
offset: 2
}
}
);
assert_eq!(
tokens.next().unwrap(),
Token {
len: 2,
kind: TokenKind::Keyword(Keyword::Function),
raw: "fn".to_owned(),
pos: Position {
raw: 8,
line: 2,
offset: 2
}
);
}
}
);
}
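Most of this file's 408 changed lines appear to come from dropping the inner `#[cfg(test)] mod tests { … }` wrapper and re-indenting; the substantive lint fix is replacing `tokens.nth(0)` with `tokens.next()` (clippy's `iter_nth_zero`). For the next element the two calls behave identically:

```rust
fn main() {
    let mut tokens = vec!["1", "=", "2"].into_iter();
    // nth(0) consumes and returns the next element, exactly like next().
    assert_eq!(tokens.nth(0), Some("1"));
    assert_eq!(tokens.next(), Some("="));
    assert_eq!(tokens.next(), Some("2"));
    assert_eq!(tokens.next(), None);
}
```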

src/parser/infer.rs (22 lines changed)

@@ -4,30 +4,26 @@ use super::node_type::*;
///
/// TODO: Global symbol table is passed around randomly.
/// This could probably be cleaned up.
pub(super) fn infer(program: &mut Program) -> Result<(), String> {
pub(super) fn infer(program: &mut Program) {
let table = &program.get_symbol_table();
// TODO: Fix aweful nesting
for func in &mut program.func {
if let Statement::Block(statements, _) = &mut func.body {
for statement in statements {
match statement {
Statement::Declare(var, expr) => {
if let None = &var.ty {
if let Some(e) = expr {
var.ty = infer_expression(&e, table);
#[cfg(debug_assertions)]
if let None = var.ty {
println!("Type of {} could not be infered: {:?}", &var.name, e);
}
if let Statement::Declare(var, expr) = statement {
if var.ty.is_none() {
if let Some(e) = expr {
var.ty = infer_expression(&e, table);
#[cfg(debug_assertions)]
if var.ty.is_none() {
println!("Type of {} could not be infered: {:?}", &var.name, e);
}
}
}
_ => {}
}
}
}
}
Ok(())
}
/// Function table is needed to infer possible function calls
@@ -42,7 +38,7 @@ fn infer_expression(expr: &Expression, table: &SymbolTable) -> Option<Type> {
}
}
fn infer_array(elements: &Vec<Expression>, table: &SymbolTable) -> Option<Type> {
fn infer_array(elements: &[Expression], table: &SymbolTable) -> Option<Type> {
let types: Vec<Option<Type>> = elements
.iter()
.map(|el| infer_expression(el, table))
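This hunk folds the nested `match`/`if let None` checks into `is_none()` (clippy's `redundant_pattern_matching`) and takes `&[Expression]` instead of `&Vec<Expression>` (clippy's `ptr_arg`). A small sketch of both patterns with made-up names:

```rust
// ptr_arg: accept a slice so callers can pass a Vec, an array, or a sub-slice.
fn first_untyped(types: &[Option<String>]) -> Option<usize> {
    // redundant_pattern_matching: is_none() instead of `if let None = ...`.
    types.iter().position(|t| t.is_none())
}

fn main() {
    let types = vec![Some("int".to_string()), None, Some("bool".to_string())];
    assert_eq!(first_untyped(&types), Some(1));
}
```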

src/parser/mod.rs (4 lines changed)

@@ -15,7 +15,9 @@ mod infer;
* limitations under the License.
*/
pub mod node_type;
pub mod parser;
// TODO: Resolve this lint by renaming the module
#[allow(clippy::module_inception)]
mod parser;
mod rules;
use crate::lexer::Token;
use node_type::Program;

src/parser/node_type.rs (5 lines changed)

@@ -173,7 +173,10 @@ impl TryFrom<TokenKind> for BinOp {
TokenKind::MinusEqual => Ok(BinOp::SubtractAssign),
TokenKind::StarEqual => Ok(BinOp::MultiplyAssign),
TokenKind::SlashEqual => Ok(BinOp::DivideAssign),
other => Err(format!("Token {:?} cannot be converted into a BinOp", other).into()),
other => Err(format!(
"Token {:?} cannot be converted into a BinOp",
other
)),
}
}
}

src/parser/parser.rs (14 lines changed)

@@ -1,5 +1,3 @@
use crate::lexer::Keyword;
use crate::lexer::{Token, TokenKind};
/**
* Copyright 2020 Garrit Franke
*
@@ -15,6 +13,8 @@ use crate::lexer::{Token, TokenKind};
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::lexer::Keyword;
use crate::lexer::{Token, TokenKind};
use crate::parser::infer::infer;
use crate::parser::node_type::*;
use crate::util::string_util::highlight_position_in_file;
@@ -41,14 +41,14 @@ impl Parser {
peeked: vec![],
current: None,
prev: None,
raw: raw,
raw,
}
}
pub fn parse(&mut self) -> Result<Program, String> {
let mut program = self.parse_program()?;
// infer types
infer(&mut program)?;
infer(&mut program);
Ok(program)
}
@@ -71,12 +71,6 @@ impl Parser {
Ok(token)
}
pub(super) fn drop(&mut self, count: usize) {
for _ in 0..count {
let _ = self.next();
}
}
pub(super) fn push(&mut self, token: Token) {
self.peeked.push(token);
}
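The `raw: raw,` → `raw,` change is field init shorthand, clippy's `redundant_field_names`: when a local binding and a struct field share a name, the field name alone suffices. A minimal example with invented types:

```rust
struct Parser {
    raw: Vec<String>,
    current: Option<String>,
}

impl Parser {
    // `Parser { raw, ... }` is shorthand for `Parser { raw: raw, ... }`.
    fn new(raw: Vec<String>) -> Self {
        Parser { raw, current: None }
    }
}

fn main() {
    let p = Parser::new(vec!["1".into(), "=".into(), "2".into()]);
    assert_eq!(p.raw.len(), 3);
    assert!(p.current.is_none());
}
```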

src/parser/rules.rs (42 lines changed)

@@ -21,7 +21,7 @@ use crate::lexer::{TokenKind, Value};
use std::convert::TryFrom;
impl Parser {
pub(super) fn parse_program(&mut self) -> Result<Program, String> {
pub fn parse_program(&mut self) -> Result<Program, String> {
let mut functions = Vec::new();
let globals = Vec::new();
@@ -31,7 +31,7 @@ impl Parser {
Ok(Program {
func: functions,
globals: globals,
globals,
})
}
@@ -42,7 +42,7 @@ impl Parser {
let mut scope = vec![];
// Parse statements until a curly brace is encountered
while let Err(_) = self.peek_token(TokenKind::CurlyBracesClose) {
while self.peek_token(TokenKind::CurlyBracesClose).is_err() {
let statement = self.parse_statement()?;
// If the current statement is a variable declaration,
@@ -81,16 +81,16 @@ impl Parser {
let body = self.parse_block()?;
Ok(Function {
name: name,
arguments: arguments,
body: body,
name,
arguments,
body,
ret_type: ty,
})
}
fn parse_arguments(&mut self) -> Result<Vec<Variable>, String> {
let mut args = Vec::new();
while let Err(_) = self.peek_token(TokenKind::BraceClose) {
while self.peek_token(TokenKind::BraceClose).is_err() {
let next = self.next()?;
match next.kind {
TokenKind::Comma => {
@@ -98,7 +98,7 @@ impl Parser {
}
TokenKind::Identifier(name) => {
args.push(Variable {
name: name,
name,
ty: Some(self.parse_type()?),
});
}
@@ -116,7 +116,7 @@ impl Parser {
TokenKind::Identifier(_) => Type::try_from(self.next()?.raw),
_ => Err("Expected type".into()),
}?;
if let Ok(_) = self.peek_token(TokenKind::SquareBraceOpen) {
if self.peek_token(TokenKind::SquareBraceOpen).is_ok() {
self.match_token(TokenKind::SquareBraceOpen)?;
self.match_token(TokenKind::SquareBraceClose)?;
Ok(Type::Array(Box::new(typ)))
@@ -127,7 +127,7 @@ impl Parser {
fn parse_statement(&mut self) -> Result<Statement, String> {
let token = self.peek()?;
let state = match &token.kind {
match &token.kind {
TokenKind::Keyword(Keyword::Let) => self.parse_declare(),
TokenKind::Keyword(Keyword::Return) => self.parse_return(),
TokenKind::Keyword(Keyword::If) => self.parse_conditional_statement(),
@@ -138,13 +138,13 @@ impl Parser {
TokenKind::Identifier(_) => {
let ident = self.match_identifier()?;
if let Ok(_) = self.peek_token(TokenKind::BraceOpen) {
if self.peek_token(TokenKind::BraceOpen).is_ok() {
let state = self.parse_function_call(Some(ident))?;
Ok(Statement::Exp(state))
} else if let Ok(_) = self.peek_token(TokenKind::Assign) {
} else if self.peek_token(TokenKind::Assign).is_ok() {
let state = self.parse_assignent(Some(Expression::Variable(ident)))?;
Ok(state)
} else if let Ok(_) = self.peek_token(TokenKind::SquareBraceOpen) {
} else if self.peek_token(TokenKind::SquareBraceOpen).is_ok() {
let expr = self.parse_array_access(Some(ident))?;
let next = self.peek()?;
@@ -152,19 +152,19 @@ impl Parser {
TokenKind::Assign => self.parse_assignent(Some(expr)),
_ => Ok(Statement::Exp(expr)),
}
} else if let Ok(_) = BinOp::try_from(self.peek()?.kind) {
let expr = Expression::Variable(ident.into());
} else if BinOp::try_from(self.peek()?.kind).is_ok() {
// Parse Binary operation
let expr = Expression::Variable(ident);
let state = Statement::Exp(self.parse_bin_op(Some(expr))?);
Ok(state)
} else {
let state = Statement::Exp(Expression::Variable(ident.into()));
let state = Statement::Exp(Expression::Variable(ident));
Ok(state)
}
}
TokenKind::Literal(_) => Ok(Statement::Exp(self.parse_expression()?)),
_ => return Err(self.make_error(TokenKind::Unknown, token)),
};
state
_ => Err(self.make_error(TokenKind::Unknown, token)),
}
}
/// Parses a function call from tokens.
@@ -398,7 +398,7 @@ impl Parser {
let left = match lhs {
Some(lhs) => lhs,
None => {
let prev = self.prev().ok_or_else(|| "Expected Token")?;
let prev = self.prev().ok_or("Expected token")?;
match &prev.kind {
TokenKind::Identifier(_) | TokenKind::Literal(_) | TokenKind::Keyword(_) => {
Ok(Expression::try_from(prev)?)
@@ -411,7 +411,7 @@ impl Parser {
let op = self.match_operator()?;
Ok(Expression::BinOp(
Box::from(Expression::try_from(left).map_err(|e| e.to_string())?),
Box::from(left),
op,
Box::from(self.parse_expression()?),
))
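Most of the remaining changes in this file replace `while let Err(_) = …` and `if let Ok(_) = …` with `.is_err()` / `.is_ok()`, clippy's `redundant_pattern_matching`. The equivalence in isolation (the `peek_token` helper here is a stand-in, not the parser's real method):

```rust
fn peek_token(tokens: &[&str], expected: &str) -> Result<(), String> {
    match tokens.first() {
        Some(t) if *t == expected => Ok(()),
        _ => Err(format!("expected {}", expected)),
    }
}

fn main() {
    let tokens = ["}", "fn"];
    // Before: if let Ok(_) = peek_token(&tokens, "}") { ... }
    // After:  if peek_token(&tokens, "}").is_ok() { ... }
    assert!(peek_token(&tokens, "}").is_ok());
    assert!(peek_token(&tokens, "{").is_err());
}
```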
