diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 73b3836..e5ec7fe 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -148,6 +148,7 @@ pub enum Keyword {
     Struct,
     New,
     Match,
+    Import,
     Unknown,
 }
 
@@ -376,6 +377,7 @@ impl Cursor<'_> {
             c if c == "struct" => Keyword::Struct,
             c if c == "new" => Keyword::New,
             c if c == "match" => Keyword::Match,
+            c if c == "import" => Keyword::Import,
             _ => Keyword::Unknown,
         }
     }
diff --git a/src/parser/node_type.rs b/src/parser/node_type.rs
index 82d78f1..5cb8aeb 100644
--- a/src/parser/node_type.rs
+++ b/src/parser/node_type.rs
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+use std::collections::HashSet;
 use crate::lexer::*;
 use core::convert::TryFrom;
 use std::collections::HashMap;
@@ -23,7 +24,7 @@ pub type SymbolTable = HashMap<String, Vec<Variable>>;
 #[derive(Debug, Clone)]
 pub struct Module {
     pub path: String,
-    pub imports: Vec<String>,
+    pub imports: HashSet<String>,
     pub func: Vec<Function>,
     pub structs: Vec<StructDef>,
     pub globals: Vec<Variable>,
diff --git a/src/parser/rules.rs b/src/parser/rules.rs
index a130d25..5db1704 100644
--- a/src/parser/rules.rs
+++ b/src/parser/rules.rs
@@ -1,3 +1,9 @@
+use super::node_type::Statement;
+use super::node_type::*;
+use super::parser::Parser;
+use crate::lexer::Keyword;
+use crate::lexer::{TokenKind, Value};
+use std::collections::HashMap;
 /**
  * Copyright 2020 Garrit Franke
  *
@@ -13,25 +19,23 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-use super::node_type::Statement;
-use super::node_type::*;
-use super::parser::Parser;
-use crate::lexer::Keyword;
-use crate::lexer::{TokenKind, Value};
-use std::collections::HashMap;
+use std::collections::HashSet;
 use std::convert::TryFrom;
 
 impl Parser {
     pub fn parse_module(&mut self) -> Result<Module, String> {
         let mut functions = Vec::new();
         let mut structs = Vec::new();
-        let imports = Vec::new();
+        let mut imports = HashSet::new();
         let globals = Vec::new();
 
         while self.has_more() {
             let next = self.peek()?;
             match next.kind {
                 TokenKind::Keyword(Keyword::Function) => functions.push(self.parse_function()?),
+                TokenKind::Keyword(Keyword::Import) => {
+                    imports.insert(self.parse_import()?);
+                }
                 TokenKind::Keyword(Keyword::Struct) => {
                     structs.push(self.parse_struct_definition()?)
                 }
@@ -144,6 +148,25 @@ impl Parser {
         })
     }
 
+    fn parse_import(&mut self) -> Result<String, String> {
+        self.match_keyword(Keyword::Import)?;
+        let import_path_token = self.match_token(TokenKind::Literal(Value::Str))?;
+
+        // Remove leading and trailing string tokens
+        let mut chars = import_path_token.raw.chars();
+        chars.next();
+        chars.next_back();
+
+        let import_path = if chars.as_str().ends_with(".sb") {
+            chars.collect()
+        } else {
+            format!("{}.sb", chars.collect::<String>())
+        };
+
+        Ok(import_path)
+
+    }
+
     fn parse_type(&mut self) -> Result<Type, String> {
         self.match_token(TokenKind::Colon)?;
         let next = self.peek()?;