Browse Source

Generate assembly

github-actions
Garrit Franke 3 years ago
parent
commit
e31446ac51
  1. 7
      examples_out/out.asm
  2. 17
      src/generator/x86.rs
  3. 19
      src/main.rs

7
examples_out/out.asm

@@ -0,0 +1,7 @@
# Generated output of the x86 backend: for each source function it emits a
# .globl directive exporting the symbol plus an (empty) label — no body yet.
.globl main
main:
.globl fib
fib:

17
src/generator/x86.rs

@@ -1,10 +1,25 @@
use crate::generator::Generator;
use crate::parser::node_type::Function;
use crate::parser::node_type::Program;
/// x86 assembly backend implementing the `Generator` trait.
pub struct X86Generator;

impl Generator for X86Generator {
    /// Lowers every function in `prog` to x86 assembly and concatenates the
    /// per-function snippets into one output string.
    fn generate(prog: Program) -> String {
        // `collect()` on an iterator of `String`s concatenates them.
        prog.func.into_iter().map(generate_function).collect()
    }
}

/// Emits the assembly prologue for a single function: a `.globl` directive
/// exporting the symbol, followed by its label. The function body is not
/// generated yet — only the skeleton.
fn generate_function(func: Function) -> String {
    format!(
        "
.globl {F}
{F}:
",
        F = func.name
    )
}

19
src/main.rs

@@ -1,26 +1,27 @@
use crate::generator::Generator;
use std::fs::File;
use std::io::Read;
use std::io::Write;
mod generator;
mod lexer;
mod parser;
mod util;
fn main() -> std::io::Result<()> {
let mut file = File::open("examples/hello_world.sb")?;
fn main() -> Result<(), String> {
let mut file = File::open("examples/hello_world.sb").expect("Could not open file");
let mut contents = String::new();
file.read_to_string(&mut contents)?;
file.read_to_string(&mut contents)
.expect("Could not read file");
let tokens = lexer::tokenize(&contents);
// let ast = parser::parse(tokens.into_iter());
let program = parser::parse(tokens, Some(contents));
match program {
Ok(p) => println!("{}", generator::x86::X86Generator::generate(p)),
Err(e) => panic!(e),
}
let program = parser::parse(tokens, Some(contents))?;
let output = generator::x86::X86Generator::generate(program);
let mut file = std::fs::File::create("examples_out/out.asm").expect("create failed");
file.write_all(output.as_bytes()).expect("write failed");
file.flush().expect("Could not flush file");
Ok(())
}

Loading…
Cancel
Save