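//! The `kabel` compiler pipeline: lexer, parser, semantic analyzer, code
//! generator, and virtual machine. [`compile`] runs every stage over a source
//! string and returns the program's output (and any errors) as text.
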
#[cfg(feature = "timer")]
use std::time::Instant;

use crate::codegen::Codegen;
use crate::lexer::{Lexer, Token};
use crate::parser::{Parser, AST};
use crate::semantic_analysis::Analyzer;

pub mod debug;
pub mod error;
pub mod runtime_error;
pub mod lexer;
pub mod macros;
pub mod parser;
pub mod semantic_analysis;
pub mod opcodes;
pub mod codegen;
pub mod vm;

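/// Runs the lexer over `input` until it stops producing tokens and returns
/// the finished [`Lexer`], with tokens in `lexer.output` and any lexing
/// errors in `lexer.errors`.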
pub fn run_lexer(input: String) -> Lexer {
    let mut lexer = Lexer::new(input);
    while lexer.next_token() {}
    lexer
}

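/// Parses the token stream `input` (alongside the original source `text`)
/// into an [`AST`], returning the [`Parser`] as well so callers can inspect
/// `parser.errors`.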
pub fn run_parser(text: String, input: Vec<Token>) -> (AST, Parser) {
    let mut parser = Parser::new(text, input);
    (parser.program(), parser)
}

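/// Walks the [`AST`] with the semantic [`Analyzer`]; any problems it finds
/// are collected in `analyzer.errors`.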
pub fn run_semantic_analysis(text: String, input: AST) -> Analyzer {
    let mut analyzer = Analyzer::new(text);
    analyzer.visit(input);
    analyzer
}

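/// Walks the [`AST`] with the [`Codegen`] visitor; the generated program ends
/// up in `codegen.vm`, ready to be executed.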
pub fn run_codegen(text: String, input: AST) -> Codegen {
    let mut codegen = Codegen::new(text);
    codegen.visit(input);
    codegen
}

// TODO: output bytecode
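/// Runs the full pipeline (lex, parse, semantic analysis, codegen, VM) over
/// `program` and returns everything it produced, program output and error
/// messages alike, as a single `String`. With the `timer` feature enabled,
/// per-stage timings are printed to stdout.
///
/// A minimal usage sketch (the source snippet is a made-up placeholder, not
/// necessarily valid Kabel syntax):
///
/// ```ignore
/// let output = compile("1 + 2".to_string());
/// println!("{output}");
/// ```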
pub fn compile(program: String) -> String {
    let mut output = String::new();

    #[cfg(feature = "timer")]
    let program_instant = Instant::now();

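    // Stage 1: lexical analysis.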
    #[cfg(feature = "timer")]
    let lexer_instant = Instant::now();
    let lexer = run_lexer(program.clone());
    #[cfg(feature = "timer")]
    {
        let lexer_elapsed = lexer_instant.elapsed();
        println!("lexer took: {:?}", lexer_elapsed);
    }

    for error in &lexer.errors {
        output += &error.to_string();
        output += "\n";
    }
    #[cfg(feature = "debug")]
    {
        output += &debug_token_array(lexer.output.clone());
    }
    if !lexer.errors.is_empty() || lexer.output.is_empty() {
        return output;
    }

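    // Stage 2: parsing.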
    #[cfg(feature = "timer")]
    let parser_instant = Instant::now();
    let (ast, parser) = run_parser(program.clone(), lexer.output);
    #[cfg(feature = "timer")]
    {
        let parser_elapsed = parser_instant.elapsed();
        println!("parser took: {:?}", parser_elapsed);
    }

    #[cfg(feature = "debug")]
    println!("{:#?}", ast);
    for error in &parser.errors {
        output += &error.to_string();
        output += "\n";
    }
    if !parser.errors.is_empty() {
        return output;
    }
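
    // Stage 3: semantic analysis.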
    #[cfg(feature = "timer")]
    let analyzer_instant = Instant::now();
    let analyzer = run_semantic_analysis(program.clone(), ast.clone());
    for error in &analyzer.errors {
        output += &error.to_string();
        output += "\n";
    }
    #[cfg(feature = "timer")]
    {
        let analyzer_elapsed = analyzer_instant.elapsed();
        println!("semantic analysis took: {:?}", analyzer_elapsed);
    }
    // Don't run codegen or the VM if semantic analysis reported errors.
    if !analyzer.errors.is_empty() {
        return output;
    }

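    // Stage 4: code generation.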
    #[cfg(feature = "timer")]
    let codegen_instant = Instant::now();
    let codegen = run_codegen(program, ast);
    #[cfg(feature = "timer")]
    {
        let codegen_elapsed = codegen_instant.elapsed();
        println!("codegen took: {:?}", codegen_elapsed);
    }

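    // Stage 5: run the generated program on the VM.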
    #[cfg(feature = "timer")]
    let vm_instant = Instant::now();
    let mut vm = codegen.vm;
    match vm.run(&mut output) {
        Ok(()) => {}
        Err(e) => output += &e.to_string(),
    }
    #[cfg(feature = "timer")]
    {
        let vm_elapsed = vm_instant.elapsed();
        println!("vm took: {:?}", vm_elapsed);

        let program_elapsed = program_instant.elapsed();
        println!("{:?}", program_elapsed);
    }

    output
}