~starkingdoms/starkingdoms

b7552ba591095ee7d312539348fb9568d0550398 — ghostly_zsh 1 year, 4 months ago 4ea4c4f
errors output whole lines
M kabel/src/debug.rs => kabel/src/debug.rs +6 -4
@@ 25,13 25,12 @@ pub fn debug_ast(ast: AST, level: usize) -> String {
        }
        Function(name, args, block) => {
            output += &"| ".repeat(level);
            output += "Function ";
            output += &(name.name + " ");
            output += "Function";
            output += &(" ".to_string() + &name.name);
            for arg in args {
                output += &(arg.name + " ");
                output += &(" ".to_string() + &arg.name);
            }
            output += "\n";
            output += &"| ".repeat(level);
            output += &debug_ast(*block, level+1);
        }
        Return(expr) => {


@@ 160,6 159,9 @@ pub fn debug_ast(ast: AST, level: usize) -> String {
                Num(value) => {
                    output += &value.to_string();
                }
                Bool(value) => {
                    output += &value.to_string();
                }
                Array(value) => {
                    for value in value {
                        output += "\n";

M kabel/src/error.rs => kabel/src/error.rs +2 -2
@@ 21,7 21,7 @@ impl KabelError {

impl std::fmt::Display for KabelError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let caret_space: String = vec![' '; self.column - 1].iter().collect();
        let caret_space: String = vec![' '; self.column].iter().collect();
        f.write_str(&format!(
            "Error {:0>4}: {1} at line {2}, column {3}\n\
                    {4}\n\


@@ 29,7 29,7 @@ impl std::fmt::Display for KabelError {
            self.kind.clone() as usize,
            self.message,
            self.line + 1,
            self.column,
            self.column + 1,
            self.code,
            caret_space
        ))
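
With the two changes above (caret_space is now self.column characters wide, and the reported column is printed 1-indexed as self.column + 1), a Display'ed KabelError should come out roughly like the sketch below. The values are made up, and it assumes the truncated tail of the format string ends by printing the offending source line followed by caret_space and a caret:

    Error 0001: Stray "#" at line 3, column 9
    var x = #1;
            ^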

M kabel/src/lexer.rs => kabel/src/lexer.rs +60 -53
@@ 11,6 11,7 @@ pub struct Lexer {
    current: usize,
    line: usize,
    line_start: usize,
    line_current: usize,
    column: usize,
    c: char,
    keywords: HashMap<String, TokenType>,


@@ 31,12 32,15 @@ impl Lexer {
        keywords.insert("if".to_string(), TokenType::If);
        keywords.insert("else".to_string(), TokenType::Else);
        keywords.insert("var".to_string(), TokenType::Var);
        keywords.insert("true".to_string(), TokenType::True);
        keywords.insert("false".to_string(), TokenType::False);
        Self {
            input: input.chars().collect(),
            start: 0,
            current: 0,
            line: 0,
            line_start: 0,
            line_current: 0,
            column: 0,
            c: '\0',
            keywords,


@@ 52,38 56,38 @@ impl Lexer {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::PlusEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else if self.peek() == '+' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::PlusPlus));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Plus));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '-' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::MinusEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else if self.peek() == '-' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::MinusMinus));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Minus));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '*' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::StarEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Star));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '/' => {


@@ 91,146 95,146 @@ impl Lexer {
                    while self.peek() != '\n' && self.current < self.input.len() {
                        self.read_char();
                    }
                    self.start = self.current;
                    self.start = self.line_current;
                } else if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::SlashEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Slash));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '%' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::PercentEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Percent));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '(' => {
                self.output.push(token!(self, TokenType::LeftParen));
                self.start = self.current;
                self.start = self.line_current;
            }
            ')' => {
                self.output.push(token!(self, TokenType::RightParen));
                self.start = self.current;
                self.start = self.line_current;
            }
            '{' => {
                self.output.push(token!(self, TokenType::LeftBrace));
                self.start = self.current;
                self.start = self.line_current;
            }
            '}' => {
                self.output.push(token!(self, TokenType::RightBrace));
                self.start = self.current;
                self.start = self.line_current;
            }
            '[' => {
                self.output.push(token!(self, TokenType::LeftSquare));
                self.start = self.current;
                self.start = self.line_current;
            }
            ']' => {
                self.output.push(token!(self, TokenType::RightSquare));
                self.start = self.current;
                self.start = self.line_current;
            }
            '.' => {
                self.output.push(token!(self, TokenType::Period));
                self.start = self.current;
                self.start = self.line_current;
            }
            ',' => {
                self.output.push(token!(self, TokenType::Comma));
                self.start = self.current;
                self.start = self.line_current;
            }
            ';' => {
                self.output.push(token!(self, TokenType::Semicolon));
                self.start = self.current;
                self.start = self.line_current;
            }
            ':' => {
                self.output.push(token!(self, TokenType::Colon));
                self.start = self.current;
                self.start = self.line_current;
            }
            '?' => {
                self.output.push(token!(self, TokenType::Question));
                self.start = self.current;
                self.start = self.line_current;
            }
            '^' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::CaretEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Caret));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '|' => {
                if self.peek() == '|' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::OrOr));
                    self.start = self.current;
                    self.start = self.line_current;
                } else if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::OrEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Or));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '&' => {
                if self.peek() == '&' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::AndAnd));
                    self.start = self.current;
                    self.start = self.line_current;
                } else if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::AndEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::And));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '=' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::EqualEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Equal));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '!' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::BangEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Bang));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '>' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::GreaterEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Greater));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '<' => {
                if self.peek() == '=' {
                    self.read_char();
                    self.output.push(token!(self, TokenType::LessEqual));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.output.push(token!(self, TokenType::Less));
                    self.start = self.current;
                    self.start = self.line_current;
                }
            }
            '"' => {


@@ 248,17 252,19 @@ impl Lexer {
                    }
                    contents.push(self.c as char);
                }
                self.line_current += contents.len();
                self.output.push(token!(self, TokenType::Str(contents)));
                self.start = self.current;
                self.start = self.line_current;
            }
            '\n' => {
                self.line += 1;
                self.line_start = self.current;
                self.start = 0;
                self.line_current = 0;
                self.column = 0;
                self.start = self.current;
            }
            ' ' | '\r' | '\t' => {
                self.start = self.current;
                self.start = self.line_current;
            }
            '\0' => return false,
            c => {


@@ 274,7 280,7 @@ impl Lexer {
                    } else {
                        self.output.push(token!(self, TokenType::Ident(content)));
                    }
                    self.start = self.current;
                    self.start = self.line_current;
                } else if c.is_ascii_digit() {
                    let mut number = (c as char).to_string();
                    while self.peek().is_ascii_digit() {


@@ 290,14 296,14 @@ impl Lexer {
                    // panic = error in this code
                    self.output
                        .push(token!(self, TokenType::Num(number.parse().unwrap())));
                    self.start = self.current;
                    self.start = self.line_current;
                } else {
                    self.errors.push(KabelError::new(
                        ErrorKind::UnexpectedToken,
                        format!("Stray \"{0}\"", c as char),
                        self.line,
                        self.column,
                        self.input[self.line_start..self.current].iter().collect(),
                        self.input[self.line_current..self.current].iter().collect(),
                    ));
                }
            }


@@ 313,6 319,7 @@ impl Lexer {
        self.c = self.input[self.current];
        self.current += 1;
        self.column += 1;
        self.line_current += 1;
        return self.c;
    }
    pub fn peek(&mut self) -> char {


@@ 328,11 335,9 @@ impl Lexer {
#[derive(Debug, Clone)]
pub struct Token {
    pub token_type: TokenType,
    pub column: usize,
    pub start_column: usize,
    pub end_column: usize,
    pub line: usize,
    pub line_start: usize,
    pub start: usize,
    pub end: usize,
}

#[derive(Debug, Clone, PartialEq)]


@@ 348,6 353,8 @@ pub enum TokenType {
    If,
    Else,
    Var,
    True,
    False,

    // characters
    Star,


@@ 399,9 406,9 @@ impl Display for TokenType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        use TokenType::*;
        token_display!(*self, f, Function, Return, Loop, While, For,
            Break, Continue, If, Else, Var, Star, StarEqual,
            Slash, SlashEqual, Percent, PercentEqual, Plus,
            PlusPlus, PlusEqual, Minus, MinusMinus, MinusEqual,
            Break, Continue, If, Else, Var, True, False,
                Star, StarEqual, Slash, SlashEqual, Percent, PercentEqual,
            Plus, PlusPlus, PlusEqual, Minus, MinusMinus, MinusEqual,
            LeftParen, RightParen, LeftBrace, RightBrace,
            LeftSquare, RightSquare, Equal, EqualEqual,
            Bang, BangEqual, Greater, GreaterEqual, Less,
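
The thread running through this file is the new line_current counter: read_char bumps it alongside current and column, the '\n' arm resets it, and every self.start assignment now records a position relative to the current line rather than the whole input, which is what feeds a Token's start_column/end_column. A minimal standalone sketch of that bookkeeping (hypothetical helper, not part of the Lexer itself):

    // Illustrative only: how a per-line cursor relates to the absolute cursor.
    fn per_line_columns(input: &str) -> Vec<(usize, usize, usize)> {
        let mut line = 0usize;         // 0-indexed line number, as in the lexer
        let mut line_current = 0usize; // column within the current line
        let mut positions = Vec::new();
        for (current, c) in input.chars().enumerate() {
            if c == '\n' {
                line += 1;
                line_current = 0;      // reset at every newline, like the '\n' arm
            } else {
                positions.push((current, line, line_current));
                line_current += 1;     // bumped once per character, like read_char
            }
        }
        positions
    }

For the input "ab\ncd" this yields (0, 0, 0), (1, 0, 1), (3, 1, 0) and (4, 1, 1): the absolute index keeps growing while the column starts over on each line.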

M kabel/src/macros.rs => kabel/src/macros.rs +66 -28
@@ 2,12 2,10 @@
macro_rules! token {
    ($self:expr, $token:expr) => {
        crate::lexer::Token {
            line: $self.line,
            line_start: $self.line_start,
            column: $self.column,
            start: $self.start,
            end: $self.current,
            token_type: $token,
            start_column: $self.start,
            end_column: $self.line_current,
            line: $self.line,
        }
    };
}


@@ 27,25 25,59 @@ macro_rules! lit {
    ($type:ident, $data:expr, $token:expr) => {
        $crate::parser::AST {
            ast_type: $crate::parser::ASTType::Lit($crate::parser::Lit::$type($data)),
            line_start: $token.line_start,
            start: $token.start,
            end: $token.end,
            line: $token.line,
            column: $token.column,
            start_line: $token.line,
            end_line: $token.line,
            start_column: $token.start_column,
            end_column: $token.end_column,
        }
    };
}

#[macro_export]
macro_rules! ast {
macro_rules! ast_from_token {
    ($ast_type:expr, $start:expr, $end:expr) => {
        AST {
            ast_type: $ast_type,
            start_line: $start.line,
            end_line: $end.line,
            start_column: $start.start_column,
            end_column: $end.end_column,
        }
    };
}
#[macro_export]
macro_rules! ast_from_token_ast {
    ($ast_type:expr, $start:expr, $end:expr) => {
        AST {
            ast_type: $ast_type,
            start_line: $start.line,
            end_line: $end.end_line,
            start_column: $start.start_column,
            end_column: $end.end_column,
        }
    };
}
#[macro_export]
macro_rules! ast_from_ast {
    ($ast_type:expr, $start:expr, $end:expr) => {
        AST {
            ast_type: $ast_type,
            start_line: $start.start_line,
            end_line: $end.end_line,
            start_column: $start.start_column,
            end_column: $end.end_column,
        }
    };
}
#[macro_export]
macro_rules! ast_from_ast_token {
    ($ast_type:expr, $start:expr, $end:expr) => {
        AST {
            ast_type: $ast_type,
            line_start: $start.line_start,
            start: $start.start,
            end: $end.end,
            line: $start.line,
            column: $start.column,
            start_line: $start.start_line,
            end_line: $end.line,
            start_column: $start.start_column,
            end_column: $end.end_column,
        }
    };
}


@@ 55,10 87,9 @@ macro_rules! name {
    ($name:expr, $token:expr) => {
        Name {
            name: $name,
            line_start: $token.line_start,
            end: $token.end,
            start_column: $token.start_column,
            end_column: $token.end_column,
            line: $token.line,
            column: $token.column,
        }
    };
}


@@ 77,10 108,10 @@ macro_rules! unexpected_token {
    ($self:expr, $message:expr, $token:expr) => {
        $crate::error::KabelError::new(
            $crate::error::ErrorKind::UnexpectedToken,
            format!($message, $self.text[$token.start..$token.end].to_string()),
            format!($message, $self.text[$token.line][$token.start_column..$token.end_column].to_string()),
            $token.line,
            $token.column,
            $self.text[$token.line_start..$token.end].to_string(),
            $token.start_column,
            $self.text[$token.line].to_string(),
        )
    };
}


@@ 91,9 122,9 @@ macro_rules! out_of_scope {
        $crate::error::KabelError::new(
            $crate::error::ErrorKind::OutOfScope,
            format!($message, $name),
            $expr.line,
            $expr.column,
            $self.text[$expr.line_start..$expr.end].to_string(),
            $expr.start_line,
            $expr.start_column,
            $crate::collect_lines!($self.text[$expr.start_line..$expr.end_line+1]),
        )
    };
}


@@ 103,9 134,16 @@ macro_rules! out_of_scope_var {
        $crate::error::KabelError::new(
            $crate::error::ErrorKind::OutOfScope,
            format!($message, $name.name),
            $expr.line,
            $name.column,
            $self.text[$expr.line_start..$expr.end].to_string(),
            $expr.start_line,
            $name.start_column,
            $crate::collect_lines!($self.text[$expr.start_line..$expr.end_line+1]),
        )
    };
}

#[macro_export]
macro_rules! collect_lines {
    ($string:expr) => {
        $string.iter().fold("".to_string(), |acc, string| acc + string + "\n").trim_end().to_string()
    };
}
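
collect_lines! is the piece that lets the parser and analyzer echo multi-line spans back in error messages: it folds a slice of per-line Strings into one newline-joined block and trims the trailing newline. A quick expectation sketch (made-up input; assumes the macro is in scope via crate::collect_lines):

    let text: Vec<String> = vec!["var x = 1;".to_string(), "foo(x".to_string()];
    // Two lines joined with a single '\n', no trailing newline left behind.
    assert_eq!(collect_lines!(text[0..2]), "var x = 1;\nfoo(x");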

M kabel/src/parser.rs => kabel/src/parser.rs +85 -90
@@ 1,14 1,10 @@
use crate::{
    ast,
    error::{ErrorKind, KabelError},
    lexer::{Token, TokenType},
    lit, unexpected_token, name,
    ast_from_ast, ast_from_ast_token, ast_from_token, ast_from_token_ast, collect_lines, error::{ErrorKind, KabelError}, lexer::{Token, TokenType}, lit, name, unexpected_token
};

pub struct Parser {
    input: Vec<Token>,
    text: String,
    //start: usize,
    text: Vec<String>,
    current: usize,
    token: Token,
    pub errors: Vec<KabelError>,


@@ 18,8 14,7 @@ impl Parser {
    pub fn new(text: String, input: Vec<Token>) -> Self {
        Self {
            input: input.clone(),
            text,
            //start: 0,
            text: text.lines().collect::<Vec<&str>>().iter().map(|s| s.to_string()).collect(),
            current: 0,
            token: input[0].clone(),
            errors: Vec::new(),


@@ 39,11 34,10 @@ impl Parser {
        }
        AST {
            ast_type: ASTType::Program(program),
            line_start: 0,
            start: 0,
            end: 0,
            line: 0,
            column: 0,
            start_line: 0,
            end_line: 0,
            start_column: 0,
            end_column: 0,
        }
    }



@@ 83,7 77,7 @@ impl Parser {
                let right_paren = self.read_token()?;
                if let TokenType::RightParen = right_paren.token_type {
                    let block = self.block()?;
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Function(
                            name!(name, ident),
                            expressions,


@@ 111,7 105,7 @@ impl Parser {
        let return_ident = self.read_token()?;
        if let TokenType::Semicolon = self.peek()?.token_type {
            let semicolon = self.read_token()?;
            return Ok(ast!(
            return Ok(ast_from_token!(
                ASTType::Return(Box::new(None)),
                return_ident,
                semicolon


@@ 120,7 114,7 @@ impl Parser {
        let expression = self.expression()?;
        let semicolon = self.read_token()?;
        if let TokenType::Semicolon = semicolon.token_type {
            Ok(ast!(
            Ok(ast_from_token!(
                ASTType::Return(Box::new(Some(expression))),
                return_ident,
                semicolon


@@ 133,7 127,7 @@ impl Parser {
    pub fn loop_statement(&mut self) -> Result<AST, KabelError> {
        let loop_ident = self.read_token()?;
        let block = self.block()?;
        Ok(ast!(
        Ok(ast_from_token_ast!(
            ASTType::Loop(Box::new(block.clone())),
            loop_ident,
            block


@@ 148,7 142,7 @@ impl Parser {
            let right_paren = self.read_token()?;
            if let TokenType::RightParen = right_paren.token_type {
                let block = self.block()?;
                return Ok(ast!(
                return Ok(ast_from_token_ast!(
                    ASTType::While(Box::new(condition), Box::new(block.clone())),
                    while_ident,
                    block


@@ 190,7 184,7 @@ impl Parser {
                    let right_paren = self.read_token()?;
                    if let TokenType::RightParen = right_paren.token_type {
                        let block = self.block()?;
                        return Ok(ast!(
                        return Ok(ast_from_token_ast!(
                            ASTType::For(
                                Box::new(expression1),
                                Box::new(expression2),


@@ 218,7 212,7 @@ impl Parser {
        let break_ident = self.read_token()?;
        let semicolon = self.read_token()?;
        if let TokenType::Semicolon = semicolon.token_type {
            Ok(ast!(ASTType::Break, break_ident, semicolon))
            Ok(ast_from_token!(ASTType::Break, break_ident, semicolon))
        } else {
            Err(unexpected_token!(self, "Expected ; found {}", semicolon))
        }


@@ 228,7 222,7 @@ impl Parser {
        let continue_ident = self.read_token()?;
        let semicolon = self.read_token()?;
        if let TokenType::Semicolon = semicolon.token_type {
            Ok(ast!(ASTType::Continue, continue_ident, semicolon))
            Ok(ast_from_token!(ASTType::Continue, continue_ident, semicolon))
        } else {
            Err(unexpected_token!(self, "Expected ; found {}", semicolon))
        }


@@ 247,7 241,7 @@ impl Parser {
                    if let TokenType::Else = else_ident.token_type {
                        if let TokenType::LeftBrace = self.peek()?.token_type {
                            let else_block = self.block()?;
                            return Ok(ast!(
                            return Ok(ast_from_token_ast!(
                                ASTType::If(
                                    Box::new(condition),
                                    Box::new(block.clone()),


@@ 260,7 254,7 @@ impl Parser {
                        let else_if_ident = self.peek()?;
                        if let TokenType::If = else_if_ident.token_type {
                            let else_if = self.if_statement()?;
                            return Ok(ast!(
                            return Ok(ast_from_token_ast!(
                                ASTType::If(
                                    Box::new(condition),
                                    Box::new(block.clone()),


@@ 273,7 267,7 @@ impl Parser {
                        return Err(unexpected_token!(self, "Unexpected token {}", else_ident));
                    }
                }
                return Ok(ast!(
                return Ok(ast_from_token_ast!(
                    ASTType::If(Box::new(condition), Box::new(block.clone()), Box::new(None)),
                    if_ident,
                    block


@@ 294,7 288,7 @@ impl Parser {
                stmts.push(self.statement()?);
            }
            let right_brace = self.read_token()?;
            return Ok(ast!(ASTType::Block(stmts), left_brace, right_brace));
            return Ok(ast_from_token!(ASTType::Block(stmts), left_brace, right_brace));
        } else {
            return Err(unexpected_token!(self, "Expected {{ found {}", left_brace));
        }


@@ 307,9 301,9 @@ impl Parser {
            return Err(KabelError::new(
                ErrorKind::UnexpectedEof,
                "Unexpected end of file, expected ;".to_string(),
                expression.line,
                last.column,
                self.text[last.line_start..expression.end].to_string(),
                expression.start_line,
                last.end_column,
                self.text[last.line].to_string(),
            ));
        }
        let semicolon = self.read_token()?;


@@ 336,7 330,7 @@ impl Parser {
            let equal = self.read_token()?;
            if let TokenType::Equal = equal.token_type {
                let expr = self.expression()?;
                return Ok(ast!(
                return Ok(ast_from_token_ast!(
                    ASTType::Decl(name!(name, ident), Box::new(expr.clone())),
                    var,
                    expr


@@ 373,7 367,7 @@ impl Parser {
                let binop = self.read_token()?;
                let expr = self.expression()?;
                if binop.token_type == TokenType::Equal {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name, ident),
                            Box::new(expr.clone())


@@ 382,11 376,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::PlusEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::Add,


@@ 402,11 396,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::MinusEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::Sub,


@@ 422,11 416,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::StarEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::Mul,


@@ 442,11 436,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::SlashEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name.clone(), ident)),
                                        BinOp::Div,


@@ 462,11 456,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::PercentEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::Mod,


@@ 482,11 476,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::AndEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::BitAnd,


@@ 502,11 496,11 @@ impl Parser {
                        expr
                    ));
                } else if binop.token_type == TokenType::CaretEqual {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::BitXor,


@@ 522,11 516,11 @@ impl Parser {
                        expr
                    ));
                } else {
                    return Ok(ast!(
                    return Ok(ast_from_token_ast!(
                        ASTType::Assign(
                            name!(name.clone(), ident),
                            Box::new(
                                ast!(
                                ast_from_ast!(
                                    ASTType::Binary(
                                        Box::new(lit!(Ident, name, ident)),
                                        BinOp::BitOr,


@@ 559,7 553,7 @@ impl Parser {
            if let TokenType::Colon = self.peek()?.token_type {
                self.read_token()?;
                let false_expr = self.expression()?;
                return Ok(ast!(
                return Ok(ast_from_ast!(
                    ASTType::Ternary(
                        Box::new(condition.clone()),
                        Box::new(true_expr),


@@ 582,7 576,7 @@ impl Parser {
        while self.current < self.input.len() && self.peek()?.token_type == TokenType::OrOr {
            self.read_token()?;
            let right = self.logical_and()?;
            left = ast!(
            left = ast_from_ast!(
                ASTType::Binary(Box::new(left.clone()), BinOp::Or, Box::new(right.clone())),
                left,
                right


@@ 597,7 591,7 @@ impl Parser {
        while self.current < self.input.len() && self.peek()?.token_type == TokenType::AndAnd {
            self.read_token()?;
            let right = self.bit_and()?;
            left = ast!(
            left = ast_from_ast!(
                ASTType::Binary(Box::new(left.clone()), BinOp::And, Box::new(right.clone())),
                left,
                right


@@ 612,7 606,7 @@ impl Parser {
        while self.current < self.input.len() && self.peek()?.token_type == TokenType::And {
            self.read_token()?;
            let right = self.bit_xor()?;
            left = ast!(
            left = ast_from_ast!(
                ASTType::Binary(
                    Box::new(left.clone()),
                    BinOp::BitAnd,


@@ 631,7 625,7 @@ impl Parser {
        while self.current < self.input.len() && self.peek()?.token_type == TokenType::Caret {
            self.read_token()?;
            let right = self.bit_or()?;
            left = ast!(
            left = ast_from_ast!(
                ASTType::Binary(
                    Box::new(left.clone()),
                    BinOp::BitXor,


@@ 650,7 644,7 @@ impl Parser {
        while self.current < self.input.len() && self.peek()?.token_type == TokenType::Or {
            self.read_token()?;
            let right = self.equality()?;
            left = ast!(
            left = ast_from_ast!(
                ASTType::Binary(
                    Box::new(left.clone()),
                    BinOp::BitOr,


@@ 673,13 667,13 @@ impl Parser {
            let binop = self.read_token()?;
            let right = self.comparison()?;
            if binop.token_type == TokenType::EqualEqual {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Eq, Box::new(right.clone())),
                    left,
                    right
                );
            } else {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Ne, Box::new(right.clone())),
                    left,
                    right


@@ 702,25 696,25 @@ impl Parser {
            let binop = self.read_token()?;
            let right = self.term()?;
            if binop.token_type == TokenType::Less {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Ls, Box::new(right.clone())),
                    left,
                    right
                );
            } else if binop.token_type == TokenType::LessEqual {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Le, Box::new(right.clone())),
                    left,
                    right
                );
            } else if binop.token_type == TokenType::Greater {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Gr, Box::new(right.clone())),
                    left,
                    right
                );
            } else {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Ge, Box::new(right.clone())),
                    left,
                    right


@@ 742,13 736,13 @@ impl Parser {
            let right = self.factor()?;

            if binop.token_type == TokenType::Plus {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Add, Box::new(right.clone())),
                    left,
                    right
                );
            } else {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Sub, Box::new(right.clone())),
                    left,
                    right


@@ 769,19 763,19 @@ impl Parser {
            let right = self.unary()?;

            if binop.token_type == TokenType::Star {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Mul, Box::new(right.clone())),
                    left,
                    right
                );
            } else if binop.token_type == TokenType::Slash {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Div, Box::new(right.clone())),
                    left,
                    right
                );
            } else {
                left = ast!(
                left = ast_from_ast!(
                    ASTType::Binary(Box::new(left.clone()), BinOp::Mod, Box::new(right.clone())),
                    left,
                    right


@@ 795,13 789,13 @@ impl Parser {
            let token = self.read_token()?;
            let unary = self.unary()?;
            if token.token_type == TokenType::Bang {
                return Ok(ast!(
                return Ok(ast_from_token_ast!(
                    ASTType::Unary(UnOp::Not, Box::new(unary.clone())),
                    token,
                    unary
                ));
            } else {
                return Ok(ast!(
                return Ok(ast_from_token_ast!(
                    ASTType::Unary(UnOp::Neg, Box::new(unary.clone())),
                    token,
                    unary


@@ 819,7 813,7 @@ impl Parser {
            let expr = self.expression()?;
            let right_brace = self.read_token()?;
            if let TokenType::RightSquare = right_brace.token_type {
                primary = ast!(
                primary = ast_from_ast_token!(
                    ASTType::Subscript(Box::new(primary.clone()), Box::new(expr)),
                    primary,
                    right_brace


@@ 855,6 849,8 @@ impl Parser {
            TokenType::Str(string) => {
                return Ok(lit!(Str, string, token));
            }
            TokenType::True => return Ok(lit!(Bool, true, token)),
            TokenType::False => return Ok(lit!(Bool, false, token)),
            TokenType::LeftParen => {
                return self.group(token);
            }


@@ 876,7 872,7 @@ impl Parser {
            }
        }
        let right_square = self.read_token()?;
        Ok(ast!(
        Ok(ast_from_token!(
            ASTType::Lit(Lit::Array(expressions)),
            left_square,
            right_square


@@ 893,7 889,7 @@ impl Parser {
                    if self.current < self.input.len() {
                        if let TokenType::LeftParen = self.peek()?.token_type {
                            let call = self.call(child)?;
                            expr = ast!(
                            expr = ast_from_ast!(
                                ASTType::Member(Box::new(expr.clone()), Box::new(call.clone())),
                                expr,
                                call


@@ 904,7 900,7 @@ impl Parser {
                            continue;
                        }
                    }
                    expr = ast!(
                    expr = ast_from_ast_token!(
                        ASTType::Member(
                            Box::new(expr.clone()),
                            Box::new(lit!(Ident, child_str, child))


@@ 935,7 931,7 @@ impl Parser {
        }
        let right_paren = self.read_token()?;
        if let TokenType::Ident(name) = ident.token_type {
            return Ok(ast!(
            return Ok(ast_from_token!(
                ASTType::Call(name!(name, ident), expressions),
                ident,
                right_paren


@@ 947,10 943,10 @@ impl Parser {
        if let TokenType::Ident(name) = ident.token_type {
            let oper = self.read_token()?;
            if oper.token_type == TokenType::PlusPlus {
                return Ok(ast!(
                return Ok(ast_from_token!(
                    ASTType::Assign(
                        name!(name.clone(), ident),
                        Box::new(ast!(
                        Box::new(ast_from_token!(
                            ASTType::Binary(
                                Box::new(lit!(Ident, name, ident)),
                                BinOp::Add,


@@ 964,10 960,10 @@ impl Parser {
                    oper
                ));
            } else {
                return Ok(ast!(
                return Ok(ast_from_token!(
                    ASTType::Assign(
                        name!(name.clone(), ident),
                        Box::new(ast!(
                        Box::new(ast_from_token!(
                            ASTType::Binary(
                                Box::new(lit!(Ident, name, ident)),
                                BinOp::Sub,


@@ 994,12 990,12 @@ impl Parser {
                    ErrorKind::MissingDelimiter,
                    "Missing right parenthesis".to_string(),
                    right_paren.line,
                    right_paren.column,
                    self.text[left_paren.start..right_paren.end].to_string(),
                    right_paren.start_column,
                    self.text[left_paren.line..right_paren.line].iter().fold("".to_string(), |acc, string| acc + string + "\n"),
                ));
            }
            self.read_token()?;
            return Ok(ast!(
            return Ok(ast_from_token!(
                expr.ast_type,
                left_paren,
                right_paren


@@ 1011,7 1007,7 @@ impl Parser {
                "Missing right parenthesis".to_string(),
                e.line,
                e.column,
                self.text[left_paren.line_start..expr.end].to_string(),
                collect_lines!(self.text[left_paren.line..expr.end_line]),
            ));
        }
        unreachable!();


@@ 1024,8 1020,8 @@ impl Parser {
                ErrorKind::UnexpectedEof,
                "Unexpected end of file".to_string(),
                last_token.line,
                last_token.column,
                self.text[last_token.line_start..last_token.end].to_string(),
                last_token.start_column,
                self.text[last_token.line].clone(),
            ));
        }
        self.token = self.input[self.current].clone();


@@ 1039,8 1035,8 @@ impl Parser {
                ErrorKind::UnexpectedEof,
                "Unexpected end of file".to_string(),
                last_token.line,
                last_token.column,
                self.text[last_token.line_start..last_token.end].to_string(),
                last_token.start_column,
                self.text[last_token.line].clone(),
            ));
        }
        return Ok(self.input[self.current].clone());


@@ 1050,11 1046,10 @@ impl Parser {
#[derive(Debug, Clone)]
pub struct AST {
    pub ast_type: ASTType,
    pub line_start: usize,
    pub start: usize,
    pub end: usize,
    pub line: usize,
    pub column: usize,
    pub start_line: usize,
    pub end_line: usize,
    pub start_column: usize,
    pub end_column: usize,
}

#[derive(Debug, Clone)]


@@ 1094,10 1089,9 @@ pub enum ASTType {
#[derive(Debug, Clone)]
pub struct Name {
    pub name: String,
    pub line_start: usize,
    pub end: usize,
    pub start_column: usize,
    pub end_column: usize,
    pub line: usize,
    pub column: usize,
}

#[derive(Debug, Clone)]


@@ 1105,6 1099,7 @@ pub enum Lit {
    Ident(String),
    Num(f32),
    Str(String),
    Bool(bool),
    Array(Vec<AST>),
}


M kabel/src/semantic_analysis.rs => kabel/src/semantic_analysis.rs +6 -6
@@ 1,9 1,9 @@
use std::collections::HashMap;

use crate::{error::{ErrorKind, KabelError}, out_of_scope, out_of_scope_var, parser::{ASTType, Lit, Name, AST}};
use crate::{collect_lines, error::{ErrorKind, KabelError}, out_of_scope, out_of_scope_var, parser::{ASTType, Lit, Name, AST}};

pub struct Analyzer {
    text: String,
    text: Vec<String>,
    symbol_table: Vec<HashMap<String, Symbol>>,
    pub errors: Vec<KabelError>,
}


@@ 11,7 11,7 @@ pub struct Analyzer {
impl Analyzer {
    pub fn new(text: String) -> Self {
        Self {
            text,
            text: text.lines().collect::<Vec<&str>>().iter().map(|s| s.to_string()).collect(),
            symbol_table: vec![HashMap::new()],
            errors: Vec::new(),
        }


@@ 192,9 192,9 @@ impl Analyzer {
                        KabelError::new(
                            ErrorKind::OutOfScope,
                            format!("Function {} has {} argument, provided {}", name, *f_arity, arity),
                            ast.line,
                            ast.column,
                            self.text[ast.line_start..ast.end].to_string(),
                            ast.start_line,
                            ast.start_column,
                            collect_lines!(self.text[ast.start_line-1..ast.end_line-1]),
                        )
                    );
                    return true;
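
Like the parser, the analyzer now stores the source as one String per line so that collect_lines! can slice whole lines out for error output. A behaviourally equivalent sketch of the conversion used in both constructors, written without the intermediate Vec<&str>:

    // Same result as text.lines().collect::<Vec<&str>>().iter().map(|s| s.to_string()).collect()
    let text: Vec<String> = text.lines().map(|s| s.to_string()).collect();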