Add a variable assignment statement

parent a8c164819a
commit 746d567554

19 changed files with 195 additions and 84 deletions
Cargo.lock (generated, 16 changes)
@@ -150,6 +150,7 @@ dependencies = [
  "color-eyre",
  "dotenv",
  "from_variants",
+ "itertools",
  "match_any",
  "tracing",
  "tracing-subscriber",
@@ -196,6 +197,12 @@ version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
 
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
 [[package]]
 name = "eyre"
 version = "0.6.7"
@@ -282,6 +289,15 @@ dependencies = [
  "hashbrown",
 ]
 
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
Cargo.toml
@@ -8,6 +8,7 @@ clap = { version = "3.1.9", features = ["derive"] }
 color-eyre = "0.6.1"
 dotenv = "0.15.0"
 from_variants = "1.0.0"
+itertools = "0.10.3"
 match_any = "1.0.1"
 tracing = "0.1.32"
 tracing-subscriber = { version = "0.3.9", features = ["env-filter"] }
src/ast/ast_parser_iter.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
+use itertools::{peek_nth, PeekNth, PeekingNext};
+
+pub struct ParserIter<T: Iterator>(PeekNth<T>);
+
+impl<T: Iterator> ParserIter<T> {
+    pub fn new(iter: T) -> ParserIter<T> {
+        ParserIter(peek_nth(iter))
+    }
+
+    pub fn next(&mut self) -> T::Item {
+        self.0.next().unwrap()
+    }
+
+    pub fn peek_nth(&mut self, n: usize) -> &T::Item {
+        self.0.peek_nth(n).unwrap()
+    }
+    pub fn peek(&mut self) -> &T::Item {
+        self.peek_nth(0)
+    }
+}
+
+impl<T: Iterator> Iterator for ParserIter<T> {
+    type Item = T::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        Some(self.next())
+    }
+}
+
+impl<T: Iterator> PeekingNext for ParserIter<T> {
+    fn peeking_next<F>(&mut self, accept: F) -> Option<Self::Item>
+    where
+        F: FnOnCe(&Self::Item) -> bool,
+    {
+        self.0.peeking_next(accept)
+    }
+}
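The PeekNth-backed wrapper above exists so the parser can look more than one token ahead; later in this diff, the statement parser peeks at positions 0 and 1 (an identifier, then `=`) before committing to an assignment. A minimal sketch of that lookahead idea, using only the itertools crate added in this commit — the string "tokens" are illustrative stand-ins, not the project's real Token type:

use itertools::peek_nth;

fn main() {
    // Pretend token stream for `x = 1 ;`.
    let tokens = vec!["x", "=", "1", ";"];
    let mut iter = peek_nth(tokens.into_iter());

    // Two-token lookahead: an identifier followed by `=` starts an assignment.
    let is_assignment = iter
        .peek_nth(0)
        .map_or(false, |t| t.chars().all(|c| c.is_ascii_alphabetic()))
        && iter.peek_nth(1).map_or(false, |t| *t == "=");

    assert!(is_assignment);
    // Peeking consumed nothing, so the tokens can still be read in order.
    assert_eq!(iter.next(), Some("x"));
}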
@@ -120,7 +120,7 @@ pub enum Literal {
     Int(i32),
     Float(f32),
     Bool(bool),
-    Nil
+    Nil,
 }
 
 pub struct BinaryExpr {
@@ -1,3 +1,5 @@
+use itertools::PeekingNext;
+
 use crate::lexer::token::{self, TokenType};
 
 use super::super::parser::{InnerASTParsingError, Parser, Result};
@@ -12,7 +14,7 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
         let mut node = self.comparison()?;
         while let Some(o) = self
             .token_iter
-            .next_if(|t| matches!(t.token_type, TokenType::EqualEqual | TokenType::BangEqual))
+            .peeking_next(|t| matches!(t.token_type, TokenType::EqualEqual | TokenType::BangEqual))
         {
             node = BinaryExpr::new(
                 Box::new(node),
@@ -27,7 +29,7 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
     fn comparison(&mut self) -> Result<ExpressionNode> {
         let mut node = self.term()?;
 
-        while let Some(o) = self.token_iter.next_if(|t| {
+        while let Some(o) = self.token_iter.peeking_next(|t| {
             matches!(
                 t.token_type,
                 TokenType::Greater
@@ -50,7 +52,7 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
 
         while let Some(o) = self
             .token_iter
-            .next_if(|t| matches!(t.token_type, TokenType::Minus | TokenType::Plus))
+            .peeking_next(|t| matches!(t.token_type, TokenType::Minus | TokenType::Plus))
         {
             node = BinaryExpr::new(
                 Box::new(node),
@@ -67,7 +69,7 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
 
         while let Some(o) = self
             .token_iter
-            .next_if(|t| matches!(t.token_type, TokenType::Star | TokenType::Slash))
+            .peeking_next(|t| matches!(t.token_type, TokenType::Star | TokenType::Slash))
         {
             node = BinaryExpr::new(
                 Box::new(node),
@@ -82,7 +84,7 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
     fn unary(&mut self) -> Result<ExpressionNode> {
         if let Some(op) = self
             .token_iter
-            .next_if(|t| matches!(t.token_type, TokenType::Bang | TokenType::Minus))
+            .peeking_next(|t| matches!(t.token_type, TokenType::Bang | TokenType::Minus))
         {
             let right = Box::new(self.unary()?);
             Ok(ExpressionNode::UnaryExpr(UnaryExpr::new(
@@ -95,8 +97,8 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
     }
 
     fn primary(&mut self) -> Result<ExpressionNode> {
-        let node = match self.token_iter.next() {
-            Some(token) => match token.token_type {
+        let token = self.token_iter.next();
+        let node = match token.token_type {
             TokenType::False => ExpressionNode::Literal(Literal::Bool(false)),
             TokenType::True => ExpressionNode::Literal(Literal::Bool(true)),
             TokenType::Int(i) => ExpressionNode::Literal(Literal::Int(i)),
@@ -108,17 +110,13 @@ impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
                 let group = GroupingExpr::new(Box::new(expr));
                 match self
                     .token_iter
-                    .next_if(|v| matches!(v.token_type, TokenType::RightParen))
+                    .peeking_next(|v| matches!(v.token_type, TokenType::RightParen))
                 {
                     Some(_) => return Ok(group.into()),
-                    None => {
-                        return Err(token.location.wrap(InnerASTParsingError::UnmatchedBrace))
-                    }
+                    None => return Err(token.location.wrap(InnerASTParsingError::UnmatchedBrace)),
                 }
             }
             a => return Err(token.location.wrap(InnerASTParsingError::IncorrectToken(a))),
-        },
-        None => todo!(),
         };
         Ok(node)
     }
@@ -1,3 +1,4 @@
+mod ast_parser_iter;
 pub mod expression;
-pub mod statement;
 pub mod parser;
+pub mod statement;
@@ -1,15 +1,14 @@
-use super::expression::expression_node;
+use super::ast_parser_iter::ParserIter;
 use super::statement::statement_node;
 use crate::error::ErrorLocationWrapper;
 use crate::lexer::{token, token::TokenType};
 
-use std::iter;
 use std::result::Result as StdResult;
 
 #[derive(Debug)]
 pub enum InnerASTParsingError {
     IncorrectToken(TokenType),
     UnmatchedBrace,
+    ExpectedSemi,
 }
 
 impl std::fmt::Display for InnerASTParsingError {
@@ -17,6 +16,7 @@ impl std::fmt::Display for InnerASTParsingError {
         match *self {
             Self::UnmatchedBrace => write!(f, "Unmatched brace"),
             Self::IncorrectToken(ref token) => write!(f, "Incorrect token {:?}", token),
+            Self::ExpectedSemi => write!(f, "Expected semicolon"),
         }
     }
 }
@@ -26,7 +26,7 @@ pub type ASTParsingError = ErrorLocationWrapper<InnerASTParsingError>;
 pub(super) type Result<T> = StdResult<T, ASTParsingError>;
 
 pub struct Parser<'a, T: Iterator<Item = token::Token<'a>>> {
-    pub(super) token_iter: iter::Peekable<T>,
+    pub(super) token_iter: super::ast_parser_iter::ParserIter<T>,
 }
 
 pub type ParseAllResult = StdResult<Vec<statement_node::Statement>, Vec<ASTParsingError>>;
@@ -34,13 +34,13 @@ pub type ParseAllResult = StdResult<Vec<statement_node::Statement>, Vec<ASTParsi
 impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
     pub fn new(iter: T) -> Parser<'a, T> {
         Parser {
-            token_iter: iter.peekable(),
+            token_iter: ParserIter::new(iter),
         }
     }
     pub fn parse_all(&mut self) -> ParseAllResult {
         let mut res = Ok(Vec::new());
 
-        while !matches!(self.token_iter.peek().unwrap().token_type, token::TokenType::Eof) {
+        while !matches!(self.token_iter.peek().token_type, token::TokenType::Eof) {
             match self.statement() {
                 Ok(s) => {
                     if let Ok(ref mut v) = res {
@@ -5,6 +5,7 @@ use from_variants::FromVariants;
 pub enum Statement {
     Expression(ExpressionStatement),
     Print(PrintStatement),
+    VariableAssignment(VariableAssignmentStatement),
 }
 
 macro_rules! all_variants {
@@ -12,7 +13,7 @@ macro_rules! all_variants {
     {
         use match_any::match_any;
         use $crate::ast::statement::statement_node::*;
-        match_any!($expr, Statement::Expression($val_name) | Statement::Print($val_name) => $expr_arm)
+        match_any!($expr, Statement::Expression($val_name) | Statement::Print($val_name) | Statement::VariableAssignment($val_name) => $expr_arm)
     }
     };
 }
@@ -33,3 +34,15 @@ impl PrintStatement {
         Self(expr)
     }
 }
+
+#[derive(Debug)]
+pub struct VariableAssignmentStatement {
+    pub var_name: String,
+    pub node: ExpressionNode,
+}
+
+impl VariableAssignmentStatement {
+    pub fn new(var_name: String, node: ExpressionNode) -> Self {
+        Self { var_name, node }
+    }
+}
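The macro change above threads the new variant through the match_any-based dispatch, so every Statement variant keeps flowing through one shared arm. A stand-alone sketch of that pattern, with a hypothetical String payload in place of the real statement structs:

use match_any::match_any;

// Hypothetical stand-ins for the real statement payload types.
enum Statement {
    Expression(String),
    Print(String),
    VariableAssignment(String),
}

fn describe(stmt: &Statement) -> &str {
    // match_any! expands to a match in which every listed pattern shares the same arm body.
    match_any!(stmt, Statement::Expression(s) | Statement::Print(s) | Statement::VariableAssignment(s) => s.as_str())
}

fn main() {
    let stmt = Statement::VariableAssignment("x = 1;".to_string());
    assert_eq!(describe(&stmt), "x = 1;");
}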
@@ -1,37 +1,70 @@
-use super::statement_node::{ExpressionStatement, Statement, PrintStatement};
+use itertools::PeekingNext;
+
+use super::statement_node::{ExpressionStatement, PrintStatement, Statement};
 use crate::{
-    ast::parser::{ASTParsingError, Parser, Result},
-    lexer::token,
+    ast::{
+        parser::{InnerASTParsingError, Parser, Result},
+        statement::statement_node::VariableAssignmentStatement,
+    },
+    lexer::token::{self, Token, TokenType},
 };
 
 impl<'a, T: Iterator<Item = token::Token<'a>>> Parser<'a, T> {
     pub fn statement(&mut self) -> Result<Statement> {
-        if let Some(_) = self
-            .token_iter
-            .next_if(|t| matches!(t.token_type, token::TokenType::Print))
-        {
-            self.print_statement()
-        } else {
-            self.expression_statement()
-        }
+        self.print_statement()
     }
 
     fn print_statement(&mut self) -> Result<Statement> {
-        let expr = self.expression()?;
-        if let token::TokenType::Semicolon = self.token_iter.peek().unwrap().token_type {
-            self.token_iter.next();
-            Ok(PrintStatement::new(expr).into())
-        } else {
-            panic!();
+        if let Some(Token { location: loc, .. }) = self
+            .token_iter
+            .peeking_next(|t| matches!(t.token_type, token::TokenType::Print))
+        {
+            let expr = self.expression()?;
+            if let token::TokenType::Semicolon = self.token_iter.peek().token_type {
+                self.token_iter.next();
+                Ok(PrintStatement::new(expr).into())
+            } else {
+                Err(loc.wrap(InnerASTParsingError::ExpectedSemi))
+            }
+        } else {
+            self.variable_assignment_statement()
+        }
+    }
+
+    fn variable_assignment_statement(&mut self) -> Result<Statement> {
+        if matches!(
+            self.token_iter.peek_nth(0).token_type,
+            TokenType::Identifier(_)
+        ) && matches!(self.token_iter.peek_nth(1).token_type, TokenType::Equal)
+        {
+            let ident = if let TokenType::Identifier(ident) = self.token_iter.next().token_type {
+                ident
+            } else {
+                unreachable!()
+            };
+            self.token_iter.next();
+            let expr = self.expression()?;
+
+            let token = self.token_iter.peek();
+            if let token::TokenType::Semicolon = token.token_type {
+                self.token_iter.next();
+                Ok(VariableAssignmentStatement::new(ident, expr).into())
+            } else {
+                Err(token.location.wrap(InnerASTParsingError::ExpectedSemi))
+            }
+        } else {
+            self.expression_statement()
         }
     }
 
     fn expression_statement(&mut self) -> Result<Statement> {
         let expr = self.expression()?;
-        if let token::TokenType::Semicolon = self.token_iter.peek().unwrap().token_type {
+        let token = self.token_iter.peek();
+        if let TokenType::Semicolon = token.token_type {
             self.token_iter.next();
             Ok(ExpressionStatement::new(expr).into())
         } else {
-            panic!();
+            Err(token.location.wrap(InnerASTParsingError::ExpectedSemi))
         }
     }
 }
@@ -1,9 +1,7 @@
-use super::{RuntimeError, types::Value};
 use super::Interpret;
+use super::{types::Value, RuntimeError};
 use crate::ast::expression::expression_node;
 
-
-
 impl Interpret for expression_node::ExpressionNode {
     fn interpret(&self) -> Result<Value, RuntimeError> {
         expression_node::all_variants!(self, n => n.interpret())
@@ -1,7 +1,7 @@
 mod expression_interpreter;
 mod statement_interpreter;
-pub use super::{error::RuntimeError, types};
 use super::types::Value;
+pub use super::{error::RuntimeError, types};
 
 pub trait Interpret {
     fn interpret(&self) -> Result<Value, RuntimeError>;
@@ -1,16 +1,13 @@
-use super::{RuntimeError, types::Value};
 use super::Interpret;
+use super::{types::Value, RuntimeError};
 use crate::ast::statement::statement_node;
 
-
-
 impl Interpret for statement_node::Statement {
     fn interpret(&self) -> Result<Value, RuntimeError> {
         statement_node::all_variants!(self, n => n.interpret())
     }
 }
 
-
 impl Interpret for statement_node::PrintStatement {
     fn interpret(&self) -> Result<Value, RuntimeError> {
         let res = self.0.interpret()?;
@@ -24,3 +21,10 @@ impl Interpret for statement_node::ExpressionStatement {
         self.0.interpret()
     }
 }
+
+impl Interpret for statement_node::VariableAssignmentStatement {
+    fn interpret(&self) -> Result<Value, RuntimeError> {
+        let expr_val = self.node.interpret()?;
+        Ok(expr_val)
+    }
+}
@@ -1,3 +0,0 @@
-struct Environment {
-
-}
@@ -1,4 +1,3 @@
 pub mod ast_walker;
-pub mod types;
 pub mod error;
-mod environment;
+pub mod types;
@@ -42,17 +42,24 @@ impl<'a> LexerIter<'a> {
 
     pub fn as_str_while<F: FnMut(char) -> bool>(&mut self, mut predicate: F) -> &'a str {
         let str = self.inner.as_str();
-        let mut end_indice = 0;
-        for (i, c) in str.char_indices() {
-            end_indice = i;
-            if !predicate(c) {
-                break;
-            }
-        }
+        let mut iter = str.char_indices();
+        let end_indice = loop {
+            match iter.next() {
+                Some((i, c)) => {
+                    if !predicate(c) {
+                        break i;
+                    }
+                }
+                None => {
+                    break iter.offset();
+                }
+            }
+        };
         unsafe {
             self.inner = str.get_unchecked(end_indice..).chars();
-            let res = str.get_unchecked(0..end_indice);
-            res
+            str.get_unchecked(0..end_indice) as _
         }
     }
 
|
||||||
.map(|v| self.get_token(token::TokenType::Int(v)))
|
.map(|v| self.get_token(token::TokenType::Int(v)))
|
||||||
.map_err(|_| LexingErrorKind::IntPrimitiveTooBig)
|
.map_err(|_| LexingErrorKind::IntPrimitiveTooBig)
|
||||||
};
|
};
|
||||||
return res.map(|v| Some(v)).map_err(|e| self.get_error(e))
|
return res.map(Some).map_err(|e| self.get_error(e));
|
||||||
/*
|
/*
|
||||||
|
|
||||||
Err(IntErrorKind::PosOverflow) | Err(IntErrorKind::NegOverflow) => return Err(self.get_error(LexingErrorKind::IntPrimitiveTooBig)),
|
Err(IntErrorKind::PosOverflow) | Err(IntErrorKind::NegOverflow) => return Err(self.get_error(LexingErrorKind::IntPrimitiveTooBig)),
|
||||||
|
@@ -191,7 +191,7 @@ mod tests {
 
     #[test]
     fn test_int_literal_too_large() {
-        let mut lexer = Lexer::new("2222222222222222222", None);
+        let mut lexer = Lexer::new("2222222222222222222223", None);
         let errors = lexer.scan_tokens().unwrap_err();
         assert_eq!(errors.len(), 1);
         assert!(matches!(
@@ -6,6 +6,12 @@ pub struct Token<'a> {
     pub location: Location<'a>,
 }
 
+impl<'a> Token<'a> {
+    pub fn as_tuple<'b>(&'b self) -> (&'b TokenType, Location<'a>) {
+        (&self.token_type, self.location)
+    }
+}
+
 #[derive(Debug)]
 pub enum TokenType {
     LeftParen,
@@ -1,3 +1,4 @@
+#![feature(char_indices_offset)]
 pub mod ast;
 pub mod error;
 pub mod interpreter;
@@ -2,12 +2,12 @@ use crftng_intrprtrs::run;
 
 #[test]
 fn test_one_equality() {
-    run_check_result_eq_bool("1 == 1", true);
-    run_check_result_eq_bool("1 >= 1", true);
-    run_check_result_eq_bool("1 <= 1", true);
-    run_check_result_eq_bool("1 != 1", false);
-    run_check_result_eq_bool("1 > 1", false);
-    run_check_result_eq_bool("1 < 1", false);
+    run_check_result_eq_bool("1 == 1;", true);
+    run_check_result_eq_bool("1 >= 1;", true);
+    run_check_result_eq_bool("1 <= 1;", true);
+    run_check_result_eq_bool("1 != 1;", false);
+    run_check_result_eq_bool("1 > 1;", false);
+    run_check_result_eq_bool("1 < 1;", false);
 }
 
 fn run_check_result_eq_bool(code: &str, value: bool) {