Skip to content

Commit b134ee0

Browse files
committed
fix: introducing lexical errors
1 parent b67733c commit b134ee0

File tree

34 files changed

+338
-215
lines changed

34 files changed

+338
-215
lines changed

Cargo.lock

Lines changed: 40 additions & 19 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "monkey_interpreter"
3-
version = "0.15.0"
3+
version = "0.15.1"
44
description = "Implementation of an interpreter for the Monkey language written in Rust, currently under active development."
55
authors = ["C <[email protected]>"]
66
edition = "2021"
@@ -19,6 +19,9 @@ structopt = "0.3.25"
1919
log = "0.4.14"
2020
pretty_env_logger = "0.4.0"
2121

22+
# For custom errors.
23+
thiserror = "1.0.30"
24+
2225

2326
[dev-dependencies]
2427
rstest = "0.11.0"

README.md

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,5 @@
11
# Monkey Interpreter
2-
[![pipeline status](https://img.shields.io/badge/Version-0.15.0-blue)](https://gitlab.com/DeveloperC/monkey_interpreter/commits/master) [![pipeline status](https://gitlab.com/DeveloperC/monkey_interpreter/badges/master/pipeline.svg)](https://gitlab.com/DeveloperC/monkey_interpreter/commits/master) [![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) [![License: AGPL v3](https://img.shields.io/badge/License-AGPLv3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0)
2+
[![pipeline status](https://img.shields.io/badge/Version-0.15.1-blue)](https://gitlab.com/DeveloperC/monkey_interpreter/commits/master) [![pipeline status](https://gitlab.com/DeveloperC/monkey_interpreter/badges/master/pipeline.svg)](https://gitlab.com/DeveloperC/monkey_interpreter/commits/master) [![Conventional Commits](https://img.shields.io/badge/Conventional%20Commits-1.0.0-yellow.svg)](https://conventionalcommits.org) [![License: AGPL v3](https://img.shields.io/badge/License-AGPLv3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0)
33

44

55
![The Monkey Programming Language Logo](https://cloud.githubusercontent.com/assets/1013641/22617482/9c60c27c-eb09-11e6-9dfa-b04c7fe498ea.png)

src/evaluator/tests/macros.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,7 @@ macro_rules! assert_expected_returned_object {
55
insta::assert_debug_snapshot!(
66
$snapshot_name,
77
evaluator.evaluate(crate::syntax_analysis::SyntaxAnalysis::from(
8-
crate::lexical_analysis::LexicalAnalysis::from($code)
8+
crate::lexical_analysis::LexicalAnalysis::from($code).unwrap()
99
))
1010
);
1111
};

src/interpreter/mod.rs

Lines changed: 20 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -15,20 +15,26 @@ pub(crate) fn repl() {
1515
let mut evaluator = crate::evaluator::Evaluator::new();
1616

1717
loop {
18-
let tokens = LexicalAnalysis::from(&read());
19-
let abstract_syntax_tree = SyntaxAnalysis::from(tokens);
20-
21-
if !abstract_syntax_tree.syntax_parsing_errors.is_empty() {
22-
for error in abstract_syntax_tree
23-
.syntax_parsing_errors
24-
.iter()
25-
.enumerate()
26-
{
27-
error!("{:?}", error);
18+
match LexicalAnalysis::from(&read()) {
19+
Ok(tokens) => {
20+
let abstract_syntax_tree = SyntaxAnalysis::from(tokens);
21+
22+
if !abstract_syntax_tree.syntax_parsing_errors.is_empty() {
23+
for error in abstract_syntax_tree
24+
.syntax_parsing_errors
25+
.iter()
26+
.enumerate()
27+
{
28+
error!("{:?}", error);
29+
}
30+
} else {
31+
let object = evaluator.evaluate(abstract_syntax_tree);
32+
println!("{:?}", object);
33+
}
34+
}
35+
Err(error) => {
36+
error!("{}", error);
2837
}
29-
} else {
30-
let object = evaluator.evaluate(abstract_syntax_tree);
31-
println!("{:?}", object);
3238
}
3339
}
3440
}
@@ -41,7 +47,7 @@ fn read() -> String {
4147

4248
match stdin().read_line(&mut buffer) {
4349
Ok(_) => {}
44-
Err(error) => error!("Error reading user input: {}", error),
50+
Err(_) => error!("Unable to read user input from standard input."),
4551
}
4652

4753
buffer

src/lexical_analysis/mod.rs

Lines changed: 27 additions & 28 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,7 @@
11
use std::iter::{FromIterator, Peekable};
22
use std::str::Chars;
33

4+
use crate::lexical_analysis::model::lexical_error::LexicalError;
45
use crate::lexical_analysis::model::token::Token;
56
use crate::lexical_analysis::utilities::*;
67

@@ -15,19 +16,19 @@ pub(crate) struct LexicalAnalysis<'a> {
1516
}
1617

1718
impl<'a> LexicalAnalysis<'a> {
18-
pub(crate) fn from(code: &str) -> Vec<Token> {
19+
pub(crate) fn from(code: &str) -> Result<Vec<Token>, LexicalError> {
1920
let mut lexical_analysis = LexicalAnalysis {
2021
source_code: code.chars().peekable(),
2122
};
2223

2324
lexical_analysis.get_tokens()
2425
}
2526

26-
fn get_tokens(&mut self) -> Vec<Token> {
27+
fn get_tokens(&mut self) -> Result<Vec<Token>, LexicalError> {
2728
let mut tokens = Vec::new();
2829

2930
loop {
30-
let token = self.get_next_token();
31+
let token = self.get_next_token()?;
3132
let end_of_file = token == Token::EndOfFile;
3233
tokens.push(token);
3334

@@ -36,54 +37,52 @@ impl<'a> LexicalAnalysis<'a> {
3637
}
3738
}
3839

39-
tokens
40+
Ok(tokens)
4041
}
4142

42-
fn get_next_token(&mut self) -> Token {
43+
fn get_next_token(&mut self) -> Result<Token, LexicalError> {
4344
match self.get_next_character() {
4445
Some(character) => {
4546
debug!("Matching the character '{}'.", character);
4647
match character {
4748
'!' => {
48-
check_next_character!(self.source_code, '=', Token::NotEquals);
49-
Token::Not
49+
check_next_character!(self.source_code, '=', Ok(Token::NotEquals));
50+
Ok(Token::Not)
5051
}
51-
'-' => Token::Minus,
52-
'/' => Token::Divide,
53-
'*' => Token::Multiply,
54-
'>' => Token::GreaterThan,
55-
'<' => Token::LesserThan,
52+
'-' => Ok(Token::Minus),
53+
'/' => Ok(Token::Divide),
54+
'*' => Ok(Token::Multiply),
55+
'>' => Ok(Token::GreaterThan),
56+
'<' => Ok(Token::LesserThan),
5657
'=' => {
57-
check_next_character!(self.source_code, '=', Token::Equals);
58-
Token::Assign
58+
check_next_character!(self.source_code, '=', Ok(Token::Equals));
59+
Ok(Token::Assign)
5960
}
60-
'+' => Token::Plus,
61-
'(' => Token::OpeningRoundBracket,
62-
')' => Token::ClosingRoundBracket,
63-
'{' => Token::OpeningCurlyBracket,
64-
'}' => Token::ClosingCurlyBracket,
65-
',' => Token::Comma,
66-
';' => Token::SemiColon,
61+
'+' => Ok(Token::Plus),
62+
'(' => Ok(Token::OpeningRoundBracket),
63+
')' => Ok(Token::ClosingRoundBracket),
64+
'{' => Ok(Token::OpeningCurlyBracket),
65+
'}' => Ok(Token::ClosingCurlyBracket),
66+
',' => Ok(Token::Comma),
67+
';' => Ok(Token::SemiColon),
6768
_ => {
6869
if is_valid_identifier_character(character) {
6970
debug!("Parsing word from characters.");
70-
return get_keyword_token(&self.get_word(character));
71+
return Ok(get_keyword_token(&self.get_word(character)));
7172
}
7273

7374
if is_digit(character) {
7475
debug!("Parsing integer from characters.");
75-
return Token::Integer {
76+
return Ok(Token::Integer {
7677
literal: self.get_integer(character),
77-
};
78+
});
7879
}
7980

80-
Token::Illegal {
81-
literal: character.to_string(),
82-
}
81+
Err(LexicalError::IllegalCharacter(character))
8382
}
8483
}
8584
}
86-
None => Token::EndOfFile,
85+
None => Ok(Token::EndOfFile),
8786
}
8887
}
8988

Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,7 @@
1+
use thiserror::Error;
2+
3+
#[derive(Error, Debug)]
4+
pub enum LexicalError {
5+
#[error("Illegal character provided for lexical analysis {0:?}.")]
6+
IllegalCharacter(char),
7+
}

src/lexical_analysis/model/mod.rs

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -1 +1,2 @@
1+
pub(crate) mod lexical_error;
12
pub(crate) mod token;

src/lexical_analysis/model/token/mod.rs

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,5 @@
11
#[derive(Debug, Clone, PartialEq)]
22
pub(crate) enum Token {
3-
Illegal { literal: String },
43
EndOfFile,
54

65
// Identifiers

src/lexical_analysis/tests/mod.rs

Lines changed: 17 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -108,3 +108,20 @@ fn test_lexical_analysis_tokenization_for_special_characters(code: &str, snapsho
108108
fn test_empty_code() {
109109
assert_expected_returned_tokens!("", "test_empty_code");
110110
}
111+
112+
#[rstest(
113+
code,
114+
snapshot_name,
115+
case("let x = `2`;", "test_lexical_analysis_producing_lexical_errors_case1"),
116+
case(
117+
"let x = 2;\n let z = &x;",
118+
"test_lexical_analysis_producing_lexical_errors_case2"
119+
),
120+
case(
121+
"let x =| sum(2);",
122+
"test_lexical_analysis_producing_lexical_errors_case3"
123+
)
124+
)]
125+
fn test_lexical_analysis_producing_lexical_errors(code: &str, snapshot_name: &str) {
126+
assert_expected_returned_tokens!(code, snapshot_name);
127+
}

0 commit comments

Comments (0)