Skip to content

Commit

Permalink
Minimal Pratt Parser is done!!!
Browse files Browse the repository at this point in the history
  • Loading branch information
PakhomovAlexander committed Sep 24, 2024
1 parent 14fe6af commit f137a93
Show file tree
Hide file tree
Showing 4 changed files with 386 additions and 2 deletions.
7 changes: 7 additions & 0 deletions agenda.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,3 +2,10 @@







Today we are going to:
- [ ] Learn the Pratt parser algorithm
- [ ] Implement a minimal Pratt parser for `where` expressions
93 changes: 92 additions & 1 deletion src/parser/lexer/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
mod tokens;
pub mod tokens;

use tokens::Token;

Expand All @@ -13,12 +13,19 @@ pub struct Lexer<'a> {
current_position: usize,
is_finished: bool,
cache: Option<char>,
peeked: Option<Result<Token<'a>, LexError>>,
}

impl<'a> Iterator for Lexer<'a> {
type Item = Result<Token<'a>, LexError>;

fn next(&mut self) -> Option<Self::Item> {
if self.peeked.is_some() {
let peeked = self.peeked.clone();
self.peeked = None;
return peeked;
}

let c = self.move_and_skip_whitespace()?;

match c {
Expand Down Expand Up @@ -56,9 +63,21 @@ impl<'a> Lexer<'a> {
current_position: 0,
is_finished: false,
cache: None,
peeked: None,
}
}

/// Returns the next token without consuming it.
///
/// The first call after a `next()` lexes one token and caches it in
/// `self.peeked`; repeated calls hand back the same cached token until
/// `next()` consumes it. Cloning is required because the caller gets an
/// owned copy while the cache keeps its own.
pub fn peek(&mut self) -> Option<Result<Token<'a>, LexError>> {
    // Lazily fill the one-token lookahead cache, then serve from it.
    if self.peeked.is_none() {
        self.peeked = self.next();
    }
    self.peeked.clone()
}

fn get_next_and_increment(&mut self) -> Option<char> {
let c = self.input_iterator.next();
if c.is_some() {
Expand Down Expand Up @@ -465,6 +484,22 @@ mod tests {
assert_eq!(actual, expected);
}

// A two-operand arithmetic expression lexes into literal, operator, literal.
#[test]
fn simple_expr() {
    let tokens: Vec<Result<Token, LexError>> = Lexer::new("1 + 2").collect();

    assert_eq!(
        tokens,
        vec![
            Ok(Token::NumericLiteral("1".to_string())),
            Ok(Token::Plus),
            Ok(Token::NumericLiteral("2".to_string())),
        ]
    );
}

#[test]
fn lex_single_chars() {
let input = "* , ; ( ) = < > + - / % |";
Expand Down Expand Up @@ -885,4 +920,60 @@ mod tests {

assert_eq!(actual, expected);
}

// `peek` must be idempotent (repeated peeks yield the same token) and must
// not advance the stream; `next` consumes the peeked token and moves on.
#[test]
fn peek() {
    let mut lexer = Lexer::new("1 + 2");

    let one = Some(Ok(Token::NumericLiteral("1".to_string())));
    assert_eq!(lexer.peek(), one);
    // Peeking a second time returns the identical cached token.
    assert_eq!(lexer.peek(), one);
    // `next` hands out the token that was peeked.
    assert_eq!(lexer.next(), one);

    let plus = Some(Ok(Token::Plus));
    assert_eq!(lexer.peek(), plus);
    assert_eq!(lexer.next(), plus);

    assert_eq!(
        lexer.next(),
        Some(Ok(Token::NumericLiteral("2".to_string())))
    );
    // The stream is exhausted.
    assert_eq!(lexer.next(), None);
}

// Calling `next` alone (never `peek`) walks the full token stream in order
// and then reports exhaustion with `None`.
#[test]
fn next_only() {
    let mut lexer = Lexer::new("1 + 2");

    for expected in vec![
        Token::NumericLiteral("1".to_string()),
        Token::Plus,
        Token::NumericLiteral("2".to_string()),
    ] {
        assert_eq!(lexer.next(), Some(Ok(expected)));
    }

    assert_eq!(lexer.next(), None);
}
}
1 change: 1 addition & 0 deletions src/parser/lexer/tokens.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#[derive(Debug, PartialEq, Clone)]

pub enum Token<'a> {
Select,
From,
Expand Down
Loading

0 comments on commit f137a93

Please sign in to comment.