-rw-r--r--  users/tazjin/rlox/src/errors.rs    14
-rw-r--r--  users/tazjin/rlox/src/main.rs       2
-rw-r--r--  users/tazjin/rlox/src/scanner.rs  123
3 files changed, 139 insertions, 0 deletions
diff --git a/users/tazjin/rlox/src/errors.rs b/users/tazjin/rlox/src/errors.rs
new file mode 100644
index 000000000000..46c739ef2f46
--- /dev/null
+++ b/users/tazjin/rlox/src/errors.rs
@@ -0,0 +1,14 @@
+#[derive(Debug)]
+pub enum ErrorKind {
+    UnexpectedChar(char),
+}
+
+#[derive(Debug)]
+pub struct Error {
+    pub line: usize,
+    pub kind: ErrorKind,
+}
+
+pub fn report(loc: &str, err: &Error) {
+    eprintln!("[line {}] Error {}: {:?}", err.line, loc, err.kind);
+}
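
The error type above is just a line number plus a kind, and report() prints both via Debug. As a rough illustration (not part of this commit), a hypothetical call site could look like this:

    use crate::errors::{report, Error, ErrorKind};

    fn demo() {
        // Build an error the way the scanner does and hand it to report()
        // together with a location string.
        let err = Error {
            line: 4,
            kind: ErrorKind::UnexpectedChar('@'),
        };

        // Prints: [line 4] Error scanner: UnexpectedChar('@')
        report("scanner", &err);
    }
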
diff --git a/users/tazjin/rlox/src/main.rs b/users/tazjin/rlox/src/main.rs
index 83d220c81631..b14ed97d5eda 100644
--- a/users/tazjin/rlox/src/main.rs
+++ b/users/tazjin/rlox/src/main.rs
@@ -4,7 +4,9 @@ use std::io;
 use std::io::Write;
 use std::process;
 
+mod errors;
 mod interpreter;
+mod scanner;
 
 fn main() {
     let mut args = env::args();
diff --git a/users/tazjin/rlox/src/scanner.rs b/users/tazjin/rlox/src/scanner.rs
new file mode 100644
index 000000000000..c1809010547a
--- /dev/null
+++ b/users/tazjin/rlox/src/scanner.rs
@@ -0,0 +1,123 @@
+use crate::errors::{Error, ErrorKind};
+
+#[derive(Debug)]
+pub enum TokenKind {
+    // Single-character tokens.
+    LeftParen,
+    RightParen,
+    LeftBrace,
+    RightBrace,
+    Comma,
+    Dot,
+    Minus,
+    Plus,
+    Semicolon,
+    Slash,
+    Star,
+
+    // One or two character tokens.
+    Bang,
+    BangEqual,
+    Equal,
+    EqualEqual,
+    Greater,
+    GreaterEqual,
+    Less,
+    LessEqual,
+
+    // Literals.
+    Identifier,
+    String,
+    Number,
+
+    // Keywords.
+    And,
+    Class,
+    Else,
+    False,
+    Fun,
+    For,
+    If,
+    Nil,
+    Or,
+    Print,
+    Return,
+    Super,
+    This,
+    True,
+    Var,
+    While,
+
+    // Special tokens.
+    Eof,
+}
+
+#[derive(Debug)]
+pub struct Token<'a> {
+    kind: TokenKind,
+    lexeme: &'a str,
+    // literal: Object, // TODO(tazjin): how should literal values be represented?
+    line: usize,
+}
+
+struct Scanner<'a> {
+    source: &'a str,
+    tokens: Vec<Token<'a>>,
+    errors: Vec<Error>,
+    start: usize,   // offset of first character in current lexeme
+    current: usize, // current offset into source
+    line: usize,    // current line in source
+}
+
+impl<'a> Scanner<'a> {
+    fn is_at_end(&self) -> bool {
+        self.current >= self.source.len()
+    }
+
+    fn advance(&mut self) -> char {
+        self.current += 1;
+
+        // TODO(tazjin): Because of UTF-8 safety this is a bit awkward:
+        // it is O(n) per call and only lines up with the byte offsets
+        // used in add_token for ASCII-only sources. Left as-is for now.
+        self.source.chars().nth(self.current - 1).unwrap()
+    }
+
+    fn add_token(&mut self, kind: TokenKind) {
+        let lexeme = &self.source[self.start..self.current];
+        self.tokens.push(Token {
+            kind,
+            lexeme,
+            line: self.line,
+        })
+    }
+
+    fn scan_token(&mut self) {
+        match self.advance() {
+            '(' => self.add_token(TokenKind::LeftParen),
+            ')' => self.add_token(TokenKind::RightParen),
+            '{' => self.add_token(TokenKind::LeftBrace),
+            '}' => self.add_token(TokenKind::RightBrace),
+            ',' => self.add_token(TokenKind::Comma),
+            '.' => self.add_token(TokenKind::Dot),
+            '-' => self.add_token(TokenKind::Minus),
+            '+' => self.add_token(TokenKind::Plus),
+            ';' => self.add_token(TokenKind::Semicolon),
+            '*' => self.add_token(TokenKind::Star),
+
+            unexpected => self.errors.push(Error {
+                line: self.line,
+                kind: ErrorKind::UnexpectedChar(unexpected),
+            }),
+        };
+    }
+
+    fn scan_tokens(mut self) -> Vec<Token<'a>> {
+        while !self.is_at_end() {
+            self.start = self.current;
+            self.scan_token();
+        }
+
+        self.tokens
+    }
+}
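
For orientation, here is a rough sketch of how the scanner might be driven once it has a public entry point. The field initializers and the scan_tokens() call follow the definitions above (assuming line counting starts at 1); the wrapper function itself is hypothetical and, since Scanner and its fields are module-private, it would have to live inside scanner.rs. Note that scan_tokens() currently returns only the tokens, so any collected errors are dropped here:

    // Hypothetical helper, not part of this commit.
    fn scan<'a>(source: &'a str) -> Vec<Token<'a>> {
        let scanner = Scanner {
            source,
            tokens: vec![],
            errors: vec![],
            start: 0,
            current: 0,
            line: 1,
        };

        // Consumes the scanner; for a source like "(+;" this yields
        // LeftParen, Plus and Semicolon tokens.
        scanner.scan_tokens()
    }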