about summary refs log tree commit diff
path: root/users/tazjin/rlox/src/scanner.rs
diff options
context:
space:
mode:
Diffstat (limited to 'users/tazjin/rlox/src/scanner.rs')
-rw-r--r--users/tazjin/rlox/src/scanner.rs123
1 file changed, 123 insertions, 0 deletions
diff --git a/users/tazjin/rlox/src/scanner.rs b/users/tazjin/rlox/src/scanner.rs
new file mode 100644
index 000000000000..c1809010547a
--- /dev/null
+++ b/users/tazjin/rlox/src/scanner.rs
@@ -0,0 +1,123 @@
+use crate::errors::{Error, ErrorKind};
+
/// The kinds of tokens produced by the Lox scanner.
///
/// Every variant is a unit variant, so `Clone`/`Copy`/`PartialEq`/`Eq`
/// are derived for free: token kinds can be duplicated and compared
/// (e.g. by a parser) without ceremony. This is backward-compatible
/// with the previous `Debug`-only derive.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenKind {
    // Single-character tokens.
    LeftParen,
    RightParen,
    LeftBrace,
    RightBrace,
    Comma,
    Dot,
    Minus,
    Plus,
    Semicolon,
    Slash,
    Star,

    // One or two character tokens.
    Bang,
    BangEqual,
    Equal,
    EqualEqual,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,

    // Literals.
    Identifier,
    String,
    Number,

    // Keywords.
    And,
    Class,
    Else,
    False,
    Fun,
    For,
    If,
    Nil,
    Or,
    Print,
    Return,
    Super,
    This,
    True,
    Var,
    While,

    // Special things
    Eof,
}
+
/// A single scanned token: its kind, the exact source text it was
/// scanned from, and the line it appeared on.
#[derive(Debug)]
pub struct Token<'a> {
    kind: TokenKind,
    /// Slice of the original source covered by this token; borrows
    /// from the scanner's input string for the lifetime `'a`.
    lexeme: &'a str,
    // literal: Object, // TODO(tazjin): Uhh?
    /// Source line the token was scanned on, copied from
    /// `Scanner::line` at the time the token was added.
    line: usize,
}
+
/// Internal scanner state: walks `source` once from left to right,
/// accumulating tokens and any scan errors encountered along the way
/// (errors are collected rather than aborting on the first one).
struct Scanner<'a> {
    source: &'a str,
    tokens: Vec<Token<'a>>, // produced tokens; lexemes borrow from `source`
    errors: Vec<Error>,     // all errors seen so far, reported in bulk later
    start: usize,   // offset of first character in current lexeme
    current: usize, // current offset into source
    line: usize,    // current line in source
}
+
+impl<'a> Scanner<'a> {
+    fn is_at_end(&self) -> bool {
+        return self.current >= self.source.len();
+    }
+
+    fn advance(&mut self) -> char {
+        self.current += 1;
+
+        // TODO(tazjin): Due to utf8-safety, this is a bit annoying.
+        // Since string iteration is not the point here I'm just
+        // leaving this as is for now.
+        self.source.chars().nth(self.current - 1).unwrap()
+    }
+
+    fn add_token(&mut self, kind: TokenKind) {
+        let lexeme = &self.source[self.start..self.current];
+        self.tokens.push(Token {
+            kind,
+            lexeme,
+            line: self.line,
+        })
+    }
+
+    fn scan_token(&mut self) {
+        match self.advance() {
+            '(' => self.add_token(TokenKind::LeftParen),
+            ')' => self.add_token(TokenKind::RightParen),
+            '{' => self.add_token(TokenKind::LeftBrace),
+            '}' => self.add_token(TokenKind::RightBrace),
+            ',' => self.add_token(TokenKind::Comma),
+            '.' => self.add_token(TokenKind::Dot),
+            '-' => self.add_token(TokenKind::Minus),
+            '+' => self.add_token(TokenKind::Plus),
+            ';' => self.add_token(TokenKind::Semicolon),
+            '*' => self.add_token(TokenKind::Star),
+
+            unexpected => self.errors.push(Error {
+                line: self.line,
+                kind: ErrorKind::UnexpectedChar(unexpected),
+            }),
+        };
+    }
+
+    fn scan_tokens(mut self) -> Vec<Token<'a>> {
+        while !self.is_at_end() {
+            self.start = self.current;
+            self.scan_token();
+        }
+
+        return self.tokens;
+    }
+}