Completed main; added the scanner and token stubs

This commit is contained in:
Sander Hautvast 2020-01-24 12:48:51 +01:00
parent 65ebb63698
commit 4e7b684b0d
3 changed files with 117 additions and 2 deletions

View file

@ -3,6 +3,9 @@ use std::fs::File;
use std::io::{self, BufRead, Read, Write};
use std::process;
mod scanner;
mod tokens;
/// main
/// no arguments: run interactively
/// 1 argument: run the script file specified
@ -71,6 +74,16 @@ fn run_prompt() {
}
/// start interpreting and running the script
fn run(_source: String) -> Result<&'static str, &'static str> {
Ok("")
fn run(source: String) -> Result<&'static str, &'static str> {
return match scanner::scan_tokens(source.as_str()) {
Ok(tokens) => {
for token in tokens {
println!("{:?}", token);
}
Ok("Ok")
}
Err(code) => {
Err(code)
}
};
}

65
src/scanner.rs Normal file
View file

@ -0,0 +1,65 @@
use crate::tokens::{Token, TokenType};
use crate::tokens::TokenType::*;
/// Public entry point for scanning lox source text.
///
/// Produces the token stream for `source`, always terminated by a single
/// EOF token. Returns `Err` with a static message when any scanning error
/// occurred along the way.
pub fn scan_tokens(source: &str) -> Result<Vec<Token>, &'static str> {
    let mut scanner = Scanner::new(source);

    // Repeatedly scan starting at the first unconsumed character until the
    // source is exhausted.
    while !scanner.is_at_end() {
        scanner.start = scanner.current;
        scanner.scan_token();
    }

    // Every token stream ends with exactly one EOF marker.
    let eof = Token {
        token_type: EOF,
        lexeme: "lexeme",
        literal: Box::new(""),
        line: scanner.line,
    };
    scanner.tokens.push(eof);

    match scanner.error_occured {
        false => Ok(scanner.tokens),
        true => Err("Error occurred"),
    }
}
/// Struct used internally to keep state while scanning.
/// Created once per `scan_tokens` call and discarded afterwards.
struct Scanner<'a> {
    // the source to scan
    source: &'a str,
    // the tokens that will be the output of the scan function
    tokens: Vec<Token<'a>>,
    // start of unscanned source (updated after part of the source was scanned)
    start: usize,
    // current character index while scanning
    current: usize,
    // flag indicating compilation error
    // NOTE(review): field name has a typo ("occured"); renaming would touch every user of this struct
    error_occured: bool,
    // current line (mainly used to report the line after a compilation error occurred)
    line: usize,
}
impl Scanner<'_> {
    /// Create a Scanner over `source`, positioned at the first byte of line 1.
    fn new(source: &str) -> Scanner {
        Scanner { tokens: Vec::new(), source, start: 0, current: 0, line: 1, error_occured: false }
    }

    /// Scans the source, character by character.
    /// Stub: consumes exactly one byte so that the scan loop in
    /// `scan_tokens` always terminates; no tokens are produced yet.
    fn scan_token(&mut self) {
        self.current += 1;
    }

    /// Adds a token of the given type.
    /// Stub: intentionally does nothing yet (never called while
    /// `scan_token` produces no tokens).
    fn add_token(&mut self, _token_type: TokenType) {}

    /// Returns true iff the end of the source has been reached.
    // Previously this returned `true` unconditionally, which contradicted its
    // name and silently skipped the scan loop; now it reports the real position.
    fn is_at_end(&self) -> bool {
        self.current >= self.source.len()
    }
}

37
src/tokens.rs Normal file
View file

@ -0,0 +1,37 @@
use std::any::Any;
use std::fmt;
/// Struct that contains a single token produced by the scanner.
// NOTE(review): the original field comments were shifted one field off;
// they are realigned below.
pub struct Token<'a> {
    // the actual part of the source code that resulted in this token
    pub lexeme: &'a str,
    // literal value for numeric (ie 1, 2, 1.0 etc) and string (any quoted text) tokens
    pub literal: Box<dyn Any>,
    // the line that contains the code for this token instance
    pub line: usize,
    // the type of this token
    pub token_type: TokenType,
}
impl fmt::Debug for Token<'_> {
    /// Formats the token for diagnostics.
    ///
    /// `literal` is a `Box<dyn Any>`, so it is downcast to the string-like
    /// types the scanner actually stores (`String`, and the `&'static str`
    /// used for the EOF token). Anything else is rendered as a placeholder:
    /// `dyn Any` itself has no `Debug` implementation, so the previous
    /// `format!("{:?}", self.literal)` fallback did not compile.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let lit = if let Some(s) = self.literal.downcast_ref::<String>() {
            s.clone()
        } else if let Some(s) = self.literal.downcast_ref::<&str>() {
            (*s).to_string()
        } else {
            String::from("<non-string literal>")
        };
        write!(f, "Token [ type: {:?}, lexeme: {}, literal: {}, line: {} ]", self.token_type, self.lexeme, lit, self.line)
    }
}
/// The lexical category of a [`Token`].
/// Only EOF exists so far; further variants will be added as the scanner grows.
#[derive(Debug, Clone, Copy)]
pub enum TokenType {
    // marks the end of the token stream; always the last token emitted
    EOF
}