Added unit tests for the functionality so far
This commit is contained in:
parent
4557505999
commit
92e86032ca
3 changed files with 43 additions and 1 deletions
|
|
@ -6,6 +6,9 @@ use std::process;
|
|||
mod scanner;
|
||||
mod tokens;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
/// main
|
||||
/// no arguments: run interactively
|
||||
/// 1 argument: run the script file specified
|
||||
|
|
|
|||
39
src/tests.rs
Normal file
39
src/tests.rs
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
#[cfg(test)]
|
||||
use crate::scanner::scan_tokens;
|
||||
use crate::tokens::TokenType::*;
|
||||
|
||||
#[test]
|
||||
fn test_scan_empty_source() {
|
||||
let tokens = scan_tokens("").unwrap();
|
||||
assert_eq!(tokens.len(), 1);
|
||||
|
||||
let token = tokens.get(0).unwrap();
|
||||
assert_eq!(token.token_type, EOF);
|
||||
assert_eq!(token.line, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_single_char_tokens() {
|
||||
let tokens = scan_tokens(">").unwrap();
|
||||
assert_eq!(tokens.len(), 2);
|
||||
|
||||
let token = tokens.get(0).unwrap();
|
||||
assert_eq!(token.token_type, GREATER);
|
||||
assert_eq!(token.lexeme, ">");
|
||||
|
||||
let token = tokens.get(1).unwrap();
|
||||
assert_eq!(token.token_type, EOF);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_double_char_tokens() {
|
||||
let tokens = scan_tokens(">=").unwrap();
|
||||
assert_eq!(tokens.len(), 2);
|
||||
|
||||
let token = tokens.get(0).unwrap();
|
||||
assert_eq!(token.token_type, GREATEREQUAL);
|
||||
assert_eq!(token.lexeme, ">=");
|
||||
|
||||
let token = tokens.get(1).unwrap();
|
||||
assert_eq!(token.token_type, EOF);
|
||||
}
|
||||
|
|
@ -31,7 +31,7 @@ impl fmt::Debug for Token<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
#[derive(Eq, PartialEq, Debug, Clone, Copy)]
|
||||
pub enum TokenType {
|
||||
// Single-character tokens.
|
||||
LEFTPAREN, // (
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue