Skip to content

Commit 8a20eb0

Browse files
committed
Add unit tests.
* try to cover as many tokenizer inputs as possible * add conditional flags to derive only during tests
1 parent 51e823f commit 8a20eb0

File tree

1 file changed

+123
-0
lines changed

1 file changed

+123
-0
lines changed

src/lib.rs

+123
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ use std::iter::Iterator;
1212
use std::str::Chars;
1313

1414
#[derive(PartialEq)]
15+
#[cfg_attr(test, derive(Debug))]
1516
/// List of tokens.
1617
pub enum Token {
1718
/// Represent the '+' opcode
@@ -70,3 +71,125 @@ impl<'a> Iterator for Tokenizer<'a> {
7071
}
7172
}
7273
}
74+
75+
#[cfg(test)]
mod tests {
    use super::*;

    /// Run the tokenizer over `src` and collect every produced token.
    ///
    /// `Tokenizer` implements `Iterator`, so `collect()` is called on it
    /// directly; the `.into_iter()` used previously was a no-op
    /// (clippy: `useless_conversion`).
    fn lex(src: &str) -> Vec<Token> {
        Tokenizer::new(src).collect()
    }

    #[test]
    fn check_parsing_plus() {
        assert_eq!(lex("+"), vec![Token::PLUS]);
    }

    #[test]
    fn check_parsing_minus() {
        assert_eq!(lex("-"), vec![Token::MINUS]);
    }

    #[test]
    fn check_parsing_left() {
        assert_eq!(lex("<"), vec![Token::LEFT]);
    }

    #[test]
    fn check_parsing_right() {
        assert_eq!(lex(">"), vec![Token::RIGHT]);
    }

    #[test]
    fn check_parsing_scond() {
        assert_eq!(lex("["), vec![Token::SCOND]);
    }

    #[test]
    fn check_parsing_econd() {
        assert_eq!(lex("]"), vec![Token::ECOND]);
    }

    #[test]
    fn check_parsing_print() {
        assert_eq!(lex("."), vec![Token::PRINT]);
    }

    #[test]
    fn check_parsing_input() {
        assert_eq!(lex(","), vec![Token::INPUT]);
    }

    /// An empty source must yield no tokens at all.
    #[test]
    fn check_empty_string() {
        assert_eq!(lex(""), Vec::new());
    }

    /// A program mixing every opcode must be tokenized in source order.
    #[test]
    fn check_correct_program() {
        let expected = vec![
            Token::RIGHT,
            Token::PLUS,
            Token::MINUS,
            Token::LEFT,
            Token::PLUS,
            Token::SCOND,
            Token::MINUS,
            Token::ECOND,
        ];

        assert_eq!(lex(">+-<+[-]"), expected);
    }

    /// Characters that are not opcodes are silently skipped, so only the
    /// valid opcodes embedded in the noise come back out.
    #[test]
    fn check_valid_with_incorrect_chars() {
        assert_eq!(lex("AZERTY+123456-"), vec![Token::PLUS, Token::MINUS]);
    }

    /// Whitespace is not an opcode, so a blank source yields no tokens.
    #[test]
    fn check_spaces() {
        assert_eq!(lex("      "), Vec::new());
    }
}

0 commit comments

Comments
 (0)