raw identifiers, comment out -- and ++ tokens
parent 122f8ff7f1
commit 6e0fed0962

@@ -217,9 +217,9 @@ tokens!(pub Token: {
     Bang => "!",
     Tilde => "~",
     Plus => "+",
-    PlusPlus => "++",
+    // PlusPlus => "++",
     Minus => "-",
-    MinusMinus => "--",
+    // MinusMinus => "--",
     Star => "*",
     Slash => "/",
     Percent => "%",
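A side note, not part of the diff: identifiers in this lexer can contain '-' (see the idents and ident_minus_ambiguity tests added further down), so keeping MinusMinus would make input such as "--a" ambiguous between a double minus and a dashed identifier. PlusPlus is commented out alongside it, and the updated test_iterator below now expects single Plus tokens instead. A rough sketch of the behaviour the new tests pin down, comparing token kinds only (the tests in this commit do not assert the slices):

    let kinds = TokenIterator::new("--a - a")
        .map(|(tok, _)| tok)
        .collect::<Vec<_>>();
    // "--a" lexes as a single identifier; a free-standing "-" is still Token::Minus.
    assert_eq!(kinds, vec![Token::Ident, Token::Minus, Token::Ident]);
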
@@ -439,6 +439,18 @@ impl<'a> Iterator for TokenIterator<'a> {
 
                 Some((token, &self.source[start..self.offset]))
             }
+            Some('`') => {
+                // raw identifier
+                self.skip(1);
+                self.skip_while(|c| is_things::is_id_continue(c));
+                if self.peekable_source().next() == Some('`') {
+                    self.skip(1);
+                    Some((Token::Ident, &self.source[start..self.offset]))
+                } else {
+                    // unterminated raw identifier
+                    Some((Token::ParseError, &self.source[start..self.offset]))
+                }
+            }
             // `//`-style comments or doc-comments
             _ => match self.parse().map(|tok| match tok {
                 Token::SlashSlash => {
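A minimal sketch, not part of the diff, of what the new backtick branch implies, reusing the Token variants and TokenIterator from this commit and assuming whitespace-separated input as in the existing tests; the test name is invented for illustration:

    #[test]
    fn raw_ident_sketch() {
        // A terminated raw identifier comes back as Token::Ident,
        // with both backticks kept in the returned slice.
        let mut lexer = TokenIterator::new("`raw1`");
        assert_eq!(lexer.next(), Some((Token::Ident, "`raw1`")));
        assert_eq!(lexer.next(), None);

        // Without the closing backtick, the else branch above yields ParseError.
        let mut lexer = TokenIterator::new("`raw1");
        assert_eq!(lexer.next(), Some((Token::ParseError, "`raw1")));
    }
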
@@ -486,20 +498,42 @@ mod tests {
 
     #[test]
     fn test_iterator() {
-        let tokens = "fn let void+++(++bool)";
+        let tokens = "fn let void+(+bool)";
         let mut lexer = TokenIterator::new(&tokens);
         assert_eq!(lexer.next(), Some((Token::Fn, "fn")));
         assert_eq!(lexer.next(), Some((Token::Let, "let")));
         assert_eq!(lexer.next(), Some((Token::Void, "void")));
-        assert_eq!(lexer.next(), Some((Token::PlusPlus, "++")));
         assert_eq!(lexer.next(), Some((Token::Plus, "+")));
         assert_eq!(lexer.next(), Some((Token::OpenParens, "(")));
-        assert_eq!(lexer.next(), Some((Token::PlusPlus, "++")));
+        assert_eq!(lexer.next(), Some((Token::Plus, "+")));
         assert_eq!(lexer.next(), Some((Token::Bool, "bool")));
         assert_eq!(lexer.next(), Some((Token::CloseParens, ")")));
         assert_eq!(lexer.next(), None);
     }
+
+    #[test]
+    fn idents() {
+        let lexer = TokenIterator::new("a a1 a_ a-b _a _1 _- -a -1 -_ `123");
+        assert!(lexer.map(|(tok, _)| tok).all(|tok| tok == Token::Ident));
+    }
+
+    #[test]
+    fn ident_minus_ambiguity() {
+        let lexer = TokenIterator::new("a-a a- - a -a --a");
+        let tokens = lexer.map(|(tok, _)| tok).collect::<Vec<_>>();
+        assert_eq!(
+            tokens,
+            vec![
+                Token::Ident,
+                Token::Ident,
+                Token::Minus,
+                Token::Ident,
+                Token::Ident,
+                Token::Ident
+            ]
+        );
+    }
 
     #[test]
     fn complex_iterator() {
         let tokens = "fn my-function(x: i32, y: f32) -> f32 { return x + y; }";