Mirror of https://github.com/rm-dr/daisy, synced 2025-02-21 22:49:24 -08:00
Fixed another tokenize bug
parent b71cbcafcd
commit 8a58d2182a
@@ -38,12 +38,12 @@ pub fn p_tokenize(input: &String) -> VecDeque<Token> {
 		// Needs special treatment.
 		'-' => {
 			if t.is_some() { g.push_back(update_line_location(t.unwrap(), i)); }
-			match g.back().as_ref().unwrap() {
+			match g.back().as_ref() {
 				// If previous token was any of the following,
 				// this is the "minus" operator
-				Token::PreNumber(_, _) |
-				Token::PreGroup(_, _) |
-				Token::PreWord(_, _) => {
+				Some(Token::PreNumber(_, _)) |
+				Some(Token::PreGroup(_, _)) |
+				Some(Token::PreWord(_, _)) => {
					t = Some(Token::PreOperator(
						LineLocation{pos: i, len: 1},
						Operator::Subtract
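
The fix matches on the Option returned by g.back() instead of calling .unwrap() on it, so an empty token queue (for instance, an expression that begins with '-') falls through to the other arms instead of panicking on None. A minimal sketch of that pattern, using hypothetical Token variants and a classify_minus helper that are not daisy's actual types:

	use std::collections::VecDeque;

	// Hypothetical stand-ins for daisy's pre-tokens; names are illustrative only.
	#[allow(dead_code)]
	enum Token {
		Number(String),
		Operator(char),
	}

	// Decide whether '-' is binary subtraction or unary negation by peeking at
	// the previous token. Matching on the Option itself (rather than unwrapping)
	// means an empty queue reaches the "negate" arm instead of panicking.
	fn classify_minus(prev: &VecDeque<Token>) -> &'static str {
		match prev.back() {
			Some(Token::Number(_)) => "subtract",
			// Covers None (start of input) as well as Some(Operator(_)).
			_ => "negate",
		}
	}

	fn main() {
		let mut q: VecDeque<Token> = VecDeque::new();
		assert_eq!(classify_minus(&q), "negate");   // "-3": no previous token, no panic
		q.push_back(Token::Number("2".into()));
		assert_eq!(classify_minus(&q), "subtract"); // "2-3": binary subtraction
		q.push_back(Token::Operator('*'));
		assert_eq!(classify_minus(&q), "negate");   // "2*-3": negation after an operator
		println!("ok");
	}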