Fixed another tokenize bug

pull/2/head
Mark 2023-03-26 10:15:15 -07:00
parent b71cbcafcd
commit 8a58d2182a
Signed by: Mark
GPG Key ID: AD62BB059C2AAEE4
1 changed file with 4 additions and 4 deletions

@@ -38,12 +38,12 @@ pub fn p_tokenize(input: &String) -> VecDeque<Token> {
 			// Needs special treatment.
 			'-' => {
 				if t.is_some() { g.push_back(update_line_location(t.unwrap(), i)); }
-				match g.back().as_ref().unwrap() {
+				match g.back().as_ref() {
 					// If previous token was any of the following,
 					// this is the "minus" operator
-					Token::PreNumber(_, _) |
-					Token::PreGroup(_, _) |
-					Token::PreWord(_, _) => {
+					Some(Token::PreNumber(_, _)) |
+					Some(Token::PreGroup(_, _)) |
+					Some(Token::PreWord(_, _)) => {
 						t = Some(Token::PreOperator(
 							LineLocation{pos: i, len: 1},
 							Operator::Subtract
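
The fix matches on the Option returned by g.back() instead of calling .unwrap() on it, so an empty token queue (for example, when '-' is the first character of the input) can fall through to the other match arms rather than panicking. The following is a minimal, self-contained sketch of that pattern; the Tok enum and classify_minus function are hypothetical stand-ins, not code from this repository, and the Negative fallback is only illustrative since the non-Some arms are not shown in this hunk.

use std::collections::VecDeque;

// Hypothetical, simplified stand-in for the tokenizer's Token type.
#[derive(Debug)]
enum Tok {
    Number(i64),
    Minus,    // binary subtraction
    Negative, // unary negation
}

// Decide how to treat '-' based on the previously emitted token.
// Matching on the Option directly (as in the new code) handles an
// empty queue instead of panicking.
fn classify_minus(queue: &VecDeque<Tok>) -> Tok {
    match queue.back() {
        // A preceding number (or group/word in the real tokenizer)
        // means '-' is the subtraction operator.
        Some(Tok::Number(_)) => Tok::Minus,
        // Anything else, including an empty queue, is treated as
        // unary minus here (illustrative only).
        _ => Tok::Negative,
    }
}

fn main() {
    let empty: VecDeque<Tok> = VecDeque::new();
    // The old code's `.unwrap()` would panic on an empty queue;
    // matching on the Option does not.
    println!("{:?}", classify_minus(&empty)); // Negative

    let mut after_number = VecDeque::new();
    after_number.push_back(Tok::Number(3));
    println!("{:?}", classify_minus(&after_number)); // Minus
}

Matching on the Option also makes the empty-queue case explicit in the types, rather than relying on the caller to guarantee that a previous token always exists.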