mirror of https://github.com/rm-dr/daisy
Fixed another tokenize bug
parent b71cbcafcd
commit 8a58d2182a
@@ -38,12 +38,12 @@ pub fn p_tokenize(input: &String) -> VecDeque<Token> {
 			// Needs special treatment.
 			'-' => {
 				if t.is_some() { g.push_back(update_line_location(t.unwrap(), i)); }
-				match g.back().as_ref().unwrap() {
+				match g.back().as_ref() {
 					// If previous token was any of the following,
 					// this is the "minus" operator
-					Token::PreNumber(_, _) |
-					Token::PreGroup(_, _) |
-					Token::PreWord(_, _) => {
+					Some(Token::PreNumber(_, _)) |
+					Some(Token::PreGroup(_, _)) |
+					Some(Token::PreWord(_, _)) => {
 						t = Some(Token::PreOperator(
 							LineLocation{pos: i, len: 1},
 							Operator::Subtract
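The fix replaces `match g.back().as_ref().unwrap()` with a match on the `Option` itself: when `'-'` is the very first token, the queue `g` is empty, `back()` returns `None`, and the old `unwrap()` panicked. Matching `Some(...)` patterns lets an empty queue fall through to the unary-minus handling instead. A minimal, self-contained sketch of the same pattern follows; the simplified `Token`/`Operator` enums, the `classify_minus` helper, and the `Negative` variant are illustrative stand-ins, not daisy's actual API:

```rust
use std::collections::VecDeque;

// Simplified stand-ins for daisy's token types; the real enums carry
// LineLocation data and more variants (PreGroup, PreWord, ...).
#[derive(Debug)]
enum Token {
    PreNumber(String),
    PreOperator(Operator),
}

#[derive(Debug)]
enum Operator {
    Subtract,
    Negative, // hypothetical unary-minus variant for this sketch
}

// Decide what a '-' means from the token that precedes it.
// Matching on the Option returned by back() handles the case where
// '-' is the first character of the input: back() yields None, so we
// fall through to Negative instead of panicking on unwrap().
fn classify_minus(g: &VecDeque<Token>) -> Operator {
    match g.back() {
        // A value-like token before '-' means binary subtraction.
        Some(Token::PreNumber(_)) => Operator::Subtract,
        // Anything else, including an empty queue, means unary negation.
        _ => Operator::Negative,
    }
}

fn main() {
    // "-3": no previous token, so '-' is a negation (old code panicked here).
    let empty: VecDeque<Token> = VecDeque::new();
    println!("{:?}", classify_minus(&empty)); // Negative

    // "2-3": previous token is a number, so '-' is subtraction.
    let mut after_number = VecDeque::new();
    after_number.push_back(Token::PreNumber("2".into()));
    println!("{:?}", classify_minus(&after_number)); // Subtract
}
```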