Skip to content

Commit fcf66c0

Browse files
committed
lexer: run rust-fmt.
1 parent 2cb6bf3 commit fcf66c0

1 file changed

Lines changed: 51 additions & 45 deletions

File tree

syncode/parsers/rust_parser/src/lexer.rs

Lines changed: 51 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@ use std::fmt;
88
/// Token struct to represent lexer tokens.
99
#[derive(Clone, Debug, PartialEq)]
1010
pub struct Token {
11-
pub value: String, // The content of the token.
12-
pub type_name: String, // The type of the token in the grammar, "" if unlexable.
11+
pub value: String, // The content of the token.
12+
pub type_name: String, // The type of the token in the grammar, "" if unlexable.
1313
pub start_pos: usize,
1414
pub end_pos: usize,
1515
pub line: usize,
@@ -370,16 +370,19 @@ impl Lexer {
370370
(line, column + value.chars().count())
371371
};
372372

373-
return Ok((Token {
374-
value: value.to_string(), // Convert &str to String
375-
type_name: type_name.to_string(), // Convert &str to String
376-
start_pos,
377-
end_pos,
378-
line: start_line,
379-
column: start_column,
380-
end_line,
381-
end_column,
382-
}, false));
373+
return Ok((
374+
Token {
375+
value: value.to_string(), // Convert &str to String
376+
type_name: type_name.to_string(), // Convert &str to String
377+
start_pos,
378+
end_pos,
379+
line: start_line,
380+
column: start_column,
381+
end_line,
382+
end_column,
383+
},
384+
false,
385+
));
383386
} else {
384387
// No match found. Suppose that everything left is the
385388
// remainder, which requires us to assume that the string does
@@ -388,17 +391,20 @@ impl Lexer {
388391
// the middle of a sequence of otherwise lexable forms (e.g. `1
389392
// + 0x + 3` in Python). If SynCode is doing its job correctly,
390393
// such a string should never be generated.
391-
return Ok((Token {
392-
type_name: "".to_string(),
393-
value: text[pos..].to_string(),
394-
start_pos: pos,
395-
end_pos: text.len(),
396-
line,
397-
column,
398-
// TODO: How to compute these values?
399-
end_line: usize::MAX,
400-
end_column: usize::MAX,
401-
}, true));
394+
return Ok((
395+
Token {
396+
type_name: "".to_string(),
397+
value: text[pos..].to_string(),
398+
start_pos: pos,
399+
end_pos: text.len(),
400+
line,
401+
column,
402+
// TODO: How to compute these values?
403+
end_line: usize::MAX,
404+
end_column: usize::MAX,
405+
},
406+
true,
407+
));
402408
}
403409
}
404410
}
@@ -433,36 +439,36 @@ impl Lexer {
433439
let (new_token, is_remainder) = self.next_token(text, pos, line, column)?;
434440

435441
if is_remainder {
436-
// We should quit early, because we've seen all there is to see.
437-
let elapsed = start_time.elapsed();
438-
eprintln!(
439-
"Rust lexing completed in {:?} - produced {} tokens",
440-
elapsed,
441-
tokens.len()
442-
);
442+
// We should quit early, because we've seen all there is to see.
443+
let elapsed = start_time.elapsed();
444+
eprintln!(
445+
"Rust lexing completed in {:?} - produced {} tokens",
446+
elapsed,
447+
tokens.len()
448+
);
443449
return Ok((tokens, new_token));
444450
}
445451

446-
// Otherwise, continue counting forward to get new tokens.
447-
pos = new_token.end_pos;
452+
// Otherwise, continue counting forward to get new tokens.
453+
pos = new_token.end_pos;
448454
line = new_token.end_line;
449455
column = new_token.end_column;
450456

451-
tokens.push(new_token.clone());
457+
tokens.push(new_token.clone());
452458

453-
// The remainder will be the last token we've seen, unless
459+
// The remainder will be the last token we've seen, unless
454460
// the last thing we see is unlexable.
455461
remainder = new_token;
456462

457-
if pos >= text.len() {
458-
let elapsed = start_time.elapsed();
459-
eprintln!(
460-
"Rust lexing completed in {:?} - produced {} tokens",
461-
elapsed,
462-
tokens.len()
463-
);
464-
return Ok((tokens, remainder))
465-
}
463+
if pos >= text.len() {
464+
let elapsed = start_time.elapsed();
465+
eprintln!(
466+
"Rust lexing completed in {:?} - produced {} tokens",
467+
elapsed,
468+
tokens.len()
469+
);
470+
return Ok((tokens, remainder));
471+
}
466472
}
467473
}
468474
}
@@ -833,7 +839,7 @@ mod tests {
833839
}
834840
);
835841

836-
assert_eq!(
842+
assert_eq!(
837843
tokens[1],
838844
Token {
839845
value: "ret".to_string(),
@@ -847,7 +853,7 @@ mod tests {
847853
}
848854
);
849855

850-
assert_eq!(tokens[1], remainder);
856+
assert_eq!(tokens[1], remainder);
851857
}
852858

853859
#[test]

0 commit comments

Comments (0)