
Rename Scanner to Lexer (#276)
casey authored Dec 1, 2017
1 parent c5eeb89 commit 66391de
Showing 4 changed files with 13 additions and 13 deletions.
src/scanner.rs → src/lexer.rs (18 changes: 9 additions & 9 deletions)
@@ -19,7 +19,7 @@ fn mixed_whitespace(text: &str) -> bool {
   !(text.chars().all(|c| c == ' ') || text.chars().all(|c| c == '\t'))
 }
 
-pub struct Scanner<'a> {
+pub struct Lexer<'a> {
   tokens: Vec<Token<'a>>,
   text: &'a str,
   rest: &'a str,
@@ -37,9 +37,9 @@ enum State<'a> {
   Interpolation,
 }
 
-impl<'a> Scanner<'a> {
-  pub fn scan(text: &'a str) -> CompilationResult<Vec<Token<'a>>> {
-    let scanner = Scanner{
+impl<'a> Lexer<'a> {
+  pub fn lex(text: &'a str) -> CompilationResult<Vec<Token<'a>>> {
+    let lexer = Lexer{
       tokens: vec![],
       text: text,
       rest: text,
@@ -49,7 +49,7 @@ impl<'a> Scanner<'a> {
       state: vec![State::Start],
     };
 
-    scanner.inner()
+    lexer.inner()
   }
 
   fn error(&self, kind: CompilationErrorKind<'a>) -> CompilationError<'a> {
@@ -75,7 +75,7 @@ impl<'a> Scanner<'a> {
     }
   }
 
-  fn scan_indent(&mut self) -> CompilationResult<'a, Option<Token<'a>>> {
+  fn lex_indent(&mut self) -> CompilationResult<'a, Option<Token<'a>>> {
     lazy_static! {
       static ref INDENT: Regex = re(r"^([ \t]*)[^ \t\n\r]");
     }
@@ -150,7 +150,7 @@ impl<'a> Scanner<'a> {
     }
 
     loop {
-      if let Some(token) = self.scan_indent()? {
+      if let Some(token) = self.lex_indent()? {
        self.tokens.push(token);
      }

@@ -306,7 +306,7 @@ mod test {
     fn $name() {
       let input = $input;
       let expected = $expected;
-      let tokens = ::Scanner::scan(input).unwrap();
+      let tokens = ::Lexer::lex(input).unwrap();
       let roundtrip = tokens.iter().map(|t| {
         let mut s = String::new();
         s += t.prefix;
@@ -369,7 +369,7 @@ mod test {
       kind: $kind,
     };
 
-    if let Err(error) = Scanner::scan(input) {
+    if let Err(error) = Lexer::lex(input) {
       assert_eq!(error.text, expected.text);
       assert_eq!(error.index, expected.index);
       assert_eq!(error.line, expected.line);
src/main.rs (4 changes: 2 additions & 2 deletions)
@@ -24,6 +24,7 @@ mod cooked_string;
 mod expression;
 mod fragment;
 mod justfile;
+mod lexer;
 mod misc;
 mod parameter;
 mod parser;
@@ -33,7 +34,6 @@ mod recipe;
 mod recipe_resolver;
 mod run;
 mod runtime_error;
-mod scanner;
 mod shebang;
 mod token;

@@ -68,7 +68,7 @@ mod common {
   pub use recipe::Recipe;
   pub use recipe_resolver::RecipeResolver;
   pub use runtime_error::{RuntimeError, RunResult};
-  pub use scanner::Scanner;
+  pub use lexer::Lexer;
   pub use shebang::Shebang;
   pub use token::{Token, TokenKind};
 }
src/parser.rs (2 changes: 1 addition & 1 deletion)
@@ -15,7 +15,7 @@ pub struct Parser<'a> {
 
 impl<'a> Parser<'a> {
   pub fn parse(text: &'a str) -> CompilationResult<'a, Justfile> {
-    let tokens = Scanner::scan(text)?;
+    let tokens = Lexer::lex(text)?;
     let parser = Parser::new(text, tokens);
     parser.justfile()
   }
src/testing.rs (2 changes: 1 addition & 1 deletion)
@@ -30,7 +30,7 @@ macro_rules! compilation_error_test {
     kind: $kind,
   };
 
-  let tokens = ::Scanner::scan(input).unwrap();
+  let tokens = ::Lexer::lex(input).unwrap();
   let parser = ::Parser::new(input, tokens);
 
   if let Err(error) = parser.justfile() {
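For orientation, here is a minimal sketch (not part of the commit) of how the renamed entry point is called after this change. The justfile text and the function name lex_example are hypothetical; Lexer::lex, the returned token vector, and the error.line field are taken from the diff above, and the caller is assumed to live inside the crate like the test macros do.

// Hypothetical in-crate caller, mirroring the test macros above (not in the commit).
fn lex_example() {
  let text = "build:\n\techo hello\n";
  match ::Lexer::lex(text) {
    // On success the lexer returns the token stream for the whole justfile text.
    Ok(tokens) => println!("lexed {} tokens", tokens.len()),
    // On failure the compilation error records the line where lexing stopped.
    Err(error) => println!("lex error on line {}", error.line),
  }
}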
