From 82dda1bb51c58b66ed0f09a231dfc31a48128f7a Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Mon, 23 Nov 2015 20:41:35 +0200
Subject: [PATCH 1/7] Lexer skeleton

---
 src/lib.rs | 163 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 163 insertions(+)

diff --git a/src/lib.rs b/src/lib.rs
index 23b87dc..cbe4976 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -61,3 +61,166 @@
 
 #[macro_use]
 pub mod error;
+
+#[cfg(test)]
+mod test {
+    use tokens::{ Lexer, LexerOptions };
+
+    #[test]
+    fn lexer_usage() {
+        // Build the lexer once for the project environment with extensions.
+        let lexer = Lexer::new(LexerOptions::default(), vec![]);
+
+        // Use it many times.
+        {
+            let source: String = "{{ var }}".into();
+            for token in lexer.tokens(&source) {
+                println!("{:?}", token);
+            }
+        }
+    }
+}
+
+// I don't know where to put this, keeping it in root for now.
+#[derive(Debug, Default, Clone)]
+pub struct Position {
+    pub line: usize,
+    pub column: usize,
+}
+
+// Named this "tokens", in plural, to mean a place where you should expect
+// to find your tokens.
+// A similar convention would work for "nodes", which is where the AST lives,
+// and possibly for "instructions", if we decide to go that route.
+pub mod tokens {
+
+    // It is possible to delay all string manipulation for later,
+    // and we can simply store the slices into the original full source
+    // string.
+    //
+    // We can keep doing that even for Node<'a>, provided the strings remain untouched.
+    // If something needs to be changed, we can create a special Node for that.
+    #[derive(Debug)]
+    pub enum TokenRef<'a> {
+        Text(&'a str),
+    }
+
+    impl<'a> TokenRef<'a> {
+        // Not named "into", because Gankro criticises using "into" for anything
+        // more than moving data around. Not named "into_owned", because we don't
+        // implement the ToOwned trait. So the only logical name that remains is
+        // `into_token`.
+        pub fn into_token(self) -> Token {
+            match self {
+                TokenRef::Text(v) => Token::Text(v.into()),
+            }
+        }
+    }
+
+    // This will be used when the token needs to outlive the original source
+    // string, for example, in error messages.
+    pub enum Token {
+        Text(String),
+    }
+
+    // Not pub, to make the API more convenient.
+    mod lexing {
+        use Position;
+        use tokens::TokenRef;
+
+        /// TokenRef wrapper for `Lexer` that additionally has a position.
+        #[derive(Debug)]
+        pub struct ItemRef<'t> {
+            pub token: TokenRef<'t>,
+            pub position: Position,
+        }
+
+        // TBD: simple lexer options (delimiters, whitespace, etc.).
+        #[derive(Copy, Clone)]
+        pub struct Options;
+
+        impl Options {
+            pub fn default() -> Options { Options }
+        }
+
+        // I will be referring to 't as the template lifetime and 'i as the
+        // iteration lifetime.
+        // This lexer should be reusable between the `tokenize` calls.
+        // In addition to this I had a `LexingEnvironment`, but it turned out
+        // to be redundant.
+        pub struct Lexer;
+
+        impl Lexer {
+            // It's the responsibility of someone else to take operators from
+            // extensions, resolve any conflicts and compile the final
+            // "operators" list.
+            //
+            // It looks like the Lexer does not care whether they are unary or
+            // binary; that will become important in the parser.
+            //
+            // Funny note: I found that "=" is considered neither unary nor binary ;)
+            pub fn new(options: Options, operators: Vec<&'static str>) -> Lexer {
+                // Here we will create the patterns (I called them matchers) and
+                // store them in the Lexer.
+                Lexer
+            }
+
+            // twig-rust: https://github.com/colin-kiegel/twig-rust/blob/master/src/lexer/mod.rs#L64
+            // twig-rs: https://github.com/Nercury/twig-rs/blob/master/src/tokens/lexer/mod.rs#L40
+            //
+            // I think it is possible to avoid the Template::Raw in the lexer API.
+            // We can probably deal with newlines in the patterns?
+            // Also, maybe we won't need to fix line endings, but right now we both do that.
+            //
+            // The twig-rs result was "Iter", the twig-rust one - "Job" :)
+            //
+            // I changed it to a concrete "Tokens" for now, which will implement
+            // Iterator. No Result. Let's avoid lexing until the Parser requests
+            // the first token.
+            pub fn tokens<'i, 't>(&'i self, code: &'t str) -> Tokens<'i, 't> {
+                // Just take the whole lexer by reference ;)
+                Tokens::new(self, code)
+            }
+        }
+
+        // 'i is the iteration lifetime, or "one use of the lexer".
+        // 't is the template lifetime. It will live longer than this iteration.
+        pub struct Tokens<'i, 't> {
+            env: &'i Lexer,
+            code: &'t str,
+        }
+
+        impl<'i, 't> Tokens<'i, 't> {
+
+            pub fn new<'ii, 'tt>(lexer: &'ii Lexer, code: &'tt str) -> Tokens<'ii, 'tt> {
+                Tokens {
+                    env: lexer,
+                    code: code,
+                }
+            }
+        }
+
+        // I think we can avoid storing all tokens in a Vec, and instead just
+        // keep the next chunk of lexed tokens in memory.
+        impl<'i, 't> Iterator for Tokens<'i, 't> {
+            // TODO: Use a proper Result once we merge error handling.
+            type Item = Result<ItemRef<'t>, ()>;
+
+            fn next(&mut self) -> Option<Result<ItemRef<'t>, ()>> {
+                return None;
+            }
+        }
+    }
+
+    pub use self::lexing::{
+        Lexer,
+        Tokens,
+        ItemRef as LexerItemRef,
+        Options as LexerOptions,
+    };
+}
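
The split between borrowed and owned tokens above is the load-bearing decision in this skeleton: TokenRef<'t> stays a cheap slice into the source, and into_token() is the one explicit point where allocation happens. A minimal sketch of a caller that needs tokens to outlive the source string (the collect_owned helper is hypothetical, and the skeleton iterator yields nothing yet, so the pipeline produces an empty Vec):

    use tokens::{ Lexer, Token };

    fn collect_owned(lexer: &Lexer, source: &str) -> Vec<Token> {
        // Each ItemRef borrows from `source`; into_token() copies the borrowed
        // slice into an owned String, so the results can outlive `source`.
        lexer.tokens(source)
             .filter_map(|item| item.ok())
             .map(|item| item.token.into_token())
             .collect()
    }
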
From 5e8b808cef998b6454c048904650779ee5e9930a Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 15:41:25 +0200
Subject: [PATCH 2/7] Move lexer into separate files as discussed in #10.

---
 src/api/lexer/lexing.rs |  37 +++++++++
 src/api/lexer/mod.rs    |  47 ++++++++++++
 src/api/mod.rs          |  13 ++++
 src/api/tokens/mod.rs   |  26 +++++++
 src/lib.rs              | 164 +---------------------------------------
 5 files changed, 124 insertions(+), 163 deletions(-)
 create mode 100644 src/api/lexer/lexing.rs
 create mode 100644 src/api/lexer/mod.rs
 create mode 100644 src/api/mod.rs
 create mode 100644 src/api/tokens/mod.rs

diff --git a/src/api/lexer/lexing.rs b/src/api/lexer/lexing.rs
new file mode 100644
index 0000000..f486650
--- /dev/null
+++ b/src/api/lexer/lexing.rs
@@ -0,0 +1,37 @@
+use api::Position;
+use api::tokens::TokenRef;
+
+use super::{ Lexer };
+
+/// TokenRef wrapper for `Lexer` that additionally has a position.
+#[derive(Debug)]
+pub struct ItemRef<'t> {
+    pub token: TokenRef<'t>,
+    pub position: Position,
+}
+
+/// Lexer token iterator.
+///
+/// 'i is the iteration lifetime, or "one use of the lexer".
+/// 't is the template lifetime. It will live longer than this iteration.
+pub struct Tokens<'i, 't> {
+    env: &'i Lexer,
+    code: &'t str,
+}
+
+impl<'i, 't> Tokens<'i, 't> {
+    pub fn new<'ii, 'tt>(lexer: &'ii Lexer, code: &'tt str) -> Tokens<'ii, 'tt> {
+        Tokens {
+            env: lexer,
+            code: code,
+        }
+    }
+}
+
+impl<'i, 't> Iterator for Tokens<'i, 't> {
+    type Item = Result<ItemRef<'t>, ()>;
+
+    fn next(&mut self) -> Option<Result<ItemRef<'t>, ()>> {
+        return None;
+    }
+}
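
A quick illustration of what the 'i/'t split in the doc comment buys us: an ItemRef<'t> borrows only from the template string, not from the Tokens iterator, so it can legally outlive the iterator that produced it. A sketch (the first_item helper is hypothetical, and with the stub next() above it always returns None):

    use api::lexer::{ Lexer, ItemRef };

    fn first_item<'t>(lexer: &Lexer, code: &'t str) -> Option<ItemRef<'t>> {
        let mut tokens = lexer.tokens(code);
        // `tokens` (the 'i borrow of the lexer) dies at the end of this
        // function, but the returned ItemRef<'t> remains valid for as long
        // as `code` does.
        tokens.next().and_then(|result| result.ok())
    }
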
diff --git a/src/api/lexer/mod.rs b/src/api/lexer/mod.rs
new file mode 100644
index 0000000..6d4e44c
--- /dev/null
+++ b/src/api/lexer/mod.rs
@@ -0,0 +1,47 @@
+/*!
+Twig lexer.
+
+Produces a token stream from a source template.
+
+# Summary
+
+This module is capable of taking a Twig input template, for example, this one:
+
+```twig
+Hello
+{% if world %}
+    world
+{% else %}
+    {{ other }}
+{% endif %}
+```
+
+And chopping it into tokens like these:
+
+TODO: Example code for this.
+*/
+
+mod lexing;
+
+pub use self::lexing::{ Tokens, ItemRef };
+
+#[derive(Copy, Clone)]
+pub struct Options;
+
+impl Options {
+    pub fn default() -> Options { Options }
+}
+
+pub struct Lexer;
+
+impl Lexer {
+    /// Create a new lexer from options and an operator list.
+    pub fn new(options: Options, operators: Vec<&'static str>) -> Lexer {
+        Lexer
+    }
+
+    /// Get a lexed stream of tokens.
+    pub fn tokens<'i, 't>(&'i self, code: &'t str) -> Tokens<'i, 't> {
+        Tokens::new(self, code)
+    }
+}
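
The module doc above leaves its token example as a TODO. For a rough idea of the intended output shape: by the end of this series the lexer is expected to chop the template's opening into variants along these lines. Only Text, BlockStart and Name exist anywhere in this series, so this is an assumption about where the token set is heading, not the final API:

    use api::tokens::TokenRef;

    // Hypothetical token stream for the start of the template above.
    fn expected_prefix() -> Vec<TokenRef<'static>> {
        vec![
            TokenRef::Text("Hello\n"),
            TokenRef::BlockStart,
            TokenRef::Name("if"),
            TokenRef::Name("world"),
        ]
    }
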
diff --git a/src/api/mod.rs b/src/api/mod.rs
new file mode 100644
index 0000000..e4454c2
--- /dev/null
+++ b/src/api/mod.rs
@@ -0,0 +1,13 @@
+/*!
+Twig extension writer's API.
+*/
+
+pub mod tokens;
+pub mod lexer;
+
+/// Line-column position in a file.
+#[derive(Debug, Default, Clone)]
+pub struct Position {
+    pub line: usize,
+    pub column: usize,
+}

diff --git a/src/api/tokens/mod.rs b/src/api/tokens/mod.rs
new file mode 100644
index 0000000..f5f05d3
--- /dev/null
+++ b/src/api/tokens/mod.rs
@@ -0,0 +1,26 @@
+/*!
+Tokens received from lexer output.
+*/
+
+/// Token value.
+///
+/// The lifetime of this token refers to the original source string, which
+/// should be kept alive as long as this token.
+#[derive(Debug)]
+pub enum TokenRef<'a> {
+    Text(&'a str),
+}
+
+impl<'a> TokenRef<'a> {
+    /// Get owned value for this token.
+    pub fn into_token(self) -> Token {
+        match self {
+            TokenRef::Text(v) => Token::Text(v.into()),
+        }
+    }
+}
+
+/// Owned token value.
+pub enum Token {
+    Text(String),
+}

diff --git a/src/lib.rs b/src/lib.rs
index cbe4976..e3f2a0b 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -61,166 +61,4 @@
 
 #[macro_use]
 pub mod error;
-
-#[cfg(test)]
-mod test {
-    use tokens::{ Lexer, LexerOptions };
-
-    #[test]
-    fn lexer_usage() {
-        // Build the lexer once for the project environment with extensions.
-        let lexer = Lexer::new(LexerOptions::default(), vec![]);
-
-        // Use it many times.
-        {
-            let source: String = "{{ var }}".into();
-            for token in lexer.tokens(&source) {
-                println!("{:?}", token);
-            }
-        }
-    }
-}
-
-// I don't know where to put this, keeping it in root for now.
-#[derive(Debug, Default, Clone)]
-pub struct Position {
-    pub line: usize,
-    pub column: usize,
-}
-
-// Named this "tokens", in plural, to mean a place where you should expect
-// to find your tokens.
-// A similar convention would work for "nodes", which is where the AST lives,
-// and possibly for "instructions", if we decide to go that route.
-pub mod tokens {
-
-    // It is possible to delay all string manipulation for later,
-    // and we can simply store the slices into the original full source
-    // string.
-    //
-    // We can keep doing that even for Node<'a>, provided the strings remain untouched.
-    // If something needs to be changed, we can create a special Node for that.
-    #[derive(Debug)]
-    pub enum TokenRef<'a> {
-        Text(&'a str),
-    }
-
-    impl<'a> TokenRef<'a> {
-        // Not named "into", because Gankro criticises using "into" for anything
-        // more than moving data around. Not named "into_owned", because we don't
-        // implement the ToOwned trait. So the only logical name that remains is
-        // `into_token`.
-        pub fn into_token(self) -> Token {
-            match self {
-                TokenRef::Text(v) => Token::Text(v.into()),
-            }
-        }
-    }
-
-    // This will be used when the token needs to outlive the original source
-    // string, for example, in error messages.
-    pub enum Token {
-        Text(String),
-    }
-
-    // Not pub, to make the API more convenient.
-    mod lexing {
-        use Position;
-        use tokens::TokenRef;
-
-        /// TokenRef wrapper for `Lexer` that additionally has a position.
-        #[derive(Debug)]
-        pub struct ItemRef<'t> {
-            pub token: TokenRef<'t>,
-            pub position: Position,
-        }
-
-        // TBD: simple lexer options (delimiters, whitespace, etc.).
-        #[derive(Copy, Clone)]
-        pub struct Options;
-
-        impl Options {
-            pub fn default() -> Options { Options }
-        }
-
-        // I will be referring to 't as the template lifetime and 'i as the
-        // iteration lifetime.
-        // This lexer should be reusable between the `tokenize` calls.
-        // In addition to this I had a `LexingEnvironment`, but it turned out
-        // to be redundant.
-        pub struct Lexer;
-
-        impl Lexer {
-            // It's the responsibility of someone else to take operators from
-            // extensions, resolve any conflicts and compile the final
-            // "operators" list.
-            //
-            // It looks like the Lexer does not care whether they are unary or
-            // binary; that will become important in the parser.
-            //
-            // Funny note: I found that "=" is considered neither unary nor binary ;)
-            pub fn new(options: Options, operators: Vec<&'static str>) -> Lexer {
-                // Here we will create the patterns (I called them matchers) and
-                // store them in the Lexer.
-                Lexer
-            }
-
-            // twig-rust: https://github.com/colin-kiegel/twig-rust/blob/master/src/lexer/mod.rs#L64
-            // twig-rs: https://github.com/Nercury/twig-rs/blob/master/src/tokens/lexer/mod.rs#L40
-            //
-            // I think it is possible to avoid the Template::Raw in the lexer API.
-            // We can probably deal with newlines in the patterns?
-            // Also, maybe we won't need to fix line endings, but right now we both do that.
-            //
-            // The twig-rs result was "Iter", the twig-rust one - "Job" :)
-            //
-            // I changed it to a concrete "Tokens" for now, which will implement
-            // Iterator. No Result. Let's avoid lexing until the Parser requests
-            // the first token.
-            pub fn tokens<'i, 't>(&'i self, code: &'t str) -> Tokens<'i, 't> {
-                // Just take the whole lexer by reference ;)
-                Tokens::new(self, code)
-            }
-        }
-
-        // 'i is the iteration lifetime, or "one use of the lexer".
-        // 't is the template lifetime. It will live longer than this iteration.
-        pub struct Tokens<'i, 't> {
-            env: &'i Lexer,
-            code: &'t str,
-        }
-
-        impl<'i, 't> Tokens<'i, 't> {
-
-            pub fn new<'ii, 'tt>(lexer: &'ii Lexer, code: &'tt str) -> Tokens<'ii, 'tt> {
-                Tokens {
-                    env: lexer,
-                    code: code,
-                }
-            }
-        }
-
-        // I think we can avoid storing all tokens in a Vec, and instead just
-        // keep the next chunk of lexed tokens in memory.
-        impl<'i, 't> Iterator for Tokens<'i, 't> {
-            // TODO: Use a proper Result once we merge error handling.
-            type Item = Result<ItemRef<'t>, ()>;
-
-            fn next(&mut self) -> Option<Result<ItemRef<'t>, ()>> {
-                return None;
-            }
-        }
-    }
-
-    pub use self::lexing::{
-        Lexer,
-        Tokens,
-        ItemRef as LexerItemRef,
-        Options as LexerOptions,
-    };
-}
+pub mod api;
From f71dcd9b0f43772e67160a621166bc22a1438f5d Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 16:08:30 +0200
Subject: [PATCH 3/7] Add file headers.

---
 src/api/lexer/lexing.rs | 5 +++++
 src/api/lexer/mod.rs    | 5 +++++
 src/api/mod.rs          | 5 +++++
 src/api/tokens/mod.rs   | 5 +++++
 4 files changed, 20 insertions(+)

diff --git a/src/api/lexer/lexing.rs b/src/api/lexer/lexing.rs
index f486650..47329b7 100644
--- a/src/api/lexer/lexing.rs
+++ b/src/api/lexer/lexing.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 use api::Position;
 use api::tokens::TokenRef;
 

diff --git a/src/api/lexer/mod.rs b/src/api/lexer/mod.rs
index 6d4e44c..b0860db 100644
--- a/src/api/lexer/mod.rs
+++ b/src/api/lexer/mod.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 /*!
 Twig lexer.
 

diff --git a/src/api/mod.rs b/src/api/mod.rs
index e4454c2..a3ffd91 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 /*!
 Twig extension writer's API.
 */

diff --git a/src/api/tokens/mod.rs b/src/api/tokens/mod.rs
index f5f05d3..d63871c 100644
--- a/src/api/tokens/mod.rs
+++ b/src/api/tokens/mod.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 /*!
 Tokens received from lexer output.
 */
From 3d5cc2a5f4def1f070ce1d11ca0a2ee25b9014b6 Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 19:13:02 +0200
Subject: [PATCH 4/7] Implement lexer that can pass first test.

---
 src/api/error/lexing.rs | 20 +++++++++++++++
 src/api/error/mod.rs    | 10 ++++++++
 src/api/error/syntax.rs | 39 +++++++++++++++++++++++++++++
 src/api/lexer/lexing.rs | 55 +++++++++++++++++++++++++++++++++++------
 src/api/mod.rs          |  3 ++-
 src/api/tokens/mod.rs   | 15 ++++++++---
 tests/lexer/mod.rs      | 18 ++++++++++++++
 tests/lexer_tests.rs    |  3 +++
 8 files changed, 151 insertions(+), 12 deletions(-)
 create mode 100644 src/api/error/lexing.rs
 create mode 100644 src/api/error/mod.rs
 create mode 100644 src/api/error/syntax.rs
 create mode 100644 tests/lexer/mod.rs
 create mode 100644 tests/lexer_tests.rs

diff --git a/src/api/error/lexing.rs b/src/api/error/lexing.rs
new file mode 100644
index 0000000..4145fd0
--- /dev/null
+++ b/src/api/error/lexing.rs
@@ -0,0 +1,20 @@
+use std::io;
+use api::error::SyntaxError;
+
+#[derive(Debug)]
+pub enum LexingError {
+    Syntax(SyntaxError),
+    Io(io::Error)
+}
+
+impl From<io::Error> for LexingError {
+    fn from(other: io::Error) -> LexingError {
+        LexingError::Io(other)
+    }
+}
+
+impl From<SyntaxError> for LexingError {
+    fn from(other: SyntaxError) -> LexingError {
+        LexingError::Syntax(other)
+    }
+}

diff --git a/src/api/error/mod.rs b/src/api/error/mod.rs
new file mode 100644
index 0000000..21d0e5c
--- /dev/null
+++ b/src/api/error/mod.rs
@@ -0,0 +1,10 @@
+mod syntax;
+mod lexing;
+
+use std::result;
+
+pub use self::lexing::LexingError;
+pub use self::syntax::{ SyntaxError, SyntaxErrorCode, UnexpectedToken };
+
+pub type LexingResult<T> = result::Result<T, LexingError>;
+pub type ParsingResult<T> = result::Result<T, SyntaxError>;

diff --git a/src/api/error/syntax.rs b/src/api/error/syntax.rs
new file mode 100644
index 0000000..aa96b4f
--- /dev/null
+++ b/src/api/error/syntax.rs
@@ -0,0 +1,39 @@
+use api::Position;
+use api::tokens::{ Token };
+
+#[derive(Debug)]
+pub struct SyntaxError {
+    pub code: SyntaxErrorCode,
+    pub starts_at: Position,
+    pub ends_at: Option<Position>,
+}
+
+impl SyntaxError {
+    /// Specify the location where the error ends.
+    pub fn ends_at(mut self, pos: Position) -> SyntaxError {
+        self.ends_at = Some(pos);
+        self
+    }
+}
+
+#[derive(Debug)]
+pub enum SyntaxErrorCode {
+    ExpectedTokenButReceived { expected: Token, received: UnexpectedToken },
+}
+
+impl SyntaxErrorCode {
+    /// Specify the location where the error starts.
+    pub fn starts_at(self, pos: Position) -> SyntaxError {
+        SyntaxError {
+            code: self,
+            starts_at: pos,
+            ends_at: None,
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub enum UnexpectedToken {
+    Token(Token),
+    EndOfStream,
+}
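
The error types above are meant to compose in builder style: a bare SyntaxErrorCode is positioned with starts_at(), which yields a SyntaxError that can optionally be narrowed with ends_at(). A sketch of the full chain (the example_error function and the positions in it are made up for illustration):

    use api::Position;
    use api::error::{ SyntaxError, SyntaxErrorCode, UnexpectedToken };
    use api::tokens::Token;

    fn example_error() -> SyntaxError {
        // An error code becomes a positioned error via starts_at();
        // ends_at() then turns the point into a span.
        SyntaxErrorCode::ExpectedTokenButReceived {
            expected: Token::BlockStart,
            received: UnexpectedToken::EndOfStream,
        }
        .starts_at(Position { line: 1, column: 3 })
        .ends_at(Position { line: 1, column: 9 })
    }
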
diff --git a/src/api/lexer/lexing.rs b/src/api/lexer/lexing.rs
index 47329b7..00fa907 100644
--- a/src/api/lexer/lexing.rs
+++ b/src/api/lexer/lexing.rs
@@ -6,7 +6,8 @@
 use api::Position;
 use api::tokens::TokenRef;
 
-use super::{ Lexer };
+use api::lexer::Lexer;
+use api::error::{ SyntaxErrorCode, UnexpectedToken, LexingResult };
 
 /// TokenRef wrapper for `Lexer` that additionally has a position.
 #[derive(Debug)]
@@ -20,23 +21,63 @@ pub struct Tokens<'i, 't> {
 /// 'i is the iteration lifetime, or "one use of the lexer".
 /// 't is the template lifetime. It will live longer than this iteration.
 pub struct Tokens<'i, 't> {
+    /// Position of the next token to get.
+    next_pos: Position, // temporary field until I get a cursor in.
     env: &'i Lexer,
     code: &'t str,
 }
 
+impl<'i, 't> Iterator for Tokens<'i, 't> {
+    type Item = LexingResult<ItemRef<'t>>;
+
+    fn next(&mut self) -> Option<LexingResult<ItemRef<'t>>> {
+        // Hello, my name is Lexer. Twig Lexer.
+        // I am not very complicated.
+        match self.next_pos {
+            Position { line: 1, .. } => {
+                self.next_pos.line = 2;
+                Some(Ok(ItemRef { token: TokenRef::BlockStart, position: self.next_pos }))
+            },
+            Position { line: 2, .. } => {
+                self.next_pos.line = 3;
+                Some(Ok(ItemRef { token: TokenRef::Name("§"), position: self.next_pos }))
+            },
+            _ => None
+        }
+    }
+}
+
 impl<'i, 't> Tokens<'i, 't> {
     pub fn new<'ii, 'tt>(lexer: &'ii Lexer, code: &'tt str) -> Tokens<'ii, 'tt> {
         Tokens {
+            next_pos: Position { line: 1, column: 1 },
             env: lexer,
             code: code,
         }
     }
-}
 
-impl<'i, 't> Iterator for Tokens<'i, 't> {
-    type Item = Result<ItemRef<'t>, ()>;
-
-    fn next(&mut self) -> Option<Result<ItemRef<'t>, ()>> {
-        return None;
+    pub fn expect(&mut self, expected: TokenRef<'t>) -> LexingResult<TokenRef<'t>> {
+        let maybe_item = self.next();
+        match maybe_item {
+            None => Err(
+                SyntaxErrorCode::ExpectedTokenButReceived {
+                    expected: expected.into(),
+                    received: UnexpectedToken::EndOfStream
+                }.starts_at(self.next_pos).into()
+            ),
+            Some(Ok(item)) => {
+                if item.token == expected {
+                    Ok(item.token)
+                } else {
+                    Err(
+                        SyntaxErrorCode::ExpectedTokenButReceived {
+                            expected: expected.into(),
+                            received: UnexpectedToken::Token(item.token.into())
+                        }.starts_at(item.position).into()
+                    )
+                }
+            },
+            Some(Err(error)) => Err(error),
+        }
     }
 }

diff --git a/src/api/mod.rs b/src/api/mod.rs
index a3ffd91..49f46cc 100644
--- a/src/api/mod.rs
+++ b/src/api/mod.rs
@@ -9,9 +9,10 @@ Twig extension writer's API.
 
 pub mod tokens;
 pub mod lexer;
+pub mod error;
 
 /// Line-column position in a file.
-#[derive(Debug, Default, Clone)]
+#[derive(Debug, Default, Copy, Clone)]
 pub struct Position {
     pub line: usize,
     pub column: usize,

diff --git a/src/api/tokens/mod.rs b/src/api/tokens/mod.rs
index d63871c..f8253d5 100644
--- a/src/api/tokens/mod.rs
+++ b/src/api/tokens/mod.rs
@@ -11,21 +11,28 @@ Tokens received from lexer output.
 ///
 /// The lifetime of this token refers to the original source string, which
 /// should be kept alive as long as this token.
-#[derive(Debug)]
+#[derive(Debug, Copy, Clone, PartialEq)]
 pub enum TokenRef<'a> {
+    BlockStart,
+    Name(&'a str),
     Text(&'a str),
 }
 
-impl<'a> TokenRef<'a> {
+impl<'a> From<TokenRef<'a>> for Token {
     /// Get owned value for this token.
-    pub fn into_token(self) -> Token {
-        match self {
+    fn from(other: TokenRef<'a>) -> Self {
+        match other {
+            TokenRef::BlockStart => Token::BlockStart,
+            TokenRef::Name(v) => Token::Name(v.into()),
             TokenRef::Text(v) => Token::Text(v.into()),
         }
     }
 }
 
 /// Owned token value.
+#[derive(Debug, Clone)]
 pub enum Token {
+    BlockStart,
+    Name(String),
     Text(String),
 }

diff --git a/tests/lexer/mod.rs b/tests/lexer/mod.rs
new file mode 100644
index 0000000..9205bb5
--- /dev/null
+++ b/tests/lexer/mod.rs
@@ -0,0 +1,18 @@
+use twig::api::lexer::{ Lexer, Tokens, Options };
+use twig::api::tokens::TokenRef;
+
+#[test]
+fn name_label_for_tag() {
+    let template = "{% § %}";
+    let lexer = Lexer::new(Options::default(), Vec::new());
+    let mut s = lexer.tokens(&template);
+
+    expect(&mut s, TokenRef::BlockStart);
+    expect(&mut s, TokenRef::Name("§"));
+}
+
+fn expect<'i, 'c>(stream: &mut Tokens<'i, 'c>, token_value: TokenRef<'c>) {
+    if let Err(e) = stream.expect(token_value) {
+        panic!("Received error {:?}", e);
+    }
+}

diff --git a/tests/lexer_tests.rs b/tests/lexer_tests.rs
new file mode 100644
index 0000000..39ed5f7
--- /dev/null
+++ b/tests/lexer_tests.rs
@@ -0,0 +1,3 @@
+extern crate twig;
+
+mod lexer;
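
The test's expect helper panics, which is fine in a test; in the parser proper the same Tokens::expect() calls would thread LexingResult values upward instead. A sketch with try! (this code predates the ? operator; the parse_if_open name and the "if" label are made up for illustration):

    use twig::api::lexer::Tokens;
    use twig::api::error::LexingResult;
    use twig::api::tokens::TokenRef;

    // Propagates any positioned LexingError to the caller instead of panicking.
    fn parse_if_open<'i, 't>(stream: &mut Tokens<'i, 't>) -> LexingResult<()> {
        try!(stream.expect(TokenRef::BlockStart));
        try!(stream.expect(TokenRef::Name("if")));
        Ok(())
    }
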
From febcfcf882c31a90a3f9e15044fbbce033fe2592 Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 19:28:43 +0200
Subject: [PATCH 5/7] Remove unused ParsingResult for now.

---
 src/api/error/lexing.rs | 3 +++
 src/api/error/mod.rs    | 7 +------
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/src/api/error/lexing.rs b/src/api/error/lexing.rs
index 4145fd0..a009930 100644
--- a/src/api/error/lexing.rs
+++ b/src/api/error/lexing.rs
@@ -1,5 +1,6 @@
 use std::io;
 use api::error::SyntaxError;
+use std::result;
 
 #[derive(Debug)]
 pub enum LexingError {
@@ -18,3 +19,5 @@ impl From<SyntaxError> for LexingError {
         LexingError::Syntax(other)
     }
 }
+
+pub type LexingResult<T> = result::Result<T, LexingError>;

diff --git a/src/api/error/mod.rs b/src/api/error/mod.rs
index 21d0e5c..11bd8fa 100644
--- a/src/api/error/mod.rs
+++ b/src/api/error/mod.rs
@@ -1,10 +1,5 @@
 mod syntax;
 mod lexing;
 
-use std::result;
-
-pub use self::lexing::LexingError;
+pub use self::lexing::{ LexingError, LexingResult };
 pub use self::syntax::{ SyntaxError, SyntaxErrorCode, UnexpectedToken };
-
-pub type LexingResult<T> = result::Result<T, LexingError>;
-pub type ParsingResult<T> = result::Result<T, SyntaxError>;

From 22d78d265b1b14a98bb4cbfbc5e536be9d8790fe Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 19:32:21 +0200
Subject: [PATCH 6/7] Move Lexer error and result into lexer mod, duh.

---
 src/api/error/mod.rs                        | 2 --
 src/api/{error/lexing.rs => lexer/error.rs} | 0
 src/api/lexer/lexing.rs                     | 4 ++--
 src/api/lexer/mod.rs                        | 2 ++
 4 files changed, 4 insertions(+), 4 deletions(-)
 rename src/api/{error/lexing.rs => lexer/error.rs} (100%)

diff --git a/src/api/error/mod.rs b/src/api/error/mod.rs
index 11bd8fa..ea7824a 100644
--- a/src/api/error/mod.rs
+++ b/src/api/error/mod.rs
@@ -1,5 +1,3 @@
 mod syntax;
-mod lexing;
 
-pub use self::lexing::{ LexingError, LexingResult };
 pub use self::syntax::{ SyntaxError, SyntaxErrorCode, UnexpectedToken };

diff --git a/src/api/error/lexing.rs b/src/api/lexer/error.rs
similarity index 100%
rename from src/api/error/lexing.rs
rename to src/api/lexer/error.rs

diff --git a/src/api/lexer/lexing.rs b/src/api/lexer/lexing.rs
index 00fa907..a70c31b 100644
--- a/src/api/lexer/lexing.rs
+++ b/src/api/lexer/lexing.rs
@@ -6,8 +6,8 @@
 use api::Position;
 use api::tokens::TokenRef;
 
-use api::lexer::Lexer;
-use api::error::{ SyntaxErrorCode, UnexpectedToken, LexingResult };
+use api::lexer::{ Lexer, LexingResult };
+use api::error::{ SyntaxErrorCode, UnexpectedToken };
 
 /// TokenRef wrapper for `Lexer` that additionally has a position.
 #[derive(Debug)]

diff --git a/src/api/lexer/mod.rs b/src/api/lexer/mod.rs
index b0860db..0e46f27 100644
--- a/src/api/lexer/mod.rs
+++ b/src/api/lexer/mod.rs
@@ -27,8 +27,10 @@ TODO: Example code for this.
 */
 
 mod lexing;
+mod error;
 
 pub use self::lexing::{ Tokens, ItemRef };
+pub use self::error::{ LexingError, LexingResult };
 
 #[derive(Copy, Clone)]
 pub struct Options;
From f11703ebf82dc75573b1be63f5253755123f799b Mon Sep 17 00:00:00 2001
From: Nerijus Arlauskas
Date: Sun, 6 Dec 2015 19:37:45 +0200
Subject: [PATCH 7/7] File headers.

---
 src/api/error/mod.rs    | 5 +++++
 src/api/error/syntax.rs | 5 +++++
 src/api/lexer/error.rs  | 5 +++++
 tests/lexer/mod.rs      | 5 +++++
 tests/lexer_tests.rs    | 5 +++++
 5 files changed, 25 insertions(+)

diff --git a/src/api/error/mod.rs b/src/api/error/mod.rs
index ea7824a..9128898 100644
--- a/src/api/error/mod.rs
+++ b/src/api/error/mod.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 mod syntax;
 
 pub use self::syntax::{ SyntaxError, SyntaxErrorCode, UnexpectedToken };

diff --git a/src/api/error/syntax.rs b/src/api/error/syntax.rs
index aa96b4f..348073b 100644
--- a/src/api/error/syntax.rs
+++ b/src/api/error/syntax.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 use api::Position;
 use api::tokens::{ Token };
 

diff --git a/src/api/lexer/error.rs b/src/api/lexer/error.rs
index a009930..727b794 100644
--- a/src/api/lexer/error.rs
+++ b/src/api/lexer/error.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 use std::io;
 use api::error::SyntaxError;
 use std::result;

diff --git a/tests/lexer/mod.rs b/tests/lexer/mod.rs
index 9205bb5..f78e8a5 100644
--- a/tests/lexer/mod.rs
+++ b/tests/lexer/mod.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 use twig::api::lexer::{ Lexer, Tokens, Options };
 use twig::api::tokens::TokenRef;
 

diff --git a/tests/lexer_tests.rs b/tests/lexer_tests.rs
index 39ed5f7..533214f 100644
--- a/tests/lexer_tests.rs
+++ b/tests/lexer_tests.rs
@@ -1,3 +1,8 @@
+// This file is part of rust-web/twig
+//
+// For the copyright and license information, please view the LICENSE
+// file that was distributed with this source code.
+
 extern crate twig;
 
 mod lexer;
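
Taken together, the series leaves the crate with a small public surface under twig::api: the lexer in twig::api::lexer and the token types in twig::api::tokens. An end-to-end sketch of the consumer view (note the stub lexer currently emits only the two hard-coded tokens from patch 4, regardless of input):

    extern crate twig;

    use twig::api::lexer::{ Lexer, Options };

    fn main() {
        // Build the lexer once per environment, then lex any number of templates.
        let lexer = Lexer::new(Options::default(), vec![]);
        for item in lexer.tokens("{% § %}") {
            match item {
                Ok(item) => println!("{:?} at {:?}", item.token, item.position),
                Err(error) => println!("error: {:?}", error),
            }
        }
    }
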