From 108297b966c98e1b001b76a671f9127af22e668f Mon Sep 17 00:00:00 2001
From: NotAFile
Date: Tue, 1 Feb 2022 23:24:07 +0100
Subject: [PATCH] cargo fmt

---
 src/parser/tokens.rs | 50 +++++++++++++++++++++++++++++++++++---------------
 1 file changed, 35 insertions(+), 15 deletions(-)

diff --git a/src/parser/tokens.rs b/src/parser/tokens.rs
index 51cbcec..29df0c1 100644
--- a/src/parser/tokens.rs
+++ b/src/parser/tokens.rs
@@ -1,16 +1,16 @@
 //! convert text into a token stream
 
-use std::io;
-use std::fmt;
 use super::{identifier, ws0, IResult, Span};
 use nom::{
     branch::alt,
     bytes::complete::tag,
-    character::complete::{digit1, anychar},
+    character::complete::{anychar, digit1},
     combinator::{consumed, map, recognize},
-    multi::many0,
     error::ParseError,
+    multi::many0,
 };
+use std::fmt;
+use std::io;
 
 pub struct Token<'a> {
     span: Span<'a>,
@@ -19,7 +19,13 @@
 
 impl fmt::Debug for Token<'_> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{:?} @{} {:?}", self.kind, self.span.location_offset(), self.span.fragment())?;
+        write!(
+            f,
+            "{:?} @{} {:?}",
+            self.kind,
+            self.span.location_offset(),
+            self.span.fragment()
+        )?;
         Ok(())
     }
 }
@@ -33,7 +39,7 @@ impl<'a> Token<'a> {
 pub fn pretty_tokens(mut w: impl io::Write, toks: &[Token]) -> io::Result<()> {
     for tok in toks {
         writeln!(w, "{:?}", tok)?;
-    };
+    }
     Ok(())
 }
 
@@ -75,8 +81,12 @@ pub struct TokenSpan<'a> {
 }
 
 impl<'a> TokenSpan<'a> {
-    pub fn new(rest: &'a [Token<'a>]) -> Self { Self { rest, pos: 0 } }
-    pub fn with_pos(rest: &'a [Token<'a>], pos: usize) -> Self { Self { rest, pos } }
+    pub fn new(rest: &'a [Token<'a>]) -> Self {
+        Self { rest, pos: 0 }
+    }
+    pub fn with_pos(rest: &'a [Token<'a>], pos: usize) -> Self {
+        Self { rest, pos }
+    }
 }
 
 impl nom::InputTake for TokenSpan<'_> {
@@ -86,7 +96,10 @@
 
     fn take_split(&self, count: usize) -> (Self, Self) {
         let (head, tail) = &self.rest.split_at(count);
-        (TokenSpan::with_pos(&head, self.pos), TokenSpan::with_pos(&tail, self.pos + count))
+        (
+            TokenSpan::with_pos(&head, self.pos),
+            TokenSpan::with_pos(&tail, self.pos + count),
+        )
     }
 }
 
@@ -97,17 +110,22 @@ impl nom_greedyerror::Position for TokenSpan<'_> {
 }
 
 /// combinator that matches a token kind
-pub fn token<'a, E>(kind: TokenKind) -> impl FnMut(TokenSpan<'a>) -> nom::IResult<TokenSpan<'a>, &'a Token<'a>, E>
-    where E: ParseError<TokenSpan<'a>>
+pub fn token<'a, E>(
+    kind: TokenKind,
+) -> impl FnMut(TokenSpan<'a>) -> nom::IResult<TokenSpan<'a>, &'a Token<'a>, E>
+where
+    E: ParseError<TokenSpan<'a>>,
 {
     move |input: TokenSpan| {
         let next = &input.rest[0];
         if next.kind == kind.clone() {
             let rest = TokenSpan::new(&input.rest[1..]);
             Ok((rest, next))
-        }
-        else {
-            Err(nom::Err::Error(E::from_error_kind(input, nom::error::ErrorKind::Tag)))
+        } else {
+            Err(nom::Err::Error(E::from_error_kind(
+                input,
+                nom::error::ErrorKind::Tag,
+            )))
         }
     }
 }
@@ -118,7 +136,9 @@ pub fn lex(input: Span) -> IResult<Span, Vec<Token>> {
         lex_literals,
         lex_braces,
         lex_punctuation,
-        map(recognize(anychar), |span| Token::new(span, TokenKind::Error)),
+        map(recognize(anychar), |span| {
+            Token::new(span, TokenKind::Error)
+        }),
     ))))(input)
 }
 