add comments to lexer
This commit is contained in:
parent 0307d0537c
commit b848d5d3ea
@@ -41,7 +41,7 @@ fn main() {
         .expect("error reading file");
     let input: &str = input.as_str();
     let input = parser::Span::new(input);
-    let lexed = parser::tokens::lex(input).unwrap();
+    let lexed = parser::tokens::lex(input).expect("failed to lex");
     let tokens = parser::tokens::TokenSpan::new(&lexed.1);
     let parsed = parser::parse(tokens);
     match parsed {
@@ -61,6 +61,7 @@ fn main() {
             let mut frontendcontext = crate::frontend::Context::new();
             let typed = frontendcontext.type_module(res.1);
             if let Ok(module) = typed {
+                let mut typed_modules = vec![];
                 for block in module.blocks {
                     if opt.debug {
                         let mut pretty_block = String::new();
@@ -77,9 +78,10 @@ fn main() {
                             .unwrap();
                         println!("{}", &pretty_block);
                     }
+                    typed_modules.push(typed_inferred);
                 }
+                let typed_inferred = typed_modules.first().unwrap();
                 // TODO: be able to determine modules that are fully parameterized
                 /*
                 let lowered = frontend::lowering::lower_block(&mut frontendcontext, typed_inferred);
                 match lowered {
                     Ok(res) => {
@@ -91,7 +93,6 @@ fn main() {
                     }
                     Err(err) => eprintln!("{:#?}", err),
                 }
                 */
             } else {
                 eprintln!("{:#?}", typed);
             }
@@ -87,7 +87,7 @@ impl InputPos for Span<'_> {
 }

 fn tokspan_to_range(input: TokenSpan) -> std::ops::Range<usize> {
-    let first = input.first().unwrap().span().position();
+    let first = input.first().expect("eof in token").span().position();
     let last_span = input.last().unwrap().span();
     let last = last_span.position() + last_span.len();
     first..last
@@ -7,11 +7,12 @@ use super::{
 };
 use nom::{
     branch::alt,
-    bytes::complete::tag,
-    character::complete::{anychar, digit1},
+    bytes::complete::{is_not, tag},
+    character::complete::{anychar, digit1, line_ending},
     combinator::{consumed, map, recognize},
     error::ParseError,
     multi::many0,
     sequence::tuple,
     InputTake,
 };
 use std::fmt;
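
For reference, the two newly imported nom combinators behave as follows: `is_not` consumes one or more characters that are not in the given set, and `line_ending` matches a single "\n" or "\r\n". A minimal standalone sketch over plain &str, assuming a nom 7-style API (not part of this crate):

    use nom::bytes::complete::is_not;
    use nom::character::complete::line_ending;
    use nom::IResult;

    fn main() {
        // `is_not("\r\n")` takes 1+ characters up to (but not including) a CR/LF.
        let r: IResult<&str, &str> = is_not("\r\n")("body\nrest");
        assert_eq!(r, Ok(("\nrest", "body")));
        // `line_ending` then consumes the "\n" or "\r\n" itself.
        let r: IResult<&str, &str> = line_ending("\nrest");
        assert_eq!(r, Ok(("rest", "\n")));
    }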
@@ -85,8 +86,11 @@ pub enum TokenKind {
     State,
     Proc,
     Comb,
+    // whitespace
+    Comment,
+    // Error
     Error,
     Eof,
 }

 #[derive(Debug, Copy, Clone)]
@@ -147,6 +151,11 @@ pub fn token<'a>(kind: TokenKind) -> impl FnMut(TokenSpan<'a>) -> IResult<TokenS
             nom::error::ErrorKind::Eof,
         )));
     };
+    // TODO: HACKS HACKS HACKS EWW
+    if next.kind == TokenKind::Comment {
+        let (_, tail) = input.take_split(1);
+        return token(kind)(tail);
+    }
     if next.kind == kind {
         let rest = TokenSpan::with_pos(&input.rest[1..], input.pos + 1);
         Ok((rest, next))
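
The new branch above makes every token matcher transparent to comments: when the next token is a Comment, `token` drops it with `take_split(1)` and retries the same kind on the tail. A simplified sketch of that skip-trivia idea, using a hypothetical `Kind` enum rather than the crate's `TokenSpan` type:

    #[derive(Debug, Clone, Copy, PartialEq)]
    enum Kind { Comment, Ident }

    // Skip leading trivia, then yield the first significant token.
    fn next_non_trivia(tokens: &[Kind]) -> Option<(&[Kind], Kind)> {
        match tokens.split_first() {
            Some((Kind::Comment, rest)) => next_non_trivia(rest), // drop trivia, retry
            Some((&k, rest)) => Some((rest, k)),
            None => None,
        }
    }

    fn main() {
        let toks = [Kind::Comment, Kind::Comment, Kind::Ident];
        assert_eq!(next_non_trivia(&toks), Some((&toks[3..], Kind::Ident)));
    }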
@@ -159,6 +168,7 @@ pub fn token<'a>(kind: TokenKind) -> impl FnMut(TokenSpan<'a>) -> IResult<TokenS
 pub fn lex(input: Span) -> IResult<Span, Vec<Token>> {
     many0(ws0(alt((
         lex_keywords,
+        lex_trivials,
         lex_literals,
         lex_braces,
         lex_punctuation,
@@ -227,3 +237,13 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {
         |(span, kind)| Token::new(span, kind),
     )(input)
 }
+
+fn lex_trivials(input: Span) -> IResult<Span, Token> {
+    map(
+        consumed(alt((map(
+            tuple((tag("//"), is_not("\r\n"), line_ending)),
+            |_| TokenKind::Comment,
+        ),))),
+        |(span, kind)| Token::new(span, kind),
+    )(input)
+}
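
Read inside-out, `lex_trivials` recognizes "//", then one or more non-newline characters, then the line ending, and `consumed` pairs the matched span with the mapped `TokenKind::Comment`. A standalone sketch of the same rule over plain &str, assuming nom 7-style combinators (`comment` is an illustrative name, not a function from this crate). Note that because `is_not` requires at least one character and the rule demands a trailing line ending, an empty "//" comment or a comment on the file's last line would not match this pattern:

    use nom::{
        bytes::complete::{is_not, tag},
        character::complete::line_ending,
        combinator::recognize,
        sequence::tuple,
        IResult,
    };

    // "//", then 1+ non-newline characters, then the terminating newline.
    fn comment(input: &str) -> IResult<&str, &str> {
        recognize(tuple((tag("//"), is_not("\r\n"), line_ending)))(input)
    }

    fn main() {
        assert_eq!(comment("// hi\nlet x"), Ok(("let x", "// hi\n")));
        assert!(comment("//\n").is_err()); // empty comment: `is_not` needs 1+ chars
        assert!(comment("// eof").is_err()); // no trailing newline at end of input
    }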