From 95b64a324ad61c59356de49c3ca72521b85dd97e Mon Sep 17 00:00:00 2001
From: NotAFile
Date: Tue, 5 Apr 2022 13:34:48 +0200
Subject: [PATCH] small cleanups

---
 src/frontend.rs          |  7 -------
 src/frontend/lowering.rs |  4 +---
 src/parser/tokens.rs     | 15 +++++++++------
 3 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/src/frontend.rs b/src/frontend.rs
index f9bc00e..ad7bcab 100644
--- a/src/frontend.rs
+++ b/src/frontend.rs
@@ -1,10 +1,8 @@
 use std::cell::Cell;
 use std::collections::BTreeMap;
-use std::fmt::Write;
 
 use super::parser;
 use super::parser::block_expression::BlockExpr;
-use crate::rtlil;
 
 pub use callable::{Callable, CallableContext, CallableId};
 pub use types::{Type, TypeStruct, TypingContext};
@@ -18,9 +16,6 @@ use crate::builtin_cells::get_builtins;
 
 // pub use lowering::lower_module;
 
-/// lots of code is still not width-aware, this constant keeps track of that
-const TODO_WIDTH: u32 = 1;
-
 fn make_pubid(id: &str) -> String {
     "\\".to_owned() + id
 }
@@ -141,13 +136,11 @@ impl Context {
                 }
             }
             Expression::Literal(lit) => {
-                // TODO: make this a proper enum instead of having to match on everything
                 let data = match lit.kind {
                     parser::expression::LiteralKind::Num(num) => self.types.make_elabnum_u32(num),
                     parser::expression::LiteralKind::Const(width, val) => {
                         self.types.make_const_u32(width, val as u32)
                     }
-                    _ => unreachable!("non-literal token in literal?"),
                 };
                 typed_ir::Expr {
                     id,
diff --git a/src/frontend/lowering.rs b/src/frontend/lowering.rs
index 004464a..c8546f7 100644
--- a/src/frontend/lowering.rs
+++ b/src/frontend/lowering.rs
@@ -1,8 +1,6 @@
-use std::collections::BTreeMap;
-
 use super::typed_ir;
 use super::typed_ir::ExprKind;
-use super::{make_pubid, CompileError, Context, TODO_WIDTH};
+use super::{make_pubid, CompileError, Context};
 use crate::rtlil;
 use crate::rtlil::RtlilWrite;
 
diff --git a/src/parser/tokens.rs b/src/parser/tokens.rs
index 3828e1a..78c8f9a 100644
--- a/src/parser/tokens.rs
+++ b/src/parser/tokens.rs
@@ -7,7 +7,7 @@ use super::{
 };
 use nom::{
     branch::alt,
-    bytes::complete::{is_not, tag},
+    bytes::complete::{is_not, tag, take_until},
     character::complete::{anychar, digit1, line_ending},
     combinator::{consumed, map, recognize},
     error::ParseError,
@@ -90,7 +90,6 @@ pub enum TokenKind {
     Comment,
     // Error
     Error,
-    Eof,
 }
 
 #[derive(Debug, Copy, Clone)]
@@ -240,10 +239,14 @@ fn lex_keywords(input: Span) -> IResult {
 
 fn lex_trivials(input: Span) -> IResult {
     map(
-        consumed(alt((map(
-            tuple((tag("//"), is_not("\r\n"), line_ending)),
-            |_| TokenKind::Comment,
-        ),))),
+        consumed(alt((
+            map(tuple((tag("//"), is_not("\r\n"), line_ending)), |_| {
+                TokenKind::Comment
+            }),
+            map(tuple((tag("/*"), take_until("*/"), tag("*/"))), |_| {
+                TokenKind::Comment
+            }),
+        ))),
         |(span, kind)| Token::new(span, kind),
     )(input)
 }
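Note: a minimal standalone sketch of the comment lexing that lex_trivials now
performs, assuming nom 7 and plain &str input rather than the crate's Span and
Token types; the `comment` helper and the asserts below are illustrative only,
not code from this repository.

    use nom::{
        branch::alt,
        bytes::complete::{is_not, tag, take_until},
        character::complete::line_ending,
        combinator::recognize,
        sequence::tuple,
        IResult,
    };

    /// Recognizes either a `// ...` line comment (up to and including the
    /// line ending) or a non-nesting `/* ... */` block comment, mirroring
    /// the two branches inside `consumed(alt((...)))` in the patched
    /// lex_trivials.
    fn comment(input: &str) -> IResult<&str, &str> {
        recognize(alt((
            // line comment: `//`, at least one non-newline char, a newline
            tuple((tag("//"), is_not("\r\n"), line_ending)),
            // block comment: `/*`, everything up to the first `*/`, `*/`
            tuple((tag("/*"), take_until("*/"), tag("*/"))),
        )))(input)
    }

    fn main() {
        assert_eq!(comment("// hi\nrest"), Ok(("rest", "// hi\n")));
        assert_eq!(comment("/* hi */ rest"), Ok((" rest", "/* hi */")));
    }

As in the patch, block comments do not nest: take_until("*/") stops at the
first closing delimiter.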