small cleanups

parent b848d5d3ea
commit 95b64a324a
@@ -1,10 +1,8 @@
use std::cell::Cell;
use std::collections::BTreeMap;
use std::fmt::Write;

use super::parser;
use super::parser::block_expression::BlockExpr;
use crate::rtlil;
pub use callable::{Callable, CallableContext, CallableId};
pub use types::{Type, TypeStruct, TypingContext};
@@ -18,9 +16,6 @@ use crate::builtin_cells::get_builtins;

-// pub use lowering::lower_module;

/// lots of code is still not width-aware, this constant keeps track of that
const TODO_WIDTH: u32 = 1;

fn make_pubid(id: &str) -> String {
    "\\".to_owned() + id
}
@@ -141,13 +136,11 @@ impl Context {
            }
        }
        Expression::Literal(lit) => {
            // TODO: make this a proper enum instead of having to match on everything
            let data = match lit.kind {
                parser::expression::LiteralKind::Num(num) => self.types.make_elabnum_u32(num),
                parser::expression::LiteralKind::Const(width, val) => {
                    self.types.make_const_u32(width, val as u32)
                }
                _ => unreachable!("non-literal token in literal?"),
            };
            typed_ir::Expr {
                id,
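The `// TODO: make this a proper enum instead of having to match on everything` comment suggests replacing the raw match on `lit.kind` with a dedicated literal representation, so later stages never hit the `unreachable!()` arm. A minimal sketch of what that could look like; the names (`LiteralValue`, `width`, `value`) are hypothetical and not taken from this codebase:

// Hypothetical sketch only: a dedicated literal-value enum instead of
// matching on every token kind.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum LiteralValue {
    /// elaboration-time number with no fixed hardware width
    Num(u32),
    /// constant with an explicit bit width
    Const { width: u32, value: u32 },
}

fn describe(lit: LiteralValue) -> String {
    // exhaustive match, no catch-all arm needed
    match lit {
        LiteralValue::Num(n) => format!("elab num {n}"),
        LiteralValue::Const { width, value } => format!("{width}'d{value}"),
    }
}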
@@ -1,8 +1,6 @@
use std::collections::BTreeMap;

use super::typed_ir;
use super::typed_ir::ExprKind;
-use super::{make_pubid, CompileError, Context, TODO_WIDTH};
+use super::{make_pubid, CompileError, Context};
use crate::rtlil;
use crate::rtlil::RtlilWrite;
@@ -7,7 +7,7 @@ use super::{
};
use nom::{
    branch::alt,
-    bytes::complete::{is_not, tag},
+    bytes::complete::{is_not, tag, take_until},
    character::complete::{anychar, digit1, line_ending},
    combinator::{consumed, map, recognize},
    error::ParseError,
@@ -90,7 +90,6 @@ pub enum TokenKind {
    Comment,
-    // Error
    Error,
    Eof,
}

#[derive(Debug, Copy, Clone)]
@@ -240,10 +239,14 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {

fn lex_trivials(input: Span) -> IResult<Span, Token> {
    map(
-        consumed(alt((map(
-            tuple((tag("//"), is_not("\r\n"), line_ending)),
-            |_| TokenKind::Comment,
-        ),))),
+        consumed(alt((
+            map(tuple((tag("//"), is_not("\r\n"), line_ending)), |_| {
+                TokenKind::Comment
+            }),
+            map(tuple((tag("/*"), take_until("*/"), tag("*/"))), |_| {
+                TokenKind::Comment
+            }),
+        ))),
        |(span, kind)| Token::new(span, kind),
    )(input)
}
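The reshaped `alt` now accepts two comment forms: `//` line comments terminated by a line ending, and `/* ... */` block comments matched with the newly imported `take_until`. A minimal standalone sketch of the same pattern, assuming nom 7.x and plain `&str` input; the project's `Span`/`Token` types and the `consumed` wrapper are left out:

use nom::{
    branch::alt,
    bytes::complete::{is_not, tag, take_until},
    character::complete::line_ending,
    combinator::recognize,
    sequence::tuple,
    IResult,
};

/// Recognizes either a `//` line comment or a `/* ... */` block comment and
/// returns the matched text. As in the diff's version, `is_not("\r\n")`
/// requires at least one character, so `//` immediately followed by a
/// newline is not matched here.
fn comment(input: &str) -> IResult<&str, &str> {
    alt((
        recognize(tuple((tag("//"), is_not("\r\n"), line_ending))),
        recognize(tuple((tag("/*"), take_until("*/"), tag("*/")))),
    ))(input)
}

fn main() {
    assert_eq!(comment("// hi\nrest"), Ok(("rest", "// hi\n")));
    assert_eq!(comment("/* a\nb */ rest"), Ok((" rest", "/* a\nb */")));
}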