small cleanups

NotAFile 2022-04-05 13:34:48 +02:00
parent b848d5d3ea
commit 95b64a324a
3 changed files with 10 additions and 16 deletions

View File

@@ -1,10 +1,8 @@
 use std::cell::Cell;
 use std::collections::BTreeMap;
-use std::fmt::Write;
 use super::parser;
 use super::parser::block_expression::BlockExpr;
-use crate::rtlil;
 pub use callable::{Callable, CallableContext, CallableId};
 pub use types::{Type, TypeStruct, TypingContext};
@@ -18,9 +16,6 @@ use crate::builtin_cells::get_builtins;
 // pub use lowering::lower_module;
-/// lots of code is still not width-aware, this constant keeps track of that
-const TODO_WIDTH: u32 = 1;
 fn make_pubid(id: &str) -> String {
     "\\".to_owned() + id
 }
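Aside: make_pubid, visible in the context above, just prepends a backslash, which is how RTLIL marks public identifiers ("\name" is user-visible, "$name" is reserved for auto-generated names). A minimal standalone sketch of that behaviour, reusing the body shown in the hunk:

// Standalone sketch; same body as make_pubid above. "\\" in Rust source is a
// single backslash character, so the result is e.g. \counter, matching
// RTLIL's convention that public identifiers start with '\'.
fn make_pubid(id: &str) -> String {
    "\\".to_owned() + id
}

fn main() {
    assert_eq!(make_pubid("counter"), "\\counter");
}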
@@ -141,13 +136,11 @@ impl Context {
                 }
             }
             Expression::Literal(lit) => {
-                // TODO: make this a proper enum instead of having to match on everything
                 let data = match lit.kind {
                     parser::expression::LiteralKind::Num(num) => self.types.make_elabnum_u32(num),
                     parser::expression::LiteralKind::Const(width, val) => {
                         self.types.make_const_u32(width, val as u32)
                     }
-                    _ => unreachable!("non-literal token in literal?"),
                 };
                 typed_ir::Expr {
                     id,
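Aside on the removed wildcard arm: with the unreachable!() fallback gone, the match on lit.kind is presumably exhaustive, i.e. LiteralKind only carries the Num and Const payloads matched above, so a newly added variant becomes a compile-time error here rather than a run-time panic. A rough sketch under that assumption (the enum body and payload types below are illustrative, not the crate's actual definition):

// Illustrative only: variant payloads are assumptions, mirroring the two
// arms kept in the diff above.
enum LiteralKind {
    Num(u64),        // plain number, width decided later during elaboration
    Const(u64, u64), // (width, value) sized constant
}

fn describe(kind: &LiteralKind) -> String {
    // No `_` arm: the compiler enforces exhaustiveness, so adding a new
    // LiteralKind variant forces this match to be updated.
    match kind {
        LiteralKind::Num(n) => format!("number {n}"),
        LiteralKind::Const(width, value) => format!("{width}'d{value}"),
    }
}

fn main() {
    println!("{}", describe(&LiteralKind::Const(8, 42))); // prints "8'd42"
}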

View File

@@ -1,8 +1,6 @@
-use std::collections::BTreeMap;
 use super::typed_ir;
 use super::typed_ir::ExprKind;
-use super::{make_pubid, CompileError, Context, TODO_WIDTH};
+use super::{make_pubid, CompileError, Context};
 use crate::rtlil;
 use crate::rtlil::RtlilWrite;

View File

@@ -7,7 +7,7 @@ use super::{
 };
 use nom::{
     branch::alt,
-    bytes::complete::{is_not, tag},
+    bytes::complete::{is_not, tag, take_until},
     character::complete::{anychar, digit1, line_ending},
     combinator::{consumed, map, recognize},
     error::ParseError,
@@ -90,7 +90,6 @@ pub enum TokenKind {
     Comment,
     // Error
     Error,
-    Eof,
 }

 #[derive(Debug, Copy, Clone)]
@@ -240,10 +239,14 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {

 fn lex_trivials(input: Span) -> IResult<Span, Token> {
     map(
-        consumed(alt((map(
-            tuple((tag("//"), is_not("\r\n"), line_ending)),
-            |_| TokenKind::Comment,
-        ),))),
+        consumed(alt((
+            map(tuple((tag("//"), is_not("\r\n"), line_ending)), |_| {
+                TokenKind::Comment
+            }),
+            map(tuple((tag("/*"), take_until("*/"), tag("*/"))), |_| {
+                TokenKind::Comment
+            }),
+        ))),
         |(span, kind)| Token::new(span, kind),
     )(input)
 }
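Aside on the reworked lex_trivials: the alt() now has two branches, so block comments lex to TokenKind::Comment alongside line comments. take_until("*/") (newly imported above) consumes everything up to the closing delimiter and fails if it never appears, so an unterminated /* comment would not match this branch. A standalone sketch of just the new branch, using plain &str input instead of the crate's Span type (an assumption made to keep the example self-contained):

use nom::{
    bytes::complete::{tag, take_until},
    combinator::recognize,
    sequence::tuple,
    IResult,
};

// Recognize a /* ... */ block comment and return the matched slice.
fn block_comment(input: &str) -> IResult<&str, &str> {
    recognize(tuple((tag("/*"), take_until("*/"), tag("*/"))))(input)
}

fn main() {
    let (rest, matched) = block_comment("/* fix widths */ reg foo;").unwrap();
    assert_eq!(matched, "/* fix widths */");
    assert_eq!(rest, " reg foo;");

    // No closing delimiter: take_until fails, so this input is not a comment.
    assert!(block_comment("/* never closed").is_err());
}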