Compare commits


No commits in common. "86e3bf4d3222f92b0745262e3f1a7c1700885d82" and "b848d5d3ea0bffc8556c167f4a42a74d95abeefa" have entirely different histories.

10 changed files with 60 additions and 136 deletions

View File

@@ -1,3 +1,5 @@
+use crate::frontend::types::TypeStruct;
+use crate::frontend::Callable;
 use crate::rtlil;
 use crate::rtlil::SigSpec;

View File

@@ -1,8 +1,10 @@
 use std::cell::Cell;
 use std::collections::BTreeMap;
+use std::fmt::Write;
 use super::parser;
 use super::parser::block_expression::BlockExpr;
+use crate::rtlil;
 pub use callable::{Callable, CallableContext, CallableId};
 pub use types::{Type, TypeStruct, TypingContext};
@@ -16,6 +18,9 @@ use crate::builtin_cells::get_builtins;
 // pub use lowering::lower_module;
+/// lots of code is still not width-aware, this constant keeps track of that
+const TODO_WIDTH: u32 = 1;
 fn make_pubid(id: &str) -> String {
     "\\".to_owned() + id
 }
@@ -136,11 +141,13 @@ impl Context {
                 }
             }
             Expression::Literal(lit) => {
+                // TODO: make this a proper enum instead of having to match on everything
                 let data = match lit.kind {
                     parser::expression::LiteralKind::Num(num) => self.types.make_elabnum_u32(num),
                     parser::expression::LiteralKind::Const(width, val) => {
                         self.types.make_const_u32(width, val as u32)
                     }
+                    _ => unreachable!("non-literal token in literal?"),
                 };
                 typed_ir::Expr {
                     id,
@@ -219,11 +226,8 @@ impl Context {
                         typ: self.types.primitives.infer,
                     }
                 }
-                BlockExpr::Block(block) => {
-                    todo!("expression blocks not representable in typed ir yet")
-                }
+                BlockExpr::Block(_) => todo!(),
             },
-            Expression::StructInit(_) => todo!("structure initialization"),
         };
         Ok(t_expr)
     }
@@ -309,7 +313,6 @@ impl Context {
                 parser::ModuleItem::Comb(comb) => self.callable_from_block(comb)?,
                 parser::ModuleItem::Proc(_) => todo!("proc block"),
                 parser::ModuleItem::State(_) => todo!("state block"),
-                parser::ModuleItem::Struct(_) => todo!("struct block"),
             };
         }
         for item in module.items {
@@ -317,7 +320,6 @@ impl Context {
                 parser::ModuleItem::Comb(comb) => self.type_comb(comb)?,
                 parser::ModuleItem::Proc(_) => todo!("proc block"),
                 parser::ModuleItem::State(_) => todo!("state block"),
-                parser::ModuleItem::Struct(_) => todo!("struct block"),
             };
             typed_module.blocks.push(block);
         }
@@ -352,7 +354,7 @@ impl Context {
         let mut new_type = callee_def.ret_type;
-        if !genargs.is_empty() {
+        if genargs.len() != 0 {
             // need to infer generic arguments
             for inf_res in inferred_args {
                 match inf_res {
@@ -375,7 +377,7 @@ impl Context {
         let mut new_expr = expr.clone();
         new_expr.typ = new_type;
         new_expr.kind = typed_ir::ExprKind::Call(typed_ir::Call {
-            called: call.called,
+            called: call.called.clone(),
             args: args_typed,
             genargs,
         });
@@ -417,7 +419,7 @@ impl Context {
         let args = callsig
             .args
             .iter()
-            .map(|(_name, typ)| {
+            .map(|(name, typ)| {
                 let mut out = String::new();
                 self.types.pretty_type(&mut out, *typ)?;
                 Ok(out)
@@ -426,7 +428,7 @@ impl Context {
         let genargs = callsig
             .genargs
             .iter()
-            .map(|(_name, typ)| {
+            .map(|(name, typ)| {
                 let mut type_str = String::new();
                 self.types.pretty_type(&mut type_str, *typ)?;
                 Ok(type_str)
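
Both signature-printing hunks above format each argument with a fallible writer and then collect the per-element results. For reference, a minimal standalone sketch of that collect-into-Result idiom, with write! standing in for the crate's pretty_type and the helper name pretty_args being an illustrative assumption:

    // Sketch only: pretty_args and its argument shape are made up for illustration.
    use std::fmt::Write;

    fn pretty_args(args: &[(String, u32)]) -> Result<Vec<String>, std::fmt::Error> {
        args.iter()
            .map(|(_name, width)| {
                let mut out = String::new();
                // stand-in for `self.types.pretty_type(&mut out, *typ)?`
                write!(out, "Logic<{}>", width)?;
                Ok(out)
            })
            // an iterator of Result<String, E> collects into Result<Vec<String>, E>
            .collect()
    }

    fn main() -> Result<(), std::fmt::Error> {
        let args = vec![("a".to_string(), 1), ("b".to_string(), 8)];
        assert_eq!(pretty_args(&args)?, vec!["Logic<1>", "Logic<8>"]);
        Ok(())
    }

The ? inside the closure is why each element is wrapped in Ok(...): any formatting error short-circuits the whole listing.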

View File

@@ -1,6 +1,8 @@
+use std::collections::BTreeMap;
 use super::typed_ir;
 use super::typed_ir::ExprKind;
-use super::{make_pubid, CompileError, Context};
+use super::{make_pubid, CompileError, Context, TODO_WIDTH};
 use crate::rtlil;
 use crate::rtlil::RtlilWrite;

View File

@@ -25,8 +25,10 @@ enum TypeKind {
     ElabType(ElabKind),
     /// Signal/Wire of generic width
     Logic(ElabData),
-    /// User-defined ADT
-    Adt(Adt),
+    /// UInt of generic width
+    UInt(ElabData),
+    /// Callable
+    Callable(FnSig),
     /// A type that was not given and needs to be inferred
     Infer,
     /// A reference to a type variable as DeBruijn index
@@ -35,13 +37,9 @@ enum TypeKind {
 }
 #[derive(Debug, Clone)]
-pub enum Adt {
-    Struct(Struct),
-}
-#[derive(Debug, Clone)]
-pub struct Struct {
-    members: Vec<Type>,
+pub struct FnSig {
+    params: Vec<Type>,
+    ret: Type,
 }
 #[derive(Debug, Clone)]
@@ -184,7 +182,8 @@ impl TypingContext {
                     ElabValueData::Bytes(_) => None,
                 },
             },
-            TypeKind::Adt(_) => todo!("calculate struct width"),
+            TypeKind::UInt(_) => todo!(),
+            TypeKind::Callable(_) => None,
             TypeKind::Infer => None,
             TypeKind::TypeVar(_, _) => None,
         }
@@ -215,7 +214,8 @@ impl TypingContext {
                     None
                 }
             }
-            TypeKind::Adt(_) => todo!("can not parameterize struct"),
+            TypeKind::UInt(_) => todo!(),
+            TypeKind::Callable(_sig) => todo!("callable generic params"),
             // need to know what the type is to parameterize it
             TypeKind::Infer | &TypeKind::TypeVar(_, _) => None,
         }
@@ -246,9 +246,14 @@ impl TypingContext {
         match kind {
             TypeKind::ElabType(_) => todo!(),
             TypeKind::Logic(data) => {
-                matches!(data.value, ElabValue::Concrete(_))
-            }
-            TypeKind::Adt(_) => todo!("adt fully typed"),
+                if let ElabValue::Concrete(_) = data.value {
+                    true
+                } else {
+                    false
+                }
+            }
+            TypeKind::UInt(_) => todo!(),
+            TypeKind::Callable(_) => todo!(),
             TypeKind::Infer => false,
             TypeKind::TypeVar(dbi, _tvar) => {
                 // if the DeBruijn index is 0, there is no further information to gain
@@ -282,8 +287,9 @@ impl TypingContext {
                 self.pretty_value(&mut width, val)?;
                 write!(w, "Logic<{}>", width)
             }
-            TypeKind::Adt(_) => write!(w, "Struct{{...}}"),
             TypeKind::Infer => write!(w, "?"),
+            TypeKind::UInt(_) => todo!("print uint"),
+            TypeKind::Callable(_sig) => todo!("print callable"),
             TypeKind::TypeVar(_, tvar) => write!(w, "T{}", tvar),
         }
     }
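
The hunk at -246,9 +246,14 replaces the matches!(data.value, ElabValue::Concrete(_)) shorthand with an explicit if let ... { true } else { false }; the two forms are equivalent. A small self-contained sketch of that equivalence on a simplified stand-in enum (not the crate's actual ElabValue):

    // Stand-in enum for illustration only.
    #[derive(Debug)]
    enum ElabValue {
        Concrete(u32),
        Unknown,
    }

    // The macro form expands to a match that returns a bool.
    fn is_concrete_matches(v: &ElabValue) -> bool {
        matches!(v, ElabValue::Concrete(_))
    }

    // The expanded form that appears on the `+` side of the hunk.
    fn is_concrete_if_let(v: &ElabValue) -> bool {
        if let ElabValue::Concrete(_) = v {
            true
        } else {
            false
        }
    }

    fn main() {
        assert!(is_concrete_matches(&ElabValue::Concrete(3)));
        assert!(!is_concrete_if_let(&ElabValue::Unknown));
    }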

View File

@@ -20,12 +20,6 @@ pub struct StateVariant<'a> {
     pub params: Option<Vec<Token<'a>>>,
 }
-#[derive(Debug)]
-pub struct StructBlock<'a> {
-    pub name: Token<'a>,
-    pub members: Vec<NetDecl<'a>>,
-}
 fn state_variant(input: TokenSpan) -> IResult<TokenSpan, StateVariant> {
     map(
         tuple((
@@ -37,7 +31,7 @@ fn state_variant(input: TokenSpan) -> IResult<TokenSpan, StateVariant> {
             )),
         )),
         |(name, param)| StateVariant {
-            name,
+            name: name.clone(),
             params: param,
         },
     )(input)
@@ -59,20 +53,3 @@ pub fn state(input: TokenSpan) -> IResult<TokenSpan, StateBlock> {
         |(name, variants)| StateBlock { name, variants },
     )(input)
 }
-pub fn struct_def(input: TokenSpan) -> IResult<TokenSpan, StructBlock> {
-    map(
-        preceded(
-            token(tk::Struct),
-            cut(tuple((
-                token(tk::Ident),
-                delimited(
-                    token(tk::LBrace),
-                    separated_list0(token(tk::Comma), declaration),
-                    token(tk::RBrace),
-                ),
-            ))),
-        ),
-        |(name, members)| StructBlock { name, members },
-    )(input)
-}
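
The struct_def parser removed here follows the usual nom shape: a struct keyword, an identifier, then a brace-delimited, comma-separated list of member declarations. A rough standalone sketch of the same combinator shape over plain &str input (the real parser runs over the token stream; the member grammar and types below are simplified assumptions):

    use nom::{
        bytes::complete::tag,
        character::complete::{alpha1, multispace0, multispace1},
        combinator::map,
        multi::separated_list0,
        sequence::{delimited, preceded, separated_pair, tuple},
        IResult,
    };

    // Simplified stand-ins for the crate's StructBlock/NetDecl.
    #[derive(Debug)]
    struct StructBlock<'a> {
        name: &'a str,
        members: Vec<(&'a str, &'a str)>,
    }

    // `name : type`, both bare identifiers in this sketch.
    fn member(i: &str) -> IResult<&str, (&str, &str)> {
        separated_pair(
            preceded(multispace0, alpha1),
            delimited(multispace0, tag(":"), multispace0),
            alpha1,
        )(i)
    }

    fn struct_def(i: &str) -> IResult<&str, StructBlock> {
        map(
            preceded(
                tuple((tag("struct"), multispace1)),
                tuple((
                    alpha1,
                    delimited(
                        preceded(multispace0, tag("{")),
                        separated_list0(preceded(multispace0, tag(",")), member),
                        preceded(multispace0, tag("}")),
                    ),
                )),
            ),
            |(name, members)| StructBlock { name, members },
        )(i)
    }

    fn main() {
        let (rest, s) = struct_def("struct Foo { a: logic, b: logic }").unwrap();
        assert_eq!(s.name, "Foo");
        assert_eq!(s.members.len(), 2);
        assert_eq!(rest, "");
    }

The deleted version also wraps the body in cut(...), so a malformed struct after the struct keyword becomes a hard error instead of letting alt try other module items.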

View File

@@ -2,29 +2,22 @@ use nom::{
     branch::alt,
     combinator::map,
     error::context,
-    multi::{many0, separated_list1},
-    sequence::{delimited, separated_pair, terminated, tuple},
+    multi::{many1, separated_list1},
+    sequence::{delimited, separated_pair, tuple},
 };
 use crate::parser::{
-    assign_statement, expression,
     expression::{expression, Expression},
-    tokens::{token, Token, TokenKind as tk, TokenSpan},
-    IResult, Assign,
+    tokens::{token, TokenKind as tk, TokenSpan},
+    IResult, Span,
 };
-/// a block that is a single expression
-#[derive(Debug, Clone)]
-pub struct ExpressionBlock<'a> {
-    assignments: Vec<(Token<'a>, Expression<'a>)>,
-    value: Expression<'a>,
-}
+/// an expression that contains a block
 #[derive(Debug, Clone)]
 pub enum BlockExpr<'a> {
     IfElse(IfElseBlock),
     Match(MatchBlock<'a>),
-    Block(ExpressionBlock<'a>),
+    Block(Vec<BlockExpr<'a>>),
 }
 // TODO: postponed because annoying to implement
@@ -59,25 +52,6 @@ fn match_block(input: TokenSpan) -> IResult<TokenSpan, MatchBlock> {
     )(input)
 }
-fn expression_block(input: TokenSpan) -> IResult<TokenSpan, ExpressionBlock> {
-    map(
-        tuple((
-            many0(tuple((
-                delimited(token(tk::Let), token(tk::Ident), token(tk::EqAssign)),
-                terminated(expression, token(tk::Semicolon)),
-            ))),
-            expression,
-        )),
-        |(assignments, value)| ExpressionBlock { assignments, value },
-    )(input)
-}
 pub fn block_expr(input: TokenSpan) -> IResult<TokenSpan, BlockExpr> {
-    alt((
-        map(match_block, BlockExpr::Match),
-        map(
-            delimited(token(tk::LBrace), expression_block, token(tk::RBrace)),
-            BlockExpr::Block,
-        ),
-    ))(input)
+    alt((map(match_block, BlockExpr::Match),))(input)
 }
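
The removed expression_block accepts zero or more let <ident> = <expr>; bindings followed by a final result expression, and block_expr wrapped it in LBrace/RBrace, so a block like { let a = b; a } was parseable. A compact sketch of that grammar shape over &str, with bare identifiers standing in for full expressions (an assumption made to keep the sketch self-contained; the real parser consumes tokens):

    use nom::{
        bytes::complete::tag,
        character::complete::{alpha1, multispace0},
        multi::many0,
        sequence::{delimited, preceded, terminated, tuple},
        IResult,
    };

    fn ident(i: &str) -> IResult<&str, &str> {
        preceded(multispace0, alpha1)(i)
    }

    // (bindings, value): zero or more `let x = y;` followed by one expression.
    fn expression_block(i: &str) -> IResult<&str, (Vec<(&str, &str)>, &str)> {
        tuple((
            many0(tuple((
                delimited(
                    preceded(multispace0, tag("let")),
                    ident,
                    preceded(multispace0, tag("=")),
                ),
                terminated(ident, preceded(multispace0, tag(";"))),
            ))),
            ident,
        ))(i)
    }

    fn main() {
        let (rest, (lets, value)) = expression_block("let a = b; a").unwrap();
        assert_eq!(lets, vec![("a", "b")]);
        assert_eq!(value, "a");
        assert_eq!(rest, "");
    }

In the deleted token-stream version, token(tk::Let), token(tk::EqAssign) and token(tk::Semicolon) play the roles of the literal tags here.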

View File

@@ -32,6 +32,7 @@ pub struct TypeName<'a> {
 pub struct NetDecl<'a> {
     pub name: Span<'a>,
     pub typ: TypeName<'a>,
+    pub value: Option<Span<'a>>,
 }
 #[derive(Debug)]
@@ -53,10 +54,14 @@ pub fn assign_statement(input: TokenSpan) -> IResult<TokenSpan, Assign> {
 // TODO: reallow assignments
 pub fn declaration(i: TokenSpan) -> IResult<TokenSpan, NetDecl> {
     map(
+        tuple((
         separated_pair(token(tk::Ident), token(tk::Colon), typename),
-        |(ident, typ)| NetDecl {
+            opt(preceded(token(tk::Assign), token(tk::Number))),
+        )),
+        |((ident, typ), _value)| NetDecl {
             name: ident.span(),
             typ,
+            value: None,
         },
     )(i)
 }
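
The added lines extend declaration with an optional = <number> initializer, although the parsed value is still dropped (value: None). A standalone sketch of that opt(preceded(...)) shape over &str (simplified grammar; the real parser matches tokens, and the helper below is an assumption for illustration):

    use nom::{
        bytes::complete::tag,
        character::complete::{alpha1, digit1, multispace0},
        combinator::opt,
        sequence::{delimited, preceded, separated_pair, tuple},
        IResult,
    };

    // `ident : type` optionally followed by `= <number>`.
    fn declaration(i: &str) -> IResult<&str, ((&str, &str), Option<&str>)> {
        tuple((
            separated_pair(
                preceded(multispace0, alpha1),
                delimited(multispace0, tag(":"), multispace0),
                alpha1,
            ),
            opt(preceded(
                delimited(multispace0, tag("="), multispace0),
                digit1,
            )),
        ))(i)
    }

    fn main() {
        let (_rest, ((name, typ), init)) = declaration("width: logic = 4").unwrap();
        assert_eq!((name, typ), ("width", "logic"));
        assert_eq!(init, Some("4"));
        let (_rest, (_decl, init)) = declaration("clk: logic").unwrap();
        assert_eq!(init, None);
    }

Because the initializer parser is wrapped in opt, a declaration without = still succeeds and simply yields None, which is what lets the closure ignore _value for now.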

View File

@@ -6,7 +6,7 @@ use nom::{
     branch::alt,
     combinator::{map, opt},
     multi::separated_list0,
-    sequence::{delimited, separated_pair, tuple},
+    sequence::{delimited, preceded, separated_pair, tuple},
 };
@@ -27,12 +27,6 @@ pub struct Call<'a> {
     pub args: Vec<Expression<'a>>,
 }
-#[derive(Debug, Clone)]
-pub struct StructInit<'a> {
-    pub name: Span<'a>,
-    pub args: Vec<(Token<'a>, Expression<'a>)>,
-}
 #[derive(Debug, Clone)]
 pub enum BinOpKind {
     And,
@@ -66,7 +60,6 @@ pub enum Expression<'a> {
     UnOp(Box<UnOp<'a>>),
     BinOp(Box<BinOp<'a>>),
     Call(Box<Call<'a>>),
-    StructInit(Box<StructInit<'a>>),
     BlockExpr(Box<BlockExpr<'a>>),
 }
@@ -74,9 +67,6 @@ pub enum Expression<'a> {
 fn atom(input: TokenSpan) -> IResult<TokenSpan, Expression> {
     alt((
         map(call_item, |call| Expression::Call(Box::new(call))),
-        map(struct_init_item, |sinit| {
-            Expression::StructInit(Box::new(sinit))
-        }),
         map(token(tk::Ident), |it| {
             Expression::Path(it.span().fragment())
         }),
@@ -156,26 +146,6 @@ pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
     )(input)
 }
-pub fn struct_init_item(input: TokenSpan) -> IResult<TokenSpan, StructInit> {
-    map(
-        tuple((
-            token(tk::Ident),
-            delimited(
-                token(tk::LBrace),
-                separated_list0(
-                    token(tk::Comma),
-                    separated_pair(token(tk::Ident), token(tk::Colon), expression),
-                ),
-                token(tk::RBrace),
-            ),
-        )),
-        |(name, args)| StructInit {
-            name: name.span(),
-            args,
-        },
-    )(input)
-}
 pub fn expression(input: TokenSpan) -> IResult<TokenSpan, Expression> {
     alt((
         bitop,
@@ -230,9 +200,4 @@ mod test {
     fn test_call() {
         fullexpr(TokenSpan::new(&tok("a()"))).unwrap();
     }
-    #[test]
-    fn test_struct_init() {
-        fullexpr(TokenSpan::new(&tok("a{}"))).unwrap();
-    }
 }

View File

@@ -5,7 +5,7 @@ use nom::{
 };
 use crate::parser::{
-    adt::{state, struct_def, StateBlock, StructBlock},
+    adt::{state, StateBlock},
     comb::{comb_block, CombBlock},
     declaration,
     proc::ProcBlock,
@@ -35,7 +35,6 @@ pub enum ModuleItem<'a> {
     Comb(CombBlock<'a>),
     Proc(ProcBlock<'a>),
     State(StateBlock<'a>),
-    Struct(StructBlock<'a>),
 }
 fn port_decl(i: TokenSpan) -> IResult<TokenSpan, PortDecl> {
@@ -53,7 +52,6 @@ pub fn module(input: TokenSpan) -> IResult<TokenSpan, Module> {
     map(
         many1(alt((
             map(state, ModuleItem::State),
-            map(struct_def, ModuleItem::Struct),
             map(comb_block, ModuleItem::Comb),
         ))),
         |items| Module { items },

View File

@@ -7,7 +7,7 @@ use super::{
 };
 use nom::{
     branch::alt,
-    bytes::complete::{is_not, tag, take_until},
+    bytes::complete::{is_not, tag},
     character::complete::{anychar, digit1, line_ending},
     combinator::{consumed, map, recognize},
     error::ParseError,
@@ -86,12 +86,11 @@ pub enum TokenKind {
     State,
     Proc,
     Comb,
-    Let,
-    Struct,
     // whitespace
     Comment,
     // Error
     Error,
+    Eof,
 }
@@ -234,8 +233,6 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {
             map(tag("proc"), |_| TokenKind::Proc),
             map(tag("comb"), |_| TokenKind::Comb),
             map(tag("state"), |_| TokenKind::State),
-            map(tag("let"), |_| TokenKind::Let),
-            map(tag("struct"), |_| TokenKind::Struct),
         ))),
         |(span, kind)| Token::new(span, kind),
     )(input)
@@ -243,14 +240,10 @@ fn lex_trivials(input: Span) -> IResult<Span, Token> {
 fn lex_trivials(input: Span) -> IResult<Span, Token> {
     map(
-        consumed(alt((
-            map(tuple((tag("//"), is_not("\r\n"), line_ending)), |_| {
-                TokenKind::Comment
-            }),
-            map(tuple((tag("/*"), take_until("*/"), tag("*/"))), |_| {
-                TokenKind::Comment
-            }),
-        ))),
+        consumed(alt((map(
+            tuple((tag("//"), is_not("\r\n"), line_ending)),
+            |_| TokenKind::Comment,
+        ),))),
         |(span, kind)| Token::new(span, kind),
     )(input)
 }
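
The deleted lex_trivials branch recognizes block comments: take_until("*/") consumes everything up to the first closing delimiter (so comments do not nest) and the final tag("*/") consumes the delimiter itself. A standalone sketch of that recognizer over &str, using recognize in place of the crate's consumed(...)-plus-Token wrapping:

    use nom::{
        bytes::complete::{tag, take_until},
        combinator::recognize,
        sequence::tuple,
        IResult,
    };

    // Matches `/* ... */` and returns the full comment text.
    fn block_comment(i: &str) -> IResult<&str, &str> {
        recognize(tuple((tag("/*"), take_until("*/"), tag("*/"))))(i)
    }

    fn main() {
        let (rest, c) = block_comment("/* width TODO */ comb").unwrap();
        assert_eq!(c, "/* width TODO */");
        assert_eq!(rest, " comb");
    }

recognize returns the slice spanned by the inner parsers, much as consumed does, except consumed also hands back the inner parser's output so the lexer can pair the span with a TokenKind.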