Compare commits

...

6 Commits

Author SHA1 Message Date
NotAFile 86e3bf4d32 add adts to type system 2022-04-05 15:11:26 +02:00
NotAFile c0be718bbc parse struct initialization 2022-04-05 14:52:56 +02:00
NotAFile b6c6a23ca5 parse struct blocks 2022-04-05 14:40:47 +02:00
NotAFile 2dd99ae641 add parsing for expression blocks 2022-04-05 14:14:49 +02:00
NotAFile da0d1dd6d8 clippy fix 2022-04-05 13:37:53 +02:00
NotAFile 95b64a324a small cleanups 2022-04-05 13:34:48 +02:00
10 changed files with 137 additions and 61 deletions

View File

@@ -1,5 +1,3 @@
-use crate::frontend::types::TypeStruct;
-use crate::frontend::Callable;
 use crate::rtlil;
 use crate::rtlil::SigSpec;

View File

@@ -1,10 +1,8 @@
 use std::cell::Cell;
 use std::collections::BTreeMap;
-use std::fmt::Write;

 use super::parser;
 use super::parser::block_expression::BlockExpr;
-use crate::rtlil;

 pub use callable::{Callable, CallableContext, CallableId};
 pub use types::{Type, TypeStruct, TypingContext};
@@ -18,9 +16,6 @@ use crate::builtin_cells::get_builtins;
 // pub use lowering::lower_module;

-/// lots of code is still not width-aware, this constant keeps track of that
-const TODO_WIDTH: u32 = 1;
-
 fn make_pubid(id: &str) -> String {
     "\\".to_owned() + id
 }
@@ -141,13 +136,11 @@ impl Context {
                 }
             }
             Expression::Literal(lit) => {
-                // TODO: make this a proper enum instead of having to match on everything
                 let data = match lit.kind {
                     parser::expression::LiteralKind::Num(num) => self.types.make_elabnum_u32(num),
                     parser::expression::LiteralKind::Const(width, val) => {
                         self.types.make_const_u32(width, val as u32)
                     }
-                    _ => unreachable!("non-literal token in literal?"),
                 };
                 typed_ir::Expr {
                     id,
@@ -226,8 +219,11 @@ impl Context {
                         typ: self.types.primitives.infer,
                     }
                 }
-                BlockExpr::Block(_) => todo!(),
+                BlockExpr::Block(block) => {
+                    todo!("expression blocks not representable in typed ir yet")
+                }
             },
+            Expression::StructInit(_) => todo!("structure initialization"),
         };
         Ok(t_expr)
     }
@@ -313,6 +309,7 @@ impl Context {
                 parser::ModuleItem::Comb(comb) => self.callable_from_block(comb)?,
                 parser::ModuleItem::Proc(_) => todo!("proc block"),
                 parser::ModuleItem::State(_) => todo!("state block"),
+                parser::ModuleItem::Struct(_) => todo!("struct block"),
             };
         }
         for item in module.items {
@@ -320,6 +317,7 @@ impl Context {
                 parser::ModuleItem::Comb(comb) => self.type_comb(comb)?,
                 parser::ModuleItem::Proc(_) => todo!("proc block"),
                 parser::ModuleItem::State(_) => todo!("state block"),
+                parser::ModuleItem::Struct(_) => todo!("struct block"),
             };
             typed_module.blocks.push(block);
         }
@@ -354,7 +352,7 @@ impl Context {
         let mut new_type = callee_def.ret_type;
-        if genargs.len() != 0 {
+        if !genargs.is_empty() {
             // need to infer generic arguments
             for inf_res in inferred_args {
                 match inf_res {
@@ -377,7 +375,7 @@ impl Context {
         let mut new_expr = expr.clone();
         new_expr.typ = new_type;
         new_expr.kind = typed_ir::ExprKind::Call(typed_ir::Call {
-            called: call.called.clone(),
+            called: call.called,
             args: args_typed,
             genargs,
         });
@@ -419,7 +417,7 @@ impl Context {
         let args = callsig
             .args
             .iter()
-            .map(|(name, typ)| {
+            .map(|(_name, typ)| {
                 let mut out = String::new();
                 self.types.pretty_type(&mut out, *typ)?;
                 Ok(out)
@@ -428,7 +426,7 @@ impl Context {
         let genargs = callsig
             .genargs
             .iter()
-            .map(|(name, typ)| {
+            .map(|(_name, typ)| {
                 let mut type_str = String::new();
                 self.types.pretty_type(&mut type_str, *typ)?;
                 Ok(type_str)

View File

@@ -1,8 +1,6 @@
-use std::collections::BTreeMap;
-
 use super::typed_ir;
 use super::typed_ir::ExprKind;
-use super::{make_pubid, CompileError, Context, TODO_WIDTH};
+use super::{make_pubid, CompileError, Context};
 use crate::rtlil;
 use crate::rtlil::RtlilWrite;

View File

@@ -25,10 +25,8 @@ enum TypeKind {
     ElabType(ElabKind),
     /// Signal/Wire of generic width
     Logic(ElabData),
-    /// UInt of generic width
-    UInt(ElabData),
-    /// Callable
-    Callable(FnSig),
+    /// User-defined ADT
+    Adt(Adt),
     /// A type that was not given and needs to be inferred
     Infer,
     /// A reference to a type variable as DeBruijn index
@@ -37,9 +35,13 @@
 }

 #[derive(Debug, Clone)]
-pub struct FnSig {
-    params: Vec<Type>,
-    ret: Type,
+pub enum Adt {
+    Struct(Struct),
+}
+
+#[derive(Debug, Clone)]
+pub struct Struct {
+    members: Vec<Type>,
 }

 #[derive(Debug, Clone)]
@@ -182,8 +184,7 @@ impl TypingContext {
                     ElabValueData::Bytes(_) => None,
                 },
             },
-            TypeKind::UInt(_) => todo!(),
-            TypeKind::Callable(_) => None,
+            TypeKind::Adt(_) => todo!("calculate struct width"),
             TypeKind::Infer => None,
             TypeKind::TypeVar(_, _) => None,
         }
@@ -214,8 +215,7 @@ impl TypingContext {
                     None
                 }
             }
-            TypeKind::UInt(_) => todo!(),
-            TypeKind::Callable(_sig) => todo!("callable generic params"),
+            TypeKind::Adt(_) => todo!("can not parameterize struct"),
             // need to know what the type is to parameterize it
             TypeKind::Infer | &TypeKind::TypeVar(_, _) => None,
         }
@@ -246,14 +246,9 @@ impl TypingContext {
         match kind {
             TypeKind::ElabType(_) => todo!(),
             TypeKind::Logic(data) => {
-                if let ElabValue::Concrete(_) = data.value {
-                    true
-                } else {
-                    false
-                }
+                matches!(data.value, ElabValue::Concrete(_))
             }
-            TypeKind::UInt(_) => todo!(),
-            TypeKind::Callable(_) => todo!(),
+            TypeKind::Adt(_) => todo!("adt fully typed"),
             TypeKind::Infer => false,
             TypeKind::TypeVar(dbi, _tvar) => {
                 // if the DeBruijn index is 0, there is no further information to gain
@@ -287,9 +282,8 @@ impl TypingContext {
                 self.pretty_value(&mut width, val)?;
                 write!(w, "Logic<{}>", width)
             }
+            TypeKind::Adt(_) => write!(w, "Struct{{...}}"),
             TypeKind::Infer => write!(w, "?"),
-            TypeKind::UInt(_) => todo!("print uint"),
-            TypeKind::Callable(_sig) => todo!("print callable"),
             TypeKind::TypeVar(_, tvar) => write!(w, "T{}", tvar),
         }
     }

View File

@@ -20,6 +20,12 @@ pub struct StateVariant<'a> {
     pub params: Option<Vec<Token<'a>>>,
 }

+#[derive(Debug)]
+pub struct StructBlock<'a> {
+    pub name: Token<'a>,
+    pub members: Vec<NetDecl<'a>>,
+}
+
 fn state_variant(input: TokenSpan) -> IResult<TokenSpan, StateVariant> {
     map(
         tuple((
@@ -31,7 +37,7 @@ fn state_variant(input: TokenSpan) -> IResult<TokenSpan, StateVariant> {
             )),
         )),
         |(name, param)| StateVariant {
-            name: name.clone(),
+            name,
             params: param,
         },
     )(input)
@@ -53,3 +59,20 @@ pub fn state(input: TokenSpan) -> IResult<TokenSpan, StateBlock> {
         |(name, variants)| StateBlock { name, variants },
     )(input)
 }
+
+pub fn struct_def(input: TokenSpan) -> IResult<TokenSpan, StructBlock> {
+    map(
+        preceded(
+            token(tk::Struct),
+            cut(tuple((
+                token(tk::Ident),
+                delimited(
+                    token(tk::LBrace),
+                    separated_list0(token(tk::Comma), declaration),
+                    token(tk::RBrace),
+                ),
+            ))),
+        ),
+        |(name, members)| StructBlock { name, members },
+    )(input)
+}
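Aside (not part of this diff): struct_def commits via cut() once the struct keyword has been consumed, so a malformed struct body becomes a hard parse error instead of silently backtracking to another module item. A minimal standalone sketch of the same shape, written against &str with plain nom combinators instead of this crate's token stream (the identifier and type parsers are stand-ins):

use nom::{
    bytes::complete::tag,
    character::complete::alpha1,
    combinator::cut,
    multi::separated_list0,
    sequence::{delimited, preceded, separated_pair, tuple},
    IResult,
};

// `struct <name>{<field>:<type>,...}` -- whitespace is omitted here because the
// real parser consumes an already-lexed token stream.
fn struct_def_sketch(input: &str) -> IResult<&str, (&str, Vec<(&str, &str)>)> {
    preceded(
        tag("struct "),
        cut(tuple((
            alpha1,
            delimited(
                tag("{"),
                separated_list0(tag(","), separated_pair(alpha1, tag(":"), alpha1)),
                tag("}"),
            ),
        ))),
    )(input)
}

fn main() {
    let (_rest, (name, members)) = struct_def_sketch("struct pair{first:logic,second:logic}").unwrap();
    assert_eq!(name, "pair");
    assert_eq!(members, vec![("first", "logic"), ("second", "logic")]);
}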

View File

@@ -2,22 +2,29 @@ use nom::{
     branch::alt,
     combinator::map,
     error::context,
-    multi::{many1, separated_list1},
-    sequence::{delimited, separated_pair, tuple},
+    multi::{many0, separated_list1},
+    sequence::{delimited, separated_pair, terminated, tuple},
 };

 use crate::parser::{
-    assign_statement, expression,
     expression::{expression, Expression},
-    tokens::{token, TokenKind as tk, TokenSpan},
-    Assign, IResult, Span,
+    tokens::{token, Token, TokenKind as tk, TokenSpan},
+    IResult,
 };

+/// a block that is a single expression
+#[derive(Debug, Clone)]
+pub struct ExpressionBlock<'a> {
+    assignments: Vec<(Token<'a>, Expression<'a>)>,
+    value: Expression<'a>,
+}
+
+/// an expression that contains a block
 #[derive(Debug, Clone)]
 pub enum BlockExpr<'a> {
     IfElse(IfElseBlock),
     Match(MatchBlock<'a>),
-    Block(Vec<BlockExpr<'a>>),
+    Block(ExpressionBlock<'a>),
 }

 // TODO: postponed because annoying to implement
@@ -52,6 +59,25 @@ fn match_block(input: TokenSpan) -> IResult<TokenSpan, MatchBlock> {
     )(input)
 }

-pub fn block_expr(input: TokenSpan) -> IResult<TokenSpan, BlockExpr> {
-    alt((map(match_block, BlockExpr::Match),))(input)
+fn expression_block(input: TokenSpan) -> IResult<TokenSpan, ExpressionBlock> {
+    map(
+        tuple((
+            many0(tuple((
+                delimited(token(tk::Let), token(tk::Ident), token(tk::EqAssign)),
+                terminated(expression, token(tk::Semicolon)),
+            ))),
+            expression,
+        )),
+        |(assignments, value)| ExpressionBlock { assignments, value },
+    )(input)
+}
+
+pub fn block_expr(input: TokenSpan) -> IResult<TokenSpan, BlockExpr> {
+    alt((
+        map(match_block, BlockExpr::Match),
+        map(
+            delimited(token(tk::LBrace), expression_block, token(tk::RBrace)),
+            BlockExpr::Block,
+        ),
+    ))(input)
 }
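Aside (not part of this diff): expression_block is zero or more let bindings followed by a trailing value expression; block_expr then wraps that sequence in braces. A standalone sketch of the same structure over &str (alpha1 stands in for the crate's Ident token and full expression parser, and the brace delimiters handled by block_expr are left out):

use nom::{
    bytes::complete::tag,
    character::complete::alpha1,
    multi::many0,
    sequence::{delimited, terminated, tuple},
    IResult,
};

// `let <name>=<expr>; ... <value>`
fn expression_block_sketch(input: &str) -> IResult<&str, (Vec<(&str, &str)>, &str)> {
    tuple((
        many0(tuple((
            delimited(tag("let "), alpha1, tag("=")),
            terminated(alpha1, tag(";")),
        ))),
        alpha1,
    ))(input)
}

fn main() {
    let (_rest, (bindings, value)) = expression_block_sketch("let a=b;let c=d;c").unwrap();
    assert_eq!(bindings, vec![("a", "b"), ("c", "d")]);
    assert_eq!(value, "c");
}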

View File

@@ -32,7 +32,6 @@ pub struct TypeName<'a> {
 pub struct NetDecl<'a> {
     pub name: Span<'a>,
     pub typ: TypeName<'a>,
-    pub value: Option<Span<'a>>,
 }

 #[derive(Debug)]
@@ -54,14 +53,10 @@ pub fn assign_statement(input: TokenSpan) -> IResult<TokenSpan, Assign> {
+// TODO: reallow assignments
 pub fn declaration(i: TokenSpan) -> IResult<TokenSpan, NetDecl> {
     map(
-        tuple((
-            separated_pair(token(tk::Ident), token(tk::Colon), typename),
-            opt(preceded(token(tk::Assign), token(tk::Number))),
-        )),
-        |((ident, typ), _value)| NetDecl {
+        separated_pair(token(tk::Ident), token(tk::Colon), typename),
+        |(ident, typ)| NetDecl {
             name: ident.span(),
             typ,
-            value: None,
         },
     )(i)
 }

View File

@@ -6,7 +6,7 @@ use nom::{
     branch::alt,
     combinator::{map, opt},
     multi::separated_list0,
-    sequence::{delimited, preceded, separated_pair, tuple},
+    sequence::{delimited, separated_pair, tuple},
 };
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -27,6 +27,12 @@ pub struct Call<'a> {
     pub args: Vec<Expression<'a>>,
 }

+#[derive(Debug, Clone)]
+pub struct StructInit<'a> {
+    pub name: Span<'a>,
+    pub args: Vec<(Token<'a>, Expression<'a>)>,
+}
+
 #[derive(Debug, Clone)]
 pub enum BinOpKind {
     And,
@@ -60,6 +66,7 @@ pub enum Expression<'a> {
     UnOp(Box<UnOp<'a>>),
     BinOp(Box<BinOp<'a>>),
     Call(Box<Call<'a>>),
+    StructInit(Box<StructInit<'a>>),
     BlockExpr(Box<BlockExpr<'a>>),
 }
@@ -67,6 +74,9 @@ pub enum Expression<'a> {
 fn atom(input: TokenSpan) -> IResult<TokenSpan, Expression> {
     alt((
         map(call_item, |call| Expression::Call(Box::new(call))),
+        map(struct_init_item, |sinit| {
+            Expression::StructInit(Box::new(sinit))
+        }),
         map(token(tk::Ident), |it| {
             Expression::Path(it.span().fragment())
         }),
@@ -146,6 +156,26 @@ pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
     )(input)
 }

+pub fn struct_init_item(input: TokenSpan) -> IResult<TokenSpan, StructInit> {
+    map(
+        tuple((
+            token(tk::Ident),
+            delimited(
+                token(tk::LBrace),
+                separated_list0(
+                    token(tk::Comma),
+                    separated_pair(token(tk::Ident), token(tk::Colon), expression),
+                ),
+                token(tk::RBrace),
+            ),
+        )),
+        |(name, args)| StructInit {
+            name: name.span(),
+            args,
+        },
+    )(input)
+}
+
 pub fn expression(input: TokenSpan) -> IResult<TokenSpan, Expression> {
     alt((
         bitop,
@@ -200,4 +230,9 @@ mod test {
     fn test_call() {
         fullexpr(TokenSpan::new(&tok("a()"))).unwrap();
     }
+
+    #[test]
+    fn test_struct_init() {
+        fullexpr(TokenSpan::new(&tok("a{}"))).unwrap();
+    }
 }
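Aside (not part of this diff): struct_init_item also accepts a non-empty member list of Ident ':' expression pairs, so a hypothetical companion test along these lines should pass as well, reusing the tok() helper from the existing tests:

    #[test]
    fn test_struct_init_members() {
        // hypothetical: non-empty initializer list, see struct_init_item above
        fullexpr(TokenSpan::new(&tok("a{x: 1, y: b}"))).unwrap();
    }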

View File

@@ -5,7 +5,7 @@ use nom::{
 };

 use crate::parser::{
-    adt::{state, StateBlock},
+    adt::{state, struct_def, StateBlock, StructBlock},
     comb::{comb_block, CombBlock},
     declaration,
     proc::ProcBlock,
@@ -35,6 +35,7 @@ pub enum ModuleItem<'a> {
     Comb(CombBlock<'a>),
     Proc(ProcBlock<'a>),
     State(StateBlock<'a>),
+    Struct(StructBlock<'a>),
 }

 fn port_decl(i: TokenSpan) -> IResult<TokenSpan, PortDecl> {
@@ -52,6 +53,7 @@ pub fn module(input: TokenSpan) -> IResult<TokenSpan, Module> {
     map(
         many1(alt((
             map(state, ModuleItem::State),
+            map(struct_def, ModuleItem::Struct),
             map(comb_block, ModuleItem::Comb),
         ))),
         |items| Module { items },

View File

@@ -7,7 +7,7 @@ use super::{
 };
 use nom::{
     branch::alt,
-    bytes::complete::{is_not, tag},
+    bytes::complete::{is_not, tag, take_until},
     character::complete::{anychar, digit1, line_ending},
     combinator::{consumed, map, recognize},
     error::ParseError,
@@ -86,11 +86,12 @@ pub enum TokenKind {
     State,
     Proc,
     Comb,
+    Let,
+    Struct,
     // whitespace
     Comment,
     // Error
     Error,
-    Eof,
 }

 #[derive(Debug, Copy, Clone)]
@@ -233,6 +234,8 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {
             map(tag("proc"), |_| TokenKind::Proc),
             map(tag("comb"), |_| TokenKind::Comb),
             map(tag("state"), |_| TokenKind::State),
+            map(tag("let"), |_| TokenKind::Let),
+            map(tag("struct"), |_| TokenKind::Struct),
         ))),
         |(span, kind)| Token::new(span, kind),
     )(input)
@@ -240,10 +243,14 @@ fn lex_keywords(input: Span) -> IResult<Span, Token> {
 fn lex_trivials(input: Span) -> IResult<Span, Token> {
     map(
-        consumed(alt((map(
-            tuple((tag("//"), is_not("\r\n"), line_ending)),
-            |_| TokenKind::Comment,
-        ),))),
+        consumed(alt((
+            map(tuple((tag("//"), is_not("\r\n"), line_ending)), |_| {
+                TokenKind::Comment
+            }),
+            map(tuple((tag("/*"), take_until("*/"), tag("*/"))), |_| {
+                TokenKind::Comment
+            }),
+        ))),
         |(span, kind)| Token::new(span, kind),
     )(input)
 }
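Aside (not part of this diff): the second alternative added to lex_trivials recognizes block comments; take_until("*/") stops at the first closing marker, so nested block comments are not supported. A standalone sketch of that recognizer over &str:

use nom::{
    bytes::complete::{tag, take_until},
    sequence::tuple,
    IResult,
};

// Consumes `/* ... */` and returns the comment body.
fn block_comment(input: &str) -> IResult<&str, &str> {
    let (rest, (_, body, _)) = tuple((tag("/*"), take_until("*/"), tag("*/")))(input)?;
    Ok((rest, body))
}

fn main() {
    assert_eq!(block_comment("/* width fixme */x"), Ok(("x", " width fixme ")));
}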