Compare commits

..

3 Commits

Author SHA1 Message Date
NotAFile a69c6ab0b3 get examples semi-working again 2022-02-05 00:58:47 +01:00
NotAFile fd3a553072 clippy fix 2022-02-03 21:49:44 +01:00
NotAFile d36991a0b6 remove broken tests 2022-02-03 21:48:20 +01:00
11 changed files with 230 additions and 232 deletions

View File

@ -1,4 +1,4 @@
module clockdiv_2 ( proc clockdiv_2 (
clk: Logic, clk: Logic,
rst: Logic rst: Logic
) -> Logic ) -> Logic

View File

@ -1,7 +1,7 @@
module comparator ( comb comparator (
a: Logic<8>, a: Logic<8>,
b: Logic<8> b: Logic<8>
) -> Logic ) -> Logic
{ {
assign eq = ~reduce_or(a ^ b); ~reduce_or(a ^ b)
} }

View File

@ -1,8 +1,12 @@
module halfadd ( struct Add {
sum: Logic,
carry: Logic
}
comb halfadd (
a: Logic, a: Logic,
b: Logic b: Logic
) -> (Logic, Logic) ) -> Add
{ {
assign sum = a ^ b; Add{sum: a ^ b, carry: a & b}
assign carry = a & b;
} }

View File

@ -1,6 +1,6 @@
module reduce_or ( comb reduce_or (
a: Logic a: Logic
) )
-> Logic<1> { -> Logic<1> {

View File

@ -2,6 +2,7 @@ use std::collections::BTreeMap;
use crate::builtin_cells::get_builtins; use crate::builtin_cells::get_builtins;
use crate::parser; use crate::parser;
use crate::parser::expression::Expression;
use crate::rtlil; use crate::rtlil;
use crate::rtlil::RtlilWrite; use crate::rtlil::RtlilWrite;
pub use callable::Callable; pub use callable::Callable;
@ -169,70 +170,59 @@ fn lower_process(
Ok(()) Ok(())
} }
fn desugar_operation<'a>(op: parser::Operation<'a>) -> parser::Call<'a> { fn desugar_binop<'a>(op: parser::expression::BinOp<'a>) -> parser::expression::Call<'a> {
match op { let a = desugar_expression(op.a);
parser::Operation::And { a, b } => { let b = desugar_expression(op.b);
let a = desugar_expression(a); let op_func = match op.kind {
let b = desugar_expression(b); parser::expression::BinOpKind::And => "and",
parser::Call { parser::expression::BinOpKind::Or => "or",
name: "and".into(), parser::expression::BinOpKind::Xor => "xor",
};
parser::expression::Call {
name: op_func.into(),
args: vec![a, b], args: vec![a, b],
} }
}
parser::Operation::Or { a, b } => {
let a = desugar_expression(a);
let b = desugar_expression(b);
parser::Call {
name: "or".into(),
args: vec![a, b],
}
}
parser::Operation::Xor { a, b } => {
let a = desugar_expression(a);
let b = desugar_expression(b);
parser::Call {
name: "xor".into(),
args: vec![a, b],
}
}
parser::Operation::Not(a) => {
let a = desugar_expression(a);
parser::Call {
name: "not".into(),
args: vec![a],
}
}
}
} }
fn desugar_expression<'a>(expr: parser::Expression<'a>) -> parser::Expression<'a> { fn desugar_unop<'a>(op: parser::expression::UnOp<'a>) -> parser::expression::Call<'a> {
let a = desugar_expression(op.a);
let op_func = match op.kind {
parser::expression::UnOpKind::BitNot => "not",
parser::expression::UnOpKind::Not => todo!(),
};
parser::expression::Call {
name: op_func.into(),
args: vec![a],
}
}
fn desugar_expression<'a>(expr: Expression<'a>) -> Expression<'a> {
// TODO: allow ergonomic traversal of AST // TODO: allow ergonomic traversal of AST
match expr { match expr {
parser::Expression::Ident(_) => expr, Expression::Path(_) => expr,
parser::Expression::Literal(_) => expr, Expression::Literal(_) => expr,
parser::Expression::Call(mut call) => { Expression::Call(mut call) => {
let new_args = call.args.into_iter().map(desugar_expression).collect(); let new_args = call.args.into_iter().map(desugar_expression).collect();
call.args = new_args; call.args = new_args;
parser::Expression::Call(call) Expression::Call(call)
}
parser::Expression::Operation(op) => {
parser::Expression::Call(Box::new(desugar_operation(*op)))
} }
Expression::BinOp(op) => Expression::Call(Box::new(desugar_binop(*op))),
Expression::UnOp(op) => Expression::Call(Box::new(desugar_unop(*op))),
} }
} }
fn lower_expression( fn lower_expression(
ctx: &Context, ctx: &Context,
module: &mut rtlil::Module, module: &mut rtlil::Module,
expr: &parser::Expression, expr: &Expression,
) -> Result<rtlil::SigSpec, CompileError> { ) -> Result<rtlil::SigSpec, CompileError> {
let expr = desugar_expression(expr.clone()); let expr = desugar_expression(expr.clone());
match expr { match expr {
parser::Expression::Ident(ident) => { Expression::Path(ident) => {
let signal = ctx.try_get_signal(ident)?; let signal = ctx.try_get_signal(ident)?;
Ok(signal.sigspec()) Ok(signal.sigspec())
} }
parser::Expression::Call(call) => { Expression::Call(call) => {
let args_resolved = call let args_resolved = call
.args .args
.iter() .iter()
@ -268,8 +258,12 @@ fn lower_expression(
} }
// TODO: instantiate operators directly here instead of desugaring, once the callable infrastructure improves // TODO: instantiate operators directly here instead of desugaring, once the callable infrastructure improves
// to get better errors // to get better errors
parser::Expression::Operation(_op) => todo!("operators not yet implemented"), Expression::Literal(lit) => Ok(rtlil::SigSpec::Const(
parser::Expression::Literal(lit) => Ok(rtlil::SigSpec::Const(lit as i64, TODO_WIDTH)), lit.span().fragment().parse().unwrap(),
TODO_WIDTH,
)),
Expression::UnOp(_) => todo!(),
Expression::BinOp(_) => todo!(),
} }
} }
@ -284,6 +278,39 @@ fn lower_assignment(
Ok(()) Ok(())
} }
fn lower_comb(
ctx: &mut Context,
module: &mut rtlil::Module,
pa_comb: parser::comb::CombBlock,
) -> Result<(), CompileError> {
for (num, port) in pa_comb.ports.iter().enumerate() {
let port_id = make_pubid(port.net.name.fragment());
module.add_wire(rtlil::Wire::new(
port_id.clone(),
TODO_WIDTH,
Some(rtlil::PortOption::Input((num + 1) as i32)),
));
let typ = TypeStruct::logic_width(TODO_WIDTH);
let signal = Signal {
name: port.net.name.fragment().to_string(),
il_id: port_id,
// TODO: CRIMES CRIMES CRIMES
typ: Box::leak(Box::new(typ)),
};
ctx.signals
.insert(port.net.name.fragment().to_string(), signal);
}
let ret_id = module.make_genid("ret");
module.add_wire(rtlil::Wire::new(
ret_id.clone(),
TODO_WIDTH,
Some(rtlil::PortOption::Output(99)),
));
let out_sig = lower_expression(ctx, module, &pa_comb.expr)?;
module.add_connection(&rtlil::SigSpec::Wire(ret_id), &out_sig);
Ok(())
}
pub fn lower_module(pa_module: parser::Module) -> Result<String, CompileError> { pub fn lower_module(pa_module: parser::Module) -> Result<String, CompileError> {
let mut writer = rtlil::ILWriter::new(); let mut writer = rtlil::ILWriter::new();
let mut ir_module = rtlil::Module::new(make_pubid("test")); let mut ir_module = rtlil::Module::new(make_pubid("test"));
@ -297,38 +324,13 @@ pub fn lower_module(pa_module: parser::Module) -> Result<String, CompileError> {
}; };
writer.write_line("autoidx 1"); writer.write_line("autoidx 1");
/*
for (idx, port) in pa_module.ports.iter().enumerate() {
// FIXME: Actually resolve types
let sigtype = TypeStruct::logic_width(TODO_WIDTH);
// FIXME: CRIMES CRIMES CRIMES
let sigtype = Box::leak(Box::new(sigtype));
let sig = Signal {
name: port.net.name.fragment().to_string(),
il_id: make_pubid(port.net.name.fragment()),
typ: sigtype,
};
let sig = context
.signals
.entry(port.net.name.fragment().to_string())
.or_insert(sig);
let dir_option = match port.direction {
parser::PortDirection::Input => rtlil::PortOption::Input(idx as i32 + 1),
parser::PortDirection::Output => rtlil::PortOption::Output(idx as i32 + 1),
};
let wire = rtlil::Wire::new(sig.il_id.to_owned(), TODO_WIDTH, Some(dir_option));
ir_module.add_wire(wire);
}
for item in pa_module.items { for item in pa_module.items {
match item { match item {
parser::ModuleItem::Assign(assignment) => { parser::ModuleItem::Comb(comb) => lower_comb(&mut context, &mut ir_module, comb)?,
lower_assignment(&context, &mut ir_module, assignment)? parser::ModuleItem::Proc(_) => todo!(),
} parser::ModuleItem::State(_) => todo!(),
parser::ModuleItem::Proc(proc) => lower_process(&context, &mut ir_module, &proc)?,
} }
} }
ir_module.write_rtlil(&mut writer); ir_module.write_rtlil(&mut writer);
*/
Ok(writer.finish()) Ok(writer.finish())
} }

View File

@ -1,4 +1,6 @@
use super::{ use super::{
declaration::TypeName,
expression::{expression, Expression},
module::inputs_list, module::inputs_list,
module::PortDecl, module::PortDecl,
tokens::{token, TokenKind as tk, TokenSpan}, tokens::{token, TokenKind as tk, TokenSpan},
@ -16,6 +18,8 @@ use nom::{
pub struct CombBlock<'a> { pub struct CombBlock<'a> {
pub name: Span<'a>, pub name: Span<'a>,
pub ports: Vec<PortDecl<'a>>, pub ports: Vec<PortDecl<'a>>,
pub ret: TypeName<'a>,
pub expr: Expression<'a>,
} }
pub fn comb_block(input: TokenSpan) -> IResult<TokenSpan, CombBlock> { pub fn comb_block(input: TokenSpan) -> IResult<TokenSpan, CombBlock> {
@ -26,17 +30,15 @@ pub fn comb_block(input: TokenSpan) -> IResult<TokenSpan, CombBlock> {
token(tk::Ident), token(tk::Ident),
delimited(token(tk::LParen), inputs_list, token(tk::RParen)), delimited(token(tk::LParen), inputs_list, token(tk::RParen)),
preceded(token(tk::RArrow), typename), preceded(token(tk::RArrow), typename),
delimited( delimited(token(tk::LBrace), expression, token(tk::RBrace)),
token(tk::LBrace),
many0(token(tk::Error)),
token(tk::RBrace),
),
))), ))),
), ),
|(name, inputs, _ret, _items)| CombBlock { |(name, inputs, ret, expr)| CombBlock {
// TODO: bring back returns // TODO: bring back returns
name: name.span(), name: name.span(),
ports: inputs, ports: inputs,
ret,
expr,
}, },
)(input) )(input)
} }

View File

@ -1,4 +1,4 @@
use super::tokens::{token, TokenKind as tk, TokenSpan}; use super::tokens::{token, Token, TokenKind as tk, TokenSpan};
use super::{IResult, Span}; use super::{IResult, Span};
use nom::{ use nom::{
branch::alt, branch::alt,
@ -7,48 +7,95 @@ use nom::{
sequence::{delimited, preceded, separated_pair, tuple}, sequence::{delimited, preceded, separated_pair, tuple},
}; };
#[derive(Debug, Clone)]
pub enum Operation<'a> {
And {
a: Expression<'a>,
b: Expression<'a>,
},
Or {
a: Expression<'a>,
b: Expression<'a>,
},
Xor {
a: Expression<'a>,
b: Expression<'a>,
},
Not(Expression<'a>),
}
pub fn operation(input: TokenSpan) -> IResult<TokenSpan, Operation> {
// temporarily given up on before I learn the shunting yard algorithm
alt((
map(
separated_pair(expression_nonrecurse, token(tk::BitAnd), expression),
|(a, b)| Operation::And { a, b },
),
map(
separated_pair(expression_nonrecurse, token(tk::BitOr), expression),
|(a, b)| Operation::Or { a, b },
),
map(
separated_pair(expression_nonrecurse, token(tk::BitXor), expression),
|(a, b)| Operation::Xor { a, b },
),
map(preceded(token(tk::BitNot), expression), Operation::Not),
))(input)
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Call<'a> { pub struct Call<'a> {
pub name: Span<'a>, pub name: Span<'a>,
pub args: Vec<Expression<'a>>, pub args: Vec<Expression<'a>>,
} }
#[derive(Debug, Clone)]
pub enum BinOpKind {
And,
Or,
Xor,
}
#[derive(Debug, Clone)]
pub enum UnOpKind {
Not,
BitNot,
}
#[derive(Debug, Clone)]
pub struct BinOp<'a> {
pub a: Expression<'a>,
pub b: Expression<'a>,
pub kind: BinOpKind,
}
#[derive(Debug, Clone)]
pub struct UnOp<'a> {
pub a: Expression<'a>,
pub kind: UnOpKind,
}
#[derive(Debug, Clone)]
pub enum Expression<'a> {
Path(&'a str),
Literal(Token<'a>),
UnOp(Box<UnOp<'a>>),
BinOp(Box<BinOp<'a>>),
Call(Box<Call<'a>>),
}
/// expressions that can't be subdivided further
fn atom(input: TokenSpan) -> IResult<TokenSpan, Expression> {
alt((
map(call_item, |call| Expression::Call(Box::new(call))),
map(token(tk::Ident), |it| {
Expression::Path(it.span().fragment())
}),
map(token(tk::Number), Expression::Literal),
delimited(token(tk::LParen), expression, token(tk::RParen)),
))(input)
}
fn unop_kind(input: TokenSpan) -> IResult<TokenSpan, UnOpKind> {
alt((
map(token(tk::Not), |_| UnOpKind::Not),
map(token(tk::BitNot), |_| UnOpKind::BitNot),
))(input)
}
/// unary expressions e.g `~expr`, `!expr`
fn unary(input: TokenSpan) -> IResult<TokenSpan, Expression> {
alt((
map(tuple((unop_kind, unary)), |(kind, expr)| {
Expression::UnOp(Box::new(UnOp { a: expr, kind }))
}),
atom,
))(input)
}
fn bitop_kind(input: TokenSpan) -> IResult<TokenSpan, BinOpKind> {
alt((
map(token(tk::BitXor), |_| BinOpKind::Xor),
map(token(tk::BitOr), |_| BinOpKind::Or),
map(token(tk::BitAnd), |_| BinOpKind::And),
))(input)
}
/// bit and, or, xor e.g. a ^ b & c
/// TODO: make precedence rules for bit ops
fn bitop(input: TokenSpan) -> IResult<TokenSpan, Expression> {
alt((
map(tuple((unary, bitop_kind, bitop)), |(a, kind, b)| {
Expression::BinOp(Box::new(BinOp { a, b, kind }))
}),
unary,
))(input)
}
pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> { pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
map( map(
tuple(( tuple((
@ -66,32 +113,54 @@ pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
)(input) )(input)
} }
#[derive(Debug, Clone)]
pub enum Expression<'a> {
Ident(&'a str),
Literal(u64),
Call(Box<Call<'a>>),
Operation(Box<Operation<'a>>),
}
/// parser combinators can not parse left-recursive grammars. To work around this, we split /// parser combinators can not parse left-recursive grammars. To work around this, we split
/// expressions into a recursive and non-recursive portion. /// expressions into a recursive and non-recursive portion.
/// Parsers reachable from this point must call expression_nonrecurse instead /// Parsers reachable from this point must call expression_nonrecurse instead
pub fn expression(input: TokenSpan) -> IResult<TokenSpan, Expression> { pub fn expression(input: TokenSpan) -> IResult<TokenSpan, Expression> {
alt(( bitop(input)
map(operation, |op| Expression::Operation(Box::new(op))),
expression_nonrecurse,
))(input)
} }
/// the portion of the expression grammar that can be parsed without left recursion #[cfg(test)]
fn expression_nonrecurse(input: TokenSpan) -> IResult<TokenSpan, Expression> { mod test {
alt(( use super::*;
map(token(tk::Number), |_| Expression::Literal(42)), use crate::parser::tokens::{lex, Token, TokenSpan};
map(call_item, |call| Expression::Call(Box::new(call))), use crate::parser::Span;
map(token(tk::Ident), |ident| { use nom::combinator::all_consuming;
Expression::Ident(*ident.span().fragment())
}), fn tok(input: &'static str) -> Vec<Token> {
delimited(token(tk::LParen), expression, token(tk::RParen)), let input = Span::from(input);
))(input) lex(input).unwrap().1
}
fn fullexpr(input: TokenSpan) -> IResult<TokenSpan, Expression> {
all_consuming(expression)(input)
}
#[test]
fn test_atoms() {
fullexpr(TokenSpan::new(&tok("a"))).unwrap();
fullexpr(TokenSpan::new(&tok("(a)"))).unwrap();
}
#[test]
fn test_unary() {
fullexpr(TokenSpan::new(&tok("asdf"))).unwrap();
fullexpr(TokenSpan::new(&tok("~(asdf)"))).unwrap();
fullexpr(TokenSpan::new(&tok("!asdf"))).unwrap();
// unary(TokenSpan::new(&tok("~!(asdf)"))).unwrap();
}
#[test]
fn test_bitop() {
fullexpr(TokenSpan::new(&tok("a | b"))).unwrap();
fullexpr(TokenSpan::new(&tok("a ^ b"))).unwrap();
fullexpr(TokenSpan::new(&tok("~(a ^ b)"))).unwrap();
fullexpr(TokenSpan::new(&tok("a ^ !b"))).unwrap();
fullexpr(TokenSpan::new(&tok("a & !b & c"))).unwrap();
}
#[test]
fn test_call() {
fullexpr(TokenSpan::new(&tok("a()"))).unwrap();
}
} }

View File

@ -20,7 +20,6 @@ pub type IResult<I, O, E = IErr<I>> = nom::IResult<I, O, E>;
pub use crate::parser::declaration::{ pub use crate::parser::declaration::{
assign_statement, declaration, typename, Assign, NetDecl, TypeName, assign_statement, declaration, typename, Assign, NetDecl, TypeName,
}; };
pub use crate::parser::expression::{expression, Call, Expression, Operation};
pub use crate::parser::module::{module, Module, ModuleItem, PortDirection}; pub use crate::parser::module::{module, Module, ModuleItem, PortDirection};
use crate::parser::tokens::TokenSpan; use crate::parser::tokens::TokenSpan;
use nom::combinator::all_consuming; use nom::combinator::all_consuming;
@ -28,36 +27,3 @@ use nom::combinator::all_consuming;
pub fn parse(input: TokenSpan) -> IResult<TokenSpan, Module> { pub fn parse(input: TokenSpan) -> IResult<TokenSpan, Module> {
all_consuming(module)(input) all_consuming(module)(input)
} }
#[cfg(test)]
mod test {
use super::*;
use nom::combinator::all_consuming;
#[test]
fn test_operation() {
operation(" a | b ".into()).unwrap();
operation(" a & b ".into()).unwrap();
}
#[test]
fn test_expression() {
expression(" a ".into()).unwrap();
expression(" a | b ".into()).unwrap();
expression(" a | b | c ".into()).unwrap();
}
#[test]
fn test_assignment() {
// TODO: make wrapper and use for all tests
all_consuming(assign_statement)(" a = b ".into()).unwrap();
all_consuming(assign_statement)(" a = b | c ".into()).unwrap();
}
#[test]
fn test_call() {
call_item("thing ( )".into()).unwrap();
call_item("thing ( a , b , c )".into()).unwrap();
call_item("thing(a,b,c)".into()).unwrap();
}
}

View File

@ -10,7 +10,7 @@ use crate::parser::{
declaration, declaration,
proc::ProcBlock, proc::ProcBlock,
tokens::{token, TokenKind as tk, TokenSpan}, tokens::{token, TokenKind as tk, TokenSpan},
IResult, NetDecl, Span, IResult, NetDecl,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -57,25 +57,3 @@ pub fn module(input: TokenSpan) -> IResult<TokenSpan, Module> {
|items| Module { items }, |items| Module { items },
)(input) )(input)
} }
#[cfg(test)]
mod test {
use super::*;
use nom::combinator::all_consuming;
#[test]
fn test_decl() {
declaration("reg abcd".into()).unwrap();
}
#[test]
fn test_assignment_item() {
all_consuming(assign_item)(" assign a = b ; ".into()).unwrap();
all_consuming(assign_item)(" assign a = b | c ; ".into()).unwrap();
}
#[test]
fn test_module_item() {
all_consuming(module_item)(" assign a = b ;".into()).unwrap();
}
}

View File

@ -8,8 +8,9 @@ use nom::{
use crate::parser::{ use crate::parser::{
assign_statement, expression, assign_statement, expression,
expression::{expression, Expression},
tokens::{token, TokenKind as tk, TokenSpan}, tokens::{token, TokenKind as tk, TokenSpan},
Assign, Expression, IResult, Span, Assign, IResult, Span,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -91,29 +92,3 @@ pub fn proc_block(input: TokenSpan) -> IResult<TokenSpan, ProcBlock> {
), ),
)(input) )(input)
} }
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_statement() {
proc_statement(" abc = def ".into()).unwrap();
}
#[test]
fn test_match_arm() {
match_arm(" 1 => abc = def ".into()).unwrap();
}
#[test]
fn test_match_block() {
let input = "
match (asdf) {
1 => a = 0,
2 => c = ~d
}
";
match_block(input.into()).unwrap();
}
}

View File

@ -3,7 +3,7 @@
use super::{ use super::{
error::{Error, InputPos}, error::{Error, InputPos},
literals::{identifier, ws0}, literals::{identifier, ws0},
IErr, IResult, Span, IResult, Span,
}; };
use nom::{ use nom::{
branch::alt, branch::alt,
@ -70,6 +70,7 @@ pub enum TokenKind {
BitOr, BitOr,
BitXor, BitXor,
EqAssign, EqAssign,
Not,
// Multi Chars // Multi Chars
FatArrow, FatArrow,
RArrow, RArrow,
@ -204,6 +205,7 @@ fn lex_punctuation(input: Span) -> IResult<Span, Token> {
map(tag("&"), |_| TokenKind::BitAnd), map(tag("&"), |_| TokenKind::BitAnd),
map(tag("^"), |_| TokenKind::BitXor), map(tag("^"), |_| TokenKind::BitXor),
map(tag("|"), |_| TokenKind::BitOr), map(tag("|"), |_| TokenKind::BitOr),
map(tag("!"), |_| TokenKind::Not),
map(tag("="), |_| TokenKind::EqAssign), map(tag("="), |_| TokenKind::EqAssign),
))), ))),
|(span, kind)| Token::new(span, kind), |(span, kind)| Token::new(span, kind),