Compare commits

a69c6ab0b30e041b1d43af37f67ba9a41d5275e0...faf92307e235d0c226e26a139d4e9b6ae3973c65

No commits in common. "a69c6ab0b30e041b1d43af37f67ba9a41d5275e0" and "faf92307e235d0c226e26a139d4e9b6ae3973c65" have entirely different histories.

11 changed files with 232 additions and 230 deletions

View File

@@ -1,4 +1,4 @@
-proc clockdiv_2 (
+module clockdiv_2 (
     clk: Logic,
     rst: Logic
 ) -> Logic

View File

@@ -1,7 +1,7 @@
-comb comparator (
+module comparator (
     a: Logic<8>,
     b: Logic<8>
 ) -> Logic
 {
-    ~reduce_or(a ^ b)
+    assign eq = ~reduce_or(a ^ b);
 }

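The comparator uses the identity that two words are equal exactly when their bitwise XOR has no set bits, so OR-reducing a ^ b and inverting the result yields the equality flag. A quick standalone check of that identity (plain Rust, illustrative only, not part of the repository):

    fn eq_via_xor(a: u8, b: u8) -> bool {
        // a == b exactly when a ^ b has no set bits, i.e. its OR-reduction is 0
        (a ^ b) == 0
    }

    fn main() {
        assert!(eq_via_xor(0x5a, 0x5a));
        assert!(!eq_via_xor(0x5a, 0x5b));
        println!("xor-based equality matches == on the sampled pairs");
    }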
View File

@@ -1,12 +1,8 @@
-struct Add {
-    sum: Logic,
-    carry: Logic
-}
-
-comb halfadd (
+module halfadd (
     a: Logic,
     b: Logic
-) -> Add
+) -> (Logic, Logic)
 {
-    Add{sum: a ^ b, carry: a & b}
+    assign sum = a ^ b;
+    assign carry = a & b;
 }

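Both versions describe the same half adder: the sum bit is a XOR b and the carry bit is a AND b. A tiny exhaustive check of that arithmetic (illustrative Rust sketch, not project code):

    fn half_add(a: u8, b: u8) -> (u8, u8) {
        // returns (sum, carry) for single-bit inputs in {0, 1}
        (a ^ b, a & b)
    }

    fn main() {
        for a in 0..2u8 {
            for b in 0..2u8 {
                let (sum, carry) = half_add(a, b);
                // carry * 2 + sum reproduces the ordinary sum a + b
                assert_eq!(a + b, carry * 2 + sum);
            }
        }
        println!("half-adder truth table checked");
    }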
View File

@@ -1,6 +1,6 @@
-comb reduce_or (
+module reduce_or (
     a: Logic
 )
 -> Logic<1> {

View File

@@ -2,7 +2,6 @@ use std::collections::BTreeMap;
 use crate::builtin_cells::get_builtins;
 use crate::parser;
-use crate::parser::expression::Expression;
 use crate::rtlil;
 use crate::rtlil::RtlilWrite;
 pub use callable::Callable;
@@ -170,59 +169,70 @@ fn lower_process(
     Ok(())
 }

-fn desugar_binop<'a>(op: parser::expression::BinOp<'a>) -> parser::expression::Call<'a> {
-    let a = desugar_expression(op.a);
-    let b = desugar_expression(op.b);
-    let op_func = match op.kind {
-        parser::expression::BinOpKind::And => "and",
-        parser::expression::BinOpKind::Or => "or",
-        parser::expression::BinOpKind::Xor => "xor",
-    };
-    parser::expression::Call {
-        name: op_func.into(),
-        args: vec![a, b],
+fn desugar_operation<'a>(op: parser::Operation<'a>) -> parser::Call<'a> {
+    match op {
+        parser::Operation::And { a, b } => {
+            let a = desugar_expression(a);
+            let b = desugar_expression(b);
+            parser::Call {
+                name: "and".into(),
+                args: vec![a, b],
+            }
+        }
+        parser::Operation::Or { a, b } => {
+            let a = desugar_expression(a);
+            let b = desugar_expression(b);
+            parser::Call {
+                name: "or".into(),
+                args: vec![a, b],
+            }
+        }
+        parser::Operation::Xor { a, b } => {
+            let a = desugar_expression(a);
+            let b = desugar_expression(b);
+            parser::Call {
+                name: "xor".into(),
+                args: vec![a, b],
+            }
+        }
+        parser::Operation::Not(a) => {
+            let a = desugar_expression(a);
+            parser::Call {
+                name: "not".into(),
+                args: vec![a],
+            }
+        }
     }
 }

-fn desugar_unop<'a>(op: parser::expression::UnOp<'a>) -> parser::expression::Call<'a> {
-    let a = desugar_expression(op.a);
-    let op_func = match op.kind {
-        parser::expression::UnOpKind::BitNot => "not",
-        parser::expression::UnOpKind::Not => todo!(),
-    };
-    parser::expression::Call {
-        name: op_func.into(),
-        args: vec![a],
-    }
-}
-
-fn desugar_expression<'a>(expr: Expression<'a>) -> Expression<'a> {
+fn desugar_expression<'a>(expr: parser::Expression<'a>) -> parser::Expression<'a> {
     // TODO: allow ergonomic traversal of AST
     match expr {
-        Expression::Path(_) => expr,
-        Expression::Literal(_) => expr,
-        Expression::Call(mut call) => {
+        parser::Expression::Ident(_) => expr,
+        parser::Expression::Literal(_) => expr,
+        parser::Expression::Call(mut call) => {
             let new_args = call.args.into_iter().map(desugar_expression).collect();
             call.args = new_args;
-            Expression::Call(call)
+            parser::Expression::Call(call)
+        }
+        parser::Expression::Operation(op) => {
+            parser::Expression::Call(Box::new(desugar_operation(*op)))
         }
-        Expression::BinOp(op) => Expression::Call(Box::new(desugar_binop(*op))),
-        Expression::UnOp(op) => Expression::Call(Box::new(desugar_unop(*op))),
     }
 }

 fn lower_expression(
     ctx: &Context,
     module: &mut rtlil::Module,
-    expr: &Expression,
+    expr: &parser::Expression,
 ) -> Result<rtlil::SigSpec, CompileError> {
     let expr = desugar_expression(expr.clone());
     match expr {
-        Expression::Path(ident) => {
+        parser::Expression::Ident(ident) => {
             let signal = ctx.try_get_signal(ident)?;
             Ok(signal.sigspec())
         }
-        Expression::Call(call) => {
+        parser::Expression::Call(call) => {
             let args_resolved = call
                 .args
                 .iter()
@@ -258,12 +268,8 @@ fn lower_expression(
         }
         // TODO: instantiate operators directly here instead of desugaring, once the callable infrastructure improves
         // to get better errors
-        Expression::Literal(lit) => Ok(rtlil::SigSpec::Const(
-            lit.span().fragment().parse().unwrap(),
-            TODO_WIDTH,
-        )),
-        Expression::UnOp(_) => todo!(),
-        Expression::BinOp(_) => todo!(),
+        parser::Expression::Operation(_op) => todo!("operators not yet implemented"),
+        parser::Expression::Literal(lit) => Ok(rtlil::SigSpec::Const(lit as i64, TODO_WIDTH)),
     }
 }
@@ -278,39 +284,6 @@ fn lower_assignment(
     Ok(())
 }

-fn lower_comb(
-    ctx: &mut Context,
-    module: &mut rtlil::Module,
-    pa_comb: parser::comb::CombBlock,
-) -> Result<(), CompileError> {
-    for (num, port) in pa_comb.ports.iter().enumerate() {
-        let port_id = make_pubid(port.net.name.fragment());
-        module.add_wire(rtlil::Wire::new(
-            port_id.clone(),
-            TODO_WIDTH,
-            Some(rtlil::PortOption::Input((num + 1) as i32)),
-        ));
-        let typ = TypeStruct::logic_width(TODO_WIDTH);
-        let signal = Signal {
-            name: port.net.name.fragment().to_string(),
-            il_id: port_id,
-            // TODO: CRIMES CRIMES CRIMES
-            typ: Box::leak(Box::new(typ)),
-        };
-        ctx.signals
-            .insert(port.net.name.fragment().to_string(), signal);
-    }
-    let ret_id = module.make_genid("ret");
-    module.add_wire(rtlil::Wire::new(
-        ret_id.clone(),
-        TODO_WIDTH,
-        Some(rtlil::PortOption::Input(99)),
-    ));
-    let out_sig = lower_expression(ctx, module, &pa_comb.expr)?;
-    module.add_connection(&rtlil::SigSpec::Wire(ret_id), &out_sig);
-    Ok(())
-}
-
 pub fn lower_module(pa_module: parser::Module) -> Result<String, CompileError> {
     let mut writer = rtlil::ILWriter::new();
     let mut ir_module = rtlil::Module::new(make_pubid("test"));
@@ -324,13 +297,38 @@ pub fn lower_module(pa_module: parser::Module) -> Result<String, CompileError> {
     };
     writer.write_line("autoidx 1");
+    /*
+    for (idx, port) in pa_module.ports.iter().enumerate() {
+        // FIXME: Actually resolve types
+        let sigtype = TypeStruct::logic_width(TODO_WIDTH);
+        // FIXME: CRIMES CRIMES CRIMES
+        let sigtype = Box::leak(Box::new(sigtype));
+        let sig = Signal {
+            name: port.net.name.fragment().to_string(),
+            il_id: make_pubid(port.net.name.fragment()),
+            typ: sigtype,
+        };
+        let sig = context
+            .signals
+            .entry(port.net.name.fragment().to_string())
+            .or_insert(sig);
+        let dir_option = match port.direction {
+            parser::PortDirection::Input => rtlil::PortOption::Input(idx as i32 + 1),
+            parser::PortDirection::Output => rtlil::PortOption::Output(idx as i32 + 1),
+        };
+        let wire = rtlil::Wire::new(sig.il_id.to_owned(), TODO_WIDTH, Some(dir_option));
+        ir_module.add_wire(wire);
+    }
     for item in pa_module.items {
         match item {
-            parser::ModuleItem::Comb(comb) => lower_comb(&mut context, &mut ir_module, comb)?,
-            parser::ModuleItem::Proc(_) => todo!(),
-            parser::ModuleItem::State(_) => todo!(),
+            parser::ModuleItem::Assign(assignment) => {
+                lower_assignment(&context, &mut ir_module, assignment)?
+            }
+            parser::ModuleItem::Proc(proc) => lower_process(&context, &mut ir_module, &proc)?,
         }
     }
     ir_module.write_rtlil(&mut writer);
+    */
     Ok(writer.finish())
 }

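Both sides of this file follow the same desugaring idea: operator nodes in the expression AST are rewritten into ordinary calls to builtin cells ("and", "or", "xor", "not") before lowering, so the later stages only need to handle identifiers, literals, and calls. A minimal standalone sketch of that rewrite, using simplified AST types rather than the crate's actual ones:

    #[derive(Debug, Clone)]
    enum Expr {
        Ident(String),
        Call { name: String, args: Vec<Expr> },
        // operator node that exists only until desugaring runs
        Xor(Box<Expr>, Box<Expr>),
    }

    // Rewrite operator nodes into builtin calls, recursing into arguments.
    fn desugar(expr: Expr) -> Expr {
        match expr {
            Expr::Ident(_) => expr,
            Expr::Call { name, args } => Expr::Call {
                name,
                args: args.into_iter().map(desugar).collect(),
            },
            Expr::Xor(a, b) => Expr::Call {
                name: "xor".to_string(),
                args: vec![desugar(*a), desugar(*b)],
            },
        }
    }

    fn main() {
        let ast = Expr::Xor(
            Box::new(Expr::Ident("a".into())),
            Box::new(Expr::Ident("b".into())),
        );
        // prints Call { name: "xor", args: [Ident("a"), Ident("b")] }
        println!("{:?}", desugar(ast));
    }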
View File

@@ -1,6 +1,4 @@
 use super::{
-    declaration::TypeName,
-    expression::{expression, Expression},
     module::inputs_list,
     module::PortDecl,
     tokens::{token, TokenKind as tk, TokenSpan},
@@ -18,8 +16,6 @@ use nom::{
 pub struct CombBlock<'a> {
     pub name: Span<'a>,
     pub ports: Vec<PortDecl<'a>>,
-    pub ret: TypeName<'a>,
-    pub expr: Expression<'a>,
 }

 pub fn comb_block(input: TokenSpan) -> IResult<TokenSpan, CombBlock> {
@@ -30,15 +26,17 @@ pub fn comb_block(input: TokenSpan) -> IResult<TokenSpan, CombBlock> {
             token(tk::Ident),
             delimited(token(tk::LParen), inputs_list, token(tk::RParen)),
             preceded(token(tk::RArrow), typename),
-            delimited(token(tk::LBrace), expression, token(tk::RBrace)),
+            delimited(
+                token(tk::LBrace),
+                many0(token(tk::Error)),
+                token(tk::RBrace),
+            ),
         ))),
         ),
-        |(name, inputs, ret, expr)| CombBlock {
+        |(name, inputs, _ret, _items)| CombBlock {
             // TODO: bring back returns
             name: name.span(),
             ports: inputs,
-            ret,
-            expr,
         },
     )(input)
 }

View File

@@ -1,4 +1,4 @@
-use super::tokens::{token, Token, TokenKind as tk, TokenSpan};
+use super::tokens::{token, TokenKind as tk, TokenSpan};
 use super::{IResult, Span};
 use nom::{
     branch::alt,
@@ -7,95 +7,48 @@ use nom::{
     sequence::{delimited, preceded, separated_pair, tuple},
 };

+#[derive(Debug, Clone)]
+pub enum Operation<'a> {
+    And {
+        a: Expression<'a>,
+        b: Expression<'a>,
+    },
+    Or {
+        a: Expression<'a>,
+        b: Expression<'a>,
+    },
+    Xor {
+        a: Expression<'a>,
+        b: Expression<'a>,
+    },
+    Not(Expression<'a>),
+}
+
+pub fn operation(input: TokenSpan) -> IResult<TokenSpan, Operation> {
+    // temporarily given up on before I learn the shunting yard algorithm
+    alt((
+        map(
+            separated_pair(expression_nonrecurse, token(tk::BitAnd), expression),
+            |(a, b)| Operation::And { a, b },
+        ),
+        map(
+            separated_pair(expression_nonrecurse, token(tk::BitOr), expression),
+            |(a, b)| Operation::Or { a, b },
+        ),
+        map(
+            separated_pair(expression_nonrecurse, token(tk::BitXor), expression),
+            |(a, b)| Operation::Xor { a, b },
+        ),
+        map(preceded(token(tk::BitNot), expression), Operation::Not),
+    ))(input)
+}
+
 #[derive(Debug, Clone)]
 pub struct Call<'a> {
     pub name: Span<'a>,
     pub args: Vec<Expression<'a>>,
 }

-#[derive(Debug, Clone)]
-pub enum BinOpKind {
-    And,
-    Or,
-    Xor,
-}
-
-#[derive(Debug, Clone)]
-pub enum UnOpKind {
-    Not,
-    BitNot,
-}
-
-#[derive(Debug, Clone)]
-pub struct BinOp<'a> {
-    pub a: Expression<'a>,
-    pub b: Expression<'a>,
-    pub kind: BinOpKind,
-}
-
-#[derive(Debug, Clone)]
-pub struct UnOp<'a> {
-    pub a: Expression<'a>,
-    pub kind: UnOpKind,
-}
-
-#[derive(Debug, Clone)]
-pub enum Expression<'a> {
-    Path(&'a str),
-    Literal(Token<'a>),
-    UnOp(Box<UnOp<'a>>),
-    BinOp(Box<BinOp<'a>>),
-    Call(Box<Call<'a>>),
-}
-
-/// expressions that can't be subdivided further
-fn atom(input: TokenSpan) -> IResult<TokenSpan, Expression> {
-    alt((
-        map(call_item, |call| Expression::Call(Box::new(call))),
-        map(token(tk::Ident), |it| {
-            Expression::Path(it.span().fragment())
-        }),
-        map(token(tk::Number), Expression::Literal),
-        delimited(token(tk::LParen), expression, token(tk::RParen)),
-    ))(input)
-}
-
-fn unop_kind(input: TokenSpan) -> IResult<TokenSpan, UnOpKind> {
-    alt((
-        map(token(tk::Not), |_| UnOpKind::Not),
-        map(token(tk::BitNot), |_| UnOpKind::BitNot),
-    ))(input)
-}
-
-/// unary expressions e.g `~expr`, `!expr`
-fn unary(input: TokenSpan) -> IResult<TokenSpan, Expression> {
-    alt((
-        map(tuple((unop_kind, unary)), |(kind, expr)| {
-            Expression::UnOp(Box::new(UnOp { a: expr, kind }))
-        }),
-        atom,
-    ))(input)
-}
-
-fn bitop_kind(input: TokenSpan) -> IResult<TokenSpan, BinOpKind> {
-    alt((
-        map(token(tk::BitXor), |_| BinOpKind::Xor),
-        map(token(tk::BitOr), |_| BinOpKind::Xor),
-        map(token(tk::BitAnd), |_| BinOpKind::Xor),
-    ))(input)
-}
-
-/// bit and, or, xor e.g. a ^ b & c
-/// TODO: make precedence rules for bit ops
-fn bitop(input: TokenSpan) -> IResult<TokenSpan, Expression> {
-    alt((
-        map(tuple((unary, bitop_kind, bitop)), |(a, kind, b)| {
-            Expression::BinOp(Box::new(BinOp { a, b, kind }))
-        }),
-        unary,
-    ))(input)
-}
-
 pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
     map(
         tuple((
@@ -113,54 +66,32 @@ pub fn call_item(input: TokenSpan) -> IResult<TokenSpan, Call> {
     )(input)
 }

+#[derive(Debug, Clone)]
+pub enum Expression<'a> {
+    Ident(&'a str),
+    Literal(u64),
+    Call(Box<Call<'a>>),
+    Operation(Box<Operation<'a>>),
+}
+
 /// parser combinators can not parse left-recursive grammars. To work around this, we split
 /// expressions into a recursive and non-recursive portion.
 /// Parsers reachable from this point must call expression_nonrecurse instead
 pub fn expression(input: TokenSpan) -> IResult<TokenSpan, Expression> {
-    bitop(input)
+    alt((
+        map(operation, |op| Expression::Operation(Box::new(op))),
+        expression_nonrecurse,
+    ))(input)
 }

-#[cfg(test)]
-mod test {
-    use super::*;
-    use crate::parser::tokens::{lex, Token, TokenSpan};
-    use crate::parser::Span;
-    use nom::combinator::all_consuming;
-
-    fn tok(input: &'static str) -> Vec<Token> {
-        let input = Span::from(input);
-        lex(input).unwrap().1
-    }
-
-    fn fullexpr(input: TokenSpan) -> IResult<TokenSpan, Expression> {
-        all_consuming(expression)(input)
-    }
-
-    #[test]
-    fn test_atoms() {
-        fullexpr(TokenSpan::new(&tok("a"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("(a)"))).unwrap();
-    }
-
-    #[test]
-    fn test_unary() {
-        fullexpr(TokenSpan::new(&tok("asdf"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("~(asdf)"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("!asdf"))).unwrap();
-        // unary(TokenSpan::new(&tok("~!(asdf)"))).unwrap();
-    }
-
-    #[test]
-    fn test_bitop() {
-        fullexpr(TokenSpan::new(&tok("a | b"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("a ^ b"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("~(a ^ b)"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("a ^ !b"))).unwrap();
-        fullexpr(TokenSpan::new(&tok("a & !b & c"))).unwrap();
-    }
-
-    #[test]
-    fn test_call() {
-        fullexpr(TokenSpan::new(&tok("a()"))).unwrap();
-    }
-}
+/// the portion of the expression grammar that can be parsed without left recursion
+fn expression_nonrecurse(input: TokenSpan) -> IResult<TokenSpan, Expression> {
+    alt((
+        map(token(tk::Number), |_| Expression::Literal(42)),
+        map(call_item, |call| Expression::Call(Box::new(call))),
+        map(token(tk::Ident), |ident| {
+            Expression::Ident(*ident.span().fragment())
+        }),
+        delimited(token(tk::LParen), expression, token(tk::RParen)),
+    ))(input)
+}

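The doc comment kept on both sides points at a standard combinator-parser workaround: a left-recursive rule such as expression := expression "|" term cannot be written directly, so the grammar is split and the left operand always comes from the non-recursive subset. A dependency-free sketch of that split over a plain token slice (hypothetical helper, not the crate's nom-based parser):

    #[derive(Debug)]
    enum Expr {
        Ident(String),
        Or(Box<Expr>, Box<Expr>),
    }

    // Entry point: may recurse, but only after expression_nonrecurse has
    // consumed at least one token, so parsing always terminates.
    fn expression(tokens: &[&str]) -> Option<(Expr, usize)> {
        let (lhs, used) = expression_nonrecurse(tokens)?;
        if tokens.get(used) == Some(&"|") {
            let (rhs, rhs_used) = expression(&tokens[used + 1..])?;
            Some((Expr::Or(Box::new(lhs), Box::new(rhs)), used + 1 + rhs_used))
        } else {
            Some((lhs, used))
        }
    }

    // The non-recursive subset of the grammar: here just a single identifier token.
    fn expression_nonrecurse(tokens: &[&str]) -> Option<(Expr, usize)> {
        tokens.first().map(|t| (Expr::Ident((*t).to_string()), 1))
    }

    fn main() {
        let (expr, _consumed) = expression(&["a", "|", "b", "|", "c"]).unwrap();
        // right-nested result: Or(Ident("a"), Or(Ident("b"), Ident("c")))
        println!("{:?}", expr);
    }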
View File

@@ -20,6 +20,7 @@ pub type IResult<I, O, E = IErr<I>> = nom::IResult<I, O, E>;
 pub use crate::parser::declaration::{
     assign_statement, declaration, typename, Assign, NetDecl, TypeName,
 };
+pub use crate::parser::expression::{expression, Call, Expression, Operation};
 pub use crate::parser::module::{module, Module, ModuleItem, PortDirection};
 use crate::parser::tokens::TokenSpan;
 use nom::combinator::all_consuming;
@@ -27,3 +28,36 @@ use nom::combinator::all_consuming;
 pub fn parse(input: TokenSpan) -> IResult<TokenSpan, Module> {
     all_consuming(module)(input)
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use nom::combinator::all_consuming;
+
+    #[test]
+    fn test_operation() {
+        operation(" a | b ".into()).unwrap();
+        operation(" a & b ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_expression() {
+        expression(" a ".into()).unwrap();
+        expression(" a | b ".into()).unwrap();
+        expression(" a | b | c ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_assignment() {
+        // TODO: make wrapper and use for all tests
+        all_consuming(assign_statement)(" a = b ".into()).unwrap();
+        all_consuming(assign_statement)(" a = b | c ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_call() {
+        call_item("thing ( )".into()).unwrap();
+        call_item("thing ( a , b , c )".into()).unwrap();
+        call_item("thing(a,b,c)".into()).unwrap();
+    }
+}

View File

@@ -10,7 +10,7 @@ use crate::parser::{
     declaration,
     proc::ProcBlock,
     tokens::{token, TokenKind as tk, TokenSpan},
-    IResult, NetDecl,
+    IResult, NetDecl, Span,
 };

 #[derive(Debug)]
@@ -57,3 +57,25 @@ pub fn module(input: TokenSpan) -> IResult<TokenSpan, Module> {
         |items| Module { items },
     )(input)
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use nom::combinator::all_consuming;
+
+    #[test]
+    fn test_decl() {
+        declaration("reg abcd".into()).unwrap();
+    }
+
+    #[test]
+    fn test_assignment_item() {
+        all_consuming(assign_item)(" assign a = b ; ".into()).unwrap();
+        all_consuming(assign_item)(" assign a = b | c ; ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_module_item() {
+        all_consuming(module_item)(" assign a = b ;".into()).unwrap();
+    }
+}

View File

@@ -8,9 +8,8 @@ use nom::{
 use crate::parser::{
     assign_statement, expression,
-    expression::{expression, Expression},
     tokens::{token, TokenKind as tk, TokenSpan},
-    Assign, IResult, Span,
+    Assign, Expression, IResult, Span,
 };

 #[derive(Debug)]
@@ -92,3 +91,29 @@ pub fn proc_block(input: TokenSpan) -> IResult<TokenSpan, ProcBlock> {
         ),
     )(input)
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[test]
+    fn test_statement() {
+        proc_statement(" abc = def ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_match_arm() {
+        match_arm(" 1 => abc = def ".into()).unwrap();
+    }
+
+    #[test]
+    fn test_match_block() {
+        let input = "
+            match (asdf) {
+                1 => a = 0,
+                2 => c = ~d
+            }
+        ";
+        match_block(input.into()).unwrap();
+    }
+}

View File

@@ -3,7 +3,7 @@
 use super::{
     error::{Error, InputPos},
     literals::{identifier, ws0},
-    IResult, Span,
+    IErr, IResult, Span,
 };
 use nom::{
     branch::alt,
@@ -70,7 +70,6 @@ pub enum TokenKind {
     BitOr,
     BitXor,
     EqAssign,
-    Not,
     // Multi Chars
     FatArrow,
     RArrow,
@@ -205,7 +204,6 @@ fn lex_punctuation(input: Span) -> IResult<Span, Token> {
             map(tag("&"), |_| TokenKind::BitAnd),
             map(tag("^"), |_| TokenKind::BitXor),
             map(tag("|"), |_| TokenKind::BitOr),
-            map(tag("!"), |_| TokenKind::Not),
             map(tag("="), |_| TokenKind::EqAssign),
         ))),
         |(span, kind)| Token::new(span, kind),