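//! Procedural macros for LLVM-backed type descriptors and config serialization:
//!
//! * `compl_type!` — function-like macro turning a type expression
//!   (identifier, `[elem; len]` array, or `|args| -> ret` function) into an
//!   expression that constructs the corresponding runtime type.
//! * `#[serialize_config]` — attaches `serde` derives and container
//!   attributes to a config struct or enum.
//! * `#[nat_call]` — makes a Rust function `extern "C"` and emits a companion
//!   function that registers it with the LLVM module and execution engine.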
use proc_macro as pm;
use proc_macro2 as pm2;
use pm2::Ident;
use quote::ToTokens;
use syn::{
    parse::{Parse, ParseStream},
    punctuated::Punctuated,
    Token,
};
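
// AST for the type mini-language accepted by `compl_type!`: a bare identifier
// (basic type), `[elem; len]` (array type), or `|arg, ...| -> ret`
// (function type).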
#[derive(Clone)]
struct BasicType {
    ty: Ident,
}

#[derive(Clone)]
struct ArrayType {
    size: syn::LitInt,
    memb: Box<ComplType>,
}

#[derive(Clone)]
struct FunctType {
    args: Punctuated<ComplType, Token![,]>,
    retn: Box<ComplType>,
}

#[derive(Clone)]
enum ComplType {
    BasicType(BasicType),
    ArrayType(ArrayType),
    FunctType(FunctType),
}
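
// Recursive-descent parsing of the type syntax from the macro input.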
impl Parse for BasicType {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        Ok(Self { ty: input.parse()? })
    }
}
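
// `[elem; len]`: the member type, the `;`, and the length all sit inside the
// brackets, so the whole form is parsed from the bracketed content.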
impl Parse for ArrayType {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let inner;
        syn::bracketed!(inner in input);
        let memb = inner.parse()?;
        inner.parse::<Token![;]>()?;
        let size = inner.parse()?;
        Ok(Self { size, memb })
    }
}
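
// `|arg, arg, ...| -> ret`: comma-separated argument types between pipes,
// followed by the return type.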
impl Parse for FunctType {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        input.parse::<Token![|]>()?;
        let mut args = Punctuated::new();
        if !input.peek(Token![|]) {
            loop {
                args.push_value(input.parse()?);
                if input.peek(Token![|]) {
                    break;
                }
                args.push_punct(input.parse()?);
            }
        }
        input.parse::<Token![|]>()?;
        input.parse::<Token![->]>()?;
        let retn = input.parse()?;
        Ok(Self { args, retn })
    }
}
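
// Dispatch on the leading token: `|` starts a function type, `[` an array
// type, and anything else is read as a basic type identifier.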
impl Parse for ComplType {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        if input.peek(Token![|]) {
            let funct = input.parse()?;
            Ok(Self::FunctType(funct))
        } else if input.peek(syn::token::Bracket) {
            let array = input.parse()?;
            Ok(Self::ArrayType(array))
        } else {
            let basic = input.parse()?;
            Ok(Self::BasicType(basic))
        }
    }
}
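
// Code generation: each node expands to an expression that builds the
// corresponding runtime type value. The emitted code assumes the expansion
// site provides `Basic`, `ArrayType::new`, `FunctType::new`, and the context
// binding `c` introduced by `compl_type!` below.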
impl ToTokens for BasicType {
    fn to_tokens(&self, tokens: &mut pm2::TokenStream) {
        let ty = self.ty.clone();
        tokens.extend(quote::quote! {
            c.basic_type(Basic::#ty)
        });
    }
}

impl ToTokens for ArrayType {
    fn to_tokens(&self, tokens: &mut pm2::TokenStream) {
        let size = self.size.clone();
        let memb = self.memb.clone();
        tokens.extend(quote::quote! {
            &ArrayType::new(#size, #memb)
        });
    }
}

impl ToTokens for FunctType {
    fn to_tokens(&self, tokens: &mut pm2::TokenStream) {
        let ret = self.retn.clone();
        let arg = self.args.iter().collect::<Vec<_>>();
        tokens.extend(quote::quote! {
            &FunctType::new(&[#(#arg),*], #ret)
        });
    }
}

impl ToTokens for ComplType {
    fn to_tokens(&self, tokens: &mut pm2::TokenStream) {
        match self {
            Self::BasicType(basic) => basic.to_tokens(tokens),
            Self::ArrayType(array) => array.to_tokens(tokens),
            Self::FunctType(funct) => funct.to_tokens(tokens),
        }
    }
}
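
// Entry point. A call such as `compl_type!(ctx, [f64; 8])` (hypothetical
// usage) expands to a block that binds `let c = ctx;` and then evaluates the
// expression generated for the requested type.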
#[proc_macro]
pub fn compl_type(input: pm::TokenStream) -> pm::TokenStream {
    struct InputComplType {
        contx: syn::ExprPath,
        compl: ComplType,
    }

    impl Parse for InputComplType {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            let contx = input.parse()?;
            input.parse::<Token![,]>()?;
            let compl = input.parse()?;
            Ok(Self { contx, compl })
        }
    }

    let InputComplType { contx, compl } =
        syn::parse_macro_input!(input as InputComplType);
    let expr = quote::quote! {
        {
            let c = #contx;
            #compl
        }
    };
    let mut output = pm2::TokenStream::new();
    expr.to_tokens(&mut output);
    output.into()
}
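
// Attribute macro for config items: derives `serde::Serialize`/`Deserialize`
// and attaches serde container attributes. Structs get kebab-case field
// names, rejection of unknown fields, and `#[serde(default)]`; enums only
// reject unknown fields.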
#[proc_macro_attribute]
pub fn serialize_config(
    attr: pm::TokenStream,
    item: pm::TokenStream,
) -> pm::TokenStream {
    use syn::Item;
    assert!(attr.is_empty());
    let item = syn::parse_macro_input!(item as Item);
    let item = match item {
        Item::Struct(mut item) => {
            item.attrs.push(syn::parse_quote! {
                #[derive(serde::Deserialize, serde::Serialize)]
            });
            item.attrs.push(syn::parse_quote! {
                #[serde(rename_all = "kebab-case", deny_unknown_fields, default)]
            });
            Item::Struct(item)
        }
        Item::Enum(mut item) => {
            item.attrs.push(syn::parse_quote! {
                #[derive(serde::Deserialize, serde::Serialize)]
            });
            item.attrs.push(syn::parse_quote! {
                #[serde(deny_unknown_fields)]
            });
            Item::Enum(item)
        }
        _ => panic!("Must be used on struct or enum item"),
    };
    let mut output = pm2::TokenStream::new();
    item.to_tokens(&mut output);
    output.into()
}
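
// Attribute macro that exposes a Rust function to native code: the annotated
// function is forced to `extern "C"`, and a companion `c_<name>` function is
// generated that registers it with the LLVM module and execution engine under
// the given name and function type.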
#[proc_macro_attribute]
pub fn nat_call(
    args: pm::TokenStream,
    item: pm::TokenStream,
) -> pm::TokenStream {
    struct Args {
        name: syn::LitStr,
        funct_type: FunctType,
    }

    impl Parse for Args {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            let name = input.parse()?;
            input.parse::<Token![,]>()?;
            let funct_type = input.parse()?;
            Ok(Self { name, funct_type })
        }
    }
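
    // Builds the generated `c_<name>` registration function; `Context`,
    // `c_str!`, `Function`, and the LLVM-C bindings are expected to be in
    // scope at the expansion site.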
    fn to_decl_item(
        i_name: Ident,
        c_name: Ident,
        name: syn::LitStr,
        funct_type: FunctType,
    ) -> pm2::TokenStream {
        quote::quote! {
            pub fn #c_name(c: &mut Context) {
                let name = c_str!(#name);
                let ftyp = #funct_type;
                let fdef = #i_name as _;
                unsafe {
                    let func = LLVMAddFunction(c.modul, name, ftyp.handle());
                    LLVMAddGlobalMapping(c.engin, func, fdef);
                    c.funcs.push(Function::new(func));
                }
            }
        }
    }

    let args = syn::parse_macro_input!(args as Args);
    let mut item = syn::parse_macro_input!(item as syn::ItemFn);
    item.sig.abi = Some(syn::parse_quote! { extern "C" });
    let i_name = item.sig.ident.clone();
    let c_name = quote::format_ident!("c_{}", i_name);
    let mut output = pm2::TokenStream::new();
    item.to_tokens(&mut output);
    output.extend(to_decl_item(i_name, c_name, args.name, args.funct_type));
    output.into()
}
// EOF