use darling to parse attributes

the code is much more readable now
Max Känner 2024-08-16 18:14:56 +02:00
parent 96f081b9da
commit aa1b8d9272
4 changed files with 512 additions and 553 deletions
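As context for the diff below: darling derives the attribute parser from an annotated options struct instead of hand-written `syn` matching. A minimal sketch of that pattern, with illustrative names (the commit's real definition is `ConfigField` in src/parser.rs):

```rust
use darling::{util::Override, FromField};

// Sketch only: `FieldOpts` is an illustrative stand-in for the commit's
// `parser::ConfigField`.
#[derive(Debug, FromField)]
#[darling(attributes(config))] // read #[config(...)] attributes on each field
#[darling(forward_attrs(doc))] // forward doc comments so the description survives
struct FieldOpts {
    // "magic" fields that darling fills in from the field itself
    ident: Option<syn::Ident>,
    ty: syn::Type,
    attrs: Vec<syn::Attribute>,
    // `Override` distinguishes a bare `min` (inherit, e.g. i32::MIN)
    // from an explicit `min = "..."` expression
    min: Option<Override<syn::Expr>>,
    max: Option<Override<syn::Expr>>,
    default: Option<Override<syn::Expr>>,
}

// One call per struct field replaces the old per-attribute parsing loops.
fn parse_field(field: &syn::Field) -> darling::Result<FieldOpts> {
    FieldOpts::from_field(field)
}
```

Parse failures come back as `darling::Error`, which the macro turns into compile errors via `write_errors()`, as `config()` does below.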

Cargo.toml

@@ -28,6 +28,7 @@ syn = { version = "2.0", features = ["full", "extra-traits"] }
proc-macro2 = "1.0"
quote = "1.0"
convert_case = "0.6.0"
darling = "0.20"
[dev-dependencies]
miniconf = { version = "0.13", features = ["json-core"] }

src/lib.rs

@@ -1,16 +1,12 @@
//! This crate creates `miniconf::Tree` implementations for fields in a struct. These carry some
//! extra information about the field.
use std::iter::once;
use convert_case::{Case, Casing};
use darling::{util::PathList, FromMeta};
use proc_macro::TokenStream;
use proc_macro2::{Span as Span2, TokenStream as TokenStream2, TokenTree as TokenTree2};
use quote::{format_ident, quote, ToTokens};
use syn::{
parse_macro_input, parse_quote, Attribute, DataStruct, DeriveInput, Expr, ExprLit, Field,
Ident, Lit, Type, Visibility,
};
use quote::quote;
use syn::{parse_macro_input, parse_quote, DataStruct, DeriveInput};
mod parser;
/// Creates newtype structs for the field values to extend them with extra metadata.
///
@@ -23,564 +19,71 @@ use syn::{
/// #[config]
/// struct Config {
/// This will be parsed as the description
/// #[min] // This will use i32::MIN for the minimum
/// #[max = 50] // The value 50 is used for the maximum
/// #[default = 42] // A `Default` implementation will be generated returning 42
/// // this value will have a minimum of i32::MIN,
/// // a maximum of 50 and a default of 42.
/// #[config(min, max = "50", default = "42")]
/// field1: i32,
/// }
/// ```
#[proc_macro_attribute]
pub fn config(_attr: TokenStream, item: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(item as DeriveInput);
let mut new_types = vec![];
let config = match parser::Config::parse(&input) {
Ok(c) => c,
Err(e) => return e.write_errors().into(),
};
match input.data {
syn::Data::Struct(DataStruct {
let newtypes = config
.fields()
.iter()
.map(|field| field.helper(&config.attrs[..]));
let newtype_types = newtypes.clone().map(|(gen, ty)| (ty, gen.is_none()));
let newtypes = newtypes.map(|(gen, _)| gen);
let syn::Data::Struct(DataStruct {
struct_token: _,
ref mut fields,
semi_token: _,
}) => {
for field in fields.iter_mut() {
if let Some(new_type) = generate_helper_struct(field, &input.ident, &input.vis) {
new_types.push(new_type);
}
}
}
syn::Data::Enum(_) => {
return quote! {compile_error!("Enums are not supported")}.into();
}
syn::Data::Union(_) => {
return quote! {compile_error!("Unions are not supported")}.into();
}) = input.data
else {
unreachable!()
};
// change types of fields to newtypes and remove the config attributes
for (field, (ty, skip)) in fields.iter_mut().zip(newtype_types) {
if skip {
continue;
}
field.ty = ty;
field.attrs.retain(|attr| !attr.path().is_ident("config"));
field.attrs.push(parse_quote!(#[tree(depth=1)]));
}
if let Some(attr) = input
if let Some(derive) = input
.attrs
.iter_mut()
.find(|attr| attr.path().is_ident("derive"))
{
if let Ok(meta) = attr.meta.require_list() {
let derives_tree = meta
.tokens
.clone()
.into_iter()
.filter_map(|token| match token {
TokenTree2::Ident(ident) if ident == Ident::new("Tree", ident.span()) => {
Some(ident)
}
_ => None,
})
.count()
== 1;
if !derives_tree {
match PathList::from_meta(&derive.meta) {
Ok(derives) => {
if !derives.iter().any(|path| {
path.segments
.last()
.map(|s| s.ident.to_string().contains("Tree"))
== Some(true)
}) {
input.attrs.push(parse_quote!(#[derive(::miniconf::Tree)]));
}
}
Err(e) => return e.write_errors().into(),
}
} else {
input.attrs.push(parse_quote!(#[derive(::miniconf::Tree)]));
}
quote! {
#input
#(#new_types)*
#(#newtypes)*
}
.into()
}
fn generate_helper_struct(
field: &mut Field,
input_ident: &Ident,
input_visibility: &Visibility,
) -> Option<TokenStream2> {
let ty = field.ty.clone();
let new_type_ident = format_ident!(
"__{}{}",
input_ident,
field
.ident
.as_ref()
.map_or("Value".to_owned(), |v| v.to_string().to_case(Case::Pascal))
);
let mut new_type_impls = TokenStream2::new();
let mut new_type_miniconf_names = vec![];
let mut new_type_miniconf_consts = vec![];
let mut extra_new_checks = TokenStream2::new();
for attr in &field.attrs {
if let Some((new_type_impl, new_check, const_ident, key)) =
parse_min(&new_type_ident, attr, &ty)
{
new_type_impls.extend(new_type_impl);
new_type_miniconf_consts.push(const_ident);
new_type_miniconf_names.push(key);
extra_new_checks.extend(new_check);
}
if let Some((new_type_impl, new_check, const_ident, key)) =
parse_max(&new_type_ident, attr, &ty)
{
new_type_impls.extend(new_type_impl);
new_type_miniconf_consts.push(const_ident);
new_type_miniconf_names.push(key);
extra_new_checks.extend(new_check);
}
if let Some((new_type_impl, const_ident, key)) = parse_default(&new_type_ident, attr, &ty) {
new_type_impls.extend(new_type_impl);
new_type_miniconf_consts.push(const_ident);
new_type_miniconf_names.push(key);
}
if let Some((new_type_impl, const_ident, key)) = parse_description(&new_type_ident, attr) {
new_type_impls.extend(new_type_impl);
new_type_miniconf_consts.push(const_ident);
new_type_miniconf_names.push(key);
}
}
if new_type_miniconf_names.is_empty() {
return None;
}
field.attrs.retain(|attr| {
!["min", "max", "default"]
.iter()
.any(|key| attr.path().is_ident(key))
});
field.attrs.push(parse_quote!(#[tree(depth=1)]));
let vis = if matches!(field.vis, Visibility::Public(_))
|| matches!(field.vis, Visibility::Inherited)
{
input_visibility
} else {
&field.vis
};
field.ty = parse_quote!(#new_type_ident);
let miniconf_fields = new_type_miniconf_names.len() + 1;
let new = generate_new(&new_type_ident, &ty, &extra_new_checks);
let serde = generate_serde(&new_type_ident, &ty, !extra_new_checks.is_empty());
let tree_key = generate_tree_key(&new_type_ident, new_type_miniconf_names.iter());
let tree_serialize = generate_tree_serialize(&new_type_ident, &new_type_miniconf_consts[..]);
let tree_deserialize = generate_tree_deserialize(&new_type_ident, miniconf_fields);
let tree_any = generate_tree_any(&new_type_ident, &new_type_miniconf_consts, miniconf_fields);
Some(quote! {
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]
#vis struct #new_type_ident(#ty);
#new_type_impls
#new
#serde
#tree_key
#tree_serialize
#tree_deserialize
#tree_any
})
}
fn parse_min(
ident: &Ident,
attr: &Attribute,
ty: &Type,
) -> Option<(TokenStream2, Option<TokenStream2>, Ident, &'static str)> {
const KEY: &str = "min";
if !attr.path().is_ident(KEY) {
return None;
}
let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
let (value, new_check) = attr.meta.require_path_only().map_or_else(
|_| {
let value = match &attr.meta.require_name_value() {
Ok(meta) => &meta.value,
Err(e) => return (e.to_owned().into_compile_error(), None),
};
(
value.to_token_stream(),
Some(quote! {
if (value < Self::#const_ident.0) {
return None;
}
}),
)
},
|_| (quote!(#ty::#const_ident), None),
);
let impl_quote = quote! {
impl #ident {
const #const_ident: Self = Self(#value);
}
};
Some((impl_quote, new_check, const_ident, KEY))
}
fn parse_max(
ident: &Ident,
attr: &Attribute,
ty: &Type,
) -> Option<(TokenStream2, Option<TokenStream2>, Ident, &'static str)> {
const KEY: &str = "max";
if !attr.path().is_ident(KEY) {
return None;
}
let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
let (value, new_check) = attr.meta.require_path_only().map_or_else(
|_| {
let value = match &attr.meta.require_name_value() {
Ok(meta) => &meta.value,
Err(e) => return (e.to_owned().into_compile_error(), None),
};
(
value.to_token_stream(),
Some(quote! {
if (value > Self::#const_ident.0) {
return None;
}
}),
)
},
|_| (quote!(#ty::#const_ident), None),
);
let impl_quote = quote! {
impl #ident {
const #const_ident: Self = Self(#value);
}
};
Some((impl_quote, new_check, const_ident, KEY))
}
fn parse_default(
ident: &Ident,
attr: &Attribute,
ty: &Type,
) -> Option<(TokenStream2, Ident, &'static str)> {
const KEY: &str = "default";
if !attr.path().is_ident(KEY) {
return None;
}
let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
let value = attr.meta.require_path_only().map_or_else(
|_| match &attr.meta.require_name_value() {
Ok(meta) => meta.value.to_token_stream(),
Err(e) => e.to_owned().into_compile_error(),
},
|_| quote!(#ty::#const_ident),
);
let impl_quote = quote! {
impl #ident {
const #const_ident: Self = Self(#value);
}
impl ::core::default::Default for #ident {
fn default() -> Self {
Self::#const_ident
}
}
};
Some((impl_quote, const_ident, KEY))
}
fn parse_description(
ident: &Ident,
attr: &Attribute,
) -> Option<(TokenStream2, Ident, &'static str)> {
const KEY: &str = "description";
if !attr.path().is_ident("doc") {
return None;
}
let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
let value = match attr.meta.require_name_value() {
Ok(meta) => &meta.value,
Err(e) => {
return Some((e.into_compile_error(), const_ident, KEY));
}
};
let value = match value {
Expr::Lit(ExprLit {
attrs: _,
lit: Lit::Str(ref string),
}) => string.value(),
_ => {
return Some((
quote!(compile_error!("doc comment must be a string")),
const_ident,
KEY,
))
}
};
let trimmed_value = value.trim();
let impl_quote = quote! {
impl #ident {
const #const_ident: &'static str = #trimmed_value;
}
};
Some((impl_quote, const_ident, KEY))
}
fn generate_new(ident: &Ident, ty: &Type, extra_checks: &TokenStream2) -> TokenStream2 {
if extra_checks.is_empty() {
quote! {
impl #ident {
pub const fn new(value: #ty) -> Self {
Self(value)
}
}
impl ::core::ops::Deref for #ident {
type Target = #ty;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl ::core::ops::DerefMut for #ident {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
}
} else {
let const_new = if [
parse_quote!(u8),
parse_quote!(u16),
parse_quote!(u32),
parse_quote!(u64),
parse_quote!(u128),
parse_quote!(i8),
parse_quote!(i16),
parse_quote!(i32),
parse_quote!(i64),
parse_quote!(i128),
]
.contains(ty)
{
Some(quote!(const))
} else {
None
};
quote! {
impl #ident {
pub #const_new fn new(value: #ty) -> ::core::option::Option<Self> {
#extra_checks
::core::option::Option::Some(Self(value))
}
#[allow(dead_code)]
const unsafe fn new_unchecked(value: #ty) -> Self {
Self(value)
}
}
impl ::core::ops::Deref for #ident {
type Target = #ty;
fn deref(&self) -> &Self::Target {
&self.0
}
}
}
}
}
fn generate_serde(ident: &Ident, ty: &Type, checked_new: bool) -> TokenStream2 {
let conversion = if checked_new {
quote! {
Self::new(value).ok_or_else(|| {
<D::Error as ::serde::de::Error>::custom("checking value bounds")
})
}
} else {
quote! {
Ok(Self::new(value))
}
};
quote! {
impl ::serde::Serialize for #ident {
fn serialize<S>(&self, serializer: S) -> ::core::result::Result<S::Ok, S::Error>
where
S: ::serde::Serializer,
{
self.0.serialize(serializer)
}
}
impl<'de> ::serde::Deserialize<'de> for #ident {
fn deserialize<D>(deserializer: D) -> ::core::result::Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>,
{
type T = #ty;
let value = T::deserialize(deserializer)?;
#conversion
}
}
}
}
fn generate_tree_key<'a>(
ident: &Ident,
keys: impl ExactSizeIterator<Item = &'a &'a str> + Clone,
) -> TokenStream2 {
let keys = once(&"value").chain(keys);
let num_keys = keys
.size_hint()
.1
.expect("safe because both iterators (once and original keys) are exact");
let max_length = keys.clone().map(|v| v.len()).max();
quote! {
impl ::miniconf::KeyLookup for #ident {
const LEN: usize = #num_keys;
const NAMES: &'static [&'static str] = &[#(#keys,)*];
fn name_to_index(value: &str) -> Option<usize> {
Self::NAMES.iter().position(|name| *name == value)
}
}
impl ::miniconf::TreeKey<1> for #ident {
fn metadata() -> miniconf::Metadata {
let mut metadata = miniconf::Metadata::default();
metadata.max_depth = 1;
metadata.count = #num_keys;
metadata.max_length = #max_length;
metadata
}
fn traverse_by_key<K, F, E>(mut keys: K, mut func: F) -> ::core::result::Result<usize, ::miniconf::Error<E>>
where
K: ::miniconf::Keys,
// Writing this to return an iterator instead of using a callback
// would have worse performance (O(n^2) instead of O(n) for matching)
F: FnMut(usize, Option<&'static str>, usize) -> ::core::result::Result<(), E>,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else { return ::core::result::Result::Ok(0) };
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
let name = <Self as ::miniconf::KeyLookup>::NAMES
.get(index)
.ok_or(::miniconf::Traversal::NotFound(1))?;
func(index, Some(name), #num_keys).map_err(|err| ::miniconf::Error::Inner(1, err))?;
::miniconf::Error::increment_result(::core::result::Result::Ok(0))
}
}
}
}
fn generate_tree_serialize(ident: &Ident, consts: &[Ident]) -> TokenStream2 {
let matches = consts.iter().enumerate().map(|(i, ident)| {
let index = i + 1;
quote! {
#index => Self::#ident.serialize(ser).map_err(|err| ::miniconf::Error::Inner(0, err))?,
}
});
quote! {
impl ::miniconf::TreeSerialize<1> for #ident {
fn serialize_by_key<K, S>(
&self,
mut keys: K,
ser: S,
) -> ::core::result::Result<usize, ::miniconf::Error<S::Error>>
where
K: ::miniconf::Keys,
S: ::serde::Serializer,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return ::miniconf::Error::increment_result({
self.serialize(ser).map_err(|err| ::miniconf::Error::Inner(0, err))?;
::core::result::Result::Ok(0)
});
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
}
::miniconf::Error::increment_result({
match index {
0 => self.serialize(ser).map_err(|err| ::miniconf::Error::Inner(0, err))?,
#(#matches)*
_ => unreachable!(),
};
::core::result::Result::Ok(0)
})
}
}
}
}
fn generate_tree_deserialize(ident: &Ident, num_keys: usize) -> TokenStream2 {
quote! {
impl<'de> ::miniconf::TreeDeserialize<'de, 1> for #ident {
fn deserialize_by_key<K, D>(
&mut self,
mut keys: K,
de: D,
) -> ::core::result::Result<usize, ::miniconf::Error<D::Error>>
where
K: ::miniconf::Keys,
D: ::serde::Deserializer<'de>,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
*self = Self::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
return ::core::result::Result::Ok(0);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
}
match index {
0 => ::miniconf::Error::increment_result((||{
*self = Self::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
Ok(0)
})()),
1..=#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Absent(0))?,
_ => unreachable!(),
}
}
}
}
}
fn generate_tree_any(ident: &Ident, consts: &[Ident], num_keys: usize) -> TokenStream2 {
let matches = consts.iter().enumerate().map(|(i, ident)| {
let index = i + 1;
quote! {
#index => ::core::result::Result::Ok(&Self::#ident),
}
});
quote! {
impl ::miniconf::TreeAny<1> for #ident {
fn ref_any_by_key<K>(&self, mut keys: K) -> ::core::result::Result<&dyn ::core::any::Any, ::miniconf::Traversal>
where
K: ::miniconf::Keys,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return ::core::result::Result::Ok(&self.0);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
}
match index {
0 => ::core::result::Result::Ok(&self.0),
#(#matches)*
_ => unreachable!(),
}
}
fn mut_any_by_key<K>(&mut self, mut keys: K) -> ::core::result::Result<&mut dyn ::core::any::Any, ::miniconf::Traversal>
where
K: ::miniconf::Keys,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return ::core::result::Result::Ok(&mut self.0);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
}
match index {
0 => ::core::result::Result::Ok(&mut self.0),
1..=#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Absent(0)),
_ => unreachable!(),
}
}
}
}
}

src/parser.rs (new file, 456 lines)

@@ -0,0 +1,456 @@
#![allow(clippy::option_if_let_else)]
use convert_case::{Case, Casing};
use darling::{
ast::{self, Data},
util::{Override, PathList},
Error, FromDeriveInput, FromField, FromMeta,
};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote};
use syn::parse_quote;
#[derive(Debug, FromField)]
#[darling(attributes(config))]
#[darling(forward_attrs(doc))]
pub struct ConfigField {
ident: Option<syn::Ident>,
vis: syn::Visibility,
ty: syn::Type,
attrs: Vec<syn::Attribute>,
min: Option<Override<syn::Expr>>,
max: Option<Override<syn::Expr>>,
default: Option<Override<syn::Expr>>,
#[darling(skip)]
description: Option<syn::Expr>,
#[darling(skip)]
parent_ident: Option<syn::Ident>,
}
impl ConfigField {
pub(crate) fn needs_newtype(&self) -> bool {
self.helper_keys().len() > 1
}
pub(crate) fn helper(&self, attrs: &[syn::Attribute]) -> (Option<TokenStream>, syn::Type) {
if self.needs_newtype() {
let derives = attrs
.iter()
.find(|attr| attr.path().is_ident("derive"))
.map(|attr| {
let mut derives = (*PathList::from_meta(&attr.meta).unwrap()).clone();
derives.retain(|derive| {
derive.segments.last().map(|s| {
let derive = s.ident.to_string();
derive.contains("Tree")
|| derive.contains("Serialize")
|| derive.contains("Deserialize")
}) == Some(false)
});
quote! {#[derive(#(#derives,)*)]}
});
let new_type_ident = self.helper_ident();
let vis = &self.vis;
let ty = &self.ty;
let new = self.helper_new();
let default = self.helper_default();
let serde = self.helper_serde();
let tree = self.helper_tree();
(
Some(quote! {
#derives
#vis struct #new_type_ident(#ty);
#new
#default
#serde
#tree
}),
parse_quote!(#new_type_ident),
)
} else {
(None, self.ty.clone())
}
}
pub(crate) fn helper_ident(&self) -> syn::Ident {
format_ident!(
"__{}{}",
self.parent_ident
.as_ref()
.unwrap_or(&syn::Ident::new("Struct", Span::mixed_site())),
self.ident
.as_ref()
.map_or("Field".to_owned(), |v| v.to_string().to_case(Case::Pascal))
)
}
pub(crate) fn has_custom_limits(&self) -> bool {
!((self.min.is_none() || self.min == Some(Override::Inherit))
&& (self.max.is_none() || self.max == Some(Override::Inherit)))
}
pub(crate) fn helper_new(&self) -> TokenStream {
let ident = self.helper_ident();
let ty = &self.ty;
if self.has_custom_limits() {
let min = self.min.as_ref().and_then(|v| v.as_ref().explicit()).map_or_else(|| quote!(#ty::MIN), |v| quote!(#v));
let max = self.max.as_ref().and_then(|v| v.as_ref().explicit()).map_or_else(|| quote!(#ty::MAX), |v| quote!(#v));
quote! {
impl #ident {
pub fn new(value: #ty) -> Option<Self> {
if (#min..=#max).contains(&value) {
Some(Self(value))
} else {
None
}
}
#[allow(dead_code)]
pub const unsafe fn new_unchecked(value: #ty) -> Self {
Self(value)
}
}
impl ::core::ops::Deref for #ident {
type Target = #ty;
fn deref(&self) -> &Self::Target {
&self.0
}
}
}
} else {
quote! {
impl #ident {
pub const fn new(value: #ty) -> Self {
Self(value)
}
}
impl ::core::ops::Deref for #ident {
type Target = #ty;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl ::core::ops::DerefMut for #ident {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
}
}
}
pub(crate) fn helper_default(&self) -> Option<TokenStream> {
self.default.as_ref().map(|default| {
let ident = self.helper_ident();
let default_default = parse_quote!(::core::default::Default::default());
let default = default.as_ref().unwrap_or(&default_default);
quote! {
impl ::core::default::Default for #ident {
fn default() -> Self {
Self(#default)
}
}
}
})
}
pub(crate) fn helper_serde(&self) -> TokenStream {
let ident = self.helper_ident();
let conversion = if self.has_custom_limits() {
quote! {
Self::new(value).ok_or_else(|| {
<D::Error as ::serde::de::Error>::custom("checking value bounds")
})
}
} else {
quote! {
Ok(Self::new(value))
}
};
let ty = &self.ty;
quote! {
impl ::serde::Serialize for #ident {
fn serialize<S>(&self, serializer: S) -> ::core::result::Result::<S::Ok, S::Error>
where
S: ::serde::Serializer,
{
self.0.serialize(serializer)
}
}
impl<'de> ::serde::Deserialize<'de> for #ident {
fn deserialize<D>(deserializer: D) -> ::core::result::Result<Self, D::Error>
where
D: ::serde::Deserializer<'de>,
{
type T = #ty;
let value = T::deserialize(deserializer)?;
#conversion
}
}
}
}
pub(crate) fn helper_tree(&self) -> TokenStream {
let mut tokens = self.helper_tree_key();
tokens.extend(self.helper_tree_serialize());
tokens.extend(self.helper_tree_deserialize());
tokens.extend(self.helper_tree_any());
tokens
}
pub(crate) fn helper_keys(&self) -> Vec<(&'static str, syn::Expr)> {
macro_rules! field_to_key {
($field: ident, $default: expr) => {
self.$field.as_ref().map(|value| {
let ty = &self.ty;
(stringify!($field),
value.clone().explicit().map(|val| {
parse_quote!({
let val: #ty = #val;
val
})
}).unwrap_or_else(|| {
parse_quote!(#ty::$default)
}))
})
};
($field: ident) => {
self.$field.as_ref().map(|value| {
(stringify!($field), value.clone())
})
}
}
[
Some(("value", parse_quote!(self))),
field_to_key!(default, default()),
field_to_key!(min, MIN),
field_to_key!(max, MAX),
field_to_key!(description),
]
.into_iter()
.flatten()
.collect::<Vec<_>>()
}
pub(crate) fn helper_tree_key(&self) -> TokenStream {
let ident = self.helper_ident();
let keys = self.helper_keys();
let num_keys = keys.len();
let keys = keys.iter().map(|(name, _)| name);
let max_length = keys.clone().map(|v| v.len()).max();
quote! {
impl ::miniconf::KeyLookup for #ident {
const LEN: usize = #num_keys;
const NAMES: &'static [&'static str] = &[#(#keys,)*];
fn name_to_index(value: &str) -> Option<usize> {
Self::NAMES.iter().position(|name| *name == value)
}
}
impl ::miniconf::TreeKey<1> for #ident {
fn metadata() -> ::miniconf::Metadata {
let mut metadata = ::miniconf::Metadata::default();
metadata.max_depth = 1;
metadata.count = #num_keys;
metadata.max_length = #max_length;
metadata
}
fn traverse_by_key<K, F, E>(mut keys: K, mut func: F) -> ::core::result::Result<usize, ::miniconf::Error<E>>
where
K: ::miniconf::Keys,
// Writing this to return an iterator instead of using a callback
// would have worse performance (O(n^2) instead of O(n) for matching)
F: FnMut(usize, Option<&'static str>, usize) -> ::core::result::Result<(), E>,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else { return ::core::result::Result::Ok(0) };
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
let name = <Self as ::miniconf::KeyLookup>::NAMES
.get(index)
.ok_or(::miniconf::Traversal::NotFound(1))?;
func(index, Some(name), #num_keys).map_err(|err| ::miniconf::Error::Inner(1, err))?;
::core::result::Result::Ok(1)
}
}
}
}
pub(crate) fn helper_tree_serialize(&self) -> TokenStream {
let matches = self
.helper_keys()
.iter()
.enumerate()
.map(|(i, (_, expr))| {
quote! {
#i => #expr.serialize(ser).map_err(|err| ::miniconf::Error::Inner(0, err)),
}
})
.collect::<Vec<_>>();
let ident = self.helper_ident();
quote! {
impl ::miniconf::TreeSerialize<1> for #ident {
fn serialize_by_key<K, S>(
&self,
mut keys: K,
ser: S,
) -> ::core::result::Result<usize, ::miniconf::Error<S::Error>>
where
K: ::miniconf::Keys,
S: ::serde::Serializer,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return self.serialize(ser).map_err(|err| ::miniconf::Error::Inner(1, err)).map(|_| 1);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
return ::core::result::Result::Err(::miniconf::Traversal::TooLong(1).into());
}
match index {
#(#matches)*
_ => unreachable!(),
}?;
Ok(0)
}
}
}
}
pub(crate) fn helper_tree_deserialize(&self) -> TokenStream {
let ident = self.helper_ident();
let num_keys = self.helper_keys().len();
quote! {
impl<'de> ::miniconf::TreeDeserialize<'de, 1> for #ident {
fn deserialize_by_key<K, D>(
&mut self,
mut keys: K,
de: D,
) -> ::core::result::Result<usize, ::miniconf::Error<D::Error>>
where
K: ::miniconf::Keys,
D: ::serde::Deserializer<'de>,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
*self = Self::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
return ::core::result::Result::Ok(0);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
return ::core::result::Result::Err(::miniconf::Traversal::TooLong(1).into());
}
match index {
0 => {
*self = Self::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
Ok(0)
},
1..=#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Access(0, "Cannot write limits").into()),
_ => unreachable!(),
}
}
}
}
}
pub(crate) fn helper_tree_any(&self) -> TokenStream {
let ident = self.helper_ident();
let num_keys = self.helper_keys().len();
let ref_mut = if self.has_custom_limits() {
quote! {::core::result::Result::Err(::miniconf::Traversal::Access(0, "field has custom limits"))}
} else {
quote!(::core::result::Result::Ok(&mut *self))
};
quote! {
impl ::miniconf::TreeAny<1> for #ident {
fn ref_any_by_key<K>(&self, mut keys: K) -> ::core::result::Result<&dyn ::core::any::Any, ::miniconf::Traversal>
where
K: ::miniconf::Keys,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return ::core::result::Result::Ok(&*self);
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
return ::core::result::Result::Err(::miniconf::Traversal::TooLong(1));
}
match index {
0 => ::core::result::Result::Ok(&*self),
1..#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Access(1, "cannot return reference to local variable")),
_ => unreachable!(),
}
}
fn mut_any_by_key<K>(&mut self, mut keys: K) -> ::core::result::Result<&mut dyn ::core::any::Any, ::miniconf::Traversal>
where
K: ::miniconf::Keys,
{
let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
return #ref_mut;
};
let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
if !keys.finalize() {
return ::core::result::Result::Err(::miniconf::Traversal::TooLong(1));
}
match index {
0 => #ref_mut,
1..#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Access(1, "cannot return reference to local variable")),
_ => unreachable!(),
}
}
}
}
}
}
#[derive(Debug, FromDeriveInput)]
#[darling(forward_attrs(derive))]
#[darling(supports(struct_any))]
pub struct Config {
ident: syn::Ident,
// generics: syn::Generics,
// vis: syn::Visibility,
data: ast::Data<(), ConfigField>,
pub attrs: Vec<syn::Attribute>,
}
impl Config {
pub(crate) fn parse(input: &syn::DeriveInput) -> Result<Self, Error> {
let mut config = Self::from_derive_input(input)?;
let ident = config.ident.clone();
let fields = config.fields_mut();
for field in fields.iter_mut() {
field.description = field
.attrs
.iter()
.find(|attr| attr.path().is_ident("doc"))
.map(|attr| {
let description = syn::LitStr::from_meta(&attr.meta).unwrap().value();
let description = description.trim();
parse_quote!(#description)
});
field.parent_ident = Some(ident.clone());
}
Ok(config)
}
pub(crate) fn fields(&self) -> &Vec<ConfigField> {
let Data::Struct(fields) = &self.data else {
unreachable!()
};
&fields.fields
}
pub(crate) fn fields_mut(&mut self) -> &mut Vec<ConfigField> {
let Data::Struct(fields) = &mut self.data else {
unreachable!()
};
&mut fields.fields
}
}


@@ -4,7 +4,7 @@ use macroconf::config
use miniconf::{
Error::Traversal,
IntoKeys, JsonCoreSlash, Path,
Traversal::{Absent, TooLong},
Traversal::{Access, TooLong},
Tree, TreeKey,
};
use serde::{Deserialize, Serialize};
@@ -13,11 +13,11 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Tree)]
struct SubConfig {
skipped: i32,
#[min]
#[config(min)]
min: i32,
#[max]
#[config(max)]
max: i32,
#[default = 0]
#[config(default = "0")]
default: i32,
/// This is a description
description: i32,
@@ -94,9 +94,8 @@ fn serialize() {
("/description/value", "5"),
("/description/description", "\"This is a description\""),
] {
let res = config.get_json(input, &mut buffer);
assert_eq!(res, Ok(output.len()));
assert_eq!(from_utf8(&buffer[..output.len()]), Ok(output));
let len = config.get_json(input, &mut buffer).unwrap();
assert_eq!(from_utf8(&buffer[..len]), Ok(output));
}
}
@@ -132,7 +131,7 @@ fn deserialize() {
"/description/description",
] {
let res = config.set_json(input, b"10");
assert_eq!(res, Err(Traversal(Absent(1))));
assert_eq!(res, Err(Traversal(Access(1, "Cannot write limits"))));
}
}