From ed32fb9d0ff1debe4812868376026d56c330c46d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Max=20K=C3=A4nner?=
Date: Mon, 15 Apr 2024 20:27:39 +0200
Subject: [PATCH] Initial commit

---
 Cargo.toml |  14 +-
 src/lib.rs | 480 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 491 insertions(+), 3 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index fa75be1..c98f1df 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,10 +1,10 @@
 [package]
-name = "lib-rs"
+name = "macroconf"
 version = "0.1.0"
 edition = "2021"
-description = "library template"
+description = "macro for creating configurations using miniconf"
 license = "MIT OR Apache-2.0"
-repository = "https://git.mkaenner.de/max/lib-rs"
+repository = "https://git.mkaenner.de/max/macroconf.git"
 
 [lints.rust]
 unsafe_code = "forbid"
@@ -14,8 +14,16 @@ enum_glob_use = "deny"
 pedantic = "deny"
 nursery = "deny"
 unwrap_used = "deny"
+expect_used = "deny"
 cargo = "warn"
 
+[lib]
+proc-macro = true
+
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
+syn = { version = "2.0", features = ["full", "extra-traits"] }
+proc-macro2 = "1.0"
+quote = "1.0"
+convert_case = "0.6.0"
diff --git a/src/lib.rs b/src/lib.rs
index 139597f..bf6d479 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,2 +1,482 @@
+use std::iter::once;
+
+use convert_case::{Case, Casing};
+use proc_macro::TokenStream;
+use proc_macro2::{Span as Span2, TokenStream as TokenStream2};
+use quote::{format_ident, quote};
+use syn::{
+    parse_macro_input, parse_quote, Attribute, DataStruct, DeriveInput, Expr, ExprLit, Field,
+    Ident, Lit, Type, Visibility,
+};
+
+/// Creates wrapper structs for the fields of a struct to extend them with extra metadata.
+///
+/// Supported metadata are `min`, `max`, and `default`. Doc comments are parsed as `description`.
+///
+/// # Panics
+///
+/// Panics if it is unable to generate a config struct from the input.
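+/// # Example
+///
+/// An illustrative sketch of the intended usage (struct, field, and values are
+/// hypothetical; the struct is assumed to also derive `miniconf::Tree` so the
+/// rewritten field can be traversed):
+///
+/// ```ignore
+/// use macroconf::config;
+/// use miniconf::Tree;
+///
+/// #[config]
+/// #[derive(Tree)]
+/// struct Config {
+///     /// Gain of the amplifier
+///     #[min = 0]
+///     #[max = 100]
+///     #[default = 42]
+///     gain: i32,
+/// }
+/// ```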
+#[proc_macro_attribute]
+pub fn config(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let mut input = parse_macro_input!(item as DeriveInput);
+    let mut new_types = vec![];
+
+    match input.data {
+        syn::Data::Struct(DataStruct {
+            struct_token: _,
+            ref mut fields,
+            semi_token: _,
+        }) => {
+            for field in fields.iter_mut() {
+                if let Some(new_type) = generate_helper_struct(field, &input.ident, &input.vis) {
+                    new_types.push(new_type);
+                }
+            }
+        }
+        syn::Data::Enum(_) => panic!("Enums are not supported"),
+        syn::Data::Union(_) => panic!("Unions are not supported"),
+    }
+
+    quote! {
+        #input
+        #(#new_types)*
+    }
+    .into()
+}
+
+fn generate_helper_struct(
+    field: &mut Field,
+    input_ident: &Ident,
+    input_visibility: &Visibility,
+) -> Option<TokenStream2> {
+    let ty = field.ty.clone();
+    let new_type_ident = format_ident!(
+        "__{}{}",
+        input_ident,
+        field
+            .ident
+            .as_ref()
+            .map_or("Value".to_owned(), |v| v.to_string().to_case(Case::Pascal))
+    );
+    let mut new_type_impls = TokenStream2::new();
+    let mut new_type_miniconf_names = vec![];
+    let mut new_type_miniconf_consts = vec![];
+    let mut extra_new_checks = TokenStream2::new();
+    let mut generate_new_type = false;
+    for attr in &field.attrs {
+        if let Some((new_type_impl, new_check, const_ident, key)) =
+            parse_min(&new_type_ident, attr, &ty)
+        {
+            new_type_impls.extend(new_type_impl);
+            new_type_miniconf_consts.push(const_ident);
+            new_type_miniconf_names.push(key);
+            extra_new_checks.extend(new_check);
+            generate_new_type = true;
+        }
+        if let Some((new_type_impl, new_check, const_ident, key)) =
+            parse_max(&new_type_ident, attr, &ty)
+        {
+            new_type_impls.extend(new_type_impl);
+            new_type_miniconf_consts.push(const_ident);
+            new_type_miniconf_names.push(key);
+            extra_new_checks.extend(new_check);
+            generate_new_type = true;
+        }
+        if let Some((new_type_impl, const_ident, key)) = parse_default(&new_type_ident, attr, &ty) {
+            new_type_impls.extend(new_type_impl);
+            new_type_miniconf_consts.push(const_ident);
+            new_type_miniconf_names.push(key);
+            generate_new_type = true;
+        }
+        if let Some((new_type_impl, const_ident, key)) = parse_description(&new_type_ident, attr) {
+            new_type_impls.extend(new_type_impl);
+            new_type_miniconf_consts.push(const_ident);
+            new_type_miniconf_names.push(key);
+        }
+    }
+    if !generate_new_type {
+        return None;
+    }
+    field.attrs.retain(|attr| {
+        !["min", "max", "default"]
+            .iter()
+            .any(|key| attr.path().is_ident(key))
+    });
+
+    field.attrs.push(parse_quote!(#[tree(depth(1))]));
+    let vis = if matches!(field.vis, Visibility::Public(_) | Visibility::Inherited) {
+        input_visibility
+    } else {
+        &field.vis
+    };
+    field.ty = parse_quote!(#new_type_ident);
+    let miniconf_fields = new_type_miniconf_names.len() + 1;
+
+    let new = generate_new(&new_type_ident, &ty, &extra_new_checks);
+    let serde = generate_serde(&new_type_ident, &ty, !extra_new_checks.is_empty());
+
+    let tree_key = generate_tree_key(&new_type_ident, new_type_miniconf_names.iter());
+    let tree_serialize = generate_tree_serialize(&new_type_ident, &new_type_miniconf_consts[..]);
+    let tree_deserialize = generate_tree_deserialize(&new_type_ident, miniconf_fields);
+
+    Some(quote! {
+        #[allow(clippy::derive_partial_eq_without_eq)]
+        #[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]
+        #vis struct #new_type_ident(#ty);
+
+        #new_type_impls
+
+        #new
+        #serde
+
+        #tree_key
+        #tree_serialize
+        #tree_deserialize
+    })
+}
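+// For orientation: for a hypothetical field `gain: i32` in `struct Config`
+// carrying `#[min = 0]`, `#[max = 100]`, `#[default = 42]` and a doc comment,
+// the helpers below emit roughly
+//
+//     #[derive(Debug, PartialEq, PartialOrd, Clone, Copy)]
+//     struct __ConfigGain(i32);
+//
+// plus `MIN`, `MAX`, `DEFAULT` and `DESCRIPTION` consts and the miniconf
+// trait impls, while the field itself is rewritten to `gain: __ConfigGain`.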
+fn parse_min(
+    ident: &Ident,
+    attr: &Attribute,
+    ty: &Type,
+) -> Option<(TokenStream2, Option<TokenStream2>, Ident, &'static str)> {
+    const KEY: &str = "min";
+    if !attr.path().is_ident(KEY) {
+        return None;
+    }
+    let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
+    let (value, new_check) = attr.meta.require_path_only().map_or_else(
+        |_| {
+            let value = &attr
+                .meta
+                .require_name_value()
+                .unwrap_or_else(|_| panic!("expected `#[{KEY}]` or `#[{KEY} = <expr>]`"))
+                .value;
+            (
+                quote!(#value),
+                Some(quote! {
+                    if (value < Self::#const_ident.0) {
+                        return None;
+                    }
+                }),
+            )
+        },
+        |_| (quote!(#ty::#const_ident), None),
+    );
+    let impl_quote = quote! {
+        impl #ident {
+            const #const_ident: Self = Self(#value);
+        }
+    };
+    Some((impl_quote, new_check, const_ident, KEY))
+}
+
+fn parse_max(
+    ident: &Ident,
+    attr: &Attribute,
+    ty: &Type,
+) -> Option<(TokenStream2, Option<TokenStream2>, Ident, &'static str)> {
+    const KEY: &str = "max";
+    if !attr.path().is_ident(KEY) {
+        return None;
+    }
+    let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
+    let (value, new_check) = attr.meta.require_path_only().map_or_else(
+        |_| {
+            let value = &attr
+                .meta
+                .require_name_value()
+                .unwrap_or_else(|_| panic!("expected `#[{KEY}]` or `#[{KEY} = <expr>]`"))
+                .value;
+            (
+                quote!(#value),
+                Some(quote! {
+                    if (value > Self::#const_ident.0) {
+                        return None;
+                    }
+                }),
+            )
+        },
+        |_| (quote!(#ty::#const_ident), None),
+    );
+    let impl_quote = quote! {
+        impl #ident {
+            const #const_ident: Self = Self(#value);
+        }
+    };
+    Some((impl_quote, new_check, const_ident, KEY))
+}
+
+fn parse_default(
+    ident: &Ident,
+    attr: &Attribute,
+    ty: &Type,
+) -> Option<(TokenStream2, Ident, &'static str)> {
+    const KEY: &str = "default";
+    if !attr.path().is_ident(KEY) {
+        return None;
+    }
+    let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
+    let value = attr.meta.require_path_only().map_or_else(
+        |_| {
+            let value = &attr
+                .meta
+                .require_name_value()
+                .unwrap_or_else(|_| panic!("expected `#[{KEY}]` or `#[{KEY} = <expr>]`"))
+                .value;
+            quote!(#value)
+        },
+        |_| quote!(#ty::#const_ident),
+    );
+    let impl_quote = quote! {
+        impl #ident {
+            const #const_ident: Self = Self(#value);
+        }
+
+        impl ::core::default::Default for #ident {
+            fn default() -> Self {
+                Self::#const_ident
+            }
+        }
+    };
+    Some((impl_quote, const_ident, KEY))
+}
+
+fn parse_description(
+    ident: &Ident,
+    attr: &Attribute,
+) -> Option<(TokenStream2, Ident, &'static str)> {
+    const KEY: &str = "description";
+    if !attr.path().is_ident("doc") {
+        return None;
+    }
+    let const_ident = Ident::new(KEY.to_case(Case::Upper).as_str(), Span2::mixed_site());
+    let value = &attr.meta.require_name_value().ok()?.value;
+    let value = match value {
+        Expr::Lit(ExprLit {
+            attrs: _,
+            lit: Lit::Str(ref string),
+        }) => string.value(),
+        _ => panic!("doc comment must be a string"),
+    };
+    let trimmed_value = value.trim();
+    let impl_quote = quote! {
+        impl #ident {
+            const #const_ident: &'static str = #trimmed_value;
+        }
+    };
+    Some((impl_quote, const_ident, KEY))
+}
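+// Constructor sketch for a bounded field (hypothetical `__ConfigGain` with
+// `#[min = 0]` and `#[max = 100]` on an `i32`):
+//
+//     impl __ConfigGain {
+//         pub const fn new(value: i32) -> Option<Self> {
+//             if (value < Self::MIN.0) { return None; }
+//             if (value > Self::MAX.0) { return None; }
+//             Some(Self(value))
+//         }
+//     }
+//
+// Without bounds, `new` is infallible and returns `Self` directly; `const` is
+// only emitted for the primitive integer types listed below.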
+fn generate_new(ident: &Ident, ty: &Type, extra_checks: &TokenStream2) -> TokenStream2 {
+    if extra_checks.is_empty() {
+        quote! {
+            impl #ident {
+                pub const fn new(value: #ty) -> Self {
+                    Self(value)
+                }
+            }
+
+            impl ::core::ops::Deref for #ident {
+                type Target = #ty;
+
+                fn deref(&self) -> &Self::Target {
+                    &self.0
+                }
+            }
+
+            impl ::core::ops::DerefMut for #ident {
+                fn deref_mut(&mut self) -> &mut Self::Target {
+                    &mut self.0
+                }
+            }
+        }
+    } else {
+        let const_new = if [
+            parse_quote!(u8),
+            parse_quote!(u16),
+            parse_quote!(u32),
+            parse_quote!(u64),
+            parse_quote!(u128),
+            parse_quote!(i8),
+            parse_quote!(i16),
+            parse_quote!(i32),
+            parse_quote!(i64),
+            parse_quote!(i128),
+        ]
+        .contains(ty)
+        {
+            Some(quote!(const))
+        } else {
+            None
+        };
+        quote! {
+            impl #ident {
+                pub #const_new fn new(value: #ty) -> ::core::option::Option<Self> {
+                    #extra_checks
+                    ::core::option::Option::Some(Self(value))
+                }
+
+                #[allow(dead_code)]
+                const unsafe fn new_unchecked(value: #ty) -> Self {
+                    Self(value)
+                }
+            }
+
+            impl ::core::ops::Deref for #ident {
+                type Target = #ty;
+
+                fn deref(&self) -> &Self::Target {
+                    &self.0
+                }
+            }
+        }
+    }
+}
+
+fn generate_serde(ident: &Ident, ty: &Type, checked_new: bool) -> TokenStream2 {
+    let conversion = if checked_new {
+        quote! {
+            Self::new(value).ok_or_else(|| {
+                <D::Error as ::serde::de::Error>::custom("checking value bounds")
+            })
+        }
+    } else {
+        quote! {
+            ::core::result::Result::Ok(Self::new(value))
+        }
+    };
+    quote! {
+        impl ::serde::Serialize for #ident {
+            fn serialize<S>(&self, serializer: S) -> ::core::result::Result<S::Ok, S::Error>
+            where
+                S: ::serde::Serializer,
+            {
+                ::serde::Serialize::serialize(&self.0, serializer)
+            }
+        }
+
+        impl<'de> ::serde::Deserialize<'de> for #ident {
+            fn deserialize<D>(deserializer: D) -> ::core::result::Result<Self, D::Error>
+            where
+                D: ::serde::Deserializer<'de>,
+            {
+                let value = <#ty as ::serde::Deserialize<'de>>::deserialize(deserializer)?;
+                #conversion
+            }
+        }
+    }
+}
+
+fn generate_tree_key<'a>(
+    ident: &Ident,
+    keys: impl Iterator<Item = &'a &'static str> + ExactSizeIterator + Clone,
+) -> TokenStream2 {
+    // One extra key for the value itself, which sits in front of the metadata keys.
+    let num_keys = keys.len() + 1;
+    let keys = once(&"value").chain(keys);
+    let max_length = keys.clone().map(|v| v.len()).max();
+    quote! {
+        impl #ident {
+            const __MINICONF_NAMES: [&'static str; #num_keys] = [#(#keys,)*];
+        }
+
+        impl ::miniconf::TreeKey<1> for #ident {
+            fn name_to_index(name: &str) -> ::core::option::Option<usize> {
+                Self::__MINICONF_NAMES.iter().position(|&n| n == name)
+            }
+
+            fn traverse_by_key<K, F, E>(
+                mut keys: K,
+                mut func: F,
+            ) -> ::core::result::Result<usize, ::miniconf::Error<E>>
+            where
+                K: ::core::iter::Iterator,
+                K::Item: ::miniconf::Key,
+                // Writing this to return an iterator instead of using a callback
+                // would have worse performance (O(n^2) instead of O(n) for matching)
+                F: FnMut(usize, &str) -> ::core::result::Result<(), E>,
+            {
+                let ::core::option::Option::Some(key) = keys.next() else {
+                    return ::core::result::Result::Ok(0);
+                };
+                let index = ::miniconf::Key::find::<1, Self>(&key)
+                    .ok_or(::miniconf::Error::NotFound(1))?;
+                let name = Self::__MINICONF_NAMES
+                    .get(index)
+                    .ok_or(::miniconf::Error::NotFound(1))?;
+                func(index, name)?;
+                ::miniconf::Increment::increment(::core::result::Result::Ok(0))
+            }
+
+            fn metadata() -> ::miniconf::Metadata {
+                let mut metadata = ::miniconf::Metadata::default();
+                metadata.max_depth = 1;
+                metadata.count = #num_keys;
+                metadata.max_length = #max_length;
+                metadata
+            }
+        }
+    }
+}
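+// Key layout at runtime: index 0 ("value") addresses the wrapped value itself;
+// the metadata keys follow in the order their attributes appear on the field,
+// e.g. (hypothetical) `__MINICONF_NAMES == ["value", "description", "min",
+// "max", "default"]`, reachable from the outer tree as paths such as
+// `/gain/value` or `/gain/min`.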
+fn generate_tree_serialize(ident: &Ident, consts: &[Ident]) -> TokenStream2 {
+    let matches = consts.iter().enumerate().map(|(i, ident)| {
+        let index = i + 1;
+        quote! {
+            #index => ::miniconf::Serialize::serialize(&Self::#ident, ser)?,
+        }
+    });
+    quote! {
+        impl ::miniconf::TreeSerialize<1> for #ident {
+            fn serialize_by_key<K, S>(
+                &self,
+                mut keys: K,
+                ser: S,
+            ) -> ::core::result::Result<usize, ::miniconf::Error<S::Error>>
+            where
+                K: ::core::iter::Iterator,
+                K::Item: ::miniconf::Key,
+                S: ::serde::Serializer,
+            {
+                let ::core::option::Option::Some(key) = keys.next() else {
+                    return ::miniconf::Increment::increment({
+                        ::miniconf::Serialize::serialize(&self.0, ser)?;
+                        ::core::result::Result::Ok(0)
+                    });
+                };
+                let index = ::miniconf::Key::find::<1, Self>(&key)
+                    .ok_or(::miniconf::Error::NotFound(1))?;
+                if keys.next().is_some() {
+                    return ::core::result::Result::Err(::miniconf::Error::TooLong(1));
+                }
+                ::miniconf::Increment::increment({
+                    match index {
+                        0 => ::miniconf::Serialize::serialize(&self.0, ser)?,
+                        #(#matches)*
+                        _ => unreachable!(),
+                    };
+                    ::core::result::Result::Ok(0)
+                })
+            }
+        }
+    }
+}
+
+fn generate_tree_deserialize(ident: &Ident, num_keys: usize) -> TokenStream2 {
+    quote! {
+        impl<'de> ::miniconf::TreeDeserialize<'de, 1> for #ident {
+            fn deserialize_by_key<K, D>(
+                &mut self,
+                mut keys: K,
+                de: D,
+            ) -> ::core::result::Result<usize, ::miniconf::Error<D::Error>>
+            where
+                K: ::core::iter::Iterator,
+                K::Item: ::miniconf::Key,
+                D: ::serde::Deserializer<'de>,
+            {
+                let ::core::option::Option::Some(key) = keys.next() else {
+                    self.0 = ::miniconf::Deserialize::deserialize(de)?;
+                    return ::core::result::Result::Ok(0);
+                };
+                let index = ::miniconf::Key::find::<1, Self>(&key)
+                    .ok_or(::miniconf::Error::NotFound(1))?;
+                if keys.next().is_some() {
+                    return ::core::result::Result::Err(::miniconf::Error::TooLong(1));
+                }
+                match index {
+                    0 => ::miniconf::Increment::increment({
+                        self.0 = ::miniconf::Deserialize::deserialize(de)?;
+                        ::core::result::Result::Ok(0)
+                    }),
+                    1..=#num_keys => ::core::result::Result::Err(::miniconf::Error::Absent(0)),
+                    _ => unreachable!(),
+                }
+            }
+        }
+    }
+}
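+
+// Behavior sketch for downstream code (hypothetical names; a proc-macro crate
+// cannot apply its own macro to itself, so this is not compiled here):
+//
+//     assert_eq!(*__ConfigGain::DEFAULT, 42);    // Deref to the inner i32
+//     assert!(__ConfigGain::new(150).is_none()); // rejected: max = 100
+//     assert_eq!(__ConfigGain::DESCRIPTION, "Gain of the amplifier");
+//
+// Deserialization funnels through `new`, so out-of-range wire values fail with
+// a "checking value bounds" error; writing to a metadata key yields
+// `miniconf::Error::Absent`.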