Compare commits
6 Commits
7364eec25f...v0.2.0
9fb96bd2c3
14ce7ba845
d142655a25
35afc0f484
f5935b673b
29d466906f
Cargo.toml

@@ -1,18 +1,19 @@
 [package]
 name = "macroconf"
-version = "0.1.0"
+version = "0.2.0"
 edition = "2021"
 description = "macro for creating configurations using miniconf"
 license = "MIT OR Apache-2.0"
-repository = "https://git.mkaenner.de/max/macroconf.git"
+repository = "https://git.mkaenner.de/max/macroconf"
+readme = "README.md"
 categories = ["config", "embedded", "data-structures"]
 keywords = ["miniconf", "macro", "config"]
+authors = ["Max Känner <max.kaenner@gmail.com>"]

 [lints.rust]
 unsafe_code = "forbid"

 [lints.clippy]
-enum_glob_use = "deny"
 pedantic = "deny"
 nursery = "deny"
 cargo = "warn"
@@ -29,5 +30,5 @@ quote = "1.0"
 convert_case = "0.6.0"

 [dev-dependencies]
-miniconf = "0.9"
+miniconf = { version = "0.13", features = ["json-core"] }
 serde = "1.0"
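The dev-dependency bump from miniconf 0.9 to 0.13 also turns on the json-core feature, which the updated integration tests rely on for `JsonCoreSlash::set_json`/`get_json`. A minimal usage sketch; the `Demo` struct and the helper name `__DemoLevel` are illustrative stand-ins (the macro generates the real helper names), not part of this diff:

use macroconf::config;
use miniconf::{JsonCoreSlash, Tree};

#[config]
#[derive(Debug, Clone, Copy, Tree)]
struct Demo {
    /// Doc comments become the `description` metadata.
    #[min] // carries i32::MIN as the `min` metadata
    level: i32,
}

fn main() {
    // The attribute rewrites `level` into a generated newtype (assumed here
    // to be named `__DemoLevel`, following the pattern seen in the tests).
    let mut demo = Demo { level: __DemoLevel::new(0) };

    // Writing the wrapped value goes through the `value` key...
    demo.set_json("/level/value", b"10").unwrap();

    // ...while metadata leaves such as `min` can be read back as JSON.
    let mut buf = [0u8; 32];
    let len = demo.get_json("/level/min", &mut buf).unwrap();
    let _ = &buf[..len];
}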
src/lib.rs (189 additions, 189 deletions)

@@ -1,8 +1,11 @@
+//! This crate creates `miniconf::Tree` implementations fields in a struct. These carry some extra
+//! extra information about the field.
+
 use std::iter::once;

 use convert_case::{Case, Casing};
 use proc_macro::TokenStream;
-use proc_macro2::{Span as Span2, TokenStream as TokenStream2};
+use proc_macro2::{Span as Span2, TokenStream as TokenStream2, TokenTree as TokenTree2};
 use quote::{format_ident, quote, ToTokens};
 use syn::{
     parse_macro_input, parse_quote, Attribute, DataStruct, DeriveInput, Expr, ExprLit, Field,
@@ -12,6 +15,20 @@ use syn::{
 /// Creates structs for the values to extend them with extra metadata.
 ///
 /// supported metadata is `min`, `max` and `default`. Doc comments are parsed as `description`
+///
+/// # Example
+/// ```
+/// use macroconf::config;
+///
+/// #[config]
+/// struct Config {
+///     /// This will be parsed as description
+///     #[min] // This will use i32::MIN for the minimum
+///     #[max = 50] // The value 50 is used for the maximum
+///     #[default = 42] // A `Default` implementation will be generated returning 42
+///     field1: i32,
+/// }
+/// ```
 #[proc_macro_attribute]
 pub fn config(_attr: TokenStream, item: TokenStream) -> TokenStream {
     let mut input = parse_macro_input!(item as DeriveInput);
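The new doc example only shows the input side. For a field like `field1` above, the attribute wraps the value in a generated newtype whose tree exposes the value itself plus one key per metadata entry, which is what the path lists in the updated tests (`/min/value`, `/min/min`, ...) reflect. A sketch of listing those paths with the miniconf 0.13 API used later in this diff; the exact set and order of metadata keys is decided by the macro, so the comment below is only indicative:

use macroconf::config;
use miniconf::{Path, Tree, TreeKey};

#[config]
#[derive(Tree)]
struct Config {
    /// This will be parsed as description
    #[min]
    #[max = 50]
    #[default = 42]
    field1: i32,
}

fn main() {
    // Expected leaves include "/field1/value" plus one path per metadata
    // entry, e.g. "/field1/min", "/field1/max", "/field1/default",
    // "/field1/description".
    for node in Config::nodes::<Path<String, '/'>>() {
        let (path, _node) = node.unwrap();
        println!("{}", path.0);
    }
}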
@@ -37,6 +54,32 @@ pub fn config(_attr: TokenStream, item: TokenStream) -> TokenStream {
         }
     }

+    if let Some(attr) = input
+        .attrs
+        .iter_mut()
+        .find(|attr| attr.path().is_ident("derive"))
+    {
+        if let Ok(meta) = attr.meta.require_list() {
+            let derives_tree = meta
+                .tokens
+                .clone()
+                .into_iter()
+                .filter_map(|token| match token {
+                    TokenTree2::Ident(ident) if ident == Ident::new("Tree", ident.span()) => {
+                        Some(ident)
+                    }
+                    _ => None,
+                })
+                .count()
+                == 1;
+            if !derives_tree {
+                input.attrs.push(parse_quote!(#[derive(::miniconf::Tree)]));
+            }
+        }
+    } else {
+        input.attrs.push(parse_quote!(#[derive(::miniconf::Tree)]));
+    }
+
     quote! {
         #input
         #(#new_types)*
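This block makes the attribute self-sufficient with respect to the `Tree` derive: if an existing `#[derive(...)]` does not already mention `Tree`, a separate `#[derive(::miniconf::Tree)]` is appended, and if there is no derive attribute at all, one is injected as well. Both spellings below should therefore end up implementing `Tree` exactly once (an illustrative sketch, mirroring the doc example and the test structs):

use macroconf::config;
use miniconf::Tree;

// `Tree` already derived: the macro finds the `Tree` ident and leaves the
// attribute untouched.
#[config]
#[derive(Debug, Clone, Copy, Tree)]
struct WithDerive {
    #[default = 1]
    a: i32,
}

// No derive attribute at all: the macro injects `#[derive(::miniconf::Tree)]`.
#[config]
struct WithoutDerive {
    #[default = 1]
    a: i32,
}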
@@ -99,7 +142,7 @@ fn generate_helper_struct(
                 .any(|key| attr.path().is_ident(key))
         });

-        field.attrs.push(parse_quote!(#[tree(depth(1))]));
+        field.attrs.push(parse_quote!(#[tree(depth=1)]));
         let vis = if matches!(field.vis, Visibility::Public(_))
             || matches!(field.vis, Visibility::Inherited)
         {
@@ -116,6 +159,7 @@ fn generate_helper_struct(
     let tree_key = generate_tree_key(&new_type_ident, new_type_miniconf_names.iter());
     let tree_serialize = generate_tree_serialize(&new_type_ident, &new_type_miniconf_consts[..]);
     let tree_deserialize = generate_tree_deserialize(&new_type_ident, miniconf_fields);
+    let tree_any = generate_tree_any(&new_type_ident, &new_type_miniconf_consts, miniconf_fields);

     Some(quote! {
         #[allow(clippy::derive_partial_eq_without_eq)]
@@ -130,6 +174,7 @@ fn generate_helper_struct(
         #tree_key
         #tree_serialize
         #tree_deserialize
+        #tree_any
     })
 }

@@ -339,7 +384,7 @@ fn generate_serde(ident: &Ident, ty: &Type, checked_new: bool) -> TokenStream2 {
     let conversion = if checked_new {
         quote! {
             Self::new(value).ok_or_else(|| {
-                D::Error::custom("checking value bounds")
+                <D::Error as ::serde::de::Error>::custom("checking value bounds")
             })
         }
     } else {
@@ -372,7 +417,7 @@ fn generate_serde(ident: &Ident, ty: &Type, checked_new: bool) -> TokenStream2 {

 fn generate_tree_key<'a>(
     ident: &Ident,
-    keys: impl Iterator<Item = &'a &'a str> + ExactSizeIterator + Clone,
+    keys: impl ExactSizeIterator<Item = &'a &'a str> + Clone,
 ) -> TokenStream2 {
     let keys = once(&"value").chain(keys);
     let num_keys = keys
@@ -381,32 +426,16 @@ fn generate_tree_key<'a>(
         .expect("safe because both iterators (once and original keys) are exact");
     let max_length = keys.clone().map(|v| v.len()).max();
     quote! {
-        impl #ident {
-            const __MINICONF_NAMES: [&'static str; #num_keys] = [#(#keys,)*];
+        impl ::miniconf::KeyLookup for #ident {
+            const LEN: usize = #num_keys;
+            const NAMES: &'static [&'static str] = &[#(#keys,)*];
+
+            fn name_to_index(value: &str) -> Option<usize> {
+                Self::NAMES.iter().position(|name| *name == value)
+            }
         }

         impl ::miniconf::TreeKey<1> for #ident {
-            fn name_to_index(name: &str) -> ::core::option::Option<usize> {
-                Self::__MINICONF_NAMES.iter().position(|&n| n == name)
-            }
-
-            fn traverse_by_key<K, F, E>(mut keys: K, mut func: F) -> ::core::result::Result<usize, ::miniconf::Error<E>>
-            where
-                K: ::core::iter::Iterator,
-                K::Item: ::miniconf::Key,
-                // Writing this to return an iterator instead of using a callback
-                // would have worse performance (O(n^2) instead of O(n) for matching)
-                F: FnMut(usize, &str) -> ::core::result::Result<(), E>,
-            {
-                let ::core::option::Option::Some(key) = keys.next() else { return ::core::result::Result::Ok(0) };
-                let index = ::miniconf::Key::find::<1, Self>(&key).ok_or(::miniconf::Error::NotFound(1))?;
-                let name = Self::__MINICONF_NAMES
-                    .get(index)
-                    .ok_or(::miniconf::Error::NotFound(1))?;
-                func(index, name)?;
-                ::miniconf::Increment::increment(::core::result::Result::Ok(0))
-            }
-
             fn metadata() -> miniconf::Metadata {
                 let mut metadata = miniconf::Metadata::default();
                 metadata.max_depth = 1;
@@ -414,6 +443,22 @@ fn generate_tree_key<'a>(
                 metadata.max_length = #max_length;
                 metadata
             }
+
+            fn traverse_by_key<K, F, E>(mut keys: K, mut func: F) -> ::core::result::Result<usize, ::miniconf::Error<E>>
+            where
+                K: ::miniconf::Keys,
+                // Writing this to return an iterator instead of using a callback
+                // would have worse performance (O(n^2) instead of O(n) for matching)
+                F: FnMut(usize, Option<&'static str>, usize) -> ::core::result::Result<(), E>,
+            {
+                let ::core::result::Result::Ok(key) = keys.next::<Self>() else { return ::core::result::Result::Ok(0) };
+                let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
+                let name = <Self as ::miniconf::KeyLookup>::NAMES
+                    .get(index)
+                    .ok_or(::miniconf::Traversal::NotFound(1))?;
+                func(index, Some(name), #num_keys).map_err(|err| ::miniconf::Error::Inner(1, err))?;
+                ::miniconf::Error::increment_result(::core::result::Result::Ok(0))
+            }
         }
     }
 }
@@ -422,7 +467,7 @@ fn generate_tree_serialize(ident: &Ident, consts: &[Ident]) -> TokenStream2 {
     let matches = consts.iter().enumerate().map(|(i, ident)| {
         let index = i + 1;
         quote! {
-            #index => ::miniconf::Serialize::serialize(&Self::#ident, ser)?,
+            #index => ::miniconf::Serialize::serialize(&Self::#ident, ser).map_err(|err| ::miniconf::Error::Inner(0, err))?,
         }
     });
     quote! {
@@ -433,23 +478,22 @@ fn generate_tree_serialize(ident: &Ident, consts: &[Ident]) -> TokenStream2 {
                 ser: S,
             ) -> ::core::result::Result<usize, ::miniconf::Error<S::Error>>
             where
-                K: ::core::iter::Iterator,
-                K::Item: ::miniconf::Key,
+                K: ::miniconf::Keys,
                 S: ::serde::Serializer,
             {
-                let ::core::option::Option::Some(key) = keys.next() else {
-                    return ::miniconf::Increment::increment({
-                        ::miniconf::Serialize::serialize(&self.0, ser)?;
+                let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
+                    return ::miniconf::Error::increment_result({
+                        ::miniconf::Serialize::serialize(&self.0, ser).map_err(|err| ::miniconf::Error::Inner(0, err))?;
                         ::core::result::Result::Ok(0)
                     });
                 };
-                let index = ::miniconf::Key::find::<1, Self>(&key).ok_or(miniconf::Error::NotFound(1))?;
-                if keys.next().is_some() {
-                    return ::core::result::Result::Err(::miniconf::Error::TooLong(1));
+                let index = ::miniconf::Key::find::<Self>(&key).ok_or(miniconf::Traversal::NotFound(1))?;
+                if !keys.finalize() {
+                    ::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
                 }
-                ::miniconf::Increment::increment({
+                ::miniconf::Error::increment_result({
                     match index {
-                        0 => ::miniconf::Serialize::serialize(&self.0, ser)?,
+                        0 => ::miniconf::Serialize::serialize(&self.0, ser).map_err(|err| ::miniconf::Error::Inner(0, err))?,
                         #(#matches)*
                         _ => unreachable!(),
                     };
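Throughout the regenerated serialize/deserialize code, miniconf 0.13 separates traversal problems from serde errors: key issues are reported as `Traversal` variants (`NotFound`, `TooLong`, `Absent`) that convert into `miniconf::Error` via `?`, serializer/deserializer errors are wrapped as `Error::Inner(depth, err)`, and `Increment::increment` becomes `Error::increment_result`. A condensed sketch of that shape outside the macro; the function and names here are illustrative, not part of the generated code:

// Illustrative only: the 0.13 error-handling shape used by the generated
// serialize_by_key bodies in this diff.
fn serialize_leaf<T, S>(value: &T, ser: S) -> Result<usize, miniconf::Error<S::Error>>
where
    T: serde::Serialize,
    S: serde::Serializer,
{
    miniconf::Error::increment_result({
        // serde errors are wrapped as Error::Inner(depth, err); a Traversal
        // value (NotFound/TooLong/Absent) would convert through `?` instead.
        serde::Serialize::serialize(value, ser)
            .map_err(|err| miniconf::Error::Inner(0, err))?;
        Ok(0)
    })
}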
@@ -469,24 +513,71 @@ fn generate_tree_deserialize(ident: &Ident, num_keys: usize) -> TokenStream2 {
                 de: D,
             ) -> ::core::result::Result<usize, ::miniconf::Error<D::Error>>
             where
-                K: ::core::iter::Iterator,
-                K::Item: ::miniconf::Key,
+                K: ::miniconf::Keys,
                 D: ::serde::Deserializer<'de>,
             {
-                let ::core::option::Option::Some(key) = keys.next() else {
-                    self.0 = ::miniconf::Deserialize::deserialize(de)?;
+                let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
+                    self.0 = ::miniconf::Deserialize::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
                     return ::core::result::Result::Ok(0);
                 };
-                let index = ::miniconf::Key::find::<1, Self>(&key).ok_or(::miniconf::Error::NotFound(1))?;
-                if keys.next().is_some() {
-                    return ::core::result::Result::Err(miniconf::Error::TooLong(1));
+                let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
+                if !keys.finalize() {
+                    ::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
                 }
                 match index {
-                    0 => ::miniconf::Increment::increment({
-                        self.0 = ::miniconf::Deserialize::deserialize(de)?;
+                    0 => ::miniconf::Error::increment_result((||{
+                        self.0 = ::miniconf::Deserialize::deserialize(de).map_err(|err| ::miniconf::Error::Inner(0, err))?;
                         Ok(0)
-                    }),
-                    1..=#num_keys => ::core::result::Result::Err(::miniconf::Error::Absent(0)),
+                    })()),
+                    1..=#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Absent(0))?,
+                    _ => unreachable!(),
+                }
+            }
+        }
+    }
+}
+
+fn generate_tree_any(ident: &Ident, consts: &[Ident], num_keys: usize) -> TokenStream2 {
+    let matches = consts.iter().enumerate().map(|(i, ident)| {
+        let index = i + 1;
+        quote! {
+            #index => ::core::result::Result::Ok(&Self::#ident),
+        }
+    });
+    quote! {
+        impl ::miniconf::TreeAny<1> for #ident {
+            fn ref_any_by_key<K>(&self, mut keys: K) -> ::core::result::Result<&dyn ::core::any::Any, ::miniconf::Traversal>
+            where
+                K: ::miniconf::Keys,
+            {
+                let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
+                    return ::core::result::Result::Ok(&self.0);
+                };
+                let index = ::miniconf::Key::find::<Self>(&key).ok_or(miniconf::Traversal::NotFound(1))?;
+                if !keys.finalize() {
+                    ::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
+                }
+                match index {
+                    0 => ::core::result::Result::Ok(&self.0),
+                    #(#matches)*
+                    _ => unreachable!(),
+                }
+            }
+
+            fn mut_any_by_key<K>(&mut self, mut keys: K) -> ::core::result::Result<&mut dyn ::core::any::Any, ::miniconf::Traversal>
+            where
+                K: ::miniconf::Keys,
+            {
+                let ::core::result::Result::Ok(key) = keys.next::<Self>() else {
+                    return ::core::result::Result::Ok(&mut self.0);
+                };
+                let index = ::miniconf::Key::find::<Self>(&key).ok_or(::miniconf::Traversal::NotFound(1))?;
+                if !keys.finalize() {
+                    ::core::result::Result::Err(::miniconf::Traversal::TooLong(1))?;
+                }
+                match index {
+                    0 => ::core::result::Result::Ok(&mut self.0),
+                    1..=#num_keys => ::core::result::Result::Err(::miniconf::Traversal::Absent(0)),
                     _ => unreachable!(),
                 }
             }
Integration test file

@@ -1,11 +1,17 @@
 use std::str::from_utf8;

 use macroconf::config;
-use miniconf::{Error::Absent, JsonCoreSlash, Tree, TreeKey};
+use miniconf::{
+    Error::Traversal,
+    IntoKeys, JsonCoreSlash, Path,
+    Traversal::{Absent, TooLong},
+    Tree, TreeKey,
+};
+use serde::{Deserialize, Serialize};

 #[config]
-#[derive(Debug, Clone, Copy, Tree)]
-struct Config {
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, Tree)]
+struct SubConfig {
     skipped: i32,
     #[min]
     min: i32,
@@ -17,6 +23,13 @@ struct Config {
     description: i32,
 }

+#[config]
+#[derive(Debug, Clone, Copy, Tree)]
+struct Config {
+    #[tree(depth = 2)]
+    sub_config: SubConfig,
+}
+
 #[test]
 fn keys() {
     for (id, field) in ["skipped", "min", "max", "default", "description"]
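Because each wrapped field contributes its own sub-tree (`/<field>/value` plus the metadata keys), nesting `SubConfig` inside another tree takes `#[tree(depth = 2)]` on the field, and every path gains a `/sub_config` prefix, as the `subconfig` test below checks. A small sketch of addressing the nested tree; it reuses the `Config`/`SubConfig` types defined in this test file rather than standing alone:

use miniconf::JsonCoreSlash;

fn write_nested(config: &mut Config) {
    // Three levels: the field in Config, the field in SubConfig, then the
    // `value` key of the generated wrapper.
    config.set_json("/sub_config/min/value", b"10").unwrap();
}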
@@ -24,8 +37,8 @@ fn keys() {
         .enumerate()
     {
         assert_eq!(
-            Config::traverse_by_key(std::iter::once(field), |index, name| {
-                assert_eq!((id, field), (index, name));
+            SubConfig::traverse_by_key([field].into_keys(), |index, name, _len| {
+                assert_eq!((id, Some(field)), (index, name));
                 Ok::<_, ()>(())
             }),
             Ok(1)
@@ -36,16 +49,20 @@ fn keys() {
 #[test]
 fn sub_keys() {
     assert_eq!(
-        Config::traverse_by_key(["skipped", "value"].into_iter(), |_, _| Ok::<_, ()>(())),
+        SubConfig::traverse_by_key(["skipped", "value"].into_keys(), |_, _, _| Ok::<_, ()>(())),
+        Err(Traversal(TooLong(1)))
+    );
+    assert_eq!(
+        SubConfig::traverse_by_key(["skipped"].into_keys(), |_, _, _| Ok::<_, ()>(())),
         Ok(1)
     );
     for field in ["min", "max", "default", "description"] {
         assert_eq!(
-            Config::traverse_by_key([field, "value"].into_iter(), |_, _| Ok::<_, ()>(())),
+            SubConfig::traverse_by_key([field, "value"].into_keys(), |_, _, _| Ok::<_, ()>(())),
             Ok(2)
         );
         assert_eq!(
-            Config::traverse_by_key([field, field].into_iter(), |_, _| Ok::<_, ()>(())),
+            SubConfig::traverse_by_key([field, field].into_keys(), |_, _, _| Ok::<_, ()>(())),
             Ok(2)
         );
     }
@@ -54,12 +71,12 @@ fn sub_keys() {
 #[test]
 fn serialize() {
     let mut buffer = [0u8; 32];
-    let config = Config {
+    let config = SubConfig {
         skipped: 1,
-        min: __ConfigMin::new(2),
-        max: __ConfigMax::new(3),
-        default: __ConfigDefault::new(4),
-        description: __ConfigDescription::new(5),
+        min: __SubConfigMin::new(2),
+        max: __SubConfigMax::new(3),
+        default: __SubConfigDefault::new(4),
+        description: __SubConfigDescription::new(5),
     };

     for (input, output) in [
@@ -85,12 +102,12 @@ fn serialize() {

 #[test]
 fn deserialize() {
-    let mut config = Config {
+    let mut config = SubConfig {
         skipped: 0,
-        min: __ConfigMin::new(0),
-        max: __ConfigMax::new(0),
-        default: __ConfigDefault::new(0),
-        description: __ConfigDescription::new(0),
+        min: __SubConfigMin::new(0),
+        max: __SubConfigMax::new(0),
+        default: __SubConfigDefault::new(0),
+        description: __SubConfigDescription::new(0),
     };

     for input in [
@@ -115,6 +132,25 @@ fn deserialize() {
         "/description/description",
     ] {
         let res = config.set_json(input, b"10");
-        assert_eq!(res, Err(Absent(1)));
+        assert_eq!(res, Err(Traversal(Absent(1))));
     }
 }
+
+#[test]
+fn subconfig() {
+    let control = vec![
+        "/sub_config/skipped".to_owned(),
+        "/sub_config/min/value".to_owned(),
+        "/sub_config/min/min".to_owned(),
+        "/sub_config/max/value".to_owned(),
+        "/sub_config/max/max".to_owned(),
+        "/sub_config/default/value".to_owned(),
+        "/sub_config/default/default".to_owned(),
+        "/sub_config/description/value".to_owned(),
+        "/sub_config/description/description".to_owned(),
+    ];
+    let paths: Vec<String> = Config::nodes::<Path<String, '/'>>()
+        .filter_map(|path| path.ok().map(|(n, _)| n.0))
+        .collect();
+    assert_eq!(paths, control);
+}