// time_macros/format_description/public/modifier.rs

use std::num::NonZero;

use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};

use crate::to_tokens::{ToTokenStream, ToTokenTree};
6
// Generates a modifier struct or enum together with its token-emitting impl.
//
// Struct form: each field is declared as `name: Type = default_pattern`. The
// generated `ToTokenTree::into_token_tree` emits `Name::default()` when every
// field matches its default pattern; otherwise it emits a block expression
// that starts from `Name::default()` and assigns only the non-default fields.
//
// Enum form: the generated `ToTokenStream::append_to` emits
// `EnumName::Variant`, using the stringified variant name as the identifier.
macro_rules! to_tokens {
    (
        $(#[$struct_attr:meta])*
        $struct_vis:vis struct $struct_name:ident {$(
            $(#[$field_attr:meta])*
            $field_vis:vis $field_name:ident : $field_ty:ty = $default:pat
        ),* $(,)?}
    ) => {
        $(#[$struct_attr])*
        $struct_vis struct $struct_name {$(
            $(#[$field_attr])*
            $field_vis $field_name: $field_ty
        ),*}

        impl ToTokenTree for $struct_name {
            fn into_token_tree(self) -> TokenTree {
                let Self {$($field_name),*} = self;

                // Fast path: every field is at its default, so a plain
                // `Name::default()` call suffices.
                #[allow(clippy::redundant_pattern_matching)]
                if matches!(($(&$field_name,)*), ($($default,)*)) {
                    return TokenTree::Group(Group::new(
                        Delimiter::None,
                        quote_! { $struct_name::default() }
                    ));
                }

                let mut tokens = quote_! {
                    let mut value = $struct_name::default();
                };
                $(
                    // Emit an assignment only for fields that differ from
                    // their declared default pattern.
                    #[allow(clippy::redundant_pattern_matching)]
                    if !matches!($field_name, $default) {
                        quote_append!(tokens value.$field_name =);
                        $field_name.append_to(&mut tokens);
                        quote_append!(tokens ;);
                    }
                )*
                quote_append!(tokens value);

                TokenTree::Group(Group::new(
                    Delimiter::Brace,
                    tokens,
                ))
            }
        }
    };

    (
        $(#[$enum_attr:meta])*
        $enum_vis:vis enum $enum_name:ident {$(
            $(#[$variant_attr:meta])*
            $variant_name:ident
        ),+ $(,)?}
    ) => {
        $(#[$enum_attr])*
        $enum_vis enum $enum_name {$(
            $(#[$variant_attr])*
            $variant_name
        ),+}

        impl ToTokenStream for $enum_name {
            fn append_to(self, ts: &mut TokenStream) {
                quote_append! { ts
                    $enum_name::
                };
                let name = match self {
                    $(Self::$variant_name => stringify!($variant_name)),+
                };
                ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
            }
        }
    }
}
80
81pub(crate) struct Day {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Day {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Day",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Day",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
82    pub(crate) struct Day {
83        pub(crate) padding: Padding = Padding::Zero,
84    }
85}
86
87pub(crate) enum MonthRepr { Numerical, Long, Short, }
impl ToTokenStream for MonthRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"MonthRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Numerical => "Numerical",
                Self::Long => "Long",
                Self::Short => "Short",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
88    pub(crate) enum MonthRepr {
89        Numerical,
90        Long,
91        Short,
92    }
93}
94
95pub(crate) struct Month {
    pub(crate) padding: Padding,
    pub(crate) repr: MonthRepr,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Month {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr,
                    &case_sensitive) {
                (Padding::Zero, MonthRepr::Numerical, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Month",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Month",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    MonthRepr::Numerical => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"repr",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            repr.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"case_sensitive",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            case_sensitive.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
96    pub(crate) struct Month {
97        pub(crate) padding: Padding = Padding::Zero,
98        pub(crate) repr: MonthRepr = MonthRepr::Numerical,
99        pub(crate) case_sensitive: bool = true,
100    }
101}
102
103pub(crate) struct Ordinal {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Ordinal {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Ordinal",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Ordinal",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
104    pub(crate) struct Ordinal {
105        pub(crate) padding: Padding = Padding::Zero,
106    }
107}
108
109pub(crate) enum WeekdayRepr { Short, Long, Sunday, Monday, }
impl ToTokenStream for WeekdayRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"WeekdayRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Short => "Short",
                Self::Long => "Long",
                Self::Sunday => "Sunday",
                Self::Monday => "Monday",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
110    pub(crate) enum WeekdayRepr {
111        Short,
112        Long,
113        Sunday,
114        Monday,
115    }
116}
117
118pub(crate) struct Weekday {
    pub(crate) repr: WeekdayRepr,
    pub(crate) one_indexed: bool,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Weekday {
    fn into_token_tree(self) -> TokenTree {
        let Self { repr, one_indexed, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&repr,
                    &one_indexed, &case_sensitive) {
                (WeekdayRepr::Long, true, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Weekday",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Weekday",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    WeekdayRepr::Long => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"repr",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            repr.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match one_indexed {
                    true => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"one_indexed",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            one_indexed.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"case_sensitive",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            case_sensitive.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
119    pub(crate) struct Weekday {
120        pub(crate) repr: WeekdayRepr = WeekdayRepr::Long,
121        pub(crate) one_indexed: bool = true,
122        pub(crate) case_sensitive: bool = true,
123    }
124}
125
126pub(crate) enum WeekNumberRepr { Iso, Sunday, Monday, }
impl ToTokenStream for WeekNumberRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"WeekNumberRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Iso => "Iso",
                Self::Sunday => "Sunday",
                Self::Monday => "Monday",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
127    pub(crate) enum WeekNumberRepr {
128        Iso,
129        Sunday,
130        Monday,
131    }
132}
133
134pub(crate) struct WeekNumber {
    pub(crate) padding: Padding,
    pub(crate) repr: WeekNumberRepr,
}
impl ToTokenTree for WeekNumber {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr) {
                (Padding::Zero, WeekNumberRepr::Iso) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"WeekNumber",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"WeekNumber",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    WeekNumberRepr::Iso => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"repr",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            repr.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
135    pub(crate) struct WeekNumber {
136        pub(crate) padding: Padding = Padding::Zero,
137        pub(crate) repr: WeekNumberRepr = WeekNumberRepr::Iso,
138    }
139}
140
141pub(crate) enum YearRepr { Full, Century, LastTwo, }
impl ToTokenStream for YearRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"YearRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Full => "Full",
                Self::Century => "Century",
                Self::LastTwo => "LastTwo",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
142    pub(crate) enum YearRepr {
143        Full,
144        Century,
145        LastTwo,
146    }
147}
148
149pub(crate) enum YearRange { Standard, Extended, }
impl ToTokenStream for YearRange {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"YearRange",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Standard => "Standard",
                Self::Extended => "Extended",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
150    pub(crate) enum YearRange {
151        Standard,
152        Extended,
153    }
154}
155
156pub(crate) struct Year {
    pub(crate) padding: Padding,
    pub(crate) repr: YearRepr,
    pub(crate) range: YearRange,
    pub(crate) iso_week_based: bool,
    pub(crate) sign_is_mandatory: bool,
}
impl ToTokenTree for Year {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr, range, iso_week_based, sign_is_mandatory } =
            self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr,
                    &range, &iso_week_based, &sign_is_mandatory) {
                (Padding::Zero, YearRepr::Full, YearRange::Extended, false,
                    false) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Year",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Year",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    YearRepr::Full => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"repr",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            repr.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match range {
                    YearRange::Extended => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"range",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            range.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match iso_week_based {
                    false => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"iso_week_based",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            iso_week_based.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match sign_is_mandatory
                    {
                    false => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"sign_is_mandatory",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            sign_is_mandatory.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
157    pub(crate) struct Year {
158        pub(crate) padding: Padding = Padding::Zero,
159        pub(crate) repr: YearRepr = YearRepr::Full,
160        pub(crate) range: YearRange = YearRange::Extended,
161        pub(crate) iso_week_based: bool = false,
162        pub(crate) sign_is_mandatory: bool = false,
163    }
164}
165
166pub(crate) struct Hour {
    pub(crate) padding: Padding,
    pub(crate) is_12_hour_clock: bool,
}
impl ToTokenTree for Hour {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, is_12_hour_clock } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,
                    &is_12_hour_clock) {
                (Padding::Zero, false) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Hour",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Hour",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match is_12_hour_clock {
                    false => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"is_12_hour_clock",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            is_12_hour_clock.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
167    pub(crate) struct Hour {
168        pub(crate) padding: Padding = Padding::Zero,
169        pub(crate) is_12_hour_clock: bool = false,
170    }
171}
172
173pub(crate) struct Minute {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Minute {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Minute",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Minute",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
174    pub(crate) struct Minute {
175        pub(crate) padding: Padding = Padding::Zero,
176    }
177}
178
/// State of a `Period` (AM/PM) format-description modifier.
/// NOTE(review): stray fused source line number removed; definition unchanged.
pub(crate) struct Period {
    pub(crate) is_uppercase: bool,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Period {
    fn into_token_tree(self) -> TokenTree {
        let Self { is_uppercase, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&is_uppercase,
                    &case_sensitive) {
                (true, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Period",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Period",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match is_uppercase {
                    true => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"is_uppercase",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            is_uppercase.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"case_sensitive",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            case_sensitive.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
180    pub(crate) struct Period {
181        pub(crate) is_uppercase: bool = true,
182        pub(crate) case_sensitive: bool = true,
183    }
184}
185
186pub(crate) struct Second {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Second {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Second",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"Second",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([TokenTree::from(Ident::new(&"padding",
                                    Span::mixed_site()))]);
                tokens.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ;
            };
            padding.append_to(&mut tokens);
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
            };
        }
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
187    pub(crate) struct Second {
188        pub(crate) padding: Padding = Padding::Zero,
189    }
190}
191
// Number of subsecond digits to emit, or `OneOrMore` for a variable count.
// NOTE(review): stray fused source line number removed; definition unchanged.
pub(crate) enum SubsecondDigits {
    One,
    Two,
    Three,
    Four,
    Five,
    Six,
    Seven,
    Eight,
    Nine,
    OneOrMore,
}
impl ToTokenStream for SubsecondDigits {
    /// Appends the path `SubsecondDigits::<Variant>` matching `self` to `ts`.
    fn append_to(self, ts: &mut TokenStream) {
        use proc_macro::{Punct, Spacing};

        // Pick the variant's name first; the tokens are emitted below in the
        // same order as before: type name, `::`, then the variant.
        let variant = match self {
            Self::One => "One",
            Self::Two => "Two",
            Self::Three => "Three",
            Self::Four => "Four",
            Self::Five => "Five",
            Self::Six => "Six",
            Self::Seven => "Seven",
            Self::Eight => "Eight",
            Self::Nine => "Nine",
            Self::OneOrMore => "OneOrMore",
        };

        ts.extend([TokenTree::from(Ident::new("SubsecondDigits", Span::mixed_site()))]);
        ts.extend([
            TokenTree::from(Punct::new(':', Spacing::Joint)),
            TokenTree::from(Punct::new(':', Spacing::Alone)),
        ]);
        ts.extend([TokenTree::Ident(Ident::new(variant, Span::mixed_site()))]);
    }
}
// Pre-expansion source of `SubsecondDigits` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) enum SubsecondDigits {
//         One,
//         Two,
//         Three,
//         Four,
//         Five,
//         Six,
//         Seven,
//         Eight,
//         Nine,
//         OneOrMore,
//     }
// }

207pub(crate) struct Subsecond {
    pub(crate) digits: SubsecondDigits,
}
impl ToTokenTree for Subsecond {
    /// Lowers this modifier into tokens that rebuild it at the macro call
    /// site: `Subsecond::default()` when `digits` holds its default
    /// (`SubsecondDigits::OneOrMore`), otherwise a block that starts from
    /// `default()`, assigns the non-default field, and yields `value`.
    fn into_token_tree(self) -> TokenTree {
        use proc_macro::{Punct, Spacing};

        // An identifier token with a mixed-site span.
        fn ident(name: &str) -> TokenTree {
            TokenTree::from(Ident::new(name, Span::mixed_site()))
        }
        // A lone punctuation token.
        fn punct(c: char) -> TokenTree {
            TokenTree::from(Punct::new(c, Spacing::Alone))
        }
        // Tokens for `Subsecond::default()`.
        fn default_call() -> TokenStream {
            let mut ts = TokenStream::new();
            ts.extend([ident("Subsecond")]);
            ts.extend([
                TokenTree::from(Punct::new(':', Spacing::Joint)),
                TokenTree::from(Punct::new(':', Spacing::Alone)),
            ]);
            ts.extend([ident("default")]);
            ts.extend([TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                TokenStream::new(),
            ))]);
            ts
        }

        let Self { digits } = self;

        // Everything is default: a bare `Subsecond::default()` expression suffices.
        if matches!(digits, SubsecondDigits::OneOrMore) {
            return TokenTree::Group(Group::new(Delimiter::None, default_call()));
        }

        // `let mut value = Subsecond::default();`
        let mut tokens = TokenStream::new();
        tokens.extend([ident("let"), ident("mut"), ident("value"), punct('=')]);
        tokens.extend(default_call());
        tokens.extend([punct(';')]);

        // `value.digits = <digits>;` — the guard mirrors the generated form;
        // it is always true here thanks to the early return above.
        if !matches!(digits, SubsecondDigits::OneOrMore) {
            tokens.extend([ident("value"), punct('.'), ident("digits"), punct('=')]);
            digits.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // The block's tail expression is `value`.
        tokens.extend([ident("value")]);
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}
// Pre-expansion source of `Subsecond` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) struct Subsecond {
//         pub(crate) digits: SubsecondDigits = SubsecondDigits::OneOrMore,
//     }
// }

213pub(crate) struct OffsetHour {
    pub(crate) sign_is_mandatory: bool,
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetHour {
    /// Lowers this modifier into tokens that rebuild it at the macro call
    /// site: `OffsetHour::default()` when both fields hold their defaults
    /// (`false`, `Padding::Zero`), otherwise a block that starts from
    /// `default()`, assigns only the non-default fields, and yields `value`.
    fn into_token_tree(self) -> TokenTree {
        use proc_macro::{Punct, Spacing};

        // An identifier token with a mixed-site span.
        fn ident(name: &str) -> TokenTree {
            TokenTree::from(Ident::new(name, Span::mixed_site()))
        }
        // A lone punctuation token.
        fn punct(c: char) -> TokenTree {
            TokenTree::from(Punct::new(c, Spacing::Alone))
        }
        // Tokens for `OffsetHour::default()`.
        fn default_call() -> TokenStream {
            let mut ts = TokenStream::new();
            ts.extend([ident("OffsetHour")]);
            ts.extend([
                TokenTree::from(Punct::new(':', Spacing::Joint)),
                TokenTree::from(Punct::new(':', Spacing::Alone)),
            ]);
            ts.extend([ident("default")]);
            ts.extend([TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                TokenStream::new(),
            ))]);
            ts
        }

        let Self { sign_is_mandatory, padding } = self;

        // Everything is default: a bare `OffsetHour::default()` expression suffices.
        if !sign_is_mandatory && matches!(padding, Padding::Zero) {
            return TokenTree::Group(Group::new(Delimiter::None, default_call()));
        }

        // `let mut value = OffsetHour::default();`
        let mut tokens = TokenStream::new();
        tokens.extend([ident("let"), ident("mut"), ident("value"), punct('=')]);
        tokens.extend(default_call());
        tokens.extend([punct(';')]);

        // `value.sign_is_mandatory = <value>;` — only for a non-default value.
        if sign_is_mandatory {
            tokens.extend([ident("value"), punct('.'), ident("sign_is_mandatory"), punct('=')]);
            sign_is_mandatory.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // `value.padding = <value>;` — only for a non-default value.
        if !matches!(padding, Padding::Zero) {
            tokens.extend([ident("value"), punct('.'), ident("padding"), punct('=')]);
            padding.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // The block's tail expression is `value`.
        tokens.extend([ident("value")]);
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}
// Pre-expansion source of `OffsetHour` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) struct OffsetHour {
//         pub(crate) sign_is_mandatory: bool = false,
//         pub(crate) padding: Padding = Padding::Zero,
//     }
// }

220pub(crate) struct OffsetMinute {
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetMinute {
    /// Lowers this modifier into tokens that rebuild it at the macro call
    /// site: `OffsetMinute::default()` when `padding` holds its default
    /// (`Padding::Zero`), otherwise a block that starts from `default()`,
    /// assigns the non-default field, and yields `value`.
    fn into_token_tree(self) -> TokenTree {
        use proc_macro::{Punct, Spacing};

        // An identifier token with a mixed-site span.
        fn ident(name: &str) -> TokenTree {
            TokenTree::from(Ident::new(name, Span::mixed_site()))
        }
        // A lone punctuation token.
        fn punct(c: char) -> TokenTree {
            TokenTree::from(Punct::new(c, Spacing::Alone))
        }
        // Tokens for `OffsetMinute::default()`.
        fn default_call() -> TokenStream {
            let mut ts = TokenStream::new();
            ts.extend([ident("OffsetMinute")]);
            ts.extend([
                TokenTree::from(Punct::new(':', Spacing::Joint)),
                TokenTree::from(Punct::new(':', Spacing::Alone)),
            ]);
            ts.extend([ident("default")]);
            ts.extend([TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                TokenStream::new(),
            ))]);
            ts
        }

        let Self { padding } = self;

        // Everything is default: a bare `OffsetMinute::default()` expression suffices.
        if matches!(padding, Padding::Zero) {
            return TokenTree::Group(Group::new(Delimiter::None, default_call()));
        }

        // `let mut value = OffsetMinute::default();`
        let mut tokens = TokenStream::new();
        tokens.extend([ident("let"), ident("mut"), ident("value"), punct('=')]);
        tokens.extend(default_call());
        tokens.extend([punct(';')]);

        // `value.padding = <value>;` — the guard mirrors the generated form;
        // it is always true here thanks to the early return above.
        if !matches!(padding, Padding::Zero) {
            tokens.extend([ident("value"), punct('.'), ident("padding"), punct('=')]);
            padding.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // The block's tail expression is `value`.
        tokens.extend([ident("value")]);
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}
// Pre-expansion source of `OffsetMinute` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) struct OffsetMinute {
//         pub(crate) padding: Padding = Padding::Zero,
//     }
// }

226pub(crate) struct OffsetSecond {
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetSecond {
    /// Lowers this modifier into tokens that rebuild it at the macro call
    /// site: `OffsetSecond::default()` when `padding` holds its default
    /// (`Padding::Zero`), otherwise a block that starts from `default()`,
    /// assigns the non-default field, and yields `value`.
    fn into_token_tree(self) -> TokenTree {
        use proc_macro::{Punct, Spacing};

        // An identifier token with a mixed-site span.
        fn ident(name: &str) -> TokenTree {
            TokenTree::from(Ident::new(name, Span::mixed_site()))
        }
        // A lone punctuation token.
        fn punct(c: char) -> TokenTree {
            TokenTree::from(Punct::new(c, Spacing::Alone))
        }
        // Tokens for `OffsetSecond::default()`.
        fn default_call() -> TokenStream {
            let mut ts = TokenStream::new();
            ts.extend([ident("OffsetSecond")]);
            ts.extend([
                TokenTree::from(Punct::new(':', Spacing::Joint)),
                TokenTree::from(Punct::new(':', Spacing::Alone)),
            ]);
            ts.extend([ident("default")]);
            ts.extend([TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                TokenStream::new(),
            ))]);
            ts
        }

        let Self { padding } = self;

        // Everything is default: a bare `OffsetSecond::default()` expression suffices.
        if matches!(padding, Padding::Zero) {
            return TokenTree::Group(Group::new(Delimiter::None, default_call()));
        }

        // `let mut value = OffsetSecond::default();`
        let mut tokens = TokenStream::new();
        tokens.extend([ident("let"), ident("mut"), ident("value"), punct('=')]);
        tokens.extend(default_call());
        tokens.extend([punct(';')]);

        // `value.padding = <value>;` — the guard mirrors the generated form;
        // it is always true here thanks to the early return above.
        if !matches!(padding, Padding::Zero) {
            tokens.extend([ident("value"), punct('.'), ident("padding"), punct('=')]);
            padding.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // The block's tail expression is `value`.
        tokens.extend([ident("value")]);
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}
// Pre-expansion source of `OffsetSecond` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) struct OffsetSecond {
//         pub(crate) padding: Padding = Padding::Zero,
//     }
// }

/// How a value is padded out to its minimum width.
pub(crate) enum Padding {
    /// Pad with leading spaces.
    Space,
    /// Pad with leading zeroes.
    Zero,
    /// Do not pad.
    None,
}
impl ToTokenStream for Padding {
    /// Appends the path `Padding::<Variant>` matching `self` to `ts`.
    fn append_to(self, ts: &mut TokenStream) {
        use proc_macro::{Punct, Spacing};

        // Pick the variant's name first; the tokens are emitted below in the
        // same order as before: type name, `::`, then the variant.
        let variant = match self {
            Self::Space => "Space",
            Self::Zero => "Zero",
            Self::None => "None",
        };

        ts.extend([TokenTree::from(Ident::new("Padding", Span::mixed_site()))]);
        ts.extend([
            TokenTree::from(Punct::new(':', Spacing::Joint)),
            TokenTree::from(Punct::new(':', Spacing::Alone)),
        ]);
        ts.extend([TokenTree::Ident(Ident::new(variant, Span::mixed_site()))]);
    }
}
// Pre-expansion source of `Padding` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) enum Padding {
//         Space,
//         Zero,
//         None,
//     }
// }

/// Modifier for the `ignore` component.
pub(crate) struct Ignore {
    // How many items to ignore; the `NonZero` type guarantees at least one.
    // NOTE(review): presumably a byte/character count — confirm against the parser.
    pub(crate) count: NonZero<u16>,
}

244impl ToTokenTree for Ignore {
245    fn into_token_tree(self) -> TokenTree {
246        {
    use proc_macro::*;
    TokenTree::Group(Group::new(Delimiter::Brace,
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Ignore",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"count",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        ts_mut.extend([crate::to_tokens::ToTokenTree::into_token_tree(self.count)]);
                                        ;
                                        ts
                                    }))]);
                ;
                ts
            }))
}quote_group! {{
247            Ignore::count(#(self.count))
248        }}
249    }
250}
251
/// Precision of a Unix timestamp modifier.
pub(crate) enum UnixTimestampPrecision {
    Second,
    Millisecond,
    Microsecond,
    Nanosecond,
}
impl ToTokenStream for UnixTimestampPrecision {
    /// Appends the path `UnixTimestampPrecision::<Variant>` matching `self` to `ts`.
    fn append_to(self, ts: &mut TokenStream) {
        use proc_macro::{Punct, Spacing};

        // Pick the variant's name first; the tokens are emitted below in the
        // same order as before: type name, `::`, then the variant.
        let variant = match self {
            Self::Second => "Second",
            Self::Millisecond => "Millisecond",
            Self::Microsecond => "Microsecond",
            Self::Nanosecond => "Nanosecond",
        };

        ts.extend([TokenTree::from(Ident::new("UnixTimestampPrecision", Span::mixed_site()))]);
        ts.extend([
            TokenTree::from(Punct::new(':', Spacing::Joint)),
            TokenTree::from(Punct::new(':', Spacing::Alone)),
        ]);
        ts.extend([TokenTree::Ident(Ident::new(variant, Span::mixed_site()))]);
    }
}
// Pre-expansion source of `UnixTimestampPrecision` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) enum UnixTimestampPrecision {
//         Second,
//         Millisecond,
//         Microsecond,
//         Nanosecond,
//     }
// }

261pub(crate) struct UnixTimestamp {
    pub(crate) precision: UnixTimestampPrecision,
    pub(crate) sign_is_mandatory: bool,
}
impl ToTokenTree for UnixTimestamp {
    /// Lowers this modifier into tokens that rebuild it at the macro call
    /// site: `UnixTimestamp::default()` when both fields hold their defaults
    /// (`UnixTimestampPrecision::Second`, `false`), otherwise a block that
    /// starts from `default()`, assigns only the non-default fields, and
    /// yields `value`.
    fn into_token_tree(self) -> TokenTree {
        use proc_macro::{Punct, Spacing};

        // An identifier token with a mixed-site span.
        fn ident(name: &str) -> TokenTree {
            TokenTree::from(Ident::new(name, Span::mixed_site()))
        }
        // A lone punctuation token.
        fn punct(c: char) -> TokenTree {
            TokenTree::from(Punct::new(c, Spacing::Alone))
        }
        // Tokens for `UnixTimestamp::default()`.
        fn default_call() -> TokenStream {
            let mut ts = TokenStream::new();
            ts.extend([ident("UnixTimestamp")]);
            ts.extend([
                TokenTree::from(Punct::new(':', Spacing::Joint)),
                TokenTree::from(Punct::new(':', Spacing::Alone)),
            ]);
            ts.extend([ident("default")]);
            ts.extend([TokenTree::Group(Group::new(
                Delimiter::Parenthesis,
                TokenStream::new(),
            ))]);
            ts
        }

        let Self { precision, sign_is_mandatory } = self;

        // Everything is default: a bare `UnixTimestamp::default()` expression suffices.
        if matches!(precision, UnixTimestampPrecision::Second) && !sign_is_mandatory {
            return TokenTree::Group(Group::new(Delimiter::None, default_call()));
        }

        // `let mut value = UnixTimestamp::default();`
        let mut tokens = TokenStream::new();
        tokens.extend([ident("let"), ident("mut"), ident("value"), punct('=')]);
        tokens.extend(default_call());
        tokens.extend([punct(';')]);

        // `value.precision = <value>;` — only for a non-default value.
        if !matches!(precision, UnixTimestampPrecision::Second) {
            tokens.extend([ident("value"), punct('.'), ident("precision"), punct('=')]);
            precision.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // `value.sign_is_mandatory = <value>;` — only for a non-default value.
        if sign_is_mandatory {
            tokens.extend([ident("value"), punct('.'), ident("sign_is_mandatory"), punct('=')]);
            sign_is_mandatory.append_to(&mut tokens);
            tokens.extend([punct(';')]);
        }

        // The block's tail expression is `value`.
        tokens.extend([ident("value")]);
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}
// Pre-expansion source of `UnixTimestamp` (retained for reference; the expansion above is live):
// to_tokens! {
//     pub(crate) struct UnixTimestamp {
//         pub(crate) precision: UnixTimestampPrecision = UnixTimestampPrecision::Second,
//         pub(crate) sign_is_mandatory: bool = false,
//     }
// }

/// Modifier for the `end` component; carries no data.
pub(crate) struct End {}
impl ToTokenTree for End {
    fn into_token_tree(self) -> TokenTree {
        let Self {} = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match () {
                () => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"End",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"let",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"mut",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Ident::new(&"value",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new('=',
                                    Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"End",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ts_mut.extend([TokenTree::from(Punct::new(';',
                                    Spacing::Alone))]);
                ;
                ;
                ts
            };
        {
            use proc_macro::*;
            tokens.extend([TokenTree::from(Ident::new(&"value",
                                Span::mixed_site()))]);
            ;
        };
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
269    pub(crate) struct End {}
270}