// Source: time_macros/format_description/public/modifier.rs

1use std::num::NonZero;
2
3use proc_macro::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
4
5use crate::to_tokens::{ToTokenStream, ToTokenTree};
6
/// Implement `ToTokenTree` / `ToTokenStream` for modifier structs and enums.
///
/// For a struct, each field is declared with a *pattern* giving its default
/// value. The generated `into_token_tree` emits `Name::default()` and then
/// chains a `.with_<field>(value)` call for every field whose value does not
/// match its default pattern; when every field is at its default, a bare
/// `Name::default()` is emitted instead.
///
/// For an enum, the generated `append_to` emits the fully-qualified variant
/// path `EnumName::Variant`.
macro_rules! to_tokens {
    (
        $(#[$struct_attr:meta])*
        $struct_vis:vis struct $struct_name:ident {$(
            $(#[$field_attr:meta])*
            $field_vis:vis $field_name:ident : $field_ty:ty = $default:pat
        ),* $(,)?}
    ) => {
        $(#[$struct_attr])*
        $struct_vis struct $struct_name {$(
            $(#[$field_attr])*
            $field_vis $field_name: $field_ty
        ),*}

        impl ToTokenTree for $struct_name {
            fn into_token_tree(self) -> TokenTree {
                let Self {$($field_name),*} = self;

                // Fast path: every field matches its default pattern, so a
                // plain `Name::default()` call suffices.
                #[allow(clippy::redundant_pattern_matching)]
                if matches!(($(&$field_name,)*), ($($default,)*)) {
                    return TokenTree::Group(Group::new(
                        Delimiter::None,
                        quote_! { $struct_name::default() }
                    ));
                }

                let mut tokens = quote_! {
                    $struct_name::default()
                };
                $(
                    // Append `.with_<field>(value)` only for non-default fields.
                    #[allow(clippy::redundant_pattern_matching)]
                    if !matches!($field_name, $default) {
                        let method_name = Ident::new(concat!("with_", stringify!($field_name)), Span::mixed_site());
                        quote_append!(tokens .#(method_name)(#S($field_name)));
                    }
                )*

                TokenTree::Group(Group::new(
                    Delimiter::Brace,
                    tokens,
                ))
            }
        }
    };

    (
        $(#[$enum_attr:meta])*
        $enum_vis:vis enum $enum_name:ident {$(
            $(#[$variant_attr:meta])*
            $variant_name:ident
        ),+ $(,)?}
    ) => {
        $(#[$enum_attr])*
        $enum_vis enum $enum_name {$(
            $(#[$variant_attr])*
            $variant_name
        ),+}

        impl ToTokenStream for $enum_name {
            fn append_to(self, ts: &mut TokenStream) {
                // Emit the `EnumName::` path prefix, then the variant ident.
                quote_append! { ts
                    $enum_name::
                };
                let name = match self {
                    $(Self::$variant_name => stringify!($variant_name)),+
                };
                ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
            }
        }
    }
}
79pub(crate) struct Day {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Day {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Day",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Day",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
80    pub(crate) struct Day {
81        pub(crate) padding: Padding = Padding::Zero,
82    }
83}
84
85pub(crate) enum MonthRepr { Numerical, Long, Short, }
impl ToTokenStream for MonthRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"MonthRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Numerical => "Numerical",
                Self::Long => "Long",
                Self::Short => "Short",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
86    pub(crate) enum MonthRepr {
87        Numerical,
88        Long,
89        Short,
90    }
91}
92
93pub(crate) struct Month {
    pub(crate) padding: Padding,
    pub(crate) repr: MonthRepr,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Month {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr,
                    &case_sensitive) {
                (Padding::Zero, MonthRepr::Numerical, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Month",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Month",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    MonthRepr::Numerical => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_repr", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(repr, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_case_sensitive", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(case_sensitive,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
94    pub(crate) struct Month {
95        pub(crate) padding: Padding = Padding::Zero,
96        pub(crate) repr: MonthRepr = MonthRepr::Numerical,
97        pub(crate) case_sensitive: bool = true,
98    }
99}
100
101pub(crate) struct Ordinal {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Ordinal {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Ordinal",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Ordinal",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
102    pub(crate) struct Ordinal {
103        pub(crate) padding: Padding = Padding::Zero,
104    }
105}
106
107pub(crate) enum WeekdayRepr { Short, Long, Sunday, Monday, }
impl ToTokenStream for WeekdayRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"WeekdayRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Short => "Short",
                Self::Long => "Long",
                Self::Sunday => "Sunday",
                Self::Monday => "Monday",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
108    pub(crate) enum WeekdayRepr {
109        Short,
110        Long,
111        Sunday,
112        Monday,
113    }
114}
115
116pub(crate) struct Weekday {
    pub(crate) repr: WeekdayRepr,
    pub(crate) one_indexed: bool,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Weekday {
    fn into_token_tree(self) -> TokenTree {
        let Self { repr, one_indexed, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&repr,
                    &one_indexed, &case_sensitive) {
                (WeekdayRepr::Long, true, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Weekday",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Weekday",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    WeekdayRepr::Long => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_repr", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(repr, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match one_indexed {
                    true => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_one_indexed", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(one_indexed,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_case_sensitive", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(case_sensitive,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
117    pub(crate) struct Weekday {
118        pub(crate) repr: WeekdayRepr = WeekdayRepr::Long,
119        pub(crate) one_indexed: bool = true,
120        pub(crate) case_sensitive: bool = true,
121    }
122}
123
124pub(crate) enum WeekNumberRepr { Iso, Sunday, Monday, }
impl ToTokenStream for WeekNumberRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"WeekNumberRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Iso => "Iso",
                Self::Sunday => "Sunday",
                Self::Monday => "Monday",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
125    pub(crate) enum WeekNumberRepr {
126        Iso,
127        Sunday,
128        Monday,
129    }
130}
131
132pub(crate) struct WeekNumber {
    pub(crate) padding: Padding,
    pub(crate) repr: WeekNumberRepr,
}
impl ToTokenTree for WeekNumber {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr) {
                (Padding::Zero, WeekNumberRepr::Iso) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"WeekNumber",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"WeekNumber",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    WeekNumberRepr::Iso => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_repr", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(repr, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
133    pub(crate) struct WeekNumber {
134        pub(crate) padding: Padding = Padding::Zero,
135        pub(crate) repr: WeekNumberRepr = WeekNumberRepr::Iso,
136    }
137}
138
139pub(crate) enum YearRepr { Full, Century, LastTwo, }
impl ToTokenStream for YearRepr {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"YearRepr",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Full => "Full",
                Self::Century => "Century",
                Self::LastTwo => "LastTwo",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
140    pub(crate) enum YearRepr {
141        Full,
142        Century,
143        LastTwo,
144    }
145}
146
147pub(crate) enum YearRange { Standard, Extended, }
impl ToTokenStream for YearRange {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"YearRange",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Standard => "Standard",
                Self::Extended => "Extended",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
148    pub(crate) enum YearRange {
149        Standard,
150        Extended,
151    }
152}
153
154pub(crate) struct Year {
    pub(crate) padding: Padding,
    pub(crate) repr: YearRepr,
    pub(crate) range: YearRange,
    pub(crate) iso_week_based: bool,
    pub(crate) sign_is_mandatory: bool,
}
impl ToTokenTree for Year {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, repr, range, iso_week_based, sign_is_mandatory } =
            self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding, &repr,
                    &range, &iso_week_based, &sign_is_mandatory) {
                (Padding::Zero, YearRepr::Full, YearRange::Extended, false,
                    false) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Year",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Year",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match repr {
                    YearRepr::Full => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_repr", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(repr, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match range {
                    YearRange::Extended => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_range", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(range, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match iso_week_based {
                    false => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_iso_week_based", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(iso_week_based,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match sign_is_mandatory
                    {
                    false => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_sign_is_mandatory", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(sign_is_mandatory,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
155    pub(crate) struct Year {
156        pub(crate) padding: Padding = Padding::Zero,
157        pub(crate) repr: YearRepr = YearRepr::Full,
158        pub(crate) range: YearRange = YearRange::Extended,
159        pub(crate) iso_week_based: bool = false,
160        pub(crate) sign_is_mandatory: bool = false,
161    }
162}
163
164pub(crate) struct Hour {
    pub(crate) padding: Padding,
    pub(crate) is_12_hour_clock: bool,
}
impl ToTokenTree for Hour {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding, is_12_hour_clock } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,
                    &is_12_hour_clock) {
                (Padding::Zero, false) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Hour",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Hour",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match is_12_hour_clock {
                    false => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_is_12_hour_clock", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(is_12_hour_clock,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
165    pub(crate) struct Hour {
166        pub(crate) padding: Padding = Padding::Zero,
167        pub(crate) is_12_hour_clock: bool = false,
168    }
169}
170
171pub(crate) struct Minute {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Minute {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Minute",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Minute",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
172    pub(crate) struct Minute {
173        pub(crate) padding: Padding = Padding::Zero,
174    }
175}
176
/// Modifier options for a period component.
pub(crate) struct Period {
    pub(crate) is_uppercase: bool,
    pub(crate) case_sensitive: bool,
}
impl ToTokenTree for Period {
    fn into_token_tree(self) -> TokenTree {
        let Self { is_uppercase, case_sensitive } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&is_uppercase,
                    &case_sensitive) {
                (true, true) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Period",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Period",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match is_uppercase {
                    true => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_is_uppercase", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(is_uppercase,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match case_sensitive {
                    true => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_case_sensitive", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(case_sensitive,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
178    pub(crate) struct Period {
179        pub(crate) is_uppercase: bool = true,
180        pub(crate) case_sensitive: bool = true,
181    }
182}
183
184pub(crate) struct Second {
    pub(crate) padding: Padding,
}
impl ToTokenTree for Second {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Second",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Second",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
185    pub(crate) struct Second {
186        pub(crate) padding: Padding = Padding::Zero,
187    }
188}
189
/// The number of digits in a subsecond component: an exact count (`One`
/// through `Nine`) or `OneOrMore`.
pub(crate) enum SubsecondDigits {
    One,
    Two,
    Three,
    Four,
    Five,
    Six,
    Seven,
    Eight,
    Nine,
    OneOrMore,
}
impl ToTokenStream for SubsecondDigits {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"SubsecondDigits",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::One => "One",
                Self::Two => "Two",
                Self::Three => "Three",
                Self::Four => "Four",
                Self::Five => "Five",
                Self::Six => "Six",
                Self::Seven => "Seven",
                Self::Eight => "Eight",
                Self::Nine => "Nine",
                Self::OneOrMore => "OneOrMore",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
191    pub(crate) enum SubsecondDigits {
192        One,
193        Two,
194        Three,
195        Four,
196        Five,
197        Six,
198        Seven,
199        Eight,
200        Nine,
201        OneOrMore,
202    }
203}
204
205pub(crate) struct Subsecond {
    pub(crate) digits: SubsecondDigits,
}
impl ToTokenTree for Subsecond {
    fn into_token_tree(self) -> TokenTree {
        let Self { digits } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&digits,) {
                (SubsecondDigits::OneOrMore,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"Subsecond",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Subsecond",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match digits {
                    SubsecondDigits::OneOrMore => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_digits", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(digits, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
206    pub(crate) struct Subsecond {
207        pub(crate) digits: SubsecondDigits = SubsecondDigits::OneOrMore,
208    }
209}
210
211pub(crate) struct OffsetHour {
    pub(crate) sign_is_mandatory: bool,
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetHour {
    fn into_token_tree(self) -> TokenTree {
        let Self { sign_is_mandatory, padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&sign_is_mandatory,
                    &padding) {
                (false, Padding::Zero) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"OffsetHour",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"OffsetHour",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match sign_is_mandatory
                    {
                    false => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_sign_is_mandatory", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(sign_is_mandatory,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
212    pub(crate) struct OffsetHour {
213        pub(crate) sign_is_mandatory: bool = false,
214        pub(crate) padding: Padding = Padding::Zero,
215    }
216}
217
218pub(crate) struct OffsetMinute {
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetMinute {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"OffsetMinute",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"OffsetMinute",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
219    pub(crate) struct OffsetMinute {
220        pub(crate) padding: Padding = Padding::Zero,
221    }
222}
223
224pub(crate) struct OffsetSecond {
    pub(crate) padding: Padding,
}
impl ToTokenTree for OffsetSecond {
    fn into_token_tree(self) -> TokenTree {
        let Self { padding } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&padding,) {
                (Padding::Zero,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"OffsetSecond",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"OffsetSecond",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match padding {
                    Padding::Zero => true,
                    _ => false,
                } {
            let method_name = Ident::new("with_padding", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(padding, ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
225    pub(crate) struct OffsetSecond {
226        pub(crate) padding: Padding = Padding::Zero,
227    }
228}
/// Padding applied to a numeric component when formatting.
// NOTE(review): variant semantics (space-pad / zero-pad / no padding) are
// inferred from the names — confirm against time's format-description docs.
pub(crate) enum Padding {
    Space,
    Zero,
    None,
}
impl ToTokenStream for Padding {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"Padding",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Space => "Space",
                Self::Zero => "Zero",
                Self::None => "None",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
231    pub(crate) enum Padding {
232        Space,
233        Zero,
234        None,
235    }
236}
/// Parsed data for an `[ignore]` modifier.
pub(crate) struct Ignore {
    // Number of characters to ignore; `NonZero` guarantees at least one.
    pub(crate) count: NonZero<u16>,
}
241
242impl ToTokenTree for Ignore {
243    fn into_token_tree(self) -> TokenTree {
244        {
    use proc_macro::*;
    TokenTree::Group(Group::new(Delimiter::Brace,
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"Ignore",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"count",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        ts_mut.extend([crate::to_tokens::ToTokenTree::into_token_tree(self.count)]);
                                        ;
                                        ts
                                    }))]);
                ;
                ts
            }))
}quote_group! {{
245            Ignore::count(#(self.count))
246        }}
247    }
248}
/// Precision of a `[unix_timestamp]` value.
pub(crate) enum UnixTimestampPrecision {
    Second,
    Millisecond,
    Microsecond,
    Nanosecond,
}
impl ToTokenStream for UnixTimestampPrecision {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"UnixTimestampPrecision",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Second => "Second",
                Self::Millisecond => "Millisecond",
                Self::Microsecond => "Microsecond",
                Self::Nanosecond => "Nanosecond",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
251    pub(crate) enum UnixTimestampPrecision {
252        Second,
253        Millisecond,
254        Microsecond,
255        Nanosecond,
256    }
257}
258
259pub(crate) struct UnixTimestamp {
    pub(crate) precision: UnixTimestampPrecision,
    pub(crate) sign_is_mandatory: bool,
}
impl ToTokenTree for UnixTimestamp {
    fn into_token_tree(self) -> TokenTree {
        let Self { precision, sign_is_mandatory } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&precision,
                    &sign_is_mandatory) {
                (UnixTimestampPrecision::Second, false) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"UnixTimestamp",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"UnixTimestamp",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match precision {
                    UnixTimestampPrecision::Second => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_precision", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(precision,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match sign_is_mandatory
                    {
                    false => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_sign_is_mandatory", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(sign_is_mandatory,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
260    pub(crate) struct UnixTimestamp {
261        pub(crate) precision: UnixTimestampPrecision = UnixTimestampPrecision::Second,
262        pub(crate) sign_is_mandatory: bool = false,
263    }
264}
/// How input remaining after a parse is treated by the `[end]` modifier.
pub(crate) enum TrailingInput {
    Prohibit,
    Discard,
}
impl ToTokenStream for TrailingInput {
    fn append_to(self, ts: &mut TokenStream) {
        {
            use proc_macro::*;
            ts.extend([TokenTree::from(Ident::new(&"TrailingInput",
                                Span::mixed_site()))]);
            ts.extend([TokenTree::from(Punct::new(':', Spacing::Joint)),
                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
            ;
            ;
        };
        let name =
            match self {
                Self::Prohibit => "Prohibit",
                Self::Discard => "Discard",
            };
        ts.extend([TokenTree::Ident(Ident::new(name, Span::mixed_site()))]);
    }
}to_tokens! {
267    pub(crate) enum TrailingInput {
268        Prohibit,
269        Discard,
270    }
271}
272
273pub(crate) struct End {
    pub(crate) trailing_input: TrailingInput,
}
impl ToTokenTree for End {
    fn into_token_tree(self) -> TokenTree {
        let Self { trailing_input } = self;

        #[allow(clippy :: redundant_pattern_matching)]
        if #[allow(non_exhaustive_omitted_patterns)] match (&trailing_input,)
                {
                (TrailingInput::Prohibit,) => true,
                _ => false,
            } {
            return TokenTree::Group(Group::new(Delimiter::None,
                        {
                            use proc_macro::*;
                            let mut ts = TokenStream::new();
                            let ts_mut = &mut ts;
                            ts_mut.extend([TokenTree::from(Ident::new(&"End",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::from(Punct::new(':',
                                                Spacing::Joint)),
                                        TokenTree::from(Punct::new(':', Spacing::Alone))]);
                            ;
                            ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                                Span::mixed_site()))]);
                            ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                                proc_macro::TokenStream::new()))]);
                            ;
                            ts
                        }));
        }
        let mut tokens =
            {
                use proc_macro::*;
                let mut ts = TokenStream::new();
                let ts_mut = &mut ts;
                ts_mut.extend([TokenTree::from(Ident::new(&"End",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::from(Punct::new(':',
                                    Spacing::Joint)),
                            TokenTree::from(Punct::new(':', Spacing::Alone))]);
                ;
                ts_mut.extend([TokenTree::from(Ident::new(&"default",
                                    Span::mixed_site()))]);
                ts_mut.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    proc_macro::TokenStream::new()))]);
                ;
                ts
            };

        #[allow(clippy :: redundant_pattern_matching)]
        if !#[allow(non_exhaustive_omitted_patterns)] match trailing_input {
                    TrailingInput::Prohibit => true,
                    _ => false,
                } {
            let method_name =
                Ident::new("with_trailing_input", Span::mixed_site());
            {
                use proc_macro::*;
                tokens.extend([TokenTree::from(Punct::new('.',
                                    Spacing::Alone))]);
                ;
                tokens.extend([crate::to_tokens::ToTokenTree::into_token_tree(method_name)]);
                tokens.extend([TokenTree::Group(Group::new(Delimiter::Parenthesis,
                                    {
                                        use proc_macro::*;
                                        let mut ts = TokenStream::new();
                                        let ts_mut = &mut ts;
                                        crate::to_tokens::ToTokenStream::append_to(trailing_input,
                                            ts_mut);
                                        ;
                                        ts
                                    }))]);
                ;
            };
        }
        TokenTree::Group(Group::new(Delimiter::Brace, tokens))
    }
}to_tokens! {
274    pub(crate) struct End {
275        pub(crate) trailing_input: TrailingInput = TrailingInput::Prohibit,
276    }
277}