// diesel/pg/types/array.rs
use byteorder::{NetworkEndian, ReadBytesExt, WriteBytesExt};
use core::fmt;
use std::io::Write;

use crate::deserialize::{self, FromSql, FromSqlRow};
use crate::pg::{Pg, PgTypeMetadata, PgValue};
use crate::query_builder::bind_collector::ByteWrapper;
use crate::serialize::{self, IsNull, Output, ToSql};
use crate::sql_types::{Array, HasSqlType, Nullable};

#[cfg(feature = "postgres_backend")]
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, AsExpression, FromSqlRow)]
#[diesel(sql_type = Array<T>)]
/// Postgres allows multi-dimensional arrays of at most 6 dimensions. Internally they are stored as a flattened
/// representation with the dimension information encoded in the header. This struct represents a
/// multi-dimensional array with elements of type `T` as opposed to `Vec<T>` which can be used for 1d-arrays.
pub struct NdArray<T> {
    /// A list that describes how many values for each dimension are returned
    pub dims: Vec<usize>,
    /// The actual data flattened to a single array
    ///
    /// This array contains values ordered by the left most dimension
    /// which means there will be dim[0] values for the first element of the second dimension
    /// followed by dim[0] values for the second element of the second dimensions
    /// and so up to dim[1] times. Afterwards that number of values is repeated for dim[2],
    /// and so for all dimensions in the dimension field above
    pub data: Vec<T>,
}
29
#[cfg(feature = "postgres_backend")]
impl<T> HasSqlType<Array<T>> for Pg
where
    Pg: HasSqlType<T>,
{
    /// Derives the metadata of `T[]` from the metadata of the element type `T`.
    fn metadata(lookup: &mut Self::MetadataLookup) -> PgTypeMetadata {
        // Resolve the element type first; the element's metadata carries
        // the OID of the corresponding array type.
        let element = <Pg as HasSqlType<T>>::metadata(lookup).0;
        match element {
            Ok(tpe) => PgTypeMetadata::new(tpe.array_oid, 0),
            // A failed lookup for the element type is propagated unchanged.
            Err(e) => PgTypeMetadata(Err(e)),
        }
    }
}
42
#[cfg(feature = "postgres_backend")]
impl<T, ST> FromSql<Array<ST>, Pg> for Vec<T>
where
    T: FromSql<ST, Pg>,
{
    /// Deserializes a one-dimensional Postgres array from its binary wire
    /// format: a header (dimension count, has-null flag, element OID),
    /// one (element count, lower bound) pair per dimension, then the
    /// length-prefixed element payloads.
    fn from_sql(value: PgValue<'_>) -> deserialize::Result<Self> {
        let mut bytes = value.as_bytes();
        let num_dimensions = bytes.read_i32::<NetworkEndian>()?;
        // Non-zero when at least one element is NULL
        let has_null = bytes.read_i32::<NetworkEndian>()? != 0;
        let _oid = bytes.read_i32::<NetworkEndian>()?;

        // An empty array is encoded with zero dimensions and no further data
        if num_dimensions == 0 {
            return Ok(Vec::new());
        }

        let num_elements = bytes.read_i32::<NetworkEndian>()?;
        let _lower_bound = bytes.read_i32::<NetworkEndian>()?;

        if num_dimensions != 1 {
            return Err("multi-dimensional arrays are not supported".into());
        }

        (0..num_elements)
            .map(|_| {
                let elem_size = bytes.read_i32::<NetworkEndian>()?;
                // A length of -1 marks a NULL element; it has no payload bytes
                if has_null && elem_size == -1 {
                    T::from_nullable_sql(None)
                } else {
                    let elem_size: usize = elem_size.try_into()?;
                    // Guard against a corrupt payload announcing more bytes
                    // than actually remain: `split_at` would panic on that.
                    if bytes.len() < elem_size {
                        return Err(
                            "Invalid array element: fewer bytes remain than announced".into()
                        );
                    }
                    let (elem_bytes, new_bytes) = bytes.split_at(elem_size);
                    bytes = new_bytes;
                    T::from_sql(PgValue::new_internal(elem_bytes, &value))
                }
            })
            .collect()
    }
}
79
#[cfg(feature = "postgres_backend")]
impl<T, ST> FromSql<Array<ST>, Pg> for NdArray<T>
where
    T: FromSql<ST, Pg>,
{
    /// Deserializes a (possibly multi-dimensional) Postgres array, keeping
    /// the per-dimension extents in [`NdArray::dims`] and the flattened
    /// elements in [`NdArray::data`].
    fn from_sql(value: PgValue<'_>) -> deserialize::Result<Self> {
        let mut bytes = value.as_bytes();
        let num_dimensions = bytes.read_i32::<NetworkEndian>()?;
        // Non-zero when at least one element is NULL
        let has_null = bytes.read_i32::<NetworkEndian>()? != 0;
        let _oid = bytes.read_i32::<NetworkEndian>()?;

        // An empty array is encoded with zero dimensions and no further data
        if num_dimensions == 0 {
            return Ok(NdArray {
                dims: Vec::new(),
                data: Vec::new(),
            });
        }

        let num_dims: usize = num_dimensions
            .try_into()
            .map_err(|_| "number of dimensions must be positive")?;

        // One (element count, lower bound) pair per dimension
        let dims = (0..num_dims)
            .map(|_| {
                let num_elements = bytes.read_i32::<NetworkEndian>()?;
                let _lower_bound = bytes.read_i32::<NetworkEndian>()?;

                let dim: usize = num_elements
                    .try_into()
                    .map_err(|_| "array dimension length must be positive")?;
                Ok(dim)
            })
            .collect::<deserialize::Result<Vec<_>>>()?;

        // Total element count is the product of all extents. Use checked
        // multiplication so a malformed header cannot overflow (which would
        // panic in debug builds and wrap silently in release builds).
        let num_elements = dims
            .iter()
            .try_fold(1usize, |acc, &dim| acc.checked_mul(dim))
            .ok_or("array dimensions are too large")?;

        let data = (0..num_elements)
            .map(|_| -> deserialize::Result<T> {
                let elem_size = bytes.read_i32::<NetworkEndian>()?;
                // A length of -1 marks a NULL element; it has no payload bytes
                if has_null && elem_size == -1 {
                    T::from_nullable_sql(None)
                } else {
                    let elem_size: usize = elem_size.try_into()?;
                    // Guard against a corrupt payload announcing more bytes
                    // than actually remain: `split_at` would panic on that.
                    if bytes.len() < elem_size {
                        return Err(
                            "Invalid array element: fewer bytes remain than announced".into()
                        );
                    }
                    let (elem_bytes, new_bytes) = bytes.split_at(elem_size);
                    bytes = new_bytes;
                    T::from_sql(PgValue::new_internal(elem_bytes, &value))
                }
            })
            .collect::<deserialize::Result<Vec<T>>>()?;
        Ok(NdArray { dims, data })
    }
}
129
130use crate::expression::AsExpression;
131use crate::expression::bound::Bound;
132
133macro_rules! array_as_expression {
134    ($ty:ty, $sql_type:ty) => {
135        #[cfg(feature = "postgres_backend")]
136        // this simplifies the macro implementation
137        // as some macro calls use this lifetime
138        #[allow(clippy::extra_unused_lifetimes)]
139        impl<'a, 'b, ST: 'static, T> AsExpression<$sql_type> for $ty {
140            type Expression = Bound<$sql_type, Self>;
141
142            fn as_expression(self) -> Self::Expression {
143                Bound::new(self)
144            }
145        }
146    };
147}
148
149#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Array<ST>> for &'a [T] {
    type Expression = Bound<Array<ST>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a [T], Array<ST>);
150#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Nullable<Array<ST>>> for &'a [T] {
    type Expression = Bound<Nullable<Array<ST>>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a [T], Nullable<Array<ST>>);
151#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Array<ST>> for &'a &'b [T] {
    type Expression = Bound<Array<ST>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a &'b [T], Array<ST>);
152#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Nullable<Array<ST>>> for &'a &'b [T]
    {
    type Expression = Bound<Nullable<Array<ST>>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a &'b [T], Nullable<Array<ST>>);
153#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Array<ST>> for Vec<T> {
    type Expression = Bound<Array<ST>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(Vec<T>, Array<ST>);
154#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Nullable<Array<ST>>> for Vec<T> {
    type Expression = Bound<Nullable<Array<ST>>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(Vec<T>, Nullable<Array<ST>>);
155#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Array<ST>> for &'a Vec<T> {
    type Expression = Bound<Array<ST>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a Vec<T>, Array<ST>);
156#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Nullable<Array<ST>>> for &'a Vec<T>
    {
    type Expression = Bound<Nullable<Array<ST>>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a Vec<T>, Nullable<Array<ST>>);
157#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Array<ST>> for &'a &'b Vec<T> {
    type Expression = Bound<Array<ST>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a &'b Vec<T>, Array<ST>);
158#[allow(clippy :: extra_unused_lifetimes)]
impl<'a, 'b, ST: 'static, T> AsExpression<Nullable<Array<ST>>> for
    &'a &'b Vec<T> {
    type Expression = Bound<Nullable<Array<ST>>, Self>;
    fn as_expression(self) -> Self::Expression { Bound::new(self) }
}array_as_expression!(&'a &'b Vec<T>, Nullable<Array<ST>>);
159
#[cfg(feature = "postgres_backend")]
impl<ST, T> ToSql<Array<ST>, Pg> for [T]
where
    Pg: HasSqlType<ST>,
    T: ToSql<ST, Pg>,
{
    /// Serializes the slice as a one-dimensional Postgres array in the
    /// binary wire format: header, one (count, lower bound) pair, then the
    /// length-prefixed element payloads.
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
        let num_dimensions = 1;
        out.write_i32::<NetworkEndian>(num_dimensions)?;
        let flags = 0;
        out.write_i32::<NetworkEndian>(flags)?;
        let element_oid = Pg::metadata(out.metadata_lookup()).oid()?;
        out.write_u32::<NetworkEndian>(element_oid)?;
        out.write_i32::<NetworkEndian>(self.len().try_into()?)?;
        // Postgres array indices start at 1 by default
        let lower_bound = 1;
        out.write_i32::<NetworkEndian>(lower_bound)?;

        // This buffer is created outside of the loop to reuse the underlying memory allocation
        // For most cases all array elements will have the same serialized size
        let mut buffer = Vec::new();

        for elem in self.iter() {
            let is_null = {
                let mut temp_buffer = Output::new(ByteWrapper(&mut buffer), out.metadata_lookup());
                elem.to_sql(&mut temp_buffer)?
            };

            if let IsNull::No = is_null {
                out.write_i32::<NetworkEndian>(buffer.len().try_into()?)?;
                out.write_all(&buffer)?;
            } else {
                // NULL elements are encoded as a length of -1 with no payload
                // https://github.com/postgres/postgres/blob/82f8107b92c9104ec9d9465f3f6a4c6dab4c124a/src/backend/utils/adt/arrayfuncs.c#L1461
                out.write_i32::<NetworkEndian>(-1)?;
            }
            // Clear unconditionally: a `ToSql` impl could write bytes before
            // reporting `IsNull::Yes`; stale bytes left in the buffer would
            // corrupt the next element's payload.
            buffer.clear();
        }

        Ok(IsNull::No)
    }
}
200
#[cfg(feature = "postgres_backend")]
impl<ST, T> ToSql<Nullable<Array<ST>>, Pg> for [T]
where
    [T]: ToSql<Array<ST>, Pg>,
    ST: 'static,
{
    /// A present (non-NULL) slice serializes exactly the same under the
    /// nullable SQL type, so forward to the non-nullable implementation.
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
        <[T] as ToSql<Array<ST>, Pg>>::to_sql(self, out)
    }
}
211
#[cfg(feature = "postgres_backend")]
impl<ST, T> ToSql<Array<ST>, Pg> for Vec<T>
where
    ST: 'static,
    [T]: ToSql<Array<ST>, Pg>,
    T: fmt::Debug,
{
    /// Delegates to the slice implementation, which produces the actual
    /// array wire format.
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
        self.as_slice().to_sql(out)
    }
}
223
#[cfg(feature = "postgres_backend")]
impl<ST, T> ToSql<Nullable<Array<ST>>, Pg> for Vec<T>
where
    ST: 'static,
    Vec<T>: ToSql<Array<ST>, Pg>,
{
    /// A present (non-NULL) `Vec` serializes exactly the same under the
    /// nullable SQL type, so forward to the non-nullable implementation.
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
        <Vec<T> as ToSql<Array<ST>, Pg>>::to_sql(self, out)
    }
}