// zerotrie/builder/bytestr.rs

use core::borrow::Borrow;

#[cfg(feature = "serde")]
use alloc::boxed::Box;
/// A slice of bytes, newtyped so it can carry ordering traits and `Borrow<[u8]>`
/// and serve as the key type in the trie builder.
#[repr(transparent)]
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct ByteStr([u8]);

impl ByteStr {
    pub const fn from_byte_slice_with_value<'a, 'l>(
        input: &'l [(&'a [u8], usize)],
    ) -> &'l [(&'a ByteStr, usize)] {
        // Safety: `ByteStr` is `#[repr(transparent)]` over `[u8]`,
        // so the two slice types have identical layouts.
        unsafe { core::mem::transmute(input) }
    }

    pub const fn from_str_slice_with_value<'a, 'l>(
        input: &'l [(&'a str, usize)],
    ) -> &'l [(&'a ByteStr, usize)] {
        // Safety: `str` has the same layout as `[u8]`, and `ByteStr` is
        // `#[repr(transparent)]` over `[u8]`.
        unsafe { core::mem::transmute(input) }
    }

    pub fn from_bytes(input: &[u8]) -> &Self {
        // Safety: `ByteStr` is `#[repr(transparent)]` over `[u8]`.
        unsafe { core::mem::transmute(input) }
    }

    #[cfg(feature = "serde")]
    pub fn from_boxed_bytes(input: Box<[u8]>) -> Box<Self> {
        // Safety: `ByteStr` is `#[repr(transparent)]` over `[u8]`.
        unsafe { core::mem::transmute(input) }
    }

    #[allow(dead_code)]
    pub fn from_str(input: &str) -> &Self {
        Self::from_bytes(input.as_bytes())
    }

    #[allow(dead_code)]
    pub fn empty() -> &'static Self {
        Self::from_bytes(&[])
    }

    #[allow(dead_code)]
    pub const fn as_bytes(&self) -> &[u8] {
        &self.0
    }

    pub const fn len(&self) -> usize {
        self.0.len()
    }

    #[allow(dead_code)]
    pub fn is_all_ascii(&self) -> bool {
        for byte in self.0.iter() {
            if !byte.is_ascii() {
                return false;
            }
        }
        true
    }

    #[allow(dead_code)]
    pub(crate) fn byte_at(&self, index: usize) -> Option<u8> {
        self.0.get(index).copied()
    }

    /// Returns the byte at `index`, panicking if `index` is out of range.
    #[allow(clippy::indexing_slicing)]
    pub(crate) const fn byte_at_or_panic(&self, index: usize) -> u8 {
        self.0[index]
    }

    /// Const-compatible lexicographic comparison; equivalent to `self < other`.
    #[allow(clippy::indexing_slicing)]
    pub(crate) const fn is_less_then(&self, other: &Self) -> bool {
        let mut i = 0;
        while i < self.len() && i < other.len() {
            if self.0[i] < other.0[i] {
                return true;
            }
            if self.0[i] > other.0[i] {
                return false;
            }
            i += 1;
        }
        self.len() < other.len()
    }

    /// Returns whether the first `prefix_len` bytes of `self` and `other` are equal.
    ///
    /// Panics if `prefix_len` is greater than the length of either byte string.
    #[allow(clippy::indexing_slicing)]
    pub(crate) const fn prefix_eq(&self, other: &ByteStr, prefix_len: usize) -> bool {
        assert!(prefix_len <= self.len());
        assert!(prefix_len <= other.len());
        let mut i = 0;
        while i < prefix_len {
            if self.0[i] != other.0[i] {
                return false;
            }
            i += 1;
        }
        true
    }
}
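
// A minimal compile-time sketch with hypothetical data (the table below is not
// part of the original source): it illustrates that the `const` constructors
// above can reinterpret a literal (key, value) table as `ByteStr` keys in a
// constant context.
#[cfg(test)]
const _CONST_CONSTRUCTOR_SKETCH: &[(&ByteStr, usize)] =
    ByteStr::from_str_slice_with_value(&[("abc", 0), ("abd", 1)]);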

impl Borrow<[u8]> for ByteStr {
    fn borrow(&self) -> &[u8] {
        self.as_bytes()
    }
}

#[cfg(feature = "alloc")]
impl Borrow<[u8]> for alloc::boxed::Box<ByteStr> {
    fn borrow(&self) -> &[u8] {
        self.as_bytes()
    }
}
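
// A minimal test sketch with hypothetical data (module and test names are
// illustrative, not from the original source): the const comparison helpers
// are expected to agree with the derived `PartialEq`/`Ord` impls.
#[cfg(test)]
mod bytestr_sketch_tests {
    use super::*;

    #[test]
    fn comparison_helpers_agree_with_derived_impls() {
        let apple = ByteStr::from_str("apple");
        let apples = ByteStr::from_str("apples");
        let banana = ByteStr::from_str("banana");

        // `is_less_then` is the const counterpart of lexicographic `<`
        assert!(apple.is_less_then(apples));
        assert!(apple.is_less_then(banana));
        assert!(!banana.is_less_then(apple));
        assert_eq!(apple < banana, apple.is_less_then(banana));

        // `prefix_eq` compares only the first `prefix_len` bytes
        assert!(apple.prefix_eq(apples, 5));
        assert!(!apple.prefix_eq(banana, 1));

        // `byte_at` is the non-panicking accessor; `byte_at_or_panic` indexes directly
        assert_eq!(apple.byte_at(0), Some(b'a'));
        assert_eq!(apple.byte_at(5), None);
        assert_eq!(apple.byte_at_or_panic(4), b'e');

        // `len` and `is_all_ascii` behave like their `[u8]` counterparts
        assert_eq!(apple.len(), 5);
        assert!(apple.is_all_ascii());
    }
}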