use std::{
    borrow::Borrow,
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    mem,
    ops::{Bound, Deref, RangeBounds},
    slice,
};

use crate::{ffi, translate::*};

wrapper! {
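    // rustdoc-stripper-ignore-next
    /// A shared immutable byte slice (the equivalent of `Rc<[u8]>`).
    ///
    /// `From` implementations that take references (e.g. `&[u8]`) copy the
    /// data. The `from_static` constructor avoids copying static data.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let v = vec![1, 2, 3];
    /// let b = Bytes::from(&v);
    /// assert_eq!(v, b);
    ///
    /// let s = b"xyz";
    /// let b = Bytes::from_static(s);
    /// assert_eq!(&s[..], b);
    /// ```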
    #[doc(alias = "GBytes")]
    pub struct Bytes(Shared<ffi::GBytes>);

    match fn {
        ref => |ptr| ffi::g_bytes_ref(ptr),
        unref => |ptr| ffi::g_bytes_unref(ptr),
        type_ => || ffi::g_bytes_get_type(),
    }
}

impl Bytes {
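    // rustdoc-stripper-ignore-next
    /// Copies `data` into a new shared slice.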
    #[doc(alias = "g_bytes_new")]
    #[inline]
    fn new<T: AsRef<[u8]>>(data: T) -> Bytes {
        let data = data.as_ref();
        unsafe { from_glib_full(ffi::g_bytes_new(data.as_ptr() as *const _, data.len())) }
    }

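    // rustdoc-stripper-ignore-next
    /// Creates a view into static `data` without copying.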
    #[doc(alias = "g_bytes_new_static")]
    #[inline]
    pub fn from_static(data: &'static [u8]) -> Bytes {
        unsafe {
            from_glib_full(ffi::g_bytes_new_static(
                data.as_ptr() as *const _,
                data.len(),
            ))
        }
    }

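    // rustdoc-stripper-ignore-next
    /// Takes ownership of `data` and creates a new `Bytes` without copying it.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let b = Bytes::from_owned(vec![1, 2, 3]);
    /// assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    /// ```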
    #[doc(alias = "g_bytes_new")]
    pub fn from_owned<T: AsRef<[u8]> + Send + 'static>(data: T) -> Bytes {
        // Box the data so the slice it exposes has a stable address for the
        // lifetime of the `GBytes`, then record that pointer and length.
        let data: Box<T> = Box::new(data);
        let (size, data_ptr) = {
            let data = (*data).as_ref();
            (data.len(), data.as_ptr())
        };

        // Free function invoked by GLib once the last reference is dropped:
        // reconstruct the box and let Rust drop the owned data.
        unsafe extern "C" fn drop_box<T: AsRef<[u8]> + Send + 'static>(b: ffi::gpointer) {
            let _: Box<T> = Box::from_raw(b as *mut _);
        }

        unsafe {
            from_glib_full(ffi::g_bytes_new_with_free_func(
                data_ptr as *const _,
                size,
                Some(drop_box::<T>),
                Box::into_raw(data) as *mut _,
            ))
        }
    }

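    // rustdoc-stripper-ignore-next
    /// Returns the underlying data of the `Bytes`.
    ///
    /// If there is no other reference to `self` then this does not copy the
    /// data, otherwise it is copied into newly allocated heap memory.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let b = Bytes::from(b"this is a test");
    /// assert_eq!(b.into_data().as_slice(), b"this is a test");
    /// ```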
    #[doc(alias = "g_bytes_unref_to_data")]
    pub fn into_data(self) -> crate::collections::Slice<u8> {
        unsafe {
            let mut size = mem::MaybeUninit::uninit();
            let ret = ffi::g_bytes_unref_to_data(self.into_glib_ptr(), size.as_mut_ptr());
            crate::collections::Slice::from_glib_full_num(ret as *mut u8, size.assume_init())
        }
    }

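    // Converts a Rust `RangeBounds` over this slice into the `(offset, size)`
    // pair expected by `g_bytes_new_from_bytes`, panicking on out-of-range or
    // overflowing bounds.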
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (usize, usize) {
        let len = self.len();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < len, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => len,
        };
        assert!(end_offset <= len, "End offset after valid range");

        let size = end_offset.saturating_sub(start_offset);

        (start_offset, size)
    }

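    // rustdoc-stripper-ignore-next
    /// Creates a new `Bytes` that references the given `range` of `bytes`.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let b = Bytes::from_owned(vec![1, 2, 3]);
    /// let sub = Bytes::from_bytes(&b, 1..);
    /// assert_eq!(sub, [2u8, 3u8].as_ref());
    /// ```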
    #[doc(alias = "g_bytes_new_from_bytes")]
    pub fn from_bytes(bytes: &Self, range: impl RangeBounds<usize>) -> Self {
        let (offset, size) = bytes.calculate_offset_size(range);
        unsafe {
            from_glib_full(ffi::g_bytes_new_from_bytes(
                bytes.to_glib_none().0,
                offset,
                size,
            ))
        }
    }
}

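// SAFETY: `GBytes` is immutable once constructed and its reference count is
// managed atomically, so sharing and sending across threads is sound; owned
// data handed over via `from_owned` is additionally constrained to `T: Send`.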
unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}

impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for Bytes {
    #[inline]
    fn from(value: &'a T) -> Bytes {
        Bytes::new(value.borrow())
    }
}

impl fmt::Debug for Bytes {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Bytes")
            .field("ptr", &ToGlibPtr::<*const _>::to_glib_none(self).0)
            .field("data", &&self[..])
            .finish()
    }
}

impl AsRef<[u8]> for Bytes {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self
    }
}

impl Deref for Bytes {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        unsafe {
            let mut len = 0;
            let ptr = ffi::g_bytes_get_data(self.to_glib_none().0, &mut len);
            // `g_bytes_get_data` can return NULL for empty data; map that
            // (and a zero length) to the canonical empty slice.
            if ptr.is_null() || len == 0 {
                &[]
            } else {
                slice::from_raw_parts(ptr as *const u8, len)
            }
        }
    }
}

impl PartialEq for Bytes {
    #[doc(alias = "g_bytes_equal")]
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        unsafe {
            from_glib(ffi::g_bytes_equal(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            ))
        }
    }
}

impl Eq for Bytes {}

impl PartialOrd for Bytes {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Bytes {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        unsafe {
            let ret = ffi::g_bytes_compare(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            );
            // `g_bytes_compare` returns a negative, zero, or positive value;
            // comparing it against 0 turns that into an `Ordering`.
            ret.cmp(&0)
        }
    }
}

macro_rules! impl_cmp {
    ($lhs:ty, $rhs:ty) => {
        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}


// Content comparisons between `Bytes` and the common byte-slice types, in
// both directions.
impl_cmp!(Bytes, [u8]);
impl_cmp!(Bytes, &'a [u8]);
impl_cmp!(&'a Bytes, [u8]);
impl_cmp!(Bytes, Vec<u8>);
impl_cmp!(&'a Bytes, Vec<u8>);

impl Hash for Bytes {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length first so that byte sequences that are prefixes of
        // one another still produce distinct hashes.
        self.len().hash(state);
        Hash::hash_slice(self, state)
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    #[test]
    fn eq() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a1 = Bytes::from(abc);
        let a2 = Bytes::from(abc);
        let d = Bytes::from(def);
        assert_eq!(a1, a2);
        assert_eq!(def, d);
        assert_ne!(a1, d);
        assert_ne!(a1, def);
    }

    #[test]
    fn ord() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a = Bytes::from(abc);
        let d = Bytes::from(def);
        assert!(a < d);
        assert!(a < def);
        assert!(abc < d);
        assert!(d > a);
        assert!(d > abc);
        assert!(def > a);
    }

    #[test]
    fn hash() {
        let b1 = Bytes::from(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        let b3 = Bytes::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }

    #[test]
    fn from_static() {
        let b1 = Bytes::from_static(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        assert_eq!(b1, b2);
    }

    #[test]
    fn from_owned() {
        let b = Bytes::from_owned(vec![1, 2, 3]);
        assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    fn from_bytes() {
        let b1 = Bytes::from_owned(vec![1, 2, 3]);
        let b2 = Bytes::from_bytes(&b1, 1..=1);
        assert_eq!(b2, [2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 1..);
        assert_eq!(b2, [2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..2);
        assert_eq!(b2, [1u8, 2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..);
        assert_eq!(b2, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    pub fn into_data() {
        let b = Bytes::from(b"this is a test");
        let d = b.into_data();
        assert_eq!(d.as_slice(), b"this is a test");
    }
}