use std::{
    borrow::Borrow,
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    mem,
    ops::{Bound, Deref, RangeBounds},
    slice,
};

use crate::{ffi, translate::*};

wrapper! {
    /// A reference-counted, immutable byte buffer.
    #[doc(alias = "GBytes")]
    pub struct Bytes(Shared<ffi::GBytes>);

    match fn {
        ref => |ptr| ffi::g_bytes_ref(ptr),
        unref => |ptr| ffi::g_bytes_unref(ptr),
        type_ => || ffi::g_bytes_get_type(),
    }
}

impl Bytes {
    // Copies `data` into a newly allocated `GBytes`. Kept private; the
    // public entry point is the `From<&T>` implementation below.
    #[doc(alias = "g_bytes_new")]
    #[inline]
    fn new<T: AsRef<[u8]>>(data: T) -> Bytes {
        let data = data.as_ref();
        unsafe { from_glib_full(ffi::g_bytes_new(data.as_ptr() as *const _, data.len())) }
    }

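    /// Creates a new `Bytes` that wraps `data` without copying it; the
    /// `'static` bound guarantees that the data outlives every reference.
    ///
    /// A minimal usage sketch (assumes the crate is consumed under its
    /// published name, `glib`):
    ///
    /// ```
    /// let bytes = glib::Bytes::from_static(b"hello");
    /// assert_eq!(bytes.as_ref(), b"hello");
    /// ```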
    #[doc(alias = "g_bytes_new_static")]
    #[inline]
    pub fn from_static(data: &'static [u8]) -> Bytes {
        unsafe {
            from_glib_full(ffi::g_bytes_new_static(
                data.as_ptr() as *const _,
                data.len(),
            ))
        }
    }

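    /// Takes ownership of `data` and exposes it as a `Bytes` without
    /// copying; the value is dropped once the last reference is gone.
    ///
    /// A minimal usage sketch (assumes the crate is consumed as `glib`):
    ///
    /// ```
    /// let bytes = glib::Bytes::from_owned(vec![1u8, 2, 3]);
    /// assert_eq!(bytes.as_ref(), [1u8, 2, 3].as_ref());
    /// ```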
    #[doc(alias = "g_bytes_new_with_free_func")]
    pub fn from_owned<T: AsRef<[u8]> + Send + 'static>(data: T) -> Bytes {
        let data: Box<T> = Box::new(data);
        let (size, data_ptr) = {
            let data = (*data).as_ref();
            (data.len(), data.as_ptr())
        };

        // Free function invoked by GLib once the last reference to the
        // `GBytes` is dropped; it reconstructs and drops the leaked `Box`.
        unsafe extern "C" fn drop_box<T: AsRef<[u8]> + Send + 'static>(b: ffi::gpointer) {
            let _: Box<T> = Box::from_raw(b as *mut _);
        }

        unsafe {
            from_glib_full(ffi::g_bytes_new_with_free_func(
                data_ptr as *const _,
                size,
                Some(drop_box::<T>),
                Box::into_raw(data) as *mut _,
            ))
        }
    }

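    /// Consumes the `Bytes` and returns the underlying data, avoiding a
    /// copy when this is the only remaining reference.
    ///
    /// A minimal usage sketch (assumes the crate is consumed as `glib`):
    ///
    /// ```
    /// let bytes = glib::Bytes::from_static(b"abc");
    /// assert_eq!(bytes.into_data().as_slice(), b"abc");
    /// ```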
    #[doc(alias = "g_bytes_unref_to_data")]
    pub fn into_data(self) -> crate::collections::Slice<u8> {
        unsafe {
            let mut size = mem::MaybeUninit::uninit();
            let ret = ffi::g_bytes_unref_to_data(self.into_glib_ptr(), size.as_mut_ptr());
            crate::collections::Slice::from_glib_full_num(ret as *mut u8, size.assume_init())
        }
    }

    // Resolves a generic `RangeBounds` against `self.len()`, returning the
    // `(offset, size)` pair expected by `g_bytes_new_from_bytes()`. Panics
    // if the range falls outside the buffer.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (usize, usize) {
        let len = self.len();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset <= len, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => len,
        };
        assert!(end_offset <= len, "End offset after valid range");

        let size = end_offset.saturating_sub(start_offset);

        (start_offset, size)
    }

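    /// Returns a new `Bytes` for the given `range` of `bytes`, sharing the
    /// underlying storage instead of copying it.
    ///
    /// A minimal usage sketch (assumes the crate is consumed as `glib`):
    ///
    /// ```
    /// let all = glib::Bytes::from_static(b"hello world");
    /// let tail = glib::Bytes::from_bytes(&all, 6..);
    /// assert_eq!(tail.as_ref(), b"world");
    /// ```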
    #[doc(alias = "g_bytes_new_from_bytes")]
    pub fn from_bytes(bytes: &Self, range: impl RangeBounds<usize>) -> Self {
        let (offset, size) = bytes.calculate_offset_size(range);
        unsafe {
            from_glib_full(ffi::g_bytes_new_from_bytes(
                bytes.to_glib_none().0,
                offset,
                size,
            ))
        }
    }
}

// SAFETY: `GBytes` is immutable and GLib manages its reference count
// atomically, so values can be moved to and shared between threads.
unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}

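/// Copies the borrowed bytes into a new heap-allocated `Bytes`.
///
/// A minimal usage sketch (assumes the crate is consumed as `glib`):
///
/// ```
/// let bytes = glib::Bytes::from(&b"hello"[..]);
/// assert_eq!(bytes.as_ref(), b"hello");
/// ```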
impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for Bytes {
    #[inline]
    fn from(value: &'a T) -> Bytes {
        Bytes::new(value.borrow())
    }
}

impl fmt::Debug for Bytes {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Bytes")
            .field("ptr", &ToGlibPtr::<*const _>::to_glib_none(self).0)
            .field("data", &&self[..])
            .finish()
    }
}

impl AsRef<[u8]> for Bytes {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self
    }
}

impl Deref for Bytes {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        unsafe {
            let mut len = 0;
            let ptr = ffi::g_bytes_get_data(self.to_glib_none().0, &mut len);
            if ptr.is_null() || len == 0 {
                // `g_bytes_get_data()` may return `NULL` for empty buffers;
                // hand out a valid empty slice instead.
                &[]
            } else {
                slice::from_raw_parts(ptr as *const u8, len)
            }
        }
    }
}

impl PartialEq for Bytes {
    #[doc(alias = "g_bytes_equal")]
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        unsafe {
            from_glib(ffi::g_bytes_equal(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            ))
        }
    }
}

impl Eq for Bytes {}

impl PartialOrd for Bytes {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Bytes {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        unsafe {
            let ret = ffi::g_bytes_compare(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            );
            // Map the C-style comparison result (negative/zero/positive)
            // onto `Ordering`.
            ret.cmp(&0)
        }
    }
}

// Implements symmetric `PartialEq`/`PartialOrd` between `Bytes` and plain
// byte containers by comparing the underlying slices.
macro_rules! impl_cmp {
    ($lhs:ty, $rhs:ty) => {
        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}

impl_cmp!(Bytes, [u8]);
impl_cmp!(Bytes, &'a [u8]);
impl_cmp!(&'a Bytes, [u8]);
impl_cmp!(Bytes, Vec<u8>);
impl_cmp!(&'a Bytes, Vec<u8>);

impl Hash for Bytes {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length first, mirroring the standard `Hash` impl for
        // slices, then feed in the bytes themselves.
        self.len().hash(state);
        Hash::hash_slice(self, state)
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    #[test]
    fn eq() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a1 = Bytes::from(abc);
        let a2 = Bytes::from(abc);
        let d = Bytes::from(def);
        assert_eq!(a1, a2);
        assert_eq!(def, d);
        assert_ne!(a1, d);
        assert_ne!(a1, def);
    }

    #[test]
    fn ord() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a = Bytes::from(abc);
        let d = Bytes::from(def);
        assert!(a < d);
        assert!(a < def);
        assert!(abc < d);
        assert!(d > a);
        assert!(d > abc);
        assert!(def > a);
    }

    #[test]
    fn hash() {
        let b1 = Bytes::from(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        let b3 = Bytes::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }

    #[test]
    fn from_static() {
        let b1 = Bytes::from_static(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        assert_eq!(b1, b2);
    }

    #[test]
    fn from_owned() {
        let b = Bytes::from_owned(vec![1, 2, 3]);
        assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    fn from_bytes() {
        let b1 = Bytes::from_owned(vec![1, 2, 3]);
        let b2 = Bytes::from_bytes(&b1, 1..=1);
        assert_eq!(b2, [2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 1..);
        assert_eq!(b2, [2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..2);
        assert_eq!(b2, [1u8, 2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..);
        assert_eq!(b2, [1u8, 2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 3..);
        assert_eq!(b2, [].as_ref());
    }

    #[test]
    fn into_data() {
        let b = Bytes::from(b"this is a test");
        let d = b.into_data();
        assert_eq!(d.as_slice(), b"this is a test");
    }
}