1use std::{
4 borrow::Borrow,
5 cmp::Ordering,
6 fmt,
7 hash::{Hash, Hasher},
8 mem,
9 ops::{Bound, Deref, RangeBounds},
10 slice,
11};
12
13use crate::{ffi, translate::*};
14
wrapper! {
    // Shared (reference-counted) wrapper around GLib's immutable `GBytes`
    // byte buffer. Cloning a `Bytes` bumps the GLib refcount; dropping the
    // last handle releases the underlying buffer.
    #[doc(alias = "GBytes")]
    pub struct Bytes(Shared<ffi::GBytes>);

    match fn {
        ref => |ptr| ffi::g_bytes_ref(ptr),
        unref => |ptr| ffi::g_bytes_unref(ptr),
        type_ => || ffi::g_bytes_get_type(),
    }
}
70
impl Bytes {
    // Creates a new `Bytes` by copying `data` into a GLib-allocated buffer.
    #[doc(alias = "g_bytes_new")]
    #[inline]
    fn new<T: AsRef<[u8]>>(data: T) -> Bytes {
        let data = data.as_ref();
        // SAFETY: `g_bytes_new` reads exactly `data.len()` bytes from the
        // pointer and returns a new reference, whose ownership is assumed by
        // `from_glib_full`.
        unsafe { from_glib_full(ffi::g_bytes_new(data.as_ptr() as *const _, data.len())) }
    }

    // Wraps a `'static` slice without copying. The `'static` bound guarantees
    // the data outlives every `GBytes` reference to it.
    #[doc(alias = "g_bytes_new_static")]
    #[inline]
    pub fn from_static(data: &'static [u8]) -> Bytes {
        unsafe {
            from_glib_full(ffi::g_bytes_new_static(
                data.as_ptr() as *const _,
                data.len(),
            ))
        }
    }

    // Takes ownership of `data` without copying its bytes: the value is boxed
    // and dropped by the free function GLib invokes when the buffer is
    // released. `Send` is required because GLib may free it on another thread.
    #[doc(alias = "g_bytes_new")]
    pub fn from_owned<T: AsRef<[u8]> + Send + 'static>(data: T) -> Bytes {
        let data: Box<T> = Box::new(data);
        // Capture the slice pointer/length before `Box::into_raw`; boxing does
        // not move the heap allocation, so the pointer remains valid below.
        let (size, data_ptr) = {
            let data = (*data).as_ref();
            (data.len(), data.as_ptr())
        };

        // Free callback handed to GLib: reconstitutes the box and drops it.
        unsafe extern "C" fn drop_box<T: AsRef<[u8]> + Send + 'static>(b: ffi::gpointer) {
            unsafe {
                let _: Box<T> = Box::from_raw(b as *mut _);
            }
        }

        // SAFETY: the leaked box owns the bytes at `data_ptr`, and GLib calls
        // `drop_box` exactly once when the last reference is dropped.
        unsafe {
            from_glib_full(ffi::g_bytes_new_with_free_func(
                data_ptr as *const _,
                size,
                Some(drop_box::<T>),
                Box::into_raw(data) as *mut _,
            ))
        }
    }

    // Consumes `self` and returns the underlying data as an owned slice.
    // Per the GLib docs, `g_bytes_unref_to_data` avoids copying when this is
    // the sole reference to the buffer.
    #[doc(alias = "g_bytes_unref_to_data")]
    pub fn into_data(self) -> crate::collections::Slice<u8> {
        unsafe {
            let mut size = mem::MaybeUninit::uninit();
            // `into_glib_ptr` transfers our reference to the call; the buffer
            // length is written into `size`.
            let ret = ffi::g_bytes_unref_to_data(self.into_glib_ptr(), size.as_mut_ptr());
            crate::collections::Slice::from_glib_full_num(ret as *mut u8, size.assume_init())
        }
    }

    // Converts any `RangeBounds` into a `(start_offset, size)` pair.
    // Panics if a bound overflows `usize` or lies beyond `self.len()`; an
    // inverted range (start > end) yields size 0 via `saturating_sub`.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (usize, usize) {
        let len = self.len();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset <= len, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => len,
        };
        assert!(end_offset <= len, "End offset after valid range");

        let size = end_offset.saturating_sub(start_offset);

        (start_offset, size)
    }

    // Returns a new `Bytes` covering `range` within `bytes`. Per the GLib
    // docs, the result shares the source buffer where possible.
    #[doc(alias = "g_bytes_new_from_bytes")]
    pub fn from_bytes(bytes: &Self, range: impl RangeBounds<usize>) -> Self {
        let (offset, size) = bytes.calculate_offset_size(range);
        unsafe {
            from_glib_full(ffi::g_bytes_new_from_bytes(
                bytes.to_glib_none().0,
                offset,
                size,
            ))
        }
    }
}
208
// SAFETY: `GBytes` is an immutable byte buffer whose lifetime is managed by
// GLib's reference counting, so handles can be moved to and shared between
// threads. NOTE(review): this relies on GBytes refcounting being atomic —
// confirm against the GLib documentation.
unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}
211
212impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for Bytes {
213 #[inline]
214 fn from(value: &'a T) -> Bytes {
215 Bytes::new(value.borrow())
216 }
217}
218
219impl fmt::Debug for Bytes {
220 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
221 f.debug_struct("Bytes")
222 .field("ptr", &ToGlibPtr::<*const _>::to_glib_none(self).0)
223 .field("data", &&self[..])
224 .finish()
225 }
226}
227
228impl AsRef<[u8]> for Bytes {
229 #[inline]
230 fn as_ref(&self) -> &[u8] {
231 self
232 }
233}
234
impl Deref for Bytes {
    type Target = [u8];

    // Borrows the underlying buffer as a byte slice.
    #[inline]
    fn deref(&self) -> &[u8] {
        unsafe {
            let mut len = 0;
            let ptr = ffi::g_bytes_get_data(self.to_glib_none().0, &mut len);
            // Map a NULL pointer or zero length to the canonical empty slice:
            // `slice::from_raw_parts` requires a non-null, valid pointer.
            if ptr.is_null() || len == 0 {
                &[]
            } else {
                // SAFETY: `ptr` is non-null and addresses `len` bytes owned by
                // the `GBytes`, which lives at least as long as the borrow of
                // `self` that the returned slice is tied to.
                slice::from_raw_parts(ptr as *const u8, len)
            }
        }
    }
}
251
252impl PartialEq for Bytes {
253 #[doc(alias = "g_bytes_equal")]
254 #[inline]
255 fn eq(&self, other: &Self) -> bool {
256 unsafe {
257 from_glib(ffi::g_bytes_equal(
258 ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
259 ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
260 ))
261 }
262 }
263}
264
// Byte-content equality is reflexive, so the partial equality is total.
impl Eq for Bytes {}
266
267impl PartialOrd for Bytes {
268 #[inline]
269 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
270 Some(self.cmp(other))
271 }
272}
273
274impl Ord for Bytes {
275 #[inline]
276 fn cmp(&self, other: &Self) -> Ordering {
277 unsafe {
278 let ret = ffi::g_bytes_compare(
279 ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
280 ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
281 );
282 ret.cmp(&0)
283 }
284 }
285}
286
// Generates the four symmetric `PartialEq`/`PartialOrd` impls between `$lhs`
// and `$rhs` by comparing the byte slices each side indexes to with `[..]`.
// The unused-lifetime allowance covers invocations whose types carry `'a`.
macro_rules! impl_cmp {
    ($lhs:ty, $rhs: ty) => {
        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}
326
// Cross-type comparisons so `Bytes` compares directly against slices and
// `Vec<u8>` in either position.
impl_cmp!(Bytes, [u8]);
impl_cmp!(Bytes, &'a [u8]);
impl_cmp!(&'a Bytes, [u8]);
impl_cmp!(Bytes, Vec<u8>);
impl_cmp!(&'a Bytes, Vec<u8>);
332
333impl Hash for Bytes {
334 #[inline]
335 fn hash<H: Hasher>(&self, state: &mut H) {
336 self.len().hash(state);
337 Hash::hash_slice(self, state)
338 }
339}
340
#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    // Equality holds between `Bytes` values and against plain `&[u8]` in
    // either position (via the impl_cmp! impls).
    #[test]
    fn eq() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a1 = Bytes::from(abc);
        let a2 = Bytes::from(abc);
        let d = Bytes::from(def);
        assert_eq!(a1, a2);
        assert_eq!(def, d);
        assert_ne!(a1, d);
        assert_ne!(a1, def);
    }

    // Ordering is lexicographic on contents and works across Bytes/slice.
    #[test]
    fn ord() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a = Bytes::from(abc);
        let d = Bytes::from(def);
        assert!(a < d);
        assert!(a < def);
        assert!(abc < d);
        assert!(d > a);
        assert!(d > abc);
        assert!(def > a);
    }

    // Hash agrees with equality: equal contents hash equal, so HashSet
    // lookups by a distinct-but-equal Bytes succeed.
    #[test]
    fn hash() {
        let b1 = Bytes::from(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        let b3 = Bytes::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }

    // A zero-copy static wrapper compares equal to a copied one.
    #[test]
    fn from_static() {
        let b1 = Bytes::from_static(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        assert_eq!(b1, b2);
    }

    // Ownership transfer of a Vec preserves the contents.
    #[test]
    fn from_owned() {
        let b = Bytes::from_owned(vec![1, 2, 3]);
        assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    }

    // Sub-slicing with every RangeBounds flavor, including an empty tail.
    #[test]
    fn from_bytes() {
        let b1 = Bytes::from_owned(vec![1, 2, 3]);
        let b2 = Bytes::from_bytes(&b1, 1..=1);
        assert_eq!(b2, [2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 1..);
        assert_eq!(b2, [2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..2);
        assert_eq!(b2, [1u8, 2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..);
        assert_eq!(b2, [1u8, 2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 3..);
        assert_eq!(b2, [].as_ref());
    }

    // Consuming into_data yields the original contents as an owned slice.
    #[test]
    pub fn into_data() {
        let b = Bytes::from(b"this is a test");
        let d = b.into_data();
        assert_eq!(d.as_slice(), b"this is a test");
    }
}