use std::{
    borrow::Borrow,
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    mem,
    ops::{Bound, Deref, RangeBounds},
    slice,
};

use crate::{ffi, translate::*};

wrapper! {
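    // rustdoc-stripper-ignore-next
    /// A shared immutable byte slice (the equivalent of `Rc<[u8]>`).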
16 #[doc(alias = "GBytes")]
90 pub struct Bytes(Shared<ffi::GBytes>);
91
92 match fn {
93 ref => |ptr| ffi::g_bytes_ref(ptr),
94 unref => |ptr| ffi::g_bytes_unref(ptr),
95 type_ => || ffi::g_bytes_get_type(),
96 }
97}
98
99impl Bytes {
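    // rustdoc-stripper-ignore-next
    /// Copies `data` into a new shared byte slice.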
100 #[doc(alias = "g_bytes_new")]
133 #[inline]
134 fn new<T: AsRef<[u8]>>(data: T) -> Bytes {
135 let data = data.as_ref();
136 unsafe { from_glib_full(ffi::g_bytes_new(data.as_ptr() as *const _, data.len())) }
137 }
138
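    // NOTE: the doc examples below assume the usual `glib` crate re-export
    // (`glib::Bytes`); adjust the path if this module lives elsewhere.
    // rustdoc-stripper-ignore-next
    /// Creates a `Bytes` that borrows `data` for the `'static` lifetime,
    /// without copying it.
    ///
    /// # Examples
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// static PAYLOAD: &[u8] = b"hello";
    /// let bytes = Bytes::from_static(PAYLOAD);
    /// assert_eq!(bytes, PAYLOAD);
    /// ```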
139 #[doc(alias = "g_bytes_new_static")]
142 #[inline]
143 pub fn from_static(data: &'static [u8]) -> Bytes {
144 unsafe {
145 from_glib_full(ffi::g_bytes_new_static(
146 data.as_ptr() as *const _,
147 data.len(),
148 ))
149 }
150 }
151
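    // rustdoc-stripper-ignore-next
    /// Takes ownership of `data` and exposes it as a `Bytes`, without
    /// copying; `data` is dropped together with the last reference.
    ///
    /// # Examples
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let bytes = Bytes::from_owned(vec![1u8, 2, 3]);
    /// assert_eq!(bytes, [1u8, 2, 3].as_ref());
    /// ```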
152 #[doc(alias = "g_bytes_new")]
155 pub fn from_owned<T: AsRef<[u8]> + Send + 'static>(data: T) -> Bytes {
156 let data: Box<T> = Box::new(data);
157 let (size, data_ptr) = {
158 let data = (*data).as_ref();
159 (data.len(), data.as_ptr())
160 };
161
162 unsafe extern "C" fn drop_box<T: AsRef<[u8]> + Send + 'static>(b: ffi::gpointer) {
163 let _: Box<T> = Box::from_raw(b as *mut _);
164 }
165
166 unsafe {
167 from_glib_full(ffi::g_bytes_new_with_free_func(
168 data_ptr as *const _,
169 size,
170 Some(drop_box::<T>),
171 Box::into_raw(data) as *mut _,
172 ))
173 }
174 }
175
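    // rustdoc-stripper-ignore-next
    /// Consumes the `Bytes` and returns the underlying data as a `Slice<u8>`.
    ///
    /// If this was the last reference the data is taken over without copying,
    /// otherwise it is copied.
    ///
    /// # Examples
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let bytes = Bytes::from_owned(vec![1u8, 2, 3]);
    /// assert_eq!(bytes.into_data().as_slice(), &[1u8, 2, 3]);
    /// ```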
176 #[doc(alias = "g_bytes_unref_to_data")]
182 pub fn into_data(self) -> crate::collections::Slice<u8> {
183 unsafe {
184 let mut size = mem::MaybeUninit::uninit();
185 let ret = ffi::g_bytes_unref_to_data(self.into_glib_ptr(), size.as_mut_ptr());
186 crate::collections::Slice::from_glib_full_num(ret as *mut u8, size.assume_init())
187 }
188 }
189
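    // Converts an arbitrary `RangeBounds` over `self` into a concrete
    // `(offset, size)` pair, asserting that the range lies within the data.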
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (usize, usize) {
        let len = self.len();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset < len, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => len,
        };
        assert!(end_offset <= len, "End offset after valid range");

        let size = end_offset.saturating_sub(start_offset);

        (start_offset, size)
    }

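    // rustdoc-stripper-ignore-next
    /// Creates a new `Bytes` for the given `range` of `bytes`, sharing the
    /// underlying memory with it.
    ///
    /// # Panics
    ///
    /// Panics if `range` is out of bounds for `bytes`.
    ///
    /// # Examples
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let all = Bytes::from_owned(vec![1u8, 2, 3]);
    /// let tail = Bytes::from_bytes(&all, 1..);
    /// assert_eq!(tail, [2u8, 3].as_ref());
    /// ```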
212 #[doc(alias = "g_bytes_new_from_bytes")]
215 pub fn from_bytes(bytes: &Self, range: impl RangeBounds<usize>) -> Self {
216 let (offset, size) = bytes.calculate_offset_size(range);
217 unsafe {
218 from_glib_full(ffi::g_bytes_new_from_bytes(
219 bytes.to_glib_none().0,
220 offset,
221 size,
222 ))
223 }
224 }
225}
226
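// SAFETY: `GBytes` is immutable and its reference count is managed
// atomically, so references can be sent to and shared between threads.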
unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}

impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for Bytes {
    #[inline]
    fn from(value: &'a T) -> Bytes {
        Bytes::new(value.borrow())
    }
}

impl fmt::Debug for Bytes {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Bytes")
            .field("ptr", &ToGlibPtr::<*const _>::to_glib_none(self).0)
            .field("data", &&self[..])
            .finish()
    }
}

impl AsRef<[u8]> for Bytes {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self
    }
}

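// Dereferences to the underlying bytes without copying; a `NULL` or empty
// data pointer is surfaced as an empty slice.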
impl Deref for Bytes {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        unsafe {
            let mut len = 0;
            let ptr = ffi::g_bytes_get_data(self.to_glib_none().0, &mut len);
            if ptr.is_null() || len == 0 {
                &[]
            } else {
                slice::from_raw_parts(ptr as *const u8, len)
            }
        }
    }
}

impl PartialEq for Bytes {
    #[doc(alias = "g_bytes_equal")]
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        unsafe {
            from_glib(ffi::g_bytes_equal(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            ))
        }
    }
}

impl Eq for Bytes {}

impl PartialOrd for Bytes {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Bytes {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        unsafe {
            let ret = ffi::g_bytes_compare(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            );
            ret.cmp(&0)
        }
    }
}

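// Generates symmetric `PartialEq`/`PartialOrd` impls between `Bytes`-like
// and plain byte-slice types by comparing the underlying slices.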
macro_rules! impl_cmp {
    ($lhs:ty, $rhs:ty) => {
        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}

impl_cmp!(Bytes, [u8]);
impl_cmp!(Bytes, &'a [u8]);
impl_cmp!(&'a Bytes, [u8]);
impl_cmp!(Bytes, Vec<u8>);
impl_cmp!(&'a Bytes, Vec<u8>);

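// Hashes the length and the byte contents, so equal `Bytes` values hash
// identically.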
impl Hash for Bytes {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.len().hash(state);
        Hash::hash_slice(self, state)
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    #[test]
    fn eq() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a1 = Bytes::from(abc);
        let a2 = Bytes::from(abc);
        let d = Bytes::from(def);
        assert_eq!(a1, a2);
        assert_eq!(def, d);
        assert_ne!(a1, d);
        assert_ne!(a1, def);
    }

    #[test]
    fn ord() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a = Bytes::from(abc);
        let d = Bytes::from(def);
        assert!(a < d);
        assert!(a < def);
        assert!(abc < d);
        assert!(d > a);
        assert!(d > abc);
        assert!(def > a);
    }

    #[test]
    fn hash() {
        let b1 = Bytes::from(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        let b3 = Bytes::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }

    #[test]
    fn from_static() {
        let b1 = Bytes::from_static(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        assert_eq!(b1, b2);
    }

    #[test]
    fn from_owned() {
        let b = Bytes::from_owned(vec![1, 2, 3]);
        assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    fn from_bytes() {
        let b1 = Bytes::from_owned(vec![1, 2, 3]);
        let b2 = Bytes::from_bytes(&b1, 1..=1);
        assert_eq!(b2, [2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 1..);
        assert_eq!(b2, [2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..2);
        assert_eq!(b2, [1u8, 2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..);
        assert_eq!(b2, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    fn into_data() {
        let b = Bytes::from(b"this is a test");
        let d = b.into_data();
        assert_eq!(d.as_slice(), b"this is a test");
    }
}