use std::{
    borrow::Borrow,
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    mem,
    ops::{Bound, Deref, RangeBounds},
    slice,
};

use crate::{ffi, translate::*};

wrapper! {
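    // Illustrative doc comment and example for the wrapper type; the example
    // only uses the constructors defined in this file.
    /// A shared, immutable container for bytes, wrapping `GBytes`.
    ///
    /// `From` implementations that take a reference copy the data, while
    /// `from_static` and `from_owned` avoid the copy.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// // Copies the data out of the vector.
    /// let v = vec![1u8, 2, 3];
    /// let b = Bytes::from(&v);
    /// assert_eq!(b, v.as_slice());
    ///
    /// // Borrows the static buffer without copying.
    /// let s = Bytes::from_static(b"xyz");
    /// assert_eq!(s, b"xyz".as_ref());
    /// ```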
    #[doc(alias = "GBytes")]
    pub struct Bytes(Shared<ffi::GBytes>);

    match fn {
        ref => |ptr| ffi::g_bytes_ref(ptr),
        unref => |ptr| ffi::g_bytes_unref(ptr),
        type_ => || ffi::g_bytes_get_type(),
    }
}

impl Bytes {
    // Copies `data` into a newly allocated `GBytes`. Kept private: the
    // public entry point for copying construction is the `From`
    // implementation below.
    #[doc(alias = "g_bytes_new")]
    #[inline]
    fn new<T: AsRef<[u8]>>(data: T) -> Bytes {
        let data = data.as_ref();
        unsafe { from_glib_full(ffi::g_bytes_new(data.as_ptr() as *const _, data.len())) }
    }

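    // Illustrative doc comment for the zero-copy static constructor.
    /// Creates a `Bytes` that wraps `data` for the `'static` lifetime
    /// without copying it.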
    #[doc(alias = "g_bytes_new_static")]
    #[inline]
    pub fn from_static(data: &'static [u8]) -> Bytes {
        unsafe {
            from_glib_full(ffi::g_bytes_new_static(
                data.as_ptr() as *const _,
                data.len(),
            ))
        }
    }

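    // Illustrative doc comment and example for the owned, zero-copy
    // constructor; the example only uses the API defined in this file.
    /// Takes ownership of `data` and creates a new `Bytes` around it without
    /// copying. The value is dropped once the last reference is released.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let b = Bytes::from_owned(vec![1u8, 2, 3]);
    /// assert_eq!(b, [1u8, 2, 3].as_ref());
    /// ```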
    #[doc(alias = "g_bytes_new")]
    pub fn from_owned<T: AsRef<[u8]> + Send + 'static>(data: T) -> Bytes {
        // Box the value so the buffer has a stable address that the free
        // function below can reclaim.
        let data: Box<T> = Box::new(data);
        let (size, data_ptr) = {
            let data = (*data).as_ref();
            (data.len(), data.as_ptr())
        };

        // Invoked by GLib once the last reference is dropped: rebuild the
        // `Box` from the raw pointer and let it drop.
        unsafe extern "C" fn drop_box<T: AsRef<[u8]> + Send + 'static>(b: ffi::gpointer) {
            unsafe {
                let _: Box<T> = Box::from_raw(b as *mut _);
            }
        }

        unsafe {
            from_glib_full(ffi::g_bytes_new_with_free_func(
                data_ptr as *const _,
                size,
                Some(drop_box::<T>),
                Box::into_raw(data) as *mut _,
            ))
        }
    }

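    // Illustrative doc comment: `g_bytes_unref_to_data` hands the buffer
    // over without a copy when this is the sole reference, and copies
    // otherwise.
    /// Consumes the `Bytes` and returns the underlying data as a `Slice`,
    /// avoiding a copy when this is the last remaining reference.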
    #[doc(alias = "g_bytes_unref_to_data")]
    pub fn into_data(self) -> crate::collections::Slice<u8> {
        unsafe {
            let mut size = mem::MaybeUninit::uninit();
            let ret = ffi::g_bytes_unref_to_data(self.into_glib_ptr(), size.as_mut_ptr());
            crate::collections::Slice::from_glib_full_num(ret as *mut u8, size.assume_init())
        }
    }

    // Converts `range` into a concrete `(offset, length)` pair for this
    // slice, panicking if either bound falls outside the valid range.
    fn calculate_offset_size(&self, range: impl RangeBounds<usize>) -> (usize, usize) {
        let len = self.len();

        let start_offset = match range.start_bound() {
            Bound::Included(v) => *v,
            Bound::Excluded(v) => v.checked_add(1).expect("Invalid start offset"),
            Bound::Unbounded => 0,
        };
        assert!(start_offset <= len, "Start offset after valid range");

        let end_offset = match range.end_bound() {
            Bound::Included(v) => v.checked_add(1).expect("Invalid end offset"),
            Bound::Excluded(v) => *v,
            Bound::Unbounded => len,
        };
        assert!(end_offset <= len, "End offset after valid range");

        let size = end_offset.saturating_sub(start_offset);

        (start_offset, size)
    }

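    // Illustrative doc comment and example for the subslice constructor.
    /// Creates a new `Bytes` for the given `range` of `bytes`, sharing the
    /// underlying data instead of copying it.
    ///
    /// # Panics
    ///
    /// Panics if the range lies outside of `bytes`.
    ///
    /// ```
    /// use glib::Bytes;
    ///
    /// let b = Bytes::from_static(b"abcdef");
    /// assert_eq!(Bytes::from_bytes(&b, 1..3), b"bc".as_ref());
    /// ```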
    #[doc(alias = "g_bytes_new_from_bytes")]
    pub fn from_bytes(bytes: &Self, range: impl RangeBounds<usize>) -> Self {
        let (offset, size) = bytes.calculate_offset_size(range);
        unsafe {
            from_glib_full(ffi::g_bytes_new_from_bytes(
                bytes.to_glib_none().0,
                offset,
                size,
            ))
        }
    }
}

// SAFETY: `GBytes` is immutable and its reference count is updated
// atomically, so references can be shared and sent across threads.
unsafe impl Send for Bytes {}
unsafe impl Sync for Bytes {}

impl<'a, T: ?Sized + Borrow<[u8]> + 'a> From<&'a T> for Bytes {
    #[inline]
    fn from(value: &'a T) -> Bytes {
        Bytes::new(value.borrow())
    }
}

impl fmt::Debug for Bytes {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Bytes")
            .field("ptr", &ToGlibPtr::<*const _>::to_glib_none(self).0)
            .field("data", &&self[..])
            .finish()
    }
}

impl AsRef<[u8]> for Bytes {
    #[inline]
    fn as_ref(&self) -> &[u8] {
        self
    }
}

impl Deref for Bytes {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        unsafe {
            let mut len = 0;
            let ptr = ffi::g_bytes_get_data(self.to_glib_none().0, &mut len);
            // `g_bytes_get_data` may return NULL for empty data; map that to
            // an empty slice rather than building a slice from NULL.
            if ptr.is_null() || len == 0 {
                &[]
            } else {
                slice::from_raw_parts(ptr as *const u8, len)
            }
        }
    }
}

impl PartialEq for Bytes {
    #[doc(alias = "g_bytes_equal")]
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        unsafe {
            from_glib(ffi::g_bytes_equal(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            ))
        }
    }
}

impl Eq for Bytes {}

impl PartialOrd for Bytes {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for Bytes {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        unsafe {
            let ret = ffi::g_bytes_compare(
                ToGlibPtr::<*const _>::to_glib_none(self).0 as *const _,
                ToGlibPtr::<*const _>::to_glib_none(other).0 as *const _,
            );
            ret.cmp(&0)
        }
    }
}

// Implements symmetric `PartialEq`/`PartialOrd` between `$lhs` and `$rhs` by
// comparing the underlying byte slices.
macro_rules! impl_cmp {
    ($lhs:ty, $rhs:ty) => {
        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$rhs> for $lhs {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialEq<$lhs> for $rhs {
            #[inline]
            fn eq(&self, other: &$lhs) -> bool {
                self[..].eq(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$rhs> for $lhs {
            #[inline]
            fn partial_cmp(&self, other: &$rhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }

        #[allow(clippy::redundant_slicing)]
        #[allow(clippy::extra_unused_lifetimes)]
        impl<'a, 'b> PartialOrd<$lhs> for $rhs {
            #[inline]
            fn partial_cmp(&self, other: &$lhs) -> Option<Ordering> {
                self[..].partial_cmp(&other[..])
            }
        }
    };
}

impl_cmp!(Bytes, [u8]);
impl_cmp!(Bytes, &'a [u8]);
impl_cmp!(&'a Bytes, [u8]);
impl_cmp!(Bytes, Vec<u8>);
impl_cmp!(&'a Bytes, Vec<u8>);

impl Hash for Bytes {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the length first so slices that are prefixes of one another
        // still feed distinct input to the hasher, then hash the bytes.
        self.len().hash(state);
        Hash::hash_slice(self, state)
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;

    #[test]
    fn eq() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a1 = Bytes::from(abc);
        let a2 = Bytes::from(abc);
        let d = Bytes::from(def);
        assert_eq!(a1, a2);
        assert_eq!(def, d);
        assert_ne!(a1, d);
        assert_ne!(a1, def);
    }

    #[test]
    fn ord() {
        let abc: &[u8] = b"abc";
        let def: &[u8] = b"def";
        let a = Bytes::from(abc);
        let d = Bytes::from(def);
        assert!(a < d);
        assert!(a < def);
        assert!(abc < d);
        assert!(d > a);
        assert!(d > abc);
        assert!(def > a);
    }

    #[test]
    fn hash() {
        let b1 = Bytes::from(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        let b3 = Bytes::from(b"test");
        let mut set = HashSet::new();
        set.insert(b1);
        assert!(set.contains(&b2));
        assert!(!set.contains(&b3));
    }

    #[test]
    fn from_static() {
        let b1 = Bytes::from_static(b"this is a test");
        let b2 = Bytes::from(b"this is a test");
        assert_eq!(b1, b2);
    }

    #[test]
    fn from_owned() {
        let b = Bytes::from_owned(vec![1, 2, 3]);
        assert_eq!(b, [1u8, 2u8, 3u8].as_ref());
    }

    #[test]
    fn from_bytes() {
        let b1 = Bytes::from_owned(vec![1, 2, 3]);
        let b2 = Bytes::from_bytes(&b1, 1..=1);
        assert_eq!(b2, [2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 1..);
        assert_eq!(b2, [2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..2);
        assert_eq!(b2, [1u8, 2u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, ..);
        assert_eq!(b2, [1u8, 2u8, 3u8].as_ref());
        let b2 = Bytes::from_bytes(&b1, 3..);
        assert_eq!(b2, [].as_ref());
    }

    #[test]
    fn into_data() {
        let b = Bytes::from(b"this is a test");
        let d = b.into_data();
        assert_eq!(d.as_slice(), b"this is a test");
    }
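
    // Added test sketch: exercises the `Deref`/`AsRef` views over the
    // underlying buffer, including the NULL/empty case handled in `deref`.
    #[test]
    fn as_ref_deref() {
        let b = Bytes::from(b"abc");
        assert_eq!(b.as_ref(), b"abc");
        assert_eq!(&*b, b"abc");
        assert_eq!(b.len(), 3);

        let empty = Bytes::from_static(b"");
        assert!(empty.is_empty());
        assert_eq!(empty.as_ref(), b"");
    }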
}