-rw-r--r--   src/bytes.rs                   | 89
-rw-r--r--   src/bytes_mut.rs               | 23
-rw-r--r--   tests/test_bytes.rs            | 62
-rw-r--r--   tests/test_bytes_odd_alloc.rs  | 29
-rw-r--r--   tests/test_bytes_vec_alloc.rs  | 29
5 files changed, 232 insertions(+), 0 deletions(-)
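
This patch threads a new `to_vec` function pointer through every `Vtable` so that the new `impl From<Bytes> for Vec<u8>` can dispatch to a storage-specific conversion: static slices copy, while vec-backed and arc-backed buffers are reclaimed in place when the handle is the sole owner. From the caller's side it is an ordinary `From`/`Into` conversion; a minimal usage sketch (the in-place reclaim is an internal optimization, not a per-call guarantee of the API):

```rust
use bytes::Bytes;

fn main() {
    let bytes = Bytes::from(vec![1u8, 2, 3]);

    // Consumes the `Bytes`. If this handle is the only owner, the
    // original allocation is reused; otherwise the data is copied.
    let vec: Vec<u8> = Vec::from(bytes);
    assert_eq!(vec, [1, 2, 3]);
}
```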
diff --git a/src/bytes.rs b/src/bytes.rs
index 576e905..948b10e 100644
--- a/src/bytes.rs
+++ b/src/bytes.rs
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
     /// fn(data, ptr, len)
     pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
     /// fn(data, ptr, len)
+    ///
+    /// Consumes the `Bytes`, returning the underlying `Vec<u8>`
+    pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
+    /// fn(data, ptr, len)
     pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
 }

@@ -845,6 +849,13 @@ impl From<String> for Bytes {
     }
 }

+impl From<Bytes> for Vec<u8> {
+    fn from(bytes: Bytes) -> Vec<u8> {
+        let bytes = mem::ManuallyDrop::new(bytes);
+        unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
+    }
+}
+
 // ===== impl Vtable =====

 impl fmt::Debug for Vtable {
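
The `mem::ManuallyDrop` in the `From` impl above is what keeps ownership sound: the vtable call consumes the buffer, so the normal `Bytes` destructor must be suppressed or the allocation would be released twice. A standalone sketch of the same move-out-and-skip-drop pattern, using a hypothetical `Handle` type rather than the crate's internals:

```rust
use std::mem::ManuallyDrop;

struct Handle {
    buf: Vec<u8>, // stands in for the raw fields a real handle would carry
}

impl Drop for Handle {
    fn drop(&mut self) {
        // In the real type this would release the shared buffer.
    }
}

impl From<Handle> for Vec<u8> {
    fn from(h: Handle) -> Vec<u8> {
        // Suppress `Handle::drop` so ownership of the buffer can be
        // moved out exactly once, mirroring `From<Bytes> for Vec<u8>`.
        let mut h = ManuallyDrop::new(h);
        std::mem::take(&mut h.buf)
    }
}

fn main() {
    let v: Vec<u8> = Handle { buf: vec![1, 2, 3] }.into();
    assert_eq!(v, [1, 2, 3]);
}
```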
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {

 const STATIC_VTABLE: Vtable = Vtable {
     clone: static_clone,
+    to_vec: static_to_vec,
     drop: static_drop,
 };

@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
     Bytes::from_static(slice)
 }

+unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    let slice = slice::from_raw_parts(ptr, len);
+    slice.to_vec()
+}
+
 unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
     // nothing to drop for &'static [u8]
 }
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {

 static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
     clone: promotable_even_clone,
+    to_vec: promotable_even_to_vec,
     drop: promotable_even_drop,
 };

 static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
     clone: promotable_odd_clone,
+    to_vec: promotable_odd_to_vec,
     drop: promotable_odd_drop,
 };

@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
     }
 }

+unsafe fn promotable_to_vec(
+    data: &AtomicPtr<()>,
+    ptr: *const u8,
+    len: usize,
+    f: fn(*mut ()) -> *mut u8,
+) -> Vec<u8> {
+    let shared = data.load(Ordering::Acquire);
+    let kind = shared as usize & KIND_MASK;
+
+    if kind == KIND_ARC {
+        shared_to_vec_impl(shared.cast(), ptr, len)
+    } else {
+        // If Bytes holds a Vec, then the offset must be 0.
+        debug_assert_eq!(kind, KIND_VEC);
+
+        let buf = f(shared);
+
+        let cap = (ptr as usize - buf as usize) + len;
+
+        // Copy back buffer
+        ptr::copy(ptr, buf, len);
+
+        Vec::from_raw_parts(buf, len, cap)
+    }
+}
+
+unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    promotable_to_vec(data, ptr, len, |shared| {
+        ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
+    })
+}
+
 unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
     data.with_mut(|shared| {
         let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
     }
 }

+unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    promotable_to_vec(data, ptr, len, |shared| shared.cast())
+}
+
 unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
     data.with_mut(|shared| {
         let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm

 static SHARED_VTABLE: Vtable = Vtable {
     clone: shared_clone,
+    to_vec: shared_to_vec,
     drop: shared_drop,
 };

@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
     shallow_clone_arc(shared as _, ptr, len)
 }

+unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
+    // Check that the ref_cnt is 1 (unique).
+    //
+    // If it is unique, then it is set to 0 with an AcqRel fence for the
+    // same reason as in release_shared.
+    //
+    // Otherwise, we take the other branch and call release_shared.
+    if (*shared)
+        .ref_cnt
+        .compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
+        .is_ok()
+    {
+        let buf = (*shared).buf;
+        let cap = (*shared).cap;
+
+        // Deallocate Shared
+        drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));
+
+        // Copy back buffer
+        ptr::copy(ptr, buf, len);
+
+        Vec::from_raw_parts(buf, len, cap)
+    } else {
+        let v = slice::from_raw_parts(ptr, len).to_vec();
+        release_shared(shared);
+        v
+    }
+}
+
+unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
+}
+
 unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
     data.with_mut(|shared| {
         release_shared(shared.cast());
diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs
index 25f0ecc..3026f09 100644
--- a/src/bytes_mut.rs
+++ b/src/bytes_mut.rs
@@ -1611,6 +1611,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)

 static SHARED_VTABLE: Vtable = Vtable {
     clone: shared_v_clone,
+    to_vec: shared_v_to_vec,
     drop: shared_v_drop,
 };

@@ -1622,6 +1623,28 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
     Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
 }

+unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    let shared: *mut Shared = data.load(Ordering::Relaxed).cast();
+
+    if (*shared).is_unique() {
+        let shared = &mut *shared;
+
+        // Drop shared
+        let mut vec = mem::replace(&mut shared.vec, Vec::new());
+        release_shared(shared);
+
+        // Copy back buffer
+        ptr::copy(ptr, vec.as_mut_ptr(), len);
+        vec.set_len(len);
+
+        vec
+    } else {
+        let v = slice::from_raw_parts(ptr, len).to_vec();
+        release_shared(shared);
+        v
+    }
+}
+
 unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
     data.with_mut(|shared| {
         release_shared(*shared as *mut Shared);
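
All of the shared `to_vec` implementations make the same decision: if this handle is the sole owner (`ref_cnt == 1` via `compare_exchange` in `shared_to_vec_impl`, or `is_unique()` in `bytes_mut`), take the buffer over; otherwise copy the bytes out and `release_shared`. In safe code the shape of that fast/slow split is `Arc::try_unwrap`; a sketch of the idea, not the crate's code:

```rust
use std::sync::Arc;

// Reclaim the buffer when unique, copy when shared -- the same
// fast/slow split as `shared_to_vec_impl`, expressed with `Arc`.
fn into_vec(data: Arc<Vec<u8>>) -> Vec<u8> {
    match Arc::try_unwrap(data) {
        Ok(vec) => vec,                 // sole owner: no copy
        Err(shared) => shared.to_vec(), // still shared: copy out
    }
}

fn main() {
    let a = Arc::new(vec![1u8, 2, 3]);
    let b = Arc::clone(&a);
    assert_eq!(into_vec(a), [1, 2, 3]); // copies; `b` is still alive
    assert_eq!(into_vec(b), [1, 2, 3]); // unique now; reclaims the buffer
}
```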
diff --git a/tests/test_bytes.rs b/tests/test_bytes.rs
index ef7512b..6bf01d6 100644
--- a/tests/test_bytes.rs
+++ b/tests/test_bytes.rs
@@ -1065,3 +1065,65 @@ fn bytes_into_vec() {
     let vec: Vec<u8> = bytes.into();
     assert_eq!(&vec, prefix);
 }
+
+#[test]
+fn test_bytes_into_vec() {
+    // Test STATIC_VTABLE.to_vec
+    let bs = b"1b23exfcz3r";
+    let vec: Vec<u8> = Bytes::from_static(bs).into();
+    assert_eq!(&*vec, bs);
+
+    // Test bytes_mut.SHARED_VTABLE.to_vec impl
+    let mut bytes_mut: BytesMut = bs[..].into();
+
+    // Set kind to KIND_ARC so that after freeze, Bytes will use bytes_mut.SHARED_VTABLE
+    drop(bytes_mut.split_off(bs.len()));
+
+    let b1 = bytes_mut.freeze();
+    let b2 = b1.clone();
+
+    // shared.is_unique() == false
+    assert_eq!(&*Vec::from(b2), bs);
+
+    // shared.is_unique() == true
+    assert_eq!(&*Vec::from(b1), bs);
+
+    // Test bytes_mut.SHARED_VTABLE.to_vec impl where offset != 0
+    let mut bytes_mut1: BytesMut = bs[..].into();
+    let bytes_mut2 = bytes_mut1.split_off(9);
+
+    let b1 = bytes_mut1.freeze();
+    let b2 = bytes_mut2.freeze();
+
+    assert_eq!(Vec::from(b2), bs[9..]);
+    assert_eq!(Vec::from(b1), bs[..9]);
+}
+
+#[test]
+fn test_bytes_into_vec_promotable_even() {
+    let vec = vec![33u8; 1024];
+
+    // Test case where kind == KIND_VEC
+    let b1 = Bytes::from(vec.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 1
+    let b1 = Bytes::from(vec.clone());
+    drop(b1.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 2
+    let b1 = Bytes::from(vec.clone());
+    let b2 = b1.clone();
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
+    assert_eq!(Vec::from(b2), vec);
+
+    // Test case where offset != 0
+    let mut b1 = Bytes::from(vec.clone());
+    let b2 = b1.split_off(20);
+
+    assert_eq!(Vec::from(b2), vec[20..]);
+    assert_eq!(Vec::from(b1), vec[..20]);
+}
diff --git a/tests/test_bytes_odd_alloc.rs b/tests/test_bytes_odd_alloc.rs
index 1c243cb..27ed877 100644
--- a/tests/test_bytes_odd_alloc.rs
+++ b/tests/test_bytes_odd_alloc.rs
@@ -66,3 +66,32 @@ fn test_bytes_clone_drop() {
     let b1 = Bytes::from(vec);
     let _b2 = b1.clone();
 }
+
+#[test]
+fn test_bytes_into_vec() {
+    let vec = vec![33u8; 1024];
+
+    // Test case where kind == KIND_VEC
+    let b1 = Bytes::from(vec.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 1
+    let b1 = Bytes::from(vec.clone());
+    drop(b1.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 2
+    let b1 = Bytes::from(vec.clone());
+    let b2 = b1.clone();
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
+    assert_eq!(Vec::from(b2), vec);
+
+    // Test case where offset != 0
+    let mut b1 = Bytes::from(vec.clone());
+    let b2 = b1.split_off(20);
+
+    assert_eq!(Vec::from(b2), vec[20..]);
+    assert_eq!(Vec::from(b1), vec[..20]);
+}
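
The `offset != 0` test cases above exercise the copy-back step: a `Bytes` view may begin partway into its allocation, so `to_vec` slides the `len` live bytes to the front of the buffer before rebuilding the `Vec`. `ptr::copy` has memmove semantics, so the overlap between source and destination is fine. A self-contained illustration of that trick on a plain `Vec`, using nothing beyond std:

```rust
use std::ptr;

fn main() {
    let mut v: Vec<u8> = (0..8).collect();
    let offset = 3;
    let len = v.len() - offset;

    unsafe {
        let buf = v.as_mut_ptr();
        // Overlapping copy: slide the window starting at `offset` back
        // to the front of the allocation, as `promotable_to_vec` does.
        ptr::copy(buf.add(offset), buf, len);
        v.set_len(len);
    }

    assert_eq!(v, [3, 4, 5, 6, 7]);
}
```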
diff --git a/tests/test_bytes_vec_alloc.rs b/tests/test_bytes_vec_alloc.rs
index 752009f..107e56e 100644
--- a/tests/test_bytes_vec_alloc.rs
+++ b/tests/test_bytes_vec_alloc.rs
@@ -112,3 +112,32 @@ fn invalid_ptr<T>(addr: usize) -> *mut T {
     debug_assert_eq!(ptr as usize, addr);
     ptr.cast::<T>()
 }
+
+#[test]
+fn test_bytes_into_vec() {
+    let vec = vec![33u8; 1024];
+
+    // Test case where kind == KIND_VEC
+    let b1 = Bytes::from(vec.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 1
+    let b1 = Bytes::from(vec.clone());
+    drop(b1.clone());
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where kind == KIND_ARC, ref_cnt == 2
+    let b1 = Bytes::from(vec.clone());
+    let b2 = b1.clone();
+    assert_eq!(Vec::from(b1), vec);
+
+    // Test case where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
+    assert_eq!(Vec::from(b2), vec);
+
+    // Test case where offset != 0
+    let mut b1 = Bytes::from(vec.clone());
+    let b2 = b1.split_off(20);
+
+    assert_eq!(Vec::from(b2), vec[20..]);
+    assert_eq!(Vec::from(b1), vec[..20]);
+}
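
The odd-allocation harness exists because the promotable vtables tag the kind in the low bit of the stored pointer (`shared as usize & KIND_MASK` in `promotable_to_vec`): an even buffer address leaves that bit free for the tag, while an odd address must take the untagged `PROMOTABLE_ODD_VTABLE` path, whose recovery function is just `shared.cast()`. A minimal sketch of the tagging arithmetic; the `KIND_*` constants are redefined here for illustration and assumed to match the crate's values:

```rust
// Low-bit pointer tagging, as relied on by the promotable vtables.
const KIND_VEC: usize = 0b1;
const KIND_MASK: usize = 0b1;

fn main() {
    let buf: Vec<u8> = vec![1, 2, 3];
    let addr = buf.as_ptr() as usize;

    if addr & KIND_MASK == 0 {
        // Even address: the low bit is free to carry the kind tag.
        let tagged = addr | KIND_VEC;
        // Masking the tag off recovers the original buffer address,
        // as `promotable_even_to_vec` does via `ptr_map`.
        assert_eq!(tagged & !KIND_MASK, addr);
    } else {
        // Odd address: no room for a tag, so the pointer is stored
        // untouched (`promotable_odd_to_vec` uses it unchanged).
    }
}
```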