author    Jiahao XU <Jiahao_XU@outlook.com>  2022-07-13 17:04:23 +1000
committer GitHub <noreply@github.com>        2022-07-13 09:04:23 +0200
commit    cd188cbd67c073128fdc339be8d5d67436d76f36 (patch)
tree      448ab7b1505f323296e2a94c8be2e90a0784acd0 /src
parent    10d1f6ec5c4e878aad9d13140d3479b3ea78ffcf (diff)
download  bytes-cd188cbd67c073128fdc339be8d5d67436d76f36.tar.gz
          bytes-cd188cbd67c073128fdc339be8d5d67436d76f36.tar.zst
          bytes-cd188cbd67c073128fdc339be8d5d67436d76f36.zip
Add conversion from Bytes to Vec<u8> (#547)
Signed-off-by: Jiahao XU <Jiahao_XU@outlook.com>
Co-authored-by: Alice Ryhl <aliceryhl@google.com>
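
For a quick feel for the new API, here is a minimal usage sketch. It assumes only the public `bytes` API plus the `From<Bytes> for Vec<u8>` impl added in this commit; as the diff below shows, a uniquely owned buffer is reclaimed in place, while a shared (or static) one is copied out:

    use bytes::Bytes;

    fn main() {
        // Heap-backed Bytes created from a Vec<u8>.
        let b = Bytes::from(vec![1u8, 2, 3]);

        // The new conversion: consumes the Bytes and returns an owned Vec<u8>.
        let v: Vec<u8> = Vec::from(b);
        assert_eq!(v, [1, 2, 3]);

        // Into<Vec<u8>> also works, since From is implemented.
        let v2: Vec<u8> = Bytes::from_static(b"abc").into();
        assert_eq!(v2, b"abc");
    }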
Diffstat (limited to 'src')
-rw-r--r--  src/bytes.rs      89
-rw-r--r--  src/bytes_mut.rs  23
2 files changed, 112 insertions, 0 deletions
diff --git a/src/bytes.rs b/src/bytes.rs
index 576e905..948b10e 100644
--- a/src/bytes.rs
+++ b/src/bytes.rs
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
    /// fn(data, ptr, len)
    pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
    /// fn(data, ptr, len)
+    ///
+    /// takes `Bytes` to value
+    pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
+    /// fn(data, ptr, len)
    pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
}
@@ -845,6 +849,13 @@ impl From<String> for Bytes {
    }
}

+impl From<Bytes> for Vec<u8> {
+    fn from(bytes: Bytes) -> Vec<u8> {
+        let bytes = mem::ManuallyDrop::new(bytes);
+        unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
+    }
+}
+
// ===== impl Vtable =====

impl fmt::Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {
const STATIC_VTABLE: Vtable = Vtable {
    clone: static_clone,
+    to_vec: static_to_vec,
    drop: static_drop,
};
@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
    Bytes::from_static(slice)
}

+unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    let slice = slice::from_raw_parts(ptr, len);
+    slice.to_vec()
+}
+
unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
    // nothing to drop for &'static [u8]
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
    clone: promotable_even_clone,
+    to_vec: promotable_even_to_vec,
    drop: promotable_even_drop,
};

static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
    clone: promotable_odd_clone,
+    to_vec: promotable_odd_to_vec,
    drop: promotable_odd_drop,
};
@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize
    }
}

+unsafe fn promotable_to_vec(
+    data: &AtomicPtr<()>,
+    ptr: *const u8,
+    len: usize,
+    f: fn(*mut ()) -> *mut u8,
+) -> Vec<u8> {
+    let shared = data.load(Ordering::Acquire);
+    let kind = shared as usize & KIND_MASK;
+
+    if kind == KIND_ARC {
+        shared_to_vec_impl(shared.cast(), ptr, len)
+    } else {
+        // If Bytes holds a Vec, then the offset must be 0.
+        debug_assert_eq!(kind, KIND_VEC);
+
+        let buf = f(shared);
+
+        let cap = (ptr as usize - buf as usize) + len;
+
+        // Copy back buffer
+        ptr::copy(ptr, buf, len);
+
+        Vec::from_raw_parts(buf, len, cap)
+    }
+}
+
+unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    promotable_to_vec(data, ptr, len, |shared| {
+        ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
+    })
+}
+
unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize)
    }
}

+unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    promotable_to_vec(data, ptr, len, |shared| shared.cast())
+}
+
unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
    data.with_mut(|shared| {
        let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignm
static SHARED_VTABLE: Vtable = Vtable {
    clone: shared_clone,
+    to_vec: shared_to_vec,
    drop: shared_drop,
};
@@ -979,6 +1035,39 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte
    shallow_clone_arc(shared as _, ptr, len)
}

+unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
+    // Check that the ref_cnt is 1 (unique).
+    //
+    // If it is unique, then it is set to 0 with AcqRel fence for the same
+    // reason in release_shared.
+    //
+    // Otherwise, we take the other branch and call release_shared.
+    if (*shared)
+        .ref_cnt
+        .compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed)
+        .is_ok()
+    {
+        let buf = (*shared).buf;
+        let cap = (*shared).cap;
+
+        // Deallocate Shared
+        drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));
+
+        // Copy back buffer
+        ptr::copy(ptr, buf, len);
+
+        Vec::from_raw_parts(buf, len, cap)
+    } else {
+        let v = slice::from_raw_parts(ptr, len).to_vec();
+        release_shared(shared);
+        v
+    }
+}
+
+unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
+}
+
unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    data.with_mut(|shared| {
        release_shared(shared.cast());
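
Aside on the two promotable vtables above: they differ only in how they recover the buffer pointer. An even buffer address has a free low bit, so the kind flag is tagged into it (hence `addr & !KIND_MASK` in promotable_even_to_vec); an odd address already has its low bit set, so it is stored untouched and promotable_odd_to_vec just casts. A standalone sketch of that tagging, with the crate-internal constant values written out as assumptions (KIND_MASK and KIND_VEC are not public API):

    // Mirrors bytes' internal constants; values are assumptions here.
    const KIND_MASK: usize = 0b1;
    const KIND_VEC: usize = 0b1; // low bit set => still backed by a Vec

    fn main() {
        // Even address: the low bit carries the flag and must be stripped.
        let even_addr: usize = 0x1000;
        let tagged = even_addr | KIND_VEC;
        assert_eq!(tagged & KIND_MASK, KIND_VEC);
        assert_eq!(tagged & !KIND_MASK, even_addr);

        // Odd address: the low bit is already 1 and doubles as KIND_VEC,
        // so the pointer is usable as-is.
        let odd_addr: usize = 0x1001;
        assert_eq!(odd_addr & KIND_MASK, KIND_VEC);
    }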
diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs
index 25f0ecc..3026f09 100644
--- a/src/bytes_mut.rs
+++ b/src/bytes_mut.rs
@@ -1611,6 +1611,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize)
static SHARED_VTABLE: Vtable = Vtable {
    clone: shared_v_clone,
+    to_vec: shared_v_to_vec,
    drop: shared_v_drop,
};
@@ -1622,6 +1623,28 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By
    Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
}

+unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
+    let shared: *mut Shared = data.load(Ordering::Relaxed).cast();
+
+    if (*shared).is_unique() {
+        let shared = &mut *shared;
+
+        // Drop shared
+        let mut vec = mem::replace(&mut shared.vec, Vec::new());
+        release_shared(shared);
+
+        // Copy back buffer
+        ptr::copy(ptr, vec.as_mut_ptr(), len);
+        vec.set_len(len);
+
+        vec
+    } else {
+        let v = slice::from_raw_parts(ptr, len).to_vec();
+        release_shared(shared);
+        v
+    }
+}
+
unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
    data.with_mut(|shared| {
        release_shared(*shared as *mut Shared);
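
Behaviorally, both shared vtables make the same choice: reclaim the buffer when the handle is unique, copy otherwise. A small sketch against the public API (the comments refer to the internals in the diff above):

    use bytes::Bytes;

    fn main() {
        let a = Bytes::from(vec![7u8; 1024]);
        let b = a.clone(); // two handles => promoted to ref-counted storage

        // Not unique: compare_exchange(1, 0) fails, so the bytes are copied
        // into a fresh Vec and release_shared drops one reference.
        let copied: Vec<u8> = a.into();

        // Now unique: the swap to 0 succeeds, Shared is deallocated, and the
        // original allocation is rebuilt with Vec::from_raw_parts (ptr::copy
        // only shifts the view back to the buffer start; nothing new is
        // allocated).
        let reclaimed: Vec<u8> = b.into();

        assert_eq!(copied, reclaimed);
    }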