author    nanpuyue <nanpuyue@gmail.com>  2019-07-31 08:05:51 +0800
committer Douman <douman@gmx.se>  2019-08-16 15:49:37 +0200
commit    79e4b2847f27137faaf149d116a352cbeae47fd1 (patch)
tree      99def22b279c6cf3c9726506559e002d3fee7050 /src
parent    b1c17169d94d216d5532dd630a50a4c8c7fdeea6 (diff)
use `mem::MaybeUninit` instead of `mem::uninitialized`
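
For context, the snippet below is a minimal, self-contained sketch of the substitution this patch applies everywhere; the `InlineBuf` type and `from_slice` function are illustrative stand-ins, not part of the crate. `MaybeUninit::uninit().assume_init()` avoids the deprecated `mem::uninitialized()`, but the value is still logically uninitialized, so every field that is later read must be written first, just as the inline `Inner` code does.

    use std::mem::MaybeUninit;
    use std::ptr;

    // Hypothetical stand-in for the crate's inline `Inner` representation.
    struct InlineBuf {
        data: [u8; 32],
        len: usize,
    }

    fn from_slice(src: &[u8]) -> InlineBuf {
        assert!(src.len() <= 32);
        unsafe {
            // Old: `let mut buf: InlineBuf = mem::uninitialized();`
            // New: obtain the uninitialized value through MaybeUninit instead,
            // then initialize the fields before the value is used.
            let mut buf: InlineBuf = MaybeUninit::uninit().assume_init();
            ptr::copy_nonoverlapping(src.as_ptr(), buf.data.as_mut_ptr(), src.len());
            buf.len = src.len();
            buf
        }
    }

A more thorough rework would build the value entirely inside a `MaybeUninit` and call `assume_init` only once every field is written; this patch keeps the original structure and simply replaces the deprecated call.
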
Diffstat (limited to 'src')
-rw-r--r--  src/bytes.rs | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/bytes.rs b/src/bytes.rs
index 9db6362..3f8b23a 100644
--- a/src/bytes.rs
+++ b/src/bytes.rs
@@ -1666,7 +1666,7 @@ impl<'a> From<&'a [u8]> for BytesMut {
BytesMut::new()
} else if len <= INLINE_CAP {
unsafe {
- let mut inner: Inner = mem::uninitialized();
+ let mut inner: Inner = mem::MaybeUninit::uninit().assume_init();
// Set inline mask
inner.arc = AtomicPtr::new(KIND_INLINE as *mut Shared);
@@ -1858,7 +1858,7 @@ impl Inner {
if capacity <= INLINE_CAP {
unsafe {
// Using uninitialized memory is ~30% faster
- let mut inner: Inner = mem::uninitialized();
+ let mut inner: Inner = mem::MaybeUninit::uninit().assume_init();
inner.arc = AtomicPtr::new(KIND_INLINE as *mut Shared);
inner
}
@@ -2176,7 +2176,7 @@ impl Inner {
if self.is_inline_or_static() {
// In this case, a shallow_clone still involves copying the data.
- let mut inner: Inner = mem::uninitialized();
+ let mut inner: Inner = mem::MaybeUninit::uninit().assume_init();
ptr::copy_nonoverlapping(
self,
&mut inner,