|  | 
| 72 | 72 | use crate::cmp::Ordering; | 
| 73 | 73 | use crate::fmt; | 
| 74 | 74 | use crate::hash; | 
| 75 |  | -use crate::intrinsics; | 
|  | 75 | +use crate::intrinsics::{self, is_aligned_and_not_null, is_nonoverlapping}; | 
| 76 | 76 | use crate::mem::{self, MaybeUninit}; | 
| 77 | 77 | 
 | 
| 78 | 78 | #[stable(feature = "rust1", since = "1.0.0")] | 
| @@ -389,6 +389,10 @@ pub unsafe fn swap<T>(x: *mut T, y: *mut T) { | 
| 389 | 389 | #[inline] | 
| 390 | 390 | #[stable(feature = "swap_nonoverlapping", since = "1.27.0")] | 
| 391 | 391 | pub unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) { | 
|  | 392 | +    debug_assert!(is_aligned_and_not_null(x), "attempt to swap unaligned or null pointer"); | 
|  | 393 | +    debug_assert!(is_aligned_and_not_null(y), "attempt to swap unaligned or null pointer"); | 
|  | 394 | +    debug_assert!(is_nonoverlapping(x, y, count), "attempt to swap overlapping memory"); | 
|  | 395 | + | 
| 392 | 396 |     let x = x as *mut u8; | 
| 393 | 397 |     let y = y as *mut u8; | 
| 394 | 398 |     let len = mem::size_of::<T>() * count; | 
| @@ -612,6 +616,7 @@ pub unsafe fn replace<T>(dst: *mut T, mut src: T) -> T { | 
| 612 | 616 | #[inline] | 
| 613 | 617 | #[stable(feature = "rust1", since = "1.0.0")] | 
| 614 | 618 | pub unsafe fn read<T>(src: *const T) -> T { | 
|  | 619 | +    // `copy_nonoverlapping` takes care of debug_assert. | 
| 615 | 620 |     let mut tmp = MaybeUninit::<T>::uninit(); | 
| 616 | 621 |     copy_nonoverlapping(src, tmp.as_mut_ptr(), 1); | 
| 617 | 622 |     tmp.assume_init() | 
| @@ -703,6 +708,7 @@ pub unsafe fn read<T>(src: *const T) -> T { | 
| 703 | 708 | #[inline] | 
| 704 | 709 | #[stable(feature = "ptr_unaligned", since = "1.17.0")] | 
| 705 | 710 | pub unsafe fn read_unaligned<T>(src: *const T) -> T { | 
|  | 711 | +    // `copy_nonoverlapping` takes care of debug_assert. | 
| 706 | 712 |     let mut tmp = MaybeUninit::<T>::uninit(); | 
| 707 | 713 |     copy_nonoverlapping(src as *const u8, tmp.as_mut_ptr() as *mut u8, mem::size_of::<T>()); | 
| 708 | 714 |     tmp.assume_init() | 
| @@ -795,6 +801,7 @@ pub unsafe fn read_unaligned<T>(src: *const T) -> T { | 
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn write<T>(dst: *mut T, src: T) {
    // Debug-build-only precondition check: fires on a null or misaligned
    // `dst`. `debug_assert!` is compiled out in release builds, so this adds
    // no cost to optimized code.
    debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer");
    // Move `src` into `*dst` without reading or dropping the value that was
    // previously at `dst`.
    intrinsics::move_val_init(&mut *dst, src)
}
| 800 | 807 | 
 | 
| @@ -887,6 +894,7 @@ pub unsafe fn write<T>(dst: *mut T, src: T) { | 
#[inline]
#[stable(feature = "ptr_unaligned", since = "1.17.0")]
pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
    // `copy_nonoverlapping` takes care of debug_assert: the alignment/null
    // checks live inside it, so none are repeated here. (On `*mut u8` the
    // alignment requirement is trivially 1, so effectively only null is
    // caught — which is the point: `dst` may be arbitrarily aligned.)
    copy_nonoverlapping(&src as *const T as *const u8, dst as *mut u8, mem::size_of::<T>());
    // `src` was bitwise-copied into `*dst`; forget it so its destructor does
    // not run — ownership has logically moved to the destination.
    mem::forget(src);
}
| @@ -956,6 +964,7 @@ pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) { | 
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn read_volatile<T>(src: *const T) -> T {
    // Debug-build-only precondition check for a null or misaligned `src`;
    // compiled out in release builds, preserving the zero-overhead contract.
    debug_assert!(is_aligned_and_not_null(src), "attempt to read from unaligned or null pointer");
    // Volatile load: the optimizer treats this read as observable and will
    // not elide or merge it.
    intrinsics::volatile_load(src)
}
| 961 | 970 | 
 | 
| @@ -1024,6 +1033,7 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T { | 
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
    // Debug-build-only precondition check for a null or misaligned `dst`;
    // compiled out in release builds, preserving the zero-overhead contract.
    debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer");
    // Volatile store: the optimizer treats this write as observable and will
    // not elide or merge it.
    intrinsics::volatile_store(dst, src);
}
| 1029 | 1039 | 
 | 
|  | 
0 commit comments