core/ptr/const_ptr.rs
use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::{self, SizedTypeProperties};
use crate::slice::{self, SliceIndex};

impl<T: ?Sized> *const T {
    /// Returns `true` if the pointer is null.
    ///
    /// Note that unsized types have many possible null pointers, as only the
    /// raw data pointer is considered, not their length, vtable, etc.
    /// Therefore, two pointers that are null may still not compare equal to
    /// each other.
    ///
    /// # Panics during const evaluation
    ///
    /// If this method is used during const evaluation, and `self` is a pointer
    /// that is offset beyond the bounds of the memory it initially pointed to,
    /// then there might not be enough information to determine whether the
    /// pointer is null. This is because the absolute address in memory is not
    /// known at compile time. If the nullness of the pointer cannot be
    /// determined, this method will panic.
    ///
    /// In-bounds pointers are never null, so the method will never panic for
    /// such pointers.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "Follow the rabbit";
    /// let ptr: *const u8 = s.as_ptr();
    /// assert!(!ptr.is_null());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[rustc_diagnostic_item = "ptr_const_is_null"]
    #[inline]
    #[rustc_allow_const_fn_unstable(const_eval_select)]
    pub const fn is_null(self) -> bool {
        // Compare via a cast to a thin pointer, so that fat pointers consider
        // only their "data" part for null-ness.
        let ptr = self as *const u8;
        const_eval_select!(
            @capture { ptr: *const u8 } -> bool:
            // This use of `const_raw_ptr_comparison` has been explicitly blessed by t-lang.
            if const #[rustc_allow_const_fn_unstable(const_raw_ptr_comparison)] {
                match (ptr).guaranteed_eq(null_mut()) {
                    Some(res) => res,
                    // To remain maximally conservative, we stop execution when we don't
                    // know whether the pointer is null or not.
                    // We can *not* return `false` here, that would be unsound in `NonNull::new`!
                    None => panic!("null-ness of this pointer cannot be determined in const context"),
                }
            } else {
                ptr.addr() == 0
            }
        )
    }

    /// Casts to a pointer of another type.
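    ///
    /// # Examples
    ///
    /// One possible use (a small illustration, not exhaustive): casting a
    /// `*const u32` to a `*const u8` to inspect the value's first byte.
    ///
    /// ```
    /// let v = 0x12345678_u32;
    /// let ptr: *const u32 = &v;
    /// let byte_ptr: *const u8 = ptr.cast::<u8>();
    /// // Reading one byte is in bounds, and `u8` has no alignment requirement.
    /// let first = unsafe { *byte_ptr };
    /// // Which byte comes first depends on the target's endianness.
    /// assert!(first == 0x78 || first == 0x12);
    /// ```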
    #[stable(feature = "ptr_cast", since = "1.38.0")]
    #[rustc_const_stable(feature = "const_ptr_cast", since = "1.38.0")]
    #[rustc_diagnostic_item = "const_ptr_cast"]
    #[inline(always)]
    pub const fn cast<U>(self) -> *const U {
        self as _
    }

    /// Tries to cast to a pointer of another type by checking alignment.
    ///
    /// If the pointer is properly aligned for the target type, it will be
    /// cast to the target type. Otherwise, `None` is returned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(pointer_try_cast_aligned)]
    ///
    /// let aligned: *const u8 = 0x1000 as _;
    ///
    /// // 0x1000 is a multiple of `i32`'s alignment (at most 4 bytes), so this succeeds
    /// assert!(aligned.try_cast_aligned::<i32>().is_some());
    ///
    /// let unaligned: *const u8 = 0x1001 as _;
    ///
    /// // `i32` needs at least 2-byte alignment, and 0x1001 is odd, so this fails
    /// assert!(unaligned.try_cast_aligned::<i32>().is_none());
    /// ```
    #[unstable(feature = "pointer_try_cast_aligned", issue = "141221")]
    #[must_use = "this returns the result of the operation, \
                  without modifying the original"]
    #[inline]
    pub fn try_cast_aligned<U>(self) -> Option<*const U> {
        if self.is_aligned_to(align_of::<U>()) { Some(self.cast()) } else { None }
    }

96
97 /// Uses the address value in a new pointer of another type.
98 ///
99 /// This operation will ignore the address part of its `meta` operand and discard existing
100 /// metadata of `self`. For pointers to a sized types (thin pointers), this has the same effect
101 /// as a simple cast. For pointers to an unsized type (fat pointers) this recombines the address
102 /// with new metadata such as slice lengths or `dyn`-vtable.
103 ///
104 /// The resulting pointer will have provenance of `self`. This operation is semantically the
105 /// same as creating a new pointer with the data pointer value of `self` but the metadata of
106 /// `meta`, being fat or thin depending on the `meta` operand.
107 ///
108 /// # Examples
109 ///
110 /// This function is primarily useful for enabling pointer arithmetic on potentially fat
111 /// pointers. The pointer is cast to a sized pointee to utilize offset operations and then
112 /// recombined with its own original metadata.
113 ///
114 /// ```
115 /// #![feature(set_ptr_value)]
116 /// # use core::fmt::Debug;
117 /// let arr: [i32; 3] = [1, 2, 3];
118 /// let mut ptr = arr.as_ptr() as *const dyn Debug;
119 /// let thin = ptr as *const u8;
120 /// unsafe {
121 /// ptr = thin.add(8).with_metadata_of(ptr);
122 /// # assert_eq!(*(ptr as *const i32), 3);
123 /// println!("{:?}", &*ptr); // will print "3"
124 /// }
125 /// ```
126 ///
127 /// # *Incorrect* usage
128 ///
129 /// The provenance from pointers is *not* combined. The result must only be used to refer to the
130 /// address allowed by `self`.
131 ///
132 /// ```rust,no_run
133 /// #![feature(set_ptr_value)]
134 /// let x = 0u32;
135 /// let y = 1u32;
136 ///
137 /// let x = (&x) as *const u32;
138 /// let y = (&y) as *const u32;
139 ///
140 /// let offset = (x as usize - y as usize) / 4;
141 /// let bad = x.wrapping_add(offset).with_metadata_of(y);
142 ///
143 /// // This dereference is UB. The pointer only has provenance for `x` but points to `y`.
144 /// println!("{:?}", unsafe { &*bad });
145 /// ```
146 #[unstable(feature = "set_ptr_value", issue = "75091")]
147 #[must_use = "returns a new pointer rather than modifying its argument"]
148 #[inline]
149 pub const fn with_metadata_of<U>(self, meta: *const U) -> *const U
150 where
151 U: ?Sized,
152 {
153 from_raw_parts::<U>(self as *const (), metadata(meta))
154 }
155
    /// Changes constness without changing the type.
    ///
    /// This is a bit safer than `as` because it wouldn't silently change the type if the code is
    /// refactored.
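    ///
    /// # Examples
    ///
    /// A minimal sketch of the round trip (the variable names are just
    /// illustrative):
    ///
    /// ```
    /// let x = 5_i32;
    /// let ptr: *const i32 = &x;
    /// // Unlike `ptr as *mut u8`, `cast_mut` can only change constness, so a
    /// // later change to `ptr`'s pointee type is caught at compile time.
    /// let mut_ptr: *mut i32 = ptr.cast_mut();
    /// assert_eq!(mut_ptr as *const i32, ptr);
    /// ```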
    #[stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_const_stable(feature = "ptr_const_cast", since = "1.65.0")]
    #[rustc_diagnostic_item = "ptr_cast_mut"]
    #[inline(always)]
    pub const fn cast_mut(self) -> *mut T {
        self as _
    }

    /// Gets the "address" portion of the pointer.
    ///
    /// This is similar to `self as usize`, except that the [provenance][crate::ptr#provenance] of
    /// the pointer is discarded and not [exposed][crate::ptr#exposed-provenance]. This means that
    /// casting the returned address back to a pointer yields a [pointer without
    /// provenance][without_provenance], which it is undefined behavior to dereference. To properly
    /// restore the lost information and obtain a dereferenceable pointer, use
    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
    ///
    /// If using those APIs is not possible because there is no way to preserve a pointer with the
    /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
    /// or [`expose_provenance`][pointer::expose_provenance] and [`with_exposed_provenance`][with_exposed_provenance]
    /// instead. However, note that this makes your code less portable and less amenable to tools
    /// that check for compliance with the Rust memory model.
    ///
    /// On most platforms this will produce a value with the same bytes as the original
    /// pointer, because all the bytes are dedicated to describing the address.
    /// Platforms which need to store additional information in the pointer may
    /// perform a change of representation to produce a value containing only the address
    /// portion of the pointer. What that means is up to the platform to define.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
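    ///
    /// # Examples
    ///
    /// A small sketch of what "discarded, not exposed" means in practice: the
    /// returned integer round-trips as an address, but a pointer rebuilt from
    /// it alone has no provenance.
    ///
    /// ```
    /// let x = 42_u8;
    /// let ptr: *const u8 = &x;
    /// let addr = ptr.addr();
    /// // `without_provenance` gives back a pointer with this address but no
    /// // provenance; it must not be dereferenced. Use `with_addr`/`map_addr`
    /// // when you need a usable pointer.
    /// let invalid = std::ptr::without_provenance::<u8>(addr);
    /// assert_eq!(invalid.addr(), addr);
    /// ```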
    #[must_use]
    #[inline(always)]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn addr(self) -> usize {
        // A pointer-to-integer transmute currently has exactly the right semantics: it returns the
        // address without exposing the provenance. Note that this is *not* a stable guarantee about
        // transmute semantics; it relies on sysroot crates having special status.
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        unsafe { mem::transmute(self.cast::<()>()) }
    }

    /// Exposes the ["provenance"][crate::ptr#provenance] part of the pointer for future use in
    /// [`with_exposed_provenance`] and returns the "address" portion.
    ///
    /// This is equivalent to `self as usize`, which semantically discards provenance information.
    /// Furthermore, this (like the `as` cast) has the implicit side-effect of marking the
    /// provenance as 'exposed', so on platforms that support it you can later call
    /// [`with_exposed_provenance`] to reconstitute the original pointer including its provenance.
    ///
    /// Due to its inherent ambiguity, [`with_exposed_provenance`] may not be supported by tools
    /// that help you to stay conformant with the Rust memory model. It is recommended to use
    /// [Strict Provenance][crate::ptr#strict-provenance] APIs such as [`with_addr`][pointer::with_addr]
    /// wherever possible, in which case [`addr`][pointer::addr] should be used instead of `expose_provenance`.
    ///
    /// On most platforms this will produce a value with the same bytes as the original pointer,
    /// because all the bytes are dedicated to describing the address. Platforms which need to store
    /// additional information in the pointer may not support this operation, since the 'expose'
    /// side-effect which is required for [`with_exposed_provenance`] to work is typically not
    /// available.
    ///
    /// This is an [Exposed Provenance][crate::ptr#exposed-provenance] API.
    ///
    /// [`with_exposed_provenance`]: with_exposed_provenance
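    ///
    /// # Examples
    ///
    /// A minimal sketch of the expose/reconstitute round trip:
    ///
    /// ```
    /// let x = 7_u32;
    /// let ptr: *const u32 = &x;
    /// // Expose the provenance and keep only the address.
    /// let addr = ptr.expose_provenance();
    /// // Later, rebuild a usable pointer from the exposed provenance.
    /// let ptr2 = std::ptr::with_exposed_provenance::<u32>(addr);
    /// assert_eq!(unsafe { *ptr2 }, 7);
    /// ```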
    #[inline(always)]
    #[stable(feature = "exposed_provenance", since = "1.84.0")]
    pub fn expose_provenance(self) -> usize {
        self.cast::<()>() as usize
    }

    /// Creates a new pointer with the given address and the [provenance][crate::ptr#provenance] of
    /// `self`.
    ///
    /// This is similar to an `addr as *const T` cast, but copies
    /// the *provenance* of `self` to the new pointer.
    /// This avoids the inherent ambiguity of the unary cast.
    ///
    /// This is equivalent to using [`wrapping_offset`][pointer::wrapping_offset] to offset
    /// `self` to the given address, and therefore has all the same capabilities and restrictions.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
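    ///
    /// # Examples
    ///
    /// For illustration, re-deriving a pointer to another element of the same
    /// array from its integer address:
    ///
    /// ```
    /// let a = [1_u32, 2, 3];
    /// let p0: *const u32 = &a[0];
    /// // Compute the address of `a[2]` as a plain integer...
    /// let addr2 = p0.addr() + 2 * std::mem::size_of::<u32>();
    /// // ...then re-attach `p0`'s provenance to it.
    /// let p2 = p0.with_addr(addr2);
    /// assert_eq!(unsafe { *p2 }, 3);
    /// ```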
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn with_addr(self, addr: usize) -> Self {
        // This should probably be an intrinsic to avoid doing any sort of arithmetic, but
        // meanwhile, we can implement it with `wrapping_offset`, which preserves the pointer's
        // provenance.
        let self_addr = self.addr() as isize;
        let dest_addr = addr as isize;
        let offset = dest_addr.wrapping_sub(self_addr);
        self.wrapping_byte_offset(offset)
    }

    /// Creates a new pointer by mapping `self`'s address to a new one, preserving the
    /// [provenance][crate::ptr#provenance] of `self`.
    ///
    /// This is a convenience for [`with_addr`][pointer::with_addr]; see that method for details.
    ///
    /// This is a [Strict Provenance][crate::ptr#strict-provenance] API.
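    ///
    /// # Examples
    ///
    /// A sketch of the classic tagged-pointer pattern, storing a flag in an
    /// alignment bit:
    ///
    /// ```
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    /// // `u32` is 4-byte aligned, so the low two bits are free for a tag.
    /// let tagged = ptr.map_addr(|a| a | 0b01);
    /// assert_eq!(tagged.addr() & 0b11, 0b01);
    /// // Clear the tag again before dereferencing.
    /// let untagged = tagged.map_addr(|a| a & !0b11);
    /// assert_eq!(unsafe { *untagged }, 17);
    /// ```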
    #[must_use]
    #[inline]
    #[stable(feature = "strict_provenance", since = "1.84.0")]
    pub fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self {
        self.with_addr(f(self.addr()))
    }

    /// Decomposes a (possibly wide) pointer into its data pointer and metadata components.
    ///
    /// The pointer can later be reconstructed with [`from_raw_parts`].
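    ///
    /// # Examples
    ///
    /// A small sketch: decomposing a slice pointer into its data pointer and
    /// length metadata, then rebuilding it.
    ///
    /// ```
    /// #![feature(ptr_metadata)]
    ///
    /// let a = [1_i32, 2, 3];
    /// let ptr: *const [i32] = &a;
    /// let (data, len) = ptr.to_raw_parts();
    /// assert_eq!(len, 3);
    /// let rebuilt: *const [i32] = std::ptr::from_raw_parts(data, len);
    /// assert_eq!(rebuilt, ptr);
    /// ```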
    #[unstable(feature = "ptr_metadata", issue = "81513")]
    #[inline]
    pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata) {
        (self.cast(), metadata(self))
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. If the value may be uninitialized, [`as_uninit_ref`]
    /// must be used instead.
    ///
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_ref() {
    ///         assert_eq!(val_back, &10);
    ///     }
    /// }
    /// ```
    ///
    /// # Null-unchecked version
    ///
    /// If you are sure the pointer can never be null and are looking for some kind of
    /// `as_ref_unchecked` that returns the `&T` instead of `Option<&T>`, know that you can
    /// dereference the pointer directly.
    ///
    /// ```
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     let val_back = &*ptr;
    ///     assert_eq!(val_back, &10);
    /// }
    /// ```
    #[stable(feature = "ptr_as_ref", since = "1.9.0")]
    #[rustc_const_stable(feature = "const_ptr_is_null", since = "1.84.0")]
    #[inline]
    pub const unsafe fn as_ref<'a>(self) -> Option<&'a T> {
        // SAFETY: the caller must guarantee that `self` is valid
        // for a reference if it isn't null.
        if self.is_null() { None } else { unsafe { Some(&*self) } }
    }

    /// Returns a shared reference to the value behind the pointer.
    /// If the pointer may be null or the value may be uninitialized, [`as_uninit_ref`] must be used instead.
    /// If the pointer may be null, but the value is known to have been initialized, [`as_ref`] must be used instead.
    ///
    /// [`as_ref`]: #method.as_ref
    /// [`as_uninit_ref`]: #method.as_uninit_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_ref_unchecked)]
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     assert_eq!(ptr.as_ref_unchecked(), &10);
    /// }
    /// ```
    // FIXME: mention it in the docs for `as_ref` and `as_uninit_ref` once stabilized.
    #[unstable(feature = "ptr_as_ref_unchecked", issue = "122034")]
    #[inline]
    #[must_use]
    pub const unsafe fn as_ref_unchecked<'a>(self) -> &'a T {
        // SAFETY: the caller must guarantee that `self` is valid for a reference
        unsafe { &*self }
    }

    /// Returns `None` if the pointer is null, or else returns a shared reference to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// the pointer is [convertible to a reference](crate::ptr#pointer-to-reference-conversion).
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let ptr: *const u8 = &10u8 as *const u8;
    ///
    /// unsafe {
    ///     if let Some(val_back) = ptr.as_uninit_ref() {
    ///         assert_eq!(val_back.assume_init(), 10);
    ///     }
    /// }
    /// ```
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_ref<'a>(self) -> Option<&'a MaybeUninit<T>>
    where
        T: Sized,
    {
        // SAFETY: the caller must guarantee that `self` meets all the
        // requirements for a reference.
        if self.is_null() { None } else { Some(unsafe { &*(self as *const MaybeUninit<T>) }) }
    }

    /// Adds a signed offset to a pointer.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space. Note that "range" here refers to a half-open range as usual in Rust,
    ///   i.e., `self..result` for non-negative offsets and `result..self` for negative offsets.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_offset`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_offset`]: #method.wrapping_offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.offset(1) as char, '2');
    ///     assert_eq!(*ptr.offset(2) as char, '3');
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
            // We can use const_eval_select here because this is only for UB checks.
            const_eval_select!(
                @capture { this: *const (), count: isize, size: usize } -> bool:
                if const {
                    true
                } else {
                    // `size` is the size of a Rust type, so we know that
                    // `size <= isize::MAX` and thus the `as` cast here is not lossy.
                    let Some(byte_offset) = count.checked_mul(size as isize) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
                    !overflow
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset requires the address calculation to not overflow",
            (
                this: *const () = self as *const (),
                count: isize = count,
                size: usize = size_of::<T>(),
            ) => runtime_offset_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [offset][pointer::offset] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset(self, count: isize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { self.cast::<u8>().offset(count).with_metadata_of(self) }
    }

    /// Adds a signed offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to
    /// (this is called "[Provenance](ptr/index.html#provenance)").
    /// The pointer must not be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_offset((y as isize) - (x as isize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`offset`], this method basically delays the requirement of staying within the
    /// same allocated object: [`offset`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_offset` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`offset`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_offset(o).wrapping_offset(o.wrapping_neg())` is always the same as `x`. In other
    /// words, leaving the allocated object and then re-entering it later is permitted.
    ///
    /// [`offset`]: #method.offset
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_offset(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_offset(step);
    /// }
    /// assert_eq!(out.as_str(), "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_offset(self, count: isize) -> *const T
    where
        T: Sized,
    {
        // SAFETY: the `arith_offset` intrinsic has no prerequisites to be called.
        unsafe { intrinsics::arith_offset(self, count) }
    }

    /// Adds a signed offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_offset][pointer::wrapping_offset] on it. See that method
    /// for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_offset(self, count: isize) -> Self {
        self.cast::<u8>().wrapping_offset(count).with_metadata_of(self)
    }

    /// Masks out bits of the pointer according to a mask.
    ///
    /// This is a convenience for `ptr.map_addr(|a| a & mask)`.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    ///
    /// ## Examples
    ///
    /// ```
    /// #![feature(ptr_mask)]
    /// let v = 17_u32;
    /// let ptr: *const u32 = &v;
    ///
    /// // `u32` is 4-byte aligned,
    /// // which means that the lower 2 bits are always 0.
    /// let tag_mask = 0b11;
    /// let ptr_mask = !tag_mask;
    ///
    /// // We can store something in these lower bits
    /// let tagged_ptr = ptr.map_addr(|a| a | 0b10);
    ///
    /// // Get the "tag" back
    /// let tag = tagged_ptr.addr() & tag_mask;
    /// assert_eq!(tag, 0b10);
    ///
    /// // Note that `tagged_ptr` is unaligned; it's UB to read from it.
    /// // To recover the original pointer, `mask` can be used:
    /// let masked_ptr = tagged_ptr.mask(ptr_mask);
    /// assert_eq!(unsafe { *masked_ptr }, 17);
    /// ```
    #[unstable(feature = "ptr_mask", issue = "98290")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[inline(always)]
    pub fn mask(self, mask: usize) -> *const T {
        intrinsics::ptr_mask(self.cast::<()>(), mask).with_metadata_of(self)
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This is equivalent to `(self as isize - origin as isize) / (size_of::<T>() as isize)`,
    /// except that it has a lot more opportunities for UB, in exchange for the compiler
    /// better understanding what you are doing.
    ///
    /// The primary motivation of this method is for computing the `len` of an array/slice
    /// of `T` that you are currently representing as a "start" and "end" pointer
    /// (and "end" is "one past the end" of the array).
    /// In that case, `end.offset_from(start)` gets you the length of the array.
    ///
    /// All of the following safety requirements are trivially satisfied for this use case.
    ///
    /// [`offset`]: #method.offset
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * `self` and `origin` must either
    ///
    ///   * point to the same address, or
    ///   * both be [derived from][crate::ptr#provenance] a pointer to the same [allocated object], and the memory range between
    ///     the two pointers must be in bounds of that object. (See below for an example.)
    ///
    /// * The distance between the pointers, in bytes, must be an exact multiple
    ///   of the size of `T`.
    ///
    /// As a consequence, the absolute distance between the pointers, in bytes, computed on
    /// mathematical integers (without "wrapping around"), cannot overflow an `isize`. This is
    /// implied by the in-bounds requirement, and the fact that no allocated object can be larger
    /// than `isize::MAX` bytes.
    ///
    /// The requirement for pointers to be derived from the same allocated object is primarily
    /// needed for `const`-compatibility: the distance between pointers into *different* allocated
    /// objects is not known at compile-time. However, the requirement also exists at
    /// runtime and may be exploited by optimizations. If you wish to compute the difference between
    /// pointers that are not guaranteed to be from the same allocation, use `(self as isize -
    /// origin as isize) / size_of::<T>()`.
    // FIXME: recommend `addr()` instead of `as usize` once that is stable.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from(ptr1), 2);
    ///     assert_eq!(ptr1.offset_from(ptr2), -2);
    ///     assert_eq!(ptr1.offset(2), ptr2);
    ///     assert_eq!(ptr2.offset(-2), ptr1);
    /// }
    /// ```
    ///
    /// *Incorrect* usage:
    ///
    /// ```rust,no_run
    /// let ptr1 = Box::into_raw(Box::new(0u8)) as *const u8;
    /// let ptr2 = Box::into_raw(Box::new(1u8)) as *const u8;
    /// let diff = (ptr2 as isize).wrapping_sub(ptr1 as isize);
    /// // Make ptr2_other an "alias" of ptr2.add(1), but derived from ptr1.
    /// let ptr2_other = (ptr1 as *const u8).wrapping_offset(diff).wrapping_offset(1);
    /// assert_eq!(ptr2 as usize + 1, ptr2_other as usize);
    /// // Since ptr2_other and ptr2 are derived from pointers to different objects,
    /// // computing their offset is undefined behavior, even though
    /// // they point to addresses that are in-bounds of the same object!
    /// unsafe {
    ///     let one = ptr2_other.offset_from(ptr2); // Undefined Behavior! ⚠️
    /// }
    /// ```
    #[stable(feature = "ptr_offset_from", since = "1.47.0")]
    #[rustc_const_stable(feature = "const_ptr_offset_from", since = "1.65.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from(self, origin: *const T) -> isize
    where
        T: Sized,
    {
        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from`.
        unsafe { intrinsics::ptr_offset_from(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from`][pointer::offset_from] on it. See that method for
    /// documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from<U: ?Sized>(self, origin: *const U) -> isize {
        // SAFETY: the caller must uphold the safety contract for `offset_from`.
        unsafe { self.cast::<u8>().offset_from(origin.cast::<u8>()) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of T: the distance in bytes divided by `size_of::<T>()`.
    ///
    /// This computes the same value that [`offset_from`](#method.offset_from)
    /// would compute, but with the added precondition that the offset is
    /// guaranteed to be non-negative. This method is equivalent to
    /// `usize::try_from(self.offset_from(origin)).unwrap_unchecked()`,
    /// but it provides slightly more information to the optimizer, which can
    /// sometimes allow it to optimize slightly better with some backends.
    ///
    /// This method can be thought of as recovering the `count` that was passed
    /// to [`add`](#method.add) (or, with the parameters in the other order,
    /// to [`sub`](#method.sub)). The following are all equivalent, assuming
    /// that their safety preconditions are met:
    /// ```rust
    /// # unsafe fn blah(ptr: *const i32, origin: *const i32, count: usize) -> bool { unsafe {
    /// ptr.offset_from_unsigned(origin) == count
    /// # &&
    /// origin.add(count) == ptr
    /// # &&
    /// ptr.sub(count) == origin
    /// # } }
    /// ```
    ///
    /// # Safety
    ///
    /// - The distance between the pointers must be non-negative (`self >= origin`).
    ///
    /// - *All* the safety conditions of [`offset_from`](#method.offset_from)
    ///   apply to this method as well; see it for the full details.
    ///
    /// Importantly, despite the return type of this method being able to represent
    /// a larger offset, it's still *not permitted* to pass pointers which differ
    /// by more than `isize::MAX` *bytes*. As such, the result of this method will
    /// always be less than or equal to `isize::MAX as usize`.
    ///
    /// # Panics
    ///
    /// This function panics if `T` is a Zero-Sized Type ("ZST").
    ///
    /// # Examples
    ///
    /// ```
    /// let a = [0; 5];
    /// let ptr1: *const i32 = &a[1];
    /// let ptr2: *const i32 = &a[3];
    /// unsafe {
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr1), 2);
    ///     assert_eq!(ptr1.add(2), ptr2);
    ///     assert_eq!(ptr2.sub(2), ptr1);
    ///     assert_eq!(ptr2.offset_from_unsigned(ptr2), 0);
    /// }
    ///
    /// // This would be incorrect, as the pointers are not correctly ordered:
    /// // ptr1.offset_from_unsigned(ptr2)
    /// ```
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn offset_from_unsigned(self, origin: *const T) -> usize
    where
        T: Sized,
    {
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
            const_eval_select!(
                @capture { this: *const (), origin: *const () } -> bool:
                if const {
                    true
                } else {
                    this >= origin
                }
            )
        }

        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::offset_from_unsigned requires `self >= origin`",
            (
                this: *const () = self as *const (),
                origin: *const () = origin as *const (),
            ) => runtime_ptr_ge(this, origin)
        );

        let pointee_size = size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
        // SAFETY: the caller must uphold the safety contract for `ptr_offset_from_unsigned`.
        unsafe { intrinsics::ptr_offset_from_unsigned(self, origin) }
    }

    /// Calculates the distance between two pointers within the same allocation, *where it's known that
    /// `self` is equal to or greater than `origin`*. The returned value is in
    /// units of **bytes**.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [`offset_from_unsigned`][pointer::offset_from_unsigned] on it.
    /// See that method for documentation and safety requirements.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointers,
    /// ignoring the metadata.
    #[stable(feature = "ptr_sub_ptr", since = "1.87.0")]
    #[rustc_const_stable(feature = "const_ptr_sub_ptr", since = "1.87.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_offset_from_unsigned<U: ?Sized>(self, origin: *const U) -> usize {
        // SAFETY: the caller must uphold the safety contract for `offset_from_unsigned`.
        unsafe { self.cast::<u8>().offset_from_unsigned(origin.cast::<u8>()) }
    }

    /// Returns whether two pointers are guaranteed to be equal.
    ///
    /// At runtime this function behaves like `Some(self == other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine equality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their equality known.
    /// But when it returns `Some`, the pointers' equality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
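    ///
    /// # Examples
    ///
    /// A minimal sketch; outside const evaluation the answer is always known:
    ///
    /// ```
    /// #![feature(const_raw_ptr_comparison)]
    ///
    /// let a = [1_u8, 2];
    /// let x: *const u8 = &a[0];
    /// let y: *const u8 = &a[1];
    /// // At runtime this behaves like `Some(x == y)`.
    /// assert_eq!(x.guaranteed_eq(x), Some(true));
    /// assert_eq!(x.guaranteed_eq(y), Some(false));
    /// ```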
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_eq(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match intrinsics::ptr_guaranteed_cmp(self, other) {
            2 => None,
            other => Some(other == 1),
        }
    }

    /// Returns whether two pointers are guaranteed to be unequal.
    ///
    /// At runtime this function behaves like `Some(self != other)`.
    /// However, in some contexts (e.g., compile-time evaluation),
    /// it is not always possible to determine inequality of two pointers, so this function may
    /// spuriously return `None` for pointers that later actually turn out to have their inequality known.
    /// But when it returns `Some`, the pointers' inequality is guaranteed to be known.
    ///
    /// The return value may change from `Some` to `None` and vice versa depending on the compiler
    /// version, and unsafe code must not
    /// rely on the result of this function for soundness. It is suggested to only use this function
    /// for performance optimizations where spurious `None` return values by this function do not
    /// affect the outcome, but just the performance.
    /// The consequences of using this method to make runtime and compile-time code behave
    /// differently have not been explored. This method should not be used to introduce such
    /// differences, and it should also not be stabilized before we have a better understanding
    /// of this issue.
    #[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
    #[inline]
    pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
    where
        T: Sized,
    {
        match self.guaranteed_eq(other) {
            None => None,
            Some(eq) => Some(!eq),
        }
    }

    /// Adds an unsigned offset to a pointer.
    ///
    /// This can only move the pointer forward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_add`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_add`]: #method.wrapping_add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    /// let ptr: *const u8 = s.as_ptr();
    ///
    /// unsafe {
    ///     assert_eq!(*ptr.add(1), b'2');
    ///     assert_eq!(*ptr.add(2), b'3');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn add(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    let (_, overflow) = this.addr().overflowing_add(byte_offset);
                    byte_offset <= (isize::MAX as usize) && !overflow
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::add requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_add_nowrap(this, count, size)
        );

        // SAFETY: the caller must uphold the safety contract for `offset`.
        unsafe { intrinsics::offset(self, count) }
    }

    /// Adds an unsigned offset in bytes to a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [add][pointer::add] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_add(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `add`.
        unsafe { self.cast::<u8>().add(count).with_metadata_of(self) }
    }

    /// Subtracts an unsigned offset from a pointer.
    ///
    /// This can only move the pointer backward (or not move it). If you need to move forward or
    /// backward depending on the value, then you might want [`offset`](#method.offset) instead
    /// which takes a signed offset.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// If any of the following conditions are violated, the result is Undefined Behavior:
    ///
    /// * The offset in bytes, `count * size_of::<T>()`, computed on mathematical integers (without
    ///   "wrapping around"), must fit in an `isize`.
    ///
    /// * If the computed offset is non-zero, then `self` must be [derived from][crate::ptr#provenance] a pointer to some
    ///   [allocated object], and the entire memory range between `self` and the result must be in
    ///   bounds of that allocated object. In particular, this range must not "wrap around" the edge
    ///   of the address space.
    ///
    /// Allocated objects can never be larger than `isize::MAX` bytes, so if the computed offset
    /// stays in bounds of the allocated object, it is guaranteed to satisfy the first requirement.
    /// This implies, for instance, that `vec.as_ptr().add(vec.len())` (for `vec: Vec<T>`) is always
    /// safe.
    ///
    /// Consider using [`wrapping_sub`] instead if these constraints are
    /// difficult to satisfy. The only advantage of this method is that it
    /// enables more aggressive compiler optimizations.
    ///
    /// [`wrapping_sub`]: #method.wrapping_sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// let s: &str = "123";
    ///
    /// unsafe {
    ///     let end: *const u8 = s.as_ptr().add(3);
    ///     assert_eq!(*end.sub(1), b'3');
    ///     assert_eq!(*end.sub(2), b'2');
    /// }
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        #[cfg(debug_assertions)]
        #[inline]
        #[rustc_allow_const_fn_unstable(const_eval_select)]
        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
            const_eval_select!(
                @capture { this: *const (), count: usize, size: usize } -> bool:
                if const {
                    true
                } else {
                    let Some(byte_offset) = count.checked_mul(size) else {
                        return false;
                    };
                    byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
                }
            )
        }

        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
        ub_checks::assert_unsafe_precondition!(
            check_language_ub,
            "ptr::sub requires that the address calculation does not overflow",
            (
                this: *const () = self as *const (),
                count: usize = count,
                size: usize = size_of::<T>(),
            ) => runtime_sub_nowrap(this, count, size)
        );

        if T::IS_ZST {
            // Pointer arithmetic does nothing when the pointee is a ZST.
            self
        } else {
            // SAFETY: the caller must uphold the safety contract for `offset`.
            // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
        }
    }

    /// Subtracts an unsigned offset in bytes from a pointer.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [sub][pointer::sub] on it. See that method for documentation
    /// and safety requirements.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn byte_sub(self, count: usize) -> Self {
        // SAFETY: the caller must uphold the safety contract for `sub`.
        unsafe { self.cast::<u8>().sub(count).with_metadata_of(self) }
    }

    /// Adds an unsigned offset to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_add((y as usize) - (x as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`add`], this method basically delays the requirement of staying within the
    /// same allocated object: [`add`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_add` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`add`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`add`]: #method.add
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let step = 2;
    /// let end_rounded_up = ptr.wrapping_add(6);
    ///
    /// let mut out = String::new();
    /// while ptr != end_rounded_up {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_add(step);
    /// }
    /// assert_eq!(out, "1, 3, 5, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_add(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset(count as isize)
    }

    /// Adds an unsigned offset in bytes to a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_add][pointer::wrapping_add] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_add(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_add(count).with_metadata_of(self)
    }

    /// Subtracts an unsigned offset from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of T; e.g., a `count` of 3 represents a pointer
    /// offset of `3 * size_of::<T>()` bytes.
    ///
    /// # Safety
    ///
    /// This operation itself is always safe, but using the resulting pointer is not.
    ///
    /// The resulting pointer "remembers" the [allocated object] that `self` points to; it must not
    /// be used to read or write other allocated objects.
    ///
    /// In other words, `let z = x.wrapping_sub((x as usize) - (y as usize))` does *not* make `z`
    /// the same as `y` even if we assume `T` has size `1` and there is no overflow: `z` is still
    /// attached to the object `x` is attached to, and dereferencing it is Undefined Behavior unless
    /// `x` and `y` point into the same allocated object.
    ///
    /// Compared to [`sub`], this method basically delays the requirement of staying within the
    /// same allocated object: [`sub`] is immediate Undefined Behavior when crossing object
    /// boundaries; `wrapping_sub` produces a pointer but still leads to Undefined Behavior if a
    /// pointer is dereferenced when it is out-of-bounds of the object it is attached to. [`sub`]
    /// can be optimized better and is thus preferable in performance-sensitive code.
    ///
    /// The delayed check only considers the value of the pointer that was dereferenced, not the
    /// intermediate values used during the computation of the final result. For example,
    /// `x.wrapping_add(o).wrapping_sub(o)` is always the same as `x`. In other words, leaving the
    /// allocated object and then re-entering it later is permitted.
    ///
    /// [`sub`]: #method.sub
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::fmt::Write;
    /// // Iterate using a raw pointer in increments of two elements (backwards)
    /// let data = [1u8, 2, 3, 4, 5];
    /// let mut ptr: *const u8 = data.as_ptr();
    /// let start_rounded_down = ptr.wrapping_sub(2);
    /// ptr = ptr.wrapping_add(4);
    /// let step = 2;
    /// let mut out = String::new();
    /// while ptr != start_rounded_down {
    ///     unsafe {
    ///         write!(&mut out, "{}, ", *ptr)?;
    ///     }
    ///     ptr = ptr.wrapping_sub(step);
    /// }
    /// assert_eq!(out, "5, 3, 1, ");
    /// # std::fmt::Result::Ok(())
    /// ```
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[must_use = "returns a new pointer rather than modifying its argument"]
    #[rustc_const_stable(feature = "const_ptr_offset", since = "1.61.0")]
    #[inline(always)]
    pub const fn wrapping_sub(self, count: usize) -> Self
    where
        T: Sized,
    {
        self.wrapping_offset((count as isize).wrapping_neg())
    }

    /// Subtracts an unsigned offset in bytes from a pointer using wrapping arithmetic.
    ///
    /// `count` is in units of bytes.
    ///
    /// This is purely a convenience for casting to a `u8` pointer and
    /// using [wrapping_sub][pointer::wrapping_sub] on it. See that method for documentation.
    ///
    /// For non-`Sized` pointees this operation changes only the data pointer,
    /// leaving the metadata untouched.
    #[must_use]
    #[inline(always)]
    #[stable(feature = "pointer_byte_offsets", since = "1.75.0")]
    #[rustc_const_stable(feature = "const_pointer_byte_offsets", since = "1.75.0")]
    pub const fn wrapping_byte_sub(self, count: usize) -> Self {
        self.cast::<u8>().wrapping_sub(count).with_metadata_of(self)
    }

    /// Reads the value from `self` without moving it. This leaves the
    /// memory in `self` unchanged.
    ///
    /// See [`ptr::read`] for safety concerns and examples.
    ///
    /// [`ptr::read`]: crate::ptr::read()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read`.
        unsafe { read(self) }
    }

    /// Performs a volatile read of the value from `self` without moving it. This
    /// leaves the memory in `self` unchanged.
    ///
    /// Volatile operations are intended to act on I/O memory, and are guaranteed
    /// to not be elided or reordered by the compiler across other volatile
    /// operations.
    ///
    /// See [`ptr::read_volatile`] for safety concerns and examples.
    ///
    /// [`ptr::read_volatile`]: crate::ptr::read_volatile()
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub unsafe fn read_volatile(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_volatile`.
        unsafe { read_volatile(self) }
    }
1324
1325 /// Reads the value from `self` without moving it. This leaves the
1326 /// memory in `self` unchanged.
1327 ///
1328 /// Unlike `read`, the pointer may be unaligned.
1329 ///
1330 /// See [`ptr::read_unaligned`] for safety concerns and examples.
1331 ///
1332 /// [`ptr::read_unaligned`]: crate::ptr::read_unaligned()
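    ///
    /// # Examples
    ///
    /// A sketch (not part of the upstream docs): reading a `u16` that starts at
    /// an odd offset inside a byte buffer.
    ///
    /// ```
    /// let data = [0u8, 1, 2, 3];
    /// let ptr: *const u8 = data.as_ptr();
    /// // SAFETY: offsets 1 and 2 are in bounds, and `read_unaligned`
    /// // explicitly tolerates the misaligned address.
    /// let v = unsafe { ptr.add(1).cast::<u16>().read_unaligned() };
    /// assert_eq!(v, u16::from_ne_bytes([1, 2]));
    /// ```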
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[rustc_const_stable(feature = "const_ptr_read", since = "1.71.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn read_unaligned(self) -> T
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `read_unaligned`.
        unsafe { read_unaligned(self) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy`].
    ///
    /// See [`ptr::copy`] for safety concerns and examples.
    ///
    /// [`ptr::copy`]: crate::ptr::copy()
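    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs):
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: both pointers are valid for 3 elements and properly aligned.
    /// unsafe { src.as_ptr().copy_to(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```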
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy`.
        unsafe { copy(self, dest, count) }
    }

    /// Copies `count * size_of::<T>()` bytes from `self` to `dest`. The source
    /// and destination may *not* overlap.
    ///
    /// NOTE: this has the *same* argument order as [`ptr::copy_nonoverlapping`].
    ///
    /// See [`ptr::copy_nonoverlapping`] for safety concerns and examples.
    ///
    /// [`ptr::copy_nonoverlapping`]: crate::ptr::copy_nonoverlapping()
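    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs); the two buffers are
    /// distinct, satisfying the no-overlap requirement:
    ///
    /// ```
    /// let src = [1u8, 2, 3];
    /// let mut dst = [0u8; 3];
    /// // SAFETY: the ranges are disjoint, valid for 3 elements, and aligned.
    /// unsafe { src.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), 3) };
    /// assert_eq!(dst, [1, 2, 3]);
    /// ```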
    #[rustc_const_stable(feature = "const_intrinsic_copy", since = "1.83.0")]
    #[stable(feature = "pointer_methods", since = "1.26.0")]
    #[inline]
    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
    pub const unsafe fn copy_to_nonoverlapping(self, dest: *mut T, count: usize)
    where
        T: Sized,
    {
        // SAFETY: the caller must uphold the safety contract for `copy_nonoverlapping`.
        unsafe { copy_nonoverlapping(self, dest, count) }
    }

    /// Computes the offset that needs to be applied to the pointer in order to make it aligned to
    /// `align`.
    ///
    /// If it is not possible to align the pointer, the implementation returns
    /// `usize::MAX`.
    ///
    /// The offset is expressed in number of `T` elements, and not bytes. The value returned can be
    /// used with the `wrapping_add` method.
    ///
    /// There are no guarantees whatsoever that offsetting the pointer will not overflow or go
    /// beyond the allocation that the pointer points into. It is up to the caller to ensure that
    /// the returned offset is correct in all terms other than alignment.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two.
    ///
    /// # Examples
    ///
    /// Accessing adjacent `u8` as `u16`
    ///
    /// ```
    /// # unsafe {
    /// let x = [5_u8, 6, 7, 8, 9];
    /// let ptr = x.as_ptr();
    /// let offset = ptr.align_offset(align_of::<u16>());
    ///
    /// if offset < x.len() - 1 {
    ///     let u16_ptr = ptr.add(offset).cast::<u16>();
    ///     assert!(*u16_ptr == u16::from_ne_bytes([5, 6]) || *u16_ptr == u16::from_ne_bytes([6, 7]));
    /// } else {
    ///     // while the pointer can be aligned via `offset`, it would point
    ///     // outside the allocation
    /// }
    /// # }
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "align_offset", since = "1.36.0")]
    pub fn align_offset(self, align: usize) -> usize
    where
        T: Sized,
    {
        if !align.is_power_of_two() {
            panic!("align_offset: align is not a power-of-two");
        }

        // SAFETY: `align` has been checked to be a power of 2 above
        let ret = unsafe { align_offset(self, align) };

        // Inform Miri that we want to consider the resulting pointer to be suitably aligned.
        #[cfg(miri)]
        if ret != usize::MAX {
            intrinsics::miri_promise_symbolic_alignment(self.wrapping_add(ret).cast(), align);
        }

        ret
    }

    /// Returns whether the pointer is properly aligned for `T`.
    ///
    /// # Examples
    ///
    /// ```
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned());
    /// assert!(!ptr.wrapping_byte_add(1).is_aligned());
    /// ```
    #[must_use]
    #[inline]
    #[stable(feature = "pointer_is_aligned", since = "1.79.0")]
    pub fn is_aligned(self) -> bool
    where
        T: Sized,
    {
        self.is_aligned_to(align_of::<T>())
    }

    /// Returns whether the pointer is aligned to `align`.
    ///
    /// For non-`Sized` pointees this operation considers only the data pointer,
    /// ignoring the metadata.
    ///
    /// # Panics
    ///
    /// The function panics if `align` is not a power-of-two (this includes 0).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(pointer_is_aligned_to)]
    ///
    /// // On some platforms, the alignment of i32 is less than 4.
    /// #[repr(align(4))]
    /// struct AlignedI32(i32);
    ///
    /// let data = AlignedI32(42);
    /// let ptr = &data as *const AlignedI32;
    ///
    /// assert!(ptr.is_aligned_to(1));
    /// assert!(ptr.is_aligned_to(2));
    /// assert!(ptr.is_aligned_to(4));
    ///
    /// assert!(ptr.wrapping_byte_add(2).is_aligned_to(2));
    /// assert!(!ptr.wrapping_byte_add(2).is_aligned_to(4));
    ///
    /// assert_ne!(ptr.is_aligned_to(8), ptr.wrapping_add(1).is_aligned_to(8));
    /// ```
    #[must_use]
    #[inline]
    #[unstable(feature = "pointer_is_aligned_to", issue = "96284")]
    pub fn is_aligned_to(self, align: usize) -> bool {
        if !align.is_power_of_two() {
            panic!("is_aligned_to: align is not a power-of-two");
        }

        self.addr() & (align - 1) == 0
    }
}

impl<T> *const [T] {
    /// Returns the length of a raw slice.
    ///
    /// The returned value is the number of **elements**, not the number of bytes.
    ///
    /// This function is safe, even when the raw slice cannot be cast to a slice
    /// reference because the pointer is null or unaligned.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn len(self) -> usize {
        metadata(self)
    }

    /// Returns `true` if the raw slice has a length of 0.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert!(!slice.is_empty());
    /// ```
    #[inline(always)]
    #[stable(feature = "slice_ptr_len", since = "1.79.0")]
    #[rustc_const_stable(feature = "const_slice_ptr_len", since = "1.79.0")]
    pub const fn is_empty(self) -> bool {
        self.len() == 0
    }

    /// Returns a raw pointer to the slice's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(slice_ptr_get)]
    /// use std::ptr;
    ///
    /// let slice: *const [i8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// assert_eq!(slice.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Gets a raw pointer to the underlying array.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
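    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs):
    ///
    /// ```
    /// #![feature(slice_as_array)]
    /// use std::ptr;
    ///
    /// let slice: *const [u8] = ptr::slice_from_raw_parts(ptr::null(), 3);
    /// // Only the exact length matches.
    /// assert!(slice.as_array::<3>().is_some());
    /// assert!(slice.as_array::<4>().is_none());
    /// ```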
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub const fn as_array<const N: usize>(self) -> Option<*const [T; N]> {
        if self.len() == N {
            let me = self.as_ptr() as *const [T; N];
            Some(me)
        } else {
            None
        }
    }

    /// Returns a raw pointer to an element or subslice, without doing bounds
    /// checking.
    ///
    /// Calling this method with an out-of-bounds index or when `self` is not dereferenceable
    /// is *[undefined behavior]* even if the resulting pointer is not used.
    ///
    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(slice_ptr_get)]
    ///
    /// let x = &[1, 2, 4] as *const [i32];
    ///
    /// unsafe {
    ///     assert_eq!(x.get_unchecked(1), x.as_ptr().add(1));
    /// }
    /// ```
    #[unstable(feature = "slice_ptr_get", issue = "74265")]
    #[inline]
    pub unsafe fn get_unchecked<I>(self, index: I) -> *const I::Output
    where
        I: SliceIndex<[T]>,
    {
        // SAFETY: the caller ensures that `self` is dereferenceable and `index` in-bounds.
        unsafe { index.get_unchecked(self) }
    }

    /// Returns `None` if the pointer is null, or else returns a shared slice to
    /// the value wrapped in `Some`. In contrast to [`as_ref`], this does not require
    /// the value to be initialized.
    ///
    /// [`as_ref`]: #method.as_ref
    ///
    /// # Safety
    ///
    /// When calling this method, you have to ensure that *either* the pointer is null *or*
    /// all of the following is true:
    ///
    /// * The pointer must be [valid] for reads for `ptr.len() * size_of::<T>()` many bytes,
    ///   and it must be properly aligned. This means in particular:
    ///
    ///     * The entire memory range of this slice must be contained within a single [allocated object]!
    ///       Slices can never span across multiple allocated objects.
    ///
    ///     * The pointer must be aligned even for zero-length slices. One
    ///       reason for this is that enum layout optimizations may rely on references
    ///       (including slices of any length) being aligned and non-null to distinguish
    ///       them from other data. You can obtain a pointer that is usable as `data`
    ///       for zero-length slices using [`NonNull::dangling()`].
    ///
    /// * The total size `ptr.len() * size_of::<T>()` of the slice must be no larger than `isize::MAX`.
    ///   See the safety documentation of [`pointer::offset`].
    ///
    /// * You must enforce Rust's aliasing rules, since the returned lifetime `'a` is
    ///   arbitrarily chosen and does not necessarily reflect the actual lifetime of the data.
    ///   In particular, while this reference exists, the memory the pointer points to must
    ///   not get mutated (except inside `UnsafeCell`).
    ///
    /// This applies even if the result of this method is unused!
    ///
    /// See also [`slice::from_raw_parts`][].
    ///
    /// [valid]: crate::ptr#safety
    /// [allocated object]: crate::ptr#allocated-object
    ///
    /// # Panics during const evaluation
    ///
    /// This method will panic during const evaluation if the pointer cannot be
    /// determined to be null or not. See [`is_null`] for more information.
    ///
    /// [`is_null`]: #method.is_null
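    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs):
    ///
    /// ```
    /// #![feature(ptr_as_uninit)]
    ///
    /// let data = [1u8, 2, 3];
    /// let ptr: *const [u8] = &data[..];
    /// // SAFETY: `ptr` comes from a live reference, so it is valid for reads,
    /// // aligned, and nothing mutates the memory while the slice is alive.
    /// let uninit = unsafe { ptr.as_uninit_slice() }.unwrap();
    /// assert_eq!(uninit.len(), 3);
    /// ```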
    #[inline]
    #[unstable(feature = "ptr_as_uninit", issue = "75402")]
    pub const unsafe fn as_uninit_slice<'a>(self) -> Option<&'a [MaybeUninit<T>]> {
        if self.is_null() {
            None
        } else {
            // SAFETY: the caller must uphold the safety contract for `as_uninit_slice`.
            Some(unsafe { slice::from_raw_parts(self as *const MaybeUninit<T>, self.len()) })
        }
    }
}

impl<T, const N: usize> *const [T; N] {
    /// Returns a raw pointer to the array's buffer.
    ///
    /// This is equivalent to casting `self` to `*const T`, but more type-safe.
    ///
    /// # Examples
    ///
    /// ```rust
    /// #![feature(array_ptr_get)]
    /// use std::ptr;
    ///
    /// let arr: *const [i8; 3] = ptr::null();
    /// assert_eq!(arr.as_ptr(), ptr::null());
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_ptr(self) -> *const T {
        self as *const T
    }

    /// Returns a raw pointer to a slice containing the entire array.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(array_ptr_get)]
    ///
    /// let arr: *const [i32; 3] = &[1, 2, 4] as *const [i32; 3];
    /// let slice: *const [i32] = arr.as_slice();
    /// assert_eq!(slice.len(), 3);
    /// ```
    #[inline]
    #[unstable(feature = "array_ptr_get", issue = "119834")]
    pub const fn as_slice(self) -> *const [T] {
        self
    }
}

/// Pointer equality is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialEq for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn eq(&self, other: &*const T) -> bool {
        *self == *other
    }
}

/// Pointer equality is an equivalence relation.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *const T {}

/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
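///
/// # Examples
///
/// A minimal sketch (not part of the upstream docs): pointers into the same
/// array order by address, which matches element order.
///
/// ```
/// let data = [1u8, 2, 3];
/// let first: *const u8 = &data[0];
/// let second: *const u8 = &data[1];
/// assert!(first < second);
/// ```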
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Ord for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn cmp(&self, other: &*const T) -> Ordering {
        if self < other {
            Less
        } else if self == other {
            Equal
        } else {
            Greater
        }
    }
}

/// Pointer comparison is by address, as produced by the [`<*const T>::addr`](pointer::addr) method.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> PartialOrd for *const T {
    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn partial_cmp(&self, other: &*const T) -> Option<Ordering> {
        Some(self.cmp(other))
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn lt(&self, other: &*const T) -> bool {
        *self < *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn le(&self, other: &*const T) -> bool {
        *self <= *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn gt(&self, other: &*const T) -> bool {
        *self > *other
    }

    #[inline]
    #[allow(ambiguous_wide_pointer_comparisons)]
    fn ge(&self, other: &*const T) -> bool {
        *self >= *other
    }
}

#[stable(feature = "raw_ptr_default", since = "1.88.0")]
impl<T: ?Sized + Thin> Default for *const T {
    /// Returns the default value of [`null()`][crate::ptr::null].
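    ///
    /// # Examples
    ///
    /// A minimal sketch (not part of the upstream docs):
    ///
    /// ```
    /// let ptr: *const u8 = Default::default();
    /// assert!(ptr.is_null());
    /// ```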
    fn default() -> Self {
        crate::ptr::null()
    }
}