
List of all items

Structs

Traits

Type Aliases

cactusref::implementing_self_referential_data_structures - Rust
Expand description

Examples of implementing self-referential data structures with CactusRef. CactusRef can be used to implement collections that own strong references to themselves.

+

§Doubly-linked List

+

The following implements a doubly-linked list that is fully deallocated once the list binding is dropped.

+ +
use std::cell::RefCell;
+use std::iter;
+
+use cactusref::{Adopt, Rc};
+
+struct Node<T> {
+    pub prev: Option<Rc<RefCell<Self>>>,
+    pub next: Option<Rc<RefCell<Self>>>,
+    pub data: T,
+}
+
+struct List<T> {
+    pub head: Option<Rc<RefCell<Node<T>>>>,
+}
+
+impl<T> List<T> {
+    fn pop(&mut self) -> Option<Rc<RefCell<Node<T>>>> {
+        let head = self.head.take()?;
+        let tail = head.borrow_mut().prev.take();
+        let next = head.borrow_mut().next.take();
+        if let Some(ref tail) = tail {
+            Rc::unadopt(&head, tail);
+            Rc::unadopt(tail, &head);
+
+            tail.borrow_mut().next.clone_from(&next);
+            if let Some(ref next) = next {
+                unsafe {
+                    Rc::adopt_unchecked(tail, next);
+                }
+            }
+        }
+        if let Some(ref next) = next {
+            Rc::unadopt(&head, next);
+            Rc::unadopt(next, &head);
+
+            next.borrow_mut().prev.clone_from(&tail);
+            if let Some(ref tail) = tail {
+                unsafe {
+                    Rc::adopt_unchecked(next, tail);
+                }
+            }
+        }
+        self.head = next;
+        Some(head)
+    }
+}
+
+impl<T> From<Vec<T>> for List<T> {
+    fn from(list: Vec<T>) -> Self {
+        let nodes = list
+            .into_iter()
+            .map(|data| {
+                Rc::new(RefCell::new(Node {
+                    prev: None,
+                    next: None,
+                    data,
+                }))
+            })
+            .collect::<Vec<_>>();
+        for i in 0..nodes.len() - 1 {
+            let curr = &nodes[i];
+            let next = &nodes[i + 1];
+            curr.borrow_mut().next = Some(Rc::clone(next));
+            next.borrow_mut().prev = Some(Rc::clone(curr));
+            unsafe {
+                Rc::adopt_unchecked(curr, next);
+                Rc::adopt_unchecked(next, curr);
+            }
+        }
+        let tail = &nodes[nodes.len() - 1];
+        let head = &nodes[0];
+        tail.borrow_mut().next = Some(Rc::clone(head));
+        head.borrow_mut().prev = Some(Rc::clone(tail));
+        unsafe {
+            Rc::adopt_unchecked(tail, head);
+            Rc::adopt_unchecked(head, tail);
+        }
+
+        let head = Rc::clone(head);
+        Self { head: Some(head) }
+    }
+}
+
+let list = iter::repeat(())
+    .map(|_| "a".repeat(1024 * 1024))
+    .take(10)
+    .collect::<Vec<_>>();
+let mut list = List::from(list);
+
+let head = list.pop().unwrap();
+assert_eq!(Rc::strong_count(&head), 1);
+assert!(head.borrow().data.starts_with('a'));
+
+// The new head of the list is owned three times:
+//
+// - itself.
+// - the `prev` pointer to it from its next element.
+// - the `next` pointer from the list's tail.
+assert_eq!(list.head.as_ref().map(Rc::strong_count), Some(3));
+
+// The popped head is no longer part of the graph and can be safely dropped
+// and deallocated.
+let weak = Rc::downgrade(&head);
+drop(head);
+assert!(weak.upgrade().is_none());
+
+drop(list);
+// all memory consumed by the list nodes is reclaimed.
+

Crate cactusref

source ·
Expand description

Single-threaded, cycle-aware, reference-counting pointers. ‘Rc’ stands for ‘Reference Counted’.

+

The type Rc<T> provides shared ownership of a value of type T, allocated in the heap. Invoking clone on Rc produces a new pointer to the same value in the heap. When the last externally reachable Rc pointer to a given value is destroyed, the pointed-to value is also destroyed.

+

Rc can detect and deallocate cycles of Rcs through the use of Adopt. Cycle detection is opt-in and no reachability checks are performed unless graphs have adoptions.

+

§Nightly

+

CactusRef depends on several unstable Rust features and can only be built on a nightly toolchain.

+

§Maturity

+

CactusRef is experimental. This crate has several limitations:

+
  • CactusRef is nightly only.
  • Cycle detection requires unsafe code to use.

CactusRef is a non-trivial extension to std::rc::Rc and has not been proven to be safe. Although CactusRef makes a best effort to abort the program if it detects a dangling Rc, this crate may be unsound.

+

§CactusRef vs. std::rc

+

The Rc in CactusRef is derived from std::rc::Rc and CactusRef implements most of the API from std.

+

CactusRef does not implement the following APIs that are present on std::rc::Rc:

+ +

CactusRef cannot be used with unsized types like [T] or str.

+

If you do not depend on these APIs, CactusRef is a drop-in replacement for std::rc::Rc.

+

Like std::rc, Rc and Weak are not Send and are not Sync.

+

§Building an object graph

+

CactusRef smart pointers can be used to implement a tracing garbage collector local to a graph of objects. Graphs of CactusRefs are cycle-aware and can deallocate a cycle of strong references that is otherwise unreachable from the rest of the object graph, unlike std::rc::Rc.

+

CactusRef relies on proper use of Adopt::adopt_unchecked and Adopt::unadopt to maintain bookkeeping about the object graph for breaking cycles. These functions are unsafe because improperly managing the bookkeeping can cause the Rc drop implementation to deallocate cycles while they are still externally reachable. Failure to uphold Adopt’s safety invariants will result in undefined behavior, and held Rcs that point to members of the now-deallocated cycle may dangle.

+

CactusRef makes a best-effort attempt to abort the program if it detects an access to a dangling Rc.

+

§Cycle Detection

+

Rc implements Adopt to log bookkeeping entries for strong ownership links to other Rcs that may form a cycle. The ownership links tracked by these bookkeeping entries form an object graph of reachable Rcs. On drop, Rc uses these entries to conduct a reachability trace of the object graph to determine if it is part of an orphaned cycle. An orphaned cycle is a cycle where the only strong references to all nodes in the cycle come from other nodes in the cycle.

+

Cycle detection is a zero-cost abstraction. If you never use cactusref::Adopt;, drop uses the same implementation as std::rc::Rc (and leaks in the same way as std::rc::Rc if you form a cycle of strong references). The only costs you pay are the memory cost of one empty hash map used to track adoptions and an if statement to check whether these structures are empty on drop.

+

Cycle detection uses breadth-first search for traversing the object graph. The algorithm supports arbitrarily large object graphs and will not overflow the stack during the reachability trace.

+

Modules§

Structs§

  • A single-threaded reference-counting pointer. ‘Rc’ stands for ‘Reference Counted’.
  • Weak is a version of Rc that holds a non-owning reference to the managed allocation. The allocation is accessed by calling upgrade on the Weak pointer, which returns an Option<Rc<T>>.

Traits§

  • Build a graph of linked Rc smart pointers to enable busting cycles on drop.

Type Aliases§


Struct cactusref::Rc

source ·
pub struct Rc<T> { /* private fields */ }
Expand description

A single-threaded reference-counting pointer. ‘Rc’ stands for ‘Reference Counted’.

+

See the module-level documentation for more details.

+

The inherent methods of Rc are all associated functions, which means that you have to call them as e.g., Rc::get_mut(&mut value) instead of value.get_mut(). This avoids conflicts with methods of the inner type T.

+

Implementations§

source§

impl<T> Rc<T>

source

pub fn new(value: T) -> Rc<T>

Constructs a new Rc<T>.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
source

pub fn new_uninit() -> Rc<MaybeUninit<T>>

Constructs a new Rc with uninitialized contents.

+
§Examples
+
use cactusref::Rc;
+
+let mut five = Rc::<u32>::new_uninit();
+
+let five = unsafe {
+    // Deferred initialization:
+    Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+
+    five.assume_init()
+};
+
+assert_eq!(*five, 5)
+
source

pub fn pin(value: T) -> Pin<Rc<T>>

Constructs a new Pin<Rc<T>>. If T does not implement Unpin, then value will be pinned in memory and unable to be moved.

+
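No example is given here; as a minimal sketch, the pinned pointer still dereferences to the inner value:

use cactusref::Rc;
use std::pin::Pin;

let five: Pin<Rc<i32>> = Rc::pin(5);
// `Pin` passes `Deref` through to the pointee, so the value can be read.
assert_eq!(*five, 5);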
source

pub fn try_unwrap(this: Self) -> Result<T, Self>

Returns the inner value, if the Rc has exactly one strong reference.

+

Otherwise, an Err is returned with the same Rc that was +passed in.

+

This will succeed even if there are outstanding weak references.

+
§Examples
+
use cactusref::Rc;
+
+let x = Rc::new(3);
+assert_eq!(Rc::try_unwrap(x), Ok(3));
+
+let x = Rc::new(4);
+let _y = Rc::clone(&x);
+assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
+
§Errors
+

If the given Rc does not have exactly one strong reference, it is +returned in the Err variant of the returned Result.

+
source§

impl<T> Rc<MaybeUninit<T>>

source

pub unsafe fn assume_init(self) -> Rc<T>

Converts to Rc<T>.

+
§Safety
+

As with MaybeUninit::assume_init, +it is up to the caller to guarantee that the inner value +really is in an initialized state. +Calling this when the content is not yet fully initialized +causes immediate undefined behavior.

+
§Examples
+
use cactusref::Rc;
+
+let mut five = Rc::<u32>::new_uninit();
+
+let five = unsafe {
+    // Deferred initialization:
+    Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+
+    five.assume_init()
+};
+
+assert_eq!(*five, 5)
+
source§

impl<T> Rc<T>

source

pub fn into_raw(this: Self) -> *const T

Consumes the Rc, returning the wrapped pointer.

+

To avoid a memory leak the pointer must be converted back to an Rc using +Rc::from_raw.

+
§Examples
+
use cactusref::Rc;
+
+let x = Rc::new("hello".to_owned());
+let x_ptr = Rc::into_raw(x);
+assert_eq!(unsafe { &*x_ptr }, "hello");
+// Reconstruct the `Rc` to avoid a leak.
+let _ = unsafe { Rc::from_raw(x_ptr) };
+
source

pub fn as_ptr(this: &Self) -> *const T

Provides a raw pointer to the data.

+

The counts are not affected in any way and the Rc is not consumed. The pointer is valid for as long as there are strong counts in the Rc.

+
§Examples
+
use cactusref::Rc;
+
+let x = Rc::new("hello".to_owned());
+let y = Rc::clone(&x);
+let x_ptr = Rc::as_ptr(&x);
+assert_eq!(x_ptr, Rc::as_ptr(&y));
+assert_eq!(unsafe { &*x_ptr }, "hello");
+
source

pub unsafe fn from_raw(ptr: *const T) -> Self

Constructs an Rc<T> from a raw pointer.

+

The raw pointer must have been previously returned by a call to +Rc<U>::into_raw where U must have the same size +and alignment as T. This is trivially true if U is T. +Note that if U is not T but has the same size and alignment, this is +basically like transmuting references of different types. See +mem::transmute for more information on what +restrictions apply in this case.

+

The user of from_raw has to make sure a specific value of T is only +dropped once.

+

This function is unsafe because improper use may lead to memory unsafety, +even if the returned Rc<T> is never accessed.

+
§Examples
+
use cactusref::Rc;
+
+let x = Rc::new("hello".to_owned());
+let x_ptr = Rc::into_raw(x);
+
+unsafe {
+    // Convert back to an `Rc` to prevent leak.
+    let x = Rc::from_raw(x_ptr);
+    assert_eq!(&*x, "hello");
+
+    // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+}
+
+// The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+
§Safety
+

Callers must ensure that ptr points to a live Rc and was created +with a call to Rc::into_raw.

+
source

pub fn downgrade(this: &Self) -> Weak<T>

Creates a new Weak pointer to this allocation.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+let weak_five = Rc::downgrade(&five);
+
source

pub fn weak_count(this: &Self) -> usize

Gets the number of Weak pointers to this allocation.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+let _weak_five = Rc::downgrade(&five);
+
+assert_eq!(1, Rc::weak_count(&five));
+
source

pub fn strong_count(this: &Self) -> usize

Gets the number of strong (Rc) pointers to this allocation.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+let _also_five = Rc::clone(&five);
+
+assert_eq!(2, Rc::strong_count(&five));
+
source

pub unsafe fn increment_strong_count(ptr: *const T)

Increments the strong reference count on the Rc<T> associated with the +provided pointer by one.

+
§Safety
+

The pointer must have been obtained through Rc::into_raw, and the +associated Rc instance must be valid (i.e. the strong count must be at +least 1) for the duration of this method.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+unsafe {
+    let ptr = Rc::into_raw(five);
+    Rc::increment_strong_count(ptr);
+
+    let five = Rc::from_raw(ptr);
+    assert_eq!(2, Rc::strong_count(&five));
+
+    // Decrement the strong count to avoid a leak.
+    Rc::decrement_strong_count(ptr);
+}
+
source

pub unsafe fn decrement_strong_count(ptr: *const T)

Decrements the strong reference count on the Rc<T> associated with the +provided pointer by one.

+
§Safety
+

The pointer must have been obtained through Rc::into_raw, and the +associated Rc instance must be valid (i.e. the strong count must be at +least 1) when invoking this method. This method can be used to release +the final Rc and backing storage, but should not be called after +the final Rc has been released.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+unsafe {
+    let ptr = Rc::into_raw(five);
+    Rc::increment_strong_count(ptr);
+
+    let five = Rc::from_raw(ptr);
+    assert_eq!(2, Rc::strong_count(&five));
+    Rc::decrement_strong_count(ptr);
+    assert_eq!(1, Rc::strong_count(&five));
+}
+
source

pub fn get_mut(this: &mut Self) -> Option<&mut T>

Returns a mutable reference into the given Rc, if there are +no other Rc or Weak pointers to the same allocation.

+

Returns None otherwise, because it is not safe to +mutate a shared value.

+

See also make_mut, which will clone +the inner value when there are other pointers.

+
§Examples
+
use cactusref::Rc;
+
+let mut x = Rc::new(3);
+*Rc::get_mut(&mut x).unwrap() = 4;
+assert_eq!(*x, 4);
+
+let _y = Rc::clone(&x);
+assert!(Rc::get_mut(&mut x).is_none());
+
source

pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T

Returns a mutable reference into the given Rc, +without any check.

+

See also get_mut, which is safe and does appropriate checks.

+
§Safety
+

Any other Rc or Weak pointers to the same allocation must not be dereferenced +for the duration of the returned borrow. +This is trivially the case if no such pointers exist, +for example immediately after Rc::new.

+
§Examples
+
use cactusref::Rc;
+
+let mut x = Rc::new(String::new());
+unsafe {
+    Rc::get_mut_unchecked(&mut x).push_str("foo")
+}
+assert_eq!(*x, "foo");
+
source

pub fn ptr_eq(this: &Self, other: &Self) -> bool

Returns true if the two Rcs point to the same allocation +(in a vein similar to ptr::eq).

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+let same_five = Rc::clone(&five);
+let other_five = Rc::new(5);
+
+assert!(Rc::ptr_eq(&five, &same_five));
+assert!(!Rc::ptr_eq(&five, &other_five));
+
source§

impl<T: Clone> Rc<T>

source

pub fn make_mut(this: &mut Self) -> &mut T

Makes a mutable reference into the given Rc.

+

If there are other Rc pointers to the same allocation, then make_mut will +clone the inner value to a new allocation to ensure unique ownership. This is also +referred to as clone-on-write.

+

If there are no other Rc pointers to this allocation, then Weak +pointers to this allocation will be disassociated.

+

See also get_mut, which will fail rather than cloning.

+
§Examples
+
use cactusref::Rc;
+
+let mut data = Rc::new(5);
+
+*Rc::make_mut(&mut data) += 1;        // Won't clone anything
+let mut other_data = Rc::clone(&data);    // Won't clone inner data
+*Rc::make_mut(&mut data) += 1;        // Clones inner data
+*Rc::make_mut(&mut data) += 1;        // Won't clone anything
+*Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
+
+// Now `data` and `other_data` point to different allocations.
+assert_eq!(*data, 8);
+assert_eq!(*other_data, 12);
+

Weak pointers will be disassociated:

+ +
use cactusref::Rc;
+
+let mut data = Rc::new(75);
+let weak = Rc::downgrade(&data);
+
+assert!(75 == *data);
+assert!(75 == *weak.upgrade().unwrap());
+
+*Rc::make_mut(&mut data) += 1;
+
+assert!(76 == *data);
+assert!(weak.upgrade().is_none());
+

Trait Implementations§

source§

impl<T> Adopt for Rc<T>

Implementation of Adopt for Rc which enables Rcs to form a cycle +of strong references that are reaped by Rc’s Drop implementation.

+
source§

unsafe fn adopt_unchecked(this: &Self, other: &Self)

Perform bookkeeping to record that this has an owned reference to +other.

+

Adoption is a one-way link, or a directed edge in the object graph which +means “this owns other”.

+

adopt can be called multiple times for a pair of Rcs. Each call to +adopt indicates that this owns one distinct clone of other.

+

This is an associated function that needs to be used as +Rc::adopt_unchecked(...). A method would interfere with methods of the same +name on the contents of a Rc used through Deref.

+
§Safety
+

Callers must ensure that this owns a strong reference to other.

+

Callers should call unadopt when this no longer holds a strong +reference to other to avoid memory leaks, but this is not required for +soundness.

+

Calling adopt does not increment the strong count of other. Callers +must ensure that other has been cloned and stored in the T contained +by this.

+
§Examples
+

The following implements a self-referential array.

+ +
use cactusref::{Adopt, Rc};
+use std::cell::RefCell;
+
+#[derive(Default)]
+struct Array {
+    buffer: Vec<Rc<RefCell<Self>>>,
+}
+
+let array = Rc::new(RefCell::new(Array::default()));
+for _ in 0..10 {
+    let item = Rc::clone(&array);
+    unsafe {
+        Rc::adopt_unchecked(&array, &item);
+    }
+    array.borrow_mut().buffer.push(item);
+}
+let weak = Rc::downgrade(&array);
+// 1 for the array binding, 10 for the `Rc`s in buffer
+assert_eq!(Rc::strong_count(&array), 11);
+drop(array);
+assert!(weak.upgrade().is_none());
+assert_eq!(weak.weak_count(), 0);
+
source§

fn unadopt(this: &Self, other: &Self)

Perform bookkeeping to record that this has removed an owned reference +to other.

+

Adoption is a one-way link, or a directed edge in the object graph which +means “this owns other”.

+

This is an associated function that needs to be used as +Adopt::unadopt(...). A method would interfere with methods of the same +name on the contents of a Rc used through Deref.

+
§Memory Leaks
+

Failure to call this function when removing an owned Rc from this +is safe, but may result in a memory leak.

+
§Examples
+

The following implements a self-referential array.

+ +
use cactusref::{Adopt, Rc};
+use std::cell::RefCell;
+
+#[derive(Default)]
+struct Array {
+    buffer: Vec<Rc<RefCell<Self>>>,
+}
+
+let array = Rc::new(RefCell::new(Array::default()));
+for _ in 0..10 {
+    let item = Rc::clone(&array);
+    unsafe {
+        Rc::adopt_unchecked(&array, &item);
+    }
+    array.borrow_mut().buffer.push(item);
+}
+let weak = Rc::downgrade(&array);
+// 1 for the array binding, 10 for the `Rc`s in buffer
+assert_eq!(Rc::strong_count(&array), 11);
+
+let head = array.borrow_mut().buffer.pop().unwrap();
+Rc::unadopt(&array, &head);
+
+drop(head);
+assert_eq!(Rc::strong_count(&array), 10);
+drop(array);
+assert!(weak.upgrade().is_none());
+assert_eq!(weak.weak_count(), 0);
+
source§

impl<T> AsRef<T> for Rc<T>

source§

fn as_ref(&self) -> &T

Converts this type into a shared reference of the (usually inferred) input type.
source§

impl<T> Borrow<T> for Rc<T>

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> Clone for Rc<T>

source§

fn clone(&self) -> Rc<T>

Makes a clone of the Rc pointer.

+

This creates another pointer to the same allocation, increasing the +strong reference count.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+let _ = Rc::clone(&five);
+
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T: Debug> Debug for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T: Default> Default for Rc<T>

source§

fn default() -> Rc<T>

Creates a new Rc<T>, with the Default value for T.

+
§Examples
+
use cactusref::Rc;
+
+let x: Rc<i32> = Default::default();
+assert_eq!(*x, 0);
+
source§

impl<T> Deref for Rc<T>

§

type Target = T

The resulting type after dereferencing.
source§

fn deref(&self) -> &T

Dereferences the value.
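As a short sketch, Deref lets the inner value be read directly through the Rc:

use cactusref::Rc;

let five = Rc::new(5);
// `*five` goes through `Deref` to reach the inner `i32`.
assert_eq!(*five, 5);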
source§

impl<T: Display> Display for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T> Drop for Rc<T>

source§

fn drop(&mut self)

Drops the Rc.

+

This will decrement the strong reference count. If the strong reference +count reaches zero then the only other references (if any) are Weak, +so we drop the inner value.

+

If this Rc has adopted any other Rcs, drop will trace the reachable +object graph and detect if this Rc is part of an orphaned cycle. An +orphaned cycle is a cycle in which all members have no owned references +held by Rcs outside of the cycle.

+

Rcs do not pay the cost of the reachability check unless they use +Adopt::adopt_unchecked.

+
§Examples
+
use cactusref::Rc;
+
+struct Foo;
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+        println!("dropped!");
+    }
+}
+
+let foo  = Rc::new(Foo);
+let foo2 = Rc::clone(&foo);
+
+drop(foo);    // Doesn't print anything
+drop(foo2);   // Prints "dropped!"
+ +
use cactusref::{Adopt, Rc};
+
+struct Foo(u8);
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+        println!("dropped {}!", self.0);
+    }
+}
+
+let foo  = Rc::new(Foo(10));
+let foo2 = Rc::new(Foo(20));
+
+unsafe {
+    Rc::adopt_unchecked(&foo, &foo2);
+    Rc::adopt_unchecked(&foo2, &foo);
+}
+
+drop(foo);    // Doesn't print anything
+drop(foo2);   // Prints "dropped 10!" and "dropped 20!"
+
§Cycle Detection and Deallocation Algorithm
+

Rc::adopt_unchecked does explicit bookkeeping to store links to +adoptee Rcs. These links form a graph of reachable objects which are +used to detect cycles.

+

On drop, if an Rc has no links, it is dropped like a normal Rc. If +the Rc has links, Drop performs a breadth first search by traversing +the forward and backward links stored in each Rc. Deallocating cycles +requires correct use of Adopt::adopt_unchecked and Adopt::unadopt +to perform the reachability bookkeeping.

+

After determining all reachable objects, Rc reduces the graph to +objects that form a cycle by performing pairwise reachability checks. +During this step, for each object in the cycle, Rc counts the number +of refs held by other objects in the cycle.

+

Using the cycle-held references, Rc computes whether the object graph +is reachable by any non-cycle nodes by comparing strong counts.

+

If the cycle is orphaned, Rc busts all the link structures and +deallocates each object.

+
§Performance
+

Cycle detection uses breadth first search to trace the object graph. +The runtime complexity of detecting a cycle is O(links + nodes) where +links is the number of adoptions that are alive and nodes is the number +of objects in the cycle.

+

Determining whether the cycle is orphaned builds on cycle detection and iterates over all nodes in the graph to see if their strong count is greater than the number of references in the cycle. The runtime complexity of finding an orphaned cycle is O(links + nodes) where links is the number of adoptions that are alive and nodes is the number of objects in the cycle.

+
source§

impl<T> From<Box<T>> for Rc<T>

source§

fn from(v: Box<T>) -> Rc<T>

Move a boxed object to a new, reference counted, allocation.

+
§Example
+
use cactusref::Rc;
+
+let original: Box<i32> = Box::new(1);
+let shared: Rc<i32> = Rc::from(original);
+assert_eq!(1, *shared);
+
source§

impl<T> From<T> for Rc<T>

source§

fn from(t: T) -> Self

Converts a generic type T into a Rc<T>

+

The conversion allocates on the heap and moves t +from the stack into it.

+
§Example
+
use cactusref::Rc;
+
+let x = 5;
+let rc = Rc::new(5);
+
+assert_eq!(Rc::from(x), rc);
+
source§

impl<T: Hash> Hash for Rc<T>

source§

fn hash<H: Hasher>(&self, state: &mut H)

Feeds this value into the given Hasher. Read more
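No example accompanies this impl; assuming Hash delegates to the inner value as in std::rc::Rc, a sketch:

use cactusref::Rc;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

let five = Rc::new(5_i32);

let mut rc_hasher = DefaultHasher::new();
five.hash(&mut rc_hasher);

let mut int_hasher = DefaultHasher::new();
5_i32.hash(&mut int_hasher);

// Hashing the `Rc` is the same as hashing the value it points to.
assert_eq!(rc_hasher.finish(), int_hasher.finish());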
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where + H: Hasher, + Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl<T: Ord> Ord for Rc<T>

source§

fn cmp(&self, other: &Rc<T>) -> Ordering

Comparison for two Rcs.

+

The two are compared by calling cmp() on their inner values.

+
§Examples
+
use cactusref::Rc;
+use std::cmp::Ordering;
+
+let five = Rc::new(5);
+
+assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
+
1.21.0 · source§

fn max(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where + Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl<T: PartialEq> PartialEq for Rc<T>

source§

fn eq(&self, other: &Rc<T>) -> bool

Equality for two Rcs.

+

Two Rcs are equal if their inner values are equal, even if they are stored in different allocations.

+

If T also implements Eq (implying reflexivity of equality), +two Rcs that point to the same allocation are +always equal.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five == Rc::new(5));
+
source§

fn ne(&self, other: &Rc<T>) -> bool

Inequality for two Rcs.

+

Two Rcs are unequal if their inner values are unequal.

+

If T also implements Eq (implying reflexivity of equality), +two Rcs that point to the same allocation are +never unequal.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five != Rc::new(6));
+
source§

impl<T: PartialOrd> PartialOrd for Rc<T>

source§

fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering>

Partial comparison for two Rcs.

+

The two are compared by calling partial_cmp() on their inner values.

+
§Examples
+
use cactusref::Rc;
+use std::cmp::Ordering;
+
+let five = Rc::new(5);
+
+assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
+
source§

fn lt(&self, other: &Rc<T>) -> bool

Less-than comparison for two Rcs.

+

The two are compared by calling < on their inner values.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five < Rc::new(6));
+
source§

fn le(&self, other: &Rc<T>) -> bool

‘Less than or equal to’ comparison for two Rcs.

+

The two are compared by calling <= on their inner values.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five <= Rc::new(5));
+
source§

fn gt(&self, other: &Rc<T>) -> bool

Greater-than comparison for two Rcs.

+

The two are compared by calling > on their inner values.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five > Rc::new(4));
+
source§

fn ge(&self, other: &Rc<T>) -> bool

‘Greater than or equal to’ comparison for two Rcs.

+

The two are compared by calling >= on their inner values.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+assert!(five >= Rc::new(5));
+
source§

impl<T> Pointer for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T: Eq> Eq for Rc<T>

source§

impl<T> Unpin for Rc<T>

Auto Trait Implementations§

§

impl<T> Freeze for Rc<T>

§

impl<T> !RefUnwindSafe for Rc<T>

§

impl<T> !Send for Rc<T>

§

impl<T> !Sync for Rc<T>

§

impl<T> !UnwindSafe for Rc<T>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
source§

impl<T> From<!> for T

source§

fn from(t: !) -> T

Converts to this type from the input type.
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T> ToString for T
where + T: Display + ?Sized,

source§

default fn to_string(&self) -> String

Converts the given value to a String. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

Struct cactusref::Weak

source ·
pub struct Weak<T> { /* private fields */ }
Expand description

Weak is a version of Rc that holds a non-owning reference to the managed allocation. The allocation is accessed by calling upgrade on the Weak pointer, which returns an Option<Rc<T>>.

+

Since a Weak reference does not count towards ownership, it will not prevent the value stored in the allocation from being dropped, and Weak itself makes no guarantees about the value still being present. Thus it may return None when upgraded. Note however that a Weak reference does prevent the allocation itself (the backing store) from being deallocated.

+

A Weak pointer is useful for keeping a temporary reference to the allocation managed by Rc without preventing its inner value from being dropped. It is also used to prevent circular references between Rc pointers, since mutual owning references would never allow either Rc to be dropped. For example, a tree could have strong Rc pointers from parent nodes to children, and Weak pointers from children back to their parents.

+

The typical way to obtain a Weak pointer is to call Rc::downgrade.

+

Implementations§

source§

impl<T> Weak<T>

source

pub fn new() -> Weak<T>

Constructs a new Weak<T>, without allocating any memory. +Calling upgrade on the return value always gives None.

+
§Examples
+
use cactusref::Weak;
+
+let empty: Weak<i64> = Weak::new();
+assert!(empty.upgrade().is_none());
+
source§

impl<T> Weak<T>

source

pub fn as_ptr(&self) -> *const T

Returns a raw pointer to the object T pointed to by this Weak<T>.

+

The pointer is valid only if there are some strong references. The pointer may be dangling, +unaligned or even null otherwise.

+
§Examples
+
use cactusref::Rc;
+use std::ptr;
+
+let strong = Rc::new("hello".to_owned());
+let weak = Rc::downgrade(&strong);
+// Both point to the same object
+assert!(ptr::eq(&*strong, weak.as_ptr()));
+// The strong here keeps it alive, so we can still access the object.
+assert_eq!("hello", unsafe { &*weak.as_ptr() });
+
+drop(strong);
+// But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+// undefined behaviour.
+// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+
source

pub fn into_raw(self) -> *const T

Consumes the Weak<T> and turns it into a raw pointer.

+

This converts the weak pointer into a raw pointer, while still preserving the ownership of +one weak reference (the weak count is not modified by this operation). It can be turned +back into the Weak<T> with from_raw.

+

The same restrictions of accessing the target of the pointer as with +as_ptr apply.

+
§Examples
+
use cactusref::{Rc, Weak};
+
+let strong = Rc::new("hello".to_owned());
+let weak = Rc::downgrade(&strong);
+let raw = weak.into_raw();
+
+assert_eq!(1, Rc::weak_count(&strong));
+assert_eq!("hello", unsafe { &*raw });
+
+drop(unsafe { Weak::from_raw(raw) });
+assert_eq!(0, Rc::weak_count(&strong));
+
source

pub unsafe fn from_raw(ptr: *const T) -> Self

Converts a raw pointer previously created by into_raw back into Weak<T>.

+

This can be used to safely get a strong reference (by calling upgrade +later) or to deallocate the weak count by dropping the Weak<T>.

+

It takes ownership of one weak reference (with the exception of pointers created by new, +as these don’t own anything; the method still works on them).

+
§Safety
+

The pointer must have originated from the into_raw and must still own its potential +weak reference.

+

It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this +takes ownership of one weak reference currently represented as a raw pointer (the weak +count is not modified by this operation) and therefore it must be paired with a previous +call to into_raw.

+
§Examples
+
use cactusref::{Rc, Weak};
+
+let strong = Rc::new("hello".to_owned());
+
+let raw_1 = Rc::downgrade(&strong).into_raw();
+let raw_2 = Rc::downgrade(&strong).into_raw();
+
+assert_eq!(2, Rc::weak_count(&strong));
+
+assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+assert_eq!(1, Rc::weak_count(&strong));
+
+drop(strong);
+
+// Decrement the last weak count.
+assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+
source

pub fn upgrade(&self) -> Option<Rc<T>>

Attempts to upgrade the Weak pointer to an Rc, delaying +dropping of the inner value if successful.

+

Returns None if the inner value has since been dropped.

+
§Examples
+
use cactusref::Rc;
+
+let five = Rc::new(5);
+
+let weak_five = Rc::downgrade(&five);
+
+let strong_five: Option<Rc<_>> = weak_five.upgrade();
+assert!(strong_five.is_some());
+
+// Destroy all strong pointers.
+drop(strong_five);
+drop(five);
+
+assert!(weak_five.upgrade().is_none());
+
source

pub fn strong_count(&self) -> usize

Gets the number of strong (Rc) pointers pointing to this allocation.

+

If self was created using Weak::new, this will return 0.

+
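No example is given; a minimal sketch of how the strong count tracks drops:

use cactusref::Rc;

let five = Rc::new(5);
let weak_five = Rc::downgrade(&five);

assert_eq!(weak_five.strong_count(), 1);

drop(five);
// After the last `Rc` is gone, the strong count observed through the
// `Weak` is zero.
assert_eq!(weak_five.strong_count(), 0);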
source

pub fn weak_count(&self) -> usize

Gets the number of Weak pointers pointing to this allocation.

+

If no strong pointers remain, this will return zero.

+
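Similarly, a sketch for the weak count:

use cactusref::Rc;

let five = Rc::new(5);
let weak_five = Rc::downgrade(&five);

// One `Weak` pointer exists while a strong pointer is alive.
assert_eq!(weak_five.weak_count(), 1);

drop(five);
// With no strong pointers remaining, this reports zero.
assert_eq!(weak_five.weak_count(), 0);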
source

pub fn ptr_eq(&self, other: &Self) -> bool

Returns true if the two Weaks point to the same allocation (similar to +ptr::eq), or if both don’t point to any allocation +(because they were created with Weak::new()).

+
§Notes
+

Since this compares pointers, two Weak pointers created by Weak::new() will compare equal to each other, even though they don’t point to any allocation.

+
§Examples
+
use cactusref::Rc;
+
+let first_rc = Rc::new(5);
+let first = Rc::downgrade(&first_rc);
+let second = Rc::downgrade(&first_rc);
+
+assert!(first.ptr_eq(&second));
+
+let third_rc = Rc::new(5);
+let third = Rc::downgrade(&third_rc);
+
+assert!(!first.ptr_eq(&third));
+

Comparing Weak::new.

+ +
use cactusref::{Rc, Weak};
+
+let first = Weak::new();
+let second = Weak::new();
+assert!(first.ptr_eq(&second));
+
+let third_rc = Rc::new(());
+let third = Rc::downgrade(&third_rc);
+assert!(!first.ptr_eq(&third));
+

Trait Implementations§

source§

impl<T> Clone for Weak<T>

source§

fn clone(&self) -> Weak<T>

Makes a clone of the Weak pointer that points to the same allocation.

+
§Examples
+
use cactusref::{Rc, Weak};
+
+let weak_five = Rc::downgrade(&Rc::new(5));
+
+let _ = Weak::clone(&weak_five);
+
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T: Debug> Debug for Weak<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T> Default for Weak<T>

source§

fn default() -> Weak<T>

Constructs a new Weak<T>, without allocating any memory. +Calling upgrade on the return value always gives None.

+
§Examples
+
use cactusref::Weak;
+
+let empty: Weak<i64> = Default::default();
+assert!(empty.upgrade().is_none());
+
source§

impl<T> Drop for Weak<T>

source§

fn drop(&mut self)

Drops the Weak pointer.

+
§Examples
+
use cactusref::{Rc, Weak};
+
+struct Foo;
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+        println!("dropped!");
+    }
+}
+
+let foo = Rc::new(Foo);
+let weak_foo = Rc::downgrade(&foo);
+let other_weak_foo = Weak::clone(&weak_foo);
+
+drop(weak_foo);   // Doesn't print anything
+drop(foo);        // Prints "dropped!"
+
+assert!(other_weak_foo.upgrade().is_none());
+

Auto Trait Implementations§

§

impl<T> Freeze for Weak<T>

§

impl<T> !RefUnwindSafe for Weak<T>

§

impl<T> !Send for Weak<T>

§

impl<T> !Sync for Weak<T>

§

impl<T> Unpin for Weak<T>
where + T: Unpin,

§

impl<T> !UnwindSafe for Weak<T>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

Trait cactusref::Adopt

source ·
pub unsafe trait Adopt: Sealed {
+    // Required methods
+    unsafe fn adopt_unchecked(this: &Self, other: &Self);
+    fn unadopt(this: &Self, other: &Self);
+}
Expand description

Build a graph of linked Rc smart pointers to enable busting cycles on drop.

+

Calling adopt_unchecked builds an object graph which can be used to detect cycles.

+

§Safety

+

Implementors of this trait must ensure that the bookkeeping of edges in the object graph is correct, because these links are used to determine whether an Rc is reachable in Rc’s Drop implementation. Failure to properly bookkeep the object graph will result in undefined behavior; a sketch of correct usage follows the list below.

+

Undefined behavior may include:

+
  • Memory leaks.
  • Double-frees.
  • Dangling Rcs which will cause a use after free.
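As a sketch of upholding these invariants (condensed from the self-referential array example on Rc’s Adopt impl): store a clone of the adoptee in this before recording the adoption, and pair every adoption with an eventual unadopt.

use cactusref::{Adopt, Rc};
use std::cell::RefCell;

#[derive(Default)]
struct Array {
    buffer: Vec<Rc<RefCell<Self>>>,
}

let array = Rc::new(RefCell::new(Array::default()));

// Store a clone of `array` inside itself, then record the adoption.
let item = Rc::clone(&array);
unsafe {
    Rc::adopt_unchecked(&array, &item);
}
array.borrow_mut().buffer.push(item);

// When the stored clone is removed again, undo the bookkeeping.
let removed = array.borrow_mut().buffer.pop().unwrap();
Rc::unadopt(&array, &removed);
drop(removed);

assert_eq!(Rc::strong_count(&array), 1);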

Required Methods§

source

unsafe fn adopt_unchecked(this: &Self, other: &Self)

Perform bookkeeping to record that this has an owned reference to +other.

+

Adoption is a one-way link, or a directed edge in the object graph which +means “this owns other”.

+

adopt can be called multiple times for a pair of Rcs. Each call to +adopt indicates that this owns one distinct clone of other.

+

This is an associated function that needs to be used as +Adopt::adopt_unchecked(...). A method would interfere with methods of the same +name on the contents of a Rc used through Deref.

+
§Safety
+

Callers must ensure that this owns a strong reference to other.

+

Callers should call unadopt when this no longer holds a strong +reference to other to avoid memory leaks, but this is not required for +soundness.

+
source

fn unadopt(this: &Self, other: &Self)

Perform bookkeeping to record that this has removed an owned reference +to other.

+

Adoption is a one-way link, or a directed edge in the object graph which +means “this owns other”.

+

This is an associated function that needs to be used as +Adopt::unadopt(...). A method would interfere with methods of the same +name on the contents of a Rc used through Deref.

+
§Memory Leaks
+

Failure to call this function when removing an owned Rc from this +is safe, but may result in a memory leak.

+

Object Safety§

This trait is not object safe.

Implementors§

source§

impl<T> Adopt for Rc<T>

Implementation of Adopt for Rc which enables Rcs to form a cycle +of strong references that are reaped by Rc’s Drop implementation.

+

Type Alias cactusref::CactusRef

source ·
pub type CactusRef<T> = Rc<T>;
Expand description

Cactus alias for Rc.

+
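A minimal sketch using the alias:

use cactusref::CactusRef;

let five: CactusRef<i32> = CactusRef::new(5);
assert_eq!(*five, 5);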

Aliased Type§

struct CactusRef<T> { /* private fields */ }

Type Alias cactusref::CactusWeakRef

source ·
pub type CactusWeakRef<T> = Weak<T>;
Expand description

Cactus alias for Weak.

+
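And a sketch pairing the weak alias with downgrade and upgrade:

use cactusref::{CactusRef, CactusWeakRef};

let five = CactusRef::new(5);
let weak: CactusWeakRef<i32> = CactusRef::downgrade(&five);
assert!(weak.upgrade().is_some());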

Aliased Type§

struct CactusWeakRef<T> { /* private fields */ }

List of all items

Structs

Enums

Traits


Enum hashbrown::TryReserveError

source ·
pub enum TryReserveError {
+    CapacityOverflow,
+    AllocError {
+        layout: Layout,
+    },
+}
Expand description

The error type for try_reserve methods.

+
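No example is given here; as a minimal sketch (assuming hashbrown’s HashMap::try_reserve, which returns this error type):

use hashbrown::HashMap;

let mut map: HashMap<i32, i32> = HashMap::new();
// Asking for an absurd additional capacity fails with a `TryReserveError`
// instead of aborting.
assert!(map.try_reserve(usize::MAX).is_err());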

Variants§

§

CapacityOverflow

Error due to the computed capacity exceeding the collection’s maximum (usually isize::MAX bytes).

+
§

AllocError

The memory allocator returned an error

+

Fields

§layout: Layout

The layout of the allocation request that failed.

+

Trait Implementations§

source§

impl Clone for TryReserveError

source§

fn clone(&self) -> TryReserveError

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for TryReserveError

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl PartialEq for TryReserveError

source§

fn eq(&self, other: &TryReserveError) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl Eq for TryReserveError

source§

impl StructuralPartialEq for TryReserveError

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

Enum hashbrown::hash_map::DefaultHashBuilder

source ·
pub enum DefaultHashBuilder {}
Expand description

Dummy default hasher for HashMap.

+

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.

Enum hashbrown::hash_map::Entry

source ·
pub enum Entry<'a, K, V, S, A = Global>
where + A: Allocator,
{ + Occupied(OccupiedEntry<'a, K, V, S, A>), + Vacant(VacantEntry<'a, K, V, S, A>), +}
Expand description

A view into a single entry in a map, which may either be vacant or occupied.

+

This enum is constructed from the entry method on HashMap.

+

§Examples

+
use hashbrown::hash_map::{Entry, HashMap, OccupiedEntry};
+
+let mut map = HashMap::new();
+map.extend([("a", 10), ("b", 20), ("c", 30)]);
+assert_eq!(map.len(), 3);
+
+// Existing key (insert)
+let entry: Entry<_, _, _> = map.entry("a");
+let _raw_o: OccupiedEntry<_, _, _> = entry.insert(1);
+assert_eq!(map.len(), 3);
+// Nonexistent key (insert)
+map.entry("d").insert(4);
+
+// Existing key (or_insert)
+let v = map.entry("b").or_insert(2);
+assert_eq!(std::mem::replace(v, 2), 20);
+// Nonexistent key (or_insert)
+map.entry("e").or_insert(5);
+
+// Existing key (or_insert_with)
+let v = map.entry("c").or_insert_with(|| 3);
+assert_eq!(std::mem::replace(v, 3), 30);
+// Nonexistent key (or_insert_with)
+map.entry("f").or_insert_with(|| 6);
+
+println!("Our HashMap: {:?}", map);
+
+let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3), ("d", 4), ("e", 5), ("f", 6)]);
+

Variants§

§

Occupied(OccupiedEntry<'a, K, V, S, A>)

An occupied entry.

+

§Examples

+
use hashbrown::hash_map::{Entry, HashMap};
+let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].into();
+
+match map.entry("a") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(_) => { }
+}
+
§

Vacant(VacantEntry<'a, K, V, S, A>)

A vacant entry.

+

§Examples

+
use hashbrown::hash_map::{Entry, HashMap};
+let mut map: HashMap<&str, i32> = HashMap::new();
+
+match map.entry("a") {
+    Entry::Occupied(_) => unreachable!(),
+    Entry::Vacant(_) => { }
+}
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> Entry<'a, K, V, S, A>

source

pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
where + K: Hash, + S: BuildHasher,

Sets the value of the entry, and returns an OccupiedEntry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let entry = map.entry("horseyland").insert(37);
+
+assert_eq!(entry.key(), &"horseyland");
+
source

pub fn or_insert(self, default: V) -> &'a mut V
where + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting the default if empty, and returns +a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+// nonexistent key
+map.entry("poneyland").or_insert(3);
+assert_eq!(map["poneyland"], 3);
+
+// existing key
+*map.entry("poneyland").or_insert(10) *= 2;
+assert_eq!(map["poneyland"], 6);
+
source

pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
where + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting the result of the default function if empty, +and returns a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+// nonexistent key
+map.entry("poneyland").or_insert_with(|| 3);
+assert_eq!(map["poneyland"], 3);
+
+// existing key
+*map.entry("poneyland").or_insert_with(|| 10) *= 2;
+assert_eq!(map["poneyland"], 6);
+
source

pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
where + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting, if empty, the result of the default function. +This method allows for generating key-derived values for insertion by providing the default +function a reference to the key that was moved during the .entry(key) method call.

+

The reference to the moved key is provided so that cloning or copying the key is +unnecessary, unlike with .or_insert_with(|| ... ).

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, usize> = HashMap::new();
+
+// nonexistent key
+map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+assert_eq!(map["poneyland"], 9);
+
+// existing key
+*map.entry("poneyland").or_insert_with_key(|key| key.chars().count() * 10) *= 2;
+assert_eq!(map["poneyland"], 18);
+
source

pub fn key(&self) -> &K

Returns a reference to this entry’s key.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(3);
+// existing key
+assert_eq!(map.entry("poneyland").key(), &"poneyland");
+// nonexistent key
+assert_eq!(map.entry("horseland").key(), &"horseland");
+
source

pub fn and_modify<F>(self, f: F) -> Self
where + F: FnOnce(&mut V),

Provides in-place mutable access to an occupied entry before any +potential inserts into the map.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+map.entry("poneyland")
+   .and_modify(|e| { *e += 1 })
+   .or_insert(42);
+assert_eq!(map["poneyland"], 42);
+
+map.entry("poneyland")
+   .and_modify(|e| { *e += 1 })
+   .or_insert(42);
+assert_eq!(map["poneyland"], 43);
+
source

pub fn and_replace_entry_with<F>(self, f: F) -> Self
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of an occupied entry, and allows replacing or removing it based on the value of the returned option.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+let entry = map
+    .entry("poneyland")
+    .and_replace_entry_with(|_k, _v| panic!());
+
+match entry {
+    Entry::Vacant(e) => {
+        assert_eq!(e.key(), &"poneyland");
+    }
+    Entry::Occupied(_) => panic!(),
+}
+
+map.insert("poneyland", 42);
+
+let entry = map
+    .entry("poneyland")
+    .and_replace_entry_with(|k, v| {
+        assert_eq!(k, &"poneyland");
+        assert_eq!(v, 42);
+        Some(v + 1)
+    });
+
+match entry {
+    Entry::Occupied(e) => {
+        assert_eq!(e.key(), &"poneyland");
+        assert_eq!(e.get(), &43);
+    }
+    Entry::Vacant(_) => panic!(),
+}
+
+assert_eq!(map["poneyland"], 43);
+
+let entry = map
+    .entry("poneyland")
+    .and_replace_entry_with(|_k, _v| None);
+
+match entry {
+    Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
+    Entry::Occupied(_) => panic!(),
+}
+
+assert!(!map.contains_key("poneyland"));
+
source§

impl<'a, K, V: Default, S, A: Allocator> Entry<'a, K, V, S, A>

source

pub fn or_default(self) -> &'a mut V
where + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting the default value if empty, +and returns a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, Option<u32>> = HashMap::new();
+
+// nonexistent key
+map.entry("poneyland").or_default();
+assert_eq!(map["poneyland"], None);
+
+map.insert("horseland", Some(3));
+
+// existing key
+assert_eq!(map.entry("horseland").or_default(), &mut Some(3));
+

Trait Implementations§

source§

impl<K: Debug, V: Debug, S, A: Allocator> Debug for Entry<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for Entry<'a, K, V, S, A>
where + K: Freeze,

§

impl<'a, K, V, S, A> RefUnwindSafe for Entry<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for Entry<'a, K, V, S, A>
where + K: Send, + V: Send, + S: Send, + A: Send,

§

impl<'a, K, V, S, A> Sync for Entry<'a, K, V, S, A>
where + K: Sync, + V: Sync, + S: Sync, + A: Sync,

§

impl<'a, K, V, S, A> Unpin for Entry<'a, K, V, S, A>
where + K: Unpin,

§

impl<'a, K, V, S, A = Global> !UnwindSafe for Entry<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/enum.EntryRef.html b/hashbrown/hash_map/enum.EntryRef.html new file mode 100644 index 000000000..230e6a018 --- /dev/null +++ b/hashbrown/hash_map/enum.EntryRef.html @@ -0,0 +1,246 @@ +EntryRef in hashbrown::hash_map - Rust

Enum hashbrown::hash_map::EntryRef

source ·
pub enum EntryRef<'a, 'b, K, Q: ?Sized, V, S, A = Global>
where + A: Allocator,
{ + Occupied(OccupiedEntryRef<'a, 'b, K, Q, V, S, A>), + Vacant(VacantEntryRef<'a, 'b, K, Q, V, S, A>), +}
Expand description

A view into a single entry in a map, which may either be vacant or occupied, +with any borrowed form of the map’s key type.

+

This enum is constructed from the entry_ref method on HashMap.

+

Hash and Eq on the borrowed form of the map’s key type must match those for the key type. It also requires that the key can be constructed from the borrowed form through the From trait.

+

§Examples

+
use hashbrown::hash_map::{EntryRef, HashMap, OccupiedEntryRef};
+
+let mut map = HashMap::new();
+map.extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)]);
+assert_eq!(map.len(), 3);
+
+// Existing key (insert)
+let key = String::from("a");
+let entry: EntryRef<_, _, _, _> = map.entry_ref(&key);
+let _raw_o: OccupiedEntryRef<_, _, _, _> = entry.insert(1);
+assert_eq!(map.len(), 3);
+// Nonexistent key (insert)
+map.entry_ref("d").insert(4);
+
+// Existing key (or_insert)
+let v = map.entry_ref("b").or_insert(2);
+assert_eq!(std::mem::replace(v, 2), 20);
+// Nonexistent key (or_insert)
+map.entry_ref("e").or_insert(5);
+
+// Existing key (or_insert_with)
+let v = map.entry_ref("c").or_insert_with(|| 3);
+assert_eq!(std::mem::replace(v, 3), 30);
+// Nonexistent key (or_insert_with)
+map.entry_ref("f").or_insert_with(|| 6);
+
+println!("Our HashMap: {:?}", map);
+
+for (key, value) in ["a", "b", "c", "d", "e", "f"].into_iter().zip(1..=6) {
+    assert_eq!(map[key], value)
+}
+assert_eq!(map.len(), 6);
+

Variants§

§

Occupied(OccupiedEntryRef<'a, 'b, K, Q, V, S, A>)

An occupied entry.

+

§Examples

+
use hashbrown::hash_map::{EntryRef, HashMap};
+let mut map: HashMap<_, _> = [("a".to_owned(), 100), ("b".into(), 200)].into();
+
+match map.entry_ref("a") {
+    EntryRef::Vacant(_) => unreachable!(),
+    EntryRef::Occupied(_) => { }
+}
+
§

Vacant(VacantEntryRef<'a, 'b, K, Q, V, S, A>)

A vacant entry.

+

§Examples

+
use hashbrown::hash_map::{EntryRef, HashMap};
+let mut map: HashMap<String, i32> = HashMap::new();
+
+match map.entry_ref("a") {
+    EntryRef::Occupied(_) => unreachable!(),
+    EntryRef::Vacant(_) => { }
+}
+

Implementations§

source§

impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A>

source

pub fn insert(self, value: V) -> OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Hash + From<&'b Q>, + S: BuildHasher,

Sets the value of the entry, and returns an OccupiedEntryRef.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+let entry = map.entry_ref("horseyland").insert(37);
+
+assert_eq!(entry.key(), "horseyland");
+
source

pub fn or_insert(self, default: V) -> &'a mut V
where + K: Hash + From<&'b Q>, + S: BuildHasher,

Ensures a value is in the entry by inserting the default if empty, and returns +a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+
+// nonexistent key
+map.entry_ref("poneyland").or_insert(3);
+assert_eq!(map["poneyland"], 3);
+
+// existing key
+*map.entry_ref("poneyland").or_insert(10) *= 2;
+assert_eq!(map["poneyland"], 6);
+
source

pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
where + K: Hash + From<&'b Q>, + S: BuildHasher,

Ensures a value is in the entry by inserting the result of the default function if empty, +and returns a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+
+// nonexistent key
+map.entry_ref("poneyland").or_insert_with(|| 3);
+assert_eq!(map["poneyland"], 3);
+
+// existing key
+*map.entry_ref("poneyland").or_insert_with(|| 10) *= 2;
+assert_eq!(map["poneyland"], 6);
+
source

pub fn or_insert_with_key<F: FnOnce(&Q) -> V>(self, default: F) -> &'a mut V
where + K: Hash + Borrow<Q> + From<&'b Q>, + S: BuildHasher,

Ensures a value is in the entry by inserting, if empty, the result of the default function. This method allows for generating key-derived values for insertion by providing the default function access to the borrowed form of the key.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, usize> = HashMap::new();
+
+// nonexistent key
+map.entry_ref("poneyland").or_insert_with_key(|key| key.chars().count());
+assert_eq!(map["poneyland"], 9);
+
+// existing key
+*map.entry_ref("poneyland").or_insert_with_key(|key| key.chars().count() * 10) *= 2;
+assert_eq!(map["poneyland"], 18);
+
source

pub fn key(&self) -> &Q
where + K: Borrow<Q>,

Returns a reference to this entry’s key.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(3);
+// existing key
+assert_eq!(map.entry_ref("poneyland").key(), "poneyland");
+// nonexistent key
+assert_eq!(map.entry_ref("horseland").key(), "horseland");
+
source

pub fn and_modify<F>(self, f: F) -> Self
where + F: FnOnce(&mut V),

Provides in-place mutable access to an occupied entry before any +potential inserts into the map.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+
+map.entry_ref("poneyland")
+   .and_modify(|e| { *e += 1 })
+   .or_insert(42);
+assert_eq!(map["poneyland"], 42);
+
+map.entry_ref("poneyland")
+   .and_modify(|e| { *e += 1 })
+   .or_insert(42);
+assert_eq!(map["poneyland"], 43);
+
source

pub fn and_replace_entry_with<F>(self, f: F) -> Self
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of an occupied entry, and allows replacing or removing it based on the value of the returned option.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+
+let entry = map
+    .entry_ref("poneyland")
+    .and_replace_entry_with(|_k, _v| panic!());
+
+match entry {
+    EntryRef::Vacant(e) => {
+        assert_eq!(e.key(), "poneyland");
+    }
+    EntryRef::Occupied(_) => panic!(),
+}
+
+map.insert("poneyland".to_string(), 42);
+
+let entry = map
+    .entry_ref("poneyland")
+    .and_replace_entry_with(|k, v| {
+        assert_eq!(k, "poneyland");
+        assert_eq!(v, 42);
+        Some(v + 1)
+    });
+
+match entry {
+    EntryRef::Occupied(e) => {
+        assert_eq!(e.key(), "poneyland");
+        assert_eq!(e.get(), &43);
+    }
+    EntryRef::Vacant(_) => panic!(),
+}
+
+assert_eq!(map["poneyland"], 43);
+
+let entry = map
+    .entry_ref("poneyland")
+    .and_replace_entry_with(|_k, _v| None);
+
+match entry {
+    EntryRef::Vacant(e) => assert_eq!(e.key(), "poneyland"),
+    EntryRef::Occupied(_) => panic!(),
+}
+
+assert!(!map.contains_key("poneyland"));
+
source§

impl<'a, 'b, K, Q: ?Sized, V: Default, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A>

source

pub fn or_default(self) -> &'a mut V
where + K: Hash + From<&'b Q>, + S: BuildHasher,

Ensures a value is in the entry by inserting the default value if empty, +and returns a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, Option<u32>> = HashMap::new();
+
+// nonexistent key
+map.entry_ref("poneyland").or_default();
+assert_eq!(map["poneyland"], None);
+
+map.insert("horseland".to_string(), Some(3));
+
+// existing key
+assert_eq!(map.entry_ref("horseland").or_default(), &mut Some(3));
+

Trait Implementations§

source§

impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug for EntryRef<'_, '_, K, Q, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, 'b, K, Q, V, S, A> Freeze for EntryRef<'a, 'b, K, Q, V, S, A>
where + K: Freeze, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for EntryRef<'a, 'b, K, Q, V, S, A>

§

impl<'a, 'b, K, Q, V, S, A> Send for EntryRef<'a, 'b, K, Q, V, S, A>
where + K: Send, + Q: Sync + ?Sized, + V: Send, + S: Send, + A: Send,

§

impl<'a, 'b, K, Q, V, S, A> Sync for EntryRef<'a, 'b, K, Q, V, S, A>
where + K: Sync, + Q: Sync + ?Sized, + V: Sync, + S: Sync, + A: Sync,

§

impl<'a, 'b, K, Q, V, S, A> Unpin for EntryRef<'a, 'b, K, Q, V, S, A>
where + K: Unpin, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for EntryRef<'a, 'b, K, Q, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/enum.RawEntryMut.html b/hashbrown/hash_map/enum.RawEntryMut.html new file mode 100644 index 000000000..493a252ac --- /dev/null +++ b/hashbrown/hash_map/enum.RawEntryMut.html @@ -0,0 +1,227 @@ +RawEntryMut in hashbrown::hash_map - Rust

Enum hashbrown::hash_map::RawEntryMut

source ·
pub enum RawEntryMut<'a, K, V, S, A: Allocator = Global> {
+    Occupied(RawOccupiedEntryMut<'a, K, V, S, A>),
+    Vacant(RawVacantEntryMut<'a, K, V, S, A>),
+}
Expand description

A view into a single entry in a map, which may either be vacant or occupied.

+

This is a lower-level version of Entry.

+

This enum is constructed through the raw_entry_mut method on HashMap, +then calling one of the methods of that RawEntryBuilderMut.

+

§Examples

+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut, RawOccupiedEntryMut};
+
+let mut map = HashMap::new();
+map.extend([('a', 1), ('b', 2), ('c', 3)]);
+assert_eq!(map.len(), 3);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+// Existing key (insert)
+let raw: RawEntryMut<_, _, _> = map.raw_entry_mut().from_key(&'a');
+let _raw_o: RawOccupiedEntryMut<_, _, _> = raw.insert('a', 10);
+assert_eq!(map.len(), 3);
+
+// Nonexistent key (insert)
+map.raw_entry_mut().from_key(&'d').insert('d', 40);
+assert_eq!(map.len(), 4);
+
+// Existing key (or_insert)
+let hash = compute_hash(map.hasher(), &'b');
+let kv = map
+    .raw_entry_mut()
+    .from_key_hashed_nocheck(hash, &'b')
+    .or_insert('b', 20);
+assert_eq!(kv, (&mut 'b', &mut 2));
+*kv.1 = 20;
+assert_eq!(map.len(), 4);
+
+// Nonexistent key (or_insert)
+let hash = compute_hash(map.hasher(), &'e');
+let kv = map
+    .raw_entry_mut()
+    .from_key_hashed_nocheck(hash, &'e')
+    .or_insert('e', 50);
+assert_eq!(kv, (&mut 'e', &mut 50));
+assert_eq!(map.len(), 5);
+
+// Existing key (or_insert_with)
+let hash = compute_hash(map.hasher(), &'c');
+let kv = map
+    .raw_entry_mut()
+    .from_hash(hash, |q| q == &'c')
+    .or_insert_with(|| ('c', 30));
+assert_eq!(kv, (&mut 'c', &mut 3));
+*kv.1 = 30;
+assert_eq!(map.len(), 5);
+
+// Nonexistent key (or_insert_with)
+let hash = compute_hash(map.hasher(), &'f');
+let kv = map
+    .raw_entry_mut()
+    .from_hash(hash, |q| q == &'f')
+    .or_insert_with(|| ('f', 60));
+assert_eq!(kv, (&mut 'f', &mut 60));
+assert_eq!(map.len(), 6);
+
+println!("Our HashMap: {:?}", map);
+
+let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [('a', 10), ('b', 20), ('c', 30), ('d', 40), ('e', 50), ('f', 60)]);
+

Variants§

§

Occupied(RawOccupiedEntryMut<'a, K, V, S, A>)

An occupied entry.

+

§Examples

+
use hashbrown::{hash_map::RawEntryMut, HashMap};
+let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(_) => { }
+}
+
§

Vacant(RawVacantEntryMut<'a, K, V, S, A>)

A vacant entry.

+

§Examples

+
use hashbrown::{hash_map::RawEntryMut, HashMap};
+let mut map: HashMap<&str, i32> = HashMap::new();
+
+match map.raw_entry_mut().from_key("a") {
+    RawEntryMut::Occupied(_) => unreachable!(),
+    RawEntryMut::Vacant(_) => { }
+}
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> RawEntryMut<'a, K, V, S, A>

source

pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
where + K: Hash, + S: BuildHasher,

Sets the value of the entry, and returns a RawOccupiedEntryMut.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
+
+assert_eq!(entry.remove_entry(), ("horseyland", 37));
+
source

pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
where + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting the default if empty, and returns +mutable references to the key and value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
+assert_eq!(map["poneyland"], 3);
+
+*map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
+assert_eq!(map["poneyland"], 6);
+
source

pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
where + F: FnOnce() -> (K, V), + K: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting the result of the default function if empty, +and returns mutable references to the key and value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, String> = HashMap::new();
+
+map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
+    ("poneyland", "hoho".to_string())
+});
+
+assert_eq!(map["poneyland"], "hoho".to_string());
+
source

pub fn and_modify<F>(self, f: F) -> Self
where + F: FnOnce(&mut K, &mut V),

Provides in-place mutable access to an occupied entry before any +potential inserts into the map.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+map.raw_entry_mut()
+   .from_key("poneyland")
+   .and_modify(|_k, v| { *v += 1 })
+   .or_insert("poneyland", 42);
+assert_eq!(map["poneyland"], 42);
+
+map.raw_entry_mut()
+   .from_key("poneyland")
+   .and_modify(|_k, v| { *v += 1 })
+   .or_insert("poneyland", 0);
+assert_eq!(map["poneyland"], 43);
+
source

pub fn and_replace_entry_with<F>(self, f: F) -> Self
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of an occupied entry, and allows replacing or removing it based on the value of the returned option.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::RawEntryMut;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+let entry = map
+    .raw_entry_mut()
+    .from_key("poneyland")
+    .and_replace_entry_with(|_k, _v| panic!());
+
+match entry {
+    RawEntryMut::Vacant(_) => {},
+    RawEntryMut::Occupied(_) => panic!(),
+}
+
+map.insert("poneyland", 42);
+
+let entry = map
+    .raw_entry_mut()
+    .from_key("poneyland")
+    .and_replace_entry_with(|k, v| {
+        assert_eq!(k, &"poneyland");
+        assert_eq!(v, 42);
+        Some(v + 1)
+    });
+
+match entry {
+    RawEntryMut::Occupied(e) => {
+        assert_eq!(e.key(), &"poneyland");
+        assert_eq!(e.get(), &43);
+    },
+    RawEntryMut::Vacant(_) => panic!(),
+}
+
+assert_eq!(map["poneyland"], 43);
+
+let entry = map
+    .raw_entry_mut()
+    .from_key("poneyland")
+    .and_replace_entry_with(|_k, _v| None);
+
+match entry {
+    RawEntryMut::Vacant(_) => {},
+    RawEntryMut::Occupied(_) => panic!(),
+}
+
+assert!(!map.contains_key("poneyland"));
+

Trait Implementations§

source§

impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawEntryMut<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for RawEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> RefUnwindSafe for RawEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for RawEntryMut<'a, K, V, S, A>
where + K: Send, + V: Send, + S: Send + Sync, + A: Send,

§

impl<'a, K, V, S, A> Sync for RawEntryMut<'a, K, V, S, A>
where + K: Sync, + V: Sync, + S: Sync, + A: Sync,

§

impl<'a, K, V, S, A> Unpin for RawEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A = Global> !UnwindSafe for RawEntryMut<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/index.html b/hashbrown/hash_map/index.html new file mode 100644 index 000000000..34056d545 --- /dev/null +++ b/hashbrown/hash_map/index.html @@ -0,0 +1,19 @@ +hashbrown::hash_map - Rust

Module hashbrown::hash_map

source ·
Expand description

A hash map implemented with quadratic probing and SIMD lookup.

+
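The module description above carries no usage example of its own, so here is a minimal sketch of the basic workflow this module documents: creating a hashbrown::HashMap, inserting, looking up, and using the entry API. It is illustrative only and uses nothing beyond the types and methods documented on these pages.

use hashbrown::HashMap;

// Create a map and insert a few key-value pairs.
let mut map: HashMap<&str, u32> = HashMap::new();
map.insert("a", 1);
map.insert("b", 2);

// Plain lookups.
assert_eq!(map.get("a"), Some(&1));
assert_eq!(map.len(), 2);

// The entry API locates the slot once, then reads or inserts in place.
*map.entry("a").or_insert(0) += 10;
map.entry("c").or_insert(3);
assert_eq!(map["a"], 11);
assert_eq!(map["c"], 3);

// Iteration is in arbitrary order, so sort before comparing.
let mut pairs: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
pairs.sort_unstable();
assert_eq!(pairs, [("a", 11), ("b", 2), ("c", 3)]);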

Structs§

  • A draining iterator over the entries of a HashMap in arbitrary +order. The iterator element type is (K, V).
  • A draining iterator over the entries of a HashMap that satisfy the predicate f(&k, &mut v), in arbitrary order. The iterator element type is (K, V).
  • A hash map implemented with quadratic probing and SIMD lookup.
  • An owning iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (K, V).
  • An owning iterator over the keys of a HashMap in arbitrary order. +The iterator element type is K.
  • An owning iterator over the values of a HashMap in arbitrary order. +The iterator element type is V.
  • An iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a V).
  • A mutable iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a mut V).
  • An iterator over the keys of a HashMap in arbitrary order. +The iterator element type is &'a K.
  • A view into an occupied entry in a HashMap. +It is part of the Entry enum.
  • A view into an occupied entry in a HashMap. +It is part of the EntryRef enum.
  • The error returned by try_insert when the key already exists.
  • A builder for computing where in a HashMap a key-value pair would be stored.
  • A builder for computing where in a HashMap a key-value pair would be stored.
  • A view into an occupied entry in a HashMap. +It is part of the RawEntryMut enum.
  • A view into a vacant entry in a HashMap. +It is part of the RawEntryMut enum.
  • A view into a vacant entry in a HashMap. +It is part of the Entry enum.
  • A view into a vacant entry in a HashMap. +It is part of the EntryRef enum.
  • An iterator over the values of a HashMap in arbitrary order. +The iterator element type is &'a V.
  • A mutable iterator over the values of a HashMap in arbitrary order. +The iterator element type is &'a mut V.

Enums§

  • Dummy default hasher for HashMap.
  • A view into a single entry in a map, which may either be vacant or occupied; returned by the entry method on HashMap.
  • A view into a single entry in a map, which may either be vacant or occupied, with any borrowed form of the map’s key type; returned by the entry_ref method on HashMap.
  • A lower-level view into a single entry in a map, which may either be vacant or occupied; obtained through the raw_entry_mut method on HashMap. The three entry flavors are contrasted in the sketch below this list.
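As noted in the list above, the following sketch contrasts the three entry flavors summarized there. It is a minimal illustration, assuming the raw-entry API is enabled in this documentation build (it is documented above), and uses only methods shown on these pages: entry, entry_ref, and raw_entry_mut.

use hashbrown::HashMap;

let mut map: HashMap<String, u32> = HashMap::new();

// Entry: the caller hands over an owned key up front.
map.entry("a".to_string()).or_insert(1);

// EntryRef: the caller passes a borrowed form of the key; an owned key is
// built via From only if an insert actually happens.
map.entry_ref("b").or_insert(2);

// RawEntryMut: the lower-level API; the caller supplies the key (and, with
// the other constructors, the hash) at insertion time.
map.raw_entry_mut().from_key("c").or_insert("c".to_string(), 3);

assert_eq!(map["a"], 1);
assert_eq!(map["b"], 2);
assert_eq!(map["c"], 3);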
\ No newline at end of file diff --git a/hashbrown/hash_map/sidebar-items.js b/hashbrown/hash_map/sidebar-items.js new file mode 100644 index 000000000..2853fe6a4 --- /dev/null +++ b/hashbrown/hash_map/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"enum":["DefaultHashBuilder","Entry","EntryRef","RawEntryMut"],"struct":["Drain","ExtractIf","HashMap","IntoIter","IntoKeys","IntoValues","Iter","IterMut","Keys","OccupiedEntry","OccupiedEntryRef","OccupiedError","RawEntryBuilder","RawEntryBuilderMut","RawOccupiedEntryMut","RawVacantEntryMut","VacantEntry","VacantEntryRef","Values","ValuesMut"]}; \ No newline at end of file diff --git a/hashbrown/hash_map/struct.Drain.html b/hashbrown/hash_map/struct.Drain.html new file mode 100644 index 000000000..a00e55ebc --- /dev/null +++ b/hashbrown/hash_map/struct.Drain.html @@ -0,0 +1,224 @@ +Drain in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::Drain

source ·
pub struct Drain<'a, K, V, A: Allocator = Global> { /* private fields */ }
Expand description

A draining iterator over the entries of a HashMap in arbitrary +order. The iterator element type is (K, V).

+

This struct is created by the drain method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let mut map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut drain_iter = map.drain();
+let mut vec = vec![drain_iter.next(), drain_iter.next(), drain_iter.next()];
+
+// The `Drain` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]);
+
+// It is a fused iterator
+assert_eq!(drain_iter.next(), None);
+assert_eq!(drain_iter.next(), None);
+

Trait Implementations§

source§

impl<K, V, A> Debug for Drain<'_, K, V, A>
where + K: Debug, + V: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, A: Allocator> ExactSizeIterator for Drain<'_, K, V, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V, A: Allocator> Iterator for Drain<'a, K, V, A>

§

type Item = (K, V)

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<(K, V)>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V, A: Allocator> FusedIterator for Drain<'_, K, V, A>

Auto Trait Implementations§

§

impl<'a, K, V, A> Freeze for Drain<'a, K, V, A>

§

impl<'a, K, V, A> RefUnwindSafe for Drain<'a, K, V, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V, A> Send for Drain<'a, K, V, A>
where + A: Send, + K: Send, + V: Send,

§

impl<'a, K, V, A> Sync for Drain<'a, K, V, A>
where + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, A> Unpin for Drain<'a, K, V, A>

§

impl<'a, K, V, A> UnwindSafe for Drain<'a, K, V, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe, + V: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.ExtractIf.html b/hashbrown/hash_map/struct.ExtractIf.html new file mode 100644 index 000000000..b697a04b0 --- /dev/null +++ b/hashbrown/hash_map/struct.ExtractIf.html @@ -0,0 +1,230 @@ +ExtractIf in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::ExtractIf

source ·
pub struct ExtractIf<'a, K, V, F, A: Allocator = Global>
where + F: FnMut(&K, &mut V) -> bool,
{ /* private fields */ }
Expand description

A draining iterator over the entries of a HashMap that satisfy the predicate f(&k, &mut v), in arbitrary order. The iterator element type is (K, V).

+

This struct is created by the extract_if method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, &str> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut extract_if = map.extract_if(|k, _v| k % 2 != 0);
+let mut vec = vec![extract_if.next(), extract_if.next()];
+
+// The `ExtractIf` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some((1, "a")),Some((3, "c"))]);
+
+// It is a fused iterator
+assert_eq!(extract_if.next(), None);
+assert_eq!(extract_if.next(), None);
+drop(extract_if);
+
+assert_eq!(map.len(), 1);
+

Trait Implementations§

source§

impl<K, V, F, A> Iterator for ExtractIf<'_, K, V, F, A>
where + F: FnMut(&K, &mut V) -> bool, + A: Allocator,

§

type Item = (K, V)

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.0.0 · source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
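To illustrate the max_by_key/min_by family on a plain iterator (a sketch, not from the original docs):

let nums = [-3i32, 7, -10, 2];
// Largest absolute value ...
assert_eq!(nums.iter().max_by_key(|x| x.abs()), Some(&-10));
// ... and smallest value under the usual ordering.
assert_eq!(nums.iter().min_by(|a, b| a.cmp(b)), Some(&-10));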
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
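A brief sketch of unzip splitting an iterator of pairs into two collections:

let pairs = [(1, "one"), (2, "two")];
let (nums, names): (Vec<i32>, Vec<&str>) = pairs.iter().copied().unzip();
assert_eq!(nums, vec![1, 2]);
assert_eq!(names, vec!["one", "two"]);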
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
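For example, sum and product over a small slice (illustrative only):

let nums = [1u32, 2, 3, 4];
let total: u32 = nums.iter().sum();
let prod: u32 = nums.iter().product();
assert_eq!((total, prod), (10, 24));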
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
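A compact sketch of the lexicographic comparison helpers (eq, lt, le, and friends) on plain slices:

// Comparison proceeds element by element, like comparing words in a dictionary.
assert!([1, 2, 3].iter().eq([1, 2, 3].iter()));
assert!([1, 2, 3].iter().lt([1, 2, 4].iter()));
assert!([1, 2].iter().le([1, 2].iter()));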
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V, F> FusedIterator for ExtractIf<'_, K, V, F>
where + F: FnMut(&K, &mut V) -> bool,

Auto Trait Implementations§

§

impl<'a, K, V, F, A> Freeze for ExtractIf<'a, K, V, F, A>
where + F: Freeze,

§

impl<'a, K, V, F, A> RefUnwindSafe for ExtractIf<'a, K, V, F, A>

§

impl<'a, K, V, F, A> Send for ExtractIf<'a, K, V, F, A>
where + F: Send, + A: Send, + K: Send, + V: Send,

§

impl<'a, K, V, F, A> Sync for ExtractIf<'a, K, V, F, A>
where + F: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, F, A> Unpin for ExtractIf<'a, K, V, F, A>
where + F: Unpin,

§

impl<'a, K, V, F, A = Global> !UnwindSafe for ExtractIf<'a, K, V, F, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.HashMap.html b/hashbrown/hash_map/struct.HashMap.html new file mode 100644 index 000000000..b4b5c96ea --- /dev/null +++ b/hashbrown/hash_map/struct.HashMap.html @@ -0,0 +1,1340 @@ +HashMap in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::HashMap

source ·
pub struct HashMap<K, V, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A hash map implemented with quadratic probing and SIMD lookup.

+

The default hashing algorithm is currently AHash, though this is +subject to change at any point in the future. This hash function is very +fast for all types of keys, but this algorithm will typically not protect +against attacks such as HashDoS.

+

The hashing algorithm can be replaced on a per-HashMap basis using the +default, with_hasher, and with_capacity_and_hasher methods. Many +alternative algorithms are available on crates.io, such as the fnv crate.

+

It is required that the keys implement the Eq and Hash traits, although +this can frequently be achieved by using #[derive(PartialEq, Eq, Hash)]. +If you implement these yourself, it is important that the following +property holds:

+
k1 == k2 -> hash(k1) == hash(k2)
+
+

In other words, if two keys are equal, their hashes must be equal.

+

It is a logic error for a key to be modified in such a way that the key’s +hash, as determined by the Hash trait, or its equality, as determined by +the Eq trait, changes while it is in the map. This is normally only +possible through Cell, RefCell, global state, I/O, or unsafe code.

+

It is also a logic error for the Hash implementation of a key to panic. +This is generally only possible if the trait is implemented manually. If a +panic does occur then the contents of the HashMap may become corrupted and +some items may be dropped from the table.

+

§Examples

+
use hashbrown::HashMap;
+
+// Type inference lets us omit an explicit type signature (which
+// would be `HashMap<String, String>` in this example).
+let mut book_reviews = HashMap::new();
+
+// Review some books.
+book_reviews.insert(
+    "Adventures of Huckleberry Finn".to_string(),
+    "My favorite book.".to_string(),
+);
+book_reviews.insert(
+    "Grimms' Fairy Tales".to_string(),
+    "Masterpiece.".to_string(),
+);
+book_reviews.insert(
+    "Pride and Prejudice".to_string(),
+    "Very enjoyable.".to_string(),
+);
+book_reviews.insert(
+    "The Adventures of Sherlock Holmes".to_string(),
+    "Eye lyked it alot.".to_string(),
+);
+
+// Check for a specific one.
+// When collections store owned values (String), they can still be
+// queried using references (&str).
+if !book_reviews.contains_key("Les Misérables") {
+    println!("We've got {} reviews, but Les Misérables ain't one.",
+             book_reviews.len());
+}
+
+// oops, this review has a lot of spelling mistakes, let's delete it.
+book_reviews.remove("The Adventures of Sherlock Holmes");
+
+// Look up the values associated with some keys.
+let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
+for &book in &to_find {
+    match book_reviews.get(book) {
+        Some(review) => println!("{}: {}", book, review),
+        None => println!("{} is unreviewed.", book)
+    }
+}
+
+// Look up the value for a key (will panic if the key is not found).
+println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
+
+// Iterate over everything.
+for (book, review) in &book_reviews {
+    println!("{}: \"{}\"", book, review);
+}
+

HashMap also implements an Entry API, which allows +for more complex methods of getting, setting, updating and removing keys and +their values:

+ +
use hashbrown::HashMap;
+
+// type inference lets us omit an explicit type signature (which
+// would be `HashMap<&str, u8>` in this example).
+let mut player_stats = HashMap::new();
+
+fn random_stat_buff() -> u8 {
+    // could actually return some random value here - let's just return
+    // some fixed value for now
+    42
+}
+
+// insert a key only if it doesn't already exist
+player_stats.entry("health").or_insert(100);
+
+// insert a key using a function that provides a new value only if it
+// doesn't already exist
+player_stats.entry("defence").or_insert_with(random_stat_buff);
+
+// update a key, guarding against the key possibly not being set
+let stat = player_stats.entry("attack").or_insert(100);
+*stat += random_stat_buff();
+

The easiest way to use HashMap with a custom key type is to derive Eq and Hash. +We must also derive PartialEq.

+ +
use hashbrown::HashMap;
+
+#[derive(Hash, Eq, PartialEq, Debug)]
+struct Viking {
+    name: String,
+    country: String,
+}
+
+impl Viking {
+    /// Creates a new Viking.
+    fn new(name: &str, country: &str) -> Viking {
+        Viking { name: name.to_string(), country: country.to_string() }
+    }
+}
+
+// Use a HashMap to store the vikings' health points.
+let mut vikings = HashMap::new();
+
+vikings.insert(Viking::new("Einar", "Norway"), 25);
+vikings.insert(Viking::new("Olaf", "Denmark"), 24);
+vikings.insert(Viking::new("Harald", "Iceland"), 12);
+
+// Use derived implementation to print the status of the vikings.
+for (viking, health) in &vikings {
+    println!("{:?} has {} hp", viking, health);
+}
+

A HashMap with fixed list of elements can be initialized from an array:

+ +
use hashbrown::HashMap;
+
+let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
+    .into_iter().collect();
+// use the values stored in map
+

Implementations§

source§

impl<K, V, S> HashMap<K, V, S>

source

pub const fn with_hasher(hash_builder: S) -> Self

Creates an empty HashMap which will use the given hash builder to hash +keys.

+

The hash map is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+

The hash_builder passed should implement the BuildHasher trait for +the HashMap to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_hasher(s);
+assert_eq!(map.len(), 0);
+assert_eq!(map.capacity(), 0);
+
+map.insert(1, 2);
+
source

pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self

Creates an empty HashMap with the specified capacity, using hash_builder +to hash the keys.

+

The hash map will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash map will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+

The hash_builder passed should implement the BuildHasher trait for +the HashMap to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_capacity_and_hasher(10, s);
+assert_eq!(map.len(), 0);
+assert!(map.capacity() >= 10);
+
+map.insert(1, 2);
+
source§

impl<K, V, S, A: Allocator> HashMap<K, V, S, A>

source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
source

pub const fn with_hasher_in(hash_builder: S, alloc: A) -> Self

Creates an empty HashMap which will use the given hash builder to hash +keys. It will be allocated with the given allocator.

+

The hash map is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_hasher(s);
+map.insert(1, 2);
+
source

pub fn with_capacity_and_hasher_in( + capacity: usize, + hash_builder: S, + alloc: A +) -> Self

Creates an empty HashMap with the specified capacity, using hash_builder +to hash the keys. It will be allocated with the given allocator.

+

The hash map will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash map will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_capacity_and_hasher(10, s);
+map.insert(1, 2);
+
source

pub fn hasher(&self) -> &S

Returns a reference to the map’s BuildHasher.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let hasher = DefaultHashBuilder::default();
+let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
+let hasher: &DefaultHashBuilder = map.hasher();
+
source

pub fn capacity(&self) -> usize

Returns the number of elements the map can hold without reallocating.

+

This number is a lower bound; the HashMap<K, V> might be able to hold +more, but is guaranteed to be able to hold at least this many.

+
§Examples
+
use hashbrown::HashMap;
+let map: HashMap<i32, i32> = HashMap::with_capacity(100);
+assert_eq!(map.len(), 0);
+assert!(map.capacity() >= 100);
+
source

pub fn keys(&self) -> Keys<'_, K, V>

An iterator visiting all keys in arbitrary order. +The iterator element type is &'a K.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<&str> = Vec::new();
+
+for key in map.keys() {
+    println!("{}", key);
+    vec.push(*key);
+}
+
+// The `Keys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c"]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn values(&self) -> Values<'_, K, V>

An iterator visiting all values in arbitrary order. +The iterator element type is &'a V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in map.values() {
+    println!("{}", val);
+    vec.push(*val);
+}
+
+// The `Values` iterator produces values in arbitrary order, so the
+// values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [1, 2, 3]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn values_mut(&mut self) -> ValuesMut<'_, K, V>

An iterator visiting all values mutably in arbitrary order. +The iterator element type is &'a mut V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+for val in map.values_mut() {
+    *val = *val + 10;
+}
+
+assert_eq!(map.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in map.values() {
+    println!("{}", val);
+    vec.push(*val);
+}
+
+// The `Values` iterator produces values in arbitrary order, so the
+// values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [11, 12, 13]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn iter(&self) -> Iter<'_, K, V>

An iterator visiting all key-value pairs in arbitrary order. +The iterator element type is (&'a K, &'a V).

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<(&str, i32)> = Vec::new();
+
+for (key, val) in map.iter() {
+    println!("key: {} val: {}", key, val);
+    vec.push((*key, *val));
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn iter_mut(&mut self) -> IterMut<'_, K, V>

An iterator visiting all key-value pairs in arbitrary order, +with mutable references to the values. +The iterator element type is (&'a K, &'a mut V).

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+// Update all values
+for (_, val) in map.iter_mut() {
+    *val *= 2;
+}
+
+assert_eq!(map.len(), 3);
+let mut vec: Vec<(&str, i32)> = Vec::new();
+
+for (key, val) in &map {
+    println!("key: {} val: {}", key, val);
+    vec.push((*key, *val));
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 2), ("b", 4), ("c", 6)]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn len(&self) -> usize

Returns the number of elements in the map.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+assert_eq!(a.len(), 0);
+a.insert(1, "a");
+assert_eq!(a.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the map contains no elements.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+assert!(a.is_empty());
+a.insert(1, "a");
+assert!(!a.is_empty());
+
source

pub fn drain(&mut self) -> Drain<'_, K, V, A>

Clears the map, returning all key-value pairs as an iterator. Keeps the +allocated memory for reuse.

+

If the returned iterator is dropped before being fully consumed, it +drops the remaining key-value pairs. The returned iterator keeps a +mutable borrow on the map to optimize its implementation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+a.insert(2, "b");
+let capacity_before_drain = a.capacity();
+
+for (k, v) in a.drain().take(1) {
+    assert!(k == 1 || k == 2);
+    assert!(v == "a" || v == "b");
+}
+
+// As we can see, the map is empty and contains no element.
+assert!(a.is_empty() && a.len() == 0);
+// But map capacity is equal to old one.
+assert_eq!(a.capacity(), capacity_before_drain);
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+a.insert(2, "b");
+
+{   // Iterator is dropped without being consumed.
+    let d = a.drain();
+}
+
+// But the map is empty even if we do not use Drain iterator.
+assert!(a.is_empty());
+
source

pub fn retain<F>(&mut self, f: F)
where + F: FnMut(&K, &mut V) -> bool,

Retains only the elements specified by the predicate. Keeps the +allocated memory for reuse.

+

In other words, remove all pairs (k, v) such that f(&k, &mut v) returns false. +The elements are visited in unsorted (and unspecified) order.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
+assert_eq!(map.len(), 8);
+
+map.retain(|&k, _| k % 2 == 0);
+
+// We can see that the number of elements inside the map has changed.
+assert_eq!(map.len(), 4);
+
+let mut vec: Vec<(i32, i32)> = map.iter().map(|(&k, &v)| (k, v)).collect();
+vec.sort_unstable();
+assert_eq!(vec, [(0, 0), (2, 20), (4, 40), (6, 60)]);
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, K, V, F, A>
where + F: FnMut(&K, &mut V) -> bool,

Drains elements for which the given predicate returns true, +and returns an iterator over the removed items.

+

In other words, move all pairs (k, v) such that f(&k, &mut v) returns true out +into another iterator.

+

Note that extract_if lets you mutate every value in the filter closure, regardless of +whether you choose to keep or remove it.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+

Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+
+let drained: HashMap<i32, i32> = map.extract_if(|k, _v| k % 2 == 0).collect();
+
+let mut evens = drained.keys().cloned().collect::<Vec<_>>();
+let mut odds = map.keys().cloned().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+
+{   // Iterator is dropped without being consumed.
+    let d = map.extract_if(|k, _v| k % 2 != 0);
+}
+
+// ExtractIf was not exhausted, therefore no elements were drained.
+assert_eq!(map.len(), 8);
+
source

pub fn clear(&mut self)

Clears the map, removing all key-value pairs. Keeps the allocated memory +for reuse.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+let capacity_before_clear = a.capacity();
+
+a.clear();
+
+// Map is empty.
+assert!(a.is_empty());
+// But map capacity is equal to old one.
+assert_eq!(a.capacity(), capacity_before_clear);
+
source

pub fn into_keys(self) -> IntoKeys<K, V, A>

Creates a consuming iterator visiting all the keys in arbitrary order. +The map cannot be used after calling this. +The iterator element type is K.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+let mut vec: Vec<&str> = map.into_keys().collect();
+
+// The `IntoKeys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c"]);
+
source

pub fn into_values(self) -> IntoValues<K, V, A>

Creates a consuming iterator visiting all the values in arbitrary order. +The map cannot be used after calling this. +The iterator element type is V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+let mut vec: Vec<i32> = map.into_values().collect();
+
+// The `IntoValues` iterator produces values in arbitrary order, so
+// the values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [1, 2, 3]);
+
source§

impl<K, V, S, A> HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher, + A: Allocator,

source

pub fn reserve(&mut self, additional: usize)

Reserves capacity for at least additional more elements to be inserted +in the HashMap. The collection may reserve more space to avoid +frequent reallocations.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program +in case of an allocation error. Use try_reserve instead +if you want to handle memory allocation failure.

+
§Examples
+
use hashbrown::HashMap;
+let mut map: HashMap<&str, i32> = HashMap::new();
+// Map is empty and doesn't allocate memory
+assert_eq!(map.capacity(), 0);
+
+map.reserve(10);
+
+// And now map can hold at least 10 elements
+assert!(map.capacity() >= 10);
+
source

pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted +in the given HashMap<K,V>. The collection may reserve more space to avoid +frequent reallocations.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, isize> = HashMap::new();
+// Map is empty and doesn't allocate memory
+assert_eq!(map.capacity(), 0);
+
+map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+
+// And now map can hold at least 10 elements
+assert!(map.capacity() >= 10);
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned:

+ +
use hashbrown::HashMap;
+use hashbrown::TryReserveError;
+let mut map: HashMap<i32, i32> = HashMap::new();
+
+match map.try_reserve(usize::MAX) {
+    Err(error) => match error {
+        TryReserveError::CapacityOverflow => {}
+        _ => panic!("TryReserveError::AllocError ?"),
+    },
+    _ => panic!(),
+}
+
source

pub fn shrink_to_fit(&mut self)

Shrinks the capacity of the map as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+map.insert(1, 2);
+map.insert(3, 4);
+assert!(map.capacity() >= 100);
+map.shrink_to_fit();
+assert!(map.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize)

Shrinks the capacity of the map with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

This function does nothing if the current capacity is smaller than the +supplied minimum capacity.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+map.insert(1, 2);
+map.insert(3, 4);
+assert!(map.capacity() >= 100);
+map.shrink_to(10);
+assert!(map.capacity() >= 10);
+map.shrink_to(0);
+assert!(map.capacity() >= 2);
+map.shrink_to(10);
+assert!(map.capacity() >= 2);
+
source

pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A>

Gets the given key’s corresponding entry in the map for in-place manipulation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut letters = HashMap::new();
+
+for ch in "a short treatise on fungi".chars() {
+    let counter = letters.entry(ch).or_insert(0);
+    *counter += 1;
+}
+
+assert_eq!(letters[&'s'], 2);
+assert_eq!(letters[&'t'], 3);
+assert_eq!(letters[&'u'], 1);
+assert_eq!(letters.get(&'y'), None);
+
source

pub fn entry_ref<'a, 'b, Q>( + &'a mut self, + key: &'b Q +) -> EntryRef<'a, 'b, K, Q, V, S, A>
where + Q: Hash + Equivalent<K> + ?Sized,

Gets the given key’s corresponding entry by reference in the map for in-place manipulation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut words: HashMap<String, usize> = HashMap::new();
+let source = ["poneyland", "horseyland", "poneyland", "poneyland"];
+for (i, &s) in source.iter().enumerate() {
+    let counter = words.entry_ref(s).or_insert(0);
+    *counter += 1;
+}
+
+assert_eq!(words["poneyland"], 3);
+assert_eq!(words["horseyland"], 1);
+
source

pub fn get<Q>(&self, k: &Q) -> Option<&V>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns a reference to the value corresponding to the key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.get(&1), Some(&"a"));
+assert_eq!(map.get(&2), None);
+
source

pub fn get_key_value<Q>(&self, k: &Q) -> Option<(&K, &V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns the key-value pair corresponding to the supplied key.

+

The supplied key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+assert_eq!(map.get_key_value(&2), None);
+
source

pub fn get_key_value_mut<Q>(&mut self, k: &Q) -> Option<(&K, &mut V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.

+

The supplied key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+let (k, v) = map.get_key_value_mut(&1).unwrap();
+assert_eq!(k, &1);
+assert_eq!(v, &mut "a");
+*v = "b";
+assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
+assert_eq!(map.get_key_value_mut(&2), None);
+
source

pub fn contains_key<Q>(&self, k: &Q) -> bool
where + Q: Hash + Equivalent<K> + ?Sized,

Returns true if the map contains a value for the specified key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.contains_key(&1), true);
+assert_eq!(map.contains_key(&2), false);
+
source

pub fn get_mut<Q>(&mut self, k: &Q) -> Option<&mut V>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns a mutable reference to the value corresponding to the key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+if let Some(x) = map.get_mut(&1) {
+    *x = "b";
+}
+assert_eq!(map[&1], "b");
+
+assert_eq!(map.get_mut(&2), None);
+
source

pub fn get_many_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[&mut V; N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the +keys are duplicates or missing.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Library of Congress",
+]);
+assert_eq!(
+    got,
+    Some([
+        &mut 1807,
+        &mut 1800,
+    ]),
+);
+
+// Missing keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "New York Public Library",
+]);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Athenæum",
+]);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_unchecked_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[&mut V; N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, without validating that +the values are unique.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Library of Congress",
+]);
+assert_eq!(
+    got,
+    Some([
+        &mut 1807,
+        &mut 1800,
+    ]),
+);
+
+// Missing keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "New York Public Library",
+]);
+assert_eq!(got, None);
+
source

pub fn get_many_key_value_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[(&K, &mut V); N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, with immutable +references to the corresponding keys.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the keys +are duplicates or missing.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(
+    got,
+    Some([
+        (&"Bodleian Library".to_string(), &mut 1602),
+        (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ]),
+);
+// Missing keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Gewandhaus",
+]);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_key_value_unchecked_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[(&K, &mut V); N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, with immutable +references to the corresponding keys, without validating that the values are unique.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_key_value_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(
+    got,
+    Some([
+        (&"Bodleian Library".to_string(), &mut 1602),
+        (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ]),
+);
+// Missing keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Gewandhaus",
+]);
+assert_eq!(got, None);
+
source

pub fn insert(&mut self, k: K, v: V) -> Option<V>

Inserts a key-value pair into the map.

+

If the map did not have this key present, None is returned.

+

If the map did have this key present, the value is updated, and the old +value is returned. The key is not updated, though; this matters for +types that can be == without being identical. See the std::collections +module-level documentation for more.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+assert_eq!(map.insert(37, "a"), None);
+assert_eq!(map.is_empty(), false);
+
+map.insert(37, "b");
+assert_eq!(map.insert(37, "c"), Some("b"));
+assert_eq!(map[&37], "c");
+
source

pub fn insert_unique_unchecked(&mut self, k: K, v: V) -> (&K, &mut V)

Insert a key-value pair into the map without checking +if the key already exists in the map.

+

Returns a reference to the key and value just inserted.

+

This operation is safe if a key does not exist in the map.

+

However, if a key already exists in the map, the behavior is unspecified: +this operation may panic, loop forever, or any following operation on the map +may panic, loop forever, or return an arbitrary result.

+

That said, this operation (and following operations) are guaranteed to +not violate memory safety.

+

This operation is faster than regular insert, because it does not perform +lookup before insertion.

+

This operation is useful during initial population of the map. +For example, when constructing a map from another map, we know +that keys are unique.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map1 = HashMap::new();
+assert_eq!(map1.insert(1, "a"), None);
+assert_eq!(map1.insert(2, "b"), None);
+assert_eq!(map1.insert(3, "c"), None);
+assert_eq!(map1.len(), 3);
+
+let mut map2 = HashMap::new();
+
+for (key, value) in map1.into_iter() {
+    map2.insert_unique_unchecked(key, value);
+}
+
+let (key, value) = map2.insert_unique_unchecked(4, "d");
+assert_eq!(key, &4);
+assert_eq!(value, &mut "d");
+*value = "e";
+
+assert_eq!(map2[&1], "a");
+assert_eq!(map2[&2], "b");
+assert_eq!(map2[&3], "c");
+assert_eq!(map2[&4], "e");
+assert_eq!(map2.len(), 4);
+
source

pub fn try_insert( + &mut self, + key: K, + value: V +) -> Result<&mut V, OccupiedError<'_, K, V, S, A>>

Tries to insert a key-value pair into the map, and returns +a mutable reference to the value in the entry.

+
§Errors
+

If the map already had this key present, nothing is updated, and +an error containing the occupied entry and the value is returned.

+
§Examples
+

Basic usage:

+ +
use hashbrown::HashMap;
+use hashbrown::hash_map::OccupiedError;
+
+let mut map = HashMap::new();
+assert_eq!(map.try_insert(37, "a").unwrap(), &"a");
+
+match map.try_insert(37, "b") {
+    Err(OccupiedError { entry, value }) => {
+        assert_eq!(entry.key(), &37);
+        assert_eq!(entry.get(), &"a");
+        assert_eq!(value, "b");
+    }
+    _ => panic!()
+}
+
source

pub fn remove<Q>(&mut self, k: &Q) -> Option<V>
where + Q: Hash + Equivalent<K> + ?Sized,

Removes a key from the map, returning the value at the key if the key +was previously in the map. Keeps the allocated memory for reuse.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.insert(1, "a");
+
+assert_eq!(map.remove(&1), Some("a"));
+assert_eq!(map.remove(&1), None);
+
+// Now the map holds no elements
+assert!(map.is_empty());
+
source

pub fn remove_entry<Q>(&mut self, k: &Q) -> Option<(K, V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Removes a key from the map, returning the stored key and value if the +key was previously in the map. Keeps the allocated memory for reuse.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.insert(1, "a");
+
+assert_eq!(map.remove_entry(&1), Some((1, "a")));
+assert_eq!(map.remove(&1), None);
+
+// Now the map holds no elements
+assert!(map.is_empty());
+
source§

impl<K, V, S, A: Allocator> HashMap<K, V, S, A>

source

pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A>

Creates a raw entry builder for the HashMap.

+

Raw entries provide the lowest level of control for searching and +manipulating a map. They must be manually initialized with a hash and +then manually searched. After this, insertions into a vacant entry +still require an owned key to be provided.

+

Raw entries are useful for such exotic situations as:

+
  • Hash memoization
  • Deferring the creation of an owned key until it is known to be required
  • Using a search key that doesn’t work with the Borrow trait
  • Using custom comparison logic without newtype wrappers
+

Because raw entries provide much more low-level control, it’s much easier +to put the HashMap into an inconsistent state which, while memory-safe, +will cause the map to produce seemingly random results. Higher-level and +more foolproof APIs like entry should be preferred when possible.

+

In particular, the hash used to initialize the raw entry must still be +consistent with the hash of the key that is ultimately stored in the entry. +This is because implementations of HashMap may need to recompute hashes +when resizing, at which point only the keys are available.

+

Raw entries give mutable access to the keys. This must not be used +to modify how the key would compare or hash, as the map will not re-evaluate +where the key should go, meaning the keys may become “lost” if their +location does not reflect their state. For instance, if you change a key +so that the map now contains keys which compare equal, search may start +acting erratically, with two keys randomly masking each other. Implementations +are free to assume this doesn’t happen (within the limits of memory-safety).

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map = HashMap::new();
+map.extend([("a", 100), ("b", 200), ("c", 300)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+// Existing key (insert and update)
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(mut view) => {
+        assert_eq!(view.get(), &100);
+        let v = view.get_mut();
+        let new_v = (*v) * 10;
+        *v = new_v;
+        assert_eq!(view.insert(1111), 1000);
+    }
+}
+
+assert_eq!(map[&"a"], 1111);
+assert_eq!(map.len(), 3);
+
+// Existing key (take)
+let hash = compute_hash(map.hasher(), &"c");
+match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("c", 300));
+    }
+}
+assert_eq!(map.raw_entry().from_key(&"c"), None);
+assert_eq!(map.len(), 2);
+
+// Nonexistent key (insert and update)
+let key = "d";
+let hash = compute_hash(map.hasher(), &key);
+match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    RawEntryMut::Occupied(_) => unreachable!(),
+    RawEntryMut::Vacant(view) => {
+        let (k, value) = view.insert("d", 4000);
+        assert_eq!((*k, *value), ("d", 4000));
+        *value = 40000;
+    }
+}
+assert_eq!(map[&"d"], 40000);
+assert_eq!(map.len(), 3);
+
+match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("d", 40000));
+    }
+}
+assert_eq!(map.get(&"d"), None);
+assert_eq!(map.len(), 2);
+
source

pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A>

Creates a raw immutable entry builder for the HashMap.

+

Raw entries provide the lowest level of control for searching and +manipulating a map. They must be manually initialized with a hash and +then manually searched.

+

This is useful for

+
  • Hash memoization
  • Using a search key that doesn’t work with the Borrow trait
  • Using custom comparison logic without newtype wrappers
+

Unless you are in such a situation, higher-level and more foolproof APIs like +get should be preferred.

+

Immutable raw entries have very limited use; you might instead want raw_entry_mut.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.extend([("a", 100), ("b", 200), ("c", 300)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+for k in ["a", "b", "c", "d", "e", "f"] {
+    let hash = compute_hash(map.hasher(), k);
+    let v = map.get(&k).cloned();
+    let kv = v.as_ref().map(|v| (&k, v));
+
+    println!("Key: {} and value: {:?}", k, v);
+
+    assert_eq!(map.raw_entry().from_key(&k), kv);
+    assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+}
+

Trait Implementations§

source§

impl<K: Clone, V: Clone, S: Clone, A: Allocator + Clone> Clone for HashMap<K, V, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<K, V, S, A> Debug for HashMap<K, V, S, A>
where + K: Debug, + V: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, S, A> Default for HashMap<K, V, S, A>
where + S: Default, + A: Default + Allocator,

source§

fn default() -> Self

Creates an empty HashMap<K, V, S, A>, with the Default value for the hasher and allocator.

+
§Examples
+
use hashbrown::HashMap;
+use std::collections::hash_map::RandomState;
+
+// You can specify all types of HashMap, including hasher and allocator.
+// The created map is empty and doesn't allocate memory
+let map: HashMap<u32, String> = Default::default();
+assert_eq!(map.capacity(), 0);
+let map: HashMap<u32, String, RandomState> = HashMap::default();
+assert_eq!(map.capacity(), 0);
+
source§

impl<'a, K, V, S, A> Extend<&'a (K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash + Copy, + V: Copy, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = &'a (K, V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator. +The keys and values must implement the Copy trait.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let arr = [(1, 1), (2, 2)];
+let some_iter = arr.iter();
+map.extend(some_iter);
+// Values of existing keys are replaced with the new values returned from the iterator,
+// so map.get(&1) doesn't return Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(&some_vec);
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(&some_arr);
+
+let mut vec: Vec<_> = map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where + K: Eq + Hash + Copy, + V: Copy, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator. +The keys and values must implement the Copy trait.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let arr = [(1, 1), (2, 2)];
+let some_iter = arr.iter().map(|(k, v)| (k, v));
+map.extend(some_iter);
+// Values of existing keys are replaced with the new values returned from the iterator,
+// so map.get(&1) doesn't return Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(some_vec.iter().map(|(k, v)| (k, v)));
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(some_arr.iter().map(|(k, v)| (k, v)));
+
+// You can also extend from another HashMap
+let mut new_map = HashMap::new();
+new_map.extend(&map);
+assert_eq!(new_map, map);
+
+let mut vec: Vec<_> = new_map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let some_iter = [(1, 1), (2, 2)].into_iter();
+map.extend(some_iter);
+// Values of existing keys are replaced with the new values returned from the iterator,
+// so map.get(&1) doesn't return Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(some_vec);
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(some_arr);
+let old_map_len = map.len();
+
+// You can also extend from another HashMap
+let mut new_map = HashMap::new();
+new_map.extend(map);
+assert_eq!(new_map.len(), old_map_len);
+
+let mut vec: Vec<_> = new_map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
where + A: Allocator,

source§

fn from(map: HashMap<T, (), S, A>) -> Self

Converts to this type from the input type.
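A minimal sketch of how this conversion can be used, assuming hashbrown's HashSet is also in scope:

use hashbrown::{HashMap, HashSet};

let mut map: HashMap<&str, ()> = HashMap::new();
map.insert("a", ());
map.insert("b", ());

// Reuses the map's keys (and its hasher/allocator) as a set.
let set: HashSet<&str> = HashSet::from(map);
assert!(set.contains("a") && set.contains("b"));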
source§

impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher + Default, + A: Default + Allocator,

source§

fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self

Creates a value from an iterator. Read more
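For instance, collecting an iterator of pairs directly into a HashMap (a short sketch using the default hasher):

use hashbrown::HashMap;

let map: HashMap<i32, &str> = [(1, "one"), (2, "two")].into_iter().collect();
assert_eq!(map.get(&1), Some(&"one"));
assert_eq!(map.len(), 2);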
source§

impl<K, Q, V, S, A> Index<&Q> for HashMap<K, V, S, A>
where + K: Eq + Hash, + Q: Hash + Equivalent<K> + ?Sized, + S: BuildHasher, + A: Allocator,

source§

fn index(&self, key: &Q) -> &V

Returns a reference to the value corresponding to the supplied key.

+
§Panics
+

Panics if the key is not present in the HashMap.

+
§Examples
+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [("a", "One"), ("b", "Two")].into();
+
+assert_eq!(map[&"a"], "One");
+assert_eq!(map[&"b"], "Two");
+
§

type Output = V

The returned type after indexing.
source§

impl<'a, K, V, S, A: Allocator> IntoIterator for &'a HashMap<K, V, S, A>

source§

fn into_iter(self) -> Iter<'a, K, V>

Creates an iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a V).

+

Returns the same Iter struct as the iter method on HashMap.

+
§Examples
+
use hashbrown::HashMap;
+let map_one: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+let mut map_two = HashMap::new();
+
+for (key, value) in &map_one {
+    println!("Key: {}, Value: {}", key, value);
+    map_two.insert_unique_unchecked(*key, *value);
+}
+
+assert_eq!(map_one, map_two);
+
§

type Item = (&'a K, &'a V)

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, K, V>

Which kind of iterator are we turning this into?
source§

impl<'a, K, V, S, A: Allocator> IntoIterator for &'a mut HashMap<K, V, S, A>

source§

fn into_iter(self) -> IterMut<'a, K, V>

Creates an iterator over the entries of a HashMap in arbitrary order +with mutable references to the values. The iterator element type is +(&'a K, &'a mut V).

+

Returns the same IterMut struct as the iter_mut method on +HashMap.

+
§Examples
+
use hashbrown::HashMap;
+let mut map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+
+for (key, value) in &mut map {
+    println!("Key: {}, Value: {}", key, value);
+    *value *= 2;
+}
+
+let mut vec = map.iter().collect::<Vec<_>>();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(&"a", &2), (&"b", &4), (&"c", &6)]);
+
§

type Item = (&'a K, &'a mut V)

The type of the elements being iterated over.
§

type IntoIter = IterMut<'a, K, V>

Which kind of iterator are we turning this into?
source§

impl<K, V, S, A: Allocator> IntoIterator for HashMap<K, V, S, A>

source§

fn into_iter(self) -> IntoIter<K, V, A>

Creates a consuming iterator, that is, one that moves each key-value +pair out of the map in arbitrary order. The map cannot be used after +calling this.

+
§Examples
+use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+
+// Not possible with .iter()
+let mut vec: Vec<(&str, i32)> = map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so
+// the items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+
§

type Item = (K, V)

The type of the elements being iterated over.
§

type IntoIter = IntoIter<K, V, A>

Which kind of iterator are we turning this into?
source§

impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
where K: Eq + Hash, V: PartialEq, S: BuildHasher, A: Allocator,

source§

fn eq(&self, other: &Self) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
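Illustrative sketch (not part of the generated rustdoc): equality of two HashMaps compares their contents and does not depend on insertion order.

use hashbrown::HashMap;

let a: HashMap<_, _> = [(1, "a"), (2, "b")].into();
let b: HashMap<_, _> = [(2, "b"), (1, "a")].into();

// Equality depends only on the key-value pairs, not on insertion order.
assert_eq!(a, b);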
source§

impl<K, V, S, A> Eq for HashMap<K, V, S, A>
where K: Eq + Hash, V: Eq, S: BuildHasher, A: Allocator,

Auto Trait Implementations§

§

impl<K, V, S, A> Freeze for HashMap<K, V, S, A>
where + S: Freeze, + A: Freeze,

§

impl<K, V, S, A> RefUnwindSafe for HashMap<K, V, S, A>

§

impl<K, V, S, A> Send for HashMap<K, V, S, A>
where + S: Send, + A: Send, + K: Send, + V: Send,

§

impl<K, V, S, A> Sync for HashMap<K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<K, V, S, A> Unpin for HashMap<K, V, S, A>
where + S: Unpin, + A: Unpin, + K: Unpin, + V: Unpin,

§

impl<K, V, S, A> UnwindSafe for HashMap<K, V, S, A>
where + S: UnwindSafe, + A: UnwindSafe, + K: UnwindSafe, + V: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where Q: Eq + ?Sized, K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
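Illustrative sketch (not part of the generated rustdoc): this blanket impl is what lets a borrowed form of the key type be used for lookups, for example a &str against String keys.

use hashbrown::HashMap;

let mut map: HashMap<String, i32> = HashMap::new();
map.insert("answer".to_string(), 42);

// `str: Equivalent<String>` holds via this blanket impl (String: Borrow<str>),
// so a plain `&str` works for lookups without allocating a `String`.
assert_eq!(map.get("answer"), Some(&42));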
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.IntoIter.html b/hashbrown/hash_map/struct.IntoIter.html new file mode 100644 index 000000000..1977d1d70 --- /dev/null +++ b/hashbrown/hash_map/struct.IntoIter.html @@ -0,0 +1,226 @@ +IntoIter in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::IntoIter

source ·
pub struct IntoIter<K, V, A: Allocator = Global> { /* private fields */ }
Expand description

An owning iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (K, V).

+

This struct is created by the into_iter method on HashMap +(provided by the IntoIterator trait). See its documentation for more. +The map cannot be used after calling that method.

+

§Examples

+use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut iter = map.into_iter();
+let mut vec = vec![iter.next(), iter.next(), iter.next()];
+
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]);
+
+// It is a fused iterator
+assert_eq!(iter.next(), None);
+assert_eq!(iter.next(), None);
+

Trait Implementations§

source§

impl<K: Debug, V: Debug, A: Allocator> Debug for IntoIter<K, V, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
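Illustrative sketch (not part of the generated rustdoc): len reports the exact number of entries remaining in the consuming iterator.

use hashbrown::HashMap;

let map: HashMap<_, _> = [(1, "a"), (2, "b")].into();

let mut iter = map.into_iter();
assert_eq!(iter.len(), 2);
iter.next();
// Each yielded item reduces the reported length.
assert_eq!(iter.len(), 1);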
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A>

§

type Item = (K, V)

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<(K, V)>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
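Illustrative sketch (not part of the generated rustdoc): folding the consuming iterator aggregates keys and values in a single pass.

use hashbrown::HashMap;

let map: HashMap<_, _> = [(1, 10), (2, 20)].into();

// Accumulate a (key sum, value sum) pair over all entries.
let (key_sum, value_sum) = map
    .into_iter()
    .fold((0, 0), |(ks, vs), (k, v)| (ks + k, vs + v));
assert_eq!((key_sum, value_sum), (3, 30));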
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A>

Auto Trait Implementations§

§

impl<K, V, A> Freeze for IntoIter<K, V, A>
where + A: Freeze,

§

impl<K, V, A> RefUnwindSafe for IntoIter<K, V, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<K, V, A> Send for IntoIter<K, V, A>
where + A: Send, + K: Send, + V: Send,

§

impl<K, V, A> Sync for IntoIter<K, V, A>
where + A: Sync, + K: Sync, + V: Sync,

§

impl<K, V, A> Unpin for IntoIter<K, V, A>
where + A: Unpin, + K: Unpin, + V: Unpin,

§

impl<K, V, A> UnwindSafe for IntoIter<K, V, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.IntoKeys.html b/hashbrown/hash_map/struct.IntoKeys.html new file mode 100644 index 000000000..fadf6c770 --- /dev/null +++ b/hashbrown/hash_map/struct.IntoKeys.html @@ -0,0 +1,226 @@ +IntoKeys in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::IntoKeys

source ·
pub struct IntoKeys<K, V, A: Allocator = Global> { /* private fields */ }
Expand description

An owning iterator over the keys of a HashMap in arbitrary order. +The iterator element type is K.

+

This struct is created by the into_keys method on HashMap. +See its documentation for more. +The map cannot be used after calling that method.

+

§Examples

+use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut keys = map.into_keys();
+let mut vec = vec![keys.next(), keys.next(), keys.next()];
+
+// The `IntoKeys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some(1), Some(2), Some(3)]);
+
+// It is a fused iterator
+assert_eq!(keys.next(), None);
+assert_eq!(keys.next(), None);
+
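Illustrative sketch (not part of the generated rustdoc): the owned keys can be collected directly into another collection.

use hashbrown::{HashMap, HashSet};

let map: HashMap<_, _> = [(1, "a"), (2, "b")].into();

// `IntoKeys` yields owned keys, so they can be collected into any
// `FromIterator` collection, here a `HashSet`.
let keys: HashSet<i32> = map.into_keys().collect();
assert!(keys.contains(&1) && keys.contains(&2));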

Trait Implementations§

source§

impl<K: Debug, V: Debug, A: Allocator> Debug for IntoKeys<K, V, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A>

§

type Item = K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<K>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A>

Auto Trait Implementations§

§

impl<K, V, A> Freeze for IntoKeys<K, V, A>
where + A: Freeze,

§

impl<K, V, A> RefUnwindSafe for IntoKeys<K, V, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<K, V, A> Send for IntoKeys<K, V, A>
where + A: Send, + K: Send, + V: Send,

§

impl<K, V, A> Sync for IntoKeys<K, V, A>
where + A: Sync, + K: Sync, + V: Sync,

§

impl<K, V, A> Unpin for IntoKeys<K, V, A>
where + A: Unpin, + K: Unpin, + V: Unpin,

§

impl<K, V, A> UnwindSafe for IntoKeys<K, V, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.IntoValues.html b/hashbrown/hash_map/struct.IntoValues.html new file mode 100644 index 000000000..8b1a5cf7a --- /dev/null +++ b/hashbrown/hash_map/struct.IntoValues.html @@ -0,0 +1,225 @@ +IntoValues in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::IntoValues

source ·
pub struct IntoValues<K, V, A: Allocator = Global> { /* private fields */ }
Expand description

An owning iterator over the values of a HashMap in arbitrary order. +The iterator element type is V.

+

This struct is created by the into_values method on HashMap. +See its documentation for more. The map cannot be used after calling that method.

+

§Examples

+use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut values = map.into_values();
+let mut vec = vec![values.next(), values.next(), values.next()];
+
+// The `IntoValues` iterator produces values in arbitrary order, so
+// the values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some("a"), Some("b"), Some("c")]);
+
+// It is a fused iterator
+assert_eq!(values.next(), None);
+assert_eq!(values.next(), None);
+
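Illustrative sketch (not part of the generated rustdoc): the owned values are convenient for aggregations once the keys are no longer needed.

use hashbrown::HashMap;

let map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();

// Summing the owned values consumes the map.
let total: i32 = map.into_values().sum();
assert_eq!(total, 6);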

Trait Implementations§

source§

impl<K, V: Debug, A: Allocator> Debug for IntoValues<K, V, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A>

§

type Item = V

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<V>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A>
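A minimal usage sketch of driving this consuming iterator to completion (the map contents are illustrative, mirroring the examples on the other hashbrown iterator pages):

use hashbrown::HashMap;

let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();

// `into_values` consumes the map; the resulting `IntoValues` yields owned
// values in arbitrary order, so sort before asserting.
let mut values: Vec<&str> = map.into_values().collect();
values.sort_unstable();
assert_eq!(values, ["a", "b", "c"]);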

Auto Trait Implementations§

§

impl<K, V, A> Freeze for IntoValues<K, V, A>
where + A: Freeze,

§

impl<K, V, A> RefUnwindSafe for IntoValues<K, V, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<K, V, A> Send for IntoValues<K, V, A>
where + A: Send, + K: Send, + V: Send,

§

impl<K, V, A> Sync for IntoValues<K, V, A>
where + A: Sync, + K: Sync, + V: Sync,

§

impl<K, V, A> Unpin for IntoValues<K, V, A>
where + A: Unpin, + K: Unpin, + V: Unpin,

§

impl<K, V, A> UnwindSafe for IntoValues<K, V, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.Iter.html b/hashbrown/hash_map/struct.Iter.html new file mode 100644 index 000000000..c3304ae4a --- /dev/null +++ b/hashbrown/hash_map/struct.Iter.html @@ -0,0 +1,219 @@ +Iter in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::Iter

source ·
pub struct Iter<'a, K, V> { /* private fields */ }
Expand description

An iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a V).

+

This struct is created by the iter method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut iter = map.iter();
+let mut vec = vec![iter.next(), iter.next(), iter.next()];
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some((&1, &"a")), Some((&2, &"b")), Some((&3, &"c"))]);
+
+// It is a fused iterator
+assert_eq!(iter.next(), None);
+assert_eq!(iter.next(), None);
+
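Because `Iter` only borrows the map, the `Clone` and `ExactSizeIterator` implementations listed below are cheap to use. A minimal sketch, assuming a small map like the one in the example above:

use hashbrown::HashMap;

let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();

let iter = map.iter();
// Cloning `Iter` copies only the iterator state, not the entries, so the
// clone can be consumed independently of the original.
assert_eq!(iter.clone().count(), 3);
// `ExactSizeIterator::len` reports the remaining length without advancing.
assert_eq!(iter.len(), 3);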

Trait Implementations§

source§

impl<K, V> Clone for Iter<'_, K, V>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<K: Debug, V: Debug> Debug for Iter<'_, K, V>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V> ExactSizeIterator for Iter<'_, K, V>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V> Iterator for Iter<'a, K, V>

§

type Item = (&'a K, &'a V)

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<(&'a K, &'a V)>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies a function to the elements of the iterator and returns +the first non-None result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies a function to the elements of the iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V> FusedIterator for Iter<'_, K, V>

Auto Trait Implementations§

§

impl<'a, K, V> Freeze for Iter<'a, K, V>

§

impl<'a, K, V> RefUnwindSafe for Iter<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V> Send for Iter<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Sync for Iter<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Unpin for Iter<'a, K, V>

§

impl<'a, K, V> UnwindSafe for Iter<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.IterMut.html b/hashbrown/hash_map/struct.IterMut.html new file mode 100644 index 000000000..0117442fc --- /dev/null +++ b/hashbrown/hash_map/struct.IterMut.html @@ -0,0 +1,214 @@ +IterMut in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::IterMut

source ·
pub struct IterMut<'a, K, V> { /* private fields */ }
Expand description

A mutable iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a mut V).

+

This struct is created by the iter_mut method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].into();
+
+let mut iter = map.iter_mut();
+iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+
+// It is a fused iterator
+assert_eq!(iter.next(), None);
+assert_eq!(iter.next(), None);
+
+assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned());
+assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned());
+
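A minimal sketch of the more common pattern, looping over `iter_mut` to update every value in place (the keys and values here are illustrative):

use hashbrown::HashMap;

let mut map: HashMap<_, _> = [("a", 1), ("b", 2)].into();

// Each item is `(&K, &mut V)`, so the value can be updated in place while
// the key stays read-only.
for (_key, value) in map.iter_mut() {
    *value *= 10;
}

assert_eq!(map.get("a"), Some(&10));
assert_eq!(map.get("b"), Some(&20));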

Trait Implementations§

source§

impl<K, V> Debug for IterMut<'_, K, V>
where + K: Debug, + V: Debug,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V> ExactSizeIterator for IterMut<'_, K, V>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V> Iterator for IterMut<'a, K, V>

§

type Item = (&'a K, &'a mut V)

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<(&'a K, &'a mut V)>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies a function to the elements of the iterator and returns +the first non-None result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies a function to the elements of the iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V> FusedIterator for IterMut<'_, K, V>

source§

impl<K: Send, V: Send> Send for IterMut<'_, K, V>

Auto Trait Implementations§

§

impl<'a, K, V> Freeze for IterMut<'a, K, V>

§

impl<'a, K, V> RefUnwindSafe for IterMut<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V> Sync for IterMut<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Unpin for IterMut<'a, K, V>

§

impl<'a, K, V> !UnwindSafe for IterMut<'a, K, V>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.Keys.html b/hashbrown/hash_map/struct.Keys.html new file mode 100644 index 000000000..12e310c00 --- /dev/null +++ b/hashbrown/hash_map/struct.Keys.html @@ -0,0 +1,219 @@ +Keys in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::Keys

source ·
pub struct Keys<'a, K, V> { /* private fields */ }
Expand description

An iterator over the keys of a HashMap in arbitrary order. +The iterator element type is &'a K.

+

This struct is created by the keys method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut keys = map.keys();
+let mut vec = vec![keys.next(), keys.next(), keys.next()];
+
+// The `Keys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some(&1), Some(&2), Some(&3)]);
+
+// It is a fused iterator
+assert_eq!(keys.next(), None);
+assert_eq!(keys.next(), None);
+
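A minimal sketch of collecting the borrowed keys into an owned, sorted Vec (the map contents are illustrative):

use hashbrown::HashMap;

let map: HashMap<_, _> = [(3, "c"), (1, "a"), (2, "b")].into();

// `Keys` yields `&K` in arbitrary order; collect and sort for a stable view.
let mut keys: Vec<&i32> = map.keys().collect();
keys.sort_unstable();
assert_eq!(keys, [&1, &2, &3]);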

Trait Implementations§

source§

impl<K, V> Clone for Keys<'_, K, V>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<K: Debug, V> Debug for Keys<'_, K, V>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V> ExactSizeIterator for Keys<'_, K, V>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V> Iterator for Keys<'a, K, V>

§

type Item = &'a K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a K>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies a function to the elements of the iterator and returns +the first non-None result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies a function to the elements of the iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
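An editorial sketch, not part of the generated docs: two Keys iterators over the same, unmodified map visit the keys in the same (unspecified) order, so element-wise comparisons between them are well defined.
+use hashbrown::HashMap;
+
+let map: HashMap<u32, &str> = [(1, "a"), (2, "b")].into();
+// Equal sequences compare as equal (and therefore `ge` also holds);
+// a strict prefix compares as lexicographically less.
+assert!(map.keys().eq(map.keys()));
+assert!(map.keys().ge(map.keys()));
+assert!(map.keys().take(1).lt(map.keys()));
+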
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V> FusedIterator for Keys<'_, K, V>

Auto Trait Implementations§

§

impl<'a, K, V> Freeze for Keys<'a, K, V>

§

impl<'a, K, V> RefUnwindSafe for Keys<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V> Send for Keys<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Sync for Keys<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Unpin for Keys<'a, K, V>

§

impl<'a, K, V> UnwindSafe for Keys<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
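An editorial sketch, not part of the generated docs: this blanket impl is what allows Keys to be used directly in a for loop.
+use hashbrown::HashMap;
+
+let map: HashMap<&str, u32> = [("a", 1), ("b", 2)].into();
+// `for` desugars through `IntoIterator`, which for `Keys` is the identity conversion.
+let mut count = 0;
+for _key in map.keys() {
+    count += 1;
+}
+assert_eq!(count, map.len());
+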
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.OccupiedEntry.html b/hashbrown/hash_map/struct.OccupiedEntry.html new file mode 100644 index 000000000..15784cabc --- /dev/null +++ b/hashbrown/hash_map/struct.OccupiedEntry.html @@ -0,0 +1,281 @@ +OccupiedEntry in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::OccupiedEntry

source ·
pub struct OccupiedEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A view into an occupied entry in a HashMap. +It is part of the Entry enum.

+

§Examples

+
use hashbrown::hash_map::{Entry, HashMap, OccupiedEntry};
+
+let mut map = HashMap::new();
+map.extend([("a", 10), ("b", 20), ("c", 30)]);
+
+let _entry_o: OccupiedEntry<_, _, _> = map.entry("a").insert(100);
+assert_eq!(map.len(), 3);
+
+// Existing key (insert and update)
+match map.entry("a") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(mut view) => {
+        assert_eq!(view.get(), &100);
+        let v = view.get_mut();
+        *v *= 10;
+        assert_eq!(view.insert(1111), 1000);
+    }
+}
+
+assert_eq!(map[&"a"], 1111);
+assert_eq!(map.len(), 3);
+
+// Existing key (take)
+match map.entry("c") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("c", 30));
+    }
+}
+assert_eq!(map.get(&"c"), None);
+assert_eq!(map.len(), 2);
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> OccupiedEntry<'a, K, V, S, A>

source

pub fn key(&self) -> &K

Gets a reference to the key in the entry.

+
§Examples
+
use hashbrown::hash_map::{Entry, HashMap};
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(12);
+
+match map.entry("poneyland") {
+    Entry::Vacant(_) => panic!(),
+    Entry::Occupied(entry) => assert_eq!(entry.key(), &"poneyland"),
+}
+
source

pub fn remove_entry(self) -> (K, V)

Takes ownership of the key and value from the map. +Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.entry("poneyland").or_insert(12);
+
+if let Entry::Occupied(o) = map.entry("poneyland") {
+    // We delete the entry from the map.
+    assert_eq!(o.remove_entry(), ("poneyland", 12));
+}
+
+assert_eq!(map.contains_key("poneyland"), false);
+// Now the map holds no elements
+assert!(map.is_empty());
+
source

pub fn get(&self) -> &V

Gets a reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(12);
+
+match map.entry("poneyland") {
+    Entry::Vacant(_) => panic!(),
+    Entry::Occupied(entry) => assert_eq!(entry.get(), &12),
+}
+
source

pub fn get_mut(&mut self) -> &mut V

Gets a mutable reference to the value in the entry.

+

If you need a reference to the OccupiedEntry which may outlive the +destruction of the Entry value, see into_mut.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(12);
+
+assert_eq!(map["poneyland"], 12);
+if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    *o.get_mut() += 10;
+    assert_eq!(*o.get(), 22);
+
+    // We can use the same Entry multiple times.
+    *o.get_mut() += 2;
+}
+
+assert_eq!(map["poneyland"], 24);
+
source

pub fn into_mut(self) -> &'a mut V

Converts the OccupiedEntry into a mutable reference to the value in the entry +with a lifetime bound to the map itself.

+

If you need multiple references to the OccupiedEntry, see get_mut.

+
§Examples
+
use hashbrown::hash_map::{Entry, HashMap};
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(12);
+
+assert_eq!(map["poneyland"], 12);
+
+let value: &mut u32;
+match map.entry("poneyland") {
+    Entry::Occupied(entry) => value = entry.into_mut(),
+    Entry::Vacant(_) => panic!(),
+}
+*value += 10;
+
+assert_eq!(map["poneyland"], 22);
+
source

pub fn insert(&mut self, value: V) -> V

Sets the value of the entry, and returns the entry’s old value.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.entry("poneyland").or_insert(12);
+
+if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    assert_eq!(o.insert(15), 12);
+}
+
+assert_eq!(map["poneyland"], 15);
+
source

pub fn remove(self) -> V

Takes the value out of the entry, and returns it. +Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.entry("poneyland").or_insert(12);
+
+if let Entry::Occupied(o) = map.entry("poneyland") {
+    assert_eq!(o.remove(), 12);
+}
+
+assert_eq!(map.contains_key("poneyland"), false);
+// Now the map holds no elements
+assert!(map.is_empty());
+
source

pub fn replace_entry(self, value: V) -> (K, V)

Replaces the entry, returning the old key and value. The new key in the hash map will be +the key used to create this entry.

+
§Panics
+

Will panic if this OccupiedEntry was created through Entry::insert.

+
§Examples
+
 use hashbrown::hash_map::{Entry, HashMap};
+ use std::rc::Rc;
+
+ let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+ let key_one = Rc::new("Stringthing".to_string());
+ let key_two = Rc::new("Stringthing".to_string());
+
+ map.insert(key_one.clone(), 15);
+ assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+ match map.entry(key_two.clone()) {
+     Entry::Occupied(entry) => {
+         let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
+         assert!(Rc::ptr_eq(&key_one, &old_key) && old_value == 15);
+     }
+     Entry::Vacant(_) => panic!(),
+ }
+
+ assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ assert_eq!(map[&"Stringthing".to_owned()], 16);
+
source

pub fn replace_key(self) -> K

Replaces the key in the hash map with the key used to create this entry.

+
§Panics
+

Will panic if this OccupiedEntry was created through Entry::insert.

+
§Examples
+
use hashbrown::hash_map::{Entry, HashMap};
+use std::rc::Rc;
+
+let mut map: HashMap<Rc<String>, usize> = HashMap::with_capacity(6);
+let mut keys_one: Vec<Rc<String>> = Vec::with_capacity(6);
+let mut keys_two: Vec<Rc<String>> = Vec::with_capacity(6);
+
+for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+    let rc_key = Rc::new(key.to_owned());
+    keys_one.push(rc_key.clone());
+    map.insert(rc_key.clone(), value);
+    keys_two.push(Rc::new(key.to_owned()));
+}
+
+assert!(
+    keys_one.iter().all(|key| Rc::strong_count(key) == 2)
+        && keys_two.iter().all(|key| Rc::strong_count(key) == 1)
+);
+
+reclaim_memory(&mut map, &keys_two);
+
+assert!(
+    keys_one.iter().all(|key| Rc::strong_count(key) == 1)
+        && keys_two.iter().all(|key| Rc::strong_count(key) == 2)
+);
+
+fn reclaim_memory(map: &mut HashMap<Rc<String>, usize>, keys: &[Rc<String>]) {
+    for key in keys {
+        if let Entry::Occupied(entry) = map.entry(key.clone()) {
+        // Replaces the entry's key with our version of it in `keys`.
+            entry.replace_key();
+        }
+    }
+}
+
source

pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S, A>
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of +the entry and allows replacing or removing it based on the +value of the returned option.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.insert("poneyland", 42);
+
+let entry = match map.entry("poneyland") {
+    Entry::Occupied(e) => {
+        e.replace_entry_with(|k, v| {
+            assert_eq!(k, &"poneyland");
+            assert_eq!(v, 42);
+            Some(v + 1)
+        })
+    }
+    Entry::Vacant(_) => panic!(),
+};
+
+match entry {
+    Entry::Occupied(e) => {
+        assert_eq!(e.key(), &"poneyland");
+        assert_eq!(e.get(), &43);
+    }
+    Entry::Vacant(_) => panic!(),
+}
+
+assert_eq!(map["poneyland"], 43);
+
+let entry = match map.entry("poneyland") {
+    Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    Entry::Vacant(_) => panic!(),
+};
+
+match entry {
+    Entry::Vacant(e) => {
+        assert_eq!(e.key(), &"poneyland");
+    }
+    Entry::Occupied(_) => panic!(),
+}
+
+assert!(!map.contains_key("poneyland"));
+

Trait Implementations§

source§

impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
where + K: Send, + V: Send, + S: Send, + A: Send + Allocator,

source§

impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
where + K: Sync, + V: Sync, + S: Sync, + A: Sync + Allocator,

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for OccupiedEntry<'a, K, V, S, A>
where + K: Freeze,

§

impl<'a, K, V, S, A> RefUnwindSafe for OccupiedEntry<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Unpin for OccupiedEntry<'a, K, V, S, A>
where + K: Unpin,

§

impl<'a, K, V, S = DefaultHashBuilder, A = Global> !UnwindSafe for OccupiedEntry<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.OccupiedEntryRef.html b/hashbrown/hash_map/struct.OccupiedEntryRef.html new file mode 100644 index 000000000..b2cd3f12b --- /dev/null +++ b/hashbrown/hash_map/struct.OccupiedEntryRef.html @@ -0,0 +1,280 @@ +OccupiedEntryRef in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::OccupiedEntryRef

source ·
pub struct OccupiedEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into an occupied entry in a HashMap. +It is part of the EntryRef enum.

+

§Examples

+
use hashbrown::hash_map::{EntryRef, HashMap, OccupiedEntryRef};
+
+let mut map = HashMap::new();
+map.extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)]);
+
+let key = String::from("a");
+let _entry_o: OccupiedEntryRef<_, _, _, _> = map.entry_ref(&key).insert(100);
+assert_eq!(map.len(), 3);
+
+// Existing key (insert and update)
+match map.entry_ref("a") {
+    EntryRef::Vacant(_) => unreachable!(),
+    EntryRef::Occupied(mut view) => {
+        assert_eq!(view.get(), &100);
+        let v = view.get_mut();
+        *v *= 10;
+        assert_eq!(view.insert(1111), 1000);
+    }
+}
+
+assert_eq!(map["a"], 1111);
+assert_eq!(map.len(), 3);
+
+// Existing key (take)
+match map.entry_ref("c") {
+    EntryRef::Vacant(_) => unreachable!(),
+    EntryRef::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("c".to_owned(), 30));
+    }
+}
+assert_eq!(map.get("c"), None);
+assert_eq!(map.len(), 2);
+

Implementations§

source§

impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> OccupiedEntryRef<'a, 'b, K, Q, V, S, A>

source

pub fn key(&self) -> &K

Gets a reference to the key in the entry.

+
§Examples
+
use hashbrown::hash_map::{EntryRef, HashMap};
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(12);
+
+match map.entry_ref("poneyland") {
+    EntryRef::Vacant(_) => panic!(),
+    EntryRef::Occupied(entry) => assert_eq!(entry.key(), "poneyland"),
+}
+
source

pub fn remove_entry(self) -> (K, V)

Takes ownership of the key and value from the map. +Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.entry_ref("poneyland").or_insert(12);
+
+if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    // We delete the entry from the map.
+    assert_eq!(o.remove_entry(), ("poneyland".to_owned(), 12));
+}
+
+assert_eq!(map.contains_key("poneyland"), false);
+// Now the map holds no elements but its capacity equals the old one
+assert!(map.is_empty());
+
source

pub fn get(&self) -> &V

Gets a reference to the value in the entry.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(12);
+
+match map.entry_ref("poneyland") {
+    EntryRef::Vacant(_) => panic!(),
+    EntryRef::Occupied(entry) => assert_eq!(entry.get(), &12),
+}
+
source

pub fn get_mut(&mut self) -> &mut V

Gets a mutable reference to the value in the entry.

+

If you need a reference to the OccupiedEntryRef which may outlive the +destruction of the EntryRef value, see into_mut.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(12);
+
+assert_eq!(map["poneyland"], 12);
+if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    *o.get_mut() += 10;
+    assert_eq!(*o.get(), 22);
+
+    // We can use the same Entry multiple times.
+    *o.get_mut() += 2;
+}
+
+assert_eq!(map["poneyland"], 24);
+
source

pub fn into_mut(self) -> &'a mut V

Converts the OccupiedEntryRef into a mutable reference to the value in the entry +with a lifetime bound to the map itself.

+

If you need multiple references to the OccupiedEntryRef, see get_mut.

+
§Examples
+
use hashbrown::hash_map::{EntryRef, HashMap};
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(12);
+
+let value: &mut u32;
+match map.entry_ref("poneyland") {
+    EntryRef::Occupied(entry) => value = entry.into_mut(),
+    EntryRef::Vacant(_) => panic!(),
+}
+*value += 10;
+
+assert_eq!(map["poneyland"], 22);
+
source

pub fn insert(&mut self, value: V) -> V

Sets the value of the entry, and returns the entry’s old value.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.entry_ref("poneyland").or_insert(12);
+
+if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    assert_eq!(o.insert(15), 12);
+}
+
+assert_eq!(map["poneyland"], 15);
+
source

pub fn remove(self) -> V

Takes the value out of the entry, and returns it. +Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.entry_ref("poneyland").or_insert(12);
+
+if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    assert_eq!(o.remove(), 12);
+}
+
+assert_eq!(map.contains_key("poneyland"), false);
+// Now the map holds no elements but its capacity equals the old one
+assert!(map.is_empty());
+
source

pub fn replace_entry(self, value: V) -> (K, V)
where + K: From<&'b Q>,

Replaces the entry, returning the old key and value. The new key in the hash map will be +the key used to create this entry.

+
§Panics
+

Will panic if this OccupiedEntryRef was created through EntryRef::insert.

+
§Examples
+
use hashbrown::hash_map::{EntryRef, HashMap};
+use std::rc::Rc;
+
+let mut map: HashMap<Rc<str>, u32> = HashMap::new();
+let key: Rc<str> = Rc::from("Stringthing");
+
+map.insert(key.clone(), 15);
+assert_eq!(Rc::strong_count(&key), 2);
+
+match map.entry_ref("Stringthing") {
+    EntryRef::Occupied(entry) => {
+        let (old_key, old_value): (Rc<str>, u32) = entry.replace_entry(16);
+        assert!(Rc::ptr_eq(&key, &old_key) && old_value == 15);
+    }
+    EntryRef::Vacant(_) => panic!(),
+}
+
+assert_eq!(Rc::strong_count(&key), 1);
+assert_eq!(map["Stringthing"], 16);
+
source

pub fn replace_key(self) -> K
where + K: From<&'b Q>,

Replaces the key in the hash map with the key used to create this entry.

+
§Panics
+

Will panic if this OccupiedEntryRef was created through EntryRef::insert.

+
§Examples
+
use hashbrown::hash_map::{EntryRef, HashMap};
+use std::rc::Rc;
+
+let mut map: HashMap<Rc<str>, usize> = HashMap::with_capacity(6);
+let mut keys: Vec<Rc<str>> = Vec::with_capacity(6);
+
+for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+    let rc_key: Rc<str> = Rc::from(key);
+    keys.push(rc_key.clone());
+    map.insert(rc_key.clone(), value);
+}
+
+assert!(keys.iter().all(|key| Rc::strong_count(key) == 2));
+
+// It doesn't matter that we reuse the vector holding the original keys,
+// because the replacement keys are created anew from the borrowed references
+reclaim_memory(&mut map, &keys);
+
+assert!(keys.iter().all(|key| Rc::strong_count(key) == 1));
+
+fn reclaim_memory(map: &mut HashMap<Rc<str>, usize>, keys: &[Rc<str>]) {
+    for key in keys {
+        if let EntryRef::Occupied(entry) = map.entry_ref(key.as_ref()) {
+            // Replaces the entry's key with our version of it in `keys`.
+            // Replaces the entry's key with one newly created from the borrowed `&str`.
+        }
+    }
+}
+
source

pub fn replace_entry_with<F>(self, f: F) -> EntryRef<'a, 'b, K, Q, V, S, A>
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of +the entry and allows replacing or removing it based on the +value of the returned option.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+map.insert("poneyland".to_string(), 42);
+
+let entry = match map.entry_ref("poneyland") {
+    EntryRef::Occupied(e) => {
+        e.replace_entry_with(|k, v| {
+            assert_eq!(k, "poneyland");
+            assert_eq!(v, 42);
+            Some(v + 1)
+        })
+    }
+    EntryRef::Vacant(_) => panic!(),
+};
+
+match entry {
+    EntryRef::Occupied(e) => {
+        assert_eq!(e.key(), "poneyland");
+        assert_eq!(e.get(), &43);
+    }
+    EntryRef::Vacant(_) => panic!(),
+}
+
+assert_eq!(map["poneyland"], 43);
+
+let entry = match map.entry_ref("poneyland") {
+    EntryRef::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    EntryRef::Vacant(_) => panic!(),
+};
+
+match entry {
+    EntryRef::Vacant(e) => {
+        assert_eq!(e.key(), "poneyland");
+    }
+    EntryRef::Occupied(_) => panic!(),
+}
+
+assert!(!map.contains_key("poneyland"));
+

Trait Implementations§

source§

impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntryRef<'_, '_, K, Q, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, 'b, K, Q, V, S, A> Send for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Send, + Q: Sync + ?Sized, + V: Send, + S: Send, + A: Send + Allocator,

source§

impl<'a, 'b, K, Q, V, S, A> Sync for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Sync, + Q: Sync + ?Sized, + V: Sync, + S: Sync, + A: Sync + Allocator,

Auto Trait Implementations§

§

impl<'a, 'b, K, Q, V, S, A> Freeze for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Freeze, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>

§

impl<'a, 'b, K, Q, V, S, A> Unpin for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Unpin, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.OccupiedError.html b/hashbrown/hash_map/struct.OccupiedError.html new file mode 100644 index 000000000..ba0a7d81c --- /dev/null +++ b/hashbrown/hash_map/struct.OccupiedError.html @@ -0,0 +1,52 @@ +OccupiedError in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::OccupiedError

source ·
pub struct OccupiedError<'a, K, V, S, A: Allocator = Global> {
+    pub entry: OccupiedEntry<'a, K, V, S, A>,
+    pub value: V,
+}
Expand description

The error returned by try_insert when the key already exists.

+

Contains the occupied entry, and the value that was not inserted.

+

§Examples

+
use hashbrown::hash_map::{HashMap, OccupiedError};
+
+let mut map: HashMap<_, _> = [("a", 10), ("b", 20)].into();
+
+// try_insert method returns mutable reference to the value if keys are vacant,
+// but if the map did have key present, nothing is updated, and the provided
+// value is returned inside `Err(_)` variant
+match map.try_insert("a", 100) {
+    Err(OccupiedError { mut entry, value }) => {
+        assert_eq!(entry.key(), &"a");
+        assert_eq!(value, 100);
+        assert_eq!(entry.insert(100), 10)
+    }
+    _ => unreachable!(),
+}
+assert_eq!(map[&"a"], 100);
+

Fields§

§entry: OccupiedEntry<'a, K, V, S, A>

The entry in the map that was already occupied.

+
§value: V

The value which was not inserted, because the entry was already occupied.

+

Trait Implementations§

source§

impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedError<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, K: Debug, V: Debug, S, A: Allocator> Display for OccupiedError<'a, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for OccupiedError<'a, K, V, S, A>
where + V: Freeze, + K: Freeze,

§

impl<'a, K, V, S, A> RefUnwindSafe for OccupiedError<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for OccupiedError<'a, K, V, S, A>
where + V: Send, + K: Send, + S: Send, + A: Send,

§

impl<'a, K, V, S, A> Sync for OccupiedError<'a, K, V, S, A>
where + V: Sync, + K: Sync, + S: Sync, + A: Sync,

§

impl<'a, K, V, S, A> Unpin for OccupiedError<'a, K, V, S, A>
where + V: Unpin, + K: Unpin,

§

impl<'a, K, V, S, A = Global> !UnwindSafe for OccupiedError<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToString for T
where + T: Display + ?Sized,

source§

default fn to_string(&self) -> String

Converts the given value to a String. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.RawEntryBuilder.html b/hashbrown/hash_map/struct.RawEntryBuilder.html new file mode 100644 index 000000000..c4f1b4c46 --- /dev/null +++ b/hashbrown/hash_map/struct.RawEntryBuilder.html @@ -0,0 +1,101 @@ +RawEntryBuilder in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::RawEntryBuilder

source ·
pub struct RawEntryBuilder<'a, K, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A builder for computing where in a HashMap a key-value pair would be stored.

+

See the HashMap::raw_entry docs for usage examples.

+

§Examples

+
use hashbrown::hash_map::{HashMap, RawEntryBuilder};
+use core::hash::{BuildHasher, Hash};
+
+let mut map = HashMap::new();
+map.extend([(1, 10), (2, 20), (3, 30)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+for k in 0..6 {
+    let hash = compute_hash(map.hasher(), &k);
+    let v = map.get(&k).cloned();
+    let kv = v.as_ref().map(|v| (&k, v));
+
+    println!("Key: {} and value: {:?}", k, v);
+    let builder: RawEntryBuilder<_, _, _> = map.raw_entry();
+    assert_eq!(builder.from_key(&k), kv);
+    assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+}
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> RawEntryBuilder<'a, K, V, S, A>

source

pub fn from_key<Q>(self, k: &Q) -> Option<(&'a K, &'a V)>
where + S: BuildHasher, + Q: Hash + Equivalent<K> + ?Sized,

Access an immutable entry by key.

+
§Examples
+
use hashbrown::HashMap;
+
+let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+let key = "a";
+assert_eq!(map.raw_entry().from_key(&key), Some((&"a", &100)));
+
source

pub fn from_key_hashed_nocheck<Q>( + self, + hash: u64, + k: &Q +) -> Option<(&'a K, &'a V)>
where + Q: Equivalent<K> + ?Sized,

Access an immutable entry by a key and its hash.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::HashMap;
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+let key = "a";
+let hash = compute_hash(map.hasher(), &key);
+assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &key), Some((&"a", &100)));
+
source

pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
where + F: FnMut(&K) -> bool,

Access an immutable entry by hash and matching function.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::HashMap;
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+let key = "a";
+let hash = compute_hash(map.hasher(), &key);
+assert_eq!(map.raw_entry().from_hash(hash, |k| k == &key), Some((&"a", &100)));
+

Trait Implementations§

source§

impl<K, V, S, A: Allocator> Debug for RawEntryBuilder<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for RawEntryBuilder<'a, K, V, S, A>

§

impl<'a, K, V, S, A> RefUnwindSafe for RawEntryBuilder<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for RawEntryBuilder<'a, K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, S, A> Sync for RawEntryBuilder<'a, K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, S, A> Unpin for RawEntryBuilder<'a, K, V, S, A>

§

impl<'a, K, V, S, A> UnwindSafe for RawEntryBuilder<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.RawEntryBuilderMut.html b/hashbrown/hash_map/struct.RawEntryBuilderMut.html new file mode 100644 index 000000000..dfd2a6af4 --- /dev/null +++ b/hashbrown/hash_map/struct.RawEntryBuilderMut.html @@ -0,0 +1,124 @@ +RawEntryBuilderMut in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::RawEntryBuilderMut

source ·
pub struct RawEntryBuilderMut<'a, K, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A builder for computing where in a HashMap a key-value pair would be stored.

+

See the HashMap::raw_entry_mut docs for usage examples.

+

§Examples

+
use hashbrown::hash_map::{RawEntryBuilderMut, RawEntryMut::Vacant, RawEntryMut::Occupied};
+use hashbrown::HashMap;
+use core::hash::{BuildHasher, Hash};
+
+let mut map = HashMap::new();
+map.extend([(1, 11), (2, 12), (3, 13), (4, 14), (5, 15), (6, 16)]);
+assert_eq!(map.len(), 6);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let builder: RawEntryBuilderMut<_, _, _> = map.raw_entry_mut();
+
+// Existing key
+match builder.from_key(&6) {
+    Vacant(_) => unreachable!(),
+    Occupied(view) => assert_eq!(view.get(), &16),
+}
+
+for key in 0..12 {
+    let hash = compute_hash(map.hasher(), &key);
+    let value = map.get(&key).cloned();
+    let key_value = value.as_ref().map(|v| (&key, v));
+
+    println!("Key: {} and value: {:?}", key, value);
+
+    match map.raw_entry_mut().from_key(&key) {
+        Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+        Vacant(_) => assert_eq!(value, None),
+    }
+    match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) {
+        Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+        Vacant(_) => assert_eq!(value, None),
+    }
+    match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+        Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+        Vacant(_) => assert_eq!(value, None),
+    }
+}
+
+assert_eq!(map.len(), 6);
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A>

source

pub fn from_key<Q>(self, k: &Q) -> RawEntryMut<'a, K, V, S, A>
where + S: BuildHasher, + Q: Hash + Equivalent<K> + ?Sized,

Creates a RawEntryMut from the given key.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let key = "a";
+let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key(&key);
+entry.insert(key, 100);
+assert_eq!(map[&"a"], 100);
+
source

pub fn from_key_hashed_nocheck<Q>( + self, + hash: u64, + k: &Q +) -> RawEntryMut<'a, K, V, S, A>
where + Q: Equivalent<K> + ?Sized,

Creates a RawEntryMut from the given key and its hash.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let key = "a";
+let hash = compute_hash(map.hasher(), &key);
+let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key_hashed_nocheck(hash, &key);
+entry.insert(key, 100);
+assert_eq!(map[&"a"], 100);
+
source§

impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A>

source

pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A>
where + for<'b> F: FnMut(&'b K) -> bool,

Creates a RawEntryMut from the given hash and matching function.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let key = "a";
+let hash = compute_hash(map.hasher(), &key);
+let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_hash(hash, |k| k == &key);
+entry.insert(key, 100);
+assert_eq!(map[&"a"], 100);
+

Trait Implementations§

source§

impl<K, V, S, A: Allocator> Debug for RawEntryBuilderMut<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for RawEntryBuilderMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> RefUnwindSafe for RawEntryBuilderMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for RawEntryBuilderMut<'a, K, V, S, A>
where + S: Send, + A: Send, + K: Send, + V: Send,

§

impl<'a, K, V, S, A> Sync for RawEntryBuilderMut<'a, K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, S, A> Unpin for RawEntryBuilderMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A = Global> !UnwindSafe for RawEntryBuilderMut<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.RawOccupiedEntryMut.html b/hashbrown/hash_map/struct.RawOccupiedEntryMut.html new file mode 100644 index 000000000..b4988f9d9 --- /dev/null +++ b/hashbrown/hash_map/struct.RawOccupiedEntryMut.html @@ -0,0 +1,322 @@ +RawOccupiedEntryMut in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::RawOccupiedEntryMut

source ·
pub struct RawOccupiedEntryMut<'a, K, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into an occupied entry in a HashMap. +It is part of the RawEntryMut enum.

+

§Examples

+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut, RawOccupiedEntryMut};
+
+let mut map = HashMap::new();
+map.extend([("a", 10), ("b", 20), ("c", 30)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let _raw_o: RawOccupiedEntryMut<_, _, _> = map.raw_entry_mut().from_key(&"a").insert("a", 100);
+assert_eq!(map.len(), 3);
+
+// Existing key (insert and update)
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(mut view) => {
+        assert_eq!(view.get(), &100);
+        let v = view.get_mut();
+        let new_v = (*v) * 10;
+        *v = new_v;
+        assert_eq!(view.insert(1111), 1000);
+    }
+}
+
+assert_eq!(map[&"a"], 1111);
+assert_eq!(map.len(), 3);
+
+// Existing key (take)
+let hash = compute_hash(map.hasher(), &"c");
+match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("c", 30));
+    }
+}
+assert_eq!(map.raw_entry().from_key(&"c"), None);
+assert_eq!(map.len(), 2);
+
+let hash = compute_hash(map.hasher(), &"b");
+match map.raw_entry_mut().from_hash(hash, |q| *q == "b") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("b", 20));
+    }
+}
+assert_eq!(map.get(&"b"), None);
+assert_eq!(map.len(), 1);
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> RawOccupiedEntryMut<'a, K, V, S, A>

source

pub fn key(&self) -> &K

Gets a reference to the key in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => assert_eq!(o.key(), &"a")
+}
+
source

pub fn key_mut(&mut self) -> &mut K

Gets a mutable reference to the key in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+use std::rc::Rc;
+
+let key_one = Rc::new("a");
+let key_two = Rc::new("a");
+
+let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+map.insert(key_one.clone(), 10);
+
+assert_eq!(map[&key_one], 10);
+assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+match map.raw_entry_mut().from_key(&key_one) {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(mut o) => {
+        *o.key_mut() = key_two.clone();
+    }
+}
+assert_eq!(map[&key_two], 10);
+assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+
source

pub fn into_key(self) -> &'a mut K

Converts the entry into a mutable reference to the key in the entry +with a lifetime bound to the map itself.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+use std::rc::Rc;
+
+let key_one = Rc::new("a");
+let key_two = Rc::new("a");
+
+let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+map.insert(key_one.clone(), 10);
+
+assert_eq!(map[&key_one], 10);
+assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+let inside_key: &mut Rc<&str>;
+
+match map.raw_entry_mut().from_key(&key_one) {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => inside_key = o.into_key(),
+}
+*inside_key = key_two.clone();
+
+assert_eq!(map[&key_two], 10);
+assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+
source

pub fn get(&self) -> &V

Gets a reference to the value in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => assert_eq!(o.get(), &100),
+}
+
source

pub fn into_mut(self) -> &'a mut V

Converts the OccupiedEntry into a mutable reference to the value in the entry +with a lifetime bound to the map itself.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+let value: &mut u32;
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => value = o.into_mut(),
+}
+*value += 900;
+
+assert_eq!(map[&"a"], 1000);
+
source

pub fn get_mut(&mut self) -> &mut V

Gets a mutable reference to the value in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(mut o) => *o.get_mut() += 900,
+}
+
+assert_eq!(map[&"a"], 1000);
+
source

pub fn get_key_value(&self) -> (&K, &V)

Gets a reference to the key and value in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => assert_eq!(o.get_key_value(), (&"a", &100)),
+}
+
source

pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V)

Gets a mutable reference to the key and value in the entry.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+use std::rc::Rc;
+
+let key_one = Rc::new("a");
+let key_two = Rc::new("a");
+
+let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+map.insert(key_one.clone(), 10);
+
+assert_eq!(map[&key_one], 10);
+assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+match map.raw_entry_mut().from_key(&key_one) {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(mut o) => {
+        let (inside_key, inside_value) = o.get_key_value_mut();
+        *inside_key = key_two.clone();
+        *inside_value = 100;
+    }
+}
+assert_eq!(map[&key_two], 100);
+assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+
source

pub fn into_key_value(self) -> (&'a mut K, &'a mut V)

Converts the OccupiedEntry into a mutable reference to the key and value in the entry +with a lifetime bound to the map itself.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+use std::rc::Rc;
+
+let key_one = Rc::new("a");
+let key_two = Rc::new("a");
+
+let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+map.insert(key_one.clone(), 10);
+
+assert_eq!(map[&key_one], 10);
+assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+let inside_key: &mut Rc<&str>;
+let inside_value: &mut u32;
+match map.raw_entry_mut().from_key(&key_one) {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => {
+        let tuple = o.into_key_value();
+        inside_key = tuple.0;
+        inside_value = tuple.1;
+    }
+}
+*inside_key = key_two.clone();
+*inside_value = 100;
+assert_eq!(map[&key_two], 100);
+assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+
source

pub fn insert(&mut self, value: V) -> V

Sets the value of the entry, and returns the entry’s old value.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(mut o) => assert_eq!(o.insert(1000), 100),
+}
+
+assert_eq!(map[&"a"], 1000);
+
source

pub fn insert_key(&mut self, key: K) -> K

Sets the key of the entry, and returns the entry’s old key.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+use std::rc::Rc;
+
+let key_one = Rc::new("a");
+let key_two = Rc::new("a");
+
+let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+map.insert(key_one.clone(), 10);
+
+assert_eq!(map[&key_one], 10);
+assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+match map.raw_entry_mut().from_key(&key_one) {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(mut o) => {
+        let old_key = o.insert_key(key_two.clone());
+        assert!(Rc::ptr_eq(&old_key, &key_one));
+    }
+}
+assert_eq!(map[&key_two], 10);
+assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+
source

pub fn remove(self) -> V

Takes the value out of the entry, and returns it.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => assert_eq!(o.remove(), 100),
+}
+assert_eq!(map.get(&"a"), None);
+
source

pub fn remove_entry(self) -> (K, V)

Takes ownership of the key and value from the map.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => assert_eq!(o.remove_entry(), ("a", 100)),
+}
+assert_eq!(map.get(&"a"), None);
+
source

pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S, A>
where + F: FnOnce(&K, V) -> Option<V>,

Provides shared access to the key and owned access to the value of +the entry and allows replacing or removing it based on the +value of the returned option.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+let raw_entry = match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+        assert_eq!(k, &"a");
+        assert_eq!(v, 100);
+        Some(v + 900)
+    }),
+};
+let raw_entry = match raw_entry {
+    RawEntryMut::Vacant(_) => panic!(),
+    RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+        assert_eq!(k, &"a");
+        assert_eq!(v, 1000);
+        None
+    }),
+};
+match raw_entry {
+    RawEntryMut::Vacant(_) => { },
+    RawEntryMut::Occupied(_) => panic!(),
+};
+assert_eq!(map.get(&"a"), None);
+

Trait Implementations§

source§

impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawOccupiedEntryMut<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, S, A> Send for RawOccupiedEntryMut<'_, K, V, S, A>
where + K: Send, + V: Send, + S: Send, + A: Send + Allocator,

source§

impl<K, V, S, A> Sync for RawOccupiedEntryMut<'_, K, V, S, A>
where + K: Sync, + V: Sync, + S: Sync, + A: Sync + Allocator,

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for RawOccupiedEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> RefUnwindSafe for RawOccupiedEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Unpin for RawOccupiedEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A = Global> !UnwindSafe for RawOccupiedEntryMut<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.RawVacantEntryMut.html b/hashbrown/hash_map/struct.RawVacantEntryMut.html new file mode 100644 index 000000000..654b51fa8 --- /dev/null +++ b/hashbrown/hash_map/struct.RawVacantEntryMut.html @@ -0,0 +1,152 @@ +RawVacantEntryMut in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::RawVacantEntryMut

source ·
pub struct RawVacantEntryMut<'a, K, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into a vacant entry in a HashMap. +It is part of the RawEntryMut enum.

+

§Examples

+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut, RawVacantEntryMut};
+
+let mut map = HashMap::<&str, i32>::new();
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let raw_v: RawVacantEntryMut<_, _, _> = match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(view) => view,
+    RawEntryMut::Occupied(_) => unreachable!(),
+};
+raw_v.insert("a", 10);
+assert!(map[&"a"] == 10 && map.len() == 1);
+
+// Nonexistent key (insert and update)
+let hash = compute_hash(map.hasher(), &"b");
+match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"b") {
+    RawEntryMut::Occupied(_) => unreachable!(),
+    RawEntryMut::Vacant(view) => {
+        let (k, value) = view.insert("b", 2);
+        assert_eq!((*k, *value), ("b", 2));
+        *value = 20;
+    }
+}
+assert!(map[&"b"] == 20 && map.len() == 2);
+
+let hash = compute_hash(map.hasher(), &"c");
+match map.raw_entry_mut().from_hash(hash, |q| *q == "c") {
+    RawEntryMut::Occupied(_) => unreachable!(),
+    RawEntryMut::Vacant(view) => {
+        assert_eq!(view.insert("c", 30), (&mut "c", &mut 30));
+    }
+}
+assert!(map[&"c"] == 30 && map.len() == 3);
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> RawVacantEntryMut<'a, K, V, S, A>

source

pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
where + K: Hash, + S: BuildHasher,

Sets the value of the entry with the VacantEntry’s key, +and returns a mutable reference to it.

+
§Examples
+
use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+
+match map.raw_entry_mut().from_key(&"c") {
+    RawEntryMut::Occupied(_) => panic!(),
+    RawEntryMut::Vacant(v) => assert_eq!(v.insert("c", 300), (&mut "c", &mut 300)),
+}
+
+assert_eq!(map[&"c"], 300);
+
source

pub fn insert_hashed_nocheck( + self, + hash: u64, + key: K, + value: V +) -> (&'a mut K, &'a mut V)
where + K: Hash, + S: BuildHasher,

Sets the value of the entry with the VacantEntry’s key, +and returns a mutable reference to it.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+let key = "c";
+let hash = compute_hash(map.hasher(), &key);
+
+match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) {
+    RawEntryMut::Occupied(_) => panic!(),
+    RawEntryMut::Vacant(v) => assert_eq!(
+        v.insert_hashed_nocheck(hash, key, 300),
+        (&mut "c", &mut 300)
+    ),
+}
+
+assert_eq!(map[&"c"], 300);
+
source

pub fn insert_with_hasher<H>( + self, + hash: u64, + key: K, + value: V, + hasher: H +) -> (&'a mut K, &'a mut V)
where + H: Fn(&K) -> u64,

Sets the value of an entry with a custom hasher function.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+fn make_hasher<K, S>(hash_builder: &S) -> impl Fn(&K) -> u64 + '_
+where
+    K: Hash + ?Sized,
+    S: BuildHasher,
+{
+    move |key: &K| {
+        use core::hash::Hasher;
+        let mut state = hash_builder.build_hasher();
+        key.hash(&mut state);
+        state.finish()
+    }
+}
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+let key = "a";
+let hash_builder = map.hasher().clone();
+let hash = make_hasher(&hash_builder)(&key);
+
+match map.raw_entry_mut().from_hash(hash, |q| q == &key) {
+    RawEntryMut::Occupied(_) => panic!(),
+    RawEntryMut::Vacant(v) => assert_eq!(
+        v.insert_with_hasher(hash, key, 100, make_hasher(&hash_builder)),
+        (&mut "a", &mut 100)
+    ),
+}
+map.extend([("b", 200), ("c", 300), ("d", 400), ("e", 500), ("f", 600)]);
+assert_eq!(map[&"a"], 100);
+

Trait Implementations§

source§

impl<K, V, S, A: Allocator> Debug for RawVacantEntryMut<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for RawVacantEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> RefUnwindSafe for RawVacantEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for RawVacantEntryMut<'a, K, V, S, A>
where + S: Sync, + A: Send, + K: Send, + V: Send,

§

impl<'a, K, V, S, A> Sync for RawVacantEntryMut<'a, K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<'a, K, V, S, A> Unpin for RawVacantEntryMut<'a, K, V, S, A>

§

impl<'a, K, V, S, A = Global> !UnwindSafe for RawVacantEntryMut<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.VacantEntry.html b/hashbrown/hash_map/struct.VacantEntry.html new file mode 100644 index 000000000..a05309145 --- /dev/null +++ b/hashbrown/hash_map/struct.VacantEntry.html @@ -0,0 +1,80 @@ +VacantEntry in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::VacantEntry

source ·
pub struct VacantEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A view into a vacant entry in a HashMap. +It is part of the Entry enum.

+

§Examples

+
use hashbrown::hash_map::{Entry, HashMap, VacantEntry};
+
+let mut map = HashMap::<&str, i32>::new();
+
+let entry_v: VacantEntry<_, _, _> = match map.entry("a") {
+    Entry::Vacant(view) => view,
+    Entry::Occupied(_) => unreachable!(),
+};
+entry_v.insert(10);
+assert!(map[&"a"] == 10 && map.len() == 1);
+
+// Nonexistent key (insert and update)
+match map.entry("b") {
+    Entry::Occupied(_) => unreachable!(),
+    Entry::Vacant(view) => {
+        let value = view.insert(2);
+        assert_eq!(*value, 2);
+        *value = 20;
+    }
+}
+assert!(map[&"b"] == 20 && map.len() == 2);
+

Implementations§

source§

impl<'a, K, V, S, A: Allocator> VacantEntry<'a, K, V, S, A>

source

pub fn key(&self) -> &K

Gets a reference to the key that would be used when inserting a value +through the VacantEntry.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+assert_eq!(map.entry("poneyland").key(), &"poneyland");
+
source

pub fn into_key(self) -> K

Take ownership of the key.

+
§Examples
+
use hashbrown::hash_map::{Entry, HashMap};
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+match map.entry("poneyland") {
+    Entry::Occupied(_) => panic!(),
+    Entry::Vacant(v) => assert_eq!(v.into_key(), "poneyland"),
+}
+
source

pub fn insert(self, value: V) -> &'a mut V
where + K: Hash, + S: BuildHasher,

Sets the value of the entry with the VacantEntry’s key, +and returns a mutable reference to it.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::Entry;
+
+let mut map: HashMap<&str, u32> = HashMap::new();
+
+if let Entry::Vacant(o) = map.entry("poneyland") {
+    o.insert(37);
+}
+assert_eq!(map["poneyland"], 37);
+

Trait Implementations§

source§

impl<K: Debug, V, S, A: Allocator> Debug for VacantEntry<'_, K, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, K, V, S, A> Freeze for VacantEntry<'a, K, V, S, A>
where + K: Freeze,

§

impl<'a, K, V, S, A> RefUnwindSafe for VacantEntry<'a, K, V, S, A>

§

impl<'a, K, V, S, A> Send for VacantEntry<'a, K, V, S, A>
where + K: Send, + S: Send, + A: Send, + V: Send,

§

impl<'a, K, V, S, A> Sync for VacantEntry<'a, K, V, S, A>
where + K: Sync, + S: Sync, + A: Sync, + V: Sync,

§

impl<'a, K, V, S, A> Unpin for VacantEntry<'a, K, V, S, A>
where + K: Unpin,

§

impl<'a, K, V, S = DefaultHashBuilder, A = Global> !UnwindSafe for VacantEntry<'a, K, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.VacantEntryRef.html b/hashbrown/hash_map/struct.VacantEntryRef.html new file mode 100644 index 000000000..9b905da51 --- /dev/null +++ b/hashbrown/hash_map/struct.VacantEntryRef.html @@ -0,0 +1,90 @@ +VacantEntryRef in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::VacantEntryRef

source ·
pub struct VacantEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into a vacant entry in a HashMap. +It is part of the EntryRef enum.

+

§Examples

+
use hashbrown::hash_map::{EntryRef, HashMap, VacantEntryRef};
+
+let mut map = HashMap::<String, i32>::new();
+
+let entry_v: VacantEntryRef<_, _, _, _> = match map.entry_ref("a") {
+    EntryRef::Vacant(view) => view,
+    EntryRef::Occupied(_) => unreachable!(),
+};
+entry_v.insert(10);
+assert!(map["a"] == 10 && map.len() == 1);
+
+// Nonexistent key (insert and update)
+match map.entry_ref("b") {
+    EntryRef::Occupied(_) => unreachable!(),
+    EntryRef::Vacant(view) => {
+        let value = view.insert(2);
+        assert_eq!(*value, 2);
+        *value = 20;
+    }
+}
+assert!(map["b"] == 20 && map.len() == 2);
+

Implementations§

source§

impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> VacantEntryRef<'a, 'b, K, Q, V, S, A>

source

pub fn key(&self) -> &Q
where + K: Borrow<Q>,

Gets a reference to the key that would be used when inserting a value +through the VacantEntryRef.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+let key: &str = "poneyland";
+assert_eq!(map.entry_ref(key).key(), "poneyland");
+
source

pub fn into_key(self) -> K
where + K: From<&'b Q>,

Take ownership of the key.

+
§Examples
+
use hashbrown::hash_map::{EntryRef, HashMap};
+
+let mut map: HashMap<String, u32> = HashMap::new();
+let key: &str = "poneyland";
+
+match map.entry_ref(key) {
+    EntryRef::Occupied(_) => panic!(),
+    EntryRef::Vacant(v) => assert_eq!(v.into_key(), "poneyland".to_owned()),
+}
+
source

pub fn insert(self, value: V) -> &'a mut V
where + K: Hash + From<&'b Q>, + S: BuildHasher,

Sets the value of the entry with the VacantEntryRef’s key, +and returns a mutable reference to it.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::EntryRef;
+
+let mut map: HashMap<String, u32> = HashMap::new();
+let key: &str = "poneyland";
+
+if let EntryRef::Vacant(o) = map.entry_ref(key) {
+    o.insert(37);
+}
+assert_eq!(map["poneyland"], 37);
+

Trait Implementations§

source§

impl<K: Borrow<Q>, Q: ?Sized + Debug, V, S, A: Allocator> Debug for VacantEntryRef<'_, '_, K, Q, V, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, 'b, K, Q, V, S, A> Freeze for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Freeze, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for VacantEntryRef<'a, 'b, K, Q, V, S, A>

§

impl<'a, 'b, K, Q, V, S, A> Send for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Send, + Q: Sync + ?Sized, + S: Send, + A: Send, + V: Send,

§

impl<'a, 'b, K, Q, V, S, A> Sync for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Sync, + Q: Sync + ?Sized, + S: Sync, + A: Sync, + V: Sync,

§

impl<'a, 'b, K, Q, V, S, A> Unpin for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where + K: Unpin, + Q: ?Sized,

§

impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for VacantEntryRef<'a, 'b, K, Q, V, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.Values.html b/hashbrown/hash_map/struct.Values.html new file mode 100644 index 000000000..d54235842 --- /dev/null +++ b/hashbrown/hash_map/struct.Values.html @@ -0,0 +1,219 @@ +Values in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::Values

source ·
pub struct Values<'a, K, V> { /* private fields */ }
Expand description

An iterator over the values of a HashMap in arbitrary order. +The iterator element type is &'a V.

+

This struct is created by the values method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+
+let mut values = map.values();
+let mut vec = vec![values.next(), values.next(), values.next()];
+
+// The `Values` iterator produces values in arbitrary order, so the
+// values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [Some(&"a"), Some(&"b"), Some(&"c")]);
+
+// It is a fused iterator
+assert_eq!(values.next(), None);
+assert_eq!(values.next(), None);
+

Trait Implementations§

source§

impl<K, V> Clone for Values<'_, K, V>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<K, V: Debug> Debug for Values<'_, K, V>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V> ExactSizeIterator for Values<'_, K, V>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V> Iterator for Values<'a, K, V>

§

type Item = &'a V

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a V>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V> FusedIterator for Values<'_, K, V>

Auto Trait Implementations§

§

impl<'a, K, V> Freeze for Values<'a, K, V>

§

impl<'a, K, V> RefUnwindSafe for Values<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V> Send for Values<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Sync for Values<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Unpin for Values<'a, K, V>

§

impl<'a, K, V> UnwindSafe for Values<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_map/struct.ValuesMut.html b/hashbrown/hash_map/struct.ValuesMut.html new file mode 100644 index 000000000..4a2ef53b3 --- /dev/null +++ b/hashbrown/hash_map/struct.ValuesMut.html @@ -0,0 +1,214 @@ +ValuesMut in hashbrown::hash_map - Rust

Struct hashbrown::hash_map::ValuesMut

source ·
pub struct ValuesMut<'a, K, V> { /* private fields */ }
Expand description

A mutable iterator over the values of a HashMap in arbitrary order. +The iterator element type is &'a mut V.

+

This struct is created by the values_mut method on HashMap. See its +documentation for more.

+

§Examples

+
use hashbrown::HashMap;
+
+let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].into();
+
+let mut values = map.values_mut();
+values.next().map(|v| v.push_str(" Mississippi"));
+values.next().map(|v| v.push_str(" Mississippi"));
+
+// It is a fused iterator
+assert_eq!(values.next(), None);
+assert_eq!(values.next(), None);
+
+assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned());
+assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned());
+

Trait Implementations§

source§

impl<K, V: Debug> Debug for ValuesMut<'_, K, V>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K, V> Iterator for ValuesMut<'a, K, V>

§

type Item = &'a mut V

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a mut V>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements. Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, V> FusedIterator for ValuesMut<'_, K, V>

Auto Trait Implementations§

§

impl<'a, K, V> Freeze for ValuesMut<'a, K, V>

§

impl<'a, K, V> RefUnwindSafe for ValuesMut<'a, K, V>
where + K: RefUnwindSafe, + V: RefUnwindSafe,

§

impl<'a, K, V> Send for ValuesMut<'a, K, V>
where + K: Send, + V: Send,

§

impl<'a, K, V> Sync for ValuesMut<'a, K, V>
where + K: Sync, + V: Sync,

§

impl<'a, K, V> Unpin for ValuesMut<'a, K, V>

§

impl<'a, K, V> !UnwindSafe for ValuesMut<'a, K, V>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/enum.Entry.html b/hashbrown/hash_set/enum.Entry.html new file mode 100644 index 000000000..50a896ce0 --- /dev/null +++ b/hashbrown/hash_set/enum.Entry.html @@ -0,0 +1,108 @@ +Entry in hashbrown::hash_set - Rust

Enum hashbrown::hash_set::Entry

source ·
pub enum Entry<'a, T, S, A = Global>
where + A: Allocator,
{ + Occupied(OccupiedEntry<'a, T, S, A>), + Vacant(VacantEntry<'a, T, S, A>), +}
Expand description

A view into a single entry in a set, which may either be vacant or occupied.

+

This enum is constructed from the entry method on HashSet.

+

§Examples

+
use hashbrown::hash_set::{Entry, HashSet, OccupiedEntry};
+
+let mut set = HashSet::new();
+set.extend(["a", "b", "c"]);
+assert_eq!(set.len(), 3);
+
+// Existing value (insert)
+let entry: Entry<_, _> = set.entry("a");
+let _raw_o: OccupiedEntry<_, _> = entry.insert();
+assert_eq!(set.len(), 3);
+// Nonexistent value (insert)
+set.entry("d").insert();
+
+// Existing value (or_insert)
+set.entry("b").or_insert();
+// Nonexistent value (or_insert)
+set.entry("e").or_insert();
+
+println!("Our HashSet: {:?}", set);
+
+let mut vec: Vec<_> = set.iter().copied().collect();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c", "d", "e"]);
+

Variants§

§

Occupied(OccupiedEntry<'a, T, S, A>)

An occupied entry.

+

§Examples

+
use hashbrown::hash_set::{Entry, HashSet};
+let mut set: HashSet<_> = ["a", "b"].into();
+
+match set.entry("a") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(_) => { }
+}
+
§

Vacant(VacantEntry<'a, T, S, A>)

A vacant entry.

+

§Examples

+
use hashbrown::hash_set::{Entry, HashSet};
+let mut set: HashSet<&str> = HashSet::new();
+
+match set.entry("a") {
+    Entry::Occupied(_) => unreachable!(),
+    Entry::Vacant(_) => { }
+}
+

Implementations§

source§

impl<'a, T, S, A: Allocator> Entry<'a, T, S, A>

source

pub fn insert(self) -> OccupiedEntry<'a, T, S, A>
where + T: Hash, + S: BuildHasher,

Sets the value of the entry, and returns an OccupiedEntry.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<&str> = HashSet::new();
+let entry = set.entry("horseyland").insert();
+
+assert_eq!(entry.get(), &"horseyland");
+
source

pub fn or_insert(self)
where + T: Hash, + S: BuildHasher,

Ensures a value is in the entry by inserting if it was vacant.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<&str> = HashSet::new();
+
+// nonexistent key
+set.entry("poneyland").or_insert();
+assert!(set.contains("poneyland"));
+
+// existing key
+set.entry("poneyland").or_insert();
+assert!(set.contains("poneyland"));
+assert_eq!(set.len(), 1);
+
source

pub fn get(&self) -> &T

Returns a reference to this entry’s value.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<&str> = HashSet::new();
+set.entry("poneyland").or_insert();
+// existing key
+assert_eq!(set.entry("poneyland").get(), &"poneyland");
+// nonexistent key
+assert_eq!(set.entry("horseland").get(), &"horseland");
+

Trait Implementations§

source§

impl<T: Debug, S, A: Allocator> Debug for Entry<'_, T, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for Entry<'a, T, S, A>
where + T: Freeze,

§

impl<'a, T, S, A> RefUnwindSafe for Entry<'a, T, S, A>
where + T: RefUnwindSafe, + S: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<'a, T, S, A> Send for Entry<'a, T, S, A>
where + T: Send, + S: Send, + A: Send,

§

impl<'a, T, S, A> Sync for Entry<'a, T, S, A>
where + T: Sync, + S: Sync, + A: Sync,

§

impl<'a, T, S, A> Unpin for Entry<'a, T, S, A>
where + T: Unpin,

§

impl<'a, T, S, A = Global> !UnwindSafe for Entry<'a, T, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/index.html b/hashbrown/hash_set/index.html new file mode 100644 index 000000000..975d131d7 --- /dev/null +++ b/hashbrown/hash_set/index.html @@ -0,0 +1,4 @@ +hashbrown::hash_set - Rust

Module hashbrown::hash_set

source ·
Expand description

A hash set implemented as a HashMap where the value is ().

+
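As a brief illustrative sketch (added here for clarity, not part of the generated page), the equivalence the description mentions can be seen by tracking the same keys in a HashSet and in a HashMap whose value type is ():

use hashbrown::{HashMap, HashSet};

// Sketch only: a `HashSet<T>` behaves like a `HashMap<T, ()>` that tracks keys alone.
let mut set: HashSet<&str> = HashSet::new();
let mut map: HashMap<&str, ()> = HashMap::new();

set.insert("a");
map.insert("a", ());

// Membership and size queries line up between the two representations.
assert_eq!(set.contains("a"), map.contains_key("a"));
assert_eq!(set.len(), map.len());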

Structs§

  • Difference: A lazy iterator producing elements in the difference of HashSets.
  • Drain: A draining iterator over the items of a HashSet.
  • ExtractIf: A draining iterator over entries of a HashSet which don’t satisfy the predicate f.
  • HashSet: A hash set implemented as a HashMap where the value is ().
  • Intersection: A lazy iterator producing elements in the intersection of HashSets.
  • IntoIter: An owning iterator over the items of a HashSet.
  • Iter: An iterator over the items of a HashSet.
  • OccupiedEntry: A view into an occupied entry in a HashSet. It is part of the Entry enum.
  • SymmetricDifference: A lazy iterator producing elements in the symmetric difference of HashSets.
  • Union: A lazy iterator producing elements in the union of HashSets.
  • VacantEntry: A view into a vacant entry in a HashSet. It is part of the Entry enum.

Enums§

  • Entry: A view into a single entry in a set, which may either be vacant or occupied.
\ No newline at end of file diff --git a/hashbrown/hash_set/sidebar-items.js b/hashbrown/hash_set/sidebar-items.js new file mode 100644 index 000000000..cc309fcd9 --- /dev/null +++ b/hashbrown/hash_set/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"enum":["Entry"],"struct":["Difference","Drain","ExtractIf","HashSet","Intersection","IntoIter","Iter","OccupiedEntry","SymmetricDifference","Union","VacantEntry"]}; \ No newline at end of file diff --git a/hashbrown/hash_set/struct.Difference.html b/hashbrown/hash_set/struct.Difference.html new file mode 100644 index 000000000..32fbdc44f --- /dev/null +++ b/hashbrown/hash_set/struct.Difference.html @@ -0,0 +1,215 @@ +Difference in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::Difference

source ·
pub struct Difference<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A lazy iterator producing elements in the difference of HashSets.

+

This struct is created by the difference method on HashSet. See its documentation for more.

+
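A minimal usage sketch (added for illustration, not part of the generated page), relying only on the difference method named above:

use hashbrown::HashSet;

let a: HashSet<i32> = [1, 2, 3].into_iter().collect();
let b: HashSet<i32> = [2, 3, 4].into_iter().collect();

// `difference` lazily yields references to the items of `a` that are not in `b`.
let mut only_in_a: Vec<i32> = a.difference(&b).copied().collect();
only_in_a.sort_unstable();
assert_eq!(only_in_a, [1]);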

Trait Implementations§

source§

impl<T, S, A: Allocator> Clone for Difference<'_, T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for Difference<'_, T, S, A>
where + T: Debug + Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, T, S, A> Iterator for Difference<'a, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a T>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, S, A> FusedIterator for Difference<'_, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for Difference<'a, T, S, A>

§

impl<'a, T, S, A> RefUnwindSafe for Difference<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, S, A> Send for Difference<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Sync for Difference<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Unpin for Difference<'a, T, S, A>

§

impl<'a, T, S, A> UnwindSafe for Difference<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.Drain.html b/hashbrown/hash_set/struct.Drain.html new file mode 100644 index 000000000..8202be8c6 --- /dev/null +++ b/hashbrown/hash_set/struct.Drain.html @@ -0,0 +1,200 @@ +Drain in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::Drain

source ·
pub struct Drain<'a, K, A: Allocator = Global> { /* private fields */ }
Expand description

A draining iterator over the items of a HashSet.

+

This struct is created by the drain method on HashSet. See its documentation for more.

+
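A minimal usage sketch (added for illustration, not part of the generated page), relying only on the drain method named above:

use hashbrown::HashSet;

let mut set: HashSet<i32> = (0..5).collect();

// `drain` yields every item by value and leaves the set empty once exhausted.
let drained: Vec<i32> = set.drain().collect();
assert_eq!(drained.len(), 5);
assert!(set.is_empty());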

Trait Implementations§

source§

impl<K: Debug, A: Allocator> Debug for Drain<'_, K, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, A: Allocator> ExactSizeIterator for Drain<'_, K, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<K, A: Allocator> Iterator for Drain<'_, K, A>

§

type Item = K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<K>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, A: Allocator> FusedIterator for Drain<'_, K, A>

Auto Trait Implementations§

§

impl<'a, K, A> Freeze for Drain<'a, K, A>

§

impl<'a, K, A> RefUnwindSafe for Drain<'a, K, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe,

§

impl<'a, K, A> Send for Drain<'a, K, A>
where + A: Send, + K: Send,

§

impl<'a, K, A> Sync for Drain<'a, K, A>
where + A: Sync, + K: Sync,

§

impl<'a, K, A> Unpin for Drain<'a, K, A>

§

impl<'a, K, A> UnwindSafe for Drain<'a, K, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.ExtractIf.html b/hashbrown/hash_set/struct.ExtractIf.html new file mode 100644 index 000000000..3b8a64832 --- /dev/null +++ b/hashbrown/hash_set/struct.ExtractIf.html @@ -0,0 +1,206 @@ +ExtractIf in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::ExtractIf

source ·
pub struct ExtractIf<'a, K, F, A: Allocator = Global>
where
    F: FnMut(&K) -> bool,
{ /* private fields */ }
Expand description

A draining iterator over entries of a HashSet which don’t satisfy the predicate f.

+

This struct is created by the extract_if method on HashSet. See its documentation for more.

+
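A minimal usage sketch (added for illustration, not part of the generated page), relying only on the extract_if method named above:

use hashbrown::HashSet;

let mut set: HashSet<i32> = (0..8).collect();

// Only items matching the predicate are removed and yielded; the rest stay in the set.
let evens: Vec<i32> = set.extract_if(|v| v % 2 == 0).collect();
assert_eq!(evens.len(), 4);
assert_eq!(set.len(), 4);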

Trait Implementations§

source§

impl<K, F, A: Allocator> Iterator for ExtractIf<'_, K, F, A>
where + F: FnMut(&K) -> bool,

§

type Item = K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.0.0 · source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, F, A: Allocator> FusedIterator for ExtractIf<'_, K, F, A>
where + F: FnMut(&K) -> bool,

Auto Trait Implementations§

§

impl<'a, K, F, A> Freeze for ExtractIf<'a, K, F, A>
where + F: Freeze,

§

impl<'a, K, F, A> RefUnwindSafe for ExtractIf<'a, K, F, A>
where + F: RefUnwindSafe, + A: RefUnwindSafe, + K: RefUnwindSafe,

§

impl<'a, K, F, A> Send for ExtractIf<'a, K, F, A>
where + F: Send, + A: Send, + K: Send,

§

impl<'a, K, F, A> Sync for ExtractIf<'a, K, F, A>
where + F: Sync, + A: Sync, + K: Sync,

§

impl<'a, K, F, A> Unpin for ExtractIf<'a, K, F, A>
where + F: Unpin,

§

impl<'a, K, F, A = Global> !UnwindSafe for ExtractIf<'a, K, F, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.HashSet.html b/hashbrown/hash_set/struct.HashSet.html new file mode 100644 index 000000000..46dbe948a --- /dev/null +++ b/hashbrown/hash_set/struct.HashSet.html @@ -0,0 +1,700 @@ +HashSet in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::HashSet

source ·
pub struct HashSet<T, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A hash set implemented as a HashMap where the value is ().

+

As with the HashMap type, a HashSet requires that the elements +implement the Eq and Hash traits. This can frequently be achieved by +using #[derive(PartialEq, Eq, Hash)]. If you implement these yourself, +it is important that the following property holds:

+
k1 == k2 -> hash(k1) == hash(k2)
+
+

In other words, if two keys are equal, their hashes must be equal.

+

It is a logic error for an item to be modified in such a way that the +item’s hash, as determined by the Hash trait, or its equality, as +determined by the Eq trait, changes while it is in the set. This is +normally only possible through Cell, RefCell, global state, I/O, or +unsafe code.

+

It is also a logic error for the Hash implementation of a key to panic. +This is generally only possible if the trait is implemented manually. If a +panic does occur then the contents of the HashSet may become corrupted and +some items may be dropped from the table.

+

§Examples

+
use hashbrown::HashSet;
+// Type inference lets us omit an explicit type signature (which
+// would be `HashSet<String>` in this example).
+let mut books = HashSet::new();
+
+// Add some books.
+books.insert("A Dance With Dragons".to_string());
+books.insert("To Kill a Mockingbird".to_string());
+books.insert("The Odyssey".to_string());
+books.insert("The Great Gatsby".to_string());
+
+// Check for a specific one.
+if !books.contains("The Winds of Winter") {
+    println!("We have {} books, but The Winds of Winter ain't one.",
+             books.len());
+}
+
+// Remove a book.
+books.remove("The Odyssey");
+
+// Iterate over everything.
+for book in &books {
+    println!("{}", book);
+}
+

The easiest way to use HashSet with a custom type is to derive +Eq and Hash. We must also derive PartialEq. This will in the +future be implied by Eq.

+ +
use hashbrown::HashSet;
+#[derive(Hash, Eq, PartialEq, Debug)]
+struct Viking {
+    name: String,
+    power: usize,
+}
+
+let mut vikings = HashSet::new();
+
+vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+vikings.insert(Viking { name: "Olaf".to_string(), power: 4 });
+vikings.insert(Viking { name: "Harald".to_string(), power: 8 });
+
+// Use derived implementation to print the vikings.
+for x in &vikings {
+    println!("{:?}", x);
+}
+

A HashSet with a fixed list of elements can be initialized from an array:

+ +
use hashbrown::HashSet;
+
+let viking_names: HashSet<&'static str> =
+    [ "Einar", "Olaf", "Harald" ].into_iter().collect();
+// use the values stored in the set
+

Implementations§

source§

impl<T, S, A: Allocator> HashSet<T, S, A>

source

pub fn capacity(&self) -> usize

Returns the number of elements the set can hold without reallocating.

+
§Examples
+
use hashbrown::HashSet;
+let set: HashSet<i32> = HashSet::with_capacity(100);
+assert!(set.capacity() >= 100);
+
source

pub fn iter(&self) -> Iter<'_, T>

An iterator visiting all elements in arbitrary order. +The iterator element type is &'a T.

+
§Examples
+
use hashbrown::HashSet;
+let mut set = HashSet::new();
+set.insert("a");
+set.insert("b");
+
+// Will print in an arbitrary order.
+for x in set.iter() {
+    println!("{}", x);
+}
+
source

pub fn len(&self) -> usize

Returns the number of elements in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+assert_eq!(v.len(), 0);
+v.insert(1);
+assert_eq!(v.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the set contains no elements.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+assert!(v.is_empty());
+v.insert(1);
+assert!(!v.is_empty());
+
source

pub fn drain(&mut self) -> Drain<'_, T, A>

Clears the set, returning all elements in an iterator.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert!(!set.is_empty());
+
+// print 1, 2, 3 in an arbitrary order
+for i in set.drain() {
+    println!("{}", i);
+}
+
+assert!(set.is_empty());
+
source

pub fn retain<F>(&mut self, f: F)
where + F: FnMut(&T) -> bool,

Retains only the elements specified by the predicate.

+

In other words, remove all elements e such that f(&e) returns false.

+
§Examples
+
use hashbrown::HashSet;
+
+let xs = [1,2,3,4,5,6];
+let mut set: HashSet<i32> = xs.into_iter().collect();
+set.retain(|&k| k % 2 == 0);
+assert_eq!(set.len(), 3);
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
where + F: FnMut(&T) -> bool,

Drains the elements for which the given predicate returns true, and returns an iterator over the removed items.

+

In other words, move all elements e such that f(&e) returns true out +into another iterator.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<i32> = (0..8).collect();
+let drained: HashSet<i32> = set.extract_if(|v| v % 2 == 0).collect();
+
+let mut evens = drained.into_iter().collect::<Vec<_>>();
+let mut odds = set.into_iter().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
source

pub fn clear(&mut self)

Clears the set, removing all values.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+v.insert(1);
+v.clear();
+assert!(v.is_empty());
+
source§

impl<T, S> HashSet<T, S, Global>

source

pub const fn with_hasher(hasher: S) -> Self

Creates a new empty hash set which will use the given hasher to hash +keys.

+

The hash set is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does not allow the HashSet to be protected against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use [ahash::RandomState] or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for the HashSet to be useful; see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_hasher(s);
+set.insert(2);
+
source

pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self

Creates an empty HashSet with the specified capacity, using +hasher to hash the keys.

+

The hash set will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash set will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does not allow the HashSet to be protected against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use [ahash::RandomState] or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for the HashSet to be useful; see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_capacity_and_hasher(10, s);
+set.insert(1);
+
source§

impl<T, S, A> HashSet<T, S, A>
where + A: Allocator,

source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
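A minimal usage sketch (an editorial addition, not part of the upstream docs), assuming a set built with the default Global allocator:

use hashbrown::HashSet;
+
+let set: HashSet<i32> = HashSet::new();
+// No allocator was specified, so this is a reference to the default `Global`.
+let _alloc = set.allocator();
+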
source

pub const fn with_hasher_in(hasher: S, alloc: A) -> Self

Creates a new empty hash set which will use the given hasher to hash +keys.

+

The hash set is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does not allow the HashSet to be protected against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use [ahash::RandomState] or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for the HashSet to be useful; see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_hasher(s);
+set.insert(2);
+
source

pub fn with_capacity_and_hasher_in(capacity: usize, hasher: S, alloc: A) -> Self

Creates an empty HashSet with the specified capacity, using +hasher to hash the keys.

+

The hash set will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash set will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does not allow the HashSet to be protected against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use [ahash::RandomState] or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for the HashSet to be useful; see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_capacity_and_hasher(10, s);
+set.insert(1);
+
source

pub fn hasher(&self) -> &S

Returns a reference to the set’s BuildHasher.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let hasher = DefaultHashBuilder::default();
+let set: HashSet<i32> = HashSet::with_hasher(hasher);
+let hasher: &DefaultHashBuilder = set.hasher();
+
source§

impl<T, S, A> HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source

pub fn reserve(&mut self, additional: usize)

Reserves capacity for at least additional more elements to be inserted +in the HashSet. The collection may reserve more space to avoid +frequent reallocations.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program in case of allocation error. Use try_reserve instead if you want to handle memory allocation failure.

+
§Examples
+
use hashbrown::HashSet;
+let mut set: HashSet<i32> = HashSet::new();
+set.reserve(10);
+assert!(set.capacity() >= 10);
+
source

pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted in the given HashSet. The collection may reserve more space to avoid frequent reallocations.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use hashbrown::HashSet;
+let mut set: HashSet<i32> = HashSet::new();
+set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+
source

pub fn shrink_to_fit(&mut self)

Shrinks the capacity of the set as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::with_capacity(100);
+set.insert(1);
+set.insert(2);
+assert!(set.capacity() >= 100);
+set.shrink_to_fit();
+assert!(set.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize)

Shrinks the capacity of the set with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

Panics if the current capacity is smaller than the supplied +minimum capacity.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::with_capacity(100);
+set.insert(1);
+set.insert(2);
+assert!(set.capacity() >= 100);
+set.shrink_to(10);
+assert!(set.capacity() >= 10);
+set.shrink_to(0);
+assert!(set.capacity() >= 2);
+
source

pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S, A>

Visits the values representing the difference, +i.e., the values that are in self but not in other.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Can be seen as `a - b`.
+for x in a.difference(&b) {
+    println!("{}", x); // Print 1
+}
+
+let diff: HashSet<_> = a.difference(&b).collect();
+assert_eq!(diff, [1].iter().collect());
+
+// Note that difference is not symmetric,
+// and `b - a` means something else:
+let diff: HashSet<_> = b.difference(&a).collect();
+assert_eq!(diff, [4].iter().collect());
+
source

pub fn symmetric_difference<'a>( + &'a self, + other: &'a Self +) -> SymmetricDifference<'a, T, S, A>

Visits the values representing the symmetric difference, +i.e., the values that are in self or in other but not in both.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 1, 4 in arbitrary order.
+for x in a.symmetric_difference(&b) {
+    println!("{}", x);
+}
+
+let diff1: HashSet<_> = a.symmetric_difference(&b).collect();
+let diff2: HashSet<_> = b.symmetric_difference(&a).collect();
+
+assert_eq!(diff1, diff2);
+assert_eq!(diff1, [1, 4].iter().collect());
+
source

pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S, A>

Visits the values representing the intersection, +i.e., the values that are both in self and other.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 2, 3 in arbitrary order.
+for x in a.intersection(&b) {
+    println!("{}", x);
+}
+
+let intersection: HashSet<_> = a.intersection(&b).collect();
+assert_eq!(intersection, [2, 3].iter().collect());
+
source

pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S, A>

Visits the values representing the union, +i.e., all the values in self or other, without duplicates.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 1, 2, 3, 4 in arbitrary order.
+for x in a.union(&b) {
+    println!("{}", x);
+}
+
+let union: HashSet<_> = a.union(&b).collect();
+assert_eq!(union, [1, 2, 3, 4].iter().collect());
+
source

pub fn contains<Q>(&self, value: &Q) -> bool
where + Q: Hash + Equivalent<T> + ?Sized,

Returns true if the set contains a value.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.contains(&1), true);
+assert_eq!(set.contains(&4), false);
+
source

pub fn get<Q>(&self, value: &Q) -> Option<&T>
where + Q: Hash + Equivalent<T> + ?Sized,

Returns a reference to the value in the set, if any, that is equal to the given value.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.get(&2), Some(&2));
+assert_eq!(set.get(&4), None);
+
source

pub fn get_or_insert(&mut self, value: T) -> &T

Inserts the given value into the set if it is not present, then +returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.len(), 3);
+assert_eq!(set.get_or_insert(2), &2);
+assert_eq!(set.get_or_insert(100), &100);
+assert_eq!(set.len(), 4); // 100 was inserted
+
source

pub fn get_or_insert_owned<Q>(&mut self, value: &Q) -> &T
where + Q: Hash + Equivalent<T> + ToOwned<Owned = T> + ?Sized,

Inserts an owned copy of the given value into the set if it is not +present, then returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    .iter().map(|&pet| pet.to_owned()).collect();
+
+assert_eq!(set.len(), 3);
+for &pet in &["cat", "dog", "fish"] {
+    let value = set.get_or_insert_owned(pet);
+    assert_eq!(value, pet);
+}
+assert_eq!(set.len(), 4); // a new "fish" was inserted
+
source

pub fn get_or_insert_with<Q, F>(&mut self, value: &Q, f: F) -> &T
where + Q: Hash + Equivalent<T> + ?Sized, + F: FnOnce(&Q) -> T,

Inserts a value computed from f into the set if the given value is +not present, then returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    .iter().map(|&pet| pet.to_owned()).collect();
+
+assert_eq!(set.len(), 3);
+for &pet in &["cat", "dog", "fish"] {
+    let value = set.get_or_insert_with(pet, str::to_owned);
+    assert_eq!(value, pet);
+}
+assert_eq!(set.len(), 4); // a new "fish" was inserted
+
source

pub fn entry(&mut self, value: T) -> Entry<'_, T, S, A>

Gets the given value’s corresponding entry in the set for in-place manipulation.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_set::Entry::*;
+
+let mut singles = HashSet::new();
+let mut dupes = HashSet::new();
+
+for ch in "a short treatise on fungi".chars() {
+    if let Vacant(dupe_entry) = dupes.entry(ch) {
+        // We haven't already seen a duplicate, so
+        // check if we've at least seen it once.
+        match singles.entry(ch) {
+            Vacant(single_entry) => {
+                // We found a new character for the first time.
+                single_entry.insert()
+            }
+            Occupied(single_entry) => {
+                // We've already seen this once, "move" it to dupes.
+                single_entry.remove();
+                dupe_entry.insert();
+            }
+        }
+    }
+}
+
+assert!(!singles.contains(&'t') && dupes.contains(&'t'));
+assert!(singles.contains(&'u') && !dupes.contains(&'u'));
+assert!(!singles.contains(&'v') && !dupes.contains(&'v'));
+
source

pub fn is_disjoint(&self, other: &Self) -> bool

Returns true if self has no elements in common with other. +This is equivalent to checking for an empty intersection.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let mut b = HashSet::new();
+
+assert_eq!(a.is_disjoint(&b), true);
+b.insert(4);
+assert_eq!(a.is_disjoint(&b), true);
+b.insert(1);
+assert_eq!(a.is_disjoint(&b), false);
+
source

pub fn is_subset(&self, other: &Self) -> bool

Returns true if the set is a subset of another, +i.e., other contains at least all the values in self.

+
§Examples
+
use hashbrown::HashSet;
+
+let sup: HashSet<_> = [1, 2, 3].into_iter().collect();
+let mut set = HashSet::new();
+
+assert_eq!(set.is_subset(&sup), true);
+set.insert(2);
+assert_eq!(set.is_subset(&sup), true);
+set.insert(4);
+assert_eq!(set.is_subset(&sup), false);
+
source

pub fn is_superset(&self, other: &Self) -> bool

Returns true if the set is a superset of another, +i.e., self contains at least all the values in other.

+
§Examples
+
use hashbrown::HashSet;
+
+let sub: HashSet<_> = [1, 2].into_iter().collect();
+let mut set = HashSet::new();
+
+assert_eq!(set.is_superset(&sub), false);
+
+set.insert(0);
+set.insert(1);
+assert_eq!(set.is_superset(&sub), false);
+
+set.insert(2);
+assert_eq!(set.is_superset(&sub), true);
+
source

pub fn insert(&mut self, value: T) -> bool

Adds a value to the set.

+

If the set did not have this value present, true is returned.

+

If the set did have this value present, false is returned.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+
+assert_eq!(set.insert(2), true);
+assert_eq!(set.insert(2), false);
+assert_eq!(set.len(), 1);
+
source

pub fn insert_unique_unchecked(&mut self, value: T) -> &T

Inserts a value into the set without checking whether the value already exists in the set.

+

Returns a reference to the value just inserted.

+

This operation is safe if the value does not already exist in the set.

+

However, if the value already exists in the set, the behavior is unspecified: this operation may panic or loop forever, and any following operation on the set may panic, loop forever, or return an arbitrary result.

+

That said, this operation (and any following operations) is guaranteed not to violate memory safety.

+

This operation is faster than a regular insert, because it does not perform a lookup before insertion.

+

This operation is useful during initial population of the set. +For example, when constructing a set from another set, we know +that values are unique.

+
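A brief illustrative sketch (an editorial addition, not from the upstream docs); it assumes the caller already knows every value is distinct:

use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+// Each value is known to be unique, so the pre-insert lookup can be skipped.
+for value in [1, 2, 3] {
+    set.insert_unique_unchecked(value);
+}
+assert_eq!(set.len(), 3);
+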
source

pub fn replace(&mut self, value: T) -> Option<T>

Adds a value to the set, replacing the existing value, if any, that is equal to the given +one. Returns the replaced value.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+set.insert(Vec::<i32>::new());
+
+assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+set.replace(Vec::with_capacity(10));
+assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+
source

pub fn remove<Q>(&mut self, value: &Q) -> bool
where + Q: Hash + Equivalent<T> + ?Sized,

Removes a value from the set. Returns whether the value was +present in the set.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+
+set.insert(2);
+assert_eq!(set.remove(&2), true);
+assert_eq!(set.remove(&2), false);
+
source

pub fn take<Q>(&mut self, value: &Q) -> Option<T>
where + Q: Hash + Equivalent<T> + ?Sized,

Removes and returns the value in the set, if any, that is equal to the given one.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.take(&2), Some(2));
+assert_eq!(set.take(&2), None);
+

Trait Implementations§

source§

impl<T, S, A> BitAnd<&HashSet<T, S, A>> for &HashSet<T, S, A>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default, + A: Allocator,

source§

fn bitand(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S>

Returns the intersection of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
+
+let set = &a & &b;
+
+let mut i = 0;
+let expected = [2, 3];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the & operator.
source§

impl<T, S, A> BitOr<&HashSet<T, S, A>> for &HashSet<T, S, A>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default, + A: Allocator,

source§

fn bitor(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S>

Returns the union of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a | &b;
+
+let mut i = 0;
+let expected = [1, 2, 3, 4, 5];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the | operator.
source§

impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default,

source§

fn bitxor(self, rhs: &HashSet<T, S>) -> HashSet<T, S>

Returns the symmetric difference of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a ^ &b;
+
+let mut i = 0;
+let expected = [1, 2, 4, 5];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the ^ operator.
source§

impl<T: Clone, S: Clone, A: Allocator + Clone> Clone for HashSet<T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for HashSet<T, S, A>
where + T: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, S, A> Default for HashSet<T, S, A>
where + S: Default, + A: Default + Allocator,

source§

fn default() -> Self

Creates an empty HashSet<T, S> with the Default value for the hasher.

+
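A short sketch (editorial, not from the upstream docs) of constructing a set through this impl:

use hashbrown::HashSet;
+
+// Behaves like `HashSet::new()` for the default hasher and allocator.
+let set: HashSet<i32> = HashSet::default();
+assert!(set.is_empty());
+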
source§

impl<'a, T, S, A> Extend<&'a T> for HashSet<T, S, A>
where + T: 'a + Eq + Hash + Copy, + S: BuildHasher, + A: Allocator,

source§

fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I)

Extends a collection with the contents of an iterator. Read more
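A small sketch (editorial, not from the upstream docs) of extending from borrowed items, which this impl supports when T: Copy:

use hashbrown::HashSet;
+
+let mut set: HashSet<i32> = HashSet::new();
+// Extending from `&i32` items works because `i32: Copy`.
+set.extend([1, 2, 3].iter());
+assert_eq!(set.len(), 3);
+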
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> Extend<T> for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I)

Extends a collection with the contents of an iterator. Read more
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
where + A: Allocator,

source§

fn from(map: HashMap<T, (), S, A>) -> Self

Converts to this type from the input type.
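An illustrative sketch (editorial, not from the upstream docs) of converting a map with () values into a set:

use hashbrown::{HashMap, HashSet};
+
+let map: HashMap<&str, ()> = [("a", ()), ("b", ())].into_iter().collect();
+let set: HashSet<&str> = HashSet::from(map);
+assert!(set.contains("a") && set.contains("b"));
+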
source§

impl<T, S, A> FromIterator<T> for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher + Default, + A: Default + Allocator,

source§

fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self

Creates a value from an iterator. Read more
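A minimal sketch (editorial, not from the upstream docs) showing that collect builds a set through this impl:

use hashbrown::HashSet;
+
+// `collect` drives `from_iter` to build the set from any iterator of `T`.
+let set: HashSet<i32> = (0..5).collect();
+assert_eq!(set.len(), 5);
+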
source§

impl<'a, T, S, A: Allocator> IntoIterator for &'a HashSet<T, S, A>

§

type Item = &'a T

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> Iter<'a, T>

Creates an iterator from a value. Read more
source§

impl<T, S, A: Allocator> IntoIterator for HashSet<T, S, A>

source§

fn into_iter(self) -> IntoIter<T, A>

Creates a consuming iterator, that is, one that moves each value out +of the set in arbitrary order. The set cannot be used after calling +this.

+
§Examples
+
use hashbrown::HashSet;
+let mut set = HashSet::new();
+set.insert("a".to_string());
+set.insert("b".to_string());
+
+// Not possible to collect to a Vec<String> with a regular `.iter()`.
+let v: Vec<String> = set.into_iter().collect();
+
+// Will print in an arbitrary order.
+for x in &v {
+    println!("{}", x);
+}
+
§

type Item = T

The type of the elements being iterated over.
§

type IntoIter = IntoIter<T, A>

Which kind of iterator are we turning this into?
source§

impl<T, S, A> PartialEq for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn eq(&self, other: &Self) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default,

source§

fn sub(self, rhs: &HashSet<T, S>) -> HashSet<T, S>

Returns the difference of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a - &b;
+
+let mut i = 0;
+let expected = [1, 2];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the - operator.
source§

impl<T, S, A> Eq for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<T, S, A> Freeze for HashSet<T, S, A>
where + S: Freeze, + A: Freeze,

§

impl<T, S, A> RefUnwindSafe for HashSet<T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<T, S, A> Send for HashSet<T, S, A>
where + S: Send, + A: Send, + T: Send,

§

impl<T, S, A> Sync for HashSet<T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<T, S, A> Unpin for HashSet<T, S, A>
where + S: Unpin, + A: Unpin, + T: Unpin,

§

impl<T, S, A> UnwindSafe for HashSet<T, S, A>
where + S: UnwindSafe, + A: UnwindSafe, + T: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.Intersection.html b/hashbrown/hash_set/struct.Intersection.html new file mode 100644 index 000000000..08b2c4371 --- /dev/null +++ b/hashbrown/hash_set/struct.Intersection.html @@ -0,0 +1,215 @@ +Intersection in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::Intersection

source ·
pub struct Intersection<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A lazy iterator producing elements in the intersection of HashSets.

+

This struct is created by the intersection method on HashSet. +See its documentation for more.

+
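A short sketch (an editorial addition, not from the upstream docs) of obtaining and consuming this iterator:

use hashbrown::HashSet;
+
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [2, 3, 4].into_iter().collect();
+
+// `intersection` returns this lazy `Intersection` iterator.
+let common: HashSet<_> = a.intersection(&b).copied().collect();
+assert_eq!(common, [2, 3].into_iter().collect());
+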

Trait Implementations§

source§

impl<T, S, A: Allocator> Clone for Intersection<'_, T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for Intersection<'_, T, S, A>
where + T: Debug + Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, T, S, A> Iterator for Intersection<'a, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a T>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, S, A> FusedIterator for Intersection<'_, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for Intersection<'a, T, S, A>

§

impl<'a, T, S, A> RefUnwindSafe for Intersection<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, S, A> Send for Intersection<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Sync for Intersection<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Unpin for Intersection<'a, T, S, A>

§

impl<'a, T, S, A> UnwindSafe for Intersection<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.IntoIter.html b/hashbrown/hash_set/struct.IntoIter.html new file mode 100644 index 000000000..fff853d51 --- /dev/null +++ b/hashbrown/hash_set/struct.IntoIter.html @@ -0,0 +1,203 @@ +IntoIter in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::IntoIter

source ·
pub struct IntoIter<K, A: Allocator = Global> { /* private fields */ }
Expand description

An owning iterator over the items of a HashSet.

+

This struct is created by the into_iter method on HashSet +(provided by the IntoIterator trait). See its documentation for more.

+
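A short sketch (an editorial addition, not from the upstream docs) of how this iterator is typically obtained and drained:

use hashbrown::HashSet;
+
+let set: HashSet<i32> = [1, 2, 3].into_iter().collect();
+
+// `HashSet::into_iter` consumes the set and yields owned values in arbitrary order.
+let mut values: Vec<i32> = set.into_iter().collect();
+values.sort();
+assert_eq!(values, [1, 2, 3]);
+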

Trait Implementations§

source§

impl<K: Debug, A: Allocator> Debug for IntoIter<K, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, A: Allocator> ExactSizeIterator for IntoIter<K, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<K, A: Allocator> Iterator for IntoIter<K, A>

§

type Item = K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<K>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K, A: Allocator> FusedIterator for IntoIter<K, A>

Auto Trait Implementations§

§

impl<K, A> Freeze for IntoIter<K, A>
where + A: Freeze,

§

impl<K, A> RefUnwindSafe for IntoIter<K, A>
where + A: RefUnwindSafe, + K: RefUnwindSafe,

§

impl<K, A> Send for IntoIter<K, A>
where + A: Send, + K: Send,

§

impl<K, A> Sync for IntoIter<K, A>
where + A: Sync, + K: Sync,

§

impl<K, A> Unpin for IntoIter<K, A>
where + A: Unpin, + K: Unpin,

§

impl<K, A> UnwindSafe for IntoIter<K, A>
where + A: UnwindSafe, + K: UnwindSafe + RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.Iter.html b/hashbrown/hash_set/struct.Iter.html new file mode 100644 index 000000000..627735b82 --- /dev/null +++ b/hashbrown/hash_set/struct.Iter.html @@ -0,0 +1,198 @@ +Iter in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::Iter

source ·
pub struct Iter<'a, K> { /* private fields */ }
Expand description

An iterator over the items of a HashSet.

+

This struct is created by the iter method on HashSet. +See its documentation for more.

+
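For orientation, a minimal usage sketch (an illustration only, assuming hashbrown is pulled in as a dependency): Iter is produced by HashSet::iter and yields shared references in arbitrary order.

use hashbrown::HashSet;

let set: HashSet<i32> = [1, 2, 3].into_iter().collect();

// `iter` only borrows the set and yields `&i32` in arbitrary order.
let mut values: Vec<i32> = set.iter().copied().collect();
values.sort_unstable();
assert_eq!(values, [1, 2, 3]);

Because Iter also implements ExactSizeIterator, set.iter().len() reports the number of remaining items without consuming the iterator.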

Trait Implementations§

source§

impl<K> Clone for Iter<'_, K>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<K: Debug> Debug for Iter<'_, K>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, K> ExactSizeIterator for Iter<'a, K>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, K> Iterator for Iter<'a, K>

§

type Item = &'a K

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a K>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<K> FusedIterator for Iter<'_, K>

Auto Trait Implementations§

§

impl<'a, K> Freeze for Iter<'a, K>

§

impl<'a, K> RefUnwindSafe for Iter<'a, K>
where + K: RefUnwindSafe,

§

impl<'a, K> Send for Iter<'a, K>
where + K: Sync,

§

impl<'a, K> Sync for Iter<'a, K>
where + K: Sync,

§

impl<'a, K> Unpin for Iter<'a, K>

§

impl<'a, K> UnwindSafe for Iter<'a, K>
where + K: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.OccupiedEntry.html b/hashbrown/hash_set/struct.OccupiedEntry.html new file mode 100644 index 000000000..95e694f0c --- /dev/null +++ b/hashbrown/hash_set/struct.OccupiedEntry.html @@ -0,0 +1,108 @@ +OccupiedEntry in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::OccupiedEntry

source ·
pub struct OccupiedEntry<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into an occupied entry in a HashSet. +It is part of the Entry enum.

+

§Examples

+
use hashbrown::hash_set::{Entry, HashSet, OccupiedEntry};
+
+let mut set = HashSet::new();
+set.extend(["a", "b", "c"]);
+
+let _entry_o: OccupiedEntry<_, _> = set.entry("a").insert();
+assert_eq!(set.len(), 3);
+
+// Existing key
+match set.entry("a") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(view) => {
+        assert_eq!(view.get(), &"a");
+    }
+}
+
+assert_eq!(set.len(), 3);
+
+// Existing key (take)
+match set.entry("c") {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(view) => {
+        assert_eq!(view.remove(), "c");
+    }
+}
+assert_eq!(set.get(&"c"), None);
+assert_eq!(set.len(), 2);
+

Implementations§

source§

impl<T, S, A: Allocator> OccupiedEntry<'_, T, S, A>

source

pub fn get(&self) -> &T

Gets a reference to the value in the entry.

+
§Examples
+
use hashbrown::hash_set::{Entry, HashSet};
+
+let mut set: HashSet<&str> = HashSet::new();
+set.entry("poneyland").or_insert();
+
+match set.entry("poneyland") {
+    Entry::Vacant(_) => panic!(),
+    Entry::Occupied(entry) => assert_eq!(entry.get(), &"poneyland"),
+}
+
source

pub fn remove(self) -> T

Takes the value out of the entry, and returns it. +Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_set::Entry;
+
+let mut set: HashSet<&str> = HashSet::new();
+// The set is empty
+assert!(set.is_empty() && set.capacity() == 0);
+
+set.entry("poneyland").or_insert();
+let capacity_before_remove = set.capacity();
+
+if let Entry::Occupied(o) = set.entry("poneyland") {
+    assert_eq!(o.remove(), "poneyland");
+}
+
+assert_eq!(set.contains("poneyland"), false);
+// Now the set holds no elements, but its capacity equals the old one
+assert!(set.len() == 0 && set.capacity() == capacity_before_remove);
+
source

pub fn replace(self) -> T

Replaces the entry, returning the old value. The new value in the hash set will be the value used to create this entry.

+
§Panics
+

Will panic if this OccupiedEntry was created through Entry::insert.

+
§Examples
+
 use hashbrown::hash_set::{Entry, HashSet};
+ use std::rc::Rc;
+
+ let mut set: HashSet<Rc<String>> = HashSet::new();
+ let key_one = Rc::new("Stringthing".to_string());
+ let key_two = Rc::new("Stringthing".to_string());
+
+ set.insert(key_one.clone());
+ assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+
+ match set.entry(key_two.clone()) {
+     Entry::Occupied(entry) => {
+         let old_key: Rc<String> = entry.replace();
+         assert!(Rc::ptr_eq(&key_one, &old_key));
+     }
+     Entry::Vacant(_) => panic!(),
+ }
+
+ assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+ assert!(set.contains(&"Stringthing".to_owned()));
+

Trait Implementations§

source§

impl<T: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, T, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for OccupiedEntry<'a, T, S, A>
where + T: Freeze,

§

impl<'a, T, S, A> RefUnwindSafe for OccupiedEntry<'a, T, S, A>
where + T: RefUnwindSafe, + S: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<'a, T, S, A> Send for OccupiedEntry<'a, T, S, A>
where + T: Send, + S: Send, + A: Send,

§

impl<'a, T, S, A> Sync for OccupiedEntry<'a, T, S, A>
where + T: Sync, + S: Sync, + A: Sync,

§

impl<'a, T, S, A> Unpin for OccupiedEntry<'a, T, S, A>
where + T: Unpin,

§

impl<'a, T, S, A = Global> !UnwindSafe for OccupiedEntry<'a, T, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.SymmetricDifference.html b/hashbrown/hash_set/struct.SymmetricDifference.html new file mode 100644 index 000000000..f7b9a39df --- /dev/null +++ b/hashbrown/hash_set/struct.SymmetricDifference.html @@ -0,0 +1,215 @@ +SymmetricDifference in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::SymmetricDifference

source ·
pub struct SymmetricDifference<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A lazy iterator producing elements in the symmetric difference of HashSets.

+

This struct is created by the symmetric_difference method on +HashSet. See its documentation for more.

+
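For orientation, a minimal usage sketch (an illustration only, assuming hashbrown is pulled in as a dependency); iteration order is unspecified, hence the sort before asserting.

use hashbrown::HashSet;

let a: HashSet<i32> = [1, 2, 3].into_iter().collect();
let b: HashSet<i32> = [3, 4].into_iter().collect();

// Elements that are in exactly one of the two sets, produced lazily.
let mut diff: Vec<i32> = a.symmetric_difference(&b).copied().collect();
diff.sort_unstable();
assert_eq!(diff, [1, 2, 4]);

The adapter borrows both sets for the lifetime 'a and yields &'a T, so neither input set is modified or consumed.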

Trait Implementations§

source§

impl<T, S, A: Allocator> Clone for SymmetricDifference<'_, T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for SymmetricDifference<'_, T, S, A>
where + T: Debug + Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, T, S, A> Iterator for SymmetricDifference<'a, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a T>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, S, A> FusedIterator for SymmetricDifference<'_, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for SymmetricDifference<'a, T, S, A>

§

impl<'a, T, S, A> RefUnwindSafe for SymmetricDifference<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, S, A> Send for SymmetricDifference<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Sync for SymmetricDifference<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Unpin for SymmetricDifference<'a, T, S, A>

§

impl<'a, T, S, A> UnwindSafe for SymmetricDifference<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.Union.html b/hashbrown/hash_set/struct.Union.html new file mode 100644 index 000000000..e39335eaa --- /dev/null +++ b/hashbrown/hash_set/struct.Union.html @@ -0,0 +1,215 @@ +Union in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::Union

source ·
pub struct Union<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A lazy iterator producing elements in the union of HashSets.

+

This struct is created by the union method on HashSet. +See its documentation for more.

+
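A minimal usage sketch (not part of the generated page): a Union is obtained from HashSet::union and yields each distinct element of either set exactly once, by reference. The element values below are illustrative only.

use hashbrown::HashSet;
use hashbrown::hash_set::Union;

let a: HashSet<i32> = [1, 2, 3].into_iter().collect();
let b: HashSet<i32> = [3, 4].into_iter().collect();

// `union` borrows both sets; the iterator yields `&i32` items.
let iter: Union<'_, i32, _> = a.union(&b);
let mut values: Vec<i32> = iter.copied().collect();
values.sort_unstable();
assert_eq!(values, [1, 2, 3, 4]);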

Trait Implementations§

source§

impl<T, S, A: Allocator> Clone for Union<'_, T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for Union<'_, T, S, A>
where + T: Debug + Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a, T, S, A> Iterator for Union<'a, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<&'a T>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
1.0.0 · source§

fn cycle(self) -> Cycle<Self>
where + Self: Sized + Clone,

Repeats an iterator endlessly. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, S, A> FusedIterator for Union<'_, T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for Union<'a, T, S, A>

§

impl<'a, T, S, A> RefUnwindSafe for Union<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, S, A> Send for Union<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Sync for Union<'a, T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<'a, T, S, A> Unpin for Union<'a, T, S, A>

§

impl<'a, T, S, A> UnwindSafe for Union<'a, T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_set/struct.VacantEntry.html b/hashbrown/hash_set/struct.VacantEntry.html new file mode 100644 index 000000000..b67f9cc2b --- /dev/null +++ b/hashbrown/hash_set/struct.VacantEntry.html @@ -0,0 +1,72 @@ +VacantEntry in hashbrown::hash_set - Rust

Struct hashbrown::hash_set::VacantEntry

source ·
pub struct VacantEntry<'a, T, S, A: Allocator = Global> { /* private fields */ }
Expand description

A view into a vacant entry in a HashSet. +It is part of the Entry enum.

+

§Examples

+
use hashbrown::hash_set::{Entry, HashSet, VacantEntry};
+
+let mut set = HashSet::<&str>::new();
+
+let entry_v: VacantEntry<_, _> = match set.entry("a") {
+    Entry::Vacant(view) => view,
+    Entry::Occupied(_) => unreachable!(),
+};
+entry_v.insert();
+assert!(set.contains("a") && set.len() == 1);
+
+// Nonexistent key (insert)
+match set.entry("b") {
+    Entry::Vacant(view) => view.insert(),
+    Entry::Occupied(_) => unreachable!(),
+}
+assert!(set.contains("b") && set.len() == 2);
+

Implementations§

source§

impl<'a, T, S, A: Allocator> VacantEntry<'a, T, S, A>

source

pub fn get(&self) -> &T

Gets a reference to the value that would be used when inserting +through the VacantEntry.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<&str> = HashSet::new();
+assert_eq!(set.entry("poneyland").get(), &"poneyland");
+
source

pub fn into_value(self) -> T

Take ownership of the value.

+
§Examples
+
use hashbrown::hash_set::{Entry, HashSet};
+
+let mut set: HashSet<&str> = HashSet::new();
+
+match set.entry("poneyland") {
+    Entry::Occupied(_) => panic!(),
+    Entry::Vacant(v) => assert_eq!(v.into_value(), "poneyland"),
+}
+
source

pub fn insert(self)
where + T: Hash, + S: BuildHasher,

Sets the value of the entry with the VacantEntry’s value.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_set::Entry;
+
+let mut set: HashSet<&str> = HashSet::new();
+
+if let Entry::Vacant(o) = set.entry("poneyland") {
+    o.insert();
+}
+assert!(set.contains("poneyland"));
+

Trait Implementations§

source§

impl<T: Debug, S, A: Allocator> Debug for VacantEntry<'_, T, S, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, S, A> Freeze for VacantEntry<'a, T, S, A>
where + T: Freeze,

§

impl<'a, T, S, A> RefUnwindSafe for VacantEntry<'a, T, S, A>
where + T: RefUnwindSafe, + S: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<'a, T, S, A> Send for VacantEntry<'a, T, S, A>
where + T: Send, + S: Send, + A: Send,

§

impl<'a, T, S, A> Sync for VacantEntry<'a, T, S, A>
where + T: Sync, + S: Sync, + A: Sync,

§

impl<'a, T, S, A> Unpin for VacantEntry<'a, T, S, A>
where + T: Unpin,

§

impl<'a, T, S, A = Global> !UnwindSafe for VacantEntry<'a, T, S, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/enum.Entry.html b/hashbrown/hash_table/enum.Entry.html new file mode 100644 index 000000000..4bf49a594 --- /dev/null +++ b/hashbrown/hash_table/enum.Entry.html @@ -0,0 +1,191 @@ +Entry in hashbrown::hash_table - Rust

Enum hashbrown::hash_table::Entry

source ·
pub enum Entry<'a, T, A = Global>
where + A: Allocator,
{ + Occupied(OccupiedEntry<'a, T, A>), + Vacant(VacantEntry<'a, T, A>), +}
Expand description

A view into a single entry in a table, which may either be vacant or occupied.

+

This enum is constructed from the entry method on HashTable.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in ["a", "b", "c"] {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+assert_eq!(table.len(), 3);
+
+// Existing value (insert)
+let entry: Entry<_> = table.entry(hasher(&"a"), |&x| x == "a", hasher);
+let _raw_o: OccupiedEntry<_, _> = entry.insert("a");
+assert_eq!(table.len(), 3);
+// Nonexistent value (insert)
+table.entry(hasher(&"d"), |&x| x == "d", hasher).insert("d");
+
+// Existing value (or_insert)
+table
+    .entry(hasher(&"b"), |&x| x == "b", hasher)
+    .or_insert("b");
+// Nonexistent value (or_insert)
+table
+    .entry(hasher(&"e"), |&x| x == "e", hasher)
+    .or_insert("e");
+
+println!("Our HashTable: {:?}", table);
+
+let mut vec: Vec<_> = table.iter().copied().collect();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c", "d", "e"]);
+

Variants§

§

Occupied(OccupiedEntry<'a, T, A>)

An occupied entry.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in ["a", "b"] {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+
+match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(_) => {}
+}
+
§

Vacant(VacantEntry<'a, T, A>)

A vacant entry.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::<&str>::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    Entry::Vacant(_) => {}
+    Entry::Occupied(_) => unreachable!(),
+}
+

Implementations§

source§

impl<'a, T, A> Entry<'a, T, A>
where + A: Allocator,

source

pub fn insert(self, value: T) -> OccupiedEntry<'a, T, A>

Sets the value of the entry, replacing any existing value if there is +one, and returns an OccupiedEntry.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+let entry = table
+    .entry(hasher(&"horseyland"), |&x| x == "horseyland", hasher)
+    .insert("horseyland");
+
+assert_eq!(entry.get(), &"horseyland");
+
source

pub fn or_insert(self, default: T) -> OccupiedEntry<'a, T, A>

Ensures a value is in the entry by inserting if it was vacant.

+

Returns an OccupiedEntry pointing to the now-occupied entry.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// nonexistent key
+table
+    .entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher)
+    .or_insert("poneyland");
+assert!(table
+    .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    .is_some());
+
+// existing key
+table
+    .entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher)
+    .or_insert("poneyland");
+assert!(table
+    .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    .is_some());
+assert_eq!(table.len(), 1);
+
source

pub fn or_insert_with( + self, + default: impl FnOnce() -> T +) -> OccupiedEntry<'a, T, A>

Ensures a value is in the entry by inserting the result of the default function if empty.

+

Returns an OccupiedEntry pointing to the now-occupied entry.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<String> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+table
+    .entry(hasher("poneyland"), |x| x == "poneyland", |val| hasher(val))
+    .or_insert_with(|| "poneyland".to_string());
+
+assert!(table
+    .find(hasher(&"poneyland"), |x| x == "poneyland")
+    .is_some());
+
source

pub fn and_modify(self, f: impl FnOnce(&mut T)) -> Self

Provides in-place mutable access to an occupied entry before any +potential inserts into the table.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+table
+    .entry(
+        hasher(&"poneyland"),
+        |&(x, _)| x == "poneyland",
+        |(k, _)| hasher(&k),
+    )
+    .and_modify(|(_, v)| *v += 1)
+    .or_insert(("poneyland", 42));
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(k, _)| k == "poneyland"),
+    Some(&("poneyland", 42))
+);
+
+table
+    .entry(
+        hasher(&"poneyland"),
+        |&(x, _)| x == "poneyland",
+        |(k, _)| hasher(&k),
+    )
+    .and_modify(|(_, v)| *v += 1)
+    .or_insert(("poneyland", 42));
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(k, _)| k == "poneyland"),
+    Some(&("poneyland", 43))
+);
+

Trait Implementations§

source§

impl<T: Debug, A: Allocator> Debug for Entry<'_, T, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, A> Freeze for Entry<'a, T, A>

§

impl<'a, T, A> RefUnwindSafe for Entry<'a, T, A>
where + T: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<'a, T, A> Send for Entry<'a, T, A>
where + T: Send, + A: Send,

§

impl<'a, T, A> Sync for Entry<'a, T, A>
where + T: Sync, + A: Sync,

§

impl<'a, T, A> Unpin for Entry<'a, T, A>

§

impl<'a, T, A = Global> !UnwindSafe for Entry<'a, T, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/index.html b/hashbrown/hash_table/index.html new file mode 100644 index 000000000..0407c8b45 --- /dev/null +++ b/hashbrown/hash_table/index.html @@ -0,0 +1,7 @@ +hashbrown::hash_table - Rust

Module hashbrown::hash_table

source ·
Expand description

A hash table implemented with quadratic probing and SIMD lookup.

+
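A brief sketch of the explicit-hashing workflow this module exposes (the std DefaultHasher below is only an assumed choice; any BuildHasher works): every HashTable operation takes the precomputed hash plus an equality or re-hashing closure.

use hashbrown::HashTable;
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

let mut table: HashTable<(&str, u32)> = HashTable::new();
let build = BuildHasherDefault::<DefaultHasher>::default();
let hash = |key: &str| build.hash_one(key);

// The caller supplies the hash up front and a re-hasher used on growth.
table.insert_unique(hash("a"), ("a", 1), |&(k, _)| hash(k));
table.insert_unique(hash("b"), ("b", 2), |&(k, _)| hash(k));

// Lookups take the hash plus an equality check against the stored element.
assert_eq!(table.find(hash("a"), |&(k, _)| k == "a"), Some(&("a", 1)));
assert_eq!(table.len(), 2);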

Structs§

  • Type representing the absence of an entry, as returned by HashTable::find_entry.
  • A draining iterator over the items of a HashTable.
  • A draining iterator over the entries of a HashTable that satisfy the predicate f; such entries are removed as the iterator yields them.
  • Low-level hash table with explicit hashing.
  • An owning iterator over the entries of a HashTable in arbitrary order. +The iterator element type is T.
  • An iterator over the entries of a HashTable in arbitrary order. +The iterator element type is &'a T.
  • A mutable iterator over the entries of a HashTable in arbitrary order. +The iterator element type is &'a mut T.
  • A view into an occupied entry in a HashTable. +It is part of the Entry enum.
  • A view into a vacant entry in a HashTable. +It is part of the Entry enum.

Enums§

  • A view into a single entry in a table, which may either be vacant or occupied.
\ No newline at end of file diff --git a/hashbrown/hash_table/sidebar-items.js b/hashbrown/hash_table/sidebar-items.js new file mode 100644 index 000000000..9723a3803 --- /dev/null +++ b/hashbrown/hash_table/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"enum":["Entry"],"struct":["AbsentEntry","Drain","ExtractIf","HashTable","IntoIter","Iter","IterMut","OccupiedEntry","VacantEntry"]}; \ No newline at end of file diff --git a/hashbrown/hash_table/struct.AbsentEntry.html b/hashbrown/hash_table/struct.AbsentEntry.html new file mode 100644 index 000000000..6bf417ece --- /dev/null +++ b/hashbrown/hash_table/struct.AbsentEntry.html @@ -0,0 +1,48 @@ +AbsentEntry in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::AbsentEntry

source ·
pub struct AbsentEntry<'a, T, A = Global>
where + A: Allocator,
{ /* private fields */ }
Expand description

Type representing the absence of an entry, as returned by HashTable::find_entry.

+

This type only exists due to limitations in Rust’s NLL borrow checker. In +the future, find_entry will return an Option<OccupiedEntry> and this +type will be removed.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{AbsentEntry, Entry, HashTable};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+let entry_v: AbsentEntry<_, _> = table.find_entry(hasher(&"a"), |&x| x == "a").unwrap_err();
+entry_v
+    .into_table()
+    .insert_unique(hasher(&"a"), "a", hasher);
+assert!(table.find(hasher(&"a"), |&x| x == "a").is_some() && table.len() == 1);
+
+// Nonexistent key (insert)
+match table.entry(hasher(&"b"), |&x| x == "b", hasher) {
+    Entry::Vacant(view) => {
+        view.insert("b");
+    }
+    Entry::Occupied(_) => unreachable!(),
+}
+assert!(table.find(hasher(&"b"), |&x| x == "b").is_some() && table.len() == 2);
+

Implementations§

source§

impl<'a, T, A> AbsentEntry<'a, T, A>
where + A: Allocator,

source

pub fn into_table(self) -> &'a mut HashTable<T, A>

Converts the AbsentEntry into a mutable reference to the underlying +table.

+

Trait Implementations§

source§

impl<T: Debug, A: Allocator> Debug for AbsentEntry<'_, T, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, A> Freeze for AbsentEntry<'a, T, A>

§

impl<'a, T, A> RefUnwindSafe for AbsentEntry<'a, T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, A> Send for AbsentEntry<'a, T, A>
where + T: Send, + A: Send,

§

impl<'a, T, A> Sync for AbsentEntry<'a, T, A>
where + T: Sync, + A: Sync,

§

impl<'a, T, A> Unpin for AbsentEntry<'a, T, A>

§

impl<'a, T, A = Global> !UnwindSafe for AbsentEntry<'a, T, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.Drain.html b/hashbrown/hash_table/struct.Drain.html new file mode 100644 index 000000000..2c4f73080 --- /dev/null +++ b/hashbrown/hash_table/struct.Drain.html @@ -0,0 +1,200 @@ +Drain in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::Drain

source ·
pub struct Drain<'a, T, A: Allocator = Global> { /* private fields */ }
Expand description

A draining iterator over the items of a HashTable.

+

This struct is created by the drain method on HashTable. +See its documentation for more.

+
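As a rough sketch (not part of the generated page; the DefaultHasher is an assumed choice), draining removes and yields every element by value and leaves the table empty.

use hashbrown::HashTable;
use hashbrown::hash_table::Drain;
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

let mut table: HashTable<u32> = HashTable::new();
let build = BuildHasherDefault::<DefaultHasher>::default();
let hash = |v: &u32| build.hash_one(v);

for x in 1..=3u32 {
    table.insert_unique(hash(&x), x, hash);
}

// `drain` removes and yields every element, leaving the table empty.
let drain: Drain<'_, u32> = table.drain();
let mut drained: Vec<u32> = drain.collect();
drained.sort_unstable();
assert_eq!(drained, [1, 2, 3]);
assert!(table.is_empty());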

Trait Implementations§

source§

impl<T: Debug, A: Allocator> Debug for Drain<'_, T, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<T, A: Allocator> Iterator for Drain<'_, T, A>

§

type Item = T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<T>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.0.0 · source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, A: Allocator> FusedIterator for Drain<'_, T, A>

Auto Trait Implementations§

§

impl<'a, T, A> Freeze for Drain<'a, T, A>

§

impl<'a, T, A> RefUnwindSafe for Drain<'a, T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, A> Send for Drain<'a, T, A>
where + T: Send, + A: Send,

§

impl<'a, T, A> Sync for Drain<'a, T, A>
where + T: Sync, + A: Sync,

§

impl<'a, T, A> Unpin for Drain<'a, T, A>

§

impl<'a, T, A> UnwindSafe for Drain<'a, T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.ExtractIf.html b/hashbrown/hash_table/struct.ExtractIf.html new file mode 100644 index 000000000..bd0a93e7c --- /dev/null +++ b/hashbrown/hash_table/struct.ExtractIf.html @@ -0,0 +1,206 @@ +ExtractIf in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::ExtractIf

source ·
pub struct ExtractIf<'a, T, F, A: Allocator = Global>
where + F: FnMut(&mut T) -> bool,
{ /* private fields */ }
Expand description

A draining iterator over the entries of a HashTable that satisfy the predicate f; matching entries are removed from the table as they are yielded.

+

This struct is created by HashTable::extract_if. See its +documentation for more.

+
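A short sketch (illustrative only, with an assumed DefaultHasher): extract_if removes and yields exactly the elements for which the closure returns true, leaving the rest in place.

use hashbrown::HashTable;
use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

let mut table: HashTable<u32> = HashTable::new();
let build = BuildHasherDefault::<DefaultHasher>::default();
let hash = |v: &u32| build.hash_one(v);

for x in 0..8u32 {
    table.insert_unique(hash(&x), x, hash);
}

// Extract the even values; the odd values stay in the table.
let mut evens: Vec<u32> = table.extract_if(|v| *v % 2 == 0).collect();
evens.sort_unstable();
assert_eq!(evens, [0, 2, 4, 6]);
assert_eq!(table.len(), 4);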

Trait Implementations§

source§

impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
where + F: FnMut(&mut T) -> bool,

§

type Item = T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.0.0 · source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, F, A: Allocator> FusedIterator for ExtractIf<'_, T, F, A>
where + F: FnMut(&mut T) -> bool,

Auto Trait Implementations§

§

impl<'a, T, F, A> Freeze for ExtractIf<'a, T, F, A>
where + F: Freeze,

§

impl<'a, T, F, A> RefUnwindSafe for ExtractIf<'a, T, F, A>
where + F: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, F, A> Send for ExtractIf<'a, T, F, A>
where + F: Send, + T: Send, + A: Send,

§

impl<'a, T, F, A> Sync for ExtractIf<'a, T, F, A>
where + F: Sync, + T: Sync, + A: Sync,

§

impl<'a, T, F, A> Unpin for ExtractIf<'a, T, F, A>
where + F: Unpin,

§

impl<'a, T, F, A = Global> !UnwindSafe for ExtractIf<'a, T, F, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.HashTable.html b/hashbrown/hash_table/struct.HashTable.html new file mode 100644 index 000000000..207c21e85 --- /dev/null +++ b/hashbrown/hash_table/struct.HashTable.html @@ -0,0 +1,587 @@ +HashTable in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::HashTable

source ·
pub struct HashTable<T, A = Global>
where A: Allocator,
{ /* private fields */ }
Expand description

Low-level hash table with explicit hashing.

+

The primary use case for this type over HashMap or HashSet is to +support types that do not implement the Hash and Eq traits, but +instead require additional data not contained in the key itself to compute a +hash and compare two elements for equality.

+

Examples of when this can be useful include:

+
    +
  • An IndexMap implementation where indices into a Vec are stored as +elements in a HashTable<usize>. Hashing and comparing the elements +requires indexing the associated Vec to get the actual value referred to +by the index.
  • +
  • Avoiding re-computing a hash when it is already known.
  • +
  • Mutating the key of an element in a way that doesn’t affect its hash.
  • +
+

To achieve this, HashTable methods that search for an element in the table +require a hash value and equality function to be explicitly passed in as +arguments. The method will then iterate over the elements with the given +hash and call the equality function on each of them, until a match is found.

+

In most cases, a HashTable will not be exposed directly in an API. It will +instead be wrapped in a helper type which handles the work of calculating +hash values and comparing elements.

+

Due to its low-level nature, this type provides fewer guarantees than +HashMap and HashSet. Specifically, the API allows you to shoot +yourself in the foot by having multiple elements with identical keys in the +table. The table itself will still function correctly and lookups will +arbitrarily return one of the matching elements. However you should avoid +doing this because it changes the runtime of hash table operations from +O(1) to O(k) where k is the number of duplicate entries.

+
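As a minimal sketch of such a wrapper (the TinySet name is hypothetical, and std's DefaultHasher stands in for the ahash hasher used in the examples below), the helper owns a BuildHasher and performs the hashing and equality checks on behalf of its callers:

use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault, Hash};

use hashbrown::HashTable;

// A tiny set-like helper that hides the explicit hash/equality plumbing.
struct TinySet<T> {
    hasher: BuildHasherDefault<DefaultHasher>,
    table: HashTable<T>,
}

impl<T: Hash + Eq> TinySet<T> {
    fn new() -> Self {
        Self {
            hasher: BuildHasherDefault::default(),
            table: HashTable::new(),
        }
    }

    fn insert(&mut self, value: T) {
        let hasher = &self.hasher;
        let hash = hasher.hash_one(&value);
        // For brevity this sketch does not look for an existing equal element first.
        self.table.insert_unique(hash, value, |v| hasher.hash_one(v));
    }

    fn contains(&self, value: &T) -> bool {
        let hash = self.hasher.hash_one(value);
        self.table.find(hash, |v| v == value).is_some()
    }
}

let mut set = TinySet::new();
set.insert("spam");
assert!(set.contains(&"spam"));
assert!(!set.contains(&"eggs"));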

Implementations§

source§

impl<T> HashTable<T, Global>

source

pub const fn new() -> Self

Creates an empty HashTable.

+

The hash table is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§Examples
+
use hashbrown::HashTable;
+let mut table: HashTable<&str> = HashTable::new();
+assert_eq!(table.len(), 0);
+assert_eq!(table.capacity(), 0);
+
source

pub fn with_capacity(capacity: usize) -> Self

Creates an empty HashTable with the specified capacity.

+

The hash table will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash table will not allocate.

+
§Examples
+
use hashbrown::HashTable;
+let mut table: HashTable<&str> = HashTable::with_capacity(10);
+assert_eq!(table.len(), 0);
+assert!(table.capacity() >= 10);
+
source§

impl<T, A> HashTable<T, A>
where + A: Allocator,

source

pub const fn new_in(alloc: A) -> Self

Creates an empty HashTable using the given allocator.

+

The hash table is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§Examples
+
use ahash::AHasher;
+use bumpalo::Bump;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let bump = Bump::new();
+let mut table = HashTable::new_in(&bump);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// The created HashTable holds no elements
+assert_eq!(table.len(), 0);
+
+// The created HashTable also doesn't allocate memory
+assert_eq!(table.capacity(), 0);
+
+// Now we insert an element into the created HashTable
+table.insert_unique(hasher(&"One"), "One", hasher);
+// We can see that the HashTable holds 1 element
+assert_eq!(table.len(), 1);
+// And it also allocates some capacity
+assert!(table.capacity() > 1);
+
source

pub fn with_capacity_in(capacity: usize, alloc: A) -> Self

Creates an empty HashTable with the specified capacity using the given allocator.

+

The hash table will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash table will not allocate.

+
§Examples
+
use ahash::AHasher;
+use bumpalo::Bump;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let bump = Bump::new();
+let mut table = HashTable::with_capacity_in(5, &bump);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// The created HashTable holds no elements
+assert_eq!(table.len(), 0);
+// But it can hold at least 5 elements without reallocating
+let empty_map_capacity = table.capacity();
+assert!(empty_map_capacity >= 5);
+
+// Now we insert 5 elements into the created HashTable
+table.insert_unique(hasher(&"One"), "One", hasher);
+table.insert_unique(hasher(&"Two"), "Two", hasher);
+table.insert_unique(hasher(&"Three"), "Three", hasher);
+table.insert_unique(hasher(&"Four"), "Four", hasher);
+table.insert_unique(hasher(&"Five"), "Five", hasher);
+
+// We can see that the HashTable holds 5 elements
+assert_eq!(table.len(), 5);
+// But its capacity isn't changed
+assert_eq!(table.capacity(), empty_map_capacity)
+
source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
source

pub fn find(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T>

Returns a reference to an entry in the table with the given hash and +which satisfies the equality function passed.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+table.insert_unique(hasher(&3), 3, hasher);
+assert_eq!(table.find(hasher(&2), |&val| val == 2), Some(&2));
+assert_eq!(table.find(hasher(&4), |&val| val == 4), None);
+
source

pub fn find_mut( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool +) -> Option<&mut T>

Returns a mutable reference to an entry in the table with the given hash +and which satisfies the equality function passed.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+

When mutating an entry, you should ensure that it still retains the same +hash value as when it was inserted, otherwise lookups of that entry may +fail to find it.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Some(val) = table.find_mut(hasher(&1), |val| val.0 == 1) {
+    val.1 = "b";
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), Some(&(1, "b")));
+assert_eq!(table.find(hasher(&2), |val| val.0 == 2), None);
+
source

pub fn find_entry( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool +) -> Result<OccupiedEntry<'_, T, A>, AbsentEntry<'_, T, A>>

Returns an OccupiedEntry for an entry in the table with the given hash +and which satisfies the equality function passed.

+

This can be used to remove the entry from the table. Call +HashTable::entry instead if you wish to insert an entry if the +lookup fails.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Ok(entry) = table.find_entry(hasher(&1), |val| val.0 == 1) {
+    entry.remove();
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+
source

pub fn entry( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool, + hasher: impl Fn(&T) -> u64 +) -> Entry<'_, T, A>

Returns an Entry for an entry in the table with the given hash +and which satisfies the equality function passed.

+

This can be used to remove the entry from the table, or insert a new +entry with the given hash if one doesn’t already exist.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+

This method may grow the table in preparation for an insertion. Call +HashTable::find_entry if this is undesirable.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Entry::Occupied(entry) = table.entry(hasher(&1), |val| val.0 == 1, |val| hasher(&val.0))
+{
+    entry.remove();
+}
+if let Entry::Vacant(entry) = table.entry(hasher(&2), |val| val.0 == 2, |val| hasher(&val.0)) {
+    entry.insert((2, "b"));
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+assert_eq!(table.find(hasher(&2), |val| val.0 == 2), Some(&(2, "b")));
+
source

pub fn insert_unique( + &mut self, + hash: u64, + value: T, + hasher: impl Fn(&T) -> u64 +) -> OccupiedEntry<'_, T, A>

Inserts an element into the HashTable with the given hash value, but +without checking whether an equivalent element already exists within the +table.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut v = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+v.insert_unique(hasher(&1), 1, hasher);
+
source

pub fn clear(&mut self)

Clears the table, removing all values.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut v = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+v.insert_unique(hasher(&1), 1, hasher);
+v.clear();
+assert!(v.is_empty());
+
source

pub fn shrink_to_fit(&mut self, hasher: impl Fn(&T) -> u64)

Shrinks the capacity of the table as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::with_capacity(100);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+assert!(table.capacity() >= 100);
+table.shrink_to_fit(hasher);
+assert!(table.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize, hasher: impl Fn(&T) -> u64)

Shrinks the capacity of the table with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+

Panics if the current capacity is smaller than the supplied +minimum capacity.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::with_capacity(100);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+assert!(table.capacity() >= 100);
+table.shrink_to(10, hasher);
+assert!(table.capacity() >= 10);
+table.shrink_to(0, hasher);
+assert!(table.capacity() >= 2);
+
source

pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64)

Reserves capacity for at least additional more elements to be inserted +in the HashTable. The collection may reserve more space to avoid +frequent reallocations.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program in case of allocation error. Use try_reserve instead if you want to handle memory allocation failure.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<i32> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.reserve(10, hasher);
+assert!(table.capacity() >= 10);
+
source

pub fn try_reserve( + &mut self, + additional: usize, + hasher: impl Fn(&T) -> u64 +) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted +in the given HashTable. The collection may reserve more space to avoid +frequent reallocations.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<i32> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table
+    .try_reserve(10, hasher)
+    .expect("why is the test harness OOMing on 10 bytes?");
+
source

pub fn capacity(&self) -> usize

Returns the number of elements the table can hold without reallocating.

+
§Examples
+
use hashbrown::HashTable;
+let table: HashTable<i32> = HashTable::with_capacity(100);
+assert!(table.capacity() >= 100);
+
source

pub fn len(&self) -> usize

Returns the number of elements in the table.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+let mut v = HashTable::new();
+assert_eq!(v.len(), 0);
+v.insert_unique(hasher(&1), 1, hasher);
+assert_eq!(v.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the table contains no elements.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+let mut v = HashTable::new();
+assert!(v.is_empty());
+v.insert_unique(hasher(&1), 1, hasher);
+assert!(!v.is_empty());
+
source

pub fn iter(&self) -> Iter<'_, T>

An iterator visiting all elements in arbitrary order. +The iterator element type is &'a T.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"a"), "b", hasher);
+table.insert_unique(hasher(&"b"), "b", hasher);
+
+// Will print in an arbitrary order.
+for x in table.iter() {
+    println!("{}", x);
+}
+
source

pub fn iter_mut(&mut self) -> IterMut<'_, T>

An iterator visiting all elements in arbitrary order, +with mutable references to the elements. +The iterator element type is &'a mut T.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+table.insert_unique(hasher(&3), 3, hasher);
+
+// Update all values
+for val in table.iter_mut() {
+    *val *= 2;
+}
+
+assert_eq!(table.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in &table {
+    println!("val: {}", val);
+    vec.push(*val);
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [2, 4, 6]);
+
+assert_eq!(table.len(), 3);
+
source

pub fn retain(&mut self, f: impl FnMut(&mut T) -> bool)

Retains only the elements specified by the predicate.

+

In other words, remove all elements e such that f(&e) returns false.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 1..=6 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+table.retain(|&mut x| x % 2 == 0);
+assert_eq!(table.len(), 3);
+
source

pub fn drain(&mut self) -> Drain<'_, T, A>

Clears the table, returning all elements in an iterator.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 1..=3 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+assert!(!table.is_empty());
+
+// print 1, 2, 3 in an arbitrary order
+for i in table.drain() {
+    println!("{}", i);
+}
+
+assert!(table.is_empty());
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
where + F: FnMut(&mut T) -> bool,

Drains the elements for which the given predicate returns true, and returns an iterator over the removed items.

+

In other words, move all elements e such that f(&e) returns true out +into another iterator.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 0..8 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+let drained: Vec<i32> = table.extract_if(|&mut v| v % 2 == 0).collect();
+
+let mut evens = drained.into_iter().collect::<Vec<_>>();
+let mut odds = table.into_iter().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
source

pub fn get_many_mut<const N: usize>( + &mut self, + hashes: [u64; N], + eq: impl FnMut(usize, &T) -> bool +) -> Option<[&mut T; N]>

Attempts to get mutable references to N values in the map at once.

+

The eq argument should be a closure such that eq(i, k) returns true if k is equal to +the ith key to be looked up.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the +keys are duplicates or missing.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for (k, v) in [
+    ("Bodleian Library", 1602),
+    ("Athenæum", 1807),
+    ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ("Library of Congress", 1800),
+] {
+    libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+}
+
+let keys = ["Athenæum", "Library of Congress"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(
+    got,
+    Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+);
+
+// Missing keys result in None
+let keys = ["Athenæum", "New York Public Library"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let keys = ["Athenæum", "Athenæum"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_unchecked_mut<const N: usize>( + &mut self, + hashes: [u64; N], + eq: impl FnMut(usize, &T) -> bool +) -> Option<[&mut T; N]>

Attempts to get mutable references to N values in the map at once, without validating that +the values are unique.

+

The eq argument should be a closure such that eq(i, k) returns true if k is equal to +the ith key to be looked up.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for (k, v) in [
+    ("Bodleian Library", 1602),
+    ("Athenæum", 1807),
+    ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ("Library of Congress", 1800),
+] {
+    libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+}
+
+let keys = ["Athenæum", "Library of Congress"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(
+    got,
+    Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+);
+
+// Missing keys result in None
+let keys = ["Athenæum", "New York Public Library"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let keys = ["Athenæum", "Athenæum"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+

Trait Implementations§

source§

impl<T, A> Clone for HashTable<T, A>
where + T: Clone, + A: Allocator + Clone,

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, A> Debug for HashTable<T, A>
where + T: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, A> Default for HashTable<T, A>
where + A: Allocator + Default,

source§

fn default() -> Self

Returns the “default value” for a type. Read more
source§

impl<'a, T, A> IntoIterator for &'a HashTable<T, A>
where + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> Iter<'a, T>

Creates an iterator from a value. Read more
source§

impl<'a, T, A> IntoIterator for &'a mut HashTable<T, A>
where + A: Allocator,

§

type Item = &'a mut T

The type of the elements being iterated over.
§

type IntoIter = IterMut<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> IterMut<'a, T>

Creates an iterator from a value. Read more
source§

impl<T, A> IntoIterator for HashTable<T, A>
where + A: Allocator,

§

type Item = T

The type of the elements being iterated over.
§

type IntoIter = IntoIter<T, A>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> IntoIter<T, A>

Creates an iterator from a value. Read more

Auto Trait Implementations§

§

impl<T, A> Freeze for HashTable<T, A>
where + A: Freeze,

§

impl<T, A> RefUnwindSafe for HashTable<T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<T, A> Send for HashTable<T, A>
where + T: Send, + A: Send,

§

impl<T, A> Sync for HashTable<T, A>
where + T: Sync, + A: Sync,

§

impl<T, A> Unpin for HashTable<T, A>
where + A: Unpin, + T: Unpin,

§

impl<T, A> UnwindSafe for HashTable<T, A>
where + A: UnwindSafe, + T: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.IntoIter.html b/hashbrown/hash_table/struct.IntoIter.html new file mode 100644 index 000000000..70350edbc --- /dev/null +++ b/hashbrown/hash_table/struct.IntoIter.html @@ -0,0 +1,209 @@ +IntoIter in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::IntoIter

source ·
pub struct IntoIter<T, A = Global>
where A: Allocator,
{ /* private fields */ }
Expand description

An owning iterator over the entries of a HashTable in arbitrary order. +The iterator element type is T.

+

This struct is created by the into_iter method on HashTable +(provided by the IntoIterator trait). See its documentation for more. +The table cannot be used after calling that method.

+
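A minimal sketch (again substituting std's DefaultHasher for the ahash hasher used in the other examples) of consuming a table through this iterator:

use std::collections::hash_map::DefaultHasher;
use std::hash::{BuildHasher, BuildHasherDefault};

use hashbrown::HashTable;

let mut table = HashTable::new();
let hasher = BuildHasherDefault::<DefaultHasher>::default();
let hasher = |val: &_| hasher.hash_one(val);
for x in 1..=3 {
    table.insert_unique(hasher(&x), x, hasher);
}

// `into_iter` takes the table by value; items come out in arbitrary order.
let mut items: Vec<i32> = table.into_iter().collect();
items.sort_unstable();
assert_eq!(items, [1, 2, 3]);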

Trait Implementations§

source§

impl<T, A> ExactSizeIterator for IntoIter<T, A>
where + A: Allocator,

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<T, A> Iterator for IntoIter<T, A>
where + A: Allocator,

§

type Item = T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T, A> FusedIterator for IntoIter<T, A>
where + A: Allocator,

Auto Trait Implementations§

§

impl<T, A> Freeze for IntoIter<T, A>
where + A: Freeze,

§

impl<T, A> RefUnwindSafe for IntoIter<T, A>
where + T: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<T, A> Send for IntoIter<T, A>
where + T: Send, + A: Send,

§

impl<T, A> Sync for IntoIter<T, A>
where + T: Sync, + A: Sync,

§

impl<T, A> Unpin for IntoIter<T, A>
where + T: Unpin, + A: Unpin,

§

impl<T, A> UnwindSafe for IntoIter<T, A>
where + T: UnwindSafe + RefUnwindSafe, + A: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.Iter.html b/hashbrown/hash_table/struct.Iter.html new file mode 100644 index 000000000..8e2b0b6e4 --- /dev/null +++ b/hashbrown/hash_table/struct.Iter.html @@ -0,0 +1,197 @@ +Iter in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::Iter

source ·
pub struct Iter<'a, T> { /* private fields */ }
Expand description

An iterator over the entries of a HashTable in arbitrary order. +The iterator element type is &'a T.

+

This struct is created by the iter method on HashTable. See its +documentation for more.

+
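Editorial aside added for this page (not emitted by rustdoc): a minimal sketch of driving Iter via HashTable::iter, reusing the ahash-based hashing setup from the other examples in these docs. Element order is arbitrary, so the values are sorted before asserting.
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<u32> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 1..=3 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+// Iteration order is arbitrary, so collect and sort before comparing.
+let mut seen: Vec<u32> = table.iter().copied().collect();
+seen.sort_unstable();
+assert_eq!(seen, [1, 2, 3]);
+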

Trait Implementations§

source§

impl<T> ExactSizeIterator for Iter<'_, T>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, T> Iterator for Iter<'a, T>

§

type Item = &'a T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T> FusedIterator for Iter<'_, T>

Auto Trait Implementations§

§

impl<'a, T> Freeze for Iter<'a, T>

§

impl<'a, T> RefUnwindSafe for Iter<'a, T>
where + T: RefUnwindSafe,

§

impl<'a, T> Send for Iter<'a, T>
where + T: Sync,

§

impl<'a, T> Sync for Iter<'a, T>
where + T: Sync,

§

impl<'a, T> Unpin for Iter<'a, T>

§

impl<'a, T> UnwindSafe for Iter<'a, T>
where + T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.IterMut.html b/hashbrown/hash_table/struct.IterMut.html new file mode 100644 index 000000000..808ae38ef --- /dev/null +++ b/hashbrown/hash_table/struct.IterMut.html @@ -0,0 +1,196 @@ +IterMut in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::IterMut

source ·
pub struct IterMut<'a, T> { /* private fields */ }
Expand description

A mutable iterator over the entries of a HashTable in arbitrary order. +The iterator element type is &'a mut T.

+

This struct is created by the iter_mut method on HashTable. See its +documentation for more.

+
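A hedged sketch added alongside the generated docs: updating values in place through HashTable::iter_mut. Only the &str key of each tuple is hashed (following the pattern used elsewhere in these docs), so bumping the counter does not invalidate the stored hashes.
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for key in ["a", "b", "c"] {
+    table.insert_unique(hasher(&key), (key, 1), |(k, _)| hasher(&k));
+}
+// Bump every counter through the mutable iterator; the hashed key is untouched.
+for (_, count) in table.iter_mut() {
+    *count += 1;
+}
+assert!(table.iter().all(|&(_, count)| count == 2));
+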

Trait Implementations§

source§

impl<T> ExactSizeIterator for IterMut<'_, T>

source§

fn len(&self) -> usize

Returns the exact remaining length of the iterator. Read more
source§

fn is_empty(&self) -> bool

🔬This is a nightly-only experimental API. (exact_size_is_empty)
Returns true if the iterator is empty. Read more
source§

impl<'a, T> Iterator for IterMut<'a, T>

§

type Item = &'a mut T

The type of the elements being iterated over.
source§

fn next(&mut self) -> Option<Self::Item>

Advances the iterator and returns the next value. Read more
source§

fn size_hint(&self) -> (usize, Option<usize>)

Returns the bounds on the remaining length of the iterator. Read more
source§

fn fold<B, F>(self, init: B, f: F) -> B
where + Self: Sized, + F: FnMut(B, Self::Item) -> B,

Folds every element into an accumulator by applying an operation, +returning the final result. Read more
source§

fn next_chunk<const N: usize>( + &mut self +) -> Result<[Self::Item; N], IntoIter<Self::Item, N>>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_next_chunk)
Advances the iterator and returns an array containing the next N values. Read more
1.0.0 · source§

fn count(self) -> usize
where + Self: Sized,

Consumes the iterator, counting the number of iterations and returning it. Read more
1.0.0 · source§

fn last(self) -> Option<Self::Item>
where + Self: Sized,

Consumes the iterator, returning the last element. Read more
source§

fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>>

🔬This is a nightly-only experimental API. (iter_advance_by)
Advances the iterator by n elements. Read more
1.0.0 · source§

fn nth(&mut self, n: usize) -> Option<Self::Item>

Returns the nth element of the iterator. Read more
1.28.0 · source§

fn step_by(self, step: usize) -> StepBy<Self>
where + Self: Sized,

Creates an iterator starting at the same point, but stepping by +the given amount at each iteration. Read more
1.0.0 · source§

fn chain<U>(self, other: U) -> Chain<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator<Item = Self::Item>,

Takes two iterators and creates a new iterator over both in sequence. Read more
1.0.0 · source§

fn zip<U>(self, other: U) -> Zip<Self, <U as IntoIterator>::IntoIter>
where + Self: Sized, + U: IntoIterator,

‘Zips up’ two iterators into a single iterator of pairs. Read more
source§

fn intersperse_with<G>(self, separator: G) -> IntersperseWith<Self, G>
where + Self: Sized, + G: FnMut() -> Self::Item,

🔬This is a nightly-only experimental API. (iter_intersperse)
Creates a new iterator which places an item generated by separator +between adjacent items of the original iterator. Read more
1.0.0 · source§

fn map<B, F>(self, f: F) -> Map<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> B,

Takes a closure and creates an iterator which calls that closure on each +element. Read more
1.21.0 · source§

fn for_each<F>(self, f: F)
where + Self: Sized, + F: FnMut(Self::Item),

Calls a closure on each element of an iterator. Read more
1.0.0 · source§

fn filter<P>(self, predicate: P) -> Filter<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator which uses a closure to determine if an element +should be yielded. Read more
1.0.0 · source§

fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both filters and maps. Read more
1.0.0 · source§

fn enumerate(self) -> Enumerate<Self>
where + Self: Sized,

Creates an iterator which gives the current iteration count as well as +the next value. Read more
1.0.0 · source§

fn peekable(self) -> Peekable<Self>
where + Self: Sized,

Creates an iterator which can use the peek and peek_mut methods +to look at the next element of the iterator without consuming it. See +their documentation for more information. Read more
1.0.0 · source§

fn skip_while<P>(self, predicate: P) -> SkipWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that skips elements based on a predicate. Read more
1.0.0 · source§

fn take_while<P>(self, predicate: P) -> TakeWhile<Self, P>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Creates an iterator that yields elements based on a predicate. Read more
1.57.0 · source§

fn map_while<B, P>(self, predicate: P) -> MapWhile<Self, P>
where + Self: Sized, + P: FnMut(Self::Item) -> Option<B>,

Creates an iterator that both yields elements based on a predicate and maps. Read more
1.0.0 · source§

fn skip(self, n: usize) -> Skip<Self>
where + Self: Sized,

Creates an iterator that skips the first n elements. Read more
1.0.0 · source§

fn take(self, n: usize) -> Take<Self>
where + Self: Sized,

Creates an iterator that yields the first n elements, or fewer +if the underlying iterator ends sooner. Read more
1.0.0 · source§

fn scan<St, B, F>(self, initial_state: St, f: F) -> Scan<Self, St, F>
where + Self: Sized, + F: FnMut(&mut St, Self::Item) -> Option<B>,

An iterator adapter which, like fold, holds internal state, but +unlike fold, produces a new iterator. Read more
1.0.0 · source§

fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
where + Self: Sized, + U: IntoIterator, + F: FnMut(Self::Item) -> U,

Creates an iterator that works like map, but flattens nested structure. Read more
source§

fn map_windows<F, R, const N: usize>(self, f: F) -> MapWindows<Self, F, N>
where + Self: Sized, + F: FnMut(&[Self::Item; N]) -> R,

🔬This is a nightly-only experimental API. (iter_map_windows)
Calls the given function f for each contiguous window of size N over +self and returns an iterator over the outputs of f. Like slice::windows(), +the windows during mapping overlap as well. Read more
1.0.0 · source§

fn fuse(self) -> Fuse<Self>
where + Self: Sized,

Creates an iterator which ends after the first None. Read more
1.0.0 · source§

fn inspect<F>(self, f: F) -> Inspect<Self, F>
where + Self: Sized, + F: FnMut(&Self::Item),

Does something with each element of an iterator, passing the value on. Read more
1.0.0 · source§

fn by_ref(&mut self) -> &mut Self
where + Self: Sized,

Borrows an iterator, rather than consuming it. Read more
1.0.0 · source§

fn collect<B>(self) -> B
where + B: FromIterator<Self::Item>, + Self: Sized,

Transforms an iterator into a collection. Read more
source§

fn collect_into<E>(self, collection: &mut E) -> &mut E
where + E: Extend<Self::Item>, + Self: Sized,

🔬This is a nightly-only experimental API. (iter_collect_into)
Collects all the items from an iterator into a collection. Read more
1.0.0 · source§

fn partition<B, F>(self, f: F) -> (B, B)
where + Self: Sized, + B: Default + Extend<Self::Item>, + F: FnMut(&Self::Item) -> bool,

Consumes an iterator, creating two collections from it. Read more
source§

fn is_partitioned<P>(self, predicate: P) -> bool
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_is_partitioned)
Checks if the elements of this iterator are partitioned according to the given predicate, +such that all those that return true precede all those that return false. Read more
1.27.0 · source§

fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where + Self: Sized, + F: FnMut(B, Self::Item) -> R, + R: Try<Output = B>,

An iterator method that applies a function as long as it returns +successfully, producing a single, final value. Read more
1.27.0 · source§

fn try_for_each<F, R>(&mut self, f: F) -> R
where + Self: Sized, + F: FnMut(Self::Item) -> R, + R: Try<Output = ()>,

An iterator method that applies a fallible function to each item in the +iterator, stopping at the first error and returning that error. Read more
1.51.0 · source§

fn reduce<F>(self, f: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> Self::Item,

Reduces the elements to a single one, by repeatedly applying a reducing +operation. Read more
source§

fn try_reduce<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<<R as Try>::Output>>>::TryType
where + Self: Sized, + F: FnMut(Self::Item, Self::Item) -> R, + R: Try<Output = Self::Item>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (iterator_try_reduce)
Reduces the elements to a single one by repeatedly applying a reducing operation. If the +closure returns a failure, the failure is propagated back to the caller immediately. Read more
1.0.0 · source§

fn all<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if every element of the iterator matches a predicate. Read more
1.0.0 · source§

fn any<F>(&mut self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> bool,

Tests if any element of the iterator matches a predicate. Read more
1.0.0 · source§

fn find<P>(&mut self, predicate: P) -> Option<Self::Item>
where + Self: Sized, + P: FnMut(&Self::Item) -> bool,

Searches for an element of an iterator that satisfies a predicate. Read more
1.30.0 · source§

fn find_map<B, F>(&mut self, f: F) -> Option<B>
where + Self: Sized, + F: FnMut(Self::Item) -> Option<B>,

Applies function to the elements of iterator and returns +the first non-none result. Read more
source§

fn try_find<F, R>( + &mut self, + f: F +) -> <<R as Try>::Residual as Residual<Option<Self::Item>>>::TryType
where + Self: Sized, + F: FnMut(&Self::Item) -> R, + R: Try<Output = bool>, + <R as Try>::Residual: Residual<Option<Self::Item>>,

🔬This is a nightly-only experimental API. (try_find)
Applies function to the elements of iterator and returns +the first true result or the first error. Read more
1.0.0 · source§

fn position<P>(&mut self, predicate: P) -> Option<usize>
where + Self: Sized, + P: FnMut(Self::Item) -> bool,

Searches for an element in an iterator, returning its index. Read more
1.6.0 · source§

fn max_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the maximum value from the +specified function. Read more
1.15.0 · source§

fn max_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the maximum value with respect to the +specified comparison function. Read more
1.6.0 · source§

fn min_by_key<B, F>(self, f: F) -> Option<Self::Item>
where + B: Ord, + Self: Sized, + F: FnMut(&Self::Item) -> B,

Returns the element that gives the minimum value from the +specified function. Read more
1.15.0 · source§

fn min_by<F>(self, compare: F) -> Option<Self::Item>
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> Ordering,

Returns the element that gives the minimum value with respect to the +specified comparison function. Read more
1.0.0 · source§

fn unzip<A, B, FromA, FromB>(self) -> (FromA, FromB)
where + FromA: Default + Extend<A>, + FromB: Default + Extend<B>, + Self: Sized + Iterator<Item = (A, B)>,

Converts an iterator of pairs into a pair of containers. Read more
1.36.0 · source§

fn copied<'a, T>(self) -> Copied<Self>
where + T: 'a + Copy, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which copies all of its elements. Read more
1.0.0 · source§

fn cloned<'a, T>(self) -> Cloned<Self>
where + T: 'a + Clone, + Self: Sized + Iterator<Item = &'a T>,

Creates an iterator which clones all of its elements. Read more
source§

fn array_chunks<const N: usize>(self) -> ArrayChunks<Self, N>
where + Self: Sized,

🔬This is a nightly-only experimental API. (iter_array_chunks)
Returns an iterator over N elements of the iterator at a time. Read more
1.11.0 · source§

fn sum<S>(self) -> S
where + Self: Sized, + S: Sum<Self::Item>,

Sums the elements of an iterator. Read more
1.11.0 · source§

fn product<P>(self) -> P
where + Self: Sized, + P: Product<Self::Item>,

Iterates over the entire iterator, multiplying all the elements Read more
source§

fn cmp_by<I, F>(self, other: I, cmp: F) -> Ordering
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Ordering,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn partial_cmp<I>(self, other: I) -> Option<Ordering>
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Lexicographically compares the PartialOrd elements of +this Iterator with those of another. The comparison works like short-circuit +evaluation, returning a result without comparing the remaining elements. +As soon as an order can be determined, the evaluation stops and a result is returned. Read more
source§

fn partial_cmp_by<I, F>(self, other: I, partial_cmp: F) -> Option<Ordering>
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> Option<Ordering>,

🔬This is a nightly-only experimental API. (iter_order_by)
Lexicographically compares the elements of this Iterator with those +of another with respect to the specified comparison function. Read more
1.5.0 · source§

fn eq<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are equal to those of +another. Read more
source§

fn eq_by<I, F>(self, other: I, eq: F) -> bool
where + Self: Sized, + I: IntoIterator, + F: FnMut(Self::Item, <I as IntoIterator>::Item) -> bool,

🔬This is a nightly-only experimental API. (iter_order_by)
Determines if the elements of this Iterator are equal to those of +another with respect to the specified equality function. Read more
1.5.0 · source§

fn ne<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialEq<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are not equal to those of +another. Read more
1.5.0 · source§

fn lt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less than those of another. Read more
1.5.0 · source§

fn le<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +less or equal to those of another. Read more
1.5.0 · source§

fn gt<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than those of another. Read more
1.5.0 · source§

fn ge<I>(self, other: I) -> bool
where + I: IntoIterator, + Self::Item: PartialOrd<<I as IntoIterator>::Item>, + Self: Sized,

Determines if the elements of this Iterator are lexicographically +greater than or equal to those of another. Read more
source§

fn is_sorted_by<F>(self, compare: F) -> bool
where + Self: Sized, + F: FnMut(&Self::Item, &Self::Item) -> bool,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given comparator function. Read more
source§

fn is_sorted_by_key<F, K>(self, f: F) -> bool
where + Self: Sized, + F: FnMut(Self::Item) -> K, + K: PartialOrd,

🔬This is a nightly-only experimental API. (is_sorted)
Checks if the elements of this iterator are sorted using the given key extraction +function. Read more
source§

impl<T> FusedIterator for IterMut<'_, T>

Auto Trait Implementations§

§

impl<'a, T> Freeze for IterMut<'a, T>

§

impl<'a, T> RefUnwindSafe for IterMut<'a, T>
where + T: RefUnwindSafe,

§

impl<'a, T> Send for IterMut<'a, T>
where + T: Send,

§

impl<'a, T> Sync for IterMut<'a, T>
where + T: Sync,

§

impl<'a, T> Unpin for IterMut<'a, T>

§

impl<'a, T> !UnwindSafe for IterMut<'a, T>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<I> IntoIterator for I
where + I: Iterator,

§

type Item = <I as Iterator>::Item

The type of the elements being iterated over.
§

type IntoIter = I

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> I

Creates an iterator from a value. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.OccupiedEntry.html b/hashbrown/hash_table/struct.OccupiedEntry.html new file mode 100644 index 000000000..aaea0bd29 --- /dev/null +++ b/hashbrown/hash_table/struct.OccupiedEntry.html @@ -0,0 +1,170 @@ +OccupiedEntry in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::OccupiedEntry

source ·
pub struct OccupiedEntry<'a, T, A = Global>
where + A: Allocator,
{ /* private fields */ }
Expand description

A view into an occupied entry in a HashTable. +It is part of the Entry enum.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in ["a", "b", "c"] {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+assert_eq!(table.len(), 3);
+
+let _entry_o: OccupiedEntry<_, _> = table.find_entry(hasher(&"a"), |&x| x == "a").unwrap();
+assert_eq!(table.len(), 3);
+
+// Existing key
+match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(view) => {
+        assert_eq!(view.get(), &"a");
+    }
+}
+
+assert_eq!(table.len(), 3);
+
+// Existing key (take)
+match table.entry(hasher(&"c"), |&x| x == "c", hasher) {
+    Entry::Vacant(_) => unreachable!(),
+    Entry::Occupied(view) => {
+        assert_eq!(view.remove().0, "c");
+    }
+}
+assert_eq!(table.find(hasher(&"c"), |&x| x == "c"), None);
+assert_eq!(table.len(), 2);
+

Implementations§

source§

impl<'a, T, A> OccupiedEntry<'a, T, A>
where + A: Allocator,

source

pub fn remove(self) -> (T, VacantEntry<'a, T, A>)

Takes the value out of the entry, and returns it along with a +VacantEntry that can be used to insert another value with the same +hash as the one that was just removed.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+// The table is empty
+assert!(table.is_empty() && table.capacity() == 0);
+
+table.insert_unique(hasher(&"poneyland"), "poneyland", hasher);
+let capacity_before_remove = table.capacity();
+
+if let Entry::Occupied(o) = table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    assert_eq!(o.remove().0, "poneyland");
+}
+
+assert!(table
+    .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    .is_none());
+// Now the table holds no elements, but its capacity equals the old one
+assert!(table.len() == 0 && table.capacity() == capacity_before_remove);
+
source

pub fn get(&self) -> &T

Gets a reference to the value in the entry.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"poneyland"), "poneyland", hasher);
+
+match table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    Entry::Vacant(_) => panic!(),
+    Entry::Occupied(entry) => assert_eq!(entry.get(), &"poneyland"),
+}
+
source

pub fn get_mut(&mut self) -> &mut T

Gets a mutable reference to the value in the entry.

+

If you need a reference to the OccupiedEntry which may outlive the +destruction of the Entry value, see into_mut.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"poneyland"), ("poneyland", 12), |(k, _)| hasher(&k));
+
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    Some(&("poneyland", 12))
+);
+
+if let Entry::Occupied(mut o) = table.entry(
+    hasher(&"poneyland"),
+    |&(x, _)| x == "poneyland",
+    |(k, _)| hasher(&k),
+) {
+    o.get_mut().1 += 10;
+    assert_eq!(o.get().1, 22);
+
+    // We can use the same Entry multiple times.
+    o.get_mut().1 += 2;
+}
+
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    Some(&("poneyland", 24))
+);
+
source

pub fn into_mut(self) -> &'a mut T

Converts the OccupiedEntry into a mutable reference to the value in the entry +with a lifetime bound to the table itself.

+

If you need multiple references to the OccupiedEntry, see get_mut.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"poneyland"), ("poneyland", 12), |(k, _)| hasher(&k));
+
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    Some(&("poneyland", 12))
+);
+
+let value: &mut (&str, u32);
+match table.entry(
+    hasher(&"poneyland"),
+    |&(x, _)| x == "poneyland",
+    |(k, _)| hasher(&k),
+) {
+    Entry::Occupied(entry) => value = entry.into_mut(),
+    Entry::Vacant(_) => panic!(),
+}
+value.1 += 10;
+
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    Some(&("poneyland", 22))
+);
+
source

pub fn into_table(self) -> &'a mut HashTable<T, A>

Converts the OccupiedEntry into a mutable reference to the underlying +table.

+
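No example is generated for into_table; the following minimal sketch (added here, same ahash setup as the examples above) shows the table borrow being recovered from an entry obtained with find_entry.
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"poneyland"), "poneyland", hasher);
+
+// find_entry yields an OccupiedEntry; into_table hands back the &mut table
+// so it can be used again once the entry itself is no longer needed.
+let entry = table.find_entry(hasher(&"poneyland"), |&x| x == "poneyland").unwrap();
+let table_ref: &mut HashTable<&str> = entry.into_table();
+assert_eq!(table_ref.len(), 1);
+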

Trait Implementations§

source§

impl<T: Debug, A: Allocator> Debug for OccupiedEntry<'_, T, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, A> Send for OccupiedEntry<'_, T, A>
where + T: Send, + A: Send + Allocator,

source§

impl<T, A> Sync for OccupiedEntry<'_, T, A>
where + T: Sync, + A: Sync + Allocator,

Auto Trait Implementations§

§

impl<'a, T, A> Freeze for OccupiedEntry<'a, T, A>

§

impl<'a, T, A> RefUnwindSafe for OccupiedEntry<'a, T, A>
where + T: RefUnwindSafe, + A: RefUnwindSafe,

§

impl<'a, T, A> Unpin for OccupiedEntry<'a, T, A>

§

impl<'a, T, A = Global> !UnwindSafe for OccupiedEntry<'a, T, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/hash_table/struct.VacantEntry.html b/hashbrown/hash_table/struct.VacantEntry.html new file mode 100644 index 000000000..b0cc0987b --- /dev/null +++ b/hashbrown/hash_table/struct.VacantEntry.html @@ -0,0 +1,67 @@ +VacantEntry in hashbrown::hash_table - Rust

Struct hashbrown::hash_table::VacantEntry

source ·
pub struct VacantEntry<'a, T, A = Global>
where + A: Allocator,
{ /* private fields */ }
Expand description

A view into a vacant entry in a HashTable. +It is part of the Entry enum.

+

§Examples

+
use ahash::AHasher;
+use hashbrown::hash_table::{Entry, HashTable, VacantEntry};
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+let entry_v: VacantEntry<_, _> = match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    Entry::Vacant(view) => view,
+    Entry::Occupied(_) => unreachable!(),
+};
+entry_v.insert("a");
+assert!(table.find(hasher(&"a"), |&x| x == "a").is_some() && table.len() == 1);
+
+// Nonexistent key (insert)
+match table.entry(hasher(&"b"), |&x| x == "b", hasher) {
+    Entry::Vacant(view) => {
+        view.insert("b");
+    }
+    Entry::Occupied(_) => unreachable!(),
+}
+assert!(table.find(hasher(&"b"), |&x| x == "b").is_some() && table.len() == 2);
+

Implementations§

source§

impl<'a, T, A> VacantEntry<'a, T, A>
where + A: Allocator,

source

pub fn insert(self, value: T) -> OccupiedEntry<'a, T, A>

Inserts a new element into the table with the hash that was used to +obtain the VacantEntry.

+

An OccupiedEntry is returned for the newly inserted element.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+if let Entry::Vacant(o) = table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    o.insert("poneyland");
+}
+assert_eq!(
+    table.find(hasher(&"poneyland"), |&x| x == "poneyland"),
+    Some(&"poneyland")
+);
+
source

pub fn into_table(self) -> &'a mut HashTable<T, A>

Converts the VacantEntry into a mutable reference to the underlying +table.

+
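into_table has no generated example either; here is a hedged sketch in the same style, abandoning a vacant entry without inserting and recovering the table borrow.
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<&str> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// Decide not to insert after all: into_table returns the &mut table untouched.
+if let Entry::Vacant(v) = table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    let table_ref = v.into_table();
+    assert!(table_ref.is_empty());
+}
+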

Trait Implementations§

source§

impl<T: Debug, A: Allocator> Debug for VacantEntry<'_, T, A>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a, T, A> Freeze for VacantEntry<'a, T, A>

§

impl<'a, T, A> RefUnwindSafe for VacantEntry<'a, T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<'a, T, A> Send for VacantEntry<'a, T, A>
where + T: Send, + A: Send,

§

impl<'a, T, A> Sync for VacantEntry<'a, T, A>
where + T: Sync, + A: Sync,

§

impl<'a, T, A> Unpin for VacantEntry<'a, T, A>

§

impl<'a, T, A = Global> !UnwindSafe for VacantEntry<'a, T, A>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/index.html b/hashbrown/index.html new file mode 100644 index 000000000..9a74d2269 --- /dev/null +++ b/hashbrown/index.html @@ -0,0 +1,6 @@ +hashbrown - Rust

Crate hashbrown

source ·
Expand description

This crate is a Rust port of Google’s high-performance SwissTable hash +map, adapted to make it a drop-in replacement for Rust’s standard HashMap +and HashSet types.

+

The original C++ version of SwissTable can be found here, and this +CppCon talk gives an overview of how the algorithm works.

+
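For orientation, a short added sketch (not part of the generated page, and assuming the crate's default hasher feature is enabled): hashbrown::HashMap is used exactly like std::collections::HashMap.
use hashbrown::HashMap;
+
+// Drop-in usage: the same insert/get/len API as std::collections::HashMap.
+let mut map: HashMap<&str, u32> = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+assert_eq!(map.get("a"), Some(&1));
+assert_eq!(map.len(), 2);
+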

Modules§

  • hash_map: A hash map implemented with quadratic probing and SIMD lookup.
  • hash_set: A hash set implemented as a HashMap where the value is ().
  • hash_table: A hash table implemented with quadratic probing and SIMD lookup.

Structs§

  • HashMap: A hash map implemented with quadratic probing and SIMD lookup.
  • HashSet: A hash set implemented as a HashMap where the value is ().
  • HashTable: Low-level hash table with explicit hashing.

Enums§

Traits§

\ No newline at end of file diff --git a/hashbrown/map/enum.DefaultHashBuilder.html b/hashbrown/map/enum.DefaultHashBuilder.html new file mode 100644 index 000000000..4ea833b2b --- /dev/null +++ b/hashbrown/map/enum.DefaultHashBuilder.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/enum.DefaultHashBuilder.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/enum.Entry.html b/hashbrown/map/enum.Entry.html new file mode 100644 index 000000000..4628247b0 --- /dev/null +++ b/hashbrown/map/enum.Entry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/enum.Entry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/enum.EntryRef.html b/hashbrown/map/enum.EntryRef.html new file mode 100644 index 000000000..f5c02d90e --- /dev/null +++ b/hashbrown/map/enum.EntryRef.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/enum.EntryRef.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/enum.RawEntryMut.html b/hashbrown/map/enum.RawEntryMut.html new file mode 100644 index 000000000..e5157b5bb --- /dev/null +++ b/hashbrown/map/enum.RawEntryMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/enum.RawEntryMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.Drain.html b/hashbrown/map/struct.Drain.html new file mode 100644 index 000000000..8638f9569 --- /dev/null +++ b/hashbrown/map/struct.Drain.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.Drain.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.ExtractIf.html b/hashbrown/map/struct.ExtractIf.html new file mode 100644 index 000000000..03109298c --- /dev/null +++ b/hashbrown/map/struct.ExtractIf.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.ExtractIf.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.HashMap.html b/hashbrown/map/struct.HashMap.html new file mode 100644 index 000000000..20322e82c --- /dev/null +++ b/hashbrown/map/struct.HashMap.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.HashMap.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.IntoIter.html b/hashbrown/map/struct.IntoIter.html new file mode 100644 index 000000000..ab081586d --- /dev/null +++ b/hashbrown/map/struct.IntoIter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.IntoIter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.IntoKeys.html b/hashbrown/map/struct.IntoKeys.html new file mode 100644 index 000000000..3d8c19e71 --- /dev/null +++ b/hashbrown/map/struct.IntoKeys.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.IntoKeys.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.IntoValues.html b/hashbrown/map/struct.IntoValues.html new file mode 100644 index 000000000..9abfbe012 --- /dev/null +++ b/hashbrown/map/struct.IntoValues.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.IntoValues.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.Iter.html b/hashbrown/map/struct.Iter.html new file mode 100644 index 000000000..1d3bab1dd --- /dev/null +++ b/hashbrown/map/struct.Iter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.Iter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.IterMut.html b/hashbrown/map/struct.IterMut.html new file mode 100644 index 000000000..c35f1e765 --- /dev/null +++ b/hashbrown/map/struct.IterMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.IterMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.Keys.html b/hashbrown/map/struct.Keys.html new file mode 100644 index 000000000..c4981538d --- /dev/null +++ b/hashbrown/map/struct.Keys.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.Keys.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.OccupiedEntry.html b/hashbrown/map/struct.OccupiedEntry.html new file mode 100644 index 000000000..6df905bfa --- /dev/null +++ b/hashbrown/map/struct.OccupiedEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.OccupiedEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.OccupiedEntryRef.html b/hashbrown/map/struct.OccupiedEntryRef.html new file mode 100644 index 000000000..c8f8df660 --- /dev/null +++ b/hashbrown/map/struct.OccupiedEntryRef.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.OccupiedEntryRef.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.OccupiedError.html b/hashbrown/map/struct.OccupiedError.html new file mode 100644 index 000000000..f13f8ea92 --- /dev/null +++ b/hashbrown/map/struct.OccupiedError.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.OccupiedError.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.RawEntryBuilder.html b/hashbrown/map/struct.RawEntryBuilder.html new file mode 100644 index 000000000..3916c7ae0 --- /dev/null +++ b/hashbrown/map/struct.RawEntryBuilder.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.RawEntryBuilder.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.RawEntryBuilderMut.html b/hashbrown/map/struct.RawEntryBuilderMut.html new file mode 100644 index 000000000..e700cbcef --- /dev/null +++ b/hashbrown/map/struct.RawEntryBuilderMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.RawEntryBuilderMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.RawOccupiedEntryMut.html b/hashbrown/map/struct.RawOccupiedEntryMut.html new file mode 100644 index 000000000..4b655918a --- /dev/null +++ b/hashbrown/map/struct.RawOccupiedEntryMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.RawOccupiedEntryMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.RawVacantEntryMut.html b/hashbrown/map/struct.RawVacantEntryMut.html new file mode 100644 index 000000000..841504718 --- /dev/null +++ b/hashbrown/map/struct.RawVacantEntryMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.RawVacantEntryMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.VacantEntry.html b/hashbrown/map/struct.VacantEntry.html new file mode 100644 index 000000000..e88c09f2a --- /dev/null +++ b/hashbrown/map/struct.VacantEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.VacantEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.VacantEntryRef.html b/hashbrown/map/struct.VacantEntryRef.html new file mode 100644 index 000000000..33ef25cd2 --- /dev/null +++ b/hashbrown/map/struct.VacantEntryRef.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.VacantEntryRef.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.Values.html b/hashbrown/map/struct.Values.html new file mode 100644 index 000000000..24cd81deb --- /dev/null +++ b/hashbrown/map/struct.Values.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.Values.html...

+ + + \ No newline at end of file diff --git a/hashbrown/map/struct.ValuesMut.html b/hashbrown/map/struct.ValuesMut.html new file mode 100644 index 000000000..51309c87c --- /dev/null +++ b/hashbrown/map/struct.ValuesMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_map/struct.ValuesMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/enum.Entry.html b/hashbrown/set/enum.Entry.html new file mode 100644 index 000000000..8e3df2263 --- /dev/null +++ b/hashbrown/set/enum.Entry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/enum.Entry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.Difference.html b/hashbrown/set/struct.Difference.html new file mode 100644 index 000000000..32e41828e --- /dev/null +++ b/hashbrown/set/struct.Difference.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.Difference.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.Drain.html b/hashbrown/set/struct.Drain.html new file mode 100644 index 000000000..35a3a7ed2 --- /dev/null +++ b/hashbrown/set/struct.Drain.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.Drain.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.ExtractIf.html b/hashbrown/set/struct.ExtractIf.html new file mode 100644 index 000000000..ab0e949a2 --- /dev/null +++ b/hashbrown/set/struct.ExtractIf.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.ExtractIf.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.HashSet.html b/hashbrown/set/struct.HashSet.html new file mode 100644 index 000000000..583fb70ae --- /dev/null +++ b/hashbrown/set/struct.HashSet.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.HashSet.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.Intersection.html b/hashbrown/set/struct.Intersection.html new file mode 100644 index 000000000..2e2ac867a --- /dev/null +++ b/hashbrown/set/struct.Intersection.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.Intersection.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.IntoIter.html b/hashbrown/set/struct.IntoIter.html new file mode 100644 index 000000000..997ff48aa --- /dev/null +++ b/hashbrown/set/struct.IntoIter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.IntoIter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.Iter.html b/hashbrown/set/struct.Iter.html new file mode 100644 index 000000000..4af76fba1 --- /dev/null +++ b/hashbrown/set/struct.Iter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.Iter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.OccupiedEntry.html b/hashbrown/set/struct.OccupiedEntry.html new file mode 100644 index 000000000..e840319a9 --- /dev/null +++ b/hashbrown/set/struct.OccupiedEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.OccupiedEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.SymmetricDifference.html b/hashbrown/set/struct.SymmetricDifference.html new file mode 100644 index 000000000..76b5de124 --- /dev/null +++ b/hashbrown/set/struct.SymmetricDifference.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.SymmetricDifference.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.Union.html b/hashbrown/set/struct.Union.html new file mode 100644 index 000000000..5c4b9ac40 --- /dev/null +++ b/hashbrown/set/struct.Union.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.Union.html...

+ + + \ No newline at end of file diff --git a/hashbrown/set/struct.VacantEntry.html b/hashbrown/set/struct.VacantEntry.html new file mode 100644 index 000000000..d431a5104 --- /dev/null +++ b/hashbrown/set/struct.VacantEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_set/struct.VacantEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/sidebar-items.js b/hashbrown/sidebar-items.js new file mode 100644 index 000000000..b8695c077 --- /dev/null +++ b/hashbrown/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"enum":["TryReserveError"],"mod":["hash_map","hash_set","hash_table"],"struct":["HashMap","HashSet","HashTable"],"trait":["Equivalent"]}; \ No newline at end of file diff --git a/hashbrown/struct.HashMap.html b/hashbrown/struct.HashMap.html new file mode 100644 index 000000000..79c23dca7 --- /dev/null +++ b/hashbrown/struct.HashMap.html @@ -0,0 +1,1340 @@ +HashMap in hashbrown - Rust

Struct hashbrown::HashMap

source ·
pub struct HashMap<K, V, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A hash map implemented with quadratic probing and SIMD lookup.

+

The default hashing algorithm is currently AHash, though this is +subject to change at any point in the future. This hash function is very +fast for all types of keys, but this algorithm will typically not protect +against attacks such as HashDoS.

+

The hashing algorithm can be replaced on a per-HashMap basis using the +default, with_hasher, and with_capacity_and_hasher methods. Many +alternative algorithms are available on crates.io, such as the fnv crate.

+
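For instance, a minimal sketch of swapping in the standard library's RandomState hasher via with_hasher (documented in detail below):
+use hashbrown::HashMap;
+use std::collections::hash_map::RandomState;
+
+// Opt into std's randomized hasher instead of the default AHash-based one.
+let mut map: HashMap<&str, i32, RandomState> = HashMap::with_hasher(RandomState::new());
+map.insert("key", 1);
+assert_eq!(map.get("key"), Some(&1));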

It is required that the keys implement the Eq and Hash traits, although +this can frequently be achieved by using #[derive(PartialEq, Eq, Hash)]. +If you implement these yourself, it is important that the following +property holds:

+
k1 == k2 -> hash(k1) == hash(k2)
+
+

In other words, if two keys are equal, their hashes must be equal.

+

It is a logic error for a key to be modified in such a way that the key’s +hash, as determined by the Hash trait, or its equality, as determined by +the Eq trait, changes while it is in the map. This is normally only +possible through Cell, RefCell, global state, I/O, or unsafe code.

+

It is also a logic error for the Hash implementation of a key to panic. +This is generally only possible if the trait is implemented manually. If a +panic does occur then the contents of the HashMap may become corrupted and +some items may be dropped from the table.

+

§Examples

+
use hashbrown::HashMap;
+
+// Type inference lets us omit an explicit type signature (which
+// would be `HashMap<String, String>` in this example).
+let mut book_reviews = HashMap::new();
+
+// Review some books.
+book_reviews.insert(
+    "Adventures of Huckleberry Finn".to_string(),
+    "My favorite book.".to_string(),
+);
+book_reviews.insert(
+    "Grimms' Fairy Tales".to_string(),
+    "Masterpiece.".to_string(),
+);
+book_reviews.insert(
+    "Pride and Prejudice".to_string(),
+    "Very enjoyable.".to_string(),
+);
+book_reviews.insert(
+    "The Adventures of Sherlock Holmes".to_string(),
+    "Eye lyked it alot.".to_string(),
+);
+
+// Check for a specific one.
+// When collections store owned values (String), they can still be
+// queried using references (&str).
+if !book_reviews.contains_key("Les Misérables") {
+    println!("We've got {} reviews, but Les Misérables ain't one.",
+             book_reviews.len());
+}
+
+// oops, this review has a lot of spelling mistakes, let's delete it.
+book_reviews.remove("The Adventures of Sherlock Holmes");
+
+// Look up the values associated with some keys.
+let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
+for &book in &to_find {
+    match book_reviews.get(book) {
+        Some(review) => println!("{}: {}", book, review),
+        None => println!("{} is unreviewed.", book)
+    }
+}
+
+// Look up the value for a key (will panic if the key is not found).
+println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
+
+// Iterate over everything.
+for (book, review) in &book_reviews {
+    println!("{}: \"{}\"", book, review);
+}
+

HashMap also implements an Entry API, which allows +for more complex methods of getting, setting, updating and removing keys and +their values:

+ +
use hashbrown::HashMap;
+
+// type inference lets us omit an explicit type signature (which
+// would be `HashMap<&str, u8>` in this example).
+let mut player_stats = HashMap::new();
+
+fn random_stat_buff() -> u8 {
+    // could actually return some random value here - let's just return
+    // some fixed value for now
+    42
+}
+
+// insert a key only if it doesn't already exist
+player_stats.entry("health").or_insert(100);
+
+// insert a key using a function that provides a new value only if it
+// doesn't already exist
+player_stats.entry("defence").or_insert_with(random_stat_buff);
+
+// update a key, guarding against the key possibly not being set
+let stat = player_stats.entry("attack").or_insert(100);
+*stat += random_stat_buff();
+

The easiest way to use HashMap with a custom key type is to derive Eq and Hash. +We must also derive PartialEq.

+ +
use hashbrown::HashMap;
+
+#[derive(Hash, Eq, PartialEq, Debug)]
+struct Viking {
+    name: String,
+    country: String,
+}
+
+impl Viking {
+    /// Creates a new Viking.
+    fn new(name: &str, country: &str) -> Viking {
+        Viking { name: name.to_string(), country: country.to_string() }
+    }
+}
+
+// Use a HashMap to store the vikings' health points.
+let mut vikings = HashMap::new();
+
+vikings.insert(Viking::new("Einar", "Norway"), 25);
+vikings.insert(Viking::new("Olaf", "Denmark"), 24);
+vikings.insert(Viking::new("Harald", "Iceland"), 12);
+
+// Use derived implementation to print the status of the vikings.
+for (viking, health) in &vikings {
+    println!("{:?} has {} hp", viking, health);
+}
+

A HashMap with a fixed list of elements can be initialized from an array:

+ +
use hashbrown::HashMap;
+
+let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
+    .into_iter().collect();
+// use the values stored in map
+

Implementations§

source§

impl<K, V, S> HashMap<K, V, S>

source

pub const fn with_hasher(hash_builder: S) -> Self

Creates an empty HashMap which will use the given hash builder to hash +keys.

+

The hash map is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+

The hash_builder passed should implement the BuildHasher trait for +the HashMap to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_hasher(s);
+assert_eq!(map.len(), 0);
+assert_eq!(map.capacity(), 0);
+
+map.insert(1, 2);
+
source

pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self

Creates an empty HashMap with the specified capacity, using hash_builder +to hash the keys.

+

The hash map will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash map will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+

The hash_builder passed should implement the BuildHasher trait for +the HashMap to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_capacity_and_hasher(10, s);
+assert_eq!(map.len(), 0);
+assert!(map.capacity() >= 10);
+
+map.insert(1, 2);
+
source§

impl<K, V, S, A: Allocator> HashMap<K, V, S, A>

source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
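A minimal sketch, assuming the default Global allocator:
+use hashbrown::HashMap;
+
+let map: HashMap<i32, i32> = HashMap::new();
+// With no custom allocator, the returned reference is to `Global`.
+let _alloc = map.allocator();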
source

pub const fn with_hasher_in(hash_builder: S, alloc: A) -> Self

Creates an empty HashMap which will use the given hash builder to hash +keys. It will be allocated with the given allocator.

+

The hash map is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_hasher(s);
+map.insert(1, 2);
+
source

pub fn with_capacity_and_hasher_in( + capacity: usize, + hash_builder: S, + alloc: A +) -> Self

Creates an empty HashMap with the specified capacity, using hash_builder +to hash the keys. It will be allocated with the given allocator.

+

The hash map will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash map will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, and that does +not allow the HashMap to be protected against attacks such as HashDoS. +Users who require HashDoS resistance should explicitly use +[ahash::RandomState] or std::collections::hash_map::RandomState +as the hasher when creating a HashMap.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut map = HashMap::with_capacity_and_hasher(10, s);
+map.insert(1, 2);
+
source

pub fn hasher(&self) -> &S

Returns a reference to the map’s BuildHasher.

+
§Examples
+
use hashbrown::HashMap;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let hasher = DefaultHashBuilder::default();
+let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
+let hasher: &DefaultHashBuilder = map.hasher();
+
source

pub fn capacity(&self) -> usize

Returns the number of elements the map can hold without reallocating.

+

This number is a lower bound; the HashMap<K, V> might be able to hold +more, but is guaranteed to be able to hold at least this many.

+
§Examples
+
use hashbrown::HashMap;
+let map: HashMap<i32, i32> = HashMap::with_capacity(100);
+assert_eq!(map.len(), 0);
+assert!(map.capacity() >= 100);
+
source

pub fn keys(&self) -> Keys<'_, K, V>

An iterator visiting all keys in arbitrary order. +The iterator element type is &'a K.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<&str> = Vec::new();
+
+for key in map.keys() {
+    println!("{}", key);
+    vec.push(*key);
+}
+
+// The `Keys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c"]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn values(&self) -> Values<'_, K, V>

An iterator visiting all values in arbitrary order. +The iterator element type is &'a V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in map.values() {
+    println!("{}", val);
+    vec.push(*val);
+}
+
+// The `Values` iterator produces values in arbitrary order, so the
+// values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [1, 2, 3]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn values_mut(&mut self) -> ValuesMut<'_, K, V>

An iterator visiting all values mutably in arbitrary order. +The iterator element type is &'a mut V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+for val in map.values_mut() {
+    *val = *val + 10;
+}
+
+assert_eq!(map.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in map.values() {
+    println!("{}", val);
+    vec.push(*val);
+}
+
+// The `Values` iterator produces values in arbitrary order, so the
+// values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [11, 12, 13]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn iter(&self) -> Iter<'_, K, V>

An iterator visiting all key-value pairs in arbitrary order. +The iterator element type is (&'a K, &'a V).

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+assert_eq!(map.len(), 3);
+let mut vec: Vec<(&str, i32)> = Vec::new();
+
+for (key, val) in map.iter() {
+    println!("key: {} val: {}", key, val);
+    vec.push((*key, *val));
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn iter_mut(&mut self) -> IterMut<'_, K, V>

An iterator visiting all key-value pairs in arbitrary order, +with mutable references to the values. +The iterator element type is (&'a K, &'a mut V).

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+// Update all values
+for (_, val) in map.iter_mut() {
+    *val *= 2;
+}
+
+assert_eq!(map.len(), 3);
+let mut vec: Vec<(&str, i32)> = Vec::new();
+
+for (key, val) in &map {
+    println!("key: {} val: {}", key, val);
+    vec.push((*key, *val));
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 2), ("b", 4), ("c", 6)]);
+
+assert_eq!(map.len(), 3);
+
source

pub fn len(&self) -> usize

Returns the number of elements in the map.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+assert_eq!(a.len(), 0);
+a.insert(1, "a");
+assert_eq!(a.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the map contains no elements.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+assert!(a.is_empty());
+a.insert(1, "a");
+assert!(!a.is_empty());
+
source

pub fn drain(&mut self) -> Drain<'_, K, V, A>

Clears the map, returning all key-value pairs as an iterator. Keeps the +allocated memory for reuse.

+

If the returned iterator is dropped before being fully consumed, it +drops the remaining key-value pairs. The returned iterator keeps a +mutable borrow on the map to optimize its implementation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+a.insert(2, "b");
+let capacity_before_drain = a.capacity();
+
+for (k, v) in a.drain().take(1) {
+    assert!(k == 1 || k == 2);
+    assert!(v == "a" || v == "b");
+}
+
+// As we can see, the map is empty and contains no elements.
+assert!(a.is_empty() && a.len() == 0);
+// But the map capacity is equal to the old one.
+assert_eq!(a.capacity(), capacity_before_drain);
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+a.insert(2, "b");
+
+{   // Iterator is dropped without being consumed.
+    let d = a.drain();
+}
+
+// But the map is empty even though the Drain iterator was not consumed.
+assert!(a.is_empty());
+
source

pub fn retain<F>(&mut self, f: F)
where + F: FnMut(&K, &mut V) -> bool,

Retains only the elements specified by the predicate. Keeps the +allocated memory for reuse.

+

In other words, remove all pairs (k, v) such that f(&k, &mut v) returns false. +The elements are visited in unsorted (and unspecified) order.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
+assert_eq!(map.len(), 8);
+
+map.retain(|&k, _| k % 2 == 0);
+
+// We can see that the number of elements inside the map has changed.
+assert_eq!(map.len(), 4);
+
+let mut vec: Vec<(i32, i32)> = map.iter().map(|(&k, &v)| (k, v)).collect();
+vec.sort_unstable();
+assert_eq!(vec, [(0, 0), (2, 20), (4, 40), (6, 60)]);
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, K, V, F, A>
where + F: FnMut(&K, &mut V) -> bool,

Drains elements which are true under the given predicate, +and returns an iterator over the removed items.

+

In other words, move all pairs (k, v) such that f(&k, &mut v) returns true out +into another iterator.

+

Note that extract_if lets you mutate every value in the filter closure, regardless of +whether you choose to keep or remove it.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+

Keeps the allocated memory for reuse.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+
+let drained: HashMap<i32, i32> = map.extract_if(|k, _v| k % 2 == 0).collect();
+
+let mut evens = drained.keys().cloned().collect::<Vec<_>>();
+let mut odds = map.keys().cloned().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
+let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+
+{   // Iterator is dropped without being consumed.
+    let d = map.extract_if(|k, _v| k % 2 != 0);
+}
+
+// ExtractIf was not exhausted, therefore no elements were drained.
+assert_eq!(map.len(), 8);
+
source

pub fn clear(&mut self)

Clears the map, removing all key-value pairs. Keeps the allocated memory +for reuse.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+a.insert(1, "a");
+let capacity_before_clear = a.capacity();
+
+a.clear();
+
+// Map is empty.
+assert!(a.is_empty());
+// But the map capacity is equal to the old one.
+assert_eq!(a.capacity(), capacity_before_clear);
+
source

pub fn into_keys(self) -> IntoKeys<K, V, A>

Creates a consuming iterator visiting all the keys in arbitrary order. +The map cannot be used after calling this. +The iterator element type is K.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+let mut vec: Vec<&str> = map.into_keys().collect();
+
+// The `IntoKeys` iterator produces keys in arbitrary order, so the
+// keys must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, ["a", "b", "c"]);
+
source

pub fn into_values(self) -> IntoValues<K, V, A>

Creates a consuming iterator visiting all the values in arbitrary order. +The map cannot be used after calling this. +The iterator element type is V.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert("a", 1);
+map.insert("b", 2);
+map.insert("c", 3);
+
+let mut vec: Vec<i32> = map.into_values().collect();
+
+// The `IntoValues` iterator produces values in arbitrary order, so
+// the values must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [1, 2, 3]);
+
source§

impl<K, V, S, A> HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher, + A: Allocator,

source

pub fn reserve(&mut self, additional: usize)

Reserves capacity for at least additional more elements to be inserted +in the HashMap. The collection may reserve more space to avoid +frequent reallocations.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program +in case of allocation error. Use try_reserve instead +if you want to handle memory allocation failure.

+
§Examples
+
use hashbrown::HashMap;
+let mut map: HashMap<&str, i32> = HashMap::new();
+// Map is empty and doesn't allocate memory
+assert_eq!(map.capacity(), 0);
+
+map.reserve(10);
+
+// And now map can hold at least 10 elements
+assert!(map.capacity() >= 10);
+
source

pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted +in the given HashMap<K,V>. The collection may reserve more space to avoid +frequent reallocations.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<&str, isize> = HashMap::new();
+// Map is empty and doesn't allocate memory
+assert_eq!(map.capacity(), 0);
+
+map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+
+// And now map can hold at least 10 elements
+assert!(map.capacity() >= 10);
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned:

+ +
use hashbrown::HashMap;
+use hashbrown::TryReserveError;
+let mut map: HashMap<i32, i32> = HashMap::new();
+
+match map.try_reserve(usize::MAX) {
+    Err(error) => match error {
+        TryReserveError::CapacityOverflow => {}
+        _ => panic!("TryReserveError::AllocError ?"),
+    },
+    _ => panic!(),
+}
+
source

pub fn shrink_to_fit(&mut self)

Shrinks the capacity of the map as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+map.insert(1, 2);
+map.insert(3, 4);
+assert!(map.capacity() >= 100);
+map.shrink_to_fit();
+assert!(map.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize)

Shrinks the capacity of the map with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

This function does nothing if the current capacity is smaller than the +supplied minimum capacity.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+map.insert(1, 2);
+map.insert(3, 4);
+assert!(map.capacity() >= 100);
+map.shrink_to(10);
+assert!(map.capacity() >= 10);
+map.shrink_to(0);
+assert!(map.capacity() >= 2);
+map.shrink_to(10);
+assert!(map.capacity() >= 2);
+
source

pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A>

Gets the given key’s corresponding entry in the map for in-place manipulation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut letters = HashMap::new();
+
+for ch in "a short treatise on fungi".chars() {
+    let counter = letters.entry(ch).or_insert(0);
+    *counter += 1;
+}
+
+assert_eq!(letters[&'s'], 2);
+assert_eq!(letters[&'t'], 3);
+assert_eq!(letters[&'u'], 1);
+assert_eq!(letters.get(&'y'), None);
+
source

pub fn entry_ref<'a, 'b, Q>( + &'a mut self, + key: &'b Q +) -> EntryRef<'a, 'b, K, Q, V, S, A>
where + Q: Hash + Equivalent<K> + ?Sized,

Gets the given key’s corresponding entry by reference in the map for in-place manipulation.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut words: HashMap<String, usize> = HashMap::new();
+let source = ["poneyland", "horseyland", "poneyland", "poneyland"];
+for (i, &s) in source.iter().enumerate() {
+    let counter = words.entry_ref(s).or_insert(0);
+    *counter += 1;
+}
+
+assert_eq!(words["poneyland"], 3);
+assert_eq!(words["horseyland"], 1);
+
source

pub fn get<Q>(&self, k: &Q) -> Option<&V>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns a reference to the value corresponding to the key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.get(&1), Some(&"a"));
+assert_eq!(map.get(&2), None);
+
source

pub fn get_key_value<Q>(&self, k: &Q) -> Option<(&K, &V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns the key-value pair corresponding to the supplied key.

+

The supplied key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+assert_eq!(map.get_key_value(&2), None);
+
source

pub fn get_key_value_mut<Q>(&mut self, k: &Q) -> Option<(&K, &mut V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.

+

The supplied key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+let (k, v) = map.get_key_value_mut(&1).unwrap();
+assert_eq!(k, &1);
+assert_eq!(v, &mut "a");
+*v = "b";
+assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
+assert_eq!(map.get_key_value_mut(&2), None);
+
source

pub fn contains_key<Q>(&self, k: &Q) -> bool
where + Q: Hash + Equivalent<K> + ?Sized,

Returns true if the map contains a value for the specified key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+assert_eq!(map.contains_key(&1), true);
+assert_eq!(map.contains_key(&2), false);
+
source

pub fn get_mut<Q>(&mut self, k: &Q) -> Option<&mut V>
where + Q: Hash + Equivalent<K> + ?Sized,

Returns a mutable reference to the value corresponding to the key.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, "a");
+if let Some(x) = map.get_mut(&1) {
+    *x = "b";
+}
+assert_eq!(map[&1], "b");
+
+assert_eq!(map.get_mut(&2), None);
+
source

pub fn get_many_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[&mut V; N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the +keys are duplicates or missing.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Library of Congress",
+]);
+assert_eq!(
+    got,
+    Some([
+        &mut 1807,
+        &mut 1800,
+    ]),
+);
+
+// Missing keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "New York Public Library",
+]);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Athenæum",
+]);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_unchecked_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[&mut V; N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, without validating that +the values are unique.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "Library of Congress",
+]);
+assert_eq!(
+    got,
+    Some([
+        &mut 1807,
+        &mut 1800,
+    ]),
+);
+
+// Missing keys result in None
+let got = libraries.get_many_mut([
+    "Athenæum",
+    "New York Public Library",
+]);
+assert_eq!(got, None);
+
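The example above exercises the safe get_many_mut; a minimal sketch of the unchecked variant itself, assuming the caller has already ensured the keys are distinct:
+use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+// SAFETY: the two keys are distinct, so the returned mutable
+// references cannot overlap.
+let got = unsafe {
+    libraries.get_many_unchecked_mut(["Athenæum", "Library of Congress"])
+};
+assert_eq!(got, Some([&mut 1807, &mut 1800]));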
source

pub fn get_many_key_value_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[(&K, &mut V); N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, with immutable +references to the corresponding keys.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the keys +are duplicates or missing.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(
+    got,
+    Some([
+        (&"Bodleian Library".to_string(), &mut 1602),
+        (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ]),
+);
+// Missing keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Gewandhaus",
+]);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_key_value_unchecked_mut<Q, const N: usize>( + &mut self, + ks: [&Q; N] +) -> Option<[(&K, &mut V); N]>
where + Q: Hash + Equivalent<K> + ?Sized,

Attempts to get mutable references to N values in the map at once, with immutable +references to the corresponding keys, without validating that the values are unique.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_key_value_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+libraries.insert("Library of Congress".to_string(), 1800);
+
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Herzogin-Anna-Amalia-Bibliothek",
+]);
+assert_eq!(
+    got,
+    Some([
+        (&"Bodleian Library".to_string(), &mut 1602),
+        (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ]),
+);
+// Missing keys result in None
+let got = libraries.get_many_key_value_mut([
+    "Bodleian Library",
+    "Gewandhaus",
+]);
+assert_eq!(got, None);
+
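As above, the example exercises the safe get_many_key_value_mut; a minimal sketch of the unchecked variant, assuming distinct keys:
+use hashbrown::HashMap;
+
+let mut libraries = HashMap::new();
+libraries.insert("Bodleian Library".to_string(), 1602);
+libraries.insert("Athenæum".to_string(), 1807);
+
+// SAFETY: the keys are distinct, so no two returned references alias.
+let got = unsafe {
+    libraries.get_many_key_value_unchecked_mut(["Bodleian Library", "Athenæum"])
+};
+assert_eq!(
+    got,
+    Some([
+        (&"Bodleian Library".to_string(), &mut 1602),
+        (&"Athenæum".to_string(), &mut 1807),
+    ]),
+);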
source

pub fn insert(&mut self, k: K, v: V) -> Option<V>

Inserts a key-value pair into the map.

+

If the map did not have this key present, None is returned.

+

If the map did have this key present, the value is updated, and the old +value is returned. The key is not updated, though; this matters for +types that can be == without being identical. See the std::collections +module-level documentation for more.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+assert_eq!(map.insert(37, "a"), None);
+assert_eq!(map.is_empty(), false);
+
+map.insert(37, "b");
+assert_eq!(map.insert(37, "c"), Some("b"));
+assert_eq!(map[&37], "c");
+
source

pub fn insert_unique_unchecked(&mut self, k: K, v: V) -> (&K, &mut V)

Inserts a key-value pair into the map without checking +whether the key already exists in the map.

+

Returns a reference to the key and value just inserted.

+

This operation is safe if a key does not exist in the map.

+

However, if the key already exists in the map, the behavior is unspecified: +this operation may panic or loop forever, and any following operation on the map +may panic, loop forever, or return an arbitrary result.

+

That said, this operation (and following operations) are guaranteed to +not violate memory safety.

+

This operation is faster than regular insert, because it does not perform +lookup before insertion.

+

This operation is useful during initial population of the map. +For example, when constructing a map from another map, we know +that keys are unique.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map1 = HashMap::new();
+assert_eq!(map1.insert(1, "a"), None);
+assert_eq!(map1.insert(2, "b"), None);
+assert_eq!(map1.insert(3, "c"), None);
+assert_eq!(map1.len(), 3);
+
+let mut map2 = HashMap::new();
+
+for (key, value) in map1.into_iter() {
+    map2.insert_unique_unchecked(key, value);
+}
+
+let (key, value) = map2.insert_unique_unchecked(4, "d");
+assert_eq!(key, &4);
+assert_eq!(value, &mut "d");
+*value = "e";
+
+assert_eq!(map2[&1], "a");
+assert_eq!(map2[&2], "b");
+assert_eq!(map2[&3], "c");
+assert_eq!(map2[&4], "e");
+assert_eq!(map2.len(), 4);
+
source

pub fn try_insert( + &mut self, + key: K, + value: V +) -> Result<&mut V, OccupiedError<'_, K, V, S, A>>

Tries to insert a key-value pair into the map, and returns +a mutable reference to the value in the entry.

+
§Errors
+

If the map already had this key present, nothing is updated, and +an error containing the occupied entry and the value is returned.

+
§Examples
+

Basic usage:

+ +
use hashbrown::HashMap;
+use hashbrown::hash_map::OccupiedError;
+
+let mut map = HashMap::new();
+assert_eq!(map.try_insert(37, "a").unwrap(), &"a");
+
+match map.try_insert(37, "b") {
+    Err(OccupiedError { entry, value }) => {
+        assert_eq!(entry.key(), &37);
+        assert_eq!(entry.get(), &"a");
+        assert_eq!(value, "b");
+    }
+    _ => panic!()
+}
+
source

pub fn remove<Q>(&mut self, k: &Q) -> Option<V>
where + Q: Hash + Equivalent<K> + ?Sized,

Removes a key from the map, returning the value at the key if the key +was previously in the map. Keeps the allocated memory for reuse.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.insert(1, "a");
+
+assert_eq!(map.remove(&1), Some("a"));
+assert_eq!(map.remove(&1), None);
+
+// Now the map holds no elements
+assert!(map.is_empty());
+
source

pub fn remove_entry<Q>(&mut self, k: &Q) -> Option<(K, V)>
where + Q: Hash + Equivalent<K> + ?Sized,

Removes a key from the map, returning the stored key and value if the +key was previously in the map. Keeps the allocated memory for reuse.

+

The key may be any borrowed form of the map’s key type, but +Hash and Eq on the borrowed form must match those for +the key type.

+
§Examples
+
use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+// The map is empty
+assert!(map.is_empty() && map.capacity() == 0);
+
+map.insert(1, "a");
+
+assert_eq!(map.remove_entry(&1), Some((1, "a")));
+assert_eq!(map.remove(&1), None);
+
+// Now the map holds no elements
+assert!(map.is_empty());
+
source§

impl<K, V, S, A: Allocator> HashMap<K, V, S, A>

source

pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A>

Creates a raw entry builder for the HashMap.

+

Raw entries provide the lowest level of control for searching and +manipulating a map. They must be manually initialized with a hash and +then manually searched. After this, insertions into a vacant entry +still require an owned key to be provided.

+

Raw entries are useful for such exotic situations as:

+
    +
  • Hash memoization
  • +
  • Deferring the creation of an owned key until it is known to be required
  • +
  • Using a search key that doesn’t work with the Borrow trait
  • +
  • Using custom comparison logic without newtype wrappers
  • +
+

Because raw entries provide much more low-level control, it’s much easier +to put the HashMap into an inconsistent state which, while memory-safe, +will cause the map to produce seemingly random results. Higher-level and +more foolproof APIs like entry should be preferred when possible.

+

In particular, the hash used to initialize the raw entry must still be +consistent with the hash of the key that is ultimately stored in the entry. +This is because implementations of HashMap may need to recompute hashes +when resizing, at which point only the keys are available.

+

Raw entries give mutable access to the keys. This must not be used +to modify how the key would compare or hash, as the map will not re-evaluate +where the key should go, meaning the keys may become “lost” if their +location does not reflect their state. For instance, if you change a key +so that the map now contains keys which compare equal, search may start +acting erratically, with two keys randomly masking each other. Implementations +are free to assume this doesn’t happen (within the limits of memory-safety).

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::hash_map::{HashMap, RawEntryMut};
+
+let mut map = HashMap::new();
+map.extend([("a", 100), ("b", 200), ("c", 300)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+// Existing key (insert and update)
+match map.raw_entry_mut().from_key(&"a") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(mut view) => {
+        assert_eq!(view.get(), &100);
+        let v = view.get_mut();
+        let new_v = (*v) * 10;
+        *v = new_v;
+        assert_eq!(view.insert(1111), 1000);
+    }
+}
+
+assert_eq!(map[&"a"], 1111);
+assert_eq!(map.len(), 3);
+
+// Existing key (take)
+let hash = compute_hash(map.hasher(), &"c");
+match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("c", 300));
+    }
+}
+assert_eq!(map.raw_entry().from_key(&"c"), None);
+assert_eq!(map.len(), 2);
+
+// Nonexistent key (insert and update)
+let key = "d";
+let hash = compute_hash(map.hasher(), &key);
+match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    RawEntryMut::Occupied(_) => unreachable!(),
+    RawEntryMut::Vacant(view) => {
+        let (k, value) = view.insert("d", 4000);
+        assert_eq!((*k, *value), ("d", 4000));
+        *value = 40000;
+    }
+}
+assert_eq!(map[&"d"], 40000);
+assert_eq!(map.len(), 3);
+
+match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    RawEntryMut::Vacant(_) => unreachable!(),
+    RawEntryMut::Occupied(view) => {
+        assert_eq!(view.remove_entry(), ("d", 40000));
+    }
+}
+assert_eq!(map.get(&"d"), None);
+assert_eq!(map.len(), 2);
+
source

pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A>

Creates a raw immutable entry builder for the HashMap.

+

Raw entries provide the lowest level of control for searching and +manipulating a map. They must be manually initialized with a hash and +then manually searched.

+

This is useful for

+
    +
  • Hash memoization
  • +
  • Using a search key that doesn’t work with the Borrow trait
  • +
  • Using custom comparison logic without newtype wrappers
  • +
+

Unless you are in such a situation, higher-level and more foolproof APIs like +get should be preferred.

+

Immutable raw entries have very limited use; you might instead want raw_entry_mut.

+
§Examples
+
use core::hash::{BuildHasher, Hash};
+use hashbrown::HashMap;
+
+let mut map = HashMap::new();
+map.extend([("a", 100), ("b", 200), ("c", 300)]);
+
+fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    key.hash(&mut state);
+    state.finish()
+}
+
+for k in ["a", "b", "c", "d", "e", "f"] {
+    let hash = compute_hash(map.hasher(), k);
+    let v = map.get(&k).cloned();
+    let kv = v.as_ref().map(|v| (&k, v));
+
+    println!("Key: {} and value: {:?}", k, v);
+
+    assert_eq!(map.raw_entry().from_key(&k), kv);
+    assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+    assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+}
+

Trait Implementations§

source§

impl<K: Clone, V: Clone, S: Clone, A: Allocator + Clone> Clone for HashMap<K, V, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
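A minimal sketch of clone and clone_from on a HashMap:
+use hashbrown::HashMap;
+
+let mut a = HashMap::new();
+a.insert("x", 1);
+
+// `clone` produces an independent copy.
+let b = a.clone();
+assert_eq!(a, b);
+
+// `clone_from` copy-assigns from `b`, reusing `a`'s allocation where possible.
+a.clear();
+a.clone_from(&b);
+assert_eq!(a, b);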
source§

impl<K, V, S, A> Debug for HashMap<K, V, S, A>
where + K: Debug, + V: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<K, V, S, A> Default for HashMap<K, V, S, A>
where + S: Default, + A: Default + Allocator,

source§

fn default() -> Self

Creates an empty HashMap<K, V, S, A>, with the Default value for the hasher and allocator.

+
§Examples
+
use hashbrown::HashMap;
+use std::collections::hash_map::RandomState;
+
+// You can specify all type parameters of HashMap, including the hasher and allocator.
+// The created map is empty and doesn't allocate memory
+let map: HashMap<u32, String> = Default::default();
+assert_eq!(map.capacity(), 0);
+let map: HashMap<u32, String, RandomState> = HashMap::default();
+assert_eq!(map.capacity(), 0);
+
source§

impl<'a, K, V, S, A> Extend<&'a (K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash + Copy, + V: Copy, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = &'a (K, V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator. +The keys and values must implement the Copy trait.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let arr = [(1, 1), (2, 2)];
+let some_iter = arr.iter();
+map.extend(some_iter);
+// Values for existing keys are replaced with the new values from the iterator,
+// so map.get(&1) no longer returns Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(&some_vec);
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(&some_arr);
+
+let mut vec: Vec<_> = map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where + K: Eq + Hash + Copy, + V: Copy, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator. +The keys and values must implement the Copy trait.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let arr = [(1, 1), (2, 2)];
+let some_iter = arr.iter().map(|(k, v)| (k, v));
+map.extend(some_iter);
+// Values for existing keys are replaced with the new values from the iterator,
+// so map.get(&1) no longer returns Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(some_vec.iter().map(|(k, v)| (k, v)));
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(some_arr.iter().map(|(k, v)| (k, v)));
+
+// You can also extend from another HashMap
+let mut new_map = HashMap::new();
+new_map.extend(&map);
+assert_eq!(new_map, map);
+
+let mut vec: Vec<_> = new_map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher, + A: Allocator,

Inserts all new key-value pairs from the iterator and replaces the values of existing +keys with the new values returned from the iterator.

+
source§

fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T)

Inserts all new key-value pairs from the iterator into the existing HashMap<K, V, S, A>, +replacing the values of existing keys with the new values returned from the iterator.

+
§Examples
+
use hashbrown::hash_map::HashMap;
+
+let mut map = HashMap::new();
+map.insert(1, 100);
+
+let some_iter = [(1, 1), (2, 2)].into_iter();
+map.extend(some_iter);
+// Values for existing keys are replaced with the new values from the iterator,
+// so map.get(&1) no longer returns Some(&100).
+assert_eq!(map.get(&1), Some(&1));
+
+let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+map.extend(some_vec);
+
+let some_arr = [(5, 5), (6, 6)];
+map.extend(some_arr);
+let old_map_len = map.len();
+
+// You can also extend from another HashMap
+let mut new_map = HashMap::new();
+new_map.extend(map);
+assert_eq!(new_map.len(), old_map_len);
+
+let mut vec: Vec<_> = new_map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
where + A: Allocator,

source§

fn from(map: HashMap<T, (), S, A>) -> Self

Converts to this type from the input type.
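A minimal sketch of this conversion:
+use hashbrown::{HashMap, HashSet};
+
+let mut map: HashMap<&str, ()> = HashMap::new();
+map.insert("a", ());
+map.insert("b", ());
+
+// The map's keys become the set's elements; the unit values are discarded.
+let set: HashSet<&str> = HashSet::from(map);
+assert!(set.contains("a") && set.contains("b"));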
source§

impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
where + K: Eq + Hash, + S: BuildHasher + Default, + A: Default + Allocator,

source§

fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self

Creates a value from an iterator. Read more
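A minimal sketch of collecting an iterator of key-value pairs into a HashMap (the pairs are illustrative):

use hashbrown::HashMap;

let map: HashMap<i32, &str> = [(1, "one"), (2, "two")].into_iter().collect();
assert_eq!(map.get(&1), Some(&"one"));
assert_eq!(map.len(), 2);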
source§

impl<K, Q, V, S, A> Index<&Q> for HashMap<K, V, S, A>
where + K: Eq + Hash, + Q: Hash + Equivalent<K> + ?Sized, + S: BuildHasher, + A: Allocator,

source§

fn index(&self, key: &Q) -> &V

Returns a reference to the value corresponding to the supplied key.

+
§Panics
+

Panics if the key is not present in the HashMap.

+
§Examples
+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [("a", "One"), ("b", "Two")].into();
+
+assert_eq!(map[&"a"], "One");
+assert_eq!(map[&"b"], "Two");
+
§

type Output = V

The returned type after indexing.
source§

impl<'a, K, V, S, A: Allocator> IntoIterator for &'a HashMap<K, V, S, A>

source§

fn into_iter(self) -> Iter<'a, K, V>

Creates an iterator over the entries of a HashMap in arbitrary order. +The iterator element type is (&'a K, &'a V).

+

Returns the same Iter struct as the iter method on HashMap.

+
§Examples
+
use hashbrown::HashMap;
+let map_one: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+let mut map_two = HashMap::new();
+
+for (key, value) in &map_one {
+    println!("Key: {}, Value: {}", key, value);
+    map_two.insert_unique_unchecked(*key, *value);
+}
+
+assert_eq!(map_one, map_two);
+
§

type Item = (&'a K, &'a V)

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, K, V>

Which kind of iterator are we turning this into?
source§

impl<'a, K, V, S, A: Allocator> IntoIterator for &'a mut HashMap<K, V, S, A>

source§

fn into_iter(self) -> IterMut<'a, K, V>

Creates an iterator over the entries of a HashMap in arbitrary order +with mutable references to the values. The iterator element type is +(&'a K, &'a mut V).

+

Returns the same IterMut struct as the iter_mut method on HashMap.

+
§Examples
+
use hashbrown::HashMap;
+let mut map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+
+for (key, value) in &mut map {
+    println!("Key: {}, Value: {}", key, value);
+    *value *= 2;
+}
+
+let mut vec = map.iter().collect::<Vec<_>>();
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [(&"a", &2), (&"b", &4), (&"c", &6)]);
+
§

type Item = (&'a K, &'a mut V)

The type of the elements being iterated over.
§

type IntoIter = IterMut<'a, K, V>

Which kind of iterator are we turning this into?
source§

impl<K, V, S, A: Allocator> IntoIterator for HashMap<K, V, S, A>

source§

fn into_iter(self) -> IntoIter<K, V, A>

Creates a consuming iterator, that is, one that moves each key-value +pair out of the map in arbitrary order. The map cannot be used after +calling this.

+
§Examples
+
use hashbrown::HashMap;
+
+let map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+
+// Not possible with .iter()
+let mut vec: Vec<(&str, i32)> = map.into_iter().collect();
+// The `IntoIter` iterator produces items in arbitrary order, so
+// the items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+
§

type Item = (K, V)

The type of the elements being iterated over.
§

type IntoIter = IntoIter<K, V, A>

Which kind of iterator are we turning this into?
source§

impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
where + K: Eq + Hash, + V: PartialEq, + S: BuildHasher, + A: Allocator,

source§

fn eq(&self, other: &Self) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl<K, V, S, A> Eq for HashMap<K, V, S, A>
where + K: Eq + Hash, + V: Eq, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<K, V, S, A> Freeze for HashMap<K, V, S, A>
where + S: Freeze, + A: Freeze,

§

impl<K, V, S, A> RefUnwindSafe for HashMap<K, V, S, A>

§

impl<K, V, S, A> Send for HashMap<K, V, S, A>
where + S: Send, + A: Send, + K: Send, + V: Send,

§

impl<K, V, S, A> Sync for HashMap<K, V, S, A>
where + S: Sync, + A: Sync, + K: Sync, + V: Sync,

§

impl<K, V, S, A> Unpin for HashMap<K, V, S, A>
where + S: Unpin, + A: Unpin, + K: Unpin, + V: Unpin,

§

impl<K, V, S, A> UnwindSafe for HashMap<K, V, S, A>
where + S: UnwindSafe, + A: UnwindSafe, + K: UnwindSafe, + V: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/struct.HashSet.html b/hashbrown/struct.HashSet.html new file mode 100644 index 000000000..95eb06eb3 --- /dev/null +++ b/hashbrown/struct.HashSet.html @@ -0,0 +1,700 @@ +HashSet in hashbrown - Rust

Struct hashbrown::HashSet

source ·
pub struct HashSet<T, S = DefaultHashBuilder, A: Allocator = Global> { /* private fields */ }
Expand description

A hash set implemented as a HashMap where the value is ().

+

As with the HashMap type, a HashSet requires that the elements +implement the Eq and Hash traits. This can frequently be achieved by +using #[derive(PartialEq, Eq, Hash)]. If you implement these yourself, +it is important that the following property holds:

+
k1 == k2 -> hash(k1) == hash(k2)
+
+

In other words, if two keys are equal, their hashes must be equal.

+

It is a logic error for an item to be modified in such a way that the +item’s hash, as determined by the Hash trait, or its equality, as +determined by the Eq trait, changes while it is in the set. This is +normally only possible through Cell, RefCell, global state, I/O, or +unsafe code.

+

It is also a logic error for the Hash implementation of a key to panic. +This is generally only possible if the trait is implemented manually. If a +panic does occur then the contents of the HashSet may become corrupted and +some items may be dropped from the table.

+

§Examples

+
use hashbrown::HashSet;
+// Type inference lets us omit an explicit type signature (which
+// would be `HashSet<String>` in this example).
+let mut books = HashSet::new();
+
+// Add some books.
+books.insert("A Dance With Dragons".to_string());
+books.insert("To Kill a Mockingbird".to_string());
+books.insert("The Odyssey".to_string());
+books.insert("The Great Gatsby".to_string());
+
+// Check for a specific one.
+if !books.contains("The Winds of Winter") {
+    println!("We have {} books, but The Winds of Winter ain't one.",
+             books.len());
+}
+
+// Remove a book.
+books.remove("The Odyssey");
+
+// Iterate over everything.
+for book in &books {
+    println!("{}", book);
+}
+

The easiest way to use HashSet with a custom type is to derive +Eq and Hash. We must also derive PartialEq. This will in the +future be implied by Eq.

+ +
use hashbrown::HashSet;
+#[derive(Hash, Eq, PartialEq, Debug)]
+struct Viking {
+    name: String,
+    power: usize,
+}
+
+let mut vikings = HashSet::new();
+
+vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+vikings.insert(Viking { name: "Olaf".to_string(), power: 4 });
+vikings.insert(Viking { name: "Harald".to_string(), power: 8 });
+
+// Use derived implementation to print the vikings.
+for x in &vikings {
+    println!("{:?}", x);
+}
+

A HashSet with fixed list of elements can be initialized from an array:

+ +
use hashbrown::HashSet;
+
+let viking_names: HashSet<&'static str> =
+    [ "Einar", "Olaf", "Harald" ].into_iter().collect();
+// use the values stored in the set
+

Implementations§

source§

impl<T, S, A: Allocator> HashSet<T, S, A>

source

pub fn capacity(&self) -> usize

Returns the number of elements the set can hold without reallocating.

+
§Examples
+
use hashbrown::HashSet;
+let set: HashSet<i32> = HashSet::with_capacity(100);
+assert!(set.capacity() >= 100);
+
source

pub fn iter(&self) -> Iter<'_, T>

An iterator visiting all elements in arbitrary order. +The iterator element type is &'a T.

+
§Examples
+
use hashbrown::HashSet;
+let mut set = HashSet::new();
+set.insert("a");
+set.insert("b");
+
+// Will print in an arbitrary order.
+for x in set.iter() {
+    println!("{}", x);
+}
+
source

pub fn len(&self) -> usize

Returns the number of elements in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+assert_eq!(v.len(), 0);
+v.insert(1);
+assert_eq!(v.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the set contains no elements.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+assert!(v.is_empty());
+v.insert(1);
+assert!(!v.is_empty());
+
source

pub fn drain(&mut self) -> Drain<'_, T, A>

Clears the set, returning all elements in an iterator.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert!(!set.is_empty());
+
+// print 1, 2, 3 in an arbitrary order
+for i in set.drain() {
+    println!("{}", i);
+}
+
+assert!(set.is_empty());
+
source

pub fn retain<F>(&mut self, f: F)
where + F: FnMut(&T) -> bool,

Retains only the elements specified by the predicate.

+

In other words, remove all elements e such that f(&e) returns false.

+
§Examples
+
use hashbrown::HashSet;
+
+let xs = [1,2,3,4,5,6];
+let mut set: HashSet<i32> = xs.into_iter().collect();
+set.retain(|&k| k % 2 == 0);
+assert_eq!(set.len(), 3);
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
where + F: FnMut(&T) -> bool,

Drains elements which are true under the given predicate, +and returns an iterator over the removed items.

+

In other words, move all elements e such that f(&e) returns true out +into another iterator.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<i32> = (0..8).collect();
+let drained: HashSet<i32> = set.extract_if(|v| v % 2 == 0).collect();
+
+let mut evens = drained.into_iter().collect::<Vec<_>>();
+let mut odds = set.into_iter().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
source

pub fn clear(&mut self)

Clears the set, removing all values.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut v = HashSet::new();
+v.insert(1);
+v.clear();
+assert!(v.is_empty());
+
source§

impl<T, S> HashSet<T, S, Global>

source

pub const fn with_hasher(hasher: S) -> Self

Creates a new empty hash set which will use the given hasher to hash +keys.

+

The hash set is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, which does not protect the HashSet against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use ahash::RandomState or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for +the HashSet to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_hasher(s);
+set.insert(2);
+
source

pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self

Creates an empty HashSet with the specified capacity, using +hasher to hash the keys.

+

The hash set will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash set will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, which does not protect the HashSet against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use ahash::RandomState or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for +the HashSet to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_capacity_and_hasher(10, s);
+set.insert(1);
+
source§

impl<T, S, A> HashSet<T, S, A>
where + A: Allocator,

source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
source

pub const fn with_hasher_in(hasher: S, alloc: A) -> Self

Creates a new empty hash set which will use the given hasher to hash +keys.

+

The hash set is initially created with a capacity of 0, so it will not +allocate until it is first inserted into.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, which does not protect the HashSet against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use ahash::RandomState or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for +the HashSet to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_hasher(s);
+set.insert(2);
+
source

pub fn with_capacity_and_hasher_in(capacity: usize, hasher: S, alloc: A) -> Self

Creates an empty HashSet with the specified capacity, using +hasher to hash the keys.

+

The hash set will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash set will not allocate.

+
§HashDoS resistance
+

The hash_builder normally uses a fixed key by default, which does not protect the HashSet against attacks such as HashDoS. Users who require HashDoS resistance should explicitly use ahash::RandomState or std::collections::hash_map::RandomState as the hasher when creating a HashSet.

+

The hash_builder passed should implement the BuildHasher trait for +the HashSet to be useful, see its documentation for details.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let s = DefaultHashBuilder::default();
+let mut set = HashSet::with_capacity_and_hasher(10, s);
+set.insert(1);
+
source

pub fn hasher(&self) -> &S

Returns a reference to the set’s BuildHasher.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_map::DefaultHashBuilder;
+
+let hasher = DefaultHashBuilder::default();
+let set: HashSet<i32> = HashSet::with_hasher(hasher);
+let hasher: &DefaultHashBuilder = set.hasher();
+
source§

impl<T, S, A> HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source

pub fn reserve(&mut self, additional: usize)

Reserves capacity for at least additional more elements to be inserted +in the HashSet. The collection may reserve more space to avoid +frequent reallocations.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program in case of allocation error. Use try_reserve instead if you want to handle memory allocation failure.

+
§Examples
+
use hashbrown::HashSet;
+let mut set: HashSet<i32> = HashSet::new();
+set.reserve(10);
+assert!(set.capacity() >= 10);
+
source

pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted +in the given HashSet<K,V>. The collection may reserve more space to avoid +frequent reallocations.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use hashbrown::HashSet;
+let mut set: HashSet<i32> = HashSet::new();
+set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+
source

pub fn shrink_to_fit(&mut self)

Shrinks the capacity of the set as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::with_capacity(100);
+set.insert(1);
+set.insert(2);
+assert!(set.capacity() >= 100);
+set.shrink_to_fit();
+assert!(set.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize)

Shrinks the capacity of the set with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

Panics if the current capacity is smaller than the supplied +minimum capacity.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::with_capacity(100);
+set.insert(1);
+set.insert(2);
+assert!(set.capacity() >= 100);
+set.shrink_to(10);
+assert!(set.capacity() >= 10);
+set.shrink_to(0);
+assert!(set.capacity() >= 2);
+
source

pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S, A>

Visits the values representing the difference, +i.e., the values that are in self but not in other.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Can be seen as `a - b`.
+for x in a.difference(&b) {
+    println!("{}", x); // Print 1
+}
+
+let diff: HashSet<_> = a.difference(&b).collect();
+assert_eq!(diff, [1].iter().collect());
+
+// Note that difference is not symmetric,
+// and `b - a` means something else:
+let diff: HashSet<_> = b.difference(&a).collect();
+assert_eq!(diff, [4].iter().collect());
+
source

pub fn symmetric_difference<'a>( + &'a self, + other: &'a Self +) -> SymmetricDifference<'a, T, S, A>

Visits the values representing the symmetric difference, +i.e., the values that are in self or in other but not in both.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 1, 4 in arbitrary order.
+for x in a.symmetric_difference(&b) {
+    println!("{}", x);
+}
+
+let diff1: HashSet<_> = a.symmetric_difference(&b).collect();
+let diff2: HashSet<_> = b.symmetric_difference(&a).collect();
+
+assert_eq!(diff1, diff2);
+assert_eq!(diff1, [1, 4].iter().collect());
+
source

pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S, A>

Visits the values representing the intersection, +i.e., the values that are both in self and other.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 2, 3 in arbitrary order.
+for x in a.intersection(&b) {
+    println!("{}", x);
+}
+
+let intersection: HashSet<_> = a.intersection(&b).collect();
+assert_eq!(intersection, [2, 3].iter().collect());
+
source

pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S, A>

Visits the values representing the union, +i.e., all the values in self or other, without duplicates.

+
§Examples
+
use hashbrown::HashSet;
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+
+// Print 1, 2, 3, 4 in arbitrary order.
+for x in a.union(&b) {
+    println!("{}", x);
+}
+
+let union: HashSet<_> = a.union(&b).collect();
+assert_eq!(union, [1, 2, 3, 4].iter().collect());
+
source

pub fn contains<Q>(&self, value: &Q) -> bool
where + Q: Hash + Equivalent<T> + ?Sized,

Returns true if the set contains a value.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.contains(&1), true);
+assert_eq!(set.contains(&4), false);
+
source

pub fn get<Q>(&self, value: &Q) -> Option<&T>
where + Q: Hash + Equivalent<T> + ?Sized,

Returns a reference to the value in the set, if any, that is equal to the given value.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.get(&2), Some(&2));
+assert_eq!(set.get(&4), None);
+
source

pub fn get_or_insert(&mut self, value: T) -> &T

Inserts the given value into the set if it is not present, then +returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.len(), 3);
+assert_eq!(set.get_or_insert(2), &2);
+assert_eq!(set.get_or_insert(100), &100);
+assert_eq!(set.len(), 4); // 100 was inserted
+
source

pub fn get_or_insert_owned<Q>(&mut self, value: &Q) -> &T
where + Q: Hash + Equivalent<T> + ToOwned<Owned = T> + ?Sized,

Inserts an owned copy of the given value into the set if it is not +present, then returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    .iter().map(|&pet| pet.to_owned()).collect();
+
+assert_eq!(set.len(), 3);
+for &pet in &["cat", "dog", "fish"] {
+    let value = set.get_or_insert_owned(pet);
+    assert_eq!(value, pet);
+}
+assert_eq!(set.len(), 4); // a new "fish" was inserted
+
source

pub fn get_or_insert_with<Q, F>(&mut self, value: &Q, f: F) -> &T
where + Q: Hash + Equivalent<T> + ?Sized, + F: FnOnce(&Q) -> T,

Inserts a value computed from f into the set if the given value is +not present, then returns a reference to the value in the set.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    .iter().map(|&pet| pet.to_owned()).collect();
+
+assert_eq!(set.len(), 3);
+for &pet in &["cat", "dog", "fish"] {
+    let value = set.get_or_insert_with(pet, str::to_owned);
+    assert_eq!(value, pet);
+}
+assert_eq!(set.len(), 4); // a new "fish" was inserted
+
source

pub fn entry(&mut self, value: T) -> Entry<'_, T, S, A>

Gets the given value’s corresponding entry in the set for in-place manipulation.

+
§Examples
+
use hashbrown::HashSet;
+use hashbrown::hash_set::Entry::*;
+
+let mut singles = HashSet::new();
+let mut dupes = HashSet::new();
+
+for ch in "a short treatise on fungi".chars() {
+    if let Vacant(dupe_entry) = dupes.entry(ch) {
+        // We haven't already seen a duplicate, so
+        // check if we've at least seen it once.
+        match singles.entry(ch) {
+            Vacant(single_entry) => {
+                // We found a new character for the first time.
+                single_entry.insert()
+            }
+            Occupied(single_entry) => {
+                // We've already seen this once, "move" it to dupes.
+                single_entry.remove();
+                dupe_entry.insert();
+            }
+        }
+    }
+}
+
+assert!(!singles.contains(&'t') && dupes.contains(&'t'));
+assert!(singles.contains(&'u') && !dupes.contains(&'u'));
+assert!(!singles.contains(&'v') && !dupes.contains(&'v'));
+
source

pub fn is_disjoint(&self, other: &Self) -> bool

Returns true if self has no elements in common with other. +This is equivalent to checking for an empty intersection.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+let mut b = HashSet::new();
+
+assert_eq!(a.is_disjoint(&b), true);
+b.insert(4);
+assert_eq!(a.is_disjoint(&b), true);
+b.insert(1);
+assert_eq!(a.is_disjoint(&b), false);
+
source

pub fn is_subset(&self, other: &Self) -> bool

Returns true if the set is a subset of another, +i.e., other contains at least all the values in self.

+
§Examples
+
use hashbrown::HashSet;
+
+let sup: HashSet<_> = [1, 2, 3].into_iter().collect();
+let mut set = HashSet::new();
+
+assert_eq!(set.is_subset(&sup), true);
+set.insert(2);
+assert_eq!(set.is_subset(&sup), true);
+set.insert(4);
+assert_eq!(set.is_subset(&sup), false);
+
source

pub fn is_superset(&self, other: &Self) -> bool

Returns true if the set is a superset of another, +i.e., self contains at least all the values in other.

+
§Examples
+
use hashbrown::HashSet;
+
+let sub: HashSet<_> = [1, 2].into_iter().collect();
+let mut set = HashSet::new();
+
+assert_eq!(set.is_superset(&sub), false);
+
+set.insert(0);
+set.insert(1);
+assert_eq!(set.is_superset(&sub), false);
+
+set.insert(2);
+assert_eq!(set.is_superset(&sub), true);
+
source

pub fn insert(&mut self, value: T) -> bool

Adds a value to the set.

+

If the set did not have this value present, true is returned.

+

If the set did have this value present, false is returned.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+
+assert_eq!(set.insert(2), true);
+assert_eq!(set.insert(2), false);
+assert_eq!(set.len(), 1);
+
source

pub fn insert_unique_unchecked(&mut self, value: T) -> &T

Inserts a value into the set without checking whether the value already exists in the set.

+

Returns a reference to the value just inserted.

+

This operation is safe if the value does not already exist in the set.

+

However, if the value already exists in the set, the behavior is unspecified: this operation may panic or loop forever, and any following operation on the set may panic, loop forever, or return an arbitrary result.

+

That said, this operation (and any following operations) is guaranteed not to violate memory safety.

+

This operation is faster than regular insert, because it does not perform +lookup before insertion.

+

This operation is useful during initial population of the set. +For example, when constructing a set from another set, we know +that values are unique.

+
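A minimal sketch of the intended use during initial population, assuming the caller already knows every value is distinct:

use hashbrown::HashSet;

let mut set = HashSet::new();
// The values are known to be unique, so the pre-insert lookup can be skipped.
for value in [1, 2, 3] {
    set.insert_unique_unchecked(value);
}
assert_eq!(set.len(), 3);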
source

pub fn replace(&mut self, value: T) -> Option<T>

Adds a value to the set, replacing the existing value, if any, that is equal to the given +one. Returns the replaced value.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+set.insert(Vec::<i32>::new());
+
+assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+set.replace(Vec::with_capacity(10));
+assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+
source

pub fn remove<Q>(&mut self, value: &Q) -> bool
where + Q: Hash + Equivalent<T> + ?Sized,

Removes a value from the set. Returns whether the value was +present in the set.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set = HashSet::new();
+
+set.insert(2);
+assert_eq!(set.remove(&2), true);
+assert_eq!(set.remove(&2), false);
+
source

pub fn take<Q>(&mut self, value: &Q) -> Option<T>
where + Q: Hash + Equivalent<T> + ?Sized,

Removes and returns the value in the set, if any, that is equal to the given one.

+

The value may be any borrowed form of the set’s value type, but +Hash and Eq on the borrowed form must match those for +the value type.

+
§Examples
+
use hashbrown::HashSet;
+
+let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+assert_eq!(set.take(&2), Some(2));
+assert_eq!(set.take(&2), None);
+

Trait Implementations§

source§

impl<T, S, A> BitAnd<&HashSet<T, S, A>> for &HashSet<T, S, A>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default, + A: Allocator,

source§

fn bitand(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S>

Returns the intersection of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
+
+let set = &a & &b;
+
+let mut i = 0;
+let expected = [2, 3];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the & operator.
source§

impl<T, S, A> BitOr<&HashSet<T, S, A>> for &HashSet<T, S, A>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default, + A: Allocator,

source§

fn bitor(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S>

Returns the union of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a | &b;
+
+let mut i = 0;
+let expected = [1, 2, 3, 4, 5];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the | operator.
source§

impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default,

source§

fn bitxor(self, rhs: &HashSet<T, S>) -> HashSet<T, S>

Returns the symmetric difference of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a ^ &b;
+
+let mut i = 0;
+let expected = [1, 2, 4, 5];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the ^ operator.
source§

impl<T: Clone, S: Clone, A: Allocator + Clone> Clone for HashSet<T, S, A>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, S, A> Debug for HashSet<T, S, A>
where + T: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, S, A> Default for HashSet<T, S, A>
where + S: Default, + A: Default + Allocator,

source§

fn default() -> Self

Creates an empty HashSet<T, S> with the Default value for the hasher.

+
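A minimal sketch of default construction (the element type is illustrative):

use hashbrown::HashSet;

let set: HashSet<i32> = HashSet::default();
assert!(set.is_empty());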
source§

impl<'a, T, S, A> Extend<&'a T> for HashSet<T, S, A>
where + T: 'a + Eq + Hash + Copy, + S: BuildHasher, + A: Allocator,

source§

fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I)

Extends a collection with the contents of an iterator. Read more
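A hedged sketch of extending a set from borrowed items, which this impl supports when the element type is Copy (values are illustrative):

use hashbrown::HashSet;

let mut set: HashSet<i32> = [1, 2].into_iter().collect();
let more = [2, 3, 4];
// `&more` yields `&i32` items; `i32: Copy`, so this impl applies.
set.extend(&more);
assert_eq!(set.len(), 4);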
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> Extend<T> for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I)

Extends a collection with the contents of an iterator. Read more
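A minimal sketch of extending a set from owned items (values are illustrative):

use hashbrown::HashSet;

let mut set: HashSet<i32> = [1, 2].into_iter().collect();
// Duplicates already in the set are simply kept once.
set.extend(vec![2, 3, 4]);
assert_eq!(set.len(), 4);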
source§

fn extend_one(&mut self, item: A)

🔬This is a nightly-only experimental API. (extend_one)
Extends a collection with exactly one element.
source§

fn extend_reserve(&mut self, additional: usize)

🔬This is a nightly-only experimental API. (extend_one)
Reserves capacity in a collection for the given number of additional elements. Read more
source§

impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
where + A: Allocator,

source§

fn from(map: HashMap<T, (), S, A>) -> Self

Converts to this type from the input type.
source§

impl<T, S, A> FromIterator<T> for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher + Default, + A: Default + Allocator,

source§

fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self

Creates a value from an iterator. Read more
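A minimal sketch of collecting an iterator directly into a set (the input is illustrative):

use hashbrown::HashSet;

let set: HashSet<char> = "hello".chars().collect();
// The duplicate 'l' collapses into a single element.
assert_eq!(set.len(), 4);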
source§

impl<'a, T, S, A: Allocator> IntoIterator for &'a HashSet<T, S, A>

§

type Item = &'a T

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> Iter<'a, T>

Creates an iterator from a value. Read more
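A minimal sketch of iterating a set by reference (values are illustrative):

use hashbrown::HashSet;

let set: HashSet<_> = [1, 2, 3].into_iter().collect();
// `&set` yields `&i32` items in arbitrary order; the set remains usable afterwards.
for x in &set {
    println!("{}", x);
}
assert_eq!(set.len(), 3);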
source§

impl<T, S, A: Allocator> IntoIterator for HashSet<T, S, A>

source§

fn into_iter(self) -> IntoIter<T, A>

Creates a consuming iterator, that is, one that moves each value out +of the set in arbitrary order. The set cannot be used after calling +this.

+
§Examples
+
use hashbrown::HashSet;
+let mut set = HashSet::new();
+set.insert("a".to_string());
+set.insert("b".to_string());
+
+// Not possible to collect to a Vec<String> with a regular `.iter()`.
+let v: Vec<String> = set.into_iter().collect();
+
+// Will print in an arbitrary order.
+for x in &v {
+    println!("{}", x);
+}
+
§

type Item = T

The type of the elements being iterated over.
§

type IntoIter = IntoIter<T, A>

Which kind of iterator are we turning this into?
source§

impl<T, S, A> PartialEq for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

source§

fn eq(&self, other: &Self) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
where + T: Eq + Hash + Clone, + S: BuildHasher + Default,

source§

fn sub(self, rhs: &HashSet<T, S>) -> HashSet<T, S>

Returns the difference of self and rhs as a new HashSet<T, S>.

+
§Examples
+
use hashbrown::HashSet;
+
+let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+
+let set = &a - &b;
+
+let mut i = 0;
+let expected = [1, 2];
+for x in &set {
+    assert!(expected.contains(x));
+    i += 1;
+}
+assert_eq!(i, expected.len());
+
§

type Output = HashSet<T, S>

The resulting type after applying the - operator.
source§

impl<T, S, A> Eq for HashSet<T, S, A>
where + T: Eq + Hash, + S: BuildHasher, + A: Allocator,

Auto Trait Implementations§

§

impl<T, S, A> Freeze for HashSet<T, S, A>
where + S: Freeze, + A: Freeze,

§

impl<T, S, A> RefUnwindSafe for HashSet<T, S, A>
where + S: RefUnwindSafe, + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<T, S, A> Send for HashSet<T, S, A>
where + S: Send, + A: Send, + T: Send,

§

impl<T, S, A> Sync for HashSet<T, S, A>
where + S: Sync, + A: Sync, + T: Sync,

§

impl<T, S, A> Unpin for HashSet<T, S, A>
where + S: Unpin, + A: Unpin, + T: Unpin,

§

impl<T, S, A> UnwindSafe for HashSet<T, S, A>
where + S: UnwindSafe, + A: UnwindSafe, + T: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

source§

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/struct.HashTable.html b/hashbrown/struct.HashTable.html new file mode 100644 index 000000000..b30f48c20 --- /dev/null +++ b/hashbrown/struct.HashTable.html @@ -0,0 +1,587 @@ +HashTable in hashbrown - Rust

Struct hashbrown::HashTable

source ·
pub struct HashTable<T, A = Global>
where + A: Allocator,
{ /* private fields */ }
Expand description

Low-level hash table with explicit hashing.

+

The primary use case for this type over HashMap or HashSet is to +support types that do not implement the Hash and Eq traits, but +instead require additional data not contained in the key itself to compute a +hash and compare two elements for equality.

+

Examples of when this can be useful include:

+
  • An IndexMap implementation where indices into a Vec are stored as elements in a HashTable<usize>. Hashing and comparing the elements requires indexing the associated Vec to get the actual value referred to by the index.
  • Avoiding re-computing a hash when it is already known.
  • Mutating the key of an element in a way that doesn’t affect its hash.
+

To achieve this, HashTable methods that search for an element in the table +require a hash value and equality function to be explicitly passed in as +arguments. The method will then iterate over the elements with the given +hash and call the equality function on each of them, until a match is found.

+

In most cases, a HashTable will not be exposed directly in an API. It will +instead be wrapped in a helper type which handles the work of calculating +hash values and comparing elements.

+

Due to its low-level nature, this type provides fewer guarantees than HashMap and HashSet. Specifically, the API allows you to shoot yourself in the foot by having multiple elements with identical keys in the table. The table itself will still function correctly and lookups will arbitrarily return one of the matching elements. However, you should avoid doing this because it changes the runtime of hash table operations from O(1) to O(k), where k is the number of duplicate entries.

+
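As a hedged sketch of the “wrapped in a helper type” pattern described above, the following hypothetical TinyMap (the name and API are illustrative, not part of hashbrown) owns a BuildHasher and hides the explicit-hash API; it assumes each key is inserted at most once:

use std::collections::hash_map::RandomState;
use std::hash::{BuildHasher, Hash};

use hashbrown::HashTable;

// Hypothetical wrapper type for illustration only.
struct TinyMap<K, V> {
    hasher: RandomState,
    table: HashTable<(K, V)>,
}

impl<K: Hash + Eq, V> TinyMap<K, V> {
    fn new() -> Self {
        TinyMap { hasher: RandomState::new(), table: HashTable::new() }
    }

    // Caller guarantees the key is not already present.
    fn insert_unique(&mut self, key: K, value: V) {
        let hash = self.hasher.hash_one(&key);
        let hasher = &self.hasher;
        // The rehash closure must return the same hash each entry was inserted with.
        self.table.insert_unique(hash, (key, value), |(k, _)| hasher.hash_one(k));
    }

    fn get(&self, key: &K) -> Option<&V> {
        let hash = self.hasher.hash_one(key);
        self.table.find(hash, |(k, _)| k == key).map(|(_, v)| v)
    }
}

let mut map = TinyMap::new();
map.insert_unique("answer", 42);
assert_eq!(map.get(&"answer"), Some(&42));
assert_eq!(map.get(&"question"), None);

The wrapper is where the hash values and equality closures are produced, so users of TinyMap never touch the explicit-hash API directly.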

Implementations§

source§

impl<T> HashTable<T, Global>

source

pub const fn new() -> Self

Creates an empty HashTable.

+

The hash table is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§Examples
+
use hashbrown::HashTable;
+let mut table: HashTable<&str> = HashTable::new();
+assert_eq!(table.len(), 0);
+assert_eq!(table.capacity(), 0);
+
source

pub fn with_capacity(capacity: usize) -> Self

Creates an empty HashTable with the specified capacity.

+

The hash table will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash table will not allocate.

+
§Examples
+
use hashbrown::HashTable;
+let mut table: HashTable<&str> = HashTable::with_capacity(10);
+assert_eq!(table.len(), 0);
+assert!(table.capacity() >= 10);
+
source§

impl<T, A> HashTable<T, A>
where + A: Allocator,

source

pub const fn new_in(alloc: A) -> Self

Creates an empty HashTable using the given allocator.

+

The hash table is initially created with a capacity of 0, so it will not allocate until it +is first inserted into.

+
§Examples
+
use ahash::AHasher;
+use bumpalo::Bump;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let bump = Bump::new();
+let mut table = HashTable::new_in(&bump);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// The created HashTable holds no elements
+assert_eq!(table.len(), 0);
+
+// The created HashTable also doesn't allocate memory
+assert_eq!(table.capacity(), 0);
+
+// Now we insert an element into the created HashTable
+table.insert_unique(hasher(&"One"), "One", hasher);
+// We can see that the HashTable holds 1 element
+assert_eq!(table.len(), 1);
+// And it also allocates some capacity
+assert!(table.capacity() > 1);
+
source

pub fn with_capacity_in(capacity: usize, alloc: A) -> Self

Creates an empty HashTable with the specified capacity using the given allocator.

+

The hash table will be able to hold at least capacity elements without +reallocating. If capacity is 0, the hash table will not allocate.

+
§Examples
+
use ahash::AHasher;
+use bumpalo::Bump;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let bump = Bump::new();
+let mut table = HashTable::with_capacity_in(5, &bump);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+
+// The created HashTable holds no elements
+assert_eq!(table.len(), 0);
+// But it can hold at least 5 elements without reallocating
+let empty_map_capacity = table.capacity();
+assert!(empty_map_capacity >= 5);
+
+// Now we insert 5 elements into the created HashTable
+table.insert_unique(hasher(&"One"), "One", hasher);
+table.insert_unique(hasher(&"Two"), "Two", hasher);
+table.insert_unique(hasher(&"Three"), "Three", hasher);
+table.insert_unique(hasher(&"Four"), "Four", hasher);
+table.insert_unique(hasher(&"Five"), "Five", hasher);
+
+// We can see that the HashTable holds 5 elements
+assert_eq!(table.len(), 5);
+// But its capacity isn't changed
+assert_eq!(table.capacity(), empty_map_capacity)
+
source

pub fn allocator(&self) -> &A

Returns a reference to the underlying allocator.

+
source

pub fn find(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T>

Returns a reference to an entry in the table with the given hash and +which satisfies the equality function passed.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+table.insert_unique(hasher(&3), 3, hasher);
+assert_eq!(table.find(hasher(&2), |&val| val == 2), Some(&2));
+assert_eq!(table.find(hasher(&4), |&val| val == 4), None);
+
source

pub fn find_mut( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool +) -> Option<&mut T>

Returns a mutable reference to an entry in the table with the given hash +and which satisfies the equality function passed.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+

When mutating an entry, you should ensure that it still retains the same +hash value as when it was inserted, otherwise lookups of that entry may +fail to find it.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Some(val) = table.find_mut(hasher(&1), |val| val.0 == 1) {
+    val.1 = "b";
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), Some(&(1, "b")));
+assert_eq!(table.find(hasher(&2), |val| val.0 == 2), None);
+
source

pub fn find_entry( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool +) -> Result<OccupiedEntry<'_, T, A>, AbsentEntry<'_, T, A>>

Returns an OccupiedEntry for an entry in the table with the given hash +and which satisfies the equality function passed.

+

This can be used to remove the entry from the table. Call +HashTable::entry instead if you wish to insert an entry if the +lookup fails.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Ok(entry) = table.find_entry(hasher(&1), |val| val.0 == 1) {
+    entry.remove();
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+
source

pub fn entry( + &mut self, + hash: u64, + eq: impl FnMut(&T) -> bool, + hasher: impl Fn(&T) -> u64 +) -> Entry<'_, T, A>

Returns an Entry for an entry in the table with the given hash +and which satisfies the equality function passed.

+

This can be used to remove the entry from the table, or insert a new +entry with the given hash if one doesn’t already exist.

+

This method will call eq for all entries with the given hash, but may +also call it for entries with a different hash. eq should only return +true for the desired entry, at which point the search is stopped.

+

This method may grow the table in preparation for an insertion. Call +HashTable::find_entry if this is undesirable.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+if let Entry::Occupied(entry) = table.entry(hasher(&1), |val| val.0 == 1, |val| hasher(&val.0))
+{
+    entry.remove();
+}
+if let Entry::Vacant(entry) = table.entry(hasher(&2), |val| val.0 == 2, |val| hasher(&val.0)) {
+    entry.insert((2, "b"));
+}
+assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+assert_eq!(table.find(hasher(&2), |val| val.0 == 2), Some(&(2, "b")));
+
source

pub fn insert_unique( + &mut self, + hash: u64, + value: T, + hasher: impl Fn(&T) -> u64 +) -> OccupiedEntry<'_, T, A>

Inserts an element into the HashTable with the given hash value, but +without checking whether an equivalent element already exists within the +table.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut v = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+v.insert_unique(hasher(&1), 1, hasher);
+
source

pub fn clear(&mut self)

Clears the table, removing all values.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut v = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+v.insert_unique(hasher(&1), 1, hasher);
+v.clear();
+assert!(v.is_empty());
+
source

pub fn shrink_to_fit(&mut self, hasher: impl Fn(&T) -> u64)

Shrinks the capacity of the table as much as possible. It will drop +down as much as possible while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::with_capacity(100);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+assert!(table.capacity() >= 100);
+table.shrink_to_fit(hasher);
+assert!(table.capacity() >= 2);
+
source

pub fn shrink_to(&mut self, min_capacity: usize, hasher: impl Fn(&T) -> u64)

Shrinks the capacity of the table with a lower limit. It will drop +down no lower than the supplied limit while maintaining the internal rules +and possibly leaving some space in accordance with the resize policy.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+

Panics if the current capacity is smaller than the supplied +minimum capacity.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::with_capacity(100);
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+assert!(table.capacity() >= 100);
+table.shrink_to(10, hasher);
+assert!(table.capacity() >= 10);
+table.shrink_to(0, hasher);
+assert!(table.capacity() >= 2);
+
source

pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64)

Reserves capacity for at least additional more elements to be inserted +in the HashTable. The collection may reserve more space to avoid +frequent reallocations.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Panics
+

Panics if the new capacity exceeds isize::MAX bytes, and aborts the program in case of allocation error. Use try_reserve instead if you want to handle memory allocation failure.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<i32> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.reserve(10, hasher);
+assert!(table.capacity() >= 10);
+
source

pub fn try_reserve( + &mut self, + additional: usize, + hasher: impl Fn(&T) -> u64 +) -> Result<(), TryReserveError>

Tries to reserve capacity for at least additional more elements to be inserted +in the given HashTable. The collection may reserve more space to avoid +frequent reallocations.

+

hasher is called if entries need to be moved or copied to a new table. +This must return the same hash value that each entry was inserted with.

+
§Errors
+

If the capacity overflows, or the allocator reports a failure, then an error +is returned.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table: HashTable<i32> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table
+    .try_reserve(10, hasher)
+    .expect("why is the test harness OOMing on 10 bytes?");
+
source

pub fn capacity(&self) -> usize

Returns the number of elements the table can hold without reallocating.

+
§Examples
+
use hashbrown::HashTable;
+let table: HashTable<i32> = HashTable::with_capacity(100);
+assert!(table.capacity() >= 100);
+
source

pub fn len(&self) -> usize

Returns the number of elements in the table.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+let mut v = HashTable::new();
+assert_eq!(v.len(), 0);
+v.insert_unique(hasher(&1), 1, hasher);
+assert_eq!(v.len(), 1);
+
source

pub fn is_empty(&self) -> bool

Returns true if the set contains no elements.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+let mut v = HashTable::new();
+assert!(v.is_empty());
+v.insert_unique(hasher(&1), 1, hasher);
+assert!(!v.is_empty());
+
source

pub fn iter(&self) -> Iter<'_, T>

An iterator visiting all elements in arbitrary order. +The iterator element type is &'a T.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&"a"), "b", hasher);
+table.insert_unique(hasher(&"b"), "b", hasher);
+
+// Will print in an arbitrary order.
+for x in table.iter() {
+    println!("{}", x);
+}
+
source

pub fn iter_mut(&mut self) -> IterMut<'_, T>

An iterator visiting all elements in arbitrary order, +with mutable references to the elements. +The iterator element type is &'a mut T.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+table.insert_unique(hasher(&1), 1, hasher);
+table.insert_unique(hasher(&2), 2, hasher);
+table.insert_unique(hasher(&3), 3, hasher);
+
+// Update all values
+for val in table.iter_mut() {
+    *val *= 2;
+}
+
+assert_eq!(table.len(), 3);
+let mut vec: Vec<i32> = Vec::new();
+
+for val in &table {
+    println!("val: {}", val);
+    vec.push(*val);
+}
+
+// The `Iter` iterator produces items in arbitrary order, so the
+// items must be sorted to test them against a sorted array.
+vec.sort_unstable();
+assert_eq!(vec, [2, 4, 6]);
+
+assert_eq!(table.len(), 3);
+
source

pub fn retain(&mut self, f: impl FnMut(&mut T) -> bool)

Retains only the elements specified by the predicate.

+

In other words, remove all elements e such that f(&e) returns false.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 1..=6 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+table.retain(|&mut x| x % 2 == 0);
+assert_eq!(table.len(), 3);
+
source

pub fn drain(&mut self) -> Drain<'_, T, A>

Clears the set, returning all elements in an iterator.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 1..=3 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+assert!(!table.is_empty());
+
+// print 1, 2, 3 in an arbitrary order
+for i in table.drain() {
+    println!("{}", i);
+}
+
+assert!(table.is_empty());
+
source

pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
where + F: FnMut(&mut T) -> bool,

Drains elements which are true under the given predicate, +and returns an iterator over the removed items.

+

In other words, move all elements e such that f(&e) returns true out +into another iterator.

+

If the returned ExtractIf is not exhausted, e.g. because it is dropped without iterating +or the iteration short-circuits, then the remaining elements will be retained. +Use retain() with a negated predicate if you do not need the returned iterator.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut table = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for x in 0..8 {
+    table.insert_unique(hasher(&x), x, hasher);
+}
+let drained: Vec<i32> = table.extract_if(|&mut v| v % 2 == 0).collect();
+
+let mut evens = drained.into_iter().collect::<Vec<_>>();
+let mut odds = table.into_iter().collect::<Vec<_>>();
+evens.sort();
+odds.sort();
+
+assert_eq!(evens, vec![0, 2, 4, 6]);
+assert_eq!(odds, vec![1, 3, 5, 7]);
+
source

pub fn get_many_mut<const N: usize>(
    &mut self,
    hashes: [u64; N],
    eq: impl FnMut(usize, &T) -> bool
) -> Option<[&mut T; N]>

Attempts to get mutable references to N values in the map at once.

+

The eq argument should be a closure such that eq(i, k) returns true if k is equal to +the ith key to be looked up.

+

Returns an array of length N with the results of each query. For soundness, at most one +mutable reference will be returned to any value. None will be returned if any of the +keys are duplicates or missing.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for (k, v) in [
+    ("Bodleian Library", 1602),
+    ("Athenæum", 1807),
+    ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ("Library of Congress", 1800),
+] {
+    libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+}
+
+let keys = ["Athenæum", "Library of Congress"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(
+    got,
+    Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+);
+
+// Missing keys result in None
+let keys = ["Athenæum", "New York Public Library"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let keys = ["Athenæum", "Athenæum"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
source

pub unsafe fn get_many_unchecked_mut<const N: usize>(
    &mut self,
    hashes: [u64; N],
    eq: impl FnMut(usize, &T) -> bool
) -> Option<[&mut T; N]>

Attempts to get mutable references to N values in the map at once, without validating that +the values are unique.

+

The eq argument should be a closure such that eq(i, k) returns true if k is equal to +the ith key to be looked up.

+

Returns an array of length N with the results of each query. None will be returned if +any of the keys are missing.

+

For a safe alternative see get_many_mut.

+
§Safety
+

Calling this method with overlapping keys is undefined behavior even if the resulting +references are not used.

+
§Examples
+
use ahash::AHasher;
+use hashbrown::hash_table::Entry;
+use hashbrown::HashTable;
+use std::hash::{BuildHasher, BuildHasherDefault};
+
+let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+let hasher = BuildHasherDefault::<AHasher>::default();
+let hasher = |val: &_| hasher.hash_one(val);
+for (k, v) in [
+    ("Bodleian Library", 1602),
+    ("Athenæum", 1807),
+    ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ("Library of Congress", 1800),
+] {
+    libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+}
+
+let keys = ["Athenæum", "Library of Congress"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(
+    got,
+    Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+);
+
+// Missing keys result in None
+let keys = ["Athenæum", "New York Public Library"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+
+// Duplicate keys result in None
+let keys = ["Athenæum", "Athenæum"];
+let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+assert_eq!(got, None);
+

Trait Implementations§

source§

impl<T, A> Clone for HashTable<T, A>
where + T: Clone, + A: Allocator + Clone,

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T, A> Debug for HashTable<T, A>
where + T: Debug, + A: Allocator,

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<T, A> Default for HashTable<T, A>
where + A: Allocator + Default,

source§

fn default() -> Self

Returns the “default value” for a type. Read more
source§

impl<'a, T, A> IntoIterator for &'a HashTable<T, A>
where + A: Allocator,

§

type Item = &'a T

The type of the elements being iterated over.
§

type IntoIter = Iter<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> Iter<'a, T>

Creates an iterator from a value. Read more
source§

impl<'a, T, A> IntoIterator for &'a mut HashTable<T, A>
where + A: Allocator,

§

type Item = &'a mut T

The type of the elements being iterated over.
§

type IntoIter = IterMut<'a, T>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> IterMut<'a, T>

Creates an iterator from a value. Read more
source§

impl<T, A> IntoIterator for HashTable<T, A>
where + A: Allocator,

§

type Item = T

The type of the elements being iterated over.
§

type IntoIter = IntoIter<T, A>

Which kind of iterator are we turning this into?
source§

fn into_iter(self) -> IntoIter<T, A>

Creates an iterator from a value. Read more
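As an added sketch (not part of the generated page), the three IntoIterator forms in one place, reusing the ahash-based hashing pattern from the method examples above; the name build is purely illustrative.

use ahash::AHasher;
use hashbrown::HashTable;
use std::hash::{BuildHasher, BuildHasherDefault};

let build = BuildHasherDefault::<AHasher>::default();
let hasher = |val: &_| build.hash_one(val);

let mut table = HashTable::new();
for x in 1..=3 {
    table.insert_unique(hasher(&x), x, hasher);
}

// &HashTable yields Iter<'_, T> with Item = &T.
let sum: i32 = (&table).into_iter().sum();
assert_eq!(sum, 6);

// &mut HashTable yields IterMut<'_, T> with Item = &mut T.
for val in &mut table {
    *val += 1;
}

// HashTable by value yields IntoIter<T, A> with Item = T.
let mut owned: Vec<i32> = table.into_iter().collect();
owned.sort_unstable();
assert_eq!(owned, [2, 3, 4]);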

Auto Trait Implementations§

§

impl<T, A> Freeze for HashTable<T, A>
where + A: Freeze,

§

impl<T, A> RefUnwindSafe for HashTable<T, A>
where + A: RefUnwindSafe, + T: RefUnwindSafe,

§

impl<T, A> Send for HashTable<T, A>
where + T: Send, + A: Send,

§

impl<T, A> Sync for HashTable<T, A>
where + T: Sync, + A: Sync,

§

impl<T, A> Unpin for HashTable<T, A>
where + A: Unpin, + T: Unpin,

§

impl<T, A> UnwindSafe for HashTable<T, A>
where + A: UnwindSafe, + T: UnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/hashbrown/table/enum.Entry.html b/hashbrown/table/enum.Entry.html new file mode 100644 index 000000000..07ad323ac --- /dev/null +++ b/hashbrown/table/enum.Entry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/enum.Entry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.AbsentEntry.html b/hashbrown/table/struct.AbsentEntry.html new file mode 100644 index 000000000..ae2218449 --- /dev/null +++ b/hashbrown/table/struct.AbsentEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.AbsentEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.Drain.html b/hashbrown/table/struct.Drain.html new file mode 100644 index 000000000..65a7e9914 --- /dev/null +++ b/hashbrown/table/struct.Drain.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.Drain.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.ExtractIf.html b/hashbrown/table/struct.ExtractIf.html new file mode 100644 index 000000000..5546734ca --- /dev/null +++ b/hashbrown/table/struct.ExtractIf.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.ExtractIf.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.HashTable.html b/hashbrown/table/struct.HashTable.html new file mode 100644 index 000000000..e6a331469 --- /dev/null +++ b/hashbrown/table/struct.HashTable.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.HashTable.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.IntoIter.html b/hashbrown/table/struct.IntoIter.html new file mode 100644 index 000000000..7cf6655d7 --- /dev/null +++ b/hashbrown/table/struct.IntoIter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.IntoIter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.Iter.html b/hashbrown/table/struct.Iter.html new file mode 100644 index 000000000..bf425e56a --- /dev/null +++ b/hashbrown/table/struct.Iter.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.Iter.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.IterMut.html b/hashbrown/table/struct.IterMut.html new file mode 100644 index 000000000..df9bd8980 --- /dev/null +++ b/hashbrown/table/struct.IterMut.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.IterMut.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.OccupiedEntry.html b/hashbrown/table/struct.OccupiedEntry.html new file mode 100644 index 000000000..0a46f35b7 --- /dev/null +++ b/hashbrown/table/struct.OccupiedEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.OccupiedEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/table/struct.VacantEntry.html b/hashbrown/table/struct.VacantEntry.html new file mode 100644 index 000000000..7c80215de --- /dev/null +++ b/hashbrown/table/struct.VacantEntry.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to ../../hashbrown/hash_table/struct.VacantEntry.html...

+ + + \ No newline at end of file diff --git a/hashbrown/trait.Equivalent.html b/hashbrown/trait.Equivalent.html new file mode 100644 index 000000000..06900ed31 --- /dev/null +++ b/hashbrown/trait.Equivalent.html @@ -0,0 +1,19 @@ +Equivalent in hashbrown - Rust

Trait hashbrown::Equivalent

source ·
pub trait Equivalent<K: ?Sized> {
+    // Required method
+    fn equivalent(&self, key: &K) -> bool;
+}
Expand description

Key equivalence trait.

+

This trait defines the function used to compare the input value with the +map keys (or set values) during a lookup operation such as HashMap::get +or HashSet::contains. +It is provided with a blanket implementation based on the +Borrow trait.

+

§Correctness

+

Equivalent values must hash to the same value.

+
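As an added illustrative sketch (not part of the generated page): the blanket implementation means a borrowed form such as &str can look up String keys, because String: Borrow<str> and both sides hash identically, satisfying the Correctness requirement above. hashbrown's HashMap with its default hasher is assumed here.

use hashbrown::{Equivalent, HashMap};

let mut map: HashMap<String, i32> = HashMap::new();
map.insert("answer".to_string(), 42);

// The blanket impl (via Borrow) lets a &str stand in for a String key.
assert_eq!(map.get("answer"), Some(&42));

// Equivalent can also be checked directly; both sides hash to the same value.
assert!(Equivalent::<String>::equivalent("answer", &"answer".to_string()));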

Required Methods§

source

fn equivalent(&self, key: &K) -> bool

Checks if this value is equivalent to the given key.

+

Returns true if both values are equivalent, and false otherwise.

+
§Correctness
+

When this function returns true, both self and key must hash to +the same value.

+

Implementors§

source§

impl<Q, K> Equivalent<K> for Q
where + Q: Eq + ?Sized, + K: Borrow<Q> + ?Sized,

\ No newline at end of file diff --git a/help.html b/help.html new file mode 100644 index 000000000..2e6ff4ca3 --- /dev/null +++ b/help.html @@ -0,0 +1 @@ +Help

Rustdoc help

Back
\ No newline at end of file diff --git a/log/all.html b/log/all.html new file mode 100644 index 000000000..a5a8edf4f --- /dev/null +++ b/log/all.html @@ -0,0 +1 @@ +List of all items in this crate

List of all items

Structs

Enums

Traits

Macros

Functions

Constants

\ No newline at end of file diff --git a/log/constant.STATIC_MAX_LEVEL.html b/log/constant.STATIC_MAX_LEVEL.html new file mode 100644 index 000000000..28ffcf2db --- /dev/null +++ b/log/constant.STATIC_MAX_LEVEL.html @@ -0,0 +1,6 @@ +STATIC_MAX_LEVEL in log - Rust

Constant log::STATIC_MAX_LEVEL

source ·
pub const STATIC_MAX_LEVEL: LevelFilter;
Expand description

The statically resolved maximum log level.

+

See the crate level documentation for information on how to configure this.

+

This value is checked by the log macros, but not by the Logger returned by +the logger function. Code that manually calls functions on that value +should compare the level against this value.

+
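An added sketch (not part of the generated page) of what such a manual check might look like:

use log::{logger, Level, Record, STATIC_MAX_LEVEL};

// When calling the logger directly instead of using the macros, gate on the
// statically configured ceiling so disabled levels stay cheap.
if Level::Debug <= STATIC_MAX_LEVEL {
    logger().log(
        &Record::builder()
            .level(Level::Debug)
            .args(format_args!("manual debug message"))
            .build(),
    );
}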
\ No newline at end of file diff --git a/log/enum.Level.html b/log/enum.Level.html new file mode 100644 index 000000000..bf4294f7d --- /dev/null +++ b/log/enum.Level.html @@ -0,0 +1,62 @@ +Level in log - Rust

Enum log::Level

source ·
#[repr(usize)]
pub enum Level {
    Error = 1,
    Warn = 2,
    Info = 3,
    Debug = 4,
    Trace = 5,
}
Expand description

An enum representing the available verbosity levels of the logger.

+

Typical usage includes: checking if a certain Level is enabled with +log_enabled!, specifying the Level of +log!, and comparing a Level directly to a +LevelFilter.

+

Variants§

§

Error = 1

The “error” level.

+

Designates very serious errors.

+
§

Warn = 2

The “warn” level.

+

Designates hazardous situations.

+
§

Info = 3

The “info” level.

+

Designates useful information.

+
§

Debug = 4

The “debug” level.

+

Designates lower priority information.

+
§

Trace = 5

The “trace” level.

+

Designates very low priority, often extremely verbose, information.

+

Implementations§

source§

impl Level

source

pub fn max() -> Level

Returns the most verbose logging level.

+
source

pub fn to_level_filter(&self) -> LevelFilter

Converts the Level to the equivalent LevelFilter.

+
source

pub fn as_str(&self) -> &'static str

Returns the string representation of the Level.

+

This returns the same string as the fmt::Display implementation.

+
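A quick added sketch (not in the original page) tying these conversions together:

use log::{Level, LevelFilter};

assert_eq!(Level::max(), Level::Trace);
assert_eq!(Level::Warn.to_level_filter(), LevelFilter::Warn);
assert_eq!(Level::Warn.as_str(), "WARN");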
source

pub fn iter() -> impl Iterator<Item = Self>

Iterate through all supported logging levels.

+

The order of iteration is from more severe to less severe log messages.

+
§Examples
+
use log::Level;
+
+let mut levels = Level::iter();
+
+assert_eq!(Some(Level::Error), levels.next());
+assert_eq!(Some(Level::Trace), levels.last());
+

Trait Implementations§

source§

impl Clone for Level

source§

fn clone(&self) -> Level

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for Level

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Display for Level

source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl FromStr for Level

§

type Err = ParseLevelError

The associated error which can be returned from parsing.
source§

fn from_str(level: &str) -> Result<Level, Self::Err>

Parses a string s to return a value of this type. Read more
source§

impl Hash for Level

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where + H: Hasher, + Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl Ord for Level

source§

fn cmp(&self, other: &Level) -> Ordering

This method returns an Ordering between self and other. Read more
1.21.0 · source§

fn max(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where + Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl PartialEq<Level> for LevelFilter

source§

fn eq(&self, other: &Level) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialEq<LevelFilter> for Level

source§

fn eq(&self, other: &LevelFilter) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialEq for Level

source§

fn eq(&self, other: &Level) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialOrd<Level> for LevelFilter

source§

fn partial_cmp(&self, other: &Level) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl PartialOrd<LevelFilter> for Level

source§

fn partial_cmp(&self, other: &LevelFilter) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl PartialOrd for Level

source§

fn partial_cmp(&self, other: &Level) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl Copy for Level

source§

impl Eq for Level

source§

impl StructuralPartialEq for Level

Auto Trait Implementations§

§

impl Freeze for Level

§

impl RefUnwindSafe for Level

§

impl Send for Level

§

impl Sync for Level

§

impl Unpin for Level

§

impl UnwindSafe for Level

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/enum.LevelFilter.html b/log/enum.LevelFilter.html new file mode 100644 index 000000000..f120656eb --- /dev/null +++ b/log/enum.LevelFilter.html @@ -0,0 +1,58 @@ +LevelFilter in log - Rust

Enum log::LevelFilter

source ·
#[repr(usize)]
pub enum LevelFilter {
    Off = 0,
    Error = 1,
    Warn = 2,
    Info = 3,
    Debug = 4,
    Trace = 5,
}
Expand description

An enum representing the available verbosity level filters of the logger.

+

A LevelFilter may be compared directly to a Level. Use this type +to get and set the maximum log level with max_level() and set_max_level.

+
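A small added sketch (not in the original page) of the direct comparison:

use log::{Level, LevelFilter};

// LevelFilter and Level compare directly; Off sits below every Level.
assert!(Level::Error > LevelFilter::Off);
assert!(Level::Warn <= LevelFilter::Info);
assert!(Level::Trace > LevelFilter::Info);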

Variants§

§

Off = 0

A level lower than all log levels.

+
§

Error = 1

Corresponds to the Error log level.

+
§

Warn = 2

Corresponds to the Warn log level.

+
§

Info = 3

Corresponds to the Info log level.

+
§

Debug = 4

Corresponds to the Debug log level.

+
§

Trace = 5

Corresponds to the Trace log level.

+

Implementations§

source§

impl LevelFilter

source

pub fn max() -> LevelFilter

Returns the most verbose logging level filter.

+
source

pub fn to_level(&self) -> Option<Level>

Converts self to the equivalent Level.

+

Returns None if self is LevelFilter::Off.

+
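A quick added sketch (not in the original page):

use log::{Level, LevelFilter};

assert_eq!(LevelFilter::max(), LevelFilter::Trace);
assert_eq!(LevelFilter::Info.to_level(), Some(Level::Info));
assert_eq!(LevelFilter::Off.to_level(), None);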
source

pub fn as_str(&self) -> &'static str

Returns the string representation of the LevelFilter.

+

This returns the same string as the fmt::Display implementation.

+
source

pub fn iter() -> impl Iterator<Item = Self>

Iterate through all supported filtering levels.

+

The order of iteration is from less to more verbose filtering.

+
§Examples
+
use log::LevelFilter;
+
+let mut levels = LevelFilter::iter();
+
+assert_eq!(Some(LevelFilter::Off), levels.next());
+assert_eq!(Some(LevelFilter::Trace), levels.last());
+

Trait Implementations§

source§

impl Clone for LevelFilter

source§

fn clone(&self) -> LevelFilter

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for LevelFilter

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Display for LevelFilter

source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl FromStr for LevelFilter

§

type Err = ParseLevelError

The associated error which can be returned from parsing.
source§

fn from_str(level: &str) -> Result<LevelFilter, Self::Err>

Parses a string s to return a value of this type. Read more
source§

impl Hash for LevelFilter

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where + H: Hasher, + Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl Ord for LevelFilter

source§

fn cmp(&self, other: &LevelFilter) -> Ordering

This method returns an Ordering between self and other. Read more
1.21.0 · source§

fn max(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where + Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl PartialEq<Level> for LevelFilter

source§

fn eq(&self, other: &Level) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialEq<LevelFilter> for Level

source§

fn eq(&self, other: &LevelFilter) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialEq for LevelFilter

source§

fn eq(&self, other: &LevelFilter) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl PartialOrd<Level> for LevelFilter

source§

fn partial_cmp(&self, other: &Level) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl PartialOrd<LevelFilter> for Level

source§

fn partial_cmp(&self, other: &LevelFilter) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl PartialOrd for LevelFilter

source§

fn partial_cmp(&self, other: &LevelFilter) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl Copy for LevelFilter

source§

impl Eq for LevelFilter

source§

impl StructuralPartialEq for LevelFilter

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/fn.logger.html b/log/fn.logger.html new file mode 100644 index 000000000..0955c21e6 --- /dev/null +++ b/log/fn.logger.html @@ -0,0 +1,3 @@ +logger in log - Rust

Function log::logger

source ·
pub fn logger() -> &'static dyn Log
Expand description

Returns a reference to the logger.

+

If a logger has not been set, a no-op implementation is returned.

+
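A tiny added sketch (not in the original page):

use log::logger;

// Safe to call even before any logger is installed: the returned no-op
// implementation simply discards everything.
logger().flush();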
\ No newline at end of file diff --git a/log/fn.max_level.html b/log/fn.max_level.html new file mode 100644 index 000000000..f57d7bbcb --- /dev/null +++ b/log/fn.max_level.html @@ -0,0 +1,5 @@ +max_level in log - Rust

Function log::max_level

source ·
pub fn max_level() -> LevelFilter
Expand description

Returns the current maximum log level.

+

The log!, error!, warn!, info!, debug!, and trace! macros check +this value and discard any message logged at a higher level. The maximum +log level is set by the set_max_level function.

+
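An added sketch (not in the original page); the summation stands in for genuinely expensive work:

use log::{debug, max_level, LevelFilter};

// Skip building an expensive message when it would be discarded anyway.
if max_level() >= LevelFilter::Debug {
    let report: u32 = (0..1_000).sum();
    debug!("diagnostic report: {report}");
}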
\ No newline at end of file diff --git a/log/fn.set_logger.html b/log/fn.set_logger.html new file mode 100644 index 000000000..d276a3621 --- /dev/null +++ b/log/fn.set_logger.html @@ -0,0 +1,40 @@ +set_logger in log - Rust

Function log::set_logger

source ·
pub fn set_logger(logger: &'static dyn Log) -> Result<(), SetLoggerError>
Expand description

Sets the global logger to a &'static Log.

+

This function may only be called once in the lifetime of a program. Any log +events that occur before the call to set_logger completes will be ignored.

+

This function does not typically need to be called manually. Logger +implementations should provide an initialization method that installs the +logger internally.

+

§Availability

+

This method is available even when the std feature is disabled. However, it is currently unavailable on thumbv6 targets, which lack support for some of the atomic operations used by this function. Even on those targets, set_logger_racy will be available.

+

§Errors

+

An error is returned if a logger has already been set.

+

§Examples

+
use log::{error, info, warn, Record, Level, Metadata, LevelFilter};
+
+static MY_LOGGER: MyLogger = MyLogger;
+
+struct MyLogger;
+
+impl log::Log for MyLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        metadata.level() <= Level::Info
+    }
+
+    fn log(&self, record: &Record) {
+        if self.enabled(record.metadata()) {
+            println!("{} - {}", record.level(), record.args());
+        }
+    }
+    fn flush(&self) {}
+}
+
+log::set_logger(&MY_LOGGER).unwrap();
+log::set_max_level(LevelFilter::Info);
+
+info!("hello log");
+warn!("warning");
+error!("oops");
+
\ No newline at end of file diff --git a/log/fn.set_logger_racy.html b/log/fn.set_logger_racy.html new file mode 100644 index 000000000..da2c8d1a0 --- /dev/null +++ b/log/fn.set_logger_racy.html @@ -0,0 +1,14 @@ +set_logger_racy in log - Rust

Function log::set_logger_racy

source ·
pub unsafe fn set_logger_racy(
+    logger: &'static dyn Log
+) -> Result<(), SetLoggerError>
Expand description

A thread-unsafe version of set_logger.

+

This function is available on all platforms, even those that do not have +support for atomics that is needed by set_logger.

+

In almost all cases, set_logger should be preferred.

+

§Safety

+

This function is only safe to call when no other logger initialization +function is called while this function still executes.

+

This can be upheld by (for example) making sure that there are no other +threads, and (on embedded) that interrupts are disabled.

+

It is safe to use other logging functions while this function runs +(including all logging macros).

+
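An added sketch (not part of the generated page) of a single-threaded early-init path; NopLogger and early_single_threaded_init are illustrative names, and the SAFETY comment states the assumption being relied on:

use log::{LevelFilter, Log, Metadata, Record};

struct NopLogger;

impl Log for NopLogger {
    fn enabled(&self, _: &Metadata) -> bool {
        false
    }
    fn log(&self, _: &Record) {}
    fn flush(&self) {}
}

static LOGGER: NopLogger = NopLogger;

fn early_single_threaded_init() {
    // SAFETY: called exactly once, before any other thread is spawned and
    // before any other logger initialization function can run.
    unsafe {
        log::set_logger_racy(&LOGGER).expect("no logger should be installed yet");
        log::set_max_level_racy(LevelFilter::Off);
    }
}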
\ No newline at end of file diff --git a/log/fn.set_max_level.html b/log/fn.set_max_level.html new file mode 100644 index 000000000..fc927896a --- /dev/null +++ b/log/fn.set_max_level.html @@ -0,0 +1,4 @@ +set_max_level in log - Rust

Function log::set_max_level

source ·
pub fn set_max_level(level: LevelFilter)
Expand description

Sets the global maximum log level.

+

Generally, this should only be called by the active logging implementation.

+

Note that Trace is the maximum level, because it provides the maximum amount of detail in the emitted logs.

+
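A minimal added sketch (not in the original page):

use log::LevelFilter;

// Typically called by the logging implementation right after it installs
// itself; until then the maximum level defaults to Off.
log::set_max_level(LevelFilter::Info);
assert_eq!(log::max_level(), LevelFilter::Info);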
\ No newline at end of file diff --git a/log/fn.set_max_level_racy.html b/log/fn.set_max_level_racy.html new file mode 100644 index 000000000..63194ea87 --- /dev/null +++ b/log/fn.set_max_level_racy.html @@ -0,0 +1,12 @@ +set_max_level_racy in log - Rust

Function log::set_max_level_racy

source ·
pub unsafe fn set_max_level_racy(level: LevelFilter)
Expand description

A thread-unsafe version of set_max_level.

+

This function is available on all platforms, even those that do not have +support for atomics that is needed by set_max_level.

+

In almost all cases, set_max_level should be preferred.

+

§Safety

+

This function is only safe to call when no other level setting function is +called while this function still executes.

+

This can be upheld by (for example) making sure that there are no other +threads, and (on embedded) that interrupts are disabled.

+

It is safe to use all other logging functions while this function runs +(including all logging macros).

+
\ No newline at end of file diff --git a/log/index.html b/log/index.html new file mode 100644 index 000000000..602ec0d51 --- /dev/null +++ b/log/index.html @@ -0,0 +1,226 @@ +log - Rust

Crate log

source ·
Expand description

A lightweight logging facade.

+

The log crate provides a single logging API that abstracts over the +actual logging implementation. Libraries can use the logging API provided +by this crate, and the consumer of those libraries can choose the logging +implementation that is most suitable for its use case.

+

If no logging implementation is selected, the facade falls back to a “noop” +implementation that ignores all log messages. The overhead in this case +is very small - just an integer load, comparison and jump.

+

A log request consists of a target, a level, and a body. A target is a +string which defaults to the module path of the location of the log request, +though that default may be overridden. Logger implementations typically use +the target to filter requests based on some user configuration.

+

§Usage

+

The basic use of the log crate is through the five logging macros: error!, +warn!, info!, debug! and trace! +where error! represents the highest-priority log messages +and trace! the lowest. The log messages are filtered by configuring +the log level to exclude messages with a lower priority. +Each of these macros accept format strings similarly to println!.

+

§In libraries

+

Libraries should link only to the log crate, and use the provided +macros to log whatever information will be useful to downstream consumers.

+

§Examples

+
use log::{info, warn};
+
+pub fn shave_the_yak(yak: &mut Yak) {
+    info!(target: "yak_events", "Commencing yak shaving for {yak:?}");
+
+    loop {
+        match find_a_razor() {
+            Ok(razor) => {
+                info!("Razor located: {razor}");
+                yak.shave(razor);
+                break;
+            }
+            Err(err) => {
+                warn!("Unable to locate a razor: {err}, retrying");
+            }
+        }
+    }
+}
+

§In executables

+

Executables should choose a logging implementation and initialize it early in the +runtime of the program. Logging implementations will typically include a +function to do this. Any log messages generated before +the implementation is initialized will be ignored.

+

The executable itself may use the log crate to log as well.

+

§Warning

+

The logging system may only be initialized once.

+

§Structured logging

+

If you enable the kv feature you can associate structured values +with your log records. If we take the example from before, we can include +some additional context besides what’s in the formatted message:

+ +
use log::{info, warn};
+
+pub fn shave_the_yak(yak: &mut Yak) {
+    info!(target: "yak_events", yak:serde; "Commencing yak shaving");
+
+    loop {
+        match find_a_razor() {
+            Ok(razor) => {
+                info!(razor; "Razor located");
+                yak.shave(razor);
+                break;
+            }
+            Err(e) => {
+                warn!(e:err; "Unable to locate a razor, retrying");
+            }
+        }
+    }
+}
+

See the [kv] module documentation for more details.

+

§Available logging implementations

+

In order to produce log output executables have to use +a logger implementation compatible with the facade. +There are many available implementations to choose from, +here are some of the most popular ones:

+ +

§Implementing a Logger

+

Loggers implement the Log trait. Here’s a very basic example that simply +logs all messages at the Error, Warn or +Info levels to stdout:

+ +
use log::{Record, Level, Metadata};
+
+struct SimpleLogger;
+
+impl log::Log for SimpleLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        metadata.level() <= Level::Info
+    }
+
+    fn log(&self, record: &Record) {
+        if self.enabled(record.metadata()) {
+            println!("{} - {}", record.level(), record.args());
+        }
+    }
+
+    fn flush(&self) {}
+}
+
+

Loggers are installed by calling the set_logger function. The maximum +log level also needs to be adjusted via the set_max_level function. The +logging facade uses this as an optimization to improve performance of log +messages at levels that are disabled. It’s important to set it, as it +defaults to Off, so no log messages will ever be captured! +In the case of our example logger, we’ll want to set the maximum log level +to Info, since we ignore any Debug or +Trace level log messages. A logging implementation should +provide a function that wraps a call to set_logger and +set_max_level, handling initialization of the logger:

+ +
use log::{SetLoggerError, LevelFilter};
+
+static LOGGER: SimpleLogger = SimpleLogger;
+
+pub fn init() -> Result<(), SetLoggerError> {
+    log::set_logger(&LOGGER)
+        .map(|()| log::set_max_level(LevelFilter::Info))
+}
+

Implementations that adjust their configurations at runtime should take care +to adjust the maximum log level as well.

+

§Use with std

+

set_logger requires you to provide a &'static Log, which can be hard to +obtain if your logger depends on some runtime configuration. The +set_boxed_logger function is available with the std Cargo feature. It is +identical to set_logger except that it takes a Box<Log> rather than a +&'static Log:

+ +
pub fn init() -> Result<(), SetLoggerError> {
+    log::set_boxed_logger(Box::new(SimpleLogger))
+        .map(|()| log::set_max_level(LevelFilter::Info))
+}
+

§Compile time filters

+

Log levels can be statically disabled at compile time by enabling one of these Cargo features:

+
  • max_level_off
  • max_level_error
  • max_level_warn
  • max_level_info
  • max_level_debug
  • max_level_trace
+

Log invocations at disabled levels will be skipped and will not even be present in the +resulting binary. These features control the value of the STATIC_MAX_LEVEL constant. The +logging macros check this value before logging a message. By default, no levels are disabled.

+

It is possible to override this level for release builds only with the following features:

+
  • release_max_level_off
  • release_max_level_error
  • release_max_level_warn
  • release_max_level_info
  • release_max_level_debug
  • release_max_level_trace
+

Libraries should avoid using the max level features because they’re global and can’t be changed +once they’re set.

+

For example, a crate can disable trace level logs in debug builds and trace, debug, and info +level logs in release builds with the following configuration:

+
[dependencies]
+log = { version = "0.4", features = ["max_level_debug", "release_max_level_warn"] }
+

§Crate Feature Flags

+

The following crate feature flags are available in addition to the filters. They are +configured in your Cargo.toml.

+
  • std allows use of std crate instead of the default core. Enables using std::error and set_boxed_logger functionality.
  • serde enables support for serialization and deserialization of Level and LevelFilter.
+
[dependencies]
+log = { version = "0.4", features = ["std", "serde"] }
+

§Version compatibility

+

The 0.3 and 0.4 versions of the log crate are almost entirely compatible. Log messages +made using log 0.3 will forward transparently to a logger implementation using log 0.4. Log +messages made using log 0.4 will forward to a logger implementation using log 0.3, but the +module path and file name information associated with the message will unfortunately be lost.

+

Macros§

  • Logs a message at the debug level.
  • Logs a message at the error level.
  • Logs a message at the info level.
  • The standard logging macro.
  • Determines if a message logged at the specified level in that module will +be logged.
  • Logs a message at the trace level.
  • Logs a message at the warn level.

Structs§

Enums§

  • An enum representing the available verbosity levels of the logger.
  • An enum representing the available verbosity level filters of the logger.

Constants§

Traits§

  • A trait encapsulating the operations required of a logger.

Functions§

\ No newline at end of file diff --git a/log/macro.debug!.html b/log/macro.debug!.html new file mode 100644 index 000000000..00bfa281a --- /dev/null +++ b/log/macro.debug!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.debug.html...

+ + + \ No newline at end of file diff --git a/log/macro.debug.html b/log/macro.debug.html new file mode 100644 index 000000000..44d52d6d7 --- /dev/null +++ b/log/macro.debug.html @@ -0,0 +1,12 @@ +debug in log - Rust

Macro log::debug

source ·
macro_rules! debug {
+    (target: $target:expr, $($arg:tt)+) => { ... };
+    ($($arg:tt)+) => { ... };
+}
Expand description

Logs a message at the debug level.

+

§Examples

+
use log::debug;
+
+let pos = Position { x: 3.234, y: -1.223 };
+
+debug!("New position: x: {}, y: {}", pos.x, pos.y);
+debug!(target: "app_events", "New position: x: {}, y: {}", pos.x, pos.y);
+
\ No newline at end of file diff --git a/log/macro.error!.html b/log/macro.error!.html new file mode 100644 index 000000000..3721cb89c --- /dev/null +++ b/log/macro.error!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.error.html...

+ + + \ No newline at end of file diff --git a/log/macro.error.html b/log/macro.error.html new file mode 100644 index 000000000..e63dd447c --- /dev/null +++ b/log/macro.error.html @@ -0,0 +1,12 @@ +error in log - Rust

Macro log::error

source ·
macro_rules! error {
+    (target: $target:expr, $($arg:tt)+) => { ... };
+    ($($arg:tt)+) => { ... };
+}
Expand description

Logs a message at the error level.

+

§Examples

+
use log::error;
+
+let (err_info, port) = ("No connection", 22);
+
+error!("Error: {err_info} on port {port}");
+error!(target: "app_events", "App Error: {err_info}, Port: {port}");
+
\ No newline at end of file diff --git a/log/macro.info!.html b/log/macro.info!.html new file mode 100644 index 000000000..140be2679 --- /dev/null +++ b/log/macro.info!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.info.html...

+ + + \ No newline at end of file diff --git a/log/macro.info.html b/log/macro.info.html new file mode 100644 index 000000000..8f8321cf6 --- /dev/null +++ b/log/macro.info.html @@ -0,0 +1,13 @@ +info in log - Rust

Macro log::info

source ·
macro_rules! info {
+    (target: $target:expr, $($arg:tt)+) => { ... };
+    ($($arg:tt)+) => { ... };
+}
Expand description

Logs a message at the info level.

+

§Examples

+
use log::info;
+
+let conn_info = Connection { port: 40, speed: 3.20 };
+
+info!("Connected to port {} at {} Mb/s", conn_info.port, conn_info.speed);
+info!(target: "connection_events", "Successful connection, port: {}, speed: {}",
+      conn_info.port, conn_info.speed);
+
\ No newline at end of file diff --git a/log/macro.log!.html b/log/macro.log!.html new file mode 100644 index 000000000..174235025 --- /dev/null +++ b/log/macro.log!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.log.html...

+ + + \ No newline at end of file diff --git a/log/macro.log.html b/log/macro.log.html new file mode 100644 index 000000000..9d1a2266d --- /dev/null +++ b/log/macro.log.html @@ -0,0 +1,17 @@ +log in log - Rust

Macro log::log

source ·
macro_rules! log {
+    (target: $target:expr, $lvl:expr, $($key:tt $(:$capture:tt)? $(= $value:expr)?),+; $($arg:tt)+) => { ... };
+    (target: $target:expr, $lvl:expr, $($arg:tt)+) => { ... };
+    ($lvl:expr, $($arg:tt)+) => { ... };
+}
Expand description

The standard logging macro.

+

This macro will generically log with the specified Level and format! +based argument list.

+

§Examples

+
use log::{log, Level};
+
+let data = (42, "Forty-two");
+let private_data = "private";
+
+log!(Level::Error, "Received errors: {}, {}", data.0, data.1);
+log!(target: "app_events", Level::Warn, "App warning: {}, {}, {}",
+    data.0, data.1, private_data);
+
\ No newline at end of file diff --git a/log/macro.log_enabled!.html b/log/macro.log_enabled!.html new file mode 100644 index 000000000..8b450c717 --- /dev/null +++ b/log/macro.log_enabled!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.log_enabled.html...

+ + + \ No newline at end of file diff --git a/log/macro.log_enabled.html b/log/macro.log_enabled.html new file mode 100644 index 000000000..bece21224 --- /dev/null +++ b/log/macro.log_enabled.html @@ -0,0 +1,20 @@ +log_enabled in log - Rust

Macro log::log_enabled

source ·
macro_rules! log_enabled {
+    (target: $target:expr, $lvl:expr) => { ... };
+    ($lvl:expr) => { ... };
+}
Expand description

Determines if a message logged at the specified level in that module will +be logged.

+

This can be used to avoid expensive computation of log message arguments if +the message would be ignored anyway.

+

§Examples

+
use log::Level::Debug;
+use log::{debug, log_enabled};
+
+if log_enabled!(Debug) {
+    let data = expensive_call();
+    debug!("expensive debug data: {} {}", data.x, data.y);
+}
+if log_enabled!(target: "Global", Debug) {
+   let data = expensive_call();
+   debug!(target: "Global", "expensive debug data: {} {}", data.x, data.y);
+}
+
\ No newline at end of file diff --git a/log/macro.trace!.html b/log/macro.trace!.html new file mode 100644 index 000000000..beceac429 --- /dev/null +++ b/log/macro.trace!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.trace.html...

+ + + \ No newline at end of file diff --git a/log/macro.trace.html b/log/macro.trace.html new file mode 100644 index 000000000..f65d2f175 --- /dev/null +++ b/log/macro.trace.html @@ -0,0 +1,14 @@ +trace in log - Rust

Macro log::trace

source ·
macro_rules! trace {
+    (target: $target:expr, $($arg:tt)+) => { ... };
+    ($($arg:tt)+) => { ... };
+}
Expand description

Logs a message at the trace level.

+

§Examples

+
use log::trace;
+
+let pos = Position { x: 3.234, y: -1.223 };
+
+trace!("Position is: x: {}, y: {}", pos.x, pos.y);
+trace!(target: "app_events", "x is {} and y is {}",
+       if pos.x >= 0.0 { "positive" } else { "negative" },
+       if pos.y >= 0.0 { "positive" } else { "negative" });
+
\ No newline at end of file diff --git a/log/macro.warn!.html b/log/macro.warn!.html new file mode 100644 index 000000000..4871ab8c6 --- /dev/null +++ b/log/macro.warn!.html @@ -0,0 +1,11 @@ + + + + + Redirection + + +

Redirecting to macro.warn.html...

+ + + \ No newline at end of file diff --git a/log/macro.warn.html b/log/macro.warn.html new file mode 100644 index 000000000..f7a91a9f6 --- /dev/null +++ b/log/macro.warn.html @@ -0,0 +1,12 @@ +warn in log - Rust

Macro log::warn

source ·
macro_rules! warn {
+    (target: $target:expr, $($arg:tt)+) => { ... };
+    ($($arg:tt)+) => { ... };
+}
Expand description

Logs a message at the warn level.

+

§Examples

+
use log::warn;
+
+let warn_description = "Invalid Input";
+
+warn!("Warning! {warn_description}!");
+warn!(target: "input_events", "App received warning: {warn_description}");
+
\ No newline at end of file diff --git a/log/sidebar-items.js b/log/sidebar-items.js new file mode 100644 index 000000000..348505518 --- /dev/null +++ b/log/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"constant":["STATIC_MAX_LEVEL"],"enum":["Level","LevelFilter"],"fn":["logger","max_level","set_logger","set_logger_racy","set_max_level","set_max_level_racy"],"macro":["debug","error","info","log","log_enabled","trace","warn"],"struct":["Metadata","MetadataBuilder","ParseLevelError","Record","RecordBuilder","SetLoggerError"],"trait":["Log"]}; \ No newline at end of file diff --git a/log/struct.Metadata.html b/log/struct.Metadata.html new file mode 100644 index 000000000..511006b9a --- /dev/null +++ b/log/struct.Metadata.html @@ -0,0 +1,51 @@ +Metadata in log - Rust

Struct log::Metadata

source ·
pub struct Metadata<'a> { /* private fields */ }
Expand description

Metadata about a log message.

+

§Use

+

Metadata structs are created when users of the library use +logging macros.

+

They are consumed by implementations of the Log trait in the +enabled method.

+

Records use Metadata to determine the log message’s severity +and target.

+

Users should use the log_enabled! macro in their code to avoid +constructing expensive log messages.

+

§Examples

+
use log::{Record, Level, Metadata};
+
+struct MyLogger;
+
+impl log::Log for MyLogger {
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        metadata.level() <= Level::Info
+    }
+
+    fn log(&self, record: &Record) {
+        if self.enabled(record.metadata()) {
+            println!("{} - {}", record.level(), record.args());
+        }
+    }
+    fn flush(&self) {}
+}
+
+

Implementations§

source§

impl<'a> Metadata<'a>

source

pub fn builder() -> MetadataBuilder<'a>

Returns a new builder.

+
source

pub fn level(&self) -> Level

The verbosity level of the message.

+
source

pub fn target(&self) -> &'a str

The name of the target of the directive.

+

Trait Implementations§

source§

impl<'a> Clone for Metadata<'a>

source§

fn clone(&self) -> Metadata<'a>

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<'a> Debug for Metadata<'a>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a> Hash for Metadata<'a>

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where + H: Hasher, + Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl<'a> Ord for Metadata<'a>

source§

fn cmp(&self, other: &Metadata<'a>) -> Ordering

This method returns an Ordering between self and other. Read more
1.21.0 · source§

fn max(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where + Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl<'a> PartialEq for Metadata<'a>

source§

fn eq(&self, other: &Metadata<'a>) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl<'a> PartialOrd for Metadata<'a>

source§

fn partial_cmp(&self, other: &Metadata<'a>) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl<'a> Eq for Metadata<'a>

source§

impl<'a> StructuralPartialEq for Metadata<'a>

Auto Trait Implementations§

§

impl<'a> Freeze for Metadata<'a>

§

impl<'a> RefUnwindSafe for Metadata<'a>

§

impl<'a> Send for Metadata<'a>

§

impl<'a> Sync for Metadata<'a>

§

impl<'a> Unpin for Metadata<'a>

§

impl<'a> UnwindSafe for Metadata<'a>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/struct.MetadataBuilder.html b/log/struct.MetadataBuilder.html new file mode 100644 index 000000000..105b9c81c --- /dev/null +++ b/log/struct.MetadataBuilder.html @@ -0,0 +1,40 @@ +MetadataBuilder in log - Rust

Struct log::MetadataBuilder

source ·
pub struct MetadataBuilder<'a> { /* private fields */ }
Expand description

Builder for Metadata.

+

Typically should only be used by log library creators or for testing and “shim loggers”. +The MetadataBuilder can set the different parameters of a Metadata object, and returns +the created object when build is called.

+

§Example

+
let target = "myApp";
+use log::{Level, MetadataBuilder};
+let metadata = MetadataBuilder::new()
+                    .level(Level::Debug)
+                    .target(target)
+                    .build();
+

Implementations§

source§

impl<'a> MetadataBuilder<'a>

source

pub fn new() -> MetadataBuilder<'a>

Construct a new MetadataBuilder.

+

The default options are:

+
  • level: Level::Info
  • target: ""
+
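
As a quick illustrative check of those defaults (an editor's sketch, not part of the generated page; it uses only the builder and the Metadata accessors documented here):

use log::{Level, MetadataBuilder};

// A builder with no setters applied produces the documented defaults.
let metadata = MetadataBuilder::new().build();
assert_eq!(metadata.level(), Level::Info);
assert_eq!(metadata.target(), "");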
source

pub fn level(&mut self, arg: Level) -> &mut MetadataBuilder<'a>

Setter for level.

+
source

pub fn target(&mut self, target: &'a str) -> &mut MetadataBuilder<'a>

Setter for target.

+
source

pub fn build(&self) -> Metadata<'a>

Returns a Metadata object.

+

Trait Implementations§

source§

impl<'a> Debug for MetadataBuilder<'a>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a> Default for MetadataBuilder<'a>

source§

fn default() -> Self

Returns the “default value” for a type. Read more
source§

impl<'a> Hash for MetadataBuilder<'a>

source§

fn hash<__H: Hasher>(&self, state: &mut __H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where + H: Hasher, + Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
source§

impl<'a> Ord for MetadataBuilder<'a>

source§

fn cmp(&self, other: &MetadataBuilder<'a>) -> Ordering

This method returns an Ordering between self and other. Read more
1.21.0 · source§

fn max(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where + Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where + Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
source§

impl<'a> PartialEq for MetadataBuilder<'a>

source§

fn eq(&self, other: &MetadataBuilder<'a>) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl<'a> PartialOrd for MetadataBuilder<'a>

source§

fn partial_cmp(&self, other: &MetadataBuilder<'a>) -> Option<Ordering>

This method returns an ordering between self and other values if one exists. Read more
1.0.0 · source§

fn lt(&self, other: &Rhs) -> bool

This method tests less than (for self and other) and is used by the < operator. Read more
1.0.0 · source§

fn le(&self, other: &Rhs) -> bool

This method tests less than or equal to (for self and other) and is used by the <= +operator. Read more
1.0.0 · source§

fn gt(&self, other: &Rhs) -> bool

This method tests greater than (for self and other) and is used by the > operator. Read more
1.0.0 · source§

fn ge(&self, other: &Rhs) -> bool

This method tests greater than or equal to (for self and other) and is used by the >= +operator. Read more
source§

impl<'a> Eq for MetadataBuilder<'a>

source§

impl<'a> StructuralPartialEq for MetadataBuilder<'a>

Auto Trait Implementations§

§

impl<'a> Freeze for MetadataBuilder<'a>

§

impl<'a> RefUnwindSafe for MetadataBuilder<'a>

§

impl<'a> Send for MetadataBuilder<'a>

§

impl<'a> Sync for MetadataBuilder<'a>

§

impl<'a> Unpin for MetadataBuilder<'a>

§

impl<'a> UnwindSafe for MetadataBuilder<'a>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/struct.ParseLevelError.html b/log/struct.ParseLevelError.html new file mode 100644 index 000000000..f32d8a2e5 --- /dev/null +++ b/log/struct.ParseLevelError.html @@ -0,0 +1,14 @@ +ParseLevelError in log - Rust

Struct log::ParseLevelError

source ·
pub struct ParseLevelError(/* private fields */);
Expand description

The type returned by from_str when the string doesn’t match any of the log levels.

+
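
For illustration (an editor's sketch, not part of the generated page), the error shows up when parsing an unknown level name via FromStr:

use log::Level;

// Known level names parse (case-insensitively); anything else yields ParseLevelError.
assert_eq!("info".parse::<Level>(), Ok(Level::Info));
assert!("verbose".parse::<Level>().is_err());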

Trait Implementations§

source§

impl Debug for ParseLevelError

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Display for ParseLevelError

source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl PartialEq for ParseLevelError

source§

fn eq(&self, other: &ParseLevelError) -> bool

This method tests for self and other values to be equal, and is used +by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always +sufficient, and should not be overridden without very good reason.
source§

impl Eq for ParseLevelError

source§

impl StructuralPartialEq for ParseLevelError

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/struct.Record.html b/log/struct.Record.html new file mode 100644 index 000000000..4455a8cf8 --- /dev/null +++ b/log/struct.Record.html @@ -0,0 +1,53 @@ +Record in log - Rust

Struct log::Record

source ·
pub struct Record<'a> { /* private fields */ }
Expand description

The “payload” of a log message.

+

§Use

+

Record structures are passed as parameters to the log +method of the Log trait. Logger implementors manipulate these +structures in order to display log messages. Records are automatically +created by the log! macro and so are not seen by log users.

+

Note that the level() and target() accessors are equivalent to +self.metadata().level() and self.metadata().target() respectively. +These methods are provided as a convenience for users of this structure.

+
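
A small illustrative check of that equivalence (an editor's sketch, not part of the generated page):

use log::{Level, Record};

let record = Record::builder()
    .args(format_args!("hello"))
    .level(Level::Warn)
    .target("myApp")
    .build();

// The convenience accessors mirror the metadata accessors.
assert_eq!(record.level(), record.metadata().level());
assert_eq!(record.target(), record.metadata().target());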

§Example

+

The following example shows a simple logger that displays the level, +module path, and message of any Record that is passed to it.

+ +
struct SimpleLogger;
+
+impl log::Log for SimpleLogger {
+   fn enabled(&self, _metadata: &log::Metadata) -> bool {
+       true
+   }
+
+   fn log(&self, record: &log::Record) {
+       if !self.enabled(record.metadata()) {
+           return;
+       }
+
+       println!("{}:{} -- {}",
+                record.level(),
+                record.target(),
+                record.args());
+   }
+   fn flush(&self) {}
+}
+

Implementations§

source§

impl<'a> Record<'a>

source

pub fn builder() -> RecordBuilder<'a>

Returns a new builder.

+
source

pub fn args(&self) -> &Arguments<'a>

The message body.

+
source

pub fn metadata(&self) -> &Metadata<'a>

Metadata about the log directive.

+
source

pub fn level(&self) -> Level

The verbosity level of the message.

+
source

pub fn target(&self) -> &'a str

The name of the target of the directive.

+
source

pub fn module_path(&self) -> Option<&'a str>

The module path of the message.

+
source

pub fn module_path_static(&self) -> Option<&'static str>

The module path of the message, if it is a 'static string.

+
source

pub fn file(&self) -> Option<&'a str>

The source file containing the message.

+
source

pub fn file_static(&self) -> Option<&'static str>

The source file containing the message, if it is a 'static string.

+
source

pub fn line(&self) -> Option<u32>

The line containing the message.

+

Trait Implementations§

source§

impl<'a> Clone for Record<'a>

source§

fn clone(&self) -> Record<'a>

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<'a> Debug for Record<'a>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

§

impl<'a> Freeze for Record<'a>

§

impl<'a> !RefUnwindSafe for Record<'a>

§

impl<'a> !Send for Record<'a>

§

impl<'a> !Sync for Record<'a>

§

impl<'a> Unpin for Record<'a>

§

impl<'a> !UnwindSafe for Record<'a>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/struct.RecordBuilder.html b/log/struct.RecordBuilder.html new file mode 100644 index 000000000..2b4851f36 --- /dev/null +++ b/log/struct.RecordBuilder.html @@ -0,0 +1,67 @@ +RecordBuilder in log - Rust

Struct log::RecordBuilder

source ·
pub struct RecordBuilder<'a> { /* private fields */ }
Expand description

Builder for Record.

+

Typically should only be used by log library creators or for testing and “shim loggers”. +The RecordBuilder can set the different parameters of Record object, and returns +the created object when build is called.

+

§Examples

+
use log::{Level, Record};
+
+let record = Record::builder()
+                .args(format_args!("Error!"))
+                .level(Level::Error)
+                .target("myApp")
+                .file(Some("server.rs"))
+                .line(Some(144))
+                .module_path(Some("server"))
+                .build();
+

Alternatively, use MetadataBuilder:

+ +
use log::{Record, Level, MetadataBuilder};
+
+let error_metadata = MetadataBuilder::new()
+                        .target("myApp")
+                        .level(Level::Error)
+                        .build();
+
+let record = Record::builder()
+                .metadata(error_metadata)
+                .args(format_args!("Error!"))
+                .line(Some(433))
+                .file(Some("app.rs"))
+                .module_path(Some("server"))
+                .build();
+

Implementations§

source§

impl<'a> RecordBuilder<'a>

source

pub fn new() -> RecordBuilder<'a>

Construct a new RecordBuilder.

+

The default options are:

  • args: format_args!("")
  • metadata: Metadata::builder().build()
  • module_path: None
  • file: None
  • line: None
source

pub fn args(&mut self, args: Arguments<'a>) -> &mut RecordBuilder<'a>

Set args.

+
source

pub fn metadata(&mut self, metadata: Metadata<'a>) -> &mut RecordBuilder<'a>

Set metadata. Construct a Metadata object with MetadataBuilder.

+
source

pub fn level(&mut self, level: Level) -> &mut RecordBuilder<'a>

source

pub fn target(&mut self, target: &'a str) -> &mut RecordBuilder<'a>

source

pub fn module_path(&mut self, path: Option<&'a str>) -> &mut RecordBuilder<'a>

source

pub fn module_path_static(&mut self, path: Option<&'static str>) -> &mut RecordBuilder<'a>

Set module_path to a 'static string

+
source

pub fn file(&mut self, file: Option<&'a str>) -> &mut RecordBuilder<'a>

Set file

+
source

pub fn file_static(&mut self, file: Option<&'static str>) -> &mut RecordBuilder<'a>

Set file to a 'static string.

+
source

pub fn line(&mut self, line: Option<u32>) -> &mut RecordBuilder<'a>

Set line

+
source

pub fn build(&self) -> Record<'a>

Invoke the builder and return a Record

+

Trait Implementations§

source§

impl<'a> Debug for RecordBuilder<'a>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl<'a> Default for RecordBuilder<'a>

source§

fn default() -> Self

Returns the “default value” for a type. Read more

Auto Trait Implementations§

§

impl<'a> Freeze for RecordBuilder<'a>

§

impl<'a> !RefUnwindSafe for RecordBuilder<'a>

§

impl<'a> !Send for RecordBuilder<'a>

§

impl<'a> !Sync for RecordBuilder<'a>

§

impl<'a> Unpin for RecordBuilder<'a>

§

impl<'a> !UnwindSafe for RecordBuilder<'a>

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/struct.SetLoggerError.html b/log/struct.SetLoggerError.html new file mode 100644 index 000000000..4aaacd561 --- /dev/null +++ b/log/struct.SetLoggerError.html @@ -0,0 +1,12 @@ +SetLoggerError in log - Rust

Struct log::SetLoggerError

source ·
pub struct SetLoggerError(/* private fields */);
Expand description

The type returned by set_logger if set_logger has already been called.

+
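
A minimal sketch of how this error surfaces (editorial illustration; NopLogger is a hypothetical stand-in, while log::set_logger and the Log trait are the real APIs):

use log::{Log, Metadata, Record};

// Hypothetical no-op logger, used only to trigger the error.
struct NopLogger;

impl Log for NopLogger {
    fn enabled(&self, _metadata: &Metadata) -> bool { false }
    fn log(&self, _record: &Record) {}
    fn flush(&self) {}
}

static LOGGER: NopLogger = NopLogger;

// The first installation succeeds; a second attempt returns SetLoggerError.
assert!(log::set_logger(&LOGGER).is_ok());
assert!(log::set_logger(&LOGGER).is_err());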

Trait Implementations§

source§

impl Debug for SetLoggerError

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl Display for SetLoggerError

source§

fn fmt(&self, fmt: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/log/trait.Log.html b/log/trait.Log.html new file mode 100644 index 000000000..959682820 --- /dev/null +++ b/log/trait.Log.html @@ -0,0 +1,23 @@ +Log in log - Rust

Trait log::Log

source ·
pub trait Log: Sync + Send {
+    // Required methods
+    fn enabled(&self, metadata: &Metadata<'_>) -> bool;
+    fn log(&self, record: &Record<'_>);
+    fn flush(&self);
+}
Expand description

A trait encapsulating the operations required of a logger.

+

Required Methods§

source

fn enabled(&self, metadata: &Metadata<'_>) -> bool

Determines if a log message with the specified metadata would be +logged.

+

This is used by the log_enabled! macro to allow callers to avoid +expensive computation of log message arguments if the message would be +discarded anyway.

+
§For implementors
+

This method isn’t called automatically by the log! macros. +It’s up to an implementation of the Log trait to call enabled in its own +log method implementation to guarantee that filtering is applied.

+
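
To make the log_enabled! point concrete, a hedged sketch of skipping expensive argument construction when the level is disabled (editorial illustration, not part of the generated page):

use log::{debug, log_enabled, Level};

// Only pay for the expensive argument when Debug is actually enabled.
if log_enabled!(Level::Debug) {
    let expensive = (0..1_000u32).sum::<u32>();
    debug!("sum = {}", expensive);
}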
source

fn log(&self, record: &Record<'_>)

Logs the Record.

+
§For implementors
+

Note that enabled is not necessarily called before this method. +Implementations of log should perform all necessary filtering +internally.

+
source

fn flush(&self)

Flushes any buffered records.

+
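
Tying the notes above together, a hedged sketch of an implementor that applies its own filtering inside log, since enabled is not called automatically (MaxLevelLogger is a hypothetical name, not part of the generated page):

use log::{Level, Log, Metadata, Record};

// Hypothetical logger that filters on a fixed maximum level.
struct MaxLevelLogger {
    max: Level,
}

impl Log for MaxLevelLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        metadata.level() <= self.max
    }

    fn log(&self, record: &Record) {
        // enabled is not called for us, so filter here as well.
        if self.enabled(record.metadata()) {
            println!("{} - {}", record.level(), record.args());
        }
    }

    fn flush(&self) {}
}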

Implementations on Foreign Types§

source§

impl<T> Log for &T
where + T: ?Sized + Log,

source§

fn enabled(&self, metadata: &Metadata<'_>) -> bool

source§

fn log(&self, record: &Record<'_>)

source§

fn flush(&self)

Implementors§

\ No newline at end of file diff --git a/rustc_hash/all.html b/rustc_hash/all.html new file mode 100644 index 000000000..fb41a93fd --- /dev/null +++ b/rustc_hash/all.html @@ -0,0 +1 @@ +List of all items in this crate

List of all items

Structs

\ No newline at end of file diff --git a/rustc_hash/index.html b/rustc_hash/index.html new file mode 100644 index 000000000..7729a3e14 --- /dev/null +++ b/rustc_hash/index.html @@ -0,0 +1,9 @@ +rustc_hash - Rust

Crate rustc_hash

source ·
Expand description

Fast, non-cryptographic hash used by rustc and Firefox.

+

§Example

+
use rustc_hash::FxHashMap;
+let mut map: FxHashMap<u32, u32> = FxHashMap::default();
+map.insert(22, 44);
+

Structs§

  • FxHasher: A speedy hash algorithm for use within rustc. The hashmap in liballoc by default uses SipHash which isn’t quite as speedy as we want. In the compiler we’re not really worried about DOS attempts, so we use a fast non-cryptographic hash.
\ No newline at end of file diff --git a/rustc_hash/sidebar-items.js b/rustc_hash/sidebar-items.js new file mode 100644 index 000000000..f40a11bab --- /dev/null +++ b/rustc_hash/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"struct":["FxHasher"]}; \ No newline at end of file diff --git a/rustc_hash/struct.FxHasher.html b/rustc_hash/struct.FxHasher.html new file mode 100644 index 000000000..5b6fa3ba8 --- /dev/null +++ b/rustc_hash/struct.FxHasher.html @@ -0,0 +1,21 @@ +FxHasher in rustc_hash - Rust

Struct rustc_hash::FxHasher

source ·
pub struct FxHasher { /* private fields */ }
Expand description

A speedy hash algorithm for use within rustc. The hashmap in liballoc +by default uses SipHash which isn’t quite as speedy as we want. In the +compiler we’re not really worried about DOS attempts, so we use a fast +non-cryptographic hash.

+

This is the same as the algorithm used by Firefox – which is a homespun +one not based on any widely-known algorithm – though modified to produce +64-bit hash values instead of 32-bit hash values. It consistently +out-performs an FNV-based hash within rustc itself – the collision rate is +similar or slightly worse than FNV, but the speed of the hash function +itself is much higher because it works on up to 8 bytes at a time.

+
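
An illustrative sketch of driving FxHasher directly through the standard Hasher trait (editorial addition, not part of the generated page):

use std::hash::Hasher;
use rustc_hash::FxHasher;

// FxHasher implements Default and Hasher, so it can be fed bytes by hand.
let mut hasher = FxHasher::default();
hasher.write_u64(0xcafe);
hasher.write(b"rustc");
let digest: u64 = hasher.finish();
println!("fx digest = {:#x}", digest);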

Trait Implementations§

source§

impl Default for FxHasher

source§

fn default() -> FxHasher

Returns the “default value” for a type. Read more
source§

impl Hasher for FxHasher

source§

fn write(&mut self, bytes: &[u8])

Writes some data into this Hasher. Read more
source§

fn write_u8(&mut self, i: u8)

Writes a single u8 into this hasher.
source§

fn write_u16(&mut self, i: u16)

Writes a single u16 into this hasher.
source§

fn write_u32(&mut self, i: u32)

Writes a single u32 into this hasher.
source§

fn write_u64(&mut self, i: u64)

Writes a single u64 into this hasher.
source§

fn write_usize(&mut self, i: usize)

Writes a single usize into this hasher.
source§

fn finish(&self) -> u64

Returns the hash value for the values written so far. Read more
1.26.0 · source§

fn write_u128(&mut self, i: u128)

Writes a single u128 into this hasher.
1.3.0 · source§

fn write_i8(&mut self, i: i8)

Writes a single i8 into this hasher.
1.3.0 · source§

fn write_i16(&mut self, i: i16)

Writes a single i16 into this hasher.
1.3.0 · source§

fn write_i32(&mut self, i: i32)

Writes a single i32 into this hasher.
1.3.0 · source§

fn write_i64(&mut self, i: i64)

Writes a single i64 into this hasher.
1.26.0 · source§

fn write_i128(&mut self, i: i128)

Writes a single i128 into this hasher.
1.3.0 · source§

fn write_isize(&mut self, i: isize)

Writes a single isize into this hasher.
source§

fn write_length_prefix(&mut self, len: usize)

🔬This is a nightly-only experimental API. (hasher_prefixfree_extras)
Writes a length prefix into this hasher, as part of being prefix-free. Read more
source§

fn write_str(&mut self, s: &str)

🔬This is a nightly-only experimental API. (hasher_prefixfree_extras)
Writes a single str into this hasher. Read more

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where + T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where + T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where + T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
source§

impl<T> From<T> for T

source§

fn from(t: T) -> T

Returns the argument unchanged.

+
source§

impl<T, U> Into<U> for T
where + U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

+

That is, this conversion is whatever the implementation of +From<T> for U chooses to do.

+
source§

impl<T, U> TryFrom<U> for T
where + U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where + U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/search-index.js b/search-index.js new file mode 100644 index 000000000..a6006cb58 --- /dev/null +++ b/search-index.js @@ -0,0 +1,8 @@ +var searchIndex = new Map(JSON.parse('[\ +["cactusref",{"t":"KIIFFMNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNCNNNNNNNNNNNNNNNNNNNNNNNNNNNNMNNNN","n":["Adopt","CactusRef","CactusWeakRef","Rc","Weak","adopt_unchecked","adopt_unchecked","as_ptr","as_ptr","as_ref","assume_init","borrow","borrow","borrow","borrow_mut","borrow_mut","clone","clone","clone_into","clone_into","cmp","decrement_strong_count","default","default","deref","downgrade","drop","drop","eq","equivalent","equivalent","fmt","fmt","fmt","fmt","from","from","from","from","from","from_raw","from_raw","ge","get_mut","get_mut_unchecked","gt","hash","implementing_self_referential_data_structures","increment_strong_count","into","into","into_raw","into_raw","le","lt","make_mut","ne","new","new","new_uninit","partial_cmp","pin","ptr_eq","ptr_eq","strong_count","strong_count","to_owned","to_owned","to_string","try_from","try_from","try_into","try_into","try_unwrap","type_id","type_id","unadopt","unadopt","upgrade","weak_count","weak_count"],"q":[[0,"cactusref"],[81,"cactusref::adopt"],[82,"cactusref::rc"],[83,"core::mem::maybe_uninit"],[84,"core::cmp"],[85,"core::default"],[86,"core::fmt"],[87,"alloc::boxed"],[88,"core::option"],[89,"core::hash"],[90,"core::clone"],[91,"core::pin"],[92,"alloc::string"],[93,"core::result"],[94,"core::any"]],"i":[0,0,0,0,0,1,4,4,5,4,4,4,4,5,4,5,4,5,4,5,4,4,4,5,4,4,4,5,4,4,4,4,4,4,5,4,4,4,4,5,4,5,4,4,4,4,4,0,4,4,5,4,5,4,4,4,4,4,5,4,4,4,4,5,4,5,4,5,4,4,5,4,5,4,4,5,1,4,5,4,5],"f":"`````{{{d{b}}{d{b}}}f}{{{d{{h{c}}}}{d{{h{c}}}}}f{}}{{{d{{h{c}}}}}{}{}}{{{d{{j{c}}}}}{}{}}{{{d{{h{c}}}}}{{d{c}}}{}}{{{h{{l{c}}}}}{{h{c}}}{}}{{{d{c}}}{{d{e}}}{}{}}20{{{d{nc}}}{{d{ne}}}{}{}}0{{{d{{h{c}}}}}{{h{c}}}{}}{{{d{{j{c}}}}}{{j{c}}}{}}{{{d{c}}{d{ne}}}f{}{}}0{{{d{{h{c}}}}{d{{h{c}}}}}A`Ab}{{}f}{{}{{h{c}}}Ad}{{}{{j{c}}}{}}:{{{d{{h{c}}}}}{{j{c}}}{}}{{{d{n{h{c}}}}}f{}}{{{d{n{j{c}}}}}f{}}{{{d{{h{c}}}}{d{{h{c}}}}}AfAh}{{{d{c}}{d{e}}}Af{}{}}0{{{d{{h{c}}}}{d{nAj}}}Al{}}{{{d{{h{c}}}}{d{nAj}}}AlAn}{{{d{{h{c}}}}{d{nAj}}}AlB`}{{{d{{j{c}}}}{d{nAj}}}AlB`}{{{Bb{c}}}{{h{c}}}{}}{c{{h{c}}}{}}{Bdc{}}{cc{}}0{{}{{h{c}}}{}}>{{{d{{h{c}}}}{d{{h{c}}}}}AfBf}{{{d{n{h{c}}}}}{{Bh{{d{nc}}}}}{}}{{{d{n{h{c}}}}}{{d{nc}}}{}}2{{{d{{h{c}}}}{d{ne}}}fBjBl}`{{}f}{ce{}{}}0{{{h{c}}}{}{}}{{{j{c}}}{}{}}77{{{d{n{h{c}}}}}{{d{nc}}}Bn}{{{d{{h{c}}}}{d{{h{c}}}}}AfAh}={{}{{j{c}}}{}}{{}{{h{{l{c}}}}}{}}{{{d{{h{c}}}}{d{{h{c}}}}}{{Bh{A`}}}Bf}{c{{C`{{h{c}}}}}{}}{{{d{{h{c}}}}{d{{h{c}}}}}Af{}}{{{d{{j{c}}}}{d{{j{c}}}}}Af{}}{{{d{{h{c}}}}}Cb{}}{{{d{{j{c}}}}}Cb{}}{{{d{c}}}e{}{}}0{{{d{c}}}Cd{}}{c{{Cf{e}}}{}{}}000{{{h{c}}}{{Cf{c{h{c}}}}}{}}{{{d{c}}}Ch{}}0{{{d{b}}{d{b}}}f}{{{d{{h{c}}}}{d{{h{c}}}}}f{}}{{{d{{j{c}}}}}{{Bh{{h{c}}}}}{}}98","D":"G`","p":[[10,"Adopt",0,81],[1,"reference"],[1,"unit"],[5,"Rc",0,82],[5,"Weak",0,82],[20,"MaybeUninit",83],[0,"mut"],[6,"Ordering",84],[10,"Ord",84],[10,"Default",85],[1,"bool"],[10,"PartialEq",84],[5,"Formatter",86],[8,"Result",86],[10,"Display",86],[10,"Debug",86],[5,"Box",87],[1,"never"],[10,"PartialOrd",84],[6,"Option",88],[10,"Hash",89],[10,"Hasher",89],[10,"Clone",90],[5,"Pin",91],[1,"usize"],[5,"String",92],[6,"Result",93],[5,"TypeId",94]],"r":[[0,81],[3,82],[4,82]],"b":[[31,"impl-Pointer-for-Rc%3CT%3E"],[32,"impl-Display-for-Rc%3CT%3E"],[33,"impl-Debug-for-Rc%3CT%3E"],[35,"impl-From%3CBox%3CT%3E%3E-for-Rc%3CT%3E"],[36,"impl-From%3CT%3E-for-Rc%3CT%3E"]],"c":"OjAAAAAAAAA=","e":"Oz
AAAAEAABkACQAKAAAADAAEABMAAQAZAAAAHgAFACYAAAAvAAAAQwAGAEsAAQA="}],\ +["hashbrown",{"t":"PPKFFFGNNNNNMNNNCCCNNNNNOGFGGFFFFFFFFPPPFFFFFGFFPPPFFFFNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNONNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNONNNNNNFFGFFFFFPFFFPFNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNFFGFFFFFPFPFNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNN","n":["AllocError","CapacityOverflow","Equivalent","HashMap","HashSet","HashTable","TryReserveError","borrow","borrow_mut","clone","clone_into","eq","equivalent","equivalent","fmt","from","hash_map","hash_set","hash_table","into","to_owned","try_from","try_into","type_id","layout","DefaultHashBuilder","Drain","Entry","EntryRef","ExtractIf","HashMap","IntoIter","IntoKeys","IntoValues","Iter","IterMut","Keys","Occupied","Occupied","Occupied","OccupiedEntry","OccupiedEntryRef","OccupiedError","RawEntryBuilder","RawEntryBuilderMut","RawEntryMut","RawOccupiedEntryMut","RawVacantEntryMut","Vacant","Vacant","Vacant","VacantEntry","VacantEntryRef","Values","ValuesMut","allocator","and_modify","and_modify","and_modify","and_replace_entry_with","and_replace_entry_with","and_replace_entry_with","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","capacity","clear","clone","clone","clone","clone","clone_from","clone_into","clone_into","clone_into","clone_into","contains_key","default","drain","entry","entry","entry_ref","eq","equivalent","extend","extend","extend","extract_if","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fold","fold","fold","fold","fold","fold","fold","fold","fold","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from","from_hash","from_hash","from_iter","from_key","from_key","from_key_hashed_nocheck","from_key_hashed_nocheck","get","get","get","get","get_key_value","get_key_value","get_key_value_mut","get_key_value_mut","get_many_key_value_mut","get_many_key_value_unchecked_mut","get_many_mut","get_many_unchecked_mut","get_mut","get_mut","get_mut","get_mut","hasher","index","insert","insert","insert","insert","insert","insert","insert","insert","insert","insert","insert_hashed_nocheck","insert_key","insert_unique_unchecked","insert_with_hasher","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into","into_iter","into_iter","in
to_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_key","into_key","into_key","into_key_value","into_keys","into_mut","into_mut","into_mut","into_values","is_empty","iter","iter_mut","key","key","key","key","key","key","key","key_mut","keys","len","len","len","len","len","len","len","len","len","len","next","next","next","next","next","next","next","next","next","next","or_default","or_default","or_insert","or_insert","or_insert","or_insert_with","or_insert_with","or_insert_with","or_insert_with_key","or_insert_with_key","raw_entry","raw_entry_mut","remove","remove","remove","remove","remove_entry","remove_entry","remove_entry","remove_entry","replace_entry","replace_entry","replace_entry_with","replace_entry_with","replace_entry_with","replace_key","replace_key","reserve","retain","shrink_to","shrink_to_fit","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","to_owned","to_owned","to_owned","to_owned","to_string","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_insert","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_reserve","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","value","values","values_mut","with_capacity_and_hasher","with_capacity_and_hasher_in","with_hasher","with_hasher_in","Difference","Drain","Entry","ExtractIf","HashSet","Intersection","IntoIter","Iter","Occupied","OccupiedEntry","SymmetricDifference","Union","Vacant","VacantEntry","allocator","bitand","bitor","bitxor","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","capacity","clear","clone","clone","clone","clone","clone","clone","clone_from","clone_into","clone_into","clone_into","clone_into","clone_into","clone_into","contains","default","difference","drain","entry","eq","equivalent","extend","extend","extract_if","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fold","fold","fold","fold","fold","fold","fold","from","from","from","from","from","from","from","from","from","from","from","from","from","from_iter","get","get","get","get","get_or_insert","get_or_insert_owned","get_or_insert_with","hasher","insert","insert","insert","insert_unique_unchecked","intersection","into","into","into","into","into","into","into","into","into","into","into","into","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_value","is_disjoint","is_empty","is_subset","is_superset","iter","len","len","len","len","next","next","next","next","next","next","next","next","or_insert","remove","remove","replace","replace","reserve","retain","shrink_to","shrink_to_fit","size_hi
nt","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","size_hint","sub","symmetric_difference","take","to_owned","to_owned","to_owned","to_owned","to_owned","to_owned","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_reserve","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","union","with_capacity_and_hasher","with_capacity_and_hasher_in","with_hasher","with_hasher_in","AbsentEntry","Drain","Entry","ExtractIf","HashTable","IntoIter","Iter","IterMut","Occupied","OccupiedEntry","Vacant","VacantEntry","allocator","and_modify","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","capacity","clear","clone","clone_into","default","drain","entry","extract_if","find","find_entry","find_mut","fmt","fmt","fmt","fmt","fmt","fmt","fold","fold","fold","from","from","from","from","from","from","from","from","from","from","get","get_many_mut","get_many_unchecked_mut","get_mut","insert","insert","insert_unique","into","into","into","into","into","into","into","into","into","into","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_iter","into_mut","into_table","into_table","into_table","is_empty","iter","iter_mut","len","len","len","len","len","new","new_in","next","next","next","next","next","or_insert","or_insert_with","remove","reserve","retain","shrink_to","shrink_to_fit","size_hint","size_hint","size_hint","size_hint","size_hint","to_owned","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_reserve","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","with_capacity","with_capacity_in"],"q":[[0,"hashbrown"],[24,"hashbrown::TryReserveError"],[25,"hashbrown::hash_map"],[433,"hashbrown::hash_set"],[653,"hashbrown::hash_table"],[807,"core::marker"],[808,"core::fmt"],[809,"core::result"],[810,"core::any"],[811,"hashbrown::map"],[812,"core::ops::function"],[813,"core::option"],[814,"core::clone"],[815,"core::cmp"],[816,"core::hash"],[817,"core::default"],[818,"core::iter::traits::collect"],[819,"core::borrow"],[820,"core::convert"],[821,"alloc::string"],[822,"hashbrown::set"],[823,"alloc::borrow"],[824,"hashbrown::table"]],"i":[3,3,0,0,0,0,0,3,3,3,3,3,6,3,3,3,0,0,0,3,3,3,3,3,81,0,0,0,0,0,0,0,0,0,0,0,0,13,15,16,0,0,0,0,0,0,0,0,13,15,16,0,0,0,0,12,13,15,16,13,15,16,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,12,12,12,21,22,23,12,12,21,22,23,12,12,12,12,51,12,12,12,12,12,12,12,12,21,37,38,39,40,22,23,28,41,42,13,43,44,45,15,46,47,16,49,50,51,51,21,37,38,39,40,22,23,28,41,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,42,45,12,42,45,42,45,12,43,46,49,12,43,12,43,12,12,12,12,12,43,46,49,12,12,12,13,43,44,15,46,47,16,49,50,44,43,12,44,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,12,12,12,21,37,38,39,40,22,23,28,34,41,43,47,50,43,12,43,46,49
,12,12,12,12,43,15,46,47,16,49,50,43,12,12,21,37,38,39,40,22,23,28,41,21,37,38,39,40,22,23,28,34,41,15,16,13,15,16,13,15,16,15,16,12,12,12,43,46,49,12,43,46,49,46,49,43,46,49,46,49,12,12,12,12,21,37,38,39,40,22,23,28,34,41,12,21,22,23,51,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,12,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,12,82,12,21,37,38,39,40,22,23,28,34,41,42,13,43,44,45,15,46,47,16,49,50,51,51,12,12,12,12,12,12,0,0,0,0,0,0,0,0,64,0,0,0,64,0,57,57,57,57,57,58,66,63,65,59,60,61,62,64,67,68,57,58,66,63,65,59,60,61,62,64,67,68,57,57,57,58,59,60,61,62,57,57,58,59,60,61,62,57,57,57,57,57,57,57,57,57,57,57,58,66,63,59,60,61,62,64,67,68,58,66,63,59,60,61,62,57,57,58,66,63,65,59,60,61,62,64,67,68,57,57,64,67,68,57,57,57,57,57,64,68,57,57,57,58,66,63,65,59,60,61,62,64,67,68,57,57,58,66,63,65,59,60,61,62,68,57,57,57,57,57,57,58,66,63,58,66,63,65,59,60,61,62,64,57,67,57,67,57,57,57,57,58,66,63,65,59,60,61,62,57,57,57,57,58,59,60,61,62,57,58,66,63,65,59,60,61,62,64,67,68,57,58,66,63,65,59,60,61,62,64,67,68,57,57,58,66,63,65,59,60,61,62,64,67,68,57,57,57,57,57,0,0,0,0,0,0,0,0,72,0,72,0,71,72,71,72,75,77,76,78,79,80,73,74,71,72,75,77,76,78,79,80,73,74,71,71,71,71,71,71,71,71,71,71,71,71,72,75,77,76,73,78,79,80,71,72,75,77,76,78,79,80,73,74,75,71,71,75,72,77,71,71,72,75,77,76,78,79,80,73,74,71,71,71,78,79,80,73,74,75,75,77,76,71,71,71,71,78,79,80,73,71,71,78,79,80,73,74,72,72,75,71,71,71,71,78,79,80,73,74,71,71,72,75,77,76,78,79,80,73,74,71,72,75,77,76,78,79,80,73,74,71,71,72,75,77,76,78,79,80,73,74,71,71],"f":"```````{{{b{c}}}{{b{e}}}{}{}}{{{b{dc}}}{{b{de}}}{}{}}{{{b{f}}}f}{{{b{c}}{b{de}}}h{}{}}{{{b{f}}{b{f}}}j}{{{b{l}}{b{c}}}jn}{{{b{c}}{b{e}}}j{}{}}{{{b{f}}{b{dA`}}}Ab}{cc{}}```{ce{}{}}{{{b{c}}}e{}{}}{c{{Ad{e}}}{}{}}0{{{b{c}}}Af{}}```````````````````````````````{{{b{{Ah{cegi}}}}}{{b{i}}}{}{}{}`}{{{Aj{cegi}}k}{{Aj{cegi}}}{}{}{}`{{Al{{b{dc}}{b{de}}}}}}{{{An{cegi}}k}{{An{cegi}}}{}{}{}`{{Al{{b{de}}}}}}{{{B`{cegik}}m}{{B`{cegik}}}{}n{}{}`{{Al{{b{dg}}}}}}{{{Aj{cegi}}k}{{Aj{cegi}}}{}{}{}`{{Al{{b{c}}e}{{Bb{{Bd{e}}}}}}}}{{{An{cegi}}k}{{An{cegi}}}{}{}{}`{{Al{{b{c}}e}{{Bb{{Bd{e}}}}}}}}{{{B`{cegik}}m}{{B`{cegik}}}{}n{}{}`{{Al{{b{c}}g}{{Bb{{Bd{g}}}}}}}}{{{b{c}}}{{b{e}}}{}{}}00000000000000000000000{{{b{dc}}}{{b{de}}}{}{}}00000000000000000000000{{{b{{Ah{cegi}}}}}Bf{}{}{}`}{{{b{d{Ah{cegi}}}}}h{}{}{}`}{{{b{{Ah{cegi}}}}}{{Ah{cegi}}}BhBhBh{`Bh}}{{{b{{Bj{ce}}}}}{{Bj{ce}}}{}{}}{{{b{{Bl{ce}}}}}{{Bl{ce}}}{}{}}{{{b{{Bn{ce}}}}}{{Bn{ce}}}{}{}}{{{b{d{Ah{cegi}}}}{b{{Ah{cegi}}}}}hBhBhBh{`Bh}}{{{b{c}}{b{de}}}h{}{}}000{{{b{{Ah{cegi}}}}{b{k}}}j{C`Cb}{}Cd`{Cb{l{c}}n}}{{}{{Ah{cegi}}}{}{}Cf{Cf`}}{{{b{d{Ah{cegi}}}}}{{Ch{cei}}}{}{}{}`}{{{b{d{Ah{cegi}}}}c}{{An{cegi}}}{C`Cb}{}Cd`}`{{{b{d{Ah{cegi}}}}{b{k}}}{{B`{ckegi}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{{Ah{cegi}}}}{b{{Ah{cegi}}}}}j{C`Cb}CjCd`}{{{b{c}}{b{e}}}j{}{}}{{{b{d{Ah{cegi}}}}k}h{C`CbCl}ClCd`{{Db{}{{Cn{{D`{{b{c}}{b{e}}}}}}}}}}{{{b{d{Ah{cegi}}}}k}h{C`Cb}{}Cd`{{Db{}{{Cn{{D`{ce}}}}}}}}{{{b{d{Ah{cegi}}}}k}h{C`CbCl}ClCd`{{Db{}{{Cn{{b{{D`{ce}}}}}}}}}}{{{b{d{Ah{cegi}}}}k}{{Dd{ceki}}}{}{}{}`{{Df{{b{c}}{b{de}}}{{Bb{j}}}}}}{{{b{{Ah{cegi}}}}{b{dA`}}}AbDhDh{}`}{{{b{{Bj{ce}}}}{b{dA`}}}AbDhDh}{{{b{{Dj{ce}}}}{b{dA`}}}AbDhDh}{{{b{{Dl{ceg}}}}{b{dA`}}}AbDhDh`}{{{b{{Dn{ceg}}}}{b{dA`}}}AbDhDh`}{{{b{{E`{ceg}}}}{b{dA`}}}Ab{}Dh`}{{{b{{Bl{ce}}}}{b{dA`}}}AbDh{}}{{{b{{Bn{ce}}}}{b{dA`}}}Ab{}Dh}{{{b{{Ch{ceg}}}}{b{dA`}}}AbDhDh`}{{{b{{Eb{ce}}}}{b{dA`}}}Ab{}Dh}{{{b{{Ed{cegi}}}}{b{dA`}}}Ab{}{}{}`}{{{b{{Aj{cegi}}}}{b{dA`}}}AbDhDh{}`}{{{b{{Ef{cegi}}}}{b{dA`}}}AbDh
Dh{}`}{{{b{{Eh{cegi}}}}{b{dA`}}}Ab{}{}{}`}{{{b{{Ej{cegi}}}}{b{dA`}}}Ab{}{}{}`}{{{b{{An{cegi}}}}{b{dA`}}}AbDhDh{}`}{{{b{{El{cegi}}}}{b{dA`}}}AbDhDh{}`}{{{b{{En{cegi}}}}{b{dA`}}}AbDh{}{}`}{{{b{{B`{ecgik}}}}{b{dA`}}}Ab{nDh}{{F`{c}}}Dh{}`}{{{b{{Fb{ecgik}}}}{b{dA`}}}Ab{nDh}{{F`{c}}}Dh{}`}{{{b{{Fd{ecgik}}}}{b{dA`}}}Ab{nDh}{{F`{c}}}{}{}`}{{{b{{Ff{cegi}}}}{b{dA`}}}AbDhDh{}`}0{{{Bj{ce}}gk}g{}{}{}{}{{Df{gi}{{Bb{g}}}}}}{{{Dj{ce}}gk}g{}{}{}{}{{Df{gi}{{Bb{g}}}}}}{{{Dl{ceg}}im}i{}{}`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Dn{ceg}}im}i{}{}`{}{}{{Df{ik}{{Bb{i}}}}}}{{{E`{ceg}}im}i{}{}`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Bl{ce}}gk}g{}{}{}{}{{Df{gi}{{Bb{g}}}}}}{{{Bn{ce}}gk}g{}{}{}{}{{Df{gi}{{Bb{g}}}}}}{{{Ch{ceg}}im}i{}{}`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Eb{ce}}gk}g{}{}{}{}{{Df{gi}{{Bb{g}}}}}}{cc{}}00000000000000000000000{{{Ed{cegi}}Fhk}{{Aj{cegi}}}{}{}{}`{{Df{{b{c}}}{{Bb{j}}}}}}{{{Ej{cegi}}Fhk}{{Bd{{D`{{b{c}}{b{e}}}}}}}{}{}{}`{{Df{{b{c}}}{{Bb{j}}}}}}{g{{Ah{ceik}}}{C`Cb}{}{{Db{}{{Cn{{D`{ce}}}}}}}{CdCf}{Cf`}}{{{Ed{cegi}}{b{k}}}{{Aj{cegi}}}{}{}Cd`{Cb{l{c}}n}}{{{Ej{cegi}}{b{k}}}{{Bd{{D`{{b{c}}{b{e}}}}}}}{}{}Cd`{Cb{l{c}}n}}{{{Ed{cegi}}Fh{b{k}}}{{Aj{cegi}}}{}{}{}`{{l{c}}n}}{{{Ej{cegi}}Fh{b{k}}}{{Bd{{D`{{b{c}}{b{e}}}}}}}{}{}{}`{{l{c}}n}}{{{b{{Ah{cegi}}}}{b{k}}}{{Bd{{b{e}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{{Ef{cegi}}}}}{{b{e}}}{}{}{}`}{{{b{{El{cegi}}}}}{{b{e}}}{}{}{}`}{{{b{{Fb{cegik}}}}}{{b{g}}}{}n{}{}`}{{{b{{Ah{cegi}}}}{b{k}}}{{Bd{{D`{{b{c}}{b{e}}}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{{Ef{cegi}}}}}{{D`{{b{c}}{b{e}}}}}{}{}{}`}{{{b{d{Ah{cegi}}}}{b{k}}}{{Bd{{D`{{b{c}}{b{de}}}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{d{Ef{cegi}}}}}{{D`{{b{dc}}{b{de}}}}}{}{}{}`}{{{b{d{Ah{cegi}}}}{Fj{{b{k}}}}}{{Bd{{Fj{{D`{{b{c}}{b{de}}}}}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}0{{{b{d{Ah{cegi}}}}{Fj{{b{k}}}}}{{Bd{{Fj{{b{de}}}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}0{{{b{d{Ah{cegi}}}}{b{k}}}{{Bd{{b{de}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{d{Ef{cegi}}}}}{{b{de}}}{}{}{}`}{{{b{d{El{cegi}}}}}{{b{de}}}{}{}{}`}{{{b{d{Fb{cegik}}}}}{{b{dg}}}{}n{}{}`}{{{b{{Ah{cegi}}}}}{{b{g}}}{}{}{}`}{{{b{{Ah{cegi}}}}{b{k}}}{{b{e}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{b{d{Ah{cegi}}}}ce}{{Bd{e}}}{C`Cb}{}Cd`}{{{Aj{cegi}}ce}{{Ef{cegi}}}Cb{}Cd`}{{{b{d{Ef{cegi}}}}e}e{}{}{}`}{{{Eh{cegi}}ce}{{D`{{b{dc}}{b{de}}}}}Cb{}Cd`}{{{An{cegi}}e}{{El{cegi}}}Cb{}Cd`}{{{b{d{El{cegi}}}}e}e{}{}{}`}{{{En{cegi}}e}{{b{de}}}Cb{}Cd`}{{{B`{ecgik}}g}{{Fb{ecgik}}}n{Cb{Fl{{b{c}}}}}{}Cd`}{{{b{d{Fb{cegik}}}}g}g{}n{}{}`}{{{Fd{ecgik}}g}{{b{dg}}}n{Cb{Fl{{b{c}}}}}{}Cd`}{{{Eh{cegi}}Fhce}{{D`{{b{dc}}{b{de}}}}}Cb{}Cd`}{{{b{d{Ef{cegi}}}}c}c{}{}{}`}{{{b{d{Ah{cegi}}}}ce}{{D`{{b{c}}{b{de}}}}}{C`Cb}{}Cd`}{{{Eh{cegi}}Fhcek}{{D`{{b{dc}}{b{de}}}}}{}{}{}`{{Fn{{b{c}}}{{Bb{Fh}}}}}}{ce{}{}}00000000000000000000000{{{Ah{cegi}}}{{Dl{cei}}}{}{}{}`}{{{b{d{Ah{cegi}}}}}{{Dj{ce}}}{}{}{}`}{{{b{{Ah{cegi}}}}}{{Bj{ce}}}{}{}{}`}3333333333{{{Ef{cegi}}}{{b{dc}}}{}{}{}`}{{{En{cegi}}}c{}{}{}`}{{{Fd{ecgik}}}en{{Fl{{b{c}}}}}{}{}`}{{{Ef{cegi}}}{{D`{{b{dc}}{b{de}}}}}{}{}{}`}{{{Ah{cegi}}}{{Dn{cei}}}{}{}{}`}{{{Ef{cegi}}}{{b{de}}}{}{}{}`}{{{El{cegi}}}{{b{de}}}{}{}{}`}{{{Fb{cegik}}}{{b{dg}}}{}n{}{}`}{{{Ah{cegi}}}{{E`{cei}}}{}{}{}`}{{{b{{Ah{cegi}}}}}j{}{}{}`}:;{{{b{{Ef{cegi}}}}}{{b{c}}}{}{}{}`}{{{b{{An{cegi}}}}}{{b{c}}}{}{}{}`}{{{b{{El{cegi}}}}}{{b{c}}}{}{}{}`}{{{b{{En{cegi}}}}}{{b{c}}}{}{}{}`}{{{b{{B`{ecgik}}}}}{{b{c}}}n{{F`{c}}}{}{}`}{{{b{{Fb{cegik}}}}}{{b{c}}}{}n{}{}`}{{{b{{Fd{ecgik}}}}}{{b{c}}}n{{F`{c}}}{}{}`}{{{b{d{Ef{cegi}}}}}{{b{dc}}}{}{}{}`}{{{b{{Ah{cegi}}}}}{{Bl{ce}}}{}{}{}`}{{{b{{Ah{cegi}}}}}Bf{}{}{}`}{{{b{{Bj{ce}}}}}Bf{}{}}{{{b{{Dj{ce}}}}}Bf{}{}}{{{b{{Dl{ceg}}}}}Bf{}{}`}{{{b{{Dn{ceg}}}}}Bf{}{}`}{{{b{{E`{ceg}}}}}Bf{}{}`}{
{{b{{Bl{ce}}}}}Bf{}{}}{{{b{{Bn{ce}}}}}Bf{}{}}{{{b{{Ch{ceg}}}}}Bf{}{}`}{{{b{{Eb{ce}}}}}Bf{}{}}{{{b{d{Bj{ce}}}}}{{Bd{{D`{{b{c}}{b{e}}}}}}}{}{}}{{{b{d{Dj{ce}}}}}{{Bd{{D`{{b{c}}{b{de}}}}}}}{}{}}{{{b{d{Dl{ceg}}}}}{{Bd{{D`{ce}}}}}{}{}`}{{{b{d{Dn{ceg}}}}}{{Bd{c}}}{}{}`}{{{b{d{E`{ceg}}}}}{{Bd{e}}}{}{}`}{{{b{d{Bl{ce}}}}}{{Bd{{b{c}}}}}{}{}}{{{b{d{Bn{ce}}}}}{{Bd{{b{e}}}}}{}{}}{{{b{d{Ch{ceg}}}}}{{Bd{{D`{ce}}}}}{}{}`}{{{b{d{Dd{cegi}}}}}{{Bd{k}}}{}{}{{Df{{b{c}}{b{de}}}{{Bb{j}}}}}`{}}{{{b{d{Eb{ce}}}}}{{Bd{{b{de}}}}}{}{}}{{{An{cegi}}}{{b{de}}}CbCfCd`}{{{B`{ecgik}}}{{b{dg}}}n{Cb{Fl{{b{c}}}}}CfCd`}{{{Aj{cegi}}ce}{{D`{{b{dc}}{b{de}}}}}Cb{}Cd`}{{{An{cegi}}e}{{b{de}}}Cb{}Cd`}{{{B`{ecgik}}g}{{b{dg}}}n{Cb{Fl{{b{c}}}}}{}Cd`}{{{Aj{cegi}}k}{{D`{{b{dc}}{b{de}}}}}Cb{}Cd`{{Al{}{{Bb{{D`{ce}}}}}}}}{{{An{cegi}}k}{{b{de}}}Cb{}Cd`{{Al{}{{Bb{e}}}}}}{{{B`{ecgik}}m}{{b{dg}}}n{Cb{Fl{{b{c}}}}}{}Cd`{{Al{}{{Bb{g}}}}}}{{{An{cegi}}k}{{b{de}}}Cb{}Cd`{{Al{{b{c}}}{{Bb{e}}}}}}{{{B`{ecgik}}m}{{b{dg}}}n{Cb{F`{c}}{Fl{{b{c}}}}}{}Cd`{{Al{{b{c}}}{{Bb{g}}}}}}{{{b{{Ah{cegi}}}}}{{Ej{cegi}}}{}{}{}`}{{{b{d{Ah{cegi}}}}}{{Ed{cegi}}}{}{}{}`}{{{b{d{Ah{cegi}}}}{b{k}}}{{Bd{e}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{Ef{cegi}}}e{}{}{}`}{{{El{cegi}}}e{}{}{}`}{{{Fb{cegik}}}g{}n{}{}`}{{{b{d{Ah{cegi}}}}{b{k}}}{{Bd{{D`{ce}}}}}{C`Cb}{}Cd`{Cb{l{c}}n}}{{{Ef{cegi}}}{{D`{ce}}}{}{}{}`}{{{El{cegi}}}{{D`{ce}}}{}{}{}`}{{{Fb{cegik}}}{{D`{cg}}}{}n{}{}`}{{{El{cegi}}e}{{D`{ce}}}{}{}{}`}{{{Fb{ecgik}}g}{{D`{eg}}}n{{Fl{{b{c}}}}}{}{}`}{{{Ef{cegi}}k}{{Aj{cegi}}}{}{}{}`{{Al{{b{c}}e}{{Bb{{Bd{e}}}}}}}}{{{El{cegi}}k}{{An{cegi}}}{}{}{}`{{Al{{b{c}}e}{{Bb{{Bd{e}}}}}}}}{{{Fb{cegik}}m}{{B`{cegik}}}{}n{}{}`{{Al{{b{c}}g}{{Bb{{Bd{g}}}}}}}}{{{El{cegi}}}c{}{}{}`}{{{Fb{ecgik}}}en{{Fl{{b{c}}}}}{}{}`}{{{b{d{Ah{cegi}}}}Bf}h{C`Cb}{}Cd`}{{{b{d{Ah{cegi}}}}k}h{}{}{}`{{Df{{b{c}}{b{de}}}{{Bb{j}}}}}}1{{{b{d{Ah{cegi}}}}}h{C`Cb}{}Cd`}{{{b{{Bj{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}}{{{b{{Dj{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}}{{{b{{Dl{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}`}{{{b{{Dn{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}`}{{{b{{E`{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}`}{{{b{{Bl{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}}{{{b{{Bn{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}}{{{b{{Ch{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}`}{{{b{{Dd{cegi}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}{{Df{{b{c}}{b{de}}}{{Bb{j}}}}}`}{{{b{{Eb{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}{}}{{{b{c}}}e{}{}}000{{{b{c}}}G`{}}{c{{Ad{e}}}{}{}}00000000000000000000000{{{b{d{Ah{cegi}}}}ce}{{Ad{{b{de}}{Ff{cegi}}}}}{C`Cb}{}Cd`}111111111111111111111111{{{b{d{Ah{cegi}}}}Bf}{{Ad{hf}}}{C`Cb}{}Cd`}{{{b{c}}}Af{}}00000000000000000000000`{{{b{{Ah{cegi}}}}}{{Bn{ce}}}{}{}{}`}{{{b{d{Ah{cegi}}}}}{{Eb{ce}}}{}{}{}`}{{Bfc}{{Ah{egc}}}{}{}{}}{{Bfce}{{Ah{gice}}}{}`{}{}}{c{{Ah{egc}}}{}{}{}}{{ce}{{Ah{gice}}}{}`{}{}}``````````````{{{b{{Gb{ceg}}}}}{{b{g}}}{}{}`}{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}{{Gb{ce}}}{C`CbBh}{CdCf}`}0{{{b{{Gb{ce}}}}{b{{Gb{ce}}}}}{{Gb{ce}}}{C`CbBh}{CdCf}}{{{b{c}}}{{b{e}}}{}{}}00000000000{{{b{dc}}}{{b{de}}}{}{}}00000000000{{{b{{Gb{ceg}}}}}Bf{}{}`}{{{b{d{Gb{ceg}}}}}h{}{}`}{{{b{{Gb{ceg}}}}}{{Gb{ceg}}}BhBh{`Bh}}{{{b{{Gd{c}}}}}{{Gd{c}}}{}}{{{b{{Gf{ceg}}}}}{{Gf{ceg}}}{}{}`}{{{b{{Gh{ceg}}}}}{{Gh{ceg}}}{}{}`}{{{b{{Gj{ceg}}}}}{{Gj{ceg}}}{}{}`}{{{b{{Gl{ceg}}}}}{{Gl{ceg}}}{}{}`}{{{b{d{Gb{ceg}}}}{b{{Gb{ceg}}}}}hBhBh{`Bh}}{{{b{c}}{b{de}}}h{}{}}00000{{{b{{Gb{ceg}}}}{b{i}}}j{C`Cb}Cd`{Cb{l{c}}n}}{{}{{Gb{ceg}}}{}Cf{Cf`}}{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}{{Gh{ceg}}}{C`Cb}Cd`}{{{b{d{Gb{ceg}}}}}{{Gn{cg}}}{}{}`}{{{b{d{Gb{ceg}}}}c}{{H`{ceg}}}{C`Cb}Cd`}{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}j{C`Cb}Cd`}{{{b{c}}{b{e}}}j{}{}}{{{b{d{Gb{ceg}}}}i}h{C`CbCl}Cd`{{Db{}{{Cn{{b{c}}}}}}}}{{{b{d{Gb{ceg}}}}i}h{C`C
b}Cd`{{Db{}{{Cn{c}}}}}}{{{b{d{Gb{ceg}}}}i}{{Hb{cig}}}{}{}`{{Df{{b{c}}}{{Bb{j}}}}}}{{{b{{Gb{ceg}}}}{b{dA`}}}AbDh{}`}{{{b{{Gd{c}}}}{b{dA`}}}AbDh}{{{b{{Hd{ce}}}}{b{dA`}}}AbDh`}{{{b{{Gn{ce}}}}{b{dA`}}}AbDh`}{{{b{{Gf{ceg}}}}{b{dA`}}}Ab{DhC`Cb}Cd`}{{{b{{Gh{ceg}}}}{b{dA`}}}Ab{DhC`Cb}Cd`}{{{b{{Gj{ceg}}}}{b{dA`}}}Ab{DhC`Cb}Cd`}{{{b{{Gl{ceg}}}}{b{dA`}}}Ab{DhC`Cb}Cd`}{{{b{{H`{ceg}}}}{b{dA`}}}AbDh{}`}{{{b{{Hf{ceg}}}}{b{dA`}}}AbDh{}`}{{{b{{Hh{ceg}}}}{b{dA`}}}AbDh{}`}{{{Gd{c}}ei}e{}{}{}{{Df{eg}{{Bb{e}}}}}}{{{Hd{ce}}gk}g{}`{}{}{{Df{gi}{{Bb{g}}}}}}{{{Gn{ce}}gk}g{}`{}{}{{Df{gi}{{Bb{g}}}}}}{{{Gf{ceg}}im}i{C`Cb}Cd`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Gh{ceg}}im}i{C`Cb}Cd`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Gj{ceg}}im}i{C`Cb}Cd`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Gl{ceg}}im}i{C`Cb}Cd`{}{}{{Df{ik}{{Bb{i}}}}}}{{{Ah{cheg}}}{{Gb{ceg}}}{}{}`}{cc{}}00000000000{e{{Gb{cgi}}}{C`Cb}{{Db{}{{Cn{c}}}}}{CdCf}{Cf`}}{{{b{{Gb{ceg}}}}{b{i}}}{{Bd{{b{c}}}}}{C`Cb}Cd`{Cb{l{c}}n}}{{{b{{H`{ceg}}}}}{{b{c}}}{}{}`}{{{b{{Hf{ceg}}}}}{{b{c}}}{}{}`}{{{b{{Hh{ceg}}}}}{{b{c}}}{}{}`}{{{b{d{Gb{ceg}}}}c}{{b{c}}}{C`Cb}Cd`}{{{b{d{Gb{ceg}}}}{b{i}}}{{b{c}}}{C`Cb}Cd`{Cb{l{c}}{Hl{}{{Hj{c}}}}n}}{{{b{d{Gb{ceg}}}}{b{i}}k}{{b{c}}}{C`Cb}Cd`{Cb{l{c}}n}{{Al{{b{i}}}{{Bb{c}}}}}}{{{b{{Gb{ceg}}}}}{{b{e}}}{}{}`}{{{b{d{Gb{ceg}}}}c}j{C`Cb}Cd`}{{{H`{ceg}}}{{Hf{ceg}}}CbCd`}{{{Hh{ceg}}}hCbCd`}6{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}{{Gf{ceg}}}{C`Cb}Cd`}{ce{}{}}00000000000{{{b{{Gb{ceg}}}}}{{Gd{c}}}{}{}`}{{{Gb{ceg}}}{{Hd{cg}}}{}{}`}22222222{{{Hh{ceg}}}c{}{}`}{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}j{C`Cb}Cd`}{{{b{{Gb{ceg}}}}}j{}{}`}114{{{b{{Gb{ceg}}}}}Bf{}{}`}{{{b{{Gd{c}}}}}Bf{}}{{{b{{Hd{ce}}}}}Bf{}`}{{{b{{Gn{ce}}}}}Bf{}`}{{{b{d{Gd{c}}}}}{{Bd{{b{c}}}}}{}}{{{b{d{Hd{ce}}}}}{{Bd{c}}}{}`}{{{b{d{Gn{ce}}}}}{{Bd{c}}}{}`}{{{b{d{Hb{ceg}}}}}{{Bd{i}}}{}{{Df{{b{c}}}{{Bb{j}}}}}`{}}{{{b{d{Gf{ceg}}}}}{{Bd{{b{c}}}}}{C`Cb}Cd`}{{{b{d{Gh{ceg}}}}}{{Bd{{b{c}}}}}{C`Cb}Cd`}{{{b{d{Gj{ceg}}}}}{{Bd{{b{c}}}}}{C`Cb}Cd`}{{{b{d{Gl{ceg}}}}}{{Bd{{b{c}}}}}{C`Cb}Cd`}{{{H`{ceg}}}hCbCd`}{{{b{d{Gb{ceg}}}}{b{i}}}j{C`Cb}Cd`{Cb{l{c}}n}}{{{Hf{ceg}}}c{}{}`}{{{b{d{Gb{ceg}}}}c}{{Bd{c}}}{C`Cb}Cd`}1{{{b{d{Gb{ceg}}}}Bf}h{C`Cb}Cd`}{{{b{d{Gb{ceg}}}}i}h{}{}`{{Df{{b{c}}}{{Bb{j}}}}}}1{{{b{d{Gb{ceg}}}}}h{C`Cb}Cd`}{{{b{{Gd{c}}}}}{{D`{Bf{Bd{Bf}}}}}{}}{{{b{{Hd{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}`}{{{b{{Gn{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}`}{{{b{{Hb{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{{Df{{b{c}}}{{Bb{j}}}}}`}{{{b{{Gf{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{C`Cb}Cd`}{{{b{{Gh{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{C`Cb}Cd`}{{{b{{Gj{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{C`Cb}Cd`}{{{b{{Gl{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{C`Cb}Cd`}{{{b{{Gb{ce}}}}{b{{Gb{ce}}}}}{{Gb{ce}}}{C`CbBh}{CdCf}}{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}{{Gj{ceg}}}{C`Cb}Cd`}{{{b{d{Gb{ceg}}}}{b{i}}}{{Bd{c}}}{C`Cb}Cd`{Cb{l{c}}n}}{{{b{c}}}e{}{}}00000{c{{Ad{e}}}{}{}}00000000000000000000000{{{b{d{Gb{ceg}}}}Bf}{{Ad{hf}}}{C`Cb}Cd`}{{{b{c}}}Af{}}00000000000{{{b{{Gb{ceg}}}}{b{{Gb{ceg}}}}}{{Gl{ceg}}}{C`Cb}Cd`}{{Bfc}{{Gb{ec`}}}{}{}}{{Bfce}{{Gb{gce}}}{}`{}}{c{{Gb{ec`}}}{}{}}{{ce}{{Gb{gce}}}{}`{}}````````````{{{b{{Hn{ce}}}}}{{b{e}}}{}`}{{{I`{ce}}g}{{I`{ce}}}{}`{{Al{{b{dc}}}}}}{{{b{c}}}{{b{e}}}{}{}}000000000{{{b{dc}}}{{b{de}}}{}{}}000000000{{{b{{Hn{ce}}}}}Bf{}`}{{{b{d{Hn{ce}}}}}h{}`}{{{b{{Hn{ce}}}}}{{Hn{ce}}}Bh{`Bh}}{{{b{c}}{b{de}}}h{}{}}{{}{{Hn{ce}}}{}{`Cf}}{{{b{d{Hn{ce}}}}}{{Ib{ce}}}{}`}{{{b{d{Hn{ce}}}}Fhgi}{{I`{ce}}}{}`{{Df{{b{c}}}{{Bb{j}}}}}{{Fn{{b{c}}}{{Bb{Fh}}}}}}{{{b{d{Hn{ce}}}}g}{{Id{cge}}}{}`{{Df{{b{dc}}}{{Bb{j}}}}}}{{{b{{Hn{ce}}}}Fhg}{{Bd{{b{c}}}}}{}`{{Df{{b{c}}}{{Bb{j}}}}}}{{{b{d{Hn{ce}}}}Fhg}{{Ad{{If{ce}}{Ih{ce}}}}}{}`{{Df{{b{c}}}{{Bb{j}}}}}}{{{b{d{Hn{ce}}}}Fhg}{{Bd{{b{dc}}}}}{}
`{{Df{{b{c}}}{{Bb{j}}}}}}{{{b{{Hn{ce}}}}{b{dA`}}}AbDh`}{{{b{{I`{ce}}}}{b{dA`}}}AbDh`}{{{b{{If{ce}}}}{b{dA`}}}AbDh`}{{{b{{Ij{ce}}}}{b{dA`}}}AbDh`}{{{b{{Ih{ce}}}}{b{dA`}}}AbDh`}{{{b{{Ib{ce}}}}{b{dA`}}}AbDh`}{{{Il{c}}ei}e{}{}{}{{Df{eg}{{Bb{e}}}}}}{{{In{c}}ei}e{}{}{}{{Df{eg}{{Bb{e}}}}}}{{{J`{ce}}gk}g{}`{}{}{{Df{gi}{{Bb{g}}}}}}{cc{}}000000000{{{b{{If{ce}}}}}{{b{c}}}{}`}{{{b{d{Hn{ce}}}}{Fj{Fh}}g}{{Bd{{Fj{{b{dc}}}}}}}{}`{{Df{Bf{b{c}}}{{Bb{j}}}}}}0{{{b{d{If{ce}}}}}{{b{dc}}}{}`}{{{I`{ce}}c}{{If{ce}}}{}`}{{{Ij{ce}}c}{{If{ce}}}{}`}{{{b{d{Hn{ce}}}}Fhcg}{{If{ce}}}{}`{{Fn{{b{c}}}{{Bb{Fh}}}}}}{ce{}{}}000000000{{{Hn{ce}}}{{J`{ce}}}{}`}{{{b{d{Hn{ce}}}}}{{In{c}}}{}`}{{{b{{Hn{ce}}}}}{{Il{c}}}{}`}33333{{{If{ce}}}{{b{dc}}}{}`}{{{If{ce}}}{{b{d{Hn{ce}}}}}{}`}{{{Ij{ce}}}{{b{d{Hn{ce}}}}}{}`}{{{Ih{ce}}}{{b{d{Hn{ce}}}}}{}`}{{{b{{Hn{ce}}}}}j{}`}56{{{b{{Hn{ce}}}}}Bf{}`}{{{b{{Il{c}}}}}Bf{}}{{{b{{In{c}}}}}Bf{}}{{{b{{J`{ce}}}}}Bf{}`}{{{b{{Ib{ce}}}}}Bf{}`}{{}{{Hn{c`}}}{}}{c{{Hn{ec}}}`{}}{{{b{d{Il{c}}}}}{{Bd{e}}}{}{}}{{{b{d{In{c}}}}}{{Bd{e}}}{}{}}{{{b{d{J`{ce}}}}}{{Bd{g}}}{}`{}}{{{b{d{Ib{ce}}}}}{{Bd{c}}}{}`}{{{b{d{Id{ceg}}}}}{{Bd{i}}}{}{{Df{{b{dc}}}{{Bb{j}}}}}`{}}{{{I`{ce}}c}{{If{ce}}}{}`}{{{I`{ce}}g}{{If{ce}}}{}`{{Al{}{{Bb{c}}}}}}{{{If{ce}}}{{D`{c{Ij{ce}}}}}{}`}{{{b{d{Hn{ce}}}}Bfg}h{}`{{Fn{{b{c}}}{{Bb{Fh}}}}}}{{{b{d{Hn{ce}}}}g}h{}`{{Df{{b{dc}}}{{Bb{j}}}}}}1{{{b{d{Hn{ce}}}}g}h{}`{{Fn{{b{c}}}{{Bb{Fh}}}}}}{{{b{{Il{c}}}}}{{D`{Bf{Bd{Bf}}}}}{}}{{{b{{In{c}}}}}{{D`{Bf{Bd{Bf}}}}}{}}{{{b{{J`{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}`}{{{b{{Ib{ce}}}}}{{D`{Bf{Bd{Bf}}}}}{}`}{{{b{{Id{ceg}}}}}{{D`{Bf{Bd{Bf}}}}}{}{{Df{{b{dc}}}{{Bb{j}}}}}`}{{{b{c}}}e{}{}}{c{{Ad{e}}}{}{}}0000000000000000000{{{b{d{Hn{ce}}}}Bfg}{{Ad{hf}}}{}`{{Fn{{b{c}}}{{Bb{Fh}}}}}}{{{b{c}}}Af{}}000000000{Bf{{Hn{c`}}}{}}{{Bfc}{{Hn{ec}}}`{}}","D":"BNj","p":[[1,"reference"],[0,"mut"],[6,"TryReserveError",0],[1,"unit"],[1,"bool"],[10,"Equivalent",0],[10,"Sized",807],[5,"Formatter",808],[8,"Result",808],[6,"Result",809],[5,"TypeId",810],[5,"HashMap",25,811],[6,"RawEntryMut",25,811],[10,"FnOnce",812],[6,"Entry",25,811],[6,"EntryRef",25,811],[17,"Output"],[6,"Option",813],[1,"usize"],[10,"Clone",814],[5,"Iter",25,811],[5,"Keys",25,811],[5,"Values",25,811],[10,"Eq",815],[10,"Hash",816],[10,"BuildHasher",816],[10,"Default",817],[5,"Drain",25,811],[10,"PartialEq",815],[10,"Copy",807],[17,"Item"],[1,"tuple"],[10,"IntoIterator",818],[5,"ExtractIf",25,811],[10,"FnMut",812],[10,"Debug",808],[5,"IterMut",25,811],[5,"IntoIter",25,811],[5,"IntoKeys",25,811],[5,"IntoValues",25,811],[5,"ValuesMut",25,811],[5,"RawEntryBuilderMut",25,811],[5,"RawOccupiedEntryMut",25,811],[5,"RawVacantEntryMut",25,811],[5,"RawEntryBuilder",25,811],[5,"OccupiedEntry",25,811],[5,"VacantEntry",25,811],[10,"Borrow",819],[5,"OccupiedEntryRef",25,811],[5,"VacantEntryRef",25,811],[5,"OccupiedError",25,811],[1,"u64"],[1,"array"],[10,"From",820],[10,"Fn",812],[5,"String",821],[5,"HashSet",433,822],[5,"Iter",433,822],[5,"Intersection",433,822],[5,"Difference",433,822],[5,"SymmetricDifference",433,822],[5,"Union",433,822],[5,"Drain",433,822],[6,"Entry",433,822],[5,"ExtractIf",433,822],[5,"IntoIter",433,822],[5,"OccupiedEntry",433,822],[5,"VacantEntry",433,822],[17,"Owned"],[10,"ToOwned",823],[5,"HashTable",653,824],[6,"Entry",653,824],[5,"Drain",653,824],[5,"ExtractIf",653,824],[5,"OccupiedEntry",653,824],[5,"AbsentEntry",653,824],[5,"VacantEntry",653,824],[5,"Iter",653,824],[5,"IterMut",653,824],[5,"IntoIter",653,824],[15,"AllocError",24],[6,"DefaultHashBuilder",25]],"r":[[3,811],[4,822],[5,824],[25,811],[26,811],[27,811],[28,811],[29,8
11],[30,811],[31,811],[32,811],[33,811],[34,811],[35,811],[36,811],[40,811],[41,811],[42,811],[43,811],[44,811],[45,811],[46,811],[47,811],[51,811],[52,811],[53,811],[54,811],[433,822],[434,822],[435,822],[436,822],[437,822],[438,822],[439,822],[440,822],[442,822],[443,822],[444,822],[446,822],[653,824],[654,824],[655,824],[656,824],[657,824],[658,824],[659,824],[660,824],[662,824],[664,824]],"b":[[129,"impl-Extend%3C(%26K,+%26V)%3E-for-HashMap%3CK,+V,+S,+A%3E"],[130,"impl-Extend%3C(K,+V)%3E-for-HashMap%3CK,+V,+S,+A%3E"],[131,"impl-Extend%3C%26(K,+V)%3E-for-HashMap%3CK,+V,+S,+A%3E"],[154,"impl-Display-for-OccupiedError%3C\'a,+K,+V,+S,+A%3E"],[155,"impl-Debug-for-OccupiedError%3C\'_,+K,+V,+S,+A%3E"],[252,"impl-IntoIterator-for-HashMap%3CK,+V,+S,+A%3E"],[253,"impl-IntoIterator-for-%26mut+HashMap%3CK,+V,+S,+A%3E"],[254,"impl-IntoIterator-for-%26HashMap%3CK,+V,+S,+A%3E"],[497,"impl-Extend%3C%26T%3E-for-HashSet%3CT,+S,+A%3E"],[498,"impl-Extend%3CT%3E-for-HashSet%3CT,+S,+A%3E"],[557,"impl-IntoIterator-for-%26HashSet%3CT,+S,+A%3E"],[558,"impl-IntoIterator-for-HashSet%3CT,+S,+A%3E"],[734,"impl-IntoIterator-for-HashTable%3CT,+A%3E"],[735,"impl-IntoIterator-for-%26mut+HashTable%3CT,+A%3E"],[736,"impl-IntoIterator-for-%26HashTable%3CT,+A%3E"]],"c":"OjAAAAAAAAA=","e":"OzAAAAEAALIBIAAIAAQADgABABUAAwA/AC8AcQAIAIAAAQCGAB8AwAAAAAABCQAgARIAUgEmAHoBFwCTARcAxAEXAN4BDADwAQMA9QESABQCAAAuAgAAMAIHAD8CCgBTAgcAXgIdAH0CCwCcAhMAsgICALsCCADfAgcA7wIDAPUCBAABAxkAHAMJAA=="}],\ +["log",{"t":"PPPPPPGGKFFPFFFSFPPPPNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNQNNMNNNNNNNQNNNNMNNNNNNNNNNNNNNNNNNNNNNNNNNQNNNNNNNNNNNNNNNNMQQHNNHNNNNNNNNNNNNNNHHHHNNNNNNQNNNNNNNNNNNNNNNNNNNNNNNNQ","n":["Debug","Debug","Error","Error","Info","Info","Level","LevelFilter","Log","Metadata","MetadataBuilder","Off","ParseLevelError","Record","RecordBuilder","STATIC_MAX_LEVEL","SetLoggerError","Trace","Trace","Warn","Warn","args","args","as_str","as_str","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","build","build","builder","builder","clone","clone","clone","clone","cmp","cmp","cmp","cmp","debug","default","default","enabled","eq","eq","eq","eq","eq","eq","eq","error","file","file","file_static","file_static","flush","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","fmt","from","from","from","from","from","from","from","from","from_str","from_str","hash","hash","hash","hash","info","into","into","into","into","into","into","into","into","iter","iter","level","level","level","level","line","line","log","log","log_enabled","logger","max","max","max_level","metadata","metadata","module_path","module_path","module_path_static","module_path_static","new","new","partial_cmp","partial_cmp","partial_cmp","partial_cmp","partial_cmp","partial_cmp","set_logger","set_logger_racy","set_max_level","set_max_level_racy","target","target","target","target","to_level","to_level_filter","trace","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_from","try_into","try_into","try_into","try_into","try_into","try_into","try_into","try_into","type_id","type_id","type_id","type_id","type_id","type_id","type_id","type_id","warn"],"q":[[0,"log"],[170,"core::fmt"],[171,"core::cmp"],[172,"core::option"],[173,"core::result"],[174,"core::hash"],[175,"core::iter::traits::iterator"],[176,"core::any"]],"i":[6,8,6,8,6,8,0,0,0,0,0,8,0,0,0,0,0,6,8,6,8,1,5,6,8,6,8,1,5,10,9,19,14,6,8,1,5,10,9,19,14,5,9,1,10,6,8,1,10,6,8,10,9,0,5
,9,12,6,6,8,8,10,9,14,0,1,5,1,5,12,6,6,8,8,1,5,10,9,19,19,14,14,6,8,1,5,10,9,19,14,6,8,6,8,10,9,0,6,8,1,5,10,9,19,14,6,8,1,5,10,9,1,5,12,0,0,0,6,8,0,1,5,1,5,1,5,5,9,6,6,8,8,10,9,0,0,0,0,1,5,10,9,8,6,0,6,8,1,5,10,9,19,14,6,8,1,5,10,9,19,14,6,8,1,5,10,9,19,14,0],"f":"`````````````````````{{{d{b}}}{{d{f}}}}{{{d{hj}}f}{{d{hj}}}}{{{d{l}}}{{d{n}}}}{{{d{A`}}}{{d{n}}}}{{{d{c}}}{{d{e}}}{}{}}0000000{{{d{hc}}}{{d{he}}}{}{}}0000000{{{d{j}}}b}{{{d{Ab}}}Ad}{{}j}{{}Ab}{{{d{l}}}l}{{{d{A`}}}A`}{{{d{b}}}b}{{{d{Ad}}}Ad}{{{d{l}}{d{l}}}Af}{{{d{A`}}{d{A`}}}Af}{{{d{Ad}}{d{Ad}}}Af}{{{d{Ab}}{d{Ab}}}Af}`98{{{d{Ah}}{d{Ad}}}Aj}{{{d{l}}{d{A`}}}Aj}{{{d{l}}{d{l}}}Aj}{{{d{A`}}{d{A`}}}Aj}{{{d{A`}}{d{l}}}Aj}{{{d{Ad}}{d{Ad}}}Aj}{{{d{Ab}}{d{Ab}}}Aj}{{{d{Al}}{d{Al}}}Aj}`{{{d{b}}}{{An{{d{n}}}}}}{{{d{hj}}{An{{d{n}}}}}{{d{hj}}}}10{{{d{Ah}}}B`}{{{d{l}}{d{hBb}}}Bd}0{{{d{A`}}{d{hBb}}}Bd}0{{{d{b}}{d{hBb}}}Bd}{{{d{j}}{d{hBb}}}Bd}{{{d{Ad}}{d{hBb}}}Bd}{{{d{Ab}}{d{hBb}}}Bd}{{{d{Bf}}{d{hBb}}}Bd}0{{{d{Al}}{d{hBb}}}Bd}0{cc{}}0000000{{{d{n}}}{{Bh{lc}}}{}}{{{d{n}}}{{Bh{A`c}}}{}}{{{d{l}}{d{hc}}}B`Bj}{{{d{A`}}{d{hc}}}B`Bj}{{{d{Ad}}{d{hc}}}B`Bj}{{{d{Ab}}{d{hc}}}B`Bj}`{ce{}{}}0000000{{}{{`{{Bn{}{{Bl{l}}}}}}}}{{}{{`{{Bn{}{{Bl{A`}}}}}}}}{{{d{b}}}l}{{{d{hj}}l}{{d{hj}}}}{{{d{Ad}}}l}{{{d{hAb}}l}{{d{hAb}}}}{{{d{b}}}{{An{C`}}}}{{{d{hj}}{An{C`}}}{{d{hj}}}}{{{d{Ah}}{d{b}}}B`}``{{}{{d{Ah}}}}{{}l}{{}A`}0{{{d{b}}}{{d{Ad}}}}{{{d{hj}}Ad}{{d{hj}}}}{{{d{b}}}{{An{{d{n}}}}}}{{{d{hj}}{An{{d{n}}}}}{{d{hj}}}}10{{}j}{{}Ab}{{{d{l}}{d{l}}}{{An{Af}}}}{{{d{l}}{d{A`}}}{{An{Af}}}}{{{d{A`}}{d{A`}}}{{An{Af}}}}{{{d{A`}}{d{l}}}{{An{Af}}}}{{{d{Ad}}{d{Ad}}}{{An{Af}}}}{{{d{Ab}}{d{Ab}}}{{An{Af}}}}{{{d{Ah}}}{{Bh{B`Bf}}}}0{A`B`}0{{{d{b}}}{{d{n}}}}{{{d{hj}}{d{n}}}{{d{hj}}}}{{{d{Ad}}}{{d{n}}}}{{{d{hAb}}{d{n}}}{{d{hAb}}}}{{{d{A`}}}{{An{l}}}}{{{d{l}}}A`}`{c{{Bh{e}}}{}{}}000000000000000{{{d{c}}}Cb{}}0000000`","D":"Kd","p":[[5,"Record",0],[1,"reference"],[5,"Arguments",170],[0,"mut"],[5,"RecordBuilder",0],[6,"Level",0],[1,"str"],[6,"LevelFilter",0],[5,"MetadataBuilder",0],[5,"Metadata",0],[6,"Ordering",171],[10,"Log",0],[1,"bool"],[5,"ParseLevelError",0],[6,"Option",172],[1,"unit"],[5,"Formatter",170],[8,"Result",170],[5,"SetLoggerError",0],[6,"Result",173],[10,"Hasher",174],[17,"Item"],[10,"Iterator",175],[1,"u32"],[5,"TypeId",176]],"r":[],"b":[[57,"impl-PartialEq%3CLevelFilter%3E-for-Level"],[58,"impl-PartialEq-for-Level"],[59,"impl-PartialEq-for-LevelFilter"],[60,"impl-PartialEq%3CLevel%3E-for-LevelFilter"],[70,"impl-Display-for-Level"],[71,"impl-Debug-for-Level"],[72,"impl-Debug-for-LevelFilter"],[73,"impl-Display-for-LevelFilter"],[78,"impl-Display-for-SetLoggerError"],[79,"impl-Debug-for-SetLoggerError"],[80,"impl-Debug-for-ParseLevelError"],[81,"impl-Display-for-ParseLevelError"],[128,"impl-PartialOrd-for-Level"],[129,"impl-PartialOrd%3CLevelFilter%3E-for-Level"],[130,"impl-PartialOrd-for-LevelFilter"],[131,"impl-PartialOrd%3CLevel%3E-for-LevelFilter"]],"c":"OjAAAAAAAAA=","e":"OzAAAAEAAFAACAAaAA8ALgAHADcAAQA6AAYARwALAFsABQCBAAUAkgAXAA=="}],\ 
+["rustc_hash",{"t":"FNNNNNNNNNNNNNNN","n":["FxHasher","borrow","borrow_mut","default","finish","from","into","try_from","try_into","type_id","write","write_u16","write_u32","write_u64","write_u8","write_usize"],"q":[[0,"rustc_hash"],[16,"core::result"],[17,"core::any"]],"i":[0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3],"f":"`{{{b{c}}}{{b{e}}}{}{}}{{{b{dc}}}{{b{de}}}{}{}}{{}f}{{{b{f}}}h}{cc{}}{ce{}{}}{c{{j{e}}}{}{}}0{{{b{c}}}l{}}{{{b{df}}{b{{A`{n}}}}}Ab}{{{b{df}}Ad}Ab}{{{b{df}}Af}Ab}{{{b{df}}h}Ab}{{{b{df}}n}Ab}{{{b{df}}Ah}Ab}","D":"h","p":[[1,"reference"],[0,"mut"],[5,"FxHasher",0],[1,"u64"],[6,"Result",16],[5,"TypeId",17],[1,"u8"],[1,"slice"],[1,"unit"],[1,"u16"],[1,"u32"],[1,"usize"]],"r":[],"b":[],"c":"OjAAAAAAAAA=","e":"OzAAAAEAAAwAAgACAAMACAAIAA=="}]\ +]')); +if (typeof exports !== 'undefined') exports.searchIndex = searchIndex; +else if (window.initSearch) window.initSearch(searchIndex); diff --git a/search.desc/cactusref/cactusref-desc-0-.js b/search.desc/cactusref/cactusref-desc-0-.js new file mode 100644 index 000000000..f25943454 --- /dev/null +++ b/search.desc/cactusref/cactusref-desc-0-.js @@ -0,0 +1 @@ +searchState.loadedDescShard("cactusref", 0, "Single-threaded, cycle-aware, reference-counting pointers. …\nBuild a graph of linked Rc smart pointers to enable …\nCactus alias for Rc.\nCactus alias for Weak.\nA single-threaded reference-counting pointer. ‘Rc’ …\nWeak is a version of Rc that holds a non-owning reference …\nPerform bookkeeping to record that this has an owned …\nPerform bookkeeping to record that this has an owned …\nProvides a raw pointer to the data.\nReturns a raw pointer to the object T pointed to by this …\nConverts to Rc<T>.\nMakes a clone of the Rc pointer.\nMakes a clone of the Weak pointer that points to the same …\nComparison for two Rcs.\nDecrements the strong reference count on the Rc<T> …\nCreates a new Rc<T>, with the Default value for T.\nConstructs a new Weak<T>, without allocating any memory. …\nCreates a new Weak pointer to this allocation.\nDrops the Rc.\nDrops the Weak pointer.\nEquality for two Rcs.\nMove a boxed object to a new, reference counted, …\nConverts a generic type T into a Rc<T>\nReturns the argument unchanged.\nReturns the argument unchanged.\nConstructs an Rc<T> from a raw pointer.\nConverts a raw pointer previously created by into_raw back …\n‘Greater than or equal to’ comparison for two Rcs.\nReturns a mutable reference into the given Rc, if there are\nReturns a mutable reference into the given Rc, without any …\nGreater-than comparison for two Rcs.\nExamples of implementing self-referential data structures …\nIncrements the strong reference count on the Rc<T> …\nCalls U::from(self).\nCalls U::from(self).\nConsumes the Rc, returning the wrapped pointer.\nConsumes the Weak<T> and turns it into a raw pointer.\n‘Less than or equal to’ comparison for two Rcs.\nLess-than comparison for two Rcs.\nMakes a mutable reference into the given Rc.\nInequality for two Rcs.\nConstructs a new Rc<T>.\nConstructs a new Weak<T>, without allocating any memory. …\nConstructs a new Rc with uninitialized contents.\nPartial comparison for two Rcs.\nConstructs a new Pin<Rc<T>>. 
If T does not implement Unpin…\nReturns true if the two Rcs point to the same allocation …\nReturns true if the two Weaks point to the same allocation …\nGets the number of strong (Rc) pointers to this allocation.\nGets the number of strong (Rc) pointers pointing to this …\nReturns the inner value, if the Rc has exactly one strong …\nPerform bookkeeping to record that this has removed an …\nPerform bookkeeping to record that this has removed an …\nAttempts to upgrade the Weak pointer to an Rc, delaying …\nGets the number of Weak pointers to this allocation.\nGets the number of Weak pointers pointing to this …") \ No newline at end of file diff --git a/search.desc/hashbrown/hashbrown-desc-0-.js b/search.desc/hashbrown/hashbrown-desc-0-.js new file mode 100644 index 000000000..dac46e16f --- /dev/null +++ b/search.desc/hashbrown/hashbrown-desc-0-.js @@ -0,0 +1 @@ +searchState.loadedDescShard("hashbrown", 0, "This crate is a Rust port of Google’s high-performance …\nThe memory allocator returned an error\nError due to the computed capacity exceeding the collection…\nKey equivalence trait.\nA hash map implemented with quadratic probing and SIMD …\nA hash set implemented as a HashMap where the value is ().\nLow-level hash table with explicit hashing.\nThe error type for try_reserve methods.\nChecks if this value is equivalent to the given key.\nReturns the argument unchanged.\nA hash map implemented with quadratic probing and SIMD …\nA hash set implemented as a HashMap where the value is ().\nA hash table implemented with quadratic probing and SIMD …\nCalls U::from(self).\nThe layout of the allocation request that failed.\nDummy default hasher for HashMap.\nA draining iterator over the entries of a HashMap in …\nA view into a single entry in a map, which may either be …\nA view into a single entry in a map, which may either be …\nA draining iterator over entries of a HashMap which don’…\nA hash map implemented with quadratic probing and SIMD …\nAn owning iterator over the entries of a HashMap in …\nAn owning iterator over the keys of a HashMap in arbitrary …\nAn owning iterator over the values of a HashMap in …\nAn iterator over the entries of a HashMap in arbitrary …\nA mutable iterator over the entries of a HashMap in …\nAn iterator over the keys of a HashMap in arbitrary order. …\nAn occupied entry.\nAn occupied entry.\nAn occupied entry.\nA view into an occupied entry in a HashMap. It is part of …\nA view into an occupied entry in a HashMap. It is part of …\nThe error returned by try_insert when the key already …\nA builder for computing where in a HashMap a key-value …\nA builder for computing where in a HashMap a key-value …\nA view into a single entry in a map, which may either be …\nA view into an occupied entry in a HashMap. It is part of …\nA view into a vacant entry in a HashMap. It is part of the …\nA vacant entry.\nA vacant entry.\nA vacant entry.\nA view into a vacant entry in a HashMap. It is part of the …\nA view into a vacant entry in a HashMap. 
It is part of the …\nAn iterator over the values of a HashMap in arbitrary …\nA mutable iterator over the values of a HashMap in …\nReturns a reference to the underlying allocator.\nProvides in-place mutable access to an occupied entry …\nProvides in-place mutable access to an occupied entry …\nProvides in-place mutable access to an occupied entry …\nProvides shared access to the key and owned access to the …\nProvides shared access to the key and owned access to the …\nProvides shared access to the key and owned access to the …\nReturns the number of elements the map can hold without …\nClears the map, removing all key-value pairs. Keeps the …\nReturns true if the map contains a value for the specified …\nCreates an empty HashMap<K, V, S, A>, with the Default …\nClears the map, returning all key-value pairs as an …\nGets the given key’s corresponding entry in the map for …\nThe entry in the map that was already occupied.\nGets the given key’s corresponding entry by reference in …\nInserts all new key-values from the iterator to existing …\nInserts all new key-values from the iterator to existing …\nInserts all new key-values from the iterator to existing …\nDrains elements which are true under the given predicate, …\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nCreates a RawEntryMut from the given hash and matching …\nAccess an immutable entry by hash and matching function.\nCreates a RawEntryMut from the given key.\nAccess an immutable entry by key.\nCreates a RawEntryMut from the given key and its hash.\nAccess an immutable entry by a key and its hash.\nReturns a reference to the value corresponding to the key.\nGets a reference to the value in the entry.\nGets a reference to the value in the entry.\nGets a reference to the value in the entry.\nReturns the key-value pair corresponding to the supplied …\nGets a reference to the key and value in the entry.\nReturns the key-value pair corresponding to the supplied …\nGets a mutable reference to the key and value in the entry.\nAttempts to get mutable references to N values in the map …\nAttempts to get mutable references to N values in the map …\nAttempts to get mutable references to N values in the map …\nAttempts to get mutable references to N values in the map …\nReturns a mutable reference to the value corresponding to …\nGets a mutable reference to the value in the entry.\nGets a mutable reference to the value in the entry.\nGets a mutable reference to the value in the entry.\nReturns a reference to the map’s BuildHasher.\nReturns a reference to the value corresponding to the …\nInserts a key-value pair into the map.\nSets the value of the entry, and returns a …\nSets the value of the entry, and returns the entry’s old …\nSets the value of the entry with the VacantEntry’s key, …\nSets the value of the 
entry, and returns an OccupiedEntry.\nSets the value of the entry, and returns the entry’s old …\nSets the value of the entry with the VacantEntry’s key, …\nSets the value of the entry, and returns an …\nSets the value of the entry, and returns the entry’s old …\nSets the value of the entry with the VacantEntryRef’s …\nSets the value of the entry with the VacantEntry’s key, …\nSets the value of the entry, and returns the entry’s old …\nInsert a key-value pair into the map without checking if …\nSet the value of an entry with a custom hasher function.\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCreates a consuming iterator, that is, one that moves each …\nCreates an iterator over the entries of a HashMap in …\nCreates an iterator over the entries of a HashMap in …\nConverts the entry into a mutable reference to the key in …\nTake ownership of the key.\nTake ownership of the key.\nConverts the OccupiedEntry into a mutable reference to the …\nCreates a consuming iterator visiting all the keys in …\nConverts the OccupiedEntry into a mutable reference to the …\nConverts the OccupiedEntry into a mutable reference to the …\nConverts the OccupiedEntryRef into a mutable reference to …\nCreates a consuming iterator visiting all the values in …\nReturns true if the map contains no elements.\nAn iterator visiting all key-value pairs in arbitrary …\nAn iterator visiting all key-value pairs in arbitrary …\nGets a reference to the key in the entry.\nReturns a reference to this entry’s key.\nGets a reference to the key in the entry.\nGets a reference to the key that would be used when …\nReturns a reference to this entry’s key.\nGets a reference to the key in the entry.\nGets a reference to the key that would be used when …\nGets a mutable reference to the key in the entry.\nAn iterator visiting all keys in arbitrary order. The …\nReturns the number of elements in the map.\nEnsures a value is in the entry by inserting the default …\nEnsures a value is in the entry by inserting the default …\nEnsures a value is in the entry by inserting the default …\nEnsures a value is in the entry by inserting the default …\nEnsures a value is in the entry by inserting the default …\nEnsures a value is in the entry by inserting the result of …\nEnsures a value is in the entry by inserting the result of …\nEnsures a value is in the entry by inserting the result of …\nEnsures a value is in the entry by inserting, if empty, …\nEnsures a value is in the entry by inserting, if empty, …\nCreates a raw immutable entry builder for the HashMap.\nCreates a raw entry builder for the HashMap.\nRemoves a key from the map, returning the value at the key …\nTakes the value out of the entry, and returns it.\nTakes the value out of the entry, and returns it. Keeps …\nTakes the value out of the entry, and returns it. Keeps …\nRemoves a key from the map, returning the stored key and …\nTake the ownership of the key and value from the map.\nTake the ownership of the key and value from the map. …\nTake the ownership of the key and value from the map. 
…\nReplaces the entry, returning the old key and value. The …\nReplaces the entry, returning the old key and value. The …\nProvides shared access to the key and owned access to the …\nProvides shared access to the key and owned access to the …\nProvides shared access to the key and owned access to the …\nReplaces the key in the hash map with the key used to …\nReplaces the key in the hash map with the key used to …\nReserves capacity for at least additional more elements to …\nRetains only the elements specified by the predicate. …\nShrinks the capacity of the map with a lower limit. It …\nShrinks the capacity of the map as much as possible. It …\nTries to insert a key-value pair into the map, and returns …\nTries to reserve capacity for at least additional more …\nThe value which was not inserted, because the entry was …\nAn iterator visiting all values in arbitrary order. The …\nAn iterator visiting all values mutably in arbitrary order.\nCreates an empty HashMap with the specified capacity, …\nCreates an empty HashMap with the specified capacity, …\nCreates an empty HashMap which will use the given hash …\nCreates an empty HashMap which will use the given hash …\nA lazy iterator producing elements in the difference of …\nA draining iterator over the items of a HashSet.\nA view into a single entry in a set, which may either be …\nA draining iterator over entries of a HashSet which don’…\nA hash set implemented as a HashMap where the value is ().\nA lazy iterator producing elements in the intersection of …\nAn owning iterator over the items of a HashSet.\nAn iterator over the items of a HashSet.\nAn occupied entry.\nA view into an occupied entry in a HashSet. It is part of …\nA lazy iterator producing elements in the symmetric …\nA lazy iterator producing elements in the union of HashSet…\nA vacant entry.\nA view into a vacant entry in a HashSet. 
It is part of the …\nReturns a reference to the underlying allocator.\nReturns the intersection of self and rhs as a new …\nReturns the union of self and rhs as a new HashSet<T, S>.\nReturns the symmetric difference of self and rhs as a new …\nReturns the number of elements the set can hold without …\nClears the set, removing all values.\nReturns true if the set contains a value.\nCreates an empty HashSet<T, S> with the Default value for …\nVisits the values representing the difference, i.e., the …\nClears the set, returning all elements in an iterator.\nGets the given value’s corresponding entry in the set …\nDrains elements which are true under the given predicate, …\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns a reference to the value in the set, if any, that …\nReturns a reference to this entry’s value.\nGets a reference to the value in the entry.\nGets a reference to the value that would be used when …\nInserts the given value into the set if it is not present, …\nInserts an owned copy of the given value into the set if …\nInserts a value computed from f into the set if the given …\nReturns a reference to the set’s BuildHasher.\nAdds a value to the set.\nSets the value of the entry, and returns an OccupiedEntry.\nSets the value of the entry with the VacantEntry’s value.\nInsert a value the set without checking if the value …\nVisits the values representing the intersection, i.e., the …\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCreates a consuming iterator, that is, one that moves each …\nTake ownership of the value.\nReturns true if self has no elements in common with other. …\nReturns true if the set contains no elements.\nReturns true if the set is a subset of another, i.e., other…\nReturns true if the set is a superset of another, i.e., …\nAn iterator visiting all elements in arbitrary order. The …\nReturns the number of elements in the set.\nEnsures a value is in the entry by inserting if it was …\nRemoves a value from the set. Returns whether the value was\nTakes the value out of the entry, and returns it. Keeps …\nAdds a value to the set, replacing the existing value, if …\nReplaces the entry, returning the old value. The new value …\nReserves capacity for at least additional more elements to …\nRetains only the elements specified by the predicate.\nShrinks the capacity of the set with a lower limit. It …\nShrinks the capacity of the set as much as possible. 
It …\nReturns the difference of self and rhs as a new …\nVisits the values representing the symmetric difference, …\nRemoves and returns the value in the set, if any, that is …\nTries to reserve capacity for at least additional more …\nVisits the values representing the union, i.e., all the …\nCreates an empty HashSet with the specified capacity, using\nCreates an empty HashSet with the specified capacity, using\nCreates a new empty hash set which will use the given …\nCreates a new empty hash set which will use the given …\nType representing the absence of an entry, as returned by …\nA draining iterator over the items of a HashTable.\nA view into a single entry in a table, which may either be …\nA draining iterator over entries of a HashTable which don…\nLow-level hash table with explicit hashing.\nAn owning iterator over the entries of a HashTable in …\nAn iterator over the entries of a HashTable in arbitrary …\nA mutable iterator over the entries of a HashTable in …\nAn occupied entry.\nA view into an occupied entry in a HashTable. It is part …\nA vacant entry.\nA view into a vacant entry in a HashTable. It is part of …\nReturns a reference to the underlying allocator.\nProvides in-place mutable access to an occupied entry …\nReturns the number of elements the table can hold without …\nClears the table, removing all values.\nClears the set, returning all elements in an iterator.\nReturns an Entry for an entry in the table with the given …\nDrains elements which are true under the given predicate, …\nReturns a reference to an entry in the table with the …\nReturns an OccupiedEntry for an entry in the table with …\nReturns a mutable reference to an entry in the table with …\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nGets a reference to the value in the entry.\nAttempts to get mutable references to N values in the map …\nAttempts to get mutable references to N values in the map …\nGets a mutable reference to the value in the entry.\nSets the value of the entry, replacing any existing value …\nInserts a new element into the table with the hash that …\nInserts an element into the HashTable with the given hash …\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nConverts the OccupiedEntry into a mutable reference to the …\nConverts the OccupiedEntry into a mutable reference to the …\nConverts the VacantEntry into a mutable reference to the …\nConverts the AbsentEntry into a mutable reference to the …\nReturns true if the set contains no elements.\nAn iterator visiting all elements in arbitrary order. The …\nAn iterator visiting all elements in arbitrary order, with …\nReturns the number of elements in the table.\nCreates an empty HashTable.\nCreates an empty HashTable using the given allocator.\nEnsures a value is in the entry by inserting if it was …\nEnsures a value is in the entry by inserting the result of …\nTakes the value out of the entry, and returns it along …\nReserves capacity for at least additional more elements to …\nRetains only the elements specified by the predicate.\nShrinks the capacity of the table with a lower limit. 
It …\nShrinks the capacity of the table as much as possible. It …\nTries to reserve capacity for at least additional more …\nCreates an empty HashTable with the specified capacity.\nCreates an empty HashTable with the specified capacity …") \ No newline at end of file diff --git a/search.desc/log/log-desc-0-.js b/search.desc/log/log-desc-0-.js new file mode 100644 index 000000000..c7570e6da --- /dev/null +++ b/search.desc/log/log-desc-0-.js @@ -0,0 +1 @@ +searchState.loadedDescShard("log", 0, "A lightweight logging facade.\nThe “debug” level.\nCorresponds to the Debug log level.\nThe “error” level.\nCorresponds to the Error log level.\nThe “info” level.\nCorresponds to the Info log level.\nAn enum representing the available verbosity levels of the …\nAn enum representing the available verbosity level filters …\nA trait encapsulating the operations required of a logger.\nMetadata about a log message.\nBuilder for Metadata.\nA level lower than all log levels.\nThe type returned by from_str when the string doesn’t …\nThe “payload” of a log message.\nBuilder for Record.\nThe statically resolved maximum log level.\nThe type returned by set_logger if set_logger has already …\nThe “trace” level.\nCorresponds to the Trace log level.\nThe “warn” level.\nCorresponds to the Warn log level.\nThe message body.\nSet args.\nReturns the string representation of the Level.\nReturns the string representation of the LevelFilter.\nInvoke the builder and return a Record\nReturns a Metadata object.\nReturns a new builder.\nReturns a new builder.\nLogs a message at the debug level.\nDetermines if a log message with the specified metadata …\nLogs a message at the error level.\nThe source file containing the message.\nSet file\nThe source file containing the message, if it is a 'static …\nSet file to a 'static string.\nFlushes any buffered records.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nReturns the argument unchanged.\nLogs a message at the info level.\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nCalls U::from(self).\nIterate through all supported logging levels.\nIterate through all supported filtering levels.\nThe verbosity level of the message.\nSet Metadata::level.\nThe verbosity level of the message.\nSetter for level.\nThe line containing the message.\nSet line\nLogs the Record.\nThe standard logging macro.\nDetermines if a message logged at the specified level in …\nReturns a reference to the logger.\nReturns the most verbose logging level.\nReturns the most verbose logging level filter.\nReturns the current maximum log level.\nMetadata about the log directive.\nSet metadata. 
Construct a Metadata object with …\nThe module path of the message.\nSet module_path\nThe module path of the message, if it is a 'static string.\nSet module_path to a 'static string\nConstruct new RecordBuilder.\nConstruct a new MetadataBuilder.\nSets the global logger to a &'static Log.\nA thread-unsafe version of set_logger.\nSets the global maximum log level.\nA thread-unsafe version of set_max_level.\nThe name of the target of the directive.\nSet Metadata::target\nThe name of the target of the directive.\nSetter for target.\nConverts self to the equivalent Level.\nConverts the Level to the equivalent LevelFilter.\nLogs a message at the trace level.\nLogs a message at the warn level.") \ No newline at end of file diff --git a/search.desc/rustc_hash/rustc_hash-desc-0-.js b/search.desc/rustc_hash/rustc_hash-desc-0-.js new file mode 100644 index 000000000..91ec6e9b6 --- /dev/null +++ b/search.desc/rustc_hash/rustc_hash-desc-0-.js @@ -0,0 +1 @@ +searchState.loadedDescShard("rustc_hash", 0, "Fast, non-cryptographic hash used by rustc and Firefox.\nA speedy hash algorithm for use within rustc. The hashmap …\nReturns the argument unchanged.\nCalls U::from(self).") \ No newline at end of file diff --git a/settings.html b/settings.html new file mode 100644 index 000000000..026891450 --- /dev/null +++ b/settings.html @@ -0,0 +1 @@ +Settings

Rustdoc settings

Back
\ No newline at end of file diff --git a/src-files.js b/src-files.js new file mode 100644 index 000000000..a7d0bdb31 --- /dev/null +++ b/src-files.js @@ -0,0 +1,7 @@ +var srcIndex = new Map(JSON.parse('[\ +["cactusref",["",[["doc",[],["implementing_self_referential_data_structures.rs"]]],["adopt.rs","cycle.rs","drop.rs","hash.rs","lib.rs","link.rs","rc.rs"]]],\ +["hashbrown",["",[["external_trait_impls",[],["mod.rs"]],["raw",[],["alloc.rs","bitmask.rs","mod.rs","sse2.rs"]]],["lib.rs","macros.rs","map.rs","scopeguard.rs","set.rs","table.rs"]]],\ +["log",["",[],["__private_api.rs","lib.rs","macros.rs"]]],\ +["rustc_hash",["",[],["lib.rs"]]]\ +]')); +createSrcSidebar(); diff --git a/src/cactusref/adopt.rs.html b/src/cactusref/adopt.rs.html new file mode 100644 index 000000000..048212369 --- /dev/null +++ b/src/cactusref/adopt.rs.html @@ -0,0 +1,497 @@ +adopt.rs - source
+
use core::ptr;
+
+use crate::link::Link;
+use crate::Rc;
+
+mod sealed {
+    use crate::Rc;
+
+    #[doc(hidden)]
+    pub trait Sealed {}
+
+    impl<T> Sealed for Rc<T> {}
+}
+
+/// Build a graph of linked [`Rc`] smart pointers to enable busting cycles on
+/// drop.
+///
+/// Calling [`adopt_unchecked`] builds an object graph which can be used to
+/// detect cycles.
+///
+/// # Safety
+///
+/// Implementors of this trait must ensure that the bookkeeping edges in the object
+/// graph are correct because these links are used to determine whether an `Rc`
+/// is reachable in `Rc`'s `Drop` implementation. Failure to properly bookkeep
+/// the object graph will result in *[undefined behavior]*.
+///
+/// Undefined behavior may include:
+///
+/// - Memory leaks.
+/// - Double-frees.
+/// - Dangling `Rc`s which will cause a use after free.
+///
+/// [`adopt_unchecked`]: Adopt::adopt_unchecked
+/// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
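+///
+/// # Example
+///
+/// A minimal sketch of pairing the two bookkeeping calls, using only APIs from
+/// this crate: the adoption is recorded when an owned clone is stored and
+/// reversed when that clone is removed.
+///
+/// ```rust
+/// use std::cell::RefCell;
+///
+/// use cactusref::{Adopt, Rc};
+///
+/// #[derive(Default)]
+/// struct Node {
+///     links: Vec<Rc<RefCell<Node>>>,
+/// }
+///
+/// let this = Rc::new(RefCell::new(Node::default()));
+/// let other = Rc::new(RefCell::new(Node::default()));
+///
+/// // `this` now owns a clone of `other`, so record the adoption.
+/// this.borrow_mut().links.push(Rc::clone(&other));
+/// unsafe {
+///     Rc::adopt_unchecked(&this, &other);
+/// }
+///
+/// // The owned clone is removed, so reverse the bookkeeping to avoid a leak.
+/// let removed = this.borrow_mut().links.pop().unwrap();
+/// Rc::unadopt(&this, &removed);
+/// drop(removed);
+/// ```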
+pub unsafe trait Adopt: sealed::Sealed {
+    /// Perform bookkeeping to record that `this` has an owned reference to
+    /// `other`.
+    ///
+    /// Adoption is a one-way link, or a directed edge in the object graph which
+    /// means "`this` owns `other`".
+    ///
+    /// `adopt` can be called multiple times for a pair of `Rc`s. Each call to
+    /// `adopt` indicates that `this` owns one distinct clone of `other`.
+    ///
+    /// This is an associated function that needs to be used as
+    /// `Adopt::adopt_unchecked(...)`. A method would interfere with methods of the same
+    /// name on the contents of an `Rc` used through `Deref`.
+    ///
+    /// # Safety
+    ///
+    /// Callers must ensure that `this` owns a strong reference to `other`.
+    ///
+    /// Callers should call [`unadopt`] when `this` no longer holds a strong
+    /// reference to `other` to avoid memory leaks, but this is not required for
+    /// soundness.
+    ///
+    /// [`unadopt`]: Adopt::unadopt
+    unsafe fn adopt_unchecked(this: &Self, other: &Self);
+
+    /// Perform bookkeeping to record that `this` has removed an owned reference
+    /// to `other`.
+    ///
+    /// Adoption is a one-way link, or a directed edge in the object graph which
+    /// means "`this` owns `other`".
+    ///
+    /// This is an associated function that needs to be used as
+    /// `Adopt::unadopt(...)`. A method would interfere with methods of the same
+    /// name on the contents of an `Rc` used through `Deref`.
+    ///
+    /// # Memory Leaks
+    ///
+    /// Failure to call this function when removing an owned `Rc` from `this`
+    /// is safe, but may result in a memory leak.
+    fn unadopt(this: &Self, other: &Self);
+}
+
+/// Implementation of [`Adopt`] for [`Rc`] which enables `Rc`s to form a cycle
+/// of strong references that are reaped by `Rc`'s [`Drop`] implementation.
+unsafe impl<T> Adopt for Rc<T> {
+    /// Perform bookkeeping to record that `this` has an owned reference to
+    /// `other`.
+    ///
+    /// Adoption is a one-way link, or a directed edge in the object graph which
+    /// means "`this` owns `other`".
+    ///
+    /// `adopt` can be called multiple times for a pair of `Rc`s. Each call to
+    /// `adopt` indicates that `this` owns one distinct clone of `other`.
+    ///
+    /// This is an associated function that needs to be used as
+    /// `Rc::adopt_unchecked(...)`. A method would interfere with methods of the same
+    /// name on the contents of an `Rc` used through `Deref`.
+    ///
+    /// # Safety
+    ///
+    /// Callers must ensure that `this` owns a strong reference to `other`.
+    ///
+    /// Callers should call [`unadopt`] when `this` no longer holds a strong
+    /// reference to `other` to avoid memory leaks, but this is not required for
+    /// soundness.
+    ///
+    /// Calling `adopt` does not increment the strong count of `other`. Callers
+    /// must ensure that `other` has been cloned and stored in the `T` contained
+    /// by `this`.
+    ///
+    /// # Examples
+    ///
+    /// The following implements a self-referential array.
+    ///
+    /// ```rust
+    /// use cactusref::{Adopt, Rc};
+    /// use std::cell::RefCell;
+    ///
+    /// #[derive(Default)]
+    /// struct Array {
+    ///     buffer: Vec<Rc<RefCell<Self>>>,
+    /// }
+    ///
+    /// let array = Rc::new(RefCell::new(Array::default()));
+    /// for _ in 0..10 {
+    ///     let item = Rc::clone(&array);
+    ///     unsafe {
+    ///         Rc::adopt_unchecked(&array, &item);
+    ///     }
+    ///     array.borrow_mut().buffer.push(item);
+    /// }
+    /// let weak = Rc::downgrade(&array);
+    /// // 1 for the array binding, 10 for the `Rc`s in buffer
+    /// assert_eq!(Rc::strong_count(&array), 11);
+    /// drop(array);
+    /// assert!(weak.upgrade().is_none());
+    /// assert_eq!(weak.weak_count(), 0);
+    /// ```
+    ///
+    /// [`unadopt`]: Rc::unadopt
+    unsafe fn adopt_unchecked(this: &Self, other: &Self) {
+        // Self-adoptions are tracked with a loopback link.
+        if ptr::eq(this, other) {
+            // Store a loopback reference to `other` in `this`. This bookkeeping
+            // logs a strong reference and is used for discovering cycles.
+            //
+            // SAFETY: `this` is a live `Rc` so the `links` on its inner
+            // allocation are an inhabited `MaybeUninit`.
+            let mut links = this.inner().links().borrow_mut();
+            links.insert(Link::loopback(other.ptr));
+            return;
+        }
+        // Store a forward reference to `other` in `this`. This bookkeeping logs
+        // a strong reference and is used for discovering cycles.
+        //
+        // SAFETY: `this` is a live `Rc` so the `links` on its inner allocation
+        // are an inhabited `MaybeUninit`.
+        let mut links = this.inner().links().borrow_mut();
+        links.insert(Link::forward(other.ptr));
+        // `this` and `other` may point to the same allocation. Drop the borrow
+        // on `links` before accessing `other` to avoid an already borrowed error
+        // from the `RefCell`.
+        drop(links);
+        // Store a backward reference to `this` in `other`. This bookkeeping is
+        // used for discovering cycles.
+        //
+        // SAFETY: `this` is a live `Rc` so the `links` on its inner allocation
+        // are an inhabited `MaybeUninit`.
+        let mut links = other.inner().links().borrow_mut();
+        links.insert(Link::backward(this.ptr));
+    }
+
+    /// Perform bookkeeping to record that `this` has removed an owned reference
+    /// to `other`.
+    ///
+    /// Adoption is a one-way link, or a directed edge in the object graph which
+    /// means "`this` owns `other`".
+    ///
+    /// This is an associated function that needs to be used as
+    /// `Adopt::unadopt(...)`. A method would interfere with methods of the same
+    /// name on the contents of an `Rc` used through `Deref`.
+    ///
+    /// # Memory Leaks
+    ///
+    /// Failure to call this function when removing an owned `Rc` from `this`
+    /// is safe, but may result in a memory leak.
+    ///
+    /// # Examples
+    ///
+    /// The following implements a self-referential array.
+    ///
+    /// ```rust
+    /// use cactusref::{Adopt, Rc};
+    /// use std::cell::RefCell;
+    ///
+    /// #[derive(Default)]
+    /// struct Array {
+    ///     buffer: Vec<Rc<RefCell<Self>>>,
+    /// }
+    ///
+    /// let array = Rc::new(RefCell::new(Array::default()));
+    /// for _ in 0..10 {
+    ///     let item = Rc::clone(&array);
+    ///     unsafe {
+    ///         Rc::adopt_unchecked(&array, &item);
+    ///     }
+    ///     array.borrow_mut().buffer.push(item);
+    /// }
+    /// let weak = Rc::downgrade(&array);
+    /// // 1 for the array binding, 10 for the `Rc`s in buffer
+    /// assert_eq!(Rc::strong_count(&array), 11);
+    ///
+    /// let head = array.borrow_mut().buffer.pop().unwrap();
+    /// Rc::unadopt(&array, &head);
+    ///
+    /// drop(head);
+    /// assert_eq!(Rc::strong_count(&array), 10);
+    /// drop(array);
+    /// assert!(weak.upgrade().is_none());
+    /// assert_eq!(weak.weak_count(), 0);
+    /// ```
+    fn unadopt(this: &Self, other: &Self) {
+        // Self-adoptions are tracked with a loopback link.
+        if ptr::eq(this, other) {
+            // Remove a loopback reference to `other` in `this`. This bookkeeping
+            // logs a strong reference and is used for discovering cycles.
+            //
+            // SAFETY: `this` is a live `Rc` so the `links` on its inner
+            // allocation are an inhabited `MaybeUninit`.
+            let mut links = unsafe { this.inner().links().borrow_mut() };
+            links.remove(Link::loopback(other.ptr), 1);
+            return;
+        }
+        // Remove a forward reference to `other` in `this`. This bookkeeping
+        // removes a strong reference and is used for discovering cycles.
+        //
+        // SAFETY: `this` is a live `Rc` so the `links` on its inner allocation
+        // are an inhabited `MaybeUninit`.
+        let mut links = unsafe { this.inner().links().borrow_mut() };
+        links.remove(Link::forward(other.ptr), 1);
+        // `this` and `other` may point to the same allocation. Drop the borrow
+        // on `links` before accessing `other` to avoid an already borrowed error
+        // from the `RefCell`.
+        drop(links);
+        // Remove a backward reference to `this` in `other`. This bookkeeping is
+        // used for discovering cycles.
+        //
+        // SAFETY: `this` is a live `Rc` so the `links` on its inner allocation
+        // are an inhabited `MaybeUninit`.
+        let mut links = unsafe { other.inner().links().borrow_mut() };
+        links.remove(Link::backward(this.ptr), 1);
+    }
+}
+
\ No newline at end of file diff --git a/src/cactusref/cycle.rs.html b/src/cactusref/cycle.rs.html new file mode 100644 index 000000000..d61baca80 --- /dev/null +++ b/src/cactusref/cycle.rs.html @@ -0,0 +1,205 @@ +cycle.rs - source
+
use alloc::vec;
+
+use crate::hash::{HashMap, HashSet};
+use crate::link::{Kind, Link};
+use crate::rc::RcInnerPtr;
+use crate::Rc;
+
+impl<T> Rc<T> {
+    /// Traverse the linked object graph from the given `Rc` to determine if the
+    /// graph is not externally reachable.
+    ///
+    /// Cycles are discovered using breadth-first search of the graph's adopted
+    /// links.
+    ///
+    /// If this function returns `Some(_)`, the graph of `Rc`s would leak using
+    /// `std::rc::Rc`.
+    ///
+    /// This function returns a hash map of forward links to the number of times
+    /// the link appears in the cycle.
+    ///
+    /// This function is invoked during `drop` to determine which strategy to use
+    /// for deallocating a group of `Rc`s.
+    pub(crate) fn orphaned_cycle(this: &Self) -> Option<HashMap<Link<T>, usize>> {
+        let cycle = cycle_refs(Link::forward(this.ptr));
+        if cycle.is_empty() {
+            return None;
+        }
+        let has_external_owners = cycle
+            .iter()
+            .any(|(item, &cycle_owned_refs)| item.strong() > cycle_owned_refs);
+        if has_external_owners {
+            None
+        } else {
+            Some(cycle)
+        }
+    }
+}
+
+// Perform a breadth first search over all of the forward and backward links to
+// determine the clique of nodes in a cycle and their strong counts.
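+//
+// As a worked example (given the bookkeeping in `adopt.rs`): if `a` adopts `b`
+// once and `b` adopts `a` once, then `a`'s links are `{forward(b): 1,
+// backward(b): 1}` and `b`'s links are `{forward(a): 1, backward(a): 1}`.
+// Starting from `forward(a)`, the search below returns
+// `{forward(a): 1, forward(b): 1}`: each node in the two-node cycle is owned
+// exactly once by the cycle itself, which is what `orphaned_cycle` compares
+// against each node's strong count.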
+fn cycle_refs<T>(this: Link<T>) -> HashMap<Link<T>, usize> {
+    // These collections are used to compute the layout of the object graph in linear
+    // time in the size of the graph.
+    let mut cycle_owned_refs = HashMap::default();
+    let mut discovered = vec![this];
+    let mut visited = HashSet::default();
+
+    // crawl the graph
+    while let Some(node) = discovered.pop() {
+        if visited.contains(&node) {
+            continue;
+        }
+        visited.insert(node);
+
+        let links = unsafe { node.as_ref().links().borrow() };
+        for (&link, &strong) in links.iter() {
+            if let Kind::Forward | Kind::Loopback = link.kind() {
+                cycle_owned_refs
+                    .entry(link)
+                    .and_modify(|count| *count += strong)
+                    .or_insert(strong);
+                discovered.push(link);
+            } else {
+                cycle_owned_refs.entry(link.as_forward()).or_default();
+            }
+        }
+    }
+
+    #[cfg(debug_assertions)]
+    debug_cycle(&cycle_owned_refs);
+    cycle_owned_refs
+}
+
+#[cfg(debug_assertions)]
+fn debug_cycle<T>(cycle: &HashMap<Link<T>, usize>) {
+    use alloc::vec::Vec;
+
+    if cycle.is_empty() {
+        trace!("cactusref reachability test found no cycles");
+        return;
+    }
+
+    let counts = cycle
+        .iter()
+        .map(|(item, cycle_count)| (item.as_ref().strong(), cycle_count))
+        .collect::<Vec<_>>();
+    let has_external_owners = cycle
+        .iter()
+        .any(|(item, &cycle_owned_refs)| item.strong() > cycle_owned_refs);
+
+    if has_external_owners {
+        trace!(
+            "cactusref reachability test found externally owned cycle with (strong, cycle) counts: {:?}",
+            counts
+        );
+    } else {
+        trace!(
+            "cactusref reachability test found unreachable cycle  with (strong, cycle) counts: {:?}",
+            counts
+        );
+    }
+}
+
\ No newline at end of file diff --git a/src/cactusref/doc/implementing_self_referential_data_structures.rs.html b/src/cactusref/doc/implementing_self_referential_data_structures.rs.html new file mode 100644 index 000000000..fabdcc698 --- /dev/null +++ b/src/cactusref/doc/implementing_self_referential_data_structures.rs.html @@ -0,0 +1,237 @@ +implementing_self_referential_data_structures.rs - source
+
//! `CactusRef` can be used to implement collections that own strong references
+//! to themselves.
+//!
+//! # Doubly-linked List
+//!
+//! The following implements a doubly-linked list that is fully deallocated once
+//! the `list` binding is dropped.
+//!
+//! ```rust
+//! use std::cell::RefCell;
+//! use std::iter;
+//!
+//! use cactusref::{Adopt, Rc};
+//!
+//! struct Node<T> {
+//!     pub prev: Option<Rc<RefCell<Self>>>,
+//!     pub next: Option<Rc<RefCell<Self>>>,
+//!     pub data: T,
+//! }
+//!
+//! struct List<T> {
+//!     pub head: Option<Rc<RefCell<Node<T>>>>,
+//! }
+//!
+//! impl<T> List<T> {
+//!     fn pop(&mut self) -> Option<Rc<RefCell<Node<T>>>> {
+//!         let head = self.head.take()?;
+//!         let tail = head.borrow_mut().prev.take();
+//!         let next = head.borrow_mut().next.take();
+//!         if let Some(ref tail) = tail {
+//!             Rc::unadopt(&head, tail);
+//!             Rc::unadopt(tail, &head);
+//!
+//!             tail.borrow_mut().next.clone_from(&next);
+//!             if let Some(ref next) = next {
+//!                 unsafe {
+//!                     Rc::adopt_unchecked(tail, next);
+//!                 }
+//!             }
+//!         }
+//!         if let Some(ref next) = next {
+//!             Rc::unadopt(&head, next);
+//!             Rc::unadopt(next, &head);
+//!
+//!             next.borrow_mut().prev.clone_from(&tail);
+//!             if let Some(ref tail) = tail {
+//!                 unsafe {
+//!                     Rc::adopt_unchecked(next, tail);
+//!                 }
+//!             }
+//!         }
+//!         self.head = next;
+//!         Some(head)
+//!     }
+//! }
+//!
+//! impl<T> From<Vec<T>> for List<T> {
+//!     fn from(list: Vec<T>) -> Self {
+//!         let nodes = list
+//!             .into_iter()
+//!             .map(|data| {
+//!                 Rc::new(RefCell::new(Node {
+//!                     prev: None,
+//!                     next: None,
+//!                     data,
+//!                 }))
+//!             })
+//!             .collect::<Vec<_>>();
+//!         for i in 0..nodes.len() - 1 {
+//!             let curr = &nodes[i];
+//!             let next = &nodes[i + 1];
+//!             curr.borrow_mut().next = Some(Rc::clone(next));
+//!             next.borrow_mut().prev = Some(Rc::clone(curr));
+//!             unsafe {
+//!                 Rc::adopt_unchecked(curr, next);
+//!                 Rc::adopt_unchecked(next, curr);
+//!             }
+//!         }
+//!         let tail = &nodes[nodes.len() - 1];
+//!         let head = &nodes[0];
+//!         tail.borrow_mut().next = Some(Rc::clone(head));
+//!         head.borrow_mut().prev = Some(Rc::clone(tail));
+//!         unsafe {
+//!             Rc::adopt_unchecked(tail, head);
+//!             Rc::adopt_unchecked(head, tail);
+//!         }
+//!
+//!         let head = Rc::clone(head);
+//!         Self { head: Some(head) }
+//!     }
+//! }
+//!
+//! let list = iter::repeat(())
+//!     .map(|_| "a".repeat(1024 * 1024))
+//!     .take(10)
+//!     .collect::<Vec<_>>();
+//! let mut list = List::from(list);
+//!
+//! let head = list.pop().unwrap();
+//! assert_eq!(Rc::strong_count(&head), 1);
+//! assert!(head.borrow().data.starts_with('a'));
+//!
+//! // The new head of the list is owned three times:
+//! //
+//! // - itself.
+//! // - the `prev` pointer to it from its next element.
+//! // - the `next` pointer from the list's tail.
+//! assert_eq!(list.head.as_ref().map(Rc::strong_count), Some(3));
+//!
+//! // The popped head is no longer part of the graph and can be safely dropped
+//! // and deallocated.
+//! let weak = Rc::downgrade(&head);
+//! drop(head);
+//! assert!(weak.upgrade().is_none());
+//!
+//! drop(list);
+//! // all memory consumed by the list nodes is reclaimed.
+//! ```
+
\ No newline at end of file diff --git a/src/cactusref/drop.rs.html b/src/cactusref/drop.rs.html new file mode 100644 index 000000000..e419a7c1c --- /dev/null +++ b/src/cactusref/drop.rs.html @@ -0,0 +1,873 @@ +drop.rs - source
+
use alloc::alloc::{Allocator, Global, Layout};
+use alloc::vec;
+use core::mem::{self, MaybeUninit};
+use core::ptr;
+
+#[cfg(doc)]
+use crate::adopt::Adopt;
+use crate::hash::HashMap;
+use crate::link::{Kind, Link};
+use crate::rc::RcInnerPtr;
+use crate::Rc;
+
+unsafe impl<#[may_dangle] T> Drop for Rc<T> {
+    /// Drops the [`Rc`].
+    ///
+    /// This will decrement the strong reference count. If the strong reference
+    /// count reaches zero then the only other references (if any) are [`Weak`],
+    /// so we `drop` the inner value.
+    ///
+    /// [`Weak`]: crate::Weak
+    ///
+    /// If this `Rc` has adopted any other `Rc`s, drop will trace the reachable
+    /// object graph and detect if this `Rc` is part of an orphaned cycle. An
+    /// orphaned cycle is a cycle in which no member is owned by an `Rc` outside
+    /// of the cycle.
+    ///
+    /// `Rc`s do not pay the cost of the reachability check unless they use
+    /// [`Adopt::adopt_unchecked`].
+    ///
+    /// [`Adopt::adopt_unchecked`]: crate::Adopt::adopt_unchecked
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo  = Rc::new(Foo);
+    /// let foo2 = Rc::clone(&foo);
+    ///
+    /// drop(foo);    // Doesn't print anything
+    /// drop(foo2);   // Prints "dropped!"
+    /// ```
+    ///
+    /// ```
+    /// use cactusref::{Adopt, Rc};
+    ///
+    /// struct Foo(u8);
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped {}!", self.0);
+    ///     }
+    /// }
+    ///
+    /// let foo  = Rc::new(Foo(10));
+    /// let foo2 = Rc::new(Foo(20));
+    ///
+    /// unsafe {
+    ///     Rc::adopt_unchecked(&foo, &foo2);
+    ///     Rc::adopt_unchecked(&foo2, &foo);
+    /// }
+    ///
+    /// drop(foo);    // Doesn't print anything
+    /// drop(foo2);   // Prints "dropped 10!" and "dropped 20!"
+    /// ```
+    ///
+    /// # Cycle Detection and Deallocation Algorithm
+    ///
+    /// [`Rc::adopt_unchecked`] does explicit bookkeeping to store links to
+    /// adoptee `Rc`s.  These links form a graph of reachable objects which are
+    /// used to detect cycles.
+    ///
+    /// [`Rc::adopt_unchecked`]: crate::Rc::adopt_unchecked
+    ///
+    /// On drop, if an `Rc` has no links, it is dropped like a normal `Rc`. If
+    /// the `Rc` has links, `Drop` performs a breadth first search by traversing
+    /// the forward and backward links stored in each `Rc`. Deallocating cycles
+    /// requires correct use of [`Adopt::adopt_unchecked`] and [`Adopt::unadopt`]
+    /// to perform the reachability bookkeeping.
+    ///
+    /// [`Adopt::adopt_unchecked`]: crate::Adopt::adopt_unchecked
+    /// [`Adopt::unadopt`]: crate::Adopt::unadopt
+    ///
+    /// After determining all reachable objects, `Rc` reduces the graph to
+    /// objects that form a cycle by performing pairwise reachability checks.
+    /// During this step, for each object in the cycle, `Rc` counts the number
+    /// of refs held by other objects in the cycle.
+    ///
+    /// Using the cycle-held references, `Rc` computes whether the object graph
+    /// is reachable by any non-cycle nodes by comparing strong counts.
+    ///
+    /// If the cycle is orphaned, `Rc` busts all the link structures and
+    /// deallocates each object.
+    ///
+    /// ## Performance
+    ///
+    /// Cycle detection uses breadth first search to trace the object graph.
+    /// The runtime complexity of detecting a cycle is `O(links + nodes)` where
+    /// links is the number of adoptions that are alive and nodes is the number
+    /// of objects in the cycle.
+    ///
+    /// Determining whether the cycle is orphaned builds on cycle detection and
+    /// iterates over all nodes in the graph to see if their strong count is
+    /// greater than the number of references in the cycle. The runtime
+    /// complexity of finding an orphaned cycle is `O(links + nodes)` where
+    /// links is the number of adoptions that are alive and nodes is the number
+    /// of objects in the cycle.
+    fn drop(&mut self) {
+        // If `self` is held in a cycle, as we deallocate members of the cycle,
+        // they will drop their refs to `self`. To prevent a double free, mark
+        // nodes as dead if they have already been deallocated and short
+        // circuit.
+        if self.inner().is_dead() {
+            return;
+        }
+
+        // If a drop is occurring, it is because there was an existing `Rc` which
+        // is maintaining a strong count. Decrement the strong count on drop,
+        // even if this `Rc` is dead. This ensures `Weak::upgrade` behaves
+        // correctly for deallocated cycles and does not cause a use-after-free.
+        self.inner().dec_strong();
+
+        unsafe {
+            // If links is empty, the object is either not in a cycle or
+            // part of a cycle that has been link busted for deallocation.
+            if self.inner().links().borrow().is_empty() {
+                // If the object was never in a cycle, `dec_strong` above will
+                // kill the `Rc`.
+                //
+                // If the object was in a cycle, the `Rc` will only be dead if
+                // all strong references to it have been dropped.
+                if self.inner().is_dead() {
+                    drop_unreachable(self);
+                }
+                // Otherwise, ignore the pointed-to object; it will be dropped
+                // when no strong references to it remain.
+                return;
+            }
+            if self.inner().is_dead() {
+                drop_unreachable_with_adoptions(self);
+                return;
+            }
+            if let Some(cycle) = Self::orphaned_cycle(self) {
+                drop_cycle(cycle);
+                return;
+            }
+            debug!("cactusref drop skipped, Rc is reachable");
+        }
+    }
+}
+
+unsafe fn drop_unreachable<T>(this: &mut Rc<T>) {
+    debug!("cactusref detected unreachable Rc");
+    let forward = Link::forward(this.ptr);
+    let backward = Link::backward(this.ptr);
+    // Remove reverse links so `this` is not included in cycle detection for
+    // objects that had adopted `this`. This prevents a use-after-free in
+    // `Rc::orphaned_cycle`.
+    let links = this.inner().links();
+    for (item, &strong) in links.borrow().iter() {
+        match item.kind() {
+            Kind::Forward => {
+                let mut links = links.borrow_mut();
+                links.remove(forward, strong);
+                links.remove(backward, strong);
+            }
+            Kind::Loopback => {
+                let mut links = links.borrow_mut();
+                links.remove(*item, strong);
+            }
+            Kind::Backward => {}
+        }
+    }
+
+    let rcbox = this.ptr.as_ptr();
+    // Mark `this` as pending deallocation. This is not strictly necessary since
+    // `this` is unreachable, but `kill`ing `this` ensures we don't double-free.
+    if !(*rcbox).is_uninit() {
+        trace!("cactusref deallocating unreachable RcBox {:p}", rcbox);
+        // Mark the `RcBox` as uninitialized so we can make its `MaybeUninit`
+        // fields uninhabited.
+        (*rcbox).make_uninit();
+
+        // Move `T` out of the `RcBox`. Dropping an uninitialized `MaybeUninit`
+        // has no effect.
+        let inner = mem::replace(&mut (*rcbox).value, MaybeUninit::uninit());
+        // destroy the contained `T`.
+        drop(inner.assume_init());
+        // Move the links `HashMap` out of the `RcBox`. Dropping an uninitialized
+        // `MaybeUninit` has no effect.
+        let links = mem::replace(&mut (*rcbox).links, MaybeUninit::uninit());
+        // Destroy the heap-allocated links.
+        drop(links.assume_init());
+    }
+
+    // remove the implicit "strong weak" pointer now that we've destroyed the
+    // contents.
+    (*rcbox).dec_weak();
+
+    if (*rcbox).weak() == 0 {
+        // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw` is always
+        // safe to call.
+        let layout = Layout::for_value_raw(this.ptr.as_ptr());
+        Global.deallocate(this.ptr.cast(), layout);
+    }
+}
+
+unsafe fn drop_cycle<T>(cycle: HashMap<Link<T>, usize>) {
+    debug!(
+        "cactusref detected orphaned cycle with {} objects",
+        cycle.len()
+    );
+    // Iterate over all the nodes in the cycle, bust all of the links. All nodes
+    // in the cycle are reachable by other nodes in the cycle, so removing
+    // all cycle-internal links won't result in a leak.
+    for (ptr, &refcount) in &cycle {
+        trace!(
+            "cactusref dropping {:?} member of orphaned cycle with refcount {}",
+            ptr,
+            refcount
+        );
+
+        // Remove reverse links so `this` is not included in cycle detection for
+        // objects that had adopted `this`. This prevents a use-after-free in
+        // `Rc::orphaned_cycle`.
+        //
+        // Because the entire cycle is unreachable, the only forward and
+        // backward links are to objects in the cycle that we are about to
+        // deallocate. This allows us to bust the cycle detection by clearing
+        // all links.
+        let rcbox = ptr.as_ptr();
+        let cycle_strong_refs = {
+            let mut links = (*rcbox).links().borrow_mut();
+            links
+                .extract_if(|link, _| {
+                    if let Kind::Forward | Kind::Loopback = link.kind() {
+                        cycle.contains_key(link)
+                    } else {
+                        false
+                    }
+                })
+                .map(|(link, count)| {
+                    if let Kind::Forward = link.kind() {
+                        count
+                    } else {
+                        0
+                    }
+                })
+                .sum::<usize>()
+        };
+
+        // To be in a cycle, at least one `value` field in an `RcBox` in the
+        // cycle holds a strong reference to `this`. Mark all nodes in the cycle
+        // as dead so when we deallocate them via the `value` pointer we don't
+        // get a double-free.
+        for _ in 0..cycle_strong_refs.min((*rcbox).strong()) {
+            (*rcbox).dec_strong();
+        }
+    }
+
+    let mut inners = vec![];
+    for (ptr, _) in &cycle {
+        if !ptr.is_dead() {
+            // This object continues to be referenced outside the cycle in
+            // another part of the graph.
+            continue;
+        }
+
+        let ptr = ptr.into_raw_non_null();
+        let rcbox = ptr.as_ptr();
+
+        if !(*rcbox).is_uninit() {
+            // Mark the `RcBox` as uninitialized so we can make its
+            // `MaybeUninit` fields uninhabited.
+            (*rcbox).make_uninit();
+
+            // Move `T` out of the `RcBox`. Dropping an uninitialized
+            // `MaybeUninit` has no effect.
+            let inner = mem::replace(&mut (*rcbox).value, MaybeUninit::uninit());
+            // Move the links `HashMap` out of the `RcBox`. Dropping an
+            // uninitialized `MaybeUninit` has no effect.
+            let links = mem::replace(&mut (*rcbox).links, MaybeUninit::uninit());
+            trace!("cactusref deconstructed member {:p} of orphan cycle", rcbox);
+            // Move `T` and the `HashMap` out of the `RcBox` to be dropped after
+            // busting the cycle.
+            inners.push((inner.assume_init(), links.assume_init()));
+        }
+    }
+    // Drop and deallocate all `T` and `HashMap` objects.
+    drop(inners);
+
+    let unreachable_cycle_participants = cycle.into_iter().map(|(ptr, _)| ptr).filter(|ptr| {
+        // Filter the set of cycle participants so we only drop `Rc`s that are
+        // dead.
+        //
+        // If an `Rc` is not dead, it continues to be referenced outside of the
+        // cycle, for example:
+        //
+        //  | Rc | -> | Rc | -> | Rc | <-> | Rc |
+        //    ^                   |
+        //    |-------------------|
+        //
+        // This object continues to be referenced outside the cycle in another
+        // part of the graph.
+        ptr.is_dead()
+    });
+
+    for ptr in unreachable_cycle_participants {
+        let ptr = ptr.into_raw_non_null();
+        trace!(
+            "cactusref deallocating RcBox after dropping item {:?} in orphaned cycle",
+            ptr
+        );
+
+        let rcbox = ptr.as_ptr();
+        // remove the implicit "strong weak" pointer now that we've destroyed
+        // the contents.
+        (*rcbox).dec_weak();
+
+        if (*rcbox).weak() == 0 {
+            trace!(
+                "no more weak references, deallocating layout for item {:?} in orphaned cycle",
+                ptr
+            );
+            // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw` is
+            // always safe to call.
+            let layout = Layout::for_value_raw(ptr.as_ptr());
+            Global.deallocate(ptr.cast(), layout);
+        }
+    }
+}
+
+// Drop an `Rc` that is unreachable, but has adopted other `Rc`s.
+//
+// Unreachable `Rc`s have a strong count of zero, but because they have adopted
+// other `Rc`s, other `Rc`s have back links to `this`.
+//
+// Before dropping `this`, we must traverse `this`'s forward links to collect
+// all of `this`'s adoptions. Then, remove `this` from its adoptions' back
+// links. By pruning back links in the rest of the graph, we can ensure that
+// `this` and its `RcBox` are not referenced and can be safely deallocated.
+//
+// # Diagram
+//
+//          this
+// |--------------------|
+// | ptr:    RcBox      |
+// |      |----------| <--------|
+// |      | value: T |  |       |
+// |      | links: ------> | other RcBox |
+// |      |   |----------> | other RcBox |
+// |      |          |  |       |
+// |      |----------| <--------|
+// |--------------------|
+unsafe fn drop_unreachable_with_adoptions<T>(this: &mut Rc<T>) {
+    // Construct a forward and back link from `this` so we can
+    // purge it from the adopted `links`.
+    let forward = Link::forward(this.ptr);
+    let backward = Link::backward(this.ptr);
+    // `this` is unreachable but may have been adopted and dropped.
+    //
+    // Iterate over all of the other nodes in the graph that have links to
+    // `this` and remove all of the adoptions. By doing so, when other graph
+    // participants are dropped, they do not try to deallocate `this`.
+    //
+    // `this` is fully removed from the graph.
+    let links = this.inner().links();
+    for (item, &strong) in links.borrow().iter() {
+        // If `this` has adopted itself, skip it here: mutably borrowing its own
+        // links while they are being iterated would cause an already-borrowed
+        // error. These self links are cleared below when all of `this`'s links
+        // are busted.
+        if ptr::eq(this.inner(), item.as_ptr()) {
+            continue;
+        }
+        let mut links = item.as_ref().links().borrow_mut();
+        // The link counts don't record which node each cycle strong count came
+        // from, so purge as many strong counts as possible.
+        //
+        // Additionally, `item` may have forward adoptions for `this`, so
+        // purge those as well.
+        //
+        // `Links::remove` ensures the count for forward and back links will not
+        // underflow.
+        links.remove(forward, strong);
+        links.remove(backward, strong);
+    }
+    // Bust the links for `this` since it is now unreachable and set to be
+    // deallocated.
+    links.borrow_mut().clear();
+
+    let rcbox = this.ptr.as_ptr();
+    // Mark `this` as pending deallocation. This is not strictly necessary since
+    // `this` is unreachable, but `kill`ing `this` ensures we don't double-free.
+    if !(*rcbox).is_uninit() {
+        trace!(
+            "cactusref deallocating RcBox after dropping adopted and unreachable item {:p} in the object graph",
+            rcbox
+        );
+        // Mark the `RcBox` as uninitialized so we can make its `MaybeUninit`
+        // fields uninhabited.
+        (*rcbox).make_uninit();
+
+        // Move `T` out of the `RcBox`. Dropping an uninitialized `MaybeUninit`
+        // has no effect.
+        let inner = mem::replace(&mut (*rcbox).value, MaybeUninit::uninit());
+        // destroy the contained `T`.
+        drop(inner.assume_init());
+        // Move the links `HashMap` out of the `RcBox`. Dropping an uninitialized
+        // `MaybeUninit` has no effect.
+        let links = mem::replace(&mut (*rcbox).links, MaybeUninit::uninit());
+        // Destroy the heap-allocated links.
+        drop(links.assume_init());
+    }
+
+    // remove the implicit "strong weak" pointer now that we've destroyed the
+    // contents.
+    (*rcbox).dec_weak();
+
+    if (*rcbox).weak() == 0 {
+        trace!(
+            "no more weak references, deallocating layout for adopted and unreachable item {:?} in the object graph",
+            this.ptr
+        );
+        // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw` is always
+        // safe to call.
+        let layout = Layout::for_value_raw(this.ptr.as_ptr());
+        Global.deallocate(this.ptr.cast(), layout);
+    }
+}
+
\ No newline at end of file diff --git a/src/cactusref/hash.rs.html b/src/cactusref/hash.rs.html new file mode 100644 index 000000000..75750820b --- /dev/null +++ b/src/cactusref/hash.rs.html @@ -0,0 +1,31 @@ +hash.rs - source
#![allow(clippy::module_name_repetitions)]
+
+use core::hash::BuildHasherDefault;
+
+use rustc_hash::FxHasher;
+
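+// Crate-internal aliases for `hashbrown` maps and sets using the fast,
+// non-cryptographic `FxHasher` from the `rustc-hash` crate.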
+pub type HashMap<K, V> = hashbrown::HashMap<K, V, BuildHasherDefault<FxHasher>>;
+pub type HashSet<T> = hashbrown::HashSet<T, BuildHasherDefault<FxHasher>>;
+
+pub mod hash_map {
+    use hashbrown::hash_map;
+
+    pub type Iter<'a, K, V> = hash_map::Iter<'a, K, V>;
+    pub type ExtractIf<'a, K, V, F> = hash_map::ExtractIf<'a, K, V, F>;
+}
+
\ No newline at end of file diff --git a/src/cactusref/lib.rs.html b/src/cactusref/lib.rs.html new file mode 100644 index 000000000..523389d3c --- /dev/null +++ b/src/cactusref/lib.rs.html @@ -0,0 +1,335 @@ +lib.rs - source
#![feature(
+    allocator_api,
+    core_intrinsics,
+    dropck_eyepatch,
+    layout_for_ptr,
+    set_ptr_value,
+    slice_ptr_get
+)]
+#![allow(incomplete_features)]
+#![allow(internal_features)]
+#![warn(clippy::all)]
+#![warn(clippy::pedantic)]
+#![warn(clippy::cargo)]
+#![allow(clippy::cast_possible_wrap)]
+#![allow(clippy::inline_always)]
+#![allow(clippy::let_underscore_untyped)]
+#![allow(clippy::manual_let_else)]
+#![allow(clippy::missing_panics_doc)]
+#![allow(clippy::option_if_let_else)]
+#![allow(clippy::needless_pass_by_ref_mut)]
+#![allow(clippy::ref_as_ptr)]
+#![allow(unknown_lints)]
+#![warn(missing_copy_implementations)]
+#![warn(missing_debug_implementations)]
+#![warn(missing_docs)]
+#![warn(rust_2018_idioms)]
+#![warn(unused_qualifications)]
+#![warn(variant_size_differences)]
+
+//! Single-threaded, cycle-aware, reference-counting pointers. 'Rc' stands
+//! for 'Reference Counted'.
+//!
+//! The type [`Rc<T>`] provides shared ownership of a value of type `T`,
+//! allocated in the heap. Invoking [`clone`] on [`Rc`] produces a new pointer
+//! to the same value in the heap. When the last externally reachable [`Rc`]
+//! pointer to a given value is destroyed, the pointed-to value is also
+//! destroyed.
+//!
+//! [`Rc<T>`]: crate::Rc
+//! [`clone`]: Clone::clone
+//!
+//! `Rc` can **detect and deallocate cycles** of `Rc`s through the use of
+//! [`Adopt`]. Cycle detection is opt-in and no reachability checks are
+//! performed unless graphs have adoptions.
+//!
+//! # Nightly
+//!
+//! CactusRef depends on several unstable Rust features and can only be built
+//! on a nightly toolchain.
+//!
+//! # Maturity
+//!
+//! CactusRef is experimental. This crate has several limitations:
+//!
+//! - CactusRef is nightly only.
+//! - Cycle detection requires [unsafe code][adopt-api] to use.
+//!
+//! CactusRef is a non-trivial extension to `std::rc::Rc` and has not been
+//! proven to be safe. Although CactusRef makes a best effort to abort the
+//! program if it detects a dangling `Rc`, this crate may be unsound.
+//!
+//! [adopt-api]: crate::Adopt
+//!
+//! # CactusRef vs. `std::rc`
+//!
+//! The `Rc` in CactusRef is derived from [`std::rc::Rc`] and CactusRef
+//! implements most of the API from `std`.
+//!
+//! CactusRef does not implement the following APIs that are present on
+//! [`std::rc::Rc`]:
+//!
+//! - [`std::rc::Rc::downcast`]
+//! - [`CoerceUnsized`]
+//! - [`DispatchFromDyn`]
+//! - `From<Cow<'_, T>>`
+//!
+//! CactusRef cannot be used with unsized types like `[T]` or `str`.
+//!
+//! If you do not depend on these APIs, CactusRef is a drop-in replacement for
+//! [`std::rc::Rc`].
+//!
+//! Like [`std::rc`], [`Rc`] and [`Weak`] are not `Send` and are not `Sync`.
+//!
+//! [`std::rc`]: https://doc.rust-lang.org/stable/std/rc/index.html
+//!
+//! # Building an object graph
+//!
+//! CactusRef smart pointers can be used to implement a tracing garbage
+//! collector local to a graph of objects. Graphs of CactusRefs are cycle-aware and
+//! can deallocate a cycle of strong references that is otherwise unreachable
+//! from the rest of the object graph, unlike [`std::rc::Rc`].
+//!
+//! `CactusRef` relies on proper use of [`Adopt::adopt_unchecked`] and [`Adopt::unadopt`]
+//! to maintain bookkeeping about the object graph for breaking cycles. These
+//! functions are unsafe because improperly managing the bookkeeping can cause
+//! the `Rc` drop implementation to deallocate cycles while they are still
+//! externally reachable. Failure to uphold [`Adopt`]'s safety invariants will
+//! result in *[undefined behavior]*, and any held `Rc`s that point to members of
+//! the now-deallocated cycle may dangle.
+//!
+//! [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+//!
+//! CactusRef makes a best-effort attempt to abort the program if it detects an
+//! access to a dangling `Rc`.
+//!
+//! # Cycle Detection
+//!
+//! `Rc` implements [`Adopt`] to log bookkeeping entries for strong ownership
+//! links to other `Rc`s that may form a cycle. The ownership links tracked by
+//! these bookkeeping entries form an object graph of reachable `Rc`s. On
+//! `drop`, `Rc` uses these entries to conduct a reachability trace of the
+//! object graph to determine if it is part of an _orphaned cycle_. An orphaned
+//! cycle is a cycle where the only strong references to all nodes in the cycle
+//! come from other nodes in the cycle.
+//!
+//! Cycle detection is a zero-cost abstraction. If you never
+//! `use cactusref::Adopt;`, `drop` uses the same implementation as
+//! [`std::rc::Rc`] (and leaks in the same way as `std::rc::Rc` if you form a
+//! cycle of strong references). The only costs you pay are the memory of one
+//! empty hash map per allocation used to track adoptions and a check on `drop`
+//! that this map is empty.
+//!
+//! Cycle detection uses breadth-first search for traversing the object graph.
+//! The algorithm supports arbitrarily large object graphs and will not overflow
+//! the stack during the reachability trace.
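+//!
+//! As a minimal sketch of the adoption workflow (adapted from the doctest on
+//! this crate's `Drop` implementation), two `Rc`s that adopt each other are
+//! reclaimed once the second handle is dropped:
+//!
+//! ```
+//! use cactusref::{Adopt, Rc};
+//!
+//! let a = Rc::new("a");
+//! let b = Rc::new("b");
+//!
+//! // Record that `a` and `b` hold strong references to each other.
+//! unsafe {
+//!     Rc::adopt_unchecked(&a, &b);
+//!     Rc::adopt_unchecked(&b, &a);
+//! }
+//!
+//! drop(a); // `a` is still reachable from `b`; nothing is deallocated yet.
+//! drop(b); // The cycle is now orphaned and both allocations are reclaimed.
+//! ```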
+//!
+//! [`std::rc::Rc`]: alloc::rc::Rc
+//! [`std::rc::Rc::downcast`]: alloc::rc::Rc::downcast
+//! [`CoerceUnsized`]: core::ops::CoerceUnsized
+//! [`DispatchFromDyn`]: core::ops::DispatchFromDyn
+
+#![doc(html_root_url = "https://docs.rs/cactusref/0.5.0")]
+#![no_std]
+
+// Ensure code blocks in README.md compile
+#[cfg(doctest)]
+#[doc = include_str!("../README.md")]
+mod readme {}
+
+extern crate alloc;
+#[cfg(any(feature = "std", test, doctest, miri))]
+extern crate std;
+#[macro_use]
+extern crate log;
+
+mod adopt;
+mod cycle;
+mod drop;
+mod hash;
+mod link;
+mod rc;
+
+// Doc modules
+#[cfg(any(doctest, docsrs))]
+#[path = "doc/implementing_self_referential_data_structures.rs"]
+/// Examples of implementing self-referential data structures with CactusRef.
+pub mod implementing_self_referential_data_structures;
+
+pub use adopt::Adopt;
+pub use rc::Rc;
+pub use rc::Weak;
+
+/// Cactus alias for [`Rc`].
+pub type CactusRef<T> = Rc<T>;
+
+/// Cactus alias for [`Weak`].
+pub type CactusWeakRef<T> = Weak<T>;
+
\ No newline at end of file diff --git a/src/cactusref/link.rs.html b/src/cactusref/link.rs.html new file mode 100644 index 000000000..3cb117944 --- /dev/null +++ b/src/cactusref/link.rs.html @@ -0,0 +1,399 @@ +link.rs - source
#![allow(clippy::iter_without_into_iter)]
+
+//! This module includes data structures for building an object graph.
+
+use core::cell::Cell;
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::num::NonZeroUsize;
+use core::ptr::{self, NonNull};
+
+use crate::hash::hash_map::{ExtractIf, Iter};
+use crate::hash::HashMap;
+use crate::rc::{RcBox, RcInnerPtr};
+
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
+pub(crate) enum Kind {
+    Forward,
+    Backward,
+    Loopback,
+}
+
+/// A collection of forward and backward links and their corresponding adoption counts.
+pub(crate) struct Links<T> {
+    registry: HashMap<Link<T>, usize>,
+}
+
+impl<T> fmt::Debug for Links<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Links")
+            .field("registry", &self.registry)
+            .finish()
+    }
+}
+
+impl<T> Links<T> {
+    #[inline]
+    pub fn new() -> Self {
+        Self {
+            registry: HashMap::default(),
+        }
+    }
+
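+    /// Record one additional adoption of `other` by incrementing its count in
+    /// the registry.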
+    #[inline]
+    pub fn insert(&mut self, other: Link<T>) {
+        *self.registry.entry(other).or_insert(0) += 1;
+    }
+
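+    /// Decrement the recorded adoption count for `other` by `strong`,
+    /// saturating at zero. The entry is removed from the registry entirely
+    /// once no adoptions remain.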
+    #[inline]
+    pub fn remove(&mut self, other: Link<T>, strong: usize) {
+        let count = self.registry.get(&other).copied().unwrap_or_default();
+        let remaining_strong_count = count.checked_sub(strong).and_then(NonZeroUsize::new);
+        if let Some(remaining_strong_count) = remaining_strong_count {
+            self.registry.insert(other, remaining_strong_count.get());
+        } else {
+            self.registry.remove(&other);
+        }
+    }
+
+    #[inline]
+    pub fn clear(&mut self) {
+        self.registry.clear();
+    }
+
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.registry.is_empty()
+    }
+
+    #[inline]
+    pub fn iter(&self) -> Iter<'_, Link<T>, usize> {
+        self.registry.iter()
+    }
+
+    #[inline]
+    pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, Link<T>, usize, F>
+    where
+        F: FnMut(&Link<T>, &mut usize) -> bool,
+    {
+        self.registry.extract_if(f)
+    }
+}
+
+/// Link represents a directed edge in the object graph of strong CactusRef `Rc`
+/// smart pointers.
+///
+/// Links can be one of several types:
+///
+/// - Forward, which means the `Rc` storing the link is adopting the link's
+///   pointee.
+/// - Backward, which means this `Rc` is being adopted by the link's pointee.
+/// - Loopback, which means the `Rc` has adopted itself.
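+///
+/// For example, after `a` adopts `b`, `a`'s `Links` registry holds a forward
+/// link to `b` and `b`'s registry holds a backward link to `a`.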
+pub(crate) struct Link<T> {
+    ptr: NonNull<RcBox<T>>,
+    kind: Kind,
+}
+
+impl<T> fmt::Debug for Link<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Link")
+            .field("ptr", &self.ptr)
+            .field("kind", &self.kind)
+            .finish()
+    }
+}
+
+impl<T> fmt::Pointer for Link<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&self.ptr.as_ptr(), f)
+    }
+}
+
+impl<T> Link<T> {
+    #[inline]
+    pub const fn forward(ptr: NonNull<RcBox<T>>) -> Self {
+        Self {
+            ptr,
+            kind: Kind::Forward,
+        }
+    }
+
+    #[inline]
+    pub const fn backward(ptr: NonNull<RcBox<T>>) -> Self {
+        Self {
+            ptr,
+            kind: Kind::Backward,
+        }
+    }
+
+    #[inline]
+    pub const fn loopback(ptr: NonNull<RcBox<T>>) -> Self {
+        Self {
+            ptr,
+            kind: Kind::Loopback,
+        }
+    }
+
+    #[inline]
+    pub const fn kind(&self) -> Kind {
+        self.kind
+    }
+
+    #[inline]
+    pub const fn as_forward(&self) -> Self {
+        Self::forward(self.ptr)
+    }
+
+    #[inline]
+    pub fn as_ptr(&self) -> *mut RcBox<T> {
+        self.ptr.as_ptr()
+    }
+
+    #[inline]
+    pub fn as_ref(&self) -> &RcBox<T> {
+        unsafe { self.ptr.as_ref() }
+    }
+
+    #[inline]
+    pub fn into_raw_non_null(self) -> NonNull<RcBox<T>> {
+        self.ptr
+    }
+}
+
+impl<T> RcInnerPtr for Link<T> {
+    #[inline(always)]
+    fn weak_ref(&self) -> &Cell<usize> {
+        unsafe { self.ptr.as_ref().weak_ref() }
+    }
+
+    #[inline(always)]
+    fn strong_ref(&self) -> &Cell<usize> {
+        unsafe { self.ptr.as_ref().strong_ref() }
+    }
+}
+
+impl<T> Clone for Link<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+impl<T> Copy for Link<T> {}
+
+impl<T> PartialEq for Link<T> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        self.kind == other.kind && ptr::eq(self.as_ptr(), other.as_ptr())
+    }
+}
+
+impl<T> Eq for Link<T> {}
+
+impl<T> Hash for Link<T> {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.ptr.hash(state);
+        self.kind.hash(state);
+    }
+}
+
\ No newline at end of file diff --git a/src/cactusref/rc.rs.html b/src/cactusref/rc.rs.html new file mode 100644 index 000000000..a48ee4793 --- /dev/null +++ b/src/cactusref/rc.rs.html @@ -0,0 +1,3837 @@ +rc.rs - source
//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
+//! Counted'.
+//!
+//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
+//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
+//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
+//! given allocation is destroyed, the value stored in that allocation (often
+//! referred to as "inner value") is also dropped.
+//!
+//! Shared references in Rust disallow mutation by default, and [`Rc`]
+//! is no exception: you cannot generally obtain a mutable reference to
+//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
+//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
+//! inside an `Rc`][mutability].
+//!
+//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
+//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
+//! does not implement [`Send`][send]. As a result, the Rust compiler
+//! will check *at compile time* that you are not sending [`Rc`]s between
+//! threads. If you need multi-threaded, atomic reference counting, use
+//! [`sync::Arc`][arc].
+//!
+//! The [`downgrade`][downgrade] method can be used to create a non-owning
+//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
+//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
+//! already been dropped. In other words, `Weak` pointers do not keep the value
+//! inside the allocation alive; however, they *do* keep the allocation
+//! (the backing store for the inner value) alive.
+//!
+//! A cycle between [`Rc`] pointers will never be deallocated unless it is
+//! tracked with this crate's `Adopt` trait. [`Weak`] can also be used to break
+//! cycles: for example, a tree could have strong
+//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
+//! children back to their parents.
+//!
+//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
+//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
+//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
+//! functions, called using [fully qualified syntax]:
+//!
+//! ```
+//! use cactusref::Rc;
+//!
+//! let my_rc = Rc::new(());
+//! Rc::downgrade(&my_rc);
+//! ```
+//!
+//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
+//! fully qualified syntax. Some people prefer to use fully qualified syntax,
+//! while others prefer using method-call syntax.
+//!
+//! ```
+//! use cactusref::Rc;
+//!
+//! let rc = Rc::new(());
+//! // Method-call syntax
+//! let rc2 = rc.clone();
+//! // Fully qualified syntax
+//! let rc3 = Rc::clone(&rc);
+//! ```
+//!
+//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
+//! already been dropped.
+//!
+//! # Cloning references
+//!
+//! Creating a new reference to the same allocation as an existing reference counted pointer
+//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//!
+//! ```
+//! use cactusref::Rc;
+//!
+//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
+//! // The two syntaxes below are equivalent.
+//! let a = foo.clone();
+//! let b = Rc::clone(&foo);
+//! // a and b both point to the same memory location as foo.
+//! ```
+//!
+//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+//! the meaning of the code. In the example above, this syntax makes it easier to see that
+//! this code is creating a new reference rather than copying the whole content of foo.
+//!
+//! # Examples
+//!
+//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
+//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
+//! unique ownership, because more than one gadget may belong to the same
+//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
+//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
+//!
+//! ```
+//! use cactusref::Rc;
+//!
+//! struct Owner {
+//!     name: String,
+//!     // ...other fields
+//! }
+//!
+//! struct Gadget {
+//!     id: i32,
+//!     owner: Rc<Owner>,
+//!     // ...other fields
+//! }
+//!
+//! // Create a reference-counted `Owner`.
+//! let gadget_owner: Rc<Owner> = Rc::new(
+//!     Owner {
+//!         name: "Gadget Man".to_string(),
+//!     }
+//! );
+//!
+//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
+//! // gives us a new pointer to the same `Owner` allocation, incrementing
+//! // the reference count in the process.
+//! let gadget1 = Gadget {
+//!     id: 1,
+//!     owner: Rc::clone(&gadget_owner),
+//! };
+//! let gadget2 = Gadget {
+//!     id: 2,
+//!     owner: Rc::clone(&gadget_owner),
+//! };
+//!
+//! // Dispose of our local variable `gadget_owner`.
+//! drop(gadget_owner);
+//!
+//! // Despite dropping `gadget_owner`, we're still able to print out the name
+//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
+//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
+//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
+//! // live. The field projection `gadget1.owner.name` works because
+//! // `Rc<Owner>` automatically dereferences to `Owner`.
+//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
+//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
+//!
+//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
+//! // with them the last counted references to our `Owner`. Gadget Man now
+//! // gets destroyed as well.
+//! ```
+//!
+//! If our requirements change, and we also need to be able to traverse from
+//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
+//! to `Gadget` introduces a cycle. This means that their
+//! reference counts can never reach 0, and the allocation will never be destroyed:
+//! a memory leak. In order to get around this, we can use [`Weak`]
+//! pointers.
+//!
+//! Rust actually makes it somewhat difficult to produce this loop in the first
+//! place. In order to end up with two values that point at each other, one of
+//! them needs to be mutable. This is difficult because [`Rc`] enforces
+//! memory safety by only giving out shared references to the value it wraps,
+//! and these don't allow direct mutation. We need to wrap the part of the
+//! value we wish to mutate in a [`RefCell`], which provides *interior
+//! mutability*: a method to achieve mutability through a shared reference.
+//! [`RefCell`] enforces Rust's borrowing rules at runtime.
+//!
+//! ```
+//! use cactusref::Rc;
+//! use cactusref::Weak;
+//! use std::cell::RefCell;
+//!
+//! struct Owner {
+//!     name: String,
+//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
+//!     // ...other fields
+//! }
+//!
+//! struct Gadget {
+//!     id: i32,
+//!     owner: Rc<Owner>,
+//!     // ...other fields
+//! }
+//!
+//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
+//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
+//! // a shared reference.
+//! let gadget_owner: Rc<Owner> = Rc::new(
+//!     Owner {
+//!         name: "Gadget Man".to_string(),
+//!         gadgets: RefCell::new(vec![]),
+//!     }
+//! );
+//!
+//! // Create `Gadget`s belonging to `gadget_owner`, as before.
+//! let gadget1 = Rc::new(
+//!     Gadget {
+//!         id: 1,
+//!         owner: Rc::clone(&gadget_owner),
+//!     }
+//! );
+//! let gadget2 = Rc::new(
+//!     Gadget {
+//!         id: 2,
+//!         owner: Rc::clone(&gadget_owner),
+//!     }
+//! );
+//!
+//! // Add the `Gadget`s to their `Owner`.
+//! {
+//!     let mut gadgets = gadget_owner.gadgets.borrow_mut();
+//!     gadgets.push(Rc::downgrade(&gadget1));
+//!     gadgets.push(Rc::downgrade(&gadget2));
+//!
+//!     // `RefCell` dynamic borrow ends here.
+//! }
+//!
+//! // Iterate over our `Gadget`s, printing their details out.
+//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
+//!
+//!     // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
+//!     // guarantee the allocation still exists, we need to call
+//!     // `upgrade`, which returns an `Option<Rc<Gadget>>`.
+//!     //
+//!     // In this case we know the allocation still exists, so we simply
+//!     // `unwrap` the `Option`. In a more complicated program, you might
+//!     // need graceful error handling for a `None` result.
+//!
+//!     let gadget = gadget_weak.upgrade().unwrap();
+//!     println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
+//! }
+//!
+//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
+//! // are destroyed. There are now no strong (`Rc`) pointers to the
+//! // gadgets, so they are destroyed. This zeroes the reference count on
+//! // Gadget Man, so he gets destroyed as well.
+//! ```
+//!
+//! [clone]: Clone::clone
+//! [`Cell`]: core::cell::Cell
+//! [`RefCell`]: core::cell::RefCell
+//! [send]: core::marker::Send
+#![cfg_attr(feature = "std", doc = "[arc]: std::sync::Arc")]
+#![cfg_attr(
+    not(feature = "std"),
+    doc = "[arc]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html"
+)]
+//! [`Deref`]: core::ops::Deref
+//! [downgrade]: Rc::downgrade
+//! [upgrade]: Weak::upgrade
+//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
+//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
+
+use core::borrow;
+use core::cell::{Cell, RefCell};
+use core::cmp::Ordering;
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::abort;
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::ops::Deref;
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+
+use alloc::alloc::handle_alloc_error;
+use alloc::alloc::{AllocError, Allocator, Global, Layout};
+use alloc::boxed::Box;
+
+use crate::link::Links;
+
+#[cfg(test)]
+#[allow(clippy::redundant_clone)]
+#[allow(clippy::uninlined_format_args)]
+mod tests;
+
+// This is repr(C) to future-proof against possible field-reordering, which
+// would interfere with otherwise safe [into|from]_raw() of transmutable
+// inner types.
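+//
+// `links` and `value` are `MaybeUninit` so that cycle teardown in `drop.rs`
+// can move them out and drop them while outstanding weak references keep the
+// `RcBox` allocation itself alive until the weak count reaches zero.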
+#[repr(C)]
+pub(crate) struct RcBox<T> {
+    strong: Cell<usize>,
+    weak: Cell<usize>,
+    pub links: MaybeUninit<RefCell<Links<T>>>,
+    pub value: MaybeUninit<T>,
+}
+
+impl<T> RcBox<T> {
+    /// # Safety
+    ///
+    /// Callers must ensure this `RcBox` is not dead.
+    #[inline]
+    pub(crate) unsafe fn links(&self) -> &RefCell<Links<T>> {
+        let links = &self.links;
+        // SAFETY: because callers have ensured the `RcBox` is not dead, `links`
+        // has not yet been deallocated and the `MaybeUninit` is inhabited.
+        let pointer_to_links = links as *const MaybeUninit<RefCell<Links<T>>>;
+        &*(pointer_to_links.cast::<RefCell<Links<T>>>())
+    }
+}
+
+/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
+/// Counted'.
+///
+/// See the [module-level documentation](./index.html) for more details.
+///
+/// The inherent methods of `Rc` are all associated functions, which means
+/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
+/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
+///
+/// [get_mut]: Rc::get_mut
+pub struct Rc<T> {
+    pub(crate) ptr: NonNull<RcBox<T>>,
+    phantom: PhantomData<RcBox<T>>,
+}
+
+/// `Rc` is not `Send`.
+///
+/// ```compile_fail
+/// use cactusref::Rc;
+/// fn requires_send<T: Send>(val: T) {}
+/// let rc = Rc::<usize>::new(1);
+/// requires_send(rc);
+/// ```
+mod rc_is_not_send {}
+
+/// `Rc` is not `Sync`.
+///
+/// ```compile_fail
+/// use cactusref::Rc;
+/// fn requires_sync<T: Sync>(val: T) {}
+/// let rc = Rc::<usize>::new(1);
+/// requires_sync(rc);
+/// ```
+mod rc_is_not_sync {}
+
+impl<T> Rc<T> {
+    #[inline(always)]
+    pub(crate) fn inner(&self) -> &RcBox<T> {
+        // This unsafety is ok because while this Rc is alive we're guaranteed
+        // that the inner pointer is valid.
+        unsafe { self.ptr.as_ref() }
+    }
+
+    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+        Self {
+            ptr,
+            phantom: PhantomData,
+        }
+    }
+
+    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
+        Self::from_inner(NonNull::new_unchecked(ptr))
+    }
+}
+
+impl<T> Rc<T> {
+    /// Constructs a new `Rc<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// ```
+    pub fn new(value: T) -> Rc<T> {
+        // There is an implicit weak pointer owned by all the strong
+        // pointers, which ensures that the weak destructor never frees
+        // the allocation while the strong destructor is running, even
+        // if the weak pointer is stored inside the strong one.
+        Self::from_inner(
+            Box::leak(Box::new(RcBox {
+                strong: Cell::new(1),
+                weak: Cell::new(1),
+                links: MaybeUninit::new(RefCell::new(Links::new())),
+                value: MaybeUninit::new(value),
+            }))
+            .into(),
+        )
+    }
+
+    /// Constructs a new `Rc` with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut five = Rc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[must_use]
+    pub fn new_uninit() -> Rc<MaybeUninit<T>> {
+        unsafe {
+            Rc::from_ptr(Rc::allocate_for_layout(
+                Layout::new::<T>(),
+                |layout| Global.allocate(layout),
+                <*mut u8>::cast,
+            ))
+        }
+    }
+
+    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
+    /// `value` will be pinned in memory and unable to be moved.
+    pub fn pin(value: T) -> Pin<Rc<T>> {
+        unsafe { Pin::new_unchecked(Rc::new(value)) }
+    }
+
+    /// Returns the inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
+    /// passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let x = Rc::new(3);
+    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
+    ///
+    /// let x = Rc::new(4);
+    /// let _y = Rc::clone(&x);
+    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
+    /// ```
+    ///
+    /// # Errors
+    ///
+    /// If the given `Rc` does not have exactly one strong reference, it is
+    /// returned in the `Err` variant of the returned `Result`.
+    #[inline]
+    pub fn try_unwrap(this: Self) -> Result<T, Self> {
+        if Rc::strong_count(&this) == 1 {
+            unsafe {
+                let val = ptr::read(&*this); // copy the contained object
+
+                // Indicate to Weaks that they can't be promoted by decrementing
+                // the strong count, and then remove the implicit "strong weak"
+                // pointer while also handling drop logic by just crafting a
+                // fake Weak.
+                this.inner().dec_strong();
+                let _weak = Weak {
+                    ptr: this.ptr,
+                    phantom: PhantomData,
+                };
+                mem::forget(this);
+                Ok(val)
+            }
+        } else {
+            Err(this)
+        }
+    }
+}
+
+impl<T> Rc<MaybeUninit<T>> {
+    /// Converts to `Rc<T>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the inner value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut five = Rc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[inline]
+    #[must_use]
+    pub unsafe fn assume_init(self) -> Rc<T> {
+        Rc::from_inner(ManuallyDrop::new(self).ptr.cast())
+    }
+}
+
+impl<T> Rc<T> {
+    /// Consumes the `Rc`, returning the wrapped pointer.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+    /// [`Rc::from_raw`][from_raw].
+    ///
+    /// [from_raw]: Rc::from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let x_ptr = Rc::into_raw(x);
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// // Reconstruct the `Rc` to avoid a leak.
+    /// let _ = unsafe { Rc::from_raw(x_ptr) };
+    /// ```
+    #[must_use]
+    pub fn into_raw(this: Self) -> *const T {
+        let ptr = Self::as_ptr(&this);
+        mem::forget(this);
+        ptr
+    }
+
+    /// Provides a raw pointer to the data.
+    ///
+    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
+    /// for as long as there are strong counts in the `Rc`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let y = Rc::clone(&x);
+    /// let x_ptr = Rc::as_ptr(&x);
+    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// ```
+    #[must_use]
+    pub fn as_ptr(this: &Self) -> *const T {
+        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
+
+        // SAFETY: This cannot go through Deref::deref or Rc::inner because
+        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+        // write through the pointer after the Rc is recovered through `from_raw`.
+        unsafe {
+            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
+            // calling an associated function with a live `Rc`. If an `Rc` is
+            // not dead, the inner `MaybeUninit` is inhabited.
+            ptr::addr_of_mut!((*ptr).value).cast::<T>()
+        }
+    }
+
+    /// Constructs an `Rc<T>` from a raw pointer.
+    ///
+    /// The raw pointer must have been previously returned by a call to
+    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+    /// and alignment as `T`. This is trivially true if `U` is `T`.
+    /// Note that if `U` is not `T` but has the same size and alignment, this is
+    /// basically like transmuting references of different types. See
+    /// [`mem::transmute`][transmute] for more information on what
+    /// restrictions apply in this case.
+    ///
+    /// The user of `from_raw` has to make sure a specific value of `T` is only
+    /// dropped once.
+    ///
+    /// This function is unsafe because improper use may lead to memory unsafety,
+    /// even if the returned `Rc<T>` is never accessed.
+    ///
+    /// [into_raw]: Rc::into_raw
+    /// [transmute]: core::mem::transmute
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let x_ptr = Rc::into_raw(x);
+    ///
+    /// unsafe {
+    ///     // Convert back to an `Rc` to prevent leak.
+    ///     let x = Rc::from_raw(x_ptr);
+    ///     assert_eq!(&*x, "hello");
+    ///
+    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+    /// }
+    ///
+    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+    /// ```
+    ///
+    /// # Safety
+    ///
+    /// Callers must ensure that `ptr` points to a live `Rc` and was created
+    /// with a call to [`Rc::into_raw`].
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        let offset = data_offset(ptr);
+
+        // Reverse the offset to find the original RcBox.
+        let rc_ptr = (ptr as *mut u8)
+            .offset(-offset)
+            .with_metadata_of(ptr as *mut RcBox<T>);
+
+        Self::from_ptr(rc_ptr)
+    }
+
+    /// Creates a new [`Weak`] pointer to this allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let weak_five = Rc::downgrade(&five);
+    /// ```
+    #[must_use]
+    pub fn downgrade(this: &Self) -> Weak<T> {
+        this.inner().inc_weak();
+        // Make sure we do not create a dangling Weak
+        debug_assert!(!is_dangling(this.ptr.as_ptr()));
+        Weak {
+            ptr: this.ptr,
+            phantom: PhantomData,
+        }
+    }
+
+    /// Gets the number of [`Weak`] pointers to this allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let _weak_five = Rc::downgrade(&five);
+    ///
+    /// assert_eq!(1, Rc::weak_count(&five));
+    /// ```
+    #[inline]
+    #[must_use]
+    pub fn weak_count(this: &Self) -> usize {
+        this.inner().weak() - 1
+    }
+
+    /// Gets the number of strong (`Rc`) pointers to this allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let _also_five = Rc::clone(&five);
+    ///
+    /// assert_eq!(2, Rc::strong_count(&five));
+    /// ```
+    #[inline]
+    #[must_use]
+    pub fn strong_count(this: &Self) -> usize {
+        this.inner().strong()
+    }
+
+    /// Increments the strong reference count on the `Rc<T>` associated with the
+    /// provided pointer by one.
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have been obtained through `Rc::into_raw`, and the
+    /// associated `Rc` instance must be valid (i.e. the strong count must be at
+    /// least 1) for the duration of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// unsafe {
+    ///     let ptr = Rc::into_raw(five);
+    ///     Rc::increment_strong_count(ptr);
+    ///
+    ///     let five = Rc::from_raw(ptr);
+    ///     assert_eq!(2, Rc::strong_count(&five));
+    ///
+    ///     // Decrement the strong count to avoid a leak.
+    ///     Rc::decrement_strong_count(ptr);
+    /// }
+    /// ```
+    #[inline]
+    pub unsafe fn increment_strong_count(ptr: *const T) {
+        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
+        let rc = ManuallyDrop::new(Rc::<T>::from_raw(ptr));
+        // Now increase refcount, but don't drop new refcount either
+        let _rc_clone: ManuallyDrop<_> = rc.clone();
+    }
+
+    /// Decrements the strong reference count on the `Rc<T>` associated with the
+    /// provided pointer by one.
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have been obtained through `Rc::into_raw`, and the
+    /// associated `Rc` instance must be valid (i.e. the strong count must be at
+    /// least 1) when invoking this method. This method can be used to release
+    /// the final `Rc` and backing storage, but **should not** be called after
+    /// the final `Rc` has been released.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// unsafe {
+    ///     let ptr = Rc::into_raw(five);
+    ///     Rc::increment_strong_count(ptr);
+    ///
+    ///     let five = Rc::from_raw(ptr);
+    ///     assert_eq!(2, Rc::strong_count(&five));
+    ///     Rc::decrement_strong_count(ptr);
+    ///     assert_eq!(1, Rc::strong_count(&five));
+    /// }
+    /// ```
+    #[inline]
+    pub unsafe fn decrement_strong_count(ptr: *const T) {
+        drop(Rc::from_raw(ptr));
+    }
+
+    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
+    /// this allocation.
+    #[inline]
+    fn is_unique(this: &Self) -> bool {
+        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
+    }
+
+    /// Returns a mutable reference into the given `Rc`, if there are
+    /// no other `Rc` or [`Weak`] pointers to the same allocation.
+    ///
+    /// Returns [`None`] otherwise, because it is not safe to
+    /// mutate a shared value.
+    ///
+    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+    /// the inner value when there are other pointers.
+    ///
+    /// [make_mut]: Rc::make_mut
+    /// [clone]: Clone::clone
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut x = Rc::new(3);
+    /// *Rc::get_mut(&mut x).unwrap() = 4;
+    /// assert_eq!(*x, 4);
+    ///
+    /// let _y = Rc::clone(&x);
+    /// assert!(Rc::get_mut(&mut x).is_none());
+    /// ```
+    #[inline]
+    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+        if Rc::is_unique(this) {
+            unsafe { Some(Rc::get_mut_unchecked(this)) }
+        } else {
+            None
+        }
+    }
+
+    /// Returns a mutable reference into the given `Rc`,
+    /// without any check.
+    ///
+    /// See also [`get_mut`], which is safe and does appropriate checks.
+    ///
+    /// [`get_mut`]: Rc::get_mut
+    ///
+    /// # Safety
+    ///
+    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
+    /// for the duration of the returned borrow.
+    /// This is trivially the case if no such pointers exist,
+    /// for example immediately after `Rc::new`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut x = Rc::new(String::new());
+    /// unsafe {
+    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
+    /// }
+    /// assert_eq!(*x, "foo");
+    /// ```
+    #[inline]
+    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
+        debug_assert!(!this.inner().is_dead());
+        // We are careful to *not* create a reference covering the "count" fields, as
+        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
+        //
+        // Safety: If we have an `Rc`, then the allocation is not dead so the `MaybeUninit`
+        // is inhabited.
+        let value = &mut (*this.ptr.as_ptr()).value;
+        // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
+        // calling an associated function with a live `Rc`. If an `Rc` is not
+        // dead, the inner `MaybeUninit` is inhabited.
+        let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
+        &mut *(pointer_to_value)
+    }
+
+    /// Returns `true` if the two `Rc`s point to the same allocation
+    /// (in a vein similar to [`ptr::eq`]).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let same_five = Rc::clone(&five);
+    /// let other_five = Rc::new(5);
+    ///
+    /// assert!(Rc::ptr_eq(&five, &same_five));
+    /// assert!(!Rc::ptr_eq(&five, &other_five));
+    /// ```
+    ///
+    /// [`ptr::eq`]: core::ptr::eq
+    #[inline]
+    #[must_use]
+    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+        this.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+impl<T: Clone> Rc<T> {
+    /// Makes a mutable reference into the given `Rc`.
+    ///
+    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
+    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
+    /// referred to as clone-on-write.
+    ///
+    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
+    /// pointers to this allocation will be disassociated.
+    ///
+    /// See also [`get_mut`], which will fail rather than cloning.
+    ///
+    /// [`clone`]: Clone::clone
+    /// [`get_mut`]: Rc::get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut data = Rc::new(5);
+    ///
+    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
+    /// let mut other_data = Rc::clone(&data);    // Won't clone inner data
+    /// *Rc::make_mut(&mut data) += 1;        // Clones inner data
+    /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
+    /// *Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
+    ///
+    /// // Now `data` and `other_data` point to different allocations.
+    /// assert_eq!(*data, 8);
+    /// assert_eq!(*other_data, 12);
+    /// ```
+    ///
+    /// [`Weak`] pointers will be disassociated:
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let mut data = Rc::new(75);
+    /// let weak = Rc::downgrade(&data);
+    ///
+    /// assert!(75 == *data);
+    /// assert!(75 == *weak.upgrade().unwrap());
+    ///
+    /// *Rc::make_mut(&mut data) += 1;
+    ///
+    /// assert!(76 == *data);
+    /// assert!(weak.upgrade().is_none());
+    /// ```
+    #[inline]
+    pub fn make_mut(this: &mut Self) -> &mut T {
+        if Rc::strong_count(this) != 1 {
+            // Gotta clone the data, there are other Rcs.
+            // Pre-allocate memory to allow writing the cloned value directly.
+            let mut rc = Self::new_uninit();
+            unsafe {
+                let data = Rc::get_mut_unchecked(&mut rc);
+                data.as_mut_ptr().write((**this).clone());
+                *this = rc.assume_init();
+            }
+        } else if Rc::weak_count(this) != 0 {
+            // Can just steal the data, all that's left is Weaks
+            let mut rc = Self::new_uninit();
+            unsafe {
+                let data: &mut MaybeUninit<T> = mem::transmute(Rc::get_mut_unchecked(&mut rc));
+                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
+
+                this.inner().dec_strong();
+                // Remove implicit strong-weak ref (no need to craft a fake
+                // Weak here -- we know other Weaks can clean up for us)
+                this.inner().dec_weak();
+                ptr::write(this, rc.assume_init());
+            }
+        }
+        // This unsafety is ok because we're guaranteed that the pointer
+        // returned is the *only* pointer that will ever be returned to T. Our
+        // reference count is guaranteed to be 1 at this point, and we required
+        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
+        // reference to the allocation.
+        unsafe {
+            let value = &mut this.ptr.as_mut().value;
+            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
+            // calling an associated function with a live `Rc`. If an `Rc` is
+            // not dead, the inner `MaybeUninit` is inhabited.
+            let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
+            &mut *(pointer_to_value)
+        }
+    }
+}
+
+impl<T> Rc<T> {
+    /// Allocates an `RcBox<T>` with sufficient space for
+    /// a possibly-unsized inner value where the value has the layout provided.
+    ///
+    /// The function `mem_to_rcbox` is called with the data pointer
+    /// and must return a (potentially fat) pointer for the `RcBox<T>`.
+    unsafe fn allocate_for_layout(
+        value_layout: Layout,
+        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
+        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
+    ) -> *mut RcBox<T> {
+        // Calculate layout using the given value layout.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let layout = Layout::new::<RcBox<()>>()
+            .extend(value_layout)
+            .unwrap()
+            .0
+            .pad_to_align();
+        Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
+            .unwrap_or_else(|_| handle_alloc_error(layout))
+    }
+
+    /// Allocates an `RcBox<T>` with sufficient space for
+    /// a possibly-unsized inner value where the value has the layout provided,
+    /// returning an error if allocation fails.
+    ///
+    /// The function `mem_to_rcbox` is called with the data pointer
+    /// and must return a (potentially fat) pointer for the `RcBox<T>`,
+    /// returning an error if allocation fails.
+    #[inline]
+    unsafe fn try_allocate_for_layout(
+        value_layout: Layout,
+        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
+        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
+    ) -> Result<*mut RcBox<T>, AllocError> {
+        // Calculate layout using the given value layout.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let layout = Layout::new::<RcBox<()>>()
+            .extend(value_layout)
+            .unwrap()
+            .0
+            .pad_to_align();
+
+        // Allocate for the layout.
+        let ptr = allocate(layout)?;
+
+        // Initialize the RcBox
+        let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
+        debug_assert_eq!(Layout::for_value(&*inner), layout);
+
+        ptr::write(&mut (*inner).strong, Cell::new(1));
+        ptr::write(&mut (*inner).weak, Cell::new(1));
+        ptr::write(
+            &mut (*inner).links,
+            MaybeUninit::new(RefCell::new(Links::new())),
+        );
+
+        Ok(inner)
+    }
+
+    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
+    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
+        // Allocate for the `RcBox<T>` using the given value.
+        Self::allocate_for_layout(
+            Layout::for_value(&*ptr),
+            |layout| Global.allocate(layout),
+            |mem| mem.with_metadata_of(ptr as *mut RcBox<T>),
+        )
+    }
+
+    fn from_box(v: Box<T>) -> Rc<T> {
+        unsafe {
+            let (box_unique, alloc) = Box::into_raw_with_allocator(v);
+            // SAFETY: Pointers obtained from `Box::into_raw` are always
+            // non-null.
+            let box_unique = NonNull::new_unchecked(box_unique);
+            let box_ptr = box_unique.as_ptr();
+
+            let value_size = mem::size_of_val(&*box_ptr);
+            let ptr = Self::allocate_for_ptr(box_ptr);
+
+            // Copy value as bytes
+            ptr::copy_nonoverlapping(
+                box_ptr.cast_const().cast::<u8>(),
+                ptr::addr_of_mut!((*ptr).value).cast::<u8>(),
+                value_size,
+            );
+
+            // Free the allocation without dropping its contents
+            box_free(box_unique, alloc);
+
+            Self::from_ptr(ptr)
+        }
+    }
+}
+
+impl<T> Deref for Rc<T> {
+    type Target = T;
+
+    #[inline(always)]
+    fn deref(&self) -> &T {
+        unsafe {
+            let value = &self.inner().value;
+            // SAFETY: we can cast the `MaybeUninit<T>` to a `T` because we are
+            // calling an associated function with a live `Rc`. If an `Rc` is
+            // not dead, the inner `MaybeUninit` is inhabited.
+            let pointer_to_value = (value as *const MaybeUninit<T>).cast::<T>();
+            &*(pointer_to_value)
+        }
+    }
+}
+
+impl<T> Clone for Rc<T> {
+    /// Makes a clone of the `Rc` pointer.
+    ///
+    /// This creates another pointer to the same allocation, increasing the
+    /// strong reference count.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let _ = Rc::clone(&five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Rc<T> {
+        self.inner().inc_strong();
+        Self::from_inner(self.ptr)
+    }
+}
+
+impl<T: Default> Default for Rc<T> {
+    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let x: Rc<i32> = Default::default();
+    /// assert_eq!(*x, 0);
+    /// ```
+    #[inline]
+    fn default() -> Rc<T> {
+        Rc::new(Default::default())
+    }
+}
+
+impl<T: PartialEq> PartialEq for Rc<T> {
+    /// Equality for two `Rc`s.
+    ///
+    /// Two `Rc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
+    /// always equal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five == Rc::new(5));
+    /// ```
+    #[inline]
+    fn eq(&self, other: &Rc<T>) -> bool {
+        **self == **other
+    }
+
+    /// Inequality for two `Rc`s.
+    ///
+    /// Two `Rc`s are unequal if their inner values are unequal.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
+    /// never unequal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five != Rc::new(6));
+    /// ```
+    #[inline]
+    #[allow(clippy::partialeq_ne_impl)]
+    fn ne(&self, other: &Rc<T>) -> bool {
+        **self != **other
+    }
+}
+
+impl<T: Eq> Eq for Rc<T> {}
+
+impl<T: PartialOrd> PartialOrd for Rc<T> {
+    /// Partial comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `partial_cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
+    /// ```
+    #[inline(always)]
+    fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
+        (**self).partial_cmp(&**other)
+    }
+
+    /// Less-than comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `<` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five < Rc::new(6));
+    /// ```
+    #[inline(always)]
+    fn lt(&self, other: &Rc<T>) -> bool {
+        **self < **other
+    }
+
+    /// 'Less than or equal to' comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `<=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five <= Rc::new(5));
+    /// ```
+    #[inline(always)]
+    fn le(&self, other: &Rc<T>) -> bool {
+        **self <= **other
+    }
+
+    /// Greater-than comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `>` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five > Rc::new(4));
+    /// ```
+    #[inline(always)]
+    fn gt(&self, other: &Rc<T>) -> bool {
+        **self > **other
+    }
+
+    /// 'Greater than or equal to' comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `>=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five >= Rc::new(5));
+    /// ```
+    #[inline(always)]
+    fn ge(&self, other: &Rc<T>) -> bool {
+        **self >= **other
+    }
+}
+
+impl<T: Ord> Ord for Rc<T> {
+    /// Comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
+    /// ```
+    #[inline]
+    fn cmp(&self, other: &Rc<T>) -> Ordering {
+        (**self).cmp(&**other)
+    }
+}
+
+impl<T: Hash> Hash for Rc<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        (**self).hash(state);
+    }
+}
+
+impl<T: fmt::Display> fmt::Display for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+impl<T> fmt::Pointer for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&ptr::addr_of!(**self), f)
+    }
+}
+
+impl<T> From<T> for Rc<T> {
+    /// Converts a generic type `T` into a `Rc<T>`
+    ///
+    /// The conversion allocates on the heap and moves `t`
+    /// from the stack into it.
+    ///
+    /// # Example
+    /// ```rust
+    /// # use cactusref::Rc;
+    /// let x = 5;
+    /// let rc = Rc::new(5);
+    ///
+    /// assert_eq!(Rc::from(x), rc);
+    /// ```
+    fn from(t: T) -> Self {
+        Rc::new(t)
+    }
+}
+
+impl<T> From<Box<T>> for Rc<T> {
+    /// Move a boxed object to a new, reference counted, allocation.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use cactusref::Rc;
+    /// let original: Box<i32> = Box::new(1);
+    /// let shared: Rc<i32> = Rc::from(original);
+    /// assert_eq!(1, *shared);
+    /// ```
+    #[inline]
+    fn from(v: Box<T>) -> Rc<T> {
+        Rc::from_box(v)
+    }
+}
+
+/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Rc`] pointers, since mutual owning references
+/// would never allow either [`Rc`] to be dropped. For example, a tree could
+/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
+///
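+/// # Examples
+///
+/// A minimal sketch of downgrading and upgrading:
+///
+/// ```
+/// use cactusref::Rc;
+///
+/// let strong = Rc::new("hello");
+/// let weak = Rc::downgrade(&strong);
+///
+/// // Upgrading succeeds while a strong pointer is alive ...
+/// assert!(weak.upgrade().is_some());
+///
+/// // ... and fails once the last strong pointer is dropped.
+/// drop(strong);
+/// assert!(weak.upgrade().is_none());
+/// ```
+///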
+/// [`upgrade`]: Weak::upgrade
+pub struct Weak<T> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is not necessarily a valid pointer.
+    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
+    // to allocate space on the heap.  That's not a value a real pointer
+    // will ever have because RcBox has alignment at least 2.
+    // This is only possible when `T: Sized`; unsized `T` never dangle.
+    ptr: NonNull<RcBox<T>>,
+    phantom: PhantomData<RcBox<T>>,
+}
+
+/// `Weak` is not `Send`.
+///
+/// ```compile_fail
+/// use cactusref::Weak;
+/// fn requires_send<T: Send>(val: T) {}
+/// let weak = Weak::<usize>::new();
+/// requires_send(weak);
+/// ```
+mod weak_is_not_send {}
+
+/// `Weak` is not `Sync`.
+///
+/// ```compile_fail
+/// use cactusref::Weak;
+/// fn requires_sync<T: Sync>(val: T) {}
+/// let weak = Weak::<usize>::new();
+/// requires_sync(weak);
+/// ```
+mod weak_is_not_sync {}
+
+impl<T> Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
+    ///
+    /// [`upgrade`]: Weak::upgrade
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Weak;
+    ///
+    /// let empty: Weak<i64> = Weak::new();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    #[must_use]
+    pub fn new() -> Weak<T> {
+        Weak {
+            ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
+            phantom: PhantomData,
+        }
+    }
+}
+
+pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
+    let address = ptr.cast::<()>() as usize;
+    address == usize::MAX
+}
+
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+struct WeakInner<'a> {
+    weak: &'a Cell<usize>,
+    strong: &'a Cell<usize>,
+}
+
+impl<T> Weak<T> {
+    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
+    ///
+    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
+    /// unaligned or even [`null`] otherwise.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    /// use std::ptr;
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    /// let weak = Rc::downgrade(&strong);
+    /// // Both point to the same object
+    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
+    /// // The strong here keeps it alive, so we can still access the object.
+    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    ///
+    /// drop(strong);
+    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+    /// // undefined behaviour.
+    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    /// ```
+    ///
+    /// [`null`]: core::ptr::null
+    #[must_use]
+    pub fn as_ptr(&self) -> *const T {
+        let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
+
+        if is_dangling(ptr) {
+            // If the pointer is dangling, we return the sentinel directly. This cannot be
+            // a valid payload address, as the payload is at least as aligned as RcBox (usize).
+            ptr as *const T
+        } else {
+            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
+            // The payload may be dropped at this point, and we have to maintain provenance,
+            // so use raw pointer manipulation.
+            //
+            // Note: the payload may already have been dropped, so this only computes the
+            // address of the `value` field; callers must check that the allocation is
+            // still live before dereferencing the returned pointer.
+            unsafe { ptr::addr_of_mut!((*ptr).value) as *const T }
+        }
+    }
+
+    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    ///
+    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+    /// one weak reference (the weak count is not modified by this operation). It can be turned
+    /// back into the `Weak<T>` with [`from_raw`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    /// let weak = Rc::downgrade(&strong);
+    /// let raw = weak.into_raw();
+    ///
+    /// assert_eq!(1, Rc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw(raw) });
+    /// assert_eq!(0, Rc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw`]: Weak::from_raw
+    /// [`as_ptr`]: Weak::as_ptr
+    #[must_use]
+    pub fn into_raw(self) -> *const T {
+        let result = self.as_ptr();
+        mem::forget(self);
+        result
+    }
+
+    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+    ///
+    /// This can be used to safely get a strong reference (by calling [`upgrade`]
+    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+    ///
+    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
+    /// as these don't own anything; the method still works on them).
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have originated from the [`into_raw`] and must still own its potential
+    /// weak reference.
+    ///
+    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
+    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
+    /// count is not modified by this operation) and therefore it must be paired with a previous
+    /// call to [`into_raw`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    ///
+    /// let raw_1 = Rc::downgrade(&strong).into_raw();
+    /// let raw_2 = Rc::downgrade(&strong).into_raw();
+    ///
+    /// assert_eq!(2, Rc::weak_count(&strong));
+    ///
+    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+    /// assert_eq!(1, Rc::weak_count(&strong));
+    ///
+    /// drop(strong);
+    ///
+    /// // Decrement the last weak count.
+    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+    /// ```
+    ///
+    /// [`into_raw`]: Weak::into_raw
+    /// [`upgrade`]: Weak::upgrade
+    /// [`new`]: Weak::new
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        // See Weak::as_ptr for context on how the input pointer is derived.
+
+        let ptr = if is_dangling(ptr.cast_mut()) {
+            // This is a dangling Weak.
+            ptr as *mut RcBox<T>
+        } else {
+            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
+            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
+            let offset = data_offset(ptr);
+            // Thus, we reverse the offset to get the whole RcBox.
+            // SAFETY: the pointer originated from a Weak, so this offset is safe.
+            (ptr as *mut u8)
+                .offset(-offset)
+                .with_metadata_of(ptr as *mut RcBox<T>)
+        };
+
+        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
+        Weak {
+            ptr: NonNull::new_unchecked(ptr),
+            phantom: PhantomData,
+        }
+    }
+
+    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
+    /// dropping of the inner value if successful.
+    ///
+    /// Returns [`None`] if the inner value has since been dropped.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let weak_five = Rc::downgrade(&five);
+    ///
+    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
+    /// assert!(strong_five.is_some());
+    ///
+    /// // Destroy all strong pointers.
+    /// drop(strong_five);
+    /// drop(five);
+    ///
+    /// assert!(weak_five.upgrade().is_none());
+    /// ```
+    #[must_use]
+    pub fn upgrade(&self) -> Option<Rc<T>> {
+        let inner = self.inner()?;
+        if inner.is_dead() {
+            None
+        } else {
+            inner.inc_strong();
+            Some(Rc::from_inner(self.ptr))
+        }
+    }
+
+    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
+    ///
+    /// If `self` was created using [`Weak::new`], this will return 0.
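+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// let five = Rc::new(5);
+    /// let weak_five = Rc::downgrade(&five);
+    /// assert_eq!(1, weak_five.strong_count());
+    ///
+    /// let empty: Weak<i32> = Weak::new();
+    /// assert_eq!(0, empty.strong_count());
+    /// ```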
+    #[must_use]
+    pub fn strong_count(&self) -> usize {
+        if let Some(inner) = self.inner() {
+            if inner.is_uninit() {
+                0
+            } else {
+                inner.strong()
+            }
+        } else {
+            0
+        }
+    }
+
+    /// Gets the number of `Weak` pointers pointing to this allocation.
+    ///
+    /// If no strong pointers remain, this will return zero.
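+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let weak_five = Rc::downgrade(&five);
+    /// assert_eq!(1, weak_five.weak_count());
+    ///
+    /// let also_weak = Rc::downgrade(&five);
+    /// assert_eq!(2, weak_five.weak_count());
+    /// assert_eq!(2, also_weak.weak_count());
+    /// ```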
+    #[must_use]
+    pub fn weak_count(&self) -> usize {
+        self.inner().map_or(0, |inner| {
+            if inner.is_uninit() {
+                0
+            } else if inner.strong() > 0 {
+                inner.weak() - 1 // subtract the implicit weak ptr
+            } else {
+                0
+            }
+        })
+    }
+
+    /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
+    /// (i.e., when this `Weak` was created by `Weak::new`).
+    #[inline]
+    #[must_use]
+    fn inner(&self) -> Option<WeakInner<'_>> {
+        if is_dangling(self.ptr.as_ptr()) {
+            None
+        } else {
+            // We are careful to *not* create a reference covering the "data" field, as
+            // the field may be mutated concurrently (for example, if the last `Rc`
+            // is dropped, the data field will be dropped in-place).
+            Some(unsafe {
+                let ptr = self.ptr.as_ptr();
+                WeakInner {
+                    strong: &(*ptr).strong,
+                    weak: &(*ptr).weak,
+                }
+            })
+        }
+    }
+
+    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+    /// [`ptr::eq`]), or if both don't point to any allocation
+    /// (because they were created with `Weak::new()`).
+    ///
+    /// # Notes
+    ///
+    /// Since this compares pointers it means that `Weak::new()` will equal each
+    /// other, even though they don't point to any allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Rc;
+    ///
+    /// let first_rc = Rc::new(5);
+    /// let first = Rc::downgrade(&first_rc);
+    /// let second = Rc::downgrade(&first_rc);
+    ///
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Rc::new(5);
+    /// let third = Rc::downgrade(&third_rc);
+    ///
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// Comparing `Weak::new`.
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// let first = Weak::new();
+    /// let second = Weak::new();
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Rc::new(());
+    /// let third = Rc::downgrade(&third_rc);
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// [`ptr::eq`]: core::ptr::eq
+    #[inline]
+    #[must_use]
+    pub fn ptr_eq(&self, other: &Self) -> bool {
+        self.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+unsafe impl<#[may_dangle] T> Drop for Weak<T> {
+    /// Drops the `Weak` pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo = Rc::new(Foo);
+    /// let weak_foo = Rc::downgrade(&foo);
+    /// let other_weak_foo = Weak::clone(&weak_foo);
+    ///
+    /// drop(weak_foo);   // Doesn't print anything
+    /// drop(foo);        // Prints "dropped!"
+    ///
+    /// assert!(other_weak_foo.upgrade().is_none());
+    /// ```
+    fn drop(&mut self) {
+        let inner = if let Some(inner) = self.inner() {
+            inner
+        } else {
+            return;
+        };
+
+        inner.dec_weak();
+        // the weak count starts at 1, and will only go to zero if all
+        // the strong pointers have disappeared.
+        if inner.weak() == 0 {
+            unsafe {
+                // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw`
+                // is always safe to call.
+                let layout = Layout::for_value_raw(self.ptr.as_ptr());
+                Global.deallocate(self.ptr.cast(), layout);
+            }
+        }
+    }
+}
+
+impl<T> Clone for Weak<T> {
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::{Rc, Weak};
+    ///
+    /// let weak_five = Rc::downgrade(&Rc::new(5));
+    ///
+    /// let _ = Weak::clone(&weak_five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Weak<T> {
+        if let Some(inner) = self.inner() {
+            inner.inc_weak();
+        }
+        Weak {
+            ptr: self.ptr,
+            phantom: PhantomData,
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for Weak<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "(Weak)")
+    }
+}
+
+impl<T> Default for Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
+    ///
+    /// [`None`]: Option
+    /// [`upgrade`]: Weak::upgrade
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use cactusref::Weak;
+    ///
+    /// let empty: Weak<i64> = Default::default();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    fn default() -> Weak<T> {
+        Weak::new()
+    }
+}
+
+// NOTE: We abort on reference-count overflow to deal with mem::forget safely.
+// In particular, if you mem::forget Rcs (or Weaks), the ref-count can overflow,
+// and then you can free the allocation while outstanding Rcs (or Weaks) exist.
+// We abort because this is such a degenerate scenario that we don't care about
+// what happens -- no real program should ever experience this.
+//
+// This should have negligible overhead since you don't actually need to
+// clone these much in Rust thanks to ownership and move-semantics.
+
+#[doc(hidden)]
+pub(crate) trait RcInnerPtr {
+    fn weak_ref(&self) -> &Cell<usize>;
+    fn strong_ref(&self) -> &Cell<usize>;
+
+    #[inline]
+    fn strong(&self) -> usize {
+        self.strong_ref().get()
+    }
+
+    #[inline]
+    fn inc_strong(&self) {
+        let strong = self.strong();
+
+        // We want to abort on overflow instead of dropping the value.
+        // The reference count will never be zero when this is called;
+        // nevertheless, we insert an abort here to hint LLVM at
+        // an otherwise missed optimization.
+        if strong == 0 || strong == usize::MAX {
+            abort();
+        }
+        // `usize::MAX` is used to mark the `Rc` as uninitialized, so disallow
+        // incrementing the strong count to prevent a memory leak and type
+        // confusion in `Drop::drop`.
+        if strong + 1 == usize::MAX {
+            abort();
+        }
+        self.strong_ref().set(strong + 1);
+    }
+
+    #[inline]
+    fn dec_strong(&self) {
+        self.strong_ref().set(self.strong() - 1);
+    }
+
+    #[inline]
+    fn weak(&self) -> usize {
+        self.weak_ref().get()
+    }
+
+    #[inline]
+    fn inc_weak(&self) {
+        let weak = self.weak();
+
+        // We want to abort on overflow instead of dropping the value.
+        // The reference count will never be zero when this is called;
+        // nevertheless, we insert an abort here to hint LLVM at
+        // an otherwise missed optimization.
+        if weak == 0 || weak == usize::MAX {
+            abort();
+        }
+        self.weak_ref().set(weak + 1);
+    }
+
+    #[inline]
+    fn dec_weak(&self) {
+        self.weak_ref().set(self.weak() - 1);
+    }
+
+    #[inline]
+    fn is_dead(&self) -> bool {
+        self.strong() == 0 || self.is_uninit()
+    }
+
+    #[inline]
+    fn is_uninit(&self) -> bool {
+        self.strong() == usize::MAX
+    }
+
+    #[inline]
+    fn make_uninit(&self) {
+        self.strong_ref().set(usize::MAX);
+    }
+}
+
+impl<T> RcInnerPtr for RcBox<T> {
+    #[inline(always)]
+    fn weak_ref(&self) -> &Cell<usize> {
+        &self.weak
+    }
+
+    #[inline(always)]
+    fn strong_ref(&self) -> &Cell<usize> {
+        &self.strong
+    }
+}
+
+impl<'a> RcInnerPtr for WeakInner<'a> {
+    #[inline(always)]
+    fn weak_ref(&self) -> &Cell<usize> {
+        self.weak
+    }
+
+    #[inline(always)]
+    fn strong_ref(&self) -> &Cell<usize> {
+        self.strong
+    }
+}
+
+impl<T> borrow::Borrow<T> for Rc<T> {
+    fn borrow(&self) -> &T {
+        self
+    }
+}
+
+impl<T> AsRef<T> for Rc<T> {
+    fn as_ref(&self) -> &T {
+        self
+    }
+}
+
+impl<T> Unpin for Rc<T> {}
+
+/// Get the offset within an `RcBox` for the payload behind a pointer.
+///
+/// # Safety
+///
+/// The pointer must point to (and have valid metadata for) a previously
+/// valid instance of T, but the T is allowed to be dropped.
+unsafe fn data_offset<T>(ptr: *const T) -> isize {
+    let _ = ptr;
+
+    // Compute the byte offset of the `value` field within `RcBox<T>` without
+    // reading any memory: stage an uninitialized `RcBox<T>` on the stack and
+    // measure the distance from its base address to the `value` field address.
+    let rcbox = MaybeUninit::<RcBox<T>>::uninit();
+
+    let base_ptr = rcbox.as_ptr();
+    let base_ptr = base_ptr as usize;
+
+    let field_ptr = ptr::addr_of!((*(base_ptr as *const RcBox<T>)).value);
+    let field_ptr = field_ptr as usize;
+
+    (field_ptr - base_ptr) as isize
+}
+
+// Deallocate a `Box` without destroying the inner `T`.
+//
+// # Safety
+//
+// Callers must ensure that `ptr` was allocated by `Box::new` with the global allocator.
+//
+// Callers must ensure that `T` is not dropped.
+#[inline]
+unsafe fn box_free<T, A: Allocator>(ptr: NonNull<T>, alloc: A) {
+    // SAFETY: `T` is `Sized`, which means `Layout::for_value_raw` is always
+    // safe to call.
+    let layout = Layout::for_value_raw(ptr.as_ptr());
+
+    alloc.deallocate(ptr.cast(), layout);
+}
+
\ No newline at end of file diff --git a/src/hashbrown/external_trait_impls/mod.rs.html b/src/hashbrown/external_trait_impls/mod.rs.html new file mode 100644 index 000000000..48faddabf --- /dev/null +++ b/src/hashbrown/external_trait_impls/mod.rs.html @@ -0,0 +1,13 @@ +mod.rs - source
#[cfg(feature = "rayon")]
+pub(crate) mod rayon;
+#[cfg(feature = "rkyv")]
+mod rkyv;
+#[cfg(feature = "serde")]
+mod serde;
+
\ No newline at end of file diff --git a/src/hashbrown/lib.rs.html b/src/hashbrown/lib.rs.html new file mode 100644 index 000000000..5e192772a --- /dev/null +++ b/src/hashbrown/lib.rs.html @@ -0,0 +1,377 @@ +lib.rs - source
//! This crate is a Rust port of Google's high-performance [SwissTable] hash
+//! map, adapted to make it a drop-in replacement for Rust's standard `HashMap`
+//! and `HashSet` types.
+//!
+//! The original C++ version of [SwissTable] can be found [here], and this
+//! [CppCon talk] gives an overview of how the algorithm works.
+//!
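+//! # Examples
+//!
+//! A minimal usage sketch; the `HashMap` API mirrors `std::collections::HashMap`:
+//!
+//! ```
+//! use hashbrown::HashMap;
+//!
+//! let mut map = HashMap::new();
+//! map.insert("answer", 42);
+//! assert_eq!(map.get("answer"), Some(&42));
+//! ```
+//!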
+//! [SwissTable]: https://abseil.io/blog/20180927-swisstables
+//! [here]: https://github.com/abseil/abseil-cpp/blob/master/absl/container/internal/raw_hash_set.h
+//! [CppCon talk]: https://www.youtube.com/watch?v=ncHmEUmJZf4
+
+#![no_std]
+#![cfg_attr(
+    feature = "nightly",
+    feature(
+        test,
+        core_intrinsics,
+        dropck_eyepatch,
+        min_specialization,
+        extend_one,
+        allocator_api,
+        slice_ptr_get,
+        maybe_uninit_array_assume_init,
+        strict_provenance
+    )
+)]
+#![allow(
+    clippy::doc_markdown,
+    clippy::module_name_repetitions,
+    clippy::must_use_candidate,
+    clippy::option_if_let_else,
+    clippy::redundant_else,
+    clippy::manual_map,
+    clippy::missing_safety_doc,
+    clippy::missing_errors_doc
+)]
+#![warn(missing_docs)]
+#![warn(rust_2018_idioms)]
+#![cfg_attr(feature = "nightly", warn(fuzzy_provenance_casts))]
+#![cfg_attr(feature = "nightly", allow(internal_features))]
+
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+
+#[cfg_attr(test, macro_use)]
+extern crate alloc;
+
+#[cfg(feature = "nightly")]
+#[cfg(doctest)]
+doc_comment::doctest!("../README.md");
+
+#[macro_use]
+mod macros;
+
+#[cfg(feature = "raw")]
+/// Experimental and unsafe `RawTable` API. This module is only available if the
+/// `raw` feature is enabled.
+pub mod raw {
+    // The RawTable API is still experimental and is not properly documented yet.
+    #[allow(missing_docs)]
+    #[path = "mod.rs"]
+    mod inner;
+    pub use inner::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash maps.
+    /// You will rarely need to interact with it directly unless you have need
+    /// to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::raw::*;
+    }
+}
+#[cfg(not(feature = "raw"))]
+mod raw;
+
+mod external_trait_impls;
+mod map;
+#[cfg(feature = "rustc-internal-api")]
+mod rustc_entry;
+mod scopeguard;
+mod set;
+mod table;
+
+pub mod hash_map {
+    //! A hash map implemented with quadratic probing and SIMD lookup.
+    pub use crate::map::*;
+
+    #[cfg(feature = "rustc-internal-api")]
+    pub use crate::rustc_entry::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash maps.
+    /// You will rarely need to interact with it directly unless you have need
+    /// to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::map::*;
+    }
+}
+pub mod hash_set {
+    //! A hash set implemented as a `HashMap` where the value is `()`.
+    pub use crate::set::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash sets.
+    /// You will rarely need to interact with it directly unless you have need
+    /// to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::set::*;
+    }
+}
+pub mod hash_table {
+    //! A hash table implemented with quadratic probing and SIMD lookup.
+    pub use crate::table::*;
+
+    #[cfg(feature = "rayon")]
+    /// [rayon]-based parallel iterator types for hash tables.
+    /// You will rarely need to interact with it directly unless you have need
+    /// to name one of the iterator types.
+    ///
+    /// [rayon]: https://docs.rs/rayon/1.0/rayon
+    pub mod rayon {
+        pub use crate::external_trait_impls::rayon::table::*;
+    }
+}
+
+pub use crate::map::HashMap;
+pub use crate::set::HashSet;
+pub use crate::table::HashTable;
+
+#[cfg(feature = "equivalent")]
+pub use equivalent::Equivalent;
+
+// This is only used as a fallback when building as part of `std`.
+#[cfg(not(feature = "equivalent"))]
+/// Key equivalence trait.
+///
+/// This trait defines the function used to compare the input value with the
+/// map keys (or set values) during a lookup operation such as [`HashMap::get`]
+/// or [`HashSet::contains`].
+/// It is provided with a blanket implementation based on the
+/// [`Borrow`](core::borrow::Borrow) trait.
+///
+/// # Correctness
+///
+/// Equivalent values must hash to the same value.
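+///
+/// # Examples
+///
+/// An illustrative sketch with a hypothetical `Pair` lookup type that can be
+/// compared against `(String, String)` keys without allocating:
+///
+/// ```
+/// use hashbrown::Equivalent;
+///
+/// struct Pair<'a>(&'a str, &'a str);
+///
+/// impl Equivalent<(String, String)> for Pair<'_> {
+///     fn equivalent(&self, key: &(String, String)) -> bool {
+///         self.0 == key.0 && self.1 == key.1
+///     }
+/// }
+/// ```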
+pub trait Equivalent<K: ?Sized> {
+    /// Checks if this value is equivalent to the given key.
+    ///
+    /// Returns `true` if both values are equivalent, and `false` otherwise.
+    ///
+    /// # Correctness
+    ///
+    /// When this function returns `true`, both `self` and `key` must hash to
+    /// the same value.
+    fn equivalent(&self, key: &K) -> bool;
+}
+
+#[cfg(not(feature = "equivalent"))]
+impl<Q: ?Sized, K: ?Sized> Equivalent<K> for Q
+where
+    Q: Eq,
+    K: core::borrow::Borrow<Q>,
+{
+    fn equivalent(&self, key: &K) -> bool {
+        self == key.borrow()
+    }
+}
+
+/// The error type for `try_reserve` methods.
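+///
+/// # Examples
+///
+/// A sketch of how this error surfaces from a fallible reservation:
+///
+/// ```
+/// use hashbrown::{HashMap, TryReserveError};
+///
+/// let mut map: HashMap<u32, u32> = HashMap::new();
+/// // Requesting an absurd capacity reports an error instead of aborting.
+/// let result: Result<(), TryReserveError> = map.try_reserve(usize::MAX);
+/// assert!(result.is_err());
+/// ```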
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub enum TryReserveError {
+    /// Error due to the computed capacity exceeding the collection's maximum
+    /// (usually `isize::MAX` bytes).
+    CapacityOverflow,
+
+    /// The memory allocator returned an error
+    AllocError {
+        /// The layout of the allocation request that failed.
+        layout: alloc::alloc::Layout,
+    },
+}
+
\ No newline at end of file diff --git a/src/hashbrown/macros.rs.html b/src/hashbrown/macros.rs.html new file mode 100644 index 000000000..8ac3c7c92 --- /dev/null +++ b/src/hashbrown/macros.rs.html @@ -0,0 +1,141 @@ +macros.rs - source
// See the cfg-if crate.
+#[allow(unused_macro_rules)]
+macro_rules! cfg_if {
+    // match if/else chains with a final `else`
+    ($(
+        if #[cfg($($meta:meta),*)] { $($it:item)* }
+    ) else * else {
+        $($it2:item)*
+    }) => {
+        cfg_if! {
+            @__items
+            () ;
+            $( ( ($($meta),*) ($($it)*) ), )*
+            ( () ($($it2)*) ),
+        }
+    };
+
+    // match if/else chains lacking a final `else`
+    (
+        if #[cfg($($i_met:meta),*)] { $($i_it:item)* }
+        $(
+            else if #[cfg($($e_met:meta),*)] { $($e_it:item)* }
+        )*
+    ) => {
+        cfg_if! {
+            @__items
+            () ;
+            ( ($($i_met),*) ($($i_it)*) ),
+            $( ( ($($e_met),*) ($($e_it)*) ), )*
+            ( () () ),
+        }
+    };
+
+    // Internal and recursive macro to emit all the items
+    //
+    // Collects all the negated cfgs in a list at the beginning and after the
+    // semicolon is all the remaining items
+    (@__items ($($not:meta,)*) ; ) => {};
+    (@__items ($($not:meta,)*) ; ( ($($m:meta),*) ($($it:item)*) ), $($rest:tt)*) => {
+        // Emit all items within one block, applying an appropriate #[cfg]. The
+        // #[cfg] will require all `$m` matchers specified and must also negate
+        // all previous matchers.
+        cfg_if! { @__apply cfg(all($($m,)* not(any($($not),*)))), $($it)* }
+
+        // Recurse to emit all other items in `$rest`, and when we do so add all
+        // our `$m` matchers to the list of `$not` matchers as future emissions
+        // will have to negate everything we just matched as well.
+        cfg_if! { @__items ($($not,)* $($m,)*) ; $($rest)* }
+    };
+
+    // Internal macro to Apply a cfg attribute to a list of items
+    (@__apply $m:meta, $($it:item)*) => {
+        $(#[$m] $it)*
+    };
+}
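+
+// Illustrative use only: `cfg_if!` keeps the first branch whose `cfg`
+// predicate is enabled and disables the rest, e.g.
+//
+// cfg_if! {
+//     if #[cfg(feature = "nightly")] {
+//         fn backend() -> &'static str { "nightly" }
+//     } else {
+//         fn backend() -> &'static str { "stable" }
+//     }
+// }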
+
+// Helper macro for specialization. This also helps avoid parse errors if the
+// default fn syntax for specialization changes in the future.
+#[cfg(feature = "nightly")]
+macro_rules! default_fn {
+	(#[$($a:tt)*] $($tt:tt)*) => {
+        #[$($a)*] default $($tt)*
+    }
+}
+#[cfg(not(feature = "nightly"))]
+macro_rules! default_fn {
+	($($tt:tt)*) => {
+        $($tt)*
+    }
+}
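+
+// Illustrative use only: wrapping an item in `default_fn!` marks it `default`
+// (a specializable item) on nightly and leaves it unchanged on stable, e.g.
+//
+// default_fn! {
+//     #[inline]
+//     fn reserve(&mut self, additional: usize) { /* ... */ }
+// }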
+
\ No newline at end of file diff --git a/src/hashbrown/map.rs.html b/src/hashbrown/map.rs.html new file mode 100644 index 000000000..5d0364e40 --- /dev/null +++ b/src/hashbrown/map.rs.html @@ -0,0 +1,17921 @@ +map.rs - source
+
use crate::raw::{
+    Allocator, Bucket, Global, RawDrain, RawExtractIf, RawIntoIter, RawIter, RawTable,
+};
+use crate::{Equivalent, TryReserveError};
+use core::borrow::Borrow;
+use core::fmt::{self, Debug};
+use core::hash::{BuildHasher, Hash};
+use core::iter::FusedIterator;
+use core::marker::PhantomData;
+use core::mem;
+use core::ops::Index;
+
+/// Default hasher for `HashMap`.
+#[cfg(feature = "ahash")]
+pub type DefaultHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+
+/// Dummy default hasher for `HashMap`.
+#[cfg(not(feature = "ahash"))]
+pub enum DefaultHashBuilder {}
+
+/// A hash map implemented with quadratic probing and SIMD lookup.
+///
+/// The default hashing algorithm is currently [`AHash`], though this is
+/// subject to change at any point in the future. This hash function is very
+/// fast for all types of keys, but this algorithm will typically *not* protect
+/// against attacks such as HashDoS.
+///
+/// The hashing algorithm can be replaced on a per-`HashMap` basis using the
+/// [`default`], [`with_hasher`], and [`with_capacity_and_hasher`] methods. Many
+/// alternative algorithms are available on crates.io, such as the [`fnv`] crate.
+///
+/// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although
+/// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`.
+/// If you implement these yourself, it is important that the following
+/// property holds:
+///
+/// ```text
+/// k1 == k2 -> hash(k1) == hash(k2)
+/// ```
+///
+/// In other words, if two keys are equal, their hashes must be equal.
+///
+/// It is a logic error for a key to be modified in such a way that the key's
+/// hash, as determined by the [`Hash`] trait, or its equality, as determined by
+/// the [`Eq`] trait, changes while it is in the map. This is normally only
+/// possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// It is also a logic error for the [`Hash`] implementation of a key to panic.
+/// This is generally only possible if the trait is implemented manually. If a
+/// panic does occur then the contents of the `HashMap` may become corrupted and
+/// some items may be dropped from the table.
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `HashMap<String, String>` in this example).
+/// let mut book_reviews = HashMap::new();
+///
+/// // Review some books.
+/// book_reviews.insert(
+///     "Adventures of Huckleberry Finn".to_string(),
+///     "My favorite book.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "Grimms' Fairy Tales".to_string(),
+///     "Masterpiece.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "Pride and Prejudice".to_string(),
+///     "Very enjoyable.".to_string(),
+/// );
+/// book_reviews.insert(
+///     "The Adventures of Sherlock Holmes".to_string(),
+///     "Eye lyked it alot.".to_string(),
+/// );
+///
+/// // Check for a specific one.
+/// // When collections store owned values (String), they can still be
+/// // queried using references (&str).
+/// if !book_reviews.contains_key("Les Misérables") {
+///     println!("We've got {} reviews, but Les Misérables ain't one.",
+///              book_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// book_reviews.remove("The Adventures of Sherlock Holmes");
+///
+/// // Look up the values associated with some keys.
+/// let to_find = ["Pride and Prejudice", "Alice's Adventure in Wonderland"];
+/// for &book in &to_find {
+///     match book_reviews.get(book) {
+///         Some(review) => println!("{}: {}", book, review),
+///         None => println!("{} is unreviewed.", book)
+///     }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Review for Jane: {}", book_reviews["Pride and Prejudice"]);
+///
+/// // Iterate over everything.
+/// for (book, review) in &book_reviews {
+///     println!("{}: \"{}\"", book, review);
+/// }
+/// ```
+///
+/// `HashMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `HashMap<&str, u8>` in this example).
+/// let mut player_stats = HashMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+///     // could actually return some random value here - let's just return
+///     // some fixed value for now
+///     42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+///
+/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
+/// We must also derive [`PartialEq`].
+///
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
+/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
+/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
+/// [`default`]: #method.default
+/// [`with_hasher`]: #method.with_hasher
+/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
+/// [`fnv`]: https://crates.io/crates/fnv
+/// [`AHash`]: https://crates.io/crates/ahash
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// #[derive(Hash, Eq, PartialEq, Debug)]
+/// struct Viking {
+///     name: String,
+///     country: String,
+/// }
+///
+/// impl Viking {
+///     /// Creates a new Viking.
+///     fn new(name: &str, country: &str) -> Viking {
+///         Viking { name: name.to_string(), country: country.to_string() }
+///     }
+/// }
+///
+/// // Use a HashMap to store the vikings' health points.
+/// let mut vikings = HashMap::new();
+///
+/// vikings.insert(Viking::new("Einar", "Norway"), 25);
+/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
+/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
+///
+/// // Use derived implementation to print the status of the vikings.
+/// for (viking, health) in &vikings {
+///     println!("{:?} has {} hp", viking, health);
+/// }
+/// ```
+///
+/// A `HashMap` with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let timber_resources: HashMap<&str, i32> = [("Norway", 100), ("Denmark", 50), ("Iceland", 10)]
+///     .into_iter().collect();
+/// // use the values stored in map
+/// ```
+pub struct HashMap<K, V, S = DefaultHashBuilder, A: Allocator = Global> {
+    pub(crate) hash_builder: S,
+    pub(crate) table: RawTable<(K, V), A>,
+}
+
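+/// Cloning a `HashMap` clones its hash builder and every `(key, value)` pair.
+///
+/// A minimal sketch of `clone` and of reusing an existing map with `clone_from`:
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let src: HashMap<i32, i32> = (0..4).map(|x| (x, x * 10)).collect();
+///
+/// // `clone` produces an independent copy of all entries.
+/// let copy = src.clone();
+/// assert_eq!(copy, src);
+///
+/// // `clone_from` overwrites an existing map's contents in place, reusing the
+/// // destination's storage where it can.
+/// let mut dst: HashMap<i32, i32> = HashMap::new();
+/// dst.clone_from(&src);
+/// assert_eq!(dst, src);
+/// ```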
+impl<K: Clone, V: Clone, S: Clone, A: Allocator + Clone> Clone for HashMap<K, V, S, A> {
+    fn clone(&self) -> Self {
+        HashMap {
+            hash_builder: self.hash_builder.clone(),
+            table: self.table.clone(),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.table.clone_from(&source.table);
+
+        // Update hash_builder only if we successfully cloned all elements.
+        self.hash_builder.clone_from(&source.hash_builder);
+    }
+}
+
+/// Ensures that a single closure type is used across all uses of this function, which in
+/// turn prevents multiple instances of functions like `RawTable::reserve` from being
+/// generated.
+#[cfg_attr(feature = "inline-more", inline)]
+pub(crate) fn make_hasher<Q, V, S>(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_
+where
+    Q: Hash,
+    S: BuildHasher,
+{
+    move |val| make_hash::<Q, S>(hash_builder, &val.0)
+}
+
+/// Ensures that a single closure type is used across all uses of this function, which in
+/// turn prevents multiple instances of functions like `RawTable::reserve` from being
+/// generated.
+#[cfg_attr(feature = "inline-more", inline)]
+fn equivalent_key<Q, K, V>(k: &Q) -> impl Fn(&(K, V)) -> bool + '_
+where
+    Q: ?Sized + Equivalent<K>,
+{
+    move |x| k.equivalent(&x.0)
+}
+
+/// Ensures that a single closure type is used across all uses of this function, which in
+/// turn prevents multiple instances of functions like `RawTable::reserve` from being
+/// generated.
+#[cfg_attr(feature = "inline-more", inline)]
+fn equivalent<Q, K>(k: &Q) -> impl Fn(&K) -> bool + '_
+where
+    Q: ?Sized + Equivalent<K>,
+{
+    move |x| k.equivalent(x)
+}
+
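+/// Computes the hash of a single value using the map's hash builder.
+///
+/// A minimal sketch of the equivalent computation in terms of the standard
+/// hashing traits (using `DefaultHashBuilder` purely for illustration):
+///
+/// ```
+/// use core::hash::{BuildHasher, Hash, Hasher};
+/// use hashbrown::hash_map::DefaultHashBuilder;
+///
+/// // Build a fresh hasher, feed the value through `Hash`, then finish it.
+/// fn hash_one<T: Hash + ?Sized>(hash_builder: &DefaultHashBuilder, val: &T) -> u64 {
+///     let mut state = hash_builder.build_hasher();
+///     val.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// let hash_builder = DefaultHashBuilder::default();
+/// // Hashing the same value with the same builder always yields the same hash.
+/// assert_eq!(hash_one(&hash_builder, "key"), hash_one(&hash_builder, "key"));
+/// ```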
+#[cfg(not(feature = "nightly"))]
+#[cfg_attr(feature = "inline-more", inline)]
+pub(crate) fn make_hash<Q, S>(hash_builder: &S, val: &Q) -> u64
+where
+    Q: Hash + ?Sized,
+    S: BuildHasher,
+{
+    use core::hash::Hasher;
+    let mut state = hash_builder.build_hasher();
+    val.hash(&mut state);
+    state.finish()
+}
+
+#[cfg(feature = "nightly")]
+#[cfg_attr(feature = "inline-more", inline)]
+pub(crate) fn make_hash<Q, S>(hash_builder: &S, val: &Q) -> u64
+where
+    Q: Hash + ?Sized,
+    S: BuildHasher,
+{
+    hash_builder.hash_one(val)
+}
+
+#[cfg(feature = "ahash")]
+impl<K, V> HashMap<K, V, DefaultHashBuilder> {
+    /// Creates an empty `HashMap`.
+    ///
+    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`], for example with the
+    /// [`with_hasher`](HashMap::with_hasher) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    /// assert_eq!(map.len(), 0);
+    /// assert_eq!(map.capacity(), 0);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`], for example with the
+    /// [`with_capacity_and_hasher`](HashMap::with_capacity_and_hasher) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
+    /// assert_eq!(map.len(), 0);
+    /// assert!(map.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_and_hasher(capacity, DefaultHashBuilder::default())
+    }
+}
+
+#[cfg(feature = "ahash")]
+impl<K, V, A: Allocator> HashMap<K, V, DefaultHashBuilder, A> {
+    /// Creates an empty `HashMap` using the given allocator.
+    ///
+    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`], for example with the
+    /// [`with_hasher_in`](HashMap::with_hasher_in) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use bumpalo::Bump;
+    ///
+    /// let bump = Bump::new();
+    /// let mut map = HashMap::new_in(&bump);
+    ///
+    /// // The created HashMap holds no elements
+    /// assert_eq!(map.len(), 0);
+    ///
+    /// // The created HashMap also doesn't allocate memory
+    /// assert_eq!(map.capacity(), 0);
+    ///
+    /// // Now we insert an element into the created HashMap
+    /// map.insert("One", 1);
+    /// // We can see that the HashMap holds 1 element
+    /// assert_eq!(map.len(), 1);
+    /// // And it also allocates some capacity
+    /// assert!(map.capacity() > 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new_in(alloc: A) -> Self {
+        Self::with_hasher_in(DefaultHashBuilder::default(), alloc)
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity using the given allocator.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`], for example with the
+    /// [`with_capacity_and_hasher_in`](HashMap::with_capacity_and_hasher_in) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use bumpalo::Bump;
+    ///
+    /// let bump = Bump::new();
+    /// let mut map = HashMap::with_capacity_in(5, &bump);
+    ///
+    /// // The created HashMap holds no elements
+    /// assert_eq!(map.len(), 0);
+    /// // But it can hold at least 5 elements without reallocating
+    /// let empty_map_capacity = map.capacity();
+    /// assert!(empty_map_capacity >= 5);
+    ///
+    /// // Now we insert 5 elements into the created HashMap
+    /// map.insert("One",   1);
+    /// map.insert("Two",   2);
+    /// map.insert("Three", 3);
+    /// map.insert("Four",  4);
+    /// map.insert("Five",  5);
+    ///
+    /// // We can see that the HashMap holds 5 elements
+    /// assert_eq!(map.len(), 5);
+    /// // But its capacity isn't changed
+    /// assert_eq!(map.capacity(), empty_map_capacity)
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self::with_capacity_and_hasher_in(capacity, DefaultHashBuilder::default(), alloc)
+    }
+}
+
+impl<K, V, S> HashMap<K, V, S> {
+    /// Creates an empty `HashMap` which will use the given hash builder to hash
+    /// keys.
+    ///
+    /// The hash map is initially created with a capacity of 0, so it will not
+    /// allocate until it is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the HashMap to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_hasher(s);
+    /// assert_eq!(map.len(), 0);
+    /// assert_eq!(map.capacity(), 0);
+    ///
+    /// map.insert(1, 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher(hash_builder: S) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::new(),
+        }
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
+    /// to hash the keys.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the HashMap to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
+    /// assert_eq!(map.len(), 0);
+    /// assert!(map.capacity() >= 10);
+    ///
+    /// map.insert(1, 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::with_capacity(capacity),
+        }
+    }
+}
+
+impl<K, V, S, A: Allocator> HashMap<K, V, S, A> {
+    /// Returns a reference to the underlying allocator.
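+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, assuming the same `bumpalo` allocator used by the
+    /// allocator-aware examples elsewhere in these docs:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use bumpalo::Bump;
+    ///
+    /// let bump = Bump::new();
+    /// let mut map = HashMap::new_in(&bump);
+    /// map.insert("one", 1);
+    ///
+    /// // `allocator` returns a reference to the allocator the map was created with.
+    /// let _alloc: &&Bump = map.allocator();
+    /// ```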
+    #[inline]
+    pub fn allocator(&self) -> &A {
+        self.table.allocator()
+    }
+
+    /// Creates an empty `HashMap` which will use the given hash builder to hash
+    /// keys. It will be allocated with the given allocator.
+    ///
+    /// The hash map is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`].
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_hasher(s);
+    /// map.insert(1, 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::new_in(alloc),
+        }
+    }
+
+    /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
+    /// to hash the keys. It will be allocated with the given allocator.
+    ///
+    /// The hash map will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash map will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashMap` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashMap`].
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut map = HashMap::with_capacity_and_hasher(10, s);
+    /// map.insert(1, 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher_in(capacity: usize, hash_builder: S, alloc: A) -> Self {
+        Self {
+            hash_builder,
+            table: RawTable::with_capacity_in(capacity, alloc),
+        }
+    }
+
+    /// Returns a reference to the map's [`BuildHasher`].
+    ///
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let hasher = DefaultHashBuilder::default();
+    /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
+    /// let hasher: &DefaultHashBuilder = map.hasher();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn hasher(&self) -> &S {
+        &self.hash_builder
+    }
+
+    /// Returns the number of elements the map can hold without reallocating.
+    ///
+    /// This number is a lower bound; the `HashMap<K, V>` might be able to hold
+    /// more, but is guaranteed to be able to hold at least this many.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// assert_eq!(map.len(), 0);
+    /// assert!(map.capacity() >= 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn capacity(&self) -> usize {
+        self.table.capacity()
+    }
+
+    /// An iterator visiting all keys in arbitrary order.
+    /// The iterator element type is `&'a K`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    /// assert_eq!(map.len(), 3);
+    /// let mut vec: Vec<&str> = Vec::new();
+    ///
+    /// for key in map.keys() {
+    ///     println!("{}", key);
+    ///     vec.push(*key);
+    /// }
+    ///
+    /// // The `Keys` iterator produces keys in arbitrary order, so the
+    /// // keys must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, ["a", "b", "c"]);
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn keys(&self) -> Keys<'_, K, V> {
+        Keys { inner: self.iter() }
+    }
+
+    /// An iterator visiting all values in arbitrary order.
+    /// The iterator element type is `&'a V`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    /// assert_eq!(map.len(), 3);
+    /// let mut vec: Vec<i32> = Vec::new();
+    ///
+    /// for val in map.values() {
+    ///     println!("{}", val);
+    ///     vec.push(*val);
+    /// }
+    ///
+    /// // The `Values` iterator produces values in arbitrary order, so the
+    /// // values must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [1, 2, 3]);
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn values(&self) -> Values<'_, K, V> {
+        Values { inner: self.iter() }
+    }
+
+    /// An iterator visiting all values mutably in arbitrary order.
+    /// The iterator element type is `&'a mut V`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    ///
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// for val in map.values_mut() {
+    ///     *val = *val + 10;
+    /// }
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// let mut vec: Vec<i32> = Vec::new();
+    ///
+    /// for val in map.values() {
+    ///     println!("{}", val);
+    ///     vec.push(*val);
+    /// }
+    ///
+    /// // The `Values` iterator produces values in arbitrary order, so the
+    /// // values must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [11, 12, 13]);
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
+        ValuesMut {
+            inner: self.iter_mut(),
+        }
+    }
+
+    /// An iterator visiting all key-value pairs in arbitrary order.
+    /// The iterator element type is `(&'a K, &'a V)`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    /// assert_eq!(map.len(), 3);
+    /// let mut vec: Vec<(&str, i32)> = Vec::new();
+    ///
+    /// for (key, val) in map.iter() {
+    ///     println!("key: {} val: {}", key, val);
+    ///     vec.push((*key, *val));
+    /// }
+    ///
+    /// // The `Iter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> Iter<'_, K, V> {
+        // Here we tie the lifetime of self to the iter.
+        unsafe {
+            Iter {
+                inner: self.table.iter(),
+                marker: PhantomData,
+            }
+        }
+    }
+
+    /// An iterator visiting all key-value pairs in arbitrary order,
+    /// with mutable references to the values.
+    /// The iterator element type is `(&'a K, &'a mut V)`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// // Update all values
+    /// for (_, val) in map.iter_mut() {
+    ///     *val *= 2;
+    /// }
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// let mut vec: Vec<(&str, i32)> = Vec::new();
+    ///
+    /// for (key, val) in &map {
+    ///     println!("key: {} val: {}", key, val);
+    ///     vec.push((*key, *val));
+    /// }
+    ///
+    /// // The `Iter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [("a", 2), ("b", 4), ("c", 6)]);
+    ///
+    /// assert_eq!(map.len(), 3);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
+        // Here we tie the lifetime of self to the iter.
+        unsafe {
+            IterMut {
+                inner: self.table.iter(),
+                marker: PhantomData,
+            }
+        }
+    }
+
+    #[cfg(test)]
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn raw_capacity(&self) -> usize {
+        self.table.buckets()
+    }
+
+    /// Returns the number of elements in the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// assert_eq!(a.len(), 0);
+    /// a.insert(1, "a");
+    /// assert_eq!(a.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn len(&self) -> usize {
+        self.table.len()
+    }
+
+    /// Returns `true` if the map contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// assert!(a.is_empty());
+    /// a.insert(1, "a");
+    /// assert!(!a.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Clears the map, returning all key-value pairs as an iterator. Keeps the
+    /// allocated memory for reuse.
+    ///
+    /// If the returned iterator is dropped before being fully consumed, it
+    /// drops the remaining key-value pairs. The returned iterator keeps a
+    /// mutable borrow on the map to optimize its implementation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// let capacity_before_drain = a.capacity();
+    ///
+    /// for (k, v) in a.drain().take(1) {
+    ///     assert!(k == 1 || k == 2);
+    ///     assert!(v == "a" || v == "b");
+    /// }
+    ///
+    /// // As we can see, the map is empty and contains no elements.
+    /// assert!(a.is_empty() && a.len() == 0);
+    /// // But the map's capacity is unchanged.
+    /// assert_eq!(a.capacity(), capacity_before_drain);
+    ///
+    /// let mut a = HashMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    ///
+    /// {   // Iterator is dropped without being consumed.
+    ///     let d = a.drain();
+    /// }
+    ///
+    /// // But the map is empty even though we did not consume the Drain iterator.
+    /// assert!(a.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> Drain<'_, K, V, A> {
+        Drain {
+            inner: self.table.drain(),
+        }
+    }
+
+    /// Retains only the elements specified by the predicate. Keeps the
+    /// allocated memory for reuse.
+    ///
+    /// In other words, remove all pairs `(k, v)` such that `f(&k, &mut v)` returns `false`.
+    /// The elements are visited in unsorted (and unspecified) order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
+    /// assert_eq!(map.len(), 8);
+    ///
+    /// map.retain(|&k, _| k % 2 == 0);
+    ///
+    /// // We can see that the number of elements in the map has changed.
+    /// assert_eq!(map.len(), 4);
+    ///
+    /// let mut vec: Vec<(i32, i32)> = map.iter().map(|(&k, &v)| (k, v)).collect();
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(0, 0), (2, 20), (4, 40), (6, 60)]);
+    /// ```
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        // Here we only use `iter` as a temporary, preventing use-after-free
+        unsafe {
+            for item in self.table.iter() {
+                let &mut (ref key, ref mut value) = item.as_mut();
+                if !f(key, value) {
+                    self.table.erase(item);
+                }
+            }
+        }
+    }
+
+    /// Drains the elements for which the given predicate returns `true`,
+    /// and returns an iterator over the removed items.
+    ///
+    /// In other words, move all pairs `(k, v)` such that `f(&k, &mut v)` returns `true` out
+    /// into another iterator.
+    ///
+    /// Note that `extract_if` lets you mutate every value in the filter closure, regardless of
+    /// whether you choose to keep or remove it.
+    ///
+    /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+    /// or the iteration short-circuits, then the remaining elements will be retained.
+    /// Use [`retain()`] with a negated predicate if you do not need the returned iterator.
+    ///
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// [`retain()`]: HashMap::retain
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+    ///
+    /// let drained: HashMap<i32, i32> = map.extract_if(|k, _v| k % 2 == 0).collect();
+    ///
+    /// let mut evens = drained.keys().cloned().collect::<Vec<_>>();
+    /// let mut odds = map.keys().cloned().collect::<Vec<_>>();
+    /// evens.sort();
+    /// odds.sort();
+    ///
+    /// assert_eq!(evens, vec![0, 2, 4, 6]);
+    /// assert_eq!(odds, vec![1, 3, 5, 7]);
+    ///
+    /// let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+    ///
+    /// {   // Iterator is dropped without being consumed.
+    ///     let d = map.extract_if(|k, _v| k % 2 != 0);
+    /// }
+    ///
+    /// // ExtractIf was not exhausted, therefore no elements were drained.
+    /// assert_eq!(map.len(), 8);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, K, V, F, A>
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        ExtractIf {
+            f,
+            inner: RawExtractIf {
+                iter: unsafe { self.table.iter() },
+                table: &mut self.table,
+            },
+        }
+    }
+
+    /// Clears the map, removing all key-value pairs. Keeps the allocated memory
+    /// for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut a = HashMap::new();
+    /// a.insert(1, "a");
+    /// let capacity_before_clear = a.capacity();
+    ///
+    /// a.clear();
+    ///
+    /// // Map is empty.
+    /// assert!(a.is_empty());
+    /// // But the map's capacity is unchanged.
+    /// assert_eq!(a.capacity(), capacity_before_clear);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        self.table.clear();
+    }
+
+    /// Creates a consuming iterator visiting all the keys in arbitrary order.
+    /// The map cannot be used after calling this.
+    /// The iterator element type is `K`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// let mut vec: Vec<&str> = map.into_keys().collect();
+    ///
+    /// // The `IntoKeys` iterator produces keys in arbitrary order, so the
+    /// // keys must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, ["a", "b", "c"]);
+    /// ```
+    #[inline]
+    pub fn into_keys(self) -> IntoKeys<K, V, A> {
+        IntoKeys {
+            inner: self.into_iter(),
+        }
+    }
+
+    /// Creates a consuming iterator visiting all the values in arbitrary order.
+    /// The map cannot be used after calling this.
+    /// The iterator element type is `V`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// let mut vec: Vec<i32> = map.into_values().collect();
+    ///
+    /// // The `IntoValues` iterator produces values in arbitrary order, so
+    /// // the values must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [1, 2, 3]);
+    /// ```
+    #[inline]
+    pub fn into_values(self) -> IntoValues<K, V, A> {
+        IntoValues {
+            inner: self.into_iter(),
+        }
+    }
+}
+
+impl<K, V, S, A> HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the `HashMap`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds [`isize::MAX`] bytes, and [`abort`]s the program
+    /// in case of an allocation error. Use [`try_reserve`](HashMap::try_reserve) instead
+    /// if you want to handle memory allocation failures.
+    ///
+    /// [`isize::MAX`]: https://doc.rust-lang.org/std/primitive.isize.html
+    /// [`abort`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    /// // Map is empty and doesn't allocate memory
+    /// assert_eq!(map.capacity(), 0);
+    ///
+    /// map.reserve(10);
+    ///
+    /// // And now map can hold at least 10 elements
+    /// assert!(map.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize) {
+        self.table
+            .reserve(additional, make_hasher::<_, V, S>(&self.hash_builder));
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `HashMap<K,V>`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, isize> = HashMap::new();
+    /// // Map is empty and doesn't allocate memory
+    /// assert_eq!(map.capacity(), 0);
+    ///
+    /// map.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+    ///
+    /// // And now map can hold at least 10 elements
+    /// assert!(map.capacity() >= 10);
+    /// ```
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned:
+    /// ```
+    /// # fn test() {
+    /// use hashbrown::HashMap;
+    /// use hashbrown::TryReserveError;
+    /// let mut map: HashMap<i32, i32> = HashMap::new();
+    ///
+    /// match map.try_reserve(usize::MAX) {
+    ///     Err(error) => match error {
+    ///         TryReserveError::CapacityOverflow => {}
+    ///         _ => panic!("TryReserveError::AllocError ?"),
+    ///     },
+    ///     _ => panic!(),
+    /// }
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(not(miri))]
+    /// #     test()
+    /// # }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.table
+            .try_reserve(additional, make_hasher::<_, V, S>(&self.hash_builder))
+    }
+
+    /// Shrinks the capacity of the map as much as possible. It will drop
+    /// down as much as possible while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// map.insert(1, 2);
+    /// map.insert(3, 4);
+    /// assert!(map.capacity() >= 100);
+    /// map.shrink_to_fit();
+    /// assert!(map.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to_fit(&mut self) {
+        self.table
+            .shrink_to(0, make_hasher::<_, V, S>(&self.hash_builder));
+    }
+
+    /// Shrinks the capacity of the map with a lower limit. It will drop
+    /// down no lower than the supplied limit while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// This function does nothing if the current capacity is smaller than the
+    /// supplied minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
+    /// map.insert(1, 2);
+    /// map.insert(3, 4);
+    /// assert!(map.capacity() >= 100);
+    /// map.shrink_to(10);
+    /// assert!(map.capacity() >= 10);
+    /// map.shrink_to(0);
+    /// assert!(map.capacity() >= 2);
+    /// map.shrink_to(10);
+    /// assert!(map.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.table
+            .shrink_to(min_capacity, make_hasher::<_, V, S>(&self.hash_builder));
+    }
+
+    /// Gets the given key's corresponding entry in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut letters = HashMap::new();
+    ///
+    /// for ch in "a short treatise on fungi".chars() {
+    ///     let counter = letters.entry(ch).or_insert(0);
+    ///     *counter += 1;
+    /// }
+    ///
+    /// assert_eq!(letters[&'s'], 2);
+    /// assert_eq!(letters[&'t'], 3);
+    /// assert_eq!(letters[&'u'], 1);
+    /// assert_eq!(letters.get(&'y'), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> {
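+        // Hash the key once; if an existing bucket for it is found, wrap it in an
+        // occupied entry, otherwise hand back a vacant entry that remembers the
+        // hash and key for a later insertion.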
+        let hash = make_hash::<K, S>(&self.hash_builder, &key);
+        if let Some(elem) = self.table.find(hash, equivalent_key(&key)) {
+            Entry::Occupied(OccupiedEntry {
+                hash,
+                key: Some(key),
+                elem,
+                table: self,
+            })
+        } else {
+            Entry::Vacant(VacantEntry {
+                hash,
+                key,
+                table: self,
+            })
+        }
+    }
+
+    /// Gets the given key's corresponding entry by reference in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut words: HashMap<String, usize> = HashMap::new();
+    /// let source = ["poneyland", "horseyland", "poneyland", "poneyland"];
+    /// for (i, &s) in source.iter().enumerate() {
+    ///     let counter = words.entry_ref(s).or_insert(0);
+    ///     *counter += 1;
+    /// }
+    ///
+    /// assert_eq!(words["poneyland"], 3);
+    /// assert_eq!(words["horseyland"], 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn entry_ref<'a, 'b, Q: ?Sized>(&'a mut self, key: &'b Q) -> EntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        let hash = make_hash::<Q, S>(&self.hash_builder, key);
+        if let Some(elem) = self.table.find(hash, equivalent_key(key)) {
+            EntryRef::Occupied(OccupiedEntryRef {
+                hash,
+                key: Some(KeyOrRef::Borrowed(key)),
+                elem,
+                table: self,
+            })
+        } else {
+            EntryRef::Vacant(VacantEntryRef {
+                hash,
+                key: KeyOrRef::Borrowed(key),
+                table: self,
+            })
+        }
+    }
+
+    /// Returns a reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get(&1), Some(&"a"));
+    /// assert_eq!(map.get(&2), None);
+    /// ```
+    #[inline]
+    pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner(k) {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+    /// assert_eq!(map.get_key_value(&2), None);
+    /// ```
+    #[inline]
+    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner(k) {
+            Some((key, value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
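+    /// Looks up the full `(key, value)` pair for `k`, skipping the hash
+    /// computation entirely when the table is empty.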
+    #[inline]
+    fn get_inner<Q: ?Sized>(&self, k: &Q) -> Option<&(K, V)>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        if self.table.is_empty() {
+            None
+        } else {
+            let hash = make_hash::<Q, S>(&self.hash_builder, k);
+            self.table.get(hash, equivalent_key(k))
+        }
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to the value.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// let (k, v) = map.get_key_value_mut(&1).unwrap();
+    /// assert_eq!(k, &1);
+    /// assert_eq!(v, &mut "a");
+    /// *v = "b";
+    /// assert_eq!(map.get_key_value_mut(&1), Some((&1, &mut "b")));
+    /// assert_eq!(map.get_key_value_mut(&2), None);
+    /// ```
+    #[inline]
+    pub fn get_key_value_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<(&K, &mut V)>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner_mut(k) {
+            Some(&mut (ref key, ref mut value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
+    /// Returns `true` if the map contains a value for the specified key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.contains_key(&1), true);
+    /// assert_eq!(map.contains_key(&2), false);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn contains_key<Q: ?Sized>(&self, k: &Q) -> bool
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        self.get_inner(k).is_some()
+    }
+
+    /// Returns a mutable reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, "a");
+    /// if let Some(x) = map.get_mut(&1) {
+    ///     *x = "b";
+    /// }
+    /// assert_eq!(map[&1], "b");
+    ///
+    /// assert_eq!(map.get_mut(&2), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut V>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.get_inner_mut(k) {
+            Some(&mut (_, ref mut v)) => Some(v),
+            None => None,
+        }
+    }
+
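+    /// Mutable counterpart of `get_inner`; likewise skips hashing when the
+    /// table is empty.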
+    #[inline]
+    fn get_inner_mut<Q: ?Sized>(&mut self, k: &Q) -> Option<&mut (K, V)>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        if self.table.is_empty() {
+            None
+        } else {
+            let hash = make_hash::<Q, S>(&self.hash_builder, k);
+            self.table.get_mut(hash, equivalent_key(k))
+        }
+    }
+
+    /// Attempts to get mutable references to `N` values in the map at once.
+    ///
+    /// Returns an array of length `N` with the results of each query. For soundness, at most one
+    /// mutable reference will be returned to any value. `None` will be returned if any of the
+    /// keys are duplicates or missing.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut libraries = HashMap::new();
+    /// libraries.insert("Bodleian Library".to_string(), 1602);
+    /// libraries.insert("Athenæum".to_string(), 1807);
+    /// libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+    /// libraries.insert("Library of Congress".to_string(), 1800);
+    ///
+    /// let got = libraries.get_many_mut([
+    ///     "Athenæum",
+    ///     "Library of Congress",
+    /// ]);
+    /// assert_eq!(
+    ///     got,
+    ///     Some([
+    ///         &mut 1807,
+    ///         &mut 1800,
+    ///     ]),
+    /// );
+    ///
+    /// // Missing keys result in None
+    /// let got = libraries.get_many_mut([
+    ///     "Athenæum",
+    ///     "New York Public Library",
+    /// ]);
+    /// assert_eq!(got, None);
+    ///
+    /// // Duplicate keys result in None
+    /// let got = libraries.get_many_mut([
+    ///     "Athenæum",
+    ///     "Athenæum",
+    /// ]);
+    /// assert_eq!(got, None);
+    /// ```
+    pub fn get_many_mut<Q: ?Sized, const N: usize>(&mut self, ks: [&Q; N]) -> Option<[&'_ mut V; N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        self.get_many_mut_inner(ks).map(|res| res.map(|(_, v)| v))
+    }
+
+    /// Attempts to get mutable references to `N` values in the map at once, without validating that
+    /// the values are unique.
+    ///
+    /// Returns an array of length `N` with the results of each query. `None` will be returned if
+    /// any of the keys are missing.
+    ///
+    /// For a safe alternative see [`get_many_mut`](`HashMap::get_many_mut`).
+    ///
+    /// # Safety
+    ///
+    /// Calling this method with overlapping keys is *[undefined behavior]* even if the resulting
+    /// references are not used.
+    ///
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut libraries = HashMap::new();
+    /// libraries.insert("Bodleian Library".to_string(), 1602);
+    /// libraries.insert("Athenæum".to_string(), 1807);
+    /// libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+    /// libraries.insert("Library of Congress".to_string(), 1800);
+    ///
+    /// let got = libraries.get_many_mut([
+    ///     "Athenæum",
+    ///     "Library of Congress",
+    /// ]);
+    /// assert_eq!(
+    ///     got,
+    ///     Some([
+    ///         &mut 1807,
+    ///         &mut 1800,
+    ///     ]),
+    /// );
+    ///
+    /// // Missing keys result in None
+    /// let got = libraries.get_many_mut([
+    ///     "Athenæum",
+    ///     "New York Public Library",
+    /// ]);
+    /// assert_eq!(got, None);
+    /// ```
+    pub unsafe fn get_many_unchecked_mut<Q: ?Sized, const N: usize>(
+        &mut self,
+        ks: [&Q; N],
+    ) -> Option<[&'_ mut V; N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        self.get_many_unchecked_mut_inner(ks)
+            .map(|res| res.map(|(_, v)| v))
+    }
+
+    /// Attempts to get mutable references to `N` values in the map at once, with immutable
+    /// references to the corresponding keys.
+    ///
+    /// Returns an array of length `N` with the results of each query. For soundness, at most one
+    /// mutable reference will be returned to any value. `None` will be returned if any of the keys
+    /// are duplicates or missing.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut libraries = HashMap::new();
+    /// libraries.insert("Bodleian Library".to_string(), 1602);
+    /// libraries.insert("Athenæum".to_string(), 1807);
+    /// libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+    /// libraries.insert("Library of Congress".to_string(), 1800);
+    ///
+    /// let got = libraries.get_many_key_value_mut([
+    ///     "Bodleian Library",
+    ///     "Herzogin-Anna-Amalia-Bibliothek",
+    /// ]);
+    /// assert_eq!(
+    ///     got,
+    ///     Some([
+    ///         (&"Bodleian Library".to_string(), &mut 1602),
+    ///         (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ///     ]),
+    /// );
+    /// // Missing keys result in None
+    /// let got = libraries.get_many_key_value_mut([
+    ///     "Bodleian Library",
+    ///     "Gewandhaus",
+    /// ]);
+    /// assert_eq!(got, None);
+    ///
+    /// // Duplicate keys result in None
+    /// let got = libraries.get_many_key_value_mut([
+    ///     "Bodleian Library",
+    ///     "Herzogin-Anna-Amalia-Bibliothek",
+    ///     "Herzogin-Anna-Amalia-Bibliothek",
+    /// ]);
+    /// assert_eq!(got, None);
+    /// ```
+    pub fn get_many_key_value_mut<Q: ?Sized, const N: usize>(
+        &mut self,
+        ks: [&Q; N],
+    ) -> Option<[(&'_ K, &'_ mut V); N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        self.get_many_mut_inner(ks)
+            .map(|res| res.map(|(k, v)| (&*k, v)))
+    }
+
+    /// Attempts to get mutable references to `N` values in the map at once, with immutable
+    /// references to the corresponding keys, without validating that the values are unique.
+    ///
+    /// Returns an array of length `N` with the results of each query. `None` will be returned if
+    /// any of the keys are missing.
+    ///
+    /// For a safe alternative see [`get_many_key_value_mut`](`HashMap::get_many_key_value_mut`).
+    ///
+    /// # Safety
+    ///
+    /// Calling this method with overlapping keys is *[undefined behavior]* even if the resulting
+    /// references are not used.
+    ///
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut libraries = HashMap::new();
+    /// libraries.insert("Bodleian Library".to_string(), 1602);
+    /// libraries.insert("Athenæum".to_string(), 1807);
+    /// libraries.insert("Herzogin-Anna-Amalia-Bibliothek".to_string(), 1691);
+    /// libraries.insert("Library of Congress".to_string(), 1800);
+    ///
+    /// let got = libraries.get_many_key_value_mut([
+    ///     "Bodleian Library",
+    ///     "Herzogin-Anna-Amalia-Bibliothek",
+    /// ]);
+    /// assert_eq!(
+    ///     got,
+    ///     Some([
+    ///         (&"Bodleian Library".to_string(), &mut 1602),
+    ///         (&"Herzogin-Anna-Amalia-Bibliothek".to_string(), &mut 1691),
+    ///     ]),
+    /// );
+    /// // Missing keys result in None
+    /// let got = libraries.get_many_key_value_mut([
+    ///     "Bodleian Library",
+    ///     "Gewandhaus",
+    /// ]);
+    /// assert_eq!(got, None);
+    /// ```
+    pub unsafe fn get_many_key_value_unchecked_mut<Q: ?Sized, const N: usize>(
+        &mut self,
+        ks: [&Q; N],
+    ) -> Option<[(&'_ K, &'_ mut V); N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        self.get_many_unchecked_mut_inner(ks)
+            .map(|res| res.map(|(k, v)| (&*k, v)))
+    }
+
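+    /// Shared implementation of the checked `get_many_*_mut` methods: hashes
+    /// every lookup key up front and asks the raw table for pairwise-distinct
+    /// mutable buckets.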
+    fn get_many_mut_inner<Q: ?Sized, const N: usize>(
+        &mut self,
+        ks: [&Q; N],
+    ) -> Option<[&'_ mut (K, V); N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        let hashes = self.build_hashes_inner(ks);
+        self.table
+            .get_many_mut(hashes, |i, (k, _)| ks[i].equivalent(k))
+    }
+
+    unsafe fn get_many_unchecked_mut_inner<Q: ?Sized, const N: usize>(
+        &mut self,
+        ks: [&Q; N],
+    ) -> Option<[&'_ mut (K, V); N]>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        let hashes = self.build_hashes_inner(ks);
+        self.table
+            .get_many_unchecked_mut(hashes, |i, (k, _)| ks[i].equivalent(k))
+    }
+
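+    /// Hashes each of the `N` lookup keys with the map's hash builder.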
+    fn build_hashes_inner<Q: ?Sized, const N: usize>(&self, ks: [&Q; N]) -> [u64; N]
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        let mut hashes = [0_u64; N];
+        for i in 0..N {
+            hashes[i] = make_hash::<Q, S>(&self.hash_builder, ks[i]);
+        }
+        hashes
+    }
+
+    /// Inserts a key-value pair into the map.
+    ///
+    /// If the map did not have this key present, [`None`] is returned.
+    ///
+    /// If the map did have this key present, the value is updated, and the old
+    /// value is returned. The key is not updated, though; this matters for
+    /// types that can be `==` without being identical. See the [`std::collections`]
+    /// [module-level documentation] for more.
+    ///
+    /// [`None`]: https://doc.rust-lang.org/std/option/enum.Option.html#variant.None
+    /// [`std::collections`]: https://doc.rust-lang.org/std/collections/index.html
+    /// [module-level documentation]: https://doc.rust-lang.org/std/collections/index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// assert_eq!(map.insert(37, "a"), None);
+    /// assert_eq!(map.is_empty(), false);
+    ///
+    /// map.insert(37, "b");
+    /// assert_eq!(map.insert(37, "c"), Some("b"));
+    /// assert_eq!(map[&37], "c");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, k: K, v: V) -> Option<V> {
+        let hash = make_hash::<K, S>(&self.hash_builder, &k);
+        let hasher = make_hasher::<_, V, S>(&self.hash_builder);
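+        // Either find the bucket that already holds this key, or find the slot
+        // where the new `(k, v)` pair should be written, growing the table if
+        // necessary.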
+        match self
+            .table
+            .find_or_find_insert_slot(hash, equivalent_key(&k), hasher)
+        {
+            Ok(bucket) => Some(mem::replace(unsafe { &mut bucket.as_mut().1 }, v)),
+            Err(slot) => {
+                unsafe {
+                    self.table.insert_in_slot(hash, slot, (k, v));
+                }
+                None
+            }
+        }
+    }
+
+    /// Inserts a key-value pair into the map without checking
+    /// if the key already exists in the map.
+    ///
+    /// Returns a reference to the key and value just inserted.
+    ///
+    /// This operation is safe if a key does not exist in the map.
+    ///
+    /// However, if the key already exists in the map, the behavior is unspecified:
+    /// this operation may panic or loop forever, and any subsequent operation on the
+    /// map may panic, loop forever, or return an arbitrary result.
+    ///
+    /// That said, this operation (and any subsequent operations) is guaranteed not
+    /// to violate memory safety.
+    ///
+    /// This operation is faster than regular insert, because it does not perform
+    /// lookup before insertion.
+    ///
+    /// This operation is useful during initial population of the map.
+    /// For example, when constructing a map from another map, we know
+    /// that keys are unique.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map1 = HashMap::new();
+    /// assert_eq!(map1.insert(1, "a"), None);
+    /// assert_eq!(map1.insert(2, "b"), None);
+    /// assert_eq!(map1.insert(3, "c"), None);
+    /// assert_eq!(map1.len(), 3);
+    ///
+    /// let mut map2 = HashMap::new();
+    ///
+    /// for (key, value) in map1.into_iter() {
+    ///     map2.insert_unique_unchecked(key, value);
+    /// }
+    ///
+    /// let (key, value) = map2.insert_unique_unchecked(4, "d");
+    /// assert_eq!(key, &4);
+    /// assert_eq!(value, &mut "d");
+    /// *value = "e";
+    ///
+    /// assert_eq!(map2[&1], "a");
+    /// assert_eq!(map2[&2], "b");
+    /// assert_eq!(map2[&3], "c");
+    /// assert_eq!(map2[&4], "e");
+    /// assert_eq!(map2.len(), 4);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_unique_unchecked(&mut self, k: K, v: V) -> (&K, &mut V) {
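+        // The caller guarantees the key is not already present, so insert
+        // directly without searching for an existing equivalent key.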
+        let hash = make_hash::<K, S>(&self.hash_builder, &k);
+        let bucket = self
+            .table
+            .insert(hash, (k, v), make_hasher::<_, V, S>(&self.hash_builder));
+        let (k_ref, v_ref) = unsafe { bucket.as_mut() };
+        (k_ref, v_ref)
+    }
+
+    /// Tries to insert a key-value pair into the map, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Errors
+    ///
+    /// If the map already had this key present, nothing is updated, and
+    /// an error containing the occupied entry and the value is returned.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::OccupiedError;
+    ///
+    /// let mut map = HashMap::new();
+    /// assert_eq!(map.try_insert(37, "a").unwrap(), &"a");
+    ///
+    /// match map.try_insert(37, "b") {
+    ///     Err(OccupiedError { entry, value }) => {
+    ///         assert_eq!(entry.key(), &37);
+    ///         assert_eq!(entry.get(), &"a");
+    ///         assert_eq!(value, "b");
+    ///     }
+    ///     _ => panic!()
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert(
+        &mut self,
+        key: K,
+        value: V,
+    ) -> Result<&mut V, OccupiedError<'_, K, V, S, A>> {
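+        // Delegate to the entry API: an occupied entry is returned as an error
+        // together with the rejected value.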
+        match self.entry(key) {
+            Entry::Occupied(entry) => Err(OccupiedError { entry, value }),
+            Entry::Vacant(entry) => Ok(entry.insert(value)),
+        }
+    }
+
+    /// Removes a key from the map, returning the value at the key if the key
+    /// was previously in the map. Keeps the allocated memory for reuse.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.insert(1, "a");
+    ///
+    /// assert_eq!(map.remove(&1), Some("a"));
+    /// assert_eq!(map.remove(&1), None);
+    ///
+    /// // Now the map holds no elements
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove<Q: ?Sized>(&mut self, k: &Q) -> Option<V>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.remove_entry(k) {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+
+    /// Removes a key from the map, returning the stored key and value if the
+    /// key was previously in the map. Keeps the allocated memory for reuse.
+    ///
+    /// The key may be any borrowed form of the map's key type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the key type.
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.insert(1, "a");
+    ///
+    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+    /// assert_eq!(map.remove(&1), None);
+    ///
+    /// // Now the map holds no elements
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry<Q: ?Sized>(&mut self, k: &Q) -> Option<(K, V)>
+    where
+        Q: Hash + Equivalent<K>,
+    {
+        let hash = make_hash::<Q, S>(&self.hash_builder, k);
+        self.table.remove_entry(hash, equivalent_key(k))
+    }
+}
+
+impl<K, V, S, A: Allocator> HashMap<K, V, S, A> {
+    /// Creates a raw entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched. After this, insertions into a vacant entry
+    /// still require an owned key to be provided.
+    ///
+    /// Raw entries are useful for such exotic situations as:
+    ///
+    /// * Hash memoization
+    /// * Deferring the creation of an owned key until it is known to be required
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Because raw entries provide much more low-level control, it's much easier
+    /// to put the HashMap into an inconsistent state which, while memory-safe,
+    /// will cause the map to produce seemingly random results. Higher-level and
+    /// more foolproof APIs like `entry` should be preferred when possible.
+    ///
+    /// In particular, the hash used to initialize the raw entry must still be
+    /// consistent with the hash of the key that is ultimately stored in the entry.
+    /// This is because implementations of HashMap may need to recompute hashes
+    /// when resizing, at which point only the keys are available.
+    ///
+    /// Raw entries give mutable access to the keys. This must not be used
+    /// to modify how the key would compare or hash, as the map will not re-evaluate
+    /// where the key should go, meaning the keys may become "lost" if their
+    /// location does not reflect their state. For instance, if you change a key
+    /// so that the map now contains keys which compare equal, search may start
+    /// acting erratically, with two keys randomly masking each other. Implementations
+    /// are free to assume this doesn't happen (within the limits of memory-safety).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map = HashMap::new();
+    /// map.extend([("a", 100), ("b", 200), ("c", 300)]);
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// // Existing key (insert and update)
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    ///     RawEntryMut::Occupied(mut view) => {
+    ///         assert_eq!(view.get(), &100);
+    ///         let v = view.get_mut();
+    ///         let new_v = (*v) * 10;
+    ///         *v = new_v;
+    ///         assert_eq!(view.insert(1111), 1000);
+    ///     }
+    /// }
+    ///
+    /// assert_eq!(map[&"a"], 1111);
+    /// assert_eq!(map.len(), 3);
+    ///
+    /// // Existing key (take)
+    /// let hash = compute_hash(map.hasher(), &"c");
+    /// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") {
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    ///     RawEntryMut::Occupied(view) => {
+    ///         assert_eq!(view.remove_entry(), ("c", 300));
+    ///     }
+    /// }
+    /// assert_eq!(map.raw_entry().from_key(&"c"), None);
+    /// assert_eq!(map.len(), 2);
+    ///
+    /// // Nonexistent key (insert and update)
+    /// let key = "d";
+    /// let hash = compute_hash(map.hasher(), &key);
+    /// match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    ///     RawEntryMut::Occupied(_) => unreachable!(),
+    ///     RawEntryMut::Vacant(view) => {
+    ///         let (k, value) = view.insert("d", 4000);
+    ///         assert_eq!((*k, *value), ("d", 4000));
+    ///         *value = 40000;
+    ///     }
+    /// }
+    /// assert_eq!(map[&"d"], 40000);
+    /// assert_eq!(map.len(), 3);
+    ///
+    /// match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    ///     RawEntryMut::Occupied(view) => {
+    ///         assert_eq!(view.remove_entry(), ("d", 40000));
+    ///     }
+    /// }
+    /// assert_eq!(map.get(&"d"), None);
+    /// assert_eq!(map.len(), 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A> {
+        RawEntryBuilderMut { map: self }
+    }
+
+    /// Creates a raw immutable entry builder for the HashMap.
+    ///
+    /// Raw entries provide the lowest level of control for searching and
+    /// manipulating a map. They must be manually initialized with a hash and
+    /// then manually searched.
+    ///
+    /// This is useful for:
+    /// * Hash memoization
+    /// * Using a search key that doesn't work with the Borrow trait
+    /// * Using custom comparison logic without newtype wrappers
+    ///
+    /// Unless you are in such a situation, higher-level and more foolproof APIs like
+    /// `get` should be preferred.
+    ///
+    /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.extend([("a", 100), ("b", 200), ("c", 300)]);
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// for k in ["a", "b", "c", "d", "e", "f"] {
+    ///     let hash = compute_hash(map.hasher(), k);
+    ///     let v = map.get(&k).cloned();
+    ///     let kv = v.as_ref().map(|v| (&k, v));
+    ///
+    ///     println!("Key: {} and value: {:?}", k, v);
+    ///
+    ///     assert_eq!(map.raw_entry().from_key(&k), kv);
+    ///     assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+    ///     assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A> {
+        RawEntryBuilder { map: self }
+    }
+
+    /// Returns a reference to the [`RawTable`] used underneath [`HashMap`].
+    /// This function is only available if the `raw` feature of the crate is enabled.
+    ///
+    /// See [`raw_table_mut`] for more.
+    ///
+    /// [`raw_table_mut`]: Self::raw_table_mut
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table(&self) -> &RawTable<(K, V), A> {
+        &self.table
+    }
+
+    /// Returns a mutable reference to the [`RawTable`] used underneath [`HashMap`].
+    /// This function is only available if the `raw` feature of the crate is enabled.
+    ///
+    /// # Note
+    ///
+    /// Calling this function is safe, but using the raw hash table API may require
+    /// unsafe functions or blocks.
+    ///
+    /// The `RawTable` API gives the lowest level of control over the map, which can be useful
+    /// for extending the HashMap's API, but may lead to *[undefined behavior]*.
+    ///
+    /// [`HashMap`]: struct.HashMap.html
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.extend([("a", 10), ("b", 20), ("c", 30)]);
+    /// assert_eq!(map.len(), 3);
+    ///
+    /// // Let's imagine that we have a value and a hash of the key, but not the key itself.
+    /// // However, if you want to remove the value from the map by hash and value, and you
+    /// // know exactly that the value is unique, then you can create a function like this:
+    /// fn remove_by_hash<K, V, S, F>(
+    ///     map: &mut HashMap<K, V, S>,
+    ///     hash: u64,
+    ///     is_match: F,
+    /// ) -> Option<(K, V)>
+    /// where
+    ///     F: Fn(&(K, V)) -> bool,
+    /// {
+    ///     let raw_table = map.raw_table_mut();
+    ///     match raw_table.find(hash, is_match) {
+    ///         Some(bucket) => Some(unsafe { raw_table.remove(bucket).0 }),
+    ///         None => None,
+    ///     }
+    /// }
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash = compute_hash(map.hasher(), "a");
+    /// assert_eq!(remove_by_hash(&mut map, hash, |(_, v)| *v == 10), Some(("a", 10)));
+    /// assert_eq!(map.get(&"a"), None);
+    /// assert_eq!(map.len(), 2);
+    /// ```
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table_mut(&mut self) -> &mut RawTable<(K, V), A> {
+        &mut self.table
+    }
+}
+
+impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    V: PartialEq,
+    S: BuildHasher,
+    A: Allocator,
+{
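+    // Two maps are equal when they have the same length and every key in
+    // `self` maps to an equal value in `other`.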
+    fn eq(&self, other: &Self) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+
+        self.iter()
+            .all(|(key, value)| other.get(key).map_or(false, |v| *value == *v))
+    }
+}
+
+impl<K, V, S, A> Eq for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    V: Eq,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<K, V, S, A> Debug for HashMap<K, V, S, A>
+where
+    K: Debug,
+    V: Debug,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_map().entries(self.iter()).finish()
+    }
+}
+
+impl<K, V, S, A> Default for HashMap<K, V, S, A>
+where
+    S: Default,
+    A: Default + Allocator,
+{
+    /// Creates an empty `HashMap<K, V, S, A>`, with the `Default` value for the hasher and allocator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use std::collections::hash_map::RandomState;
+    ///
+    /// // You can specify all types of HashMap, including hasher and allocator.
+    /// // The created map is empty and doesn't allocate memory
+    /// let map: HashMap<u32, String> = Default::default();
+    /// assert_eq!(map.capacity(), 0);
+    /// let map: HashMap<u32, String, RandomState> = HashMap::default();
+    /// assert_eq!(map.capacity(), 0);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn default() -> Self {
+        Self::with_hasher_in(Default::default(), Default::default())
+    }
+}
+
+impl<K, Q: ?Sized, V, S, A> Index<&Q> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    Q: Hash + Equivalent<K>,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Output = V;
+
+    /// Returns a reference to the value corresponding to the supplied key.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the key is not present in the `HashMap`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let map: HashMap<_, _> = [("a", "One"), ("b", "Two")].into();
+    ///
+    /// assert_eq!(map[&"a"], "One");
+    /// assert_eq!(map[&"b"], "Two");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn index(&self, key: &Q) -> &V {
+        self.get(key).expect("no entry found for key")
+    }
+}
+
+// The default hasher is used to match the std implementation signature
+#[cfg(feature = "ahash")]
+impl<K, V, A, const N: usize> From<[(K, V); N]> for HashMap<K, V, DefaultHashBuilder, A>
+where
+    K: Eq + Hash,
+    A: Default + Allocator,
+{
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let map1 = HashMap::from([(1, 2), (3, 4)]);
+    /// let map2: HashMap<_, _> = [(1, 2), (3, 4)].into();
+    /// assert_eq!(map1, map2);
+    /// ```
+    fn from(arr: [(K, V); N]) -> Self {
+        arr.into_iter().collect()
+    }
+}
+
+/// An iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(&'a K, &'a V)`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.HashMap.html#method.iter
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut iter = map.iter();
+/// let mut vec = vec![iter.next(), iter.next(), iter.next()];
+///
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((&1, &"a")), Some((&2, &"b")), Some((&3, &"c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+/// ```
+pub struct Iter<'a, K, V> {
+    inner: RawIter<(K, V)>,
+    marker: PhantomData<(&'a K, &'a V)>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Iter<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Iter {
+            inner: self.inner.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<K: Debug, V: Debug> fmt::Debug for Iter<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(&'a K, &'a mut V)`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].into();
+///
+/// let mut iter = map.iter_mut();
+/// iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+/// iter.next().map(|(_, v)| v.push_str(" Mississippi"));
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+///
+/// assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned());
+/// assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned());
+/// ```
+pub struct IterMut<'a, K, V> {
+    inner: RawIter<(K, V)>,
+    // To ensure invariance with respect to V
+    marker: PhantomData<(&'a K, &'a mut V)>,
+}
+
+// We override the default Send impl which has K: Sync instead of K: Send. Both
+// are correct, but this one is more general since it allows keys which
+// implement Send but not Sync.
+unsafe impl<K: Send, V: Send> Send for IterMut<'_, K, V> {}
+
+impl<K, V> IterMut<'_, K, V> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// An owning iterator over the entries of a `HashMap` in arbitrary order.
+/// The iterator element type is `(K, V)`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
+/// (provided by the [`IntoIterator`] trait). See its documentation for more.
+/// The map cannot be used after calling that method.
+///
+/// [`into_iter`]: struct.HashMap.html#method.into_iter
+/// [`HashMap`]: struct.HashMap.html
+/// [`IntoIterator`]: https://doc.rust-lang.org/core/iter/trait.IntoIterator.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut iter = map.into_iter();
+/// let mut vec = vec![iter.next(), iter.next(), iter.next()];
+///
+/// // The `IntoIter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(iter.next(), None);
+/// assert_eq!(iter.next(), None);
+/// ```
+pub struct IntoIter<K, V, A: Allocator = Global> {
+    inner: RawIntoIter<(K, V), A>,
+}
+
+impl<K, V, A: Allocator> IntoIter<K, V, A> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// An owning iterator over the keys of a `HashMap` in arbitrary order.
+/// The iterator element type is `K`.
+///
+/// This `struct` is created by the [`into_keys`] method on [`HashMap`].
+/// See its documentation for more.
+/// The map cannot be used after calling that method.
+///
+/// [`into_keys`]: struct.HashMap.html#method.into_keys
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut keys = map.into_keys();
+/// let mut vec = vec![keys.next(), keys.next(), keys.next()];
+///
+/// // The `IntoKeys` iterator produces keys in arbitrary order, so the
+/// // keys must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some(1), Some(2), Some(3)]);
+///
+/// // It is a fused iterator
+/// assert_eq!(keys.next(), None);
+/// assert_eq!(keys.next(), None);
+/// ```
+pub struct IntoKeys<K, V, A: Allocator = Global> {
+    inner: IntoIter<K, V, A>,
+}
+
+impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
+    type Item = K;
+
+    #[inline]
+    fn next(&mut self) -> Option<K> {
+        self.inner.next().map(|(k, _)| k)
+    }
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[inline]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, (k, _)| f(acc, k))
+    }
+}
+
+impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A> {
+    #[inline]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A> {}
+
+impl<K: Debug, V: Debug, A: Allocator> fmt::Debug for IntoKeys<K, V, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list()
+            .entries(self.inner.iter().map(|(k, _)| k))
+            .finish()
+    }
+}
+
+/// An owning iterator over the values of a `HashMap` in arbitrary order.
+/// The iterator element type is `V`.
+///
+/// This `struct` is created by the [`into_values`] method on [`HashMap`].
+/// See its documentation for more. The map cannot be used after calling that method.
+///
+/// [`into_values`]: struct.HashMap.html#method.into_values
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut values = map.into_values();
+/// let mut vec = vec![values.next(), values.next(), values.next()];
+///
+/// // The `IntoValues` iterator produces values in arbitrary order, so
+/// // the values must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some("a"), Some("b"), Some("c")]);
+///
+/// // It is a fused iterator
+/// assert_eq!(values.next(), None);
+/// assert_eq!(values.next(), None);
+/// ```
+pub struct IntoValues<K, V, A: Allocator = Global> {
+    inner: IntoIter<K, V, A>,
+}
+
+impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
+    type Item = V;
+
+    #[inline]
+    fn next(&mut self) -> Option<V> {
+        self.inner.next().map(|(_, v)| v)
+    }
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[inline]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, (_, v)| f(acc, v))
+    }
+}
+
+impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A> {
+    #[inline]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A> {}
+
+impl<K, V: Debug, A: Allocator> fmt::Debug for IntoValues<K, V, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list()
+            .entries(self.inner.iter().map(|(_, v)| v))
+            .finish()
+    }
+}
+
+/// An iterator over the keys of a `HashMap` in arbitrary order.
+/// The iterator element type is `&'a K`.
+///
+/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: struct.HashMap.html#method.keys
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut keys = map.keys();
+/// let mut vec = vec![keys.next(), keys.next(), keys.next()];
+///
+/// // The `Keys` iterator produces keys in arbitrary order, so the
+/// // keys must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some(&1), Some(&2), Some(&3)]);
+///
+/// // It is a fused iterator
+/// assert_eq!(keys.next(), None);
+/// assert_eq!(keys.next(), None);
+/// ```
+pub struct Keys<'a, K, V> {
+    inner: Iter<'a, K, V>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Keys<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Keys {
+            inner: self.inner.clone(),
+        }
+    }
+}
+
+impl<K: Debug, V> fmt::Debug for Keys<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// An iterator over the values of a `HashMap` in arbitrary order.
+/// The iterator element type is `&'a V`.
+///
+/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: struct.HashMap.html#method.values
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut values = map.values();
+/// let mut vec = vec![values.next(), values.next(), values.next()];
+///
+/// // The `Values` iterator produces values in arbitrary order, so the
+/// // values must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some(&"a"), Some(&"b"), Some(&"c")]);
+///
+/// // It is a fused iterator
+/// assert_eq!(values.next(), None);
+/// assert_eq!(values.next(), None);
+/// ```
+pub struct Values<'a, K, V> {
+    inner: Iter<'a, K, V>,
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+impl<K, V> Clone for Values<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Values {
+            inner: self.inner.clone(),
+        }
+    }
+}
+
+impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A draining iterator over the entries of a `HashMap` in arbitrary
+/// order. The iterator element type is `(K, V)`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.HashMap.html#method.drain
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let mut map: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut drain_iter = map.drain();
+/// let mut vec = vec![drain_iter.next(), drain_iter.next(), drain_iter.next()];
+///
+/// // The `Drain` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((1, "a")), Some((2, "b")), Some((3, "c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(drain_iter.next(), None);
+/// assert_eq!(drain_iter.next(), None);
+/// ```
+pub struct Drain<'a, K, V, A: Allocator = Global> {
+    inner: RawDrain<'a, (K, V), A>,
+}
+
+impl<K, V, A: Allocator> Drain<'_, K, V, A> {
+    /// Returns an iterator of references over the remaining items.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(super) fn iter(&self) -> Iter<'_, K, V> {
+        Iter {
+            inner: self.inner.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
+/// A draining iterator over entries of a `HashMap` which don't satisfy the predicate
+/// `f(&k, &mut v)` in arbitrary order. The iterator element type is `(K, V)`.
+///
+/// This `struct` is created by the [`extract_if`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`extract_if`]: struct.HashMap.html#method.extract_if
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let mut map: HashMap<i32, &str> = [(1, "a"), (2, "b"), (3, "c")].into();
+///
+/// let mut extract_if = map.extract_if(|k, _v| k % 2 != 0);
+/// let mut vec = vec![extract_if.next(), extract_if.next()];
+///
+/// // The `ExtractIf` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [Some((1, "a")),Some((3, "c"))]);
+///
+/// // It is a fused iterator
+/// assert_eq!(extract_if.next(), None);
+/// assert_eq!(extract_if.next(), None);
+/// drop(extract_if);
+///
+/// assert_eq!(map.len(), 1);
+/// ```
+#[must_use = "Iterators are lazy unless consumed"]
+pub struct ExtractIf<'a, K, V, F, A: Allocator = Global>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    f: F,
+    inner: RawExtractIf<'a, (K, V), A>,
+}
+
+impl<K, V, F, A> Iterator for ExtractIf<'_, K, V, F, A>
+where
+    F: FnMut(&K, &mut V) -> bool,
+    A: Allocator,
+{
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next(|&mut (ref k, ref mut v)| (self.f)(k, v))
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
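+        // The predicate may reject every remaining entry, so the lower bound
+        // is zero; the upper bound comes from the underlying raw iterator.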
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<K, V, F> FusedIterator for ExtractIf<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+/// A mutable iterator over the values of a `HashMap` in arbitrary order.
+/// The iterator element type is `&'a mut V`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: struct.HashMap.html#method.values_mut
+/// [`HashMap`]: struct.HashMap.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashMap;
+///
+/// let mut map: HashMap<_, _> = [(1, "One".to_owned()), (2, "Two".into())].into();
+///
+/// let mut values = map.values_mut();
+/// values.next().map(|v| v.push_str(" Mississippi"));
+/// values.next().map(|v| v.push_str(" Mississippi"));
+///
+/// // It is a fused iterator
+/// assert_eq!(values.next(), None);
+/// assert_eq!(values.next(), None);
+///
+/// assert_eq!(map.get(&1).unwrap(), &"One Mississippi".to_owned());
+/// assert_eq!(map.get(&2).unwrap(), &"Two Mississippi".to_owned());
+/// ```
+pub struct ValuesMut<'a, K, V> {
+    inner: IterMut<'a, K, V>,
+}
+
+/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
+///
+/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{RawEntryBuilderMut, RawEntryMut::Vacant, RawEntryMut::Occupied};
+/// use hashbrown::HashMap;
+/// use core::hash::{BuildHasher, Hash};
+///
+/// let mut map = HashMap::new();
+/// map.extend([(1, 11), (2, 12), (3, 13), (4, 14), (5, 15), (6, 16)]);
+/// assert_eq!(map.len(), 6);
+///
+/// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+///     use core::hash::Hasher;
+///     let mut state = hash_builder.build_hasher();
+///     key.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// let builder: RawEntryBuilderMut<_, _, _> = map.raw_entry_mut();
+///
+/// // Existing key
+/// match builder.from_key(&6) {
+///     Vacant(_) => unreachable!(),
+///     Occupied(view) => assert_eq!(view.get(), &16),
+/// }
+///
+/// for key in 0..12 {
+///     let hash = compute_hash(map.hasher(), &key);
+///     let value = map.get(&key).cloned();
+///     let key_value = value.as_ref().map(|v| (&key, v));
+///
+///     println!("Key: {} and value: {:?}", key, value);
+///
+///     match map.raw_entry_mut().from_key(&key) {
+///         Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+///         Vacant(_) => assert_eq!(value, None),
+///     }
+///     match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) {
+///         Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+///         Vacant(_) => assert_eq!(value, None),
+///     }
+///     match map.raw_entry_mut().from_hash(hash, |q| *q == key) {
+///         Occupied(mut o) => assert_eq!(Some(o.get_key_value()), key_value),
+///         Vacant(_) => assert_eq!(value, None),
+///     }
+/// }
+///
+/// assert_eq!(map.len(), 6);
+/// ```
+pub struct RawEntryBuilderMut<'a, K, V, S, A: Allocator = Global> {
+    map: &'a mut HashMap<K, V, S, A>,
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This is a lower-level version of [`Entry`].
+///
+/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
+/// then calling one of the methods of that [`RawEntryBuilderMut`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`Entry`]: enum.Entry.html
+/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
+///
+/// # Examples
+///
+/// ```
+/// use core::hash::{BuildHasher, Hash};
+/// use hashbrown::hash_map::{HashMap, RawEntryMut, RawOccupiedEntryMut};
+///
+/// let mut map = HashMap::new();
+/// map.extend([('a', 1), ('b', 2), ('c', 3)]);
+/// assert_eq!(map.len(), 3);
+///
+/// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+///     use core::hash::Hasher;
+///     let mut state = hash_builder.build_hasher();
+///     key.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// // Existing key (insert)
+/// let raw: RawEntryMut<_, _, _> = map.raw_entry_mut().from_key(&'a');
+/// let _raw_o: RawOccupiedEntryMut<_, _, _> = raw.insert('a', 10);
+/// assert_eq!(map.len(), 3);
+///
+/// // Nonexistent key (insert)
+/// map.raw_entry_mut().from_key(&'d').insert('d', 40);
+/// assert_eq!(map.len(), 4);
+///
+/// // Existing key (or_insert)
+/// let hash = compute_hash(map.hasher(), &'b');
+/// let kv = map
+///     .raw_entry_mut()
+///     .from_key_hashed_nocheck(hash, &'b')
+///     .or_insert('b', 20);
+/// assert_eq!(kv, (&mut 'b', &mut 2));
+/// *kv.1 = 20;
+/// assert_eq!(map.len(), 4);
+///
+/// // Nonexistent key (or_insert)
+/// let hash = compute_hash(map.hasher(), &'e');
+/// let kv = map
+///     .raw_entry_mut()
+///     .from_key_hashed_nocheck(hash, &'e')
+///     .or_insert('e', 50);
+/// assert_eq!(kv, (&mut 'e', &mut 50));
+/// assert_eq!(map.len(), 5);
+///
+/// // Existing key (or_insert_with)
+/// let hash = compute_hash(map.hasher(), &'c');
+/// let kv = map
+///     .raw_entry_mut()
+///     .from_hash(hash, |q| q == &'c')
+///     .or_insert_with(|| ('c', 30));
+/// assert_eq!(kv, (&mut 'c', &mut 3));
+/// *kv.1 = 30;
+/// assert_eq!(map.len(), 5);
+///
+/// // Nonexistent key (or_insert_with)
+/// let hash = compute_hash(map.hasher(), &'f');
+/// let kv = map
+///     .raw_entry_mut()
+///     .from_hash(hash, |q| q == &'f')
+///     .or_insert_with(|| ('f', 60));
+/// assert_eq!(kv, (&mut 'f', &mut 60));
+/// assert_eq!(map.len(), 6);
+///
+/// println!("Our HashMap: {:?}", map);
+///
+/// let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [('a', 10), ('b', 20), ('c', 30), ('d', 40), ('e', 50), ('f', 60)]);
+/// ```
+pub enum RawEntryMut<'a, K, V, S, A: Allocator = Global> {
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::{hash_map::RawEntryMut, HashMap};
+    /// let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => unreachable!(),
+    ///     RawEntryMut::Occupied(_) => { }
+    /// }
+    /// ```
+    Occupied(RawOccupiedEntryMut<'a, K, V, S, A>),
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::{hash_map::RawEntryMut, HashMap};
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    ///
+    /// match map.raw_entry_mut().from_key("a") {
+    ///     RawEntryMut::Occupied(_) => unreachable!(),
+    ///     RawEntryMut::Vacant(_) => { }
+    /// }
+    /// ```
+    Vacant(RawVacantEntryMut<'a, K, V, S, A>),
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+///
+/// # Examples
+///
+/// ```
+/// use core::hash::{BuildHasher, Hash};
+/// use hashbrown::hash_map::{HashMap, RawEntryMut, RawOccupiedEntryMut};
+///
+/// let mut map = HashMap::new();
+/// map.extend([("a", 10), ("b", 20), ("c", 30)]);
+///
+/// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+///     use core::hash::Hasher;
+///     let mut state = hash_builder.build_hasher();
+///     key.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// let _raw_o: RawOccupiedEntryMut<_, _, _> = map.raw_entry_mut().from_key(&"a").insert("a", 100);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert and update)
+/// match map.raw_entry_mut().from_key(&"a") {
+///     RawEntryMut::Vacant(_) => unreachable!(),
+///     RawEntryMut::Occupied(mut view) => {
+///         assert_eq!(view.get(), &100);
+///         let v = view.get_mut();
+///         let new_v = (*v) * 10;
+///         *v = new_v;
+///         assert_eq!(view.insert(1111), 1000);
+///     }
+/// }
+///
+/// assert_eq!(map[&"a"], 1111);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (take)
+/// let hash = compute_hash(map.hasher(), &"c");
+/// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"c") {
+///     RawEntryMut::Vacant(_) => unreachable!(),
+///     RawEntryMut::Occupied(view) => {
+///         assert_eq!(view.remove_entry(), ("c", 30));
+///     }
+/// }
+/// assert_eq!(map.raw_entry().from_key(&"c"), None);
+/// assert_eq!(map.len(), 2);
+///
+/// let hash = compute_hash(map.hasher(), &"b");
+/// match map.raw_entry_mut().from_hash(hash, |q| *q == "b") {
+///     RawEntryMut::Vacant(_) => unreachable!(),
+///     RawEntryMut::Occupied(view) => {
+///         assert_eq!(view.remove_entry(), ("b", 20));
+///     }
+/// }
+/// assert_eq!(map.get(&"b"), None);
+/// assert_eq!(map.len(), 1);
+/// ```
+pub struct RawOccupiedEntryMut<'a, K, V, S, A: Allocator = Global> {
+    elem: Bucket<(K, V)>,
+    table: &'a mut RawTable<(K, V), A>,
+    hash_builder: &'a S,
+}
+
+unsafe impl<K, V, S, A> Send for RawOccupiedEntryMut<'_, K, V, S, A>
+where
+    K: Send,
+    V: Send,
+    S: Send,
+    A: Send + Allocator,
+{
+}
+unsafe impl<K, V, S, A> Sync for RawOccupiedEntryMut<'_, K, V, S, A>
+where
+    K: Sync,
+    V: Sync,
+    S: Sync,
+    A: Sync + Allocator,
+{
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`RawEntryMut`] enum.
+///
+/// [`RawEntryMut`]: enum.RawEntryMut.html
+///
+/// # Examples
+///
+/// ```
+/// use core::hash::{BuildHasher, Hash};
+/// use hashbrown::hash_map::{HashMap, RawEntryMut, RawVacantEntryMut};
+///
+/// let mut map = HashMap::<&str, i32>::new();
+///
+/// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+///     use core::hash::Hasher;
+///     let mut state = hash_builder.build_hasher();
+///     key.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// let raw_v: RawVacantEntryMut<_, _, _> = match map.raw_entry_mut().from_key(&"a") {
+///     RawEntryMut::Vacant(view) => view,
+///     RawEntryMut::Occupied(_) => unreachable!(),
+/// };
+/// raw_v.insert("a", 10);
+/// assert!(map[&"a"] == 10 && map.len() == 1);
+///
+/// // Nonexistent key (insert and update)
+/// let hash = compute_hash(map.hasher(), &"b");
+/// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &"b") {
+///     RawEntryMut::Occupied(_) => unreachable!(),
+///     RawEntryMut::Vacant(view) => {
+///         let (k, value) = view.insert("b", 2);
+///         assert_eq!((*k, *value), ("b", 2));
+///         *value = 20;
+///     }
+/// }
+/// assert!(map[&"b"] == 20 && map.len() == 2);
+///
+/// let hash = compute_hash(map.hasher(), &"c");
+/// match map.raw_entry_mut().from_hash(hash, |q| *q == "c") {
+///     RawEntryMut::Occupied(_) => unreachable!(),
+///     RawEntryMut::Vacant(view) => {
+///         assert_eq!(view.insert("c", 30), (&mut "c", &mut 30));
+///     }
+/// }
+/// assert!(map[&"c"] == 30 && map.len() == 3);
+/// ```
+pub struct RawVacantEntryMut<'a, K, V, S, A: Allocator = Global> {
+    table: &'a mut RawTable<(K, V), A>,
+    hash_builder: &'a S,
+}
+
+/// A builder for computing where in a [`HashMap`] a key-value pair would be stored.
+///
+/// See the [`HashMap::raw_entry`] docs for usage examples.
+///
+/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{HashMap, RawEntryBuilder};
+/// use core::hash::{BuildHasher, Hash};
+///
+/// let mut map = HashMap::new();
+/// map.extend([(1, 10), (2, 20), (3, 30)]);
+///
+/// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+///     use core::hash::Hasher;
+///     let mut state = hash_builder.build_hasher();
+///     key.hash(&mut state);
+///     state.finish()
+/// }
+///
+/// for k in 0..6 {
+///     let hash = compute_hash(map.hasher(), &k);
+///     let v = map.get(&k).cloned();
+///     let kv = v.as_ref().map(|v| (&k, v));
+///
+///     println!("Key: {} and value: {:?}", k, v);
+///     let builder: RawEntryBuilder<_, _, _> = map.raw_entry();
+///     assert_eq!(builder.from_key(&k), kv);
+///     assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+///     assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+/// }
+/// ```
+pub struct RawEntryBuilder<'a, K, V, S, A: Allocator = Global> {
+    map: &'a HashMap<K, V, S, A>,
+}
+
+impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A> {
+    /// Creates a `RawEntryMut` from the given key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let key = "a";
+    /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key(&key);
+    /// entry.insert(key, 100);
+    /// assert_eq!(map[&"a"], 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S, A>
+    where
+        S: BuildHasher,
+        Q: Hash + Equivalent<K>,
+    {
+        let hash = make_hash::<Q, S>(&self.map.hash_builder, k);
+        self.from_key_hashed_nocheck(hash, k)
+    }
+
+    /// Creates a `RawEntryMut` from the given key and its hash.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let key = "a";
+    /// let hash = compute_hash(map.hasher(), &key);
+    /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_key_hashed_nocheck(hash, &key);
+    /// entry.insert(key, 100);
+    /// assert_eq!(map[&"a"], 100);
+    /// ```
+    #[inline]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A>
+    where
+        Q: Equivalent<K>,
+    {
+        self.from_hash(hash, equivalent(k))
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> RawEntryBuilderMut<'a, K, V, S, A> {
+    /// Creates a `RawEntryMut` from the given hash and matching function.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let key = "a";
+    /// let hash = compute_hash(map.hasher(), &key);
+    /// let entry: RawEntryMut<&str, u32, _> = map.raw_entry_mut().from_hash(hash, |k| k == &key);
+    /// entry.insert(key, 100);
+    /// assert_eq!(map[&"a"], 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A>
+    where
+        for<'b> F: FnMut(&'b K) -> bool,
+    {
+        self.search(hash, is_match)
+    }
+
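+    // Shared search routine for the `from_*` constructors above: probes the
+    // table for a bucket whose key satisfies `is_match` at the given hash and
+    // wraps the result in an occupied or vacant raw entry.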
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn search<F>(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A>
+    where
+        for<'b> F: FnMut(&'b K) -> bool,
+    {
+        match self.map.table.find(hash, |(k, _)| is_match(k)) {
+            Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
+                elem,
+                table: &mut self.map.table,
+                hash_builder: &self.map.hash_builder,
+            }),
+            None => RawEntryMut::Vacant(RawVacantEntryMut {
+                table: &mut self.map.table,
+                hash_builder: &self.map.hash_builder,
+            }),
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> RawEntryBuilder<'a, K, V, S, A> {
+    /// Access an immutable entry by key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    /// let key = "a";
+    /// assert_eq!(map.raw_entry().from_key(&key), Some((&"a", &100)));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key<Q: ?Sized>(self, k: &Q) -> Option<(&'a K, &'a V)>
+    where
+        S: BuildHasher,
+        Q: Hash + Equivalent<K>,
+    {
+        let hash = make_hash::<Q, S>(&self.map.hash_builder, k);
+        self.from_key_hashed_nocheck(hash, k)
+    }
+
+    /// Access an immutable entry by a key and its hash.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::HashMap;
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    /// let key = "a";
+    /// let hash = compute_hash(map.hasher(), &key);
+    /// assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &key), Some((&"a", &100)));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
+    where
+        Q: Equivalent<K>,
+    {
+        self.from_hash(hash, equivalent(k))
+    }
+
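+    // Immutable counterpart of `RawEntryBuilderMut::search`: returns the
+    // matching key and value, if any, without taking a mutable borrow.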
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn search<F>(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
+    where
+        F: FnMut(&K) -> bool,
+    {
+        match self.map.table.get(hash, |(k, _)| is_match(k)) {
+            Some((key, value)) => Some((key, value)),
+            None => None,
+        }
+    }
+
+    /// Access an immutable entry by hash and matching function.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::HashMap;
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    /// let key = "a";
+    /// let hash = compute_hash(map.hasher(), &key);
+    /// assert_eq!(map.raw_entry().from_hash(hash, |k| k == &key), Some((&"a", &100)));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::wrong_self_convention)]
+    pub fn from_hash<F>(self, hash: u64, is_match: F) -> Option<(&'a K, &'a V)>
+    where
+        F: FnMut(&K) -> bool,
+    {
+        self.search(hash, is_match)
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> RawEntryMut<'a, K, V, S, A> {
+    /// Sets the value of the entry, and returns a `RawOccupiedEntryMut`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let entry = map.raw_entry_mut().from_key("horseyland").insert("horseyland", 37);
+    ///
+    /// assert_eq!(entry.remove_entry(), ("horseyland", 37));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
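+        // An occupied entry simply swaps in the new value; a vacant entry
+        // inserts the pair and becomes occupied.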
+        match self {
+            RawEntryMut::Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+            RawEntryMut::Vacant(entry) => entry.insert_entry(key, value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// *map.raw_entry_mut().from_key("poneyland").or_insert("poneyland", 10).1 *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default_key: K, default_val: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => entry.insert(default_key, default_val),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns mutable references to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, String> = HashMap::new();
+    ///
+    /// map.raw_entry_mut().from_key("poneyland").or_insert_with(|| {
+    ///     ("poneyland", "hoho".to_string())
+    /// });
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F>(self, default: F) -> (&'a mut K, &'a mut V)
+    where
+        F: FnOnce() -> (K, V),
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.into_key_value(),
+            RawEntryMut::Vacant(entry) => {
+                let (k, v) = default();
+                entry.insert(k, v)
+            }
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.raw_entry_mut()
+    ///    .from_key("poneyland")
+    ///    .and_modify(|_k, v| { *v += 1 })
+    ///    .or_insert("poneyland", 0);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut K, &mut V),
+    {
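+        // Run `f` on the key and value of an occupied entry; a vacant entry is
+        // passed through unchanged.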
+        match self {
+            RawEntryMut::Occupied(mut entry) => {
+                {
+                    let (k, v) = entry.get_key_value_mut();
+                    f(k, v);
+                }
+                RawEntryMut::Occupied(entry)
+            }
+            RawEntryMut::Vacant(entry) => RawEntryMut::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry, and allows replacing or removing it based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::RawEntryMut;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     RawEntryMut::Vacant(_) => {},
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     RawEntryMut::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     },
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .raw_entry_mut()
+    ///     .from_key("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     RawEntryMut::Vacant(_) => {},
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            RawEntryMut::Occupied(entry) => entry.replace_entry_with(f),
+            RawEntryMut::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> RawOccupiedEntryMut<'a, K, V, S, A> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.key(), &"a")
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Gets a mutable reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    /// use std::rc::Rc;
+    ///
+    /// let key_one = Rc::new("a");
+    /// let key_two = Rc::new("a");
+    ///
+    /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+    /// map.insert(key_one.clone(), 10);
+    ///
+    /// assert_eq!(map[&key_one], 10);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// match map.raw_entry_mut().from_key(&key_one) {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(mut o) => {
+    ///         *o.key_mut() = key_two.clone();
+    ///     }
+    /// }
+    /// assert_eq!(map[&key_two], 10);
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key_mut(&mut self) -> &mut K {
+        unsafe { &mut self.elem.as_mut().0 }
+    }
+
+    /// Converts the entry into a mutable reference to the key in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    /// use std::rc::Rc;
+    ///
+    /// let key_one = Rc::new("a");
+    /// let key_two = Rc::new("a");
+    ///
+    /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+    /// map.insert(key_one.clone(), 10);
+    ///
+    /// assert_eq!(map[&key_one], 10);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// let inside_key: &mut Rc<&str>;
+    ///
+    /// match map.raw_entry_mut().from_key(&key_one) {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => inside_key = o.into_key(),
+    /// }
+    /// *inside_key = key_two.clone();
+    ///
+    /// assert_eq!(map[&key_two], 10);
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> &'a mut K {
+        unsafe { &mut self.elem.as_mut().0 }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.get(), &100),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// let value: &mut u32;
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => value = o.into_mut(),
+    /// }
+    /// *value += 900;
+    ///
+    /// assert_eq!(map[&"a"], 1000);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(mut o) => *o.get_mut() += 900,
+    /// }
+    ///
+    /// assert_eq!(map[&"a"], 1000);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Gets a reference to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.get_key_value(), (&"a", &100)),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_key_value(&self) -> (&K, &V) {
+        unsafe {
+            let (key, value) = self.elem.as_ref();
+            (key, value)
+        }
+    }
+
+    /// Gets a mutable reference to the key and value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    /// use std::rc::Rc;
+    ///
+    /// let key_one = Rc::new("a");
+    /// let key_two = Rc::new("a");
+    ///
+    /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+    /// map.insert(key_one.clone(), 10);
+    ///
+    /// assert_eq!(map[&key_one], 10);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// match map.raw_entry_mut().from_key(&key_one) {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(mut o) => {
+    ///         let (inside_key, inside_value) = o.get_key_value_mut();
+    ///         *inside_key = key_two.clone();
+    ///         *inside_value = 100;
+    ///     }
+    /// }
+    /// assert_eq!(map[&key_two], 100);
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_key_value_mut(&mut self) -> (&mut K, &mut V) {
+        unsafe {
+            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+            (key, value)
+        }
+    }
+
+    /// Converts the OccupiedEntry into mutable references to the key and value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    /// use std::rc::Rc;
+    ///
+    /// let key_one = Rc::new("a");
+    /// let key_two = Rc::new("a");
+    ///
+    /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+    /// map.insert(key_one.clone(), 10);
+    ///
+    /// assert_eq!(map[&key_one], 10);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// let inside_key: &mut Rc<&str>;
+    /// let inside_value: &mut u32;
+    /// match map.raw_entry_mut().from_key(&key_one) {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => {
+    ///         let tuple = o.into_key_value();
+    ///         inside_key = tuple.0;
+    ///         inside_value = tuple.1;
+    ///     }
+    /// }
+    /// *inside_key = key_two.clone();
+    /// *inside_value = 100;
+    /// assert_eq!(map[&key_two], 100);
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key_value(self) -> (&'a mut K, &'a mut V) {
+        unsafe {
+            let &mut (ref mut key, ref mut value) = self.elem.as_mut();
+            (key, value)
+        }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(mut o) => assert_eq!(o.insert(1000), 100),
+    /// }
+    ///
+    /// assert_eq!(map[&"a"], 1000);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Sets the key of the entry, and returns the entry's old key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    /// use std::rc::Rc;
+    ///
+    /// let key_one = Rc::new("a");
+    /// let key_two = Rc::new("a");
+    ///
+    /// let mut map: HashMap<Rc<&str>, u32> = HashMap::new();
+    /// map.insert(key_one.clone(), 10);
+    ///
+    /// assert_eq!(map[&key_one], 10);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// match map.raw_entry_mut().from_key(&key_one) {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(mut o) => {
+    ///         let old_key = o.insert_key(key_two.clone());
+    ///         assert!(Rc::ptr_eq(&old_key, &key_one));
+    ///     }
+    /// }
+    /// assert_eq!(map[&key_two], 10);
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_key(&mut self, key: K) -> K {
+        mem::replace(self.key_mut(), key)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.remove(), 100),
+    /// }
+    /// assert_eq!(map.get(&"a"), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Takes ownership of the key and value from the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => assert_eq!(o.remove_entry(), ("a", 100)),
+    /// }
+    /// assert_eq!(map.get(&"a"), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.remove(self.elem).0 }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry, and allows replacing or removing it based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// let raw_entry = match map.raw_entry_mut().from_key(&"a") {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"a");
+    ///         assert_eq!(v, 100);
+    ///         Some(v + 900)
+    ///     }),
+    /// };
+    /// let raw_entry = match raw_entry {
+    ///     RawEntryMut::Vacant(_) => panic!(),
+    ///     RawEntryMut::Occupied(o) => o.replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"a");
+    ///         assert_eq!(v, 1000);
+    ///         None
+    ///     }),
+    /// };
+    /// match raw_entry {
+    ///     RawEntryMut::Vacant(_) => { },
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    /// };
+    /// assert_eq!(map.get(&"a"), None);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> RawEntryMut<'a, K, V, S, A>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let still_occupied = self
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    f(&key, value).map(|new_value| (key, new_value))
+                });
+
+            if still_occupied {
+                RawEntryMut::Occupied(self)
+            } else {
+                RawEntryMut::Vacant(RawVacantEntryMut {
+                    table: self.table,
+                    hash_builder: self.hash_builder,
+                })
+            }
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> RawVacantEntryMut<'a, K, V, S, A> {
+    /// Sets the value of the entry with the VacantEntry's key,
+    /// and returns mutable references to the key and value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.raw_entry_mut().from_key(&"c") {
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    ///     RawEntryMut::Vacant(v) => assert_eq!(v.insert("c", 300), (&mut "c", &mut 300)),
+    /// }
+    ///
+    /// assert_eq!(map[&"c"], 300);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, key: K, value: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash = make_hash::<K, S>(self.hash_builder, &key);
+        self.insert_hashed_nocheck(hash, key, value)
+    }
+
+    /// Sets the value of the entry with the VacantEntry's key,
+    /// and returns mutable references to the key and value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// fn compute_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let mut map: HashMap<&str, u32> = [("a", 100), ("b", 200)].into();
+    /// let key = "c";
+    /// let hash = compute_hash(map.hasher(), &key);
+    ///
+    /// match map.raw_entry_mut().from_key_hashed_nocheck(hash, &key) {
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    ///     RawEntryMut::Vacant(v) => assert_eq!(
+    ///         v.insert_hashed_nocheck(hash, key, 300),
+    ///         (&mut "c", &mut 300)
+    ///     ),
+    /// }
+    ///
+    /// assert_eq!(map[&"c"], 300);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::shadow_unrelated)]
+    pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let &mut (ref mut k, ref mut v) = self.table.insert_entry(
+            hash,
+            (key, value),
+            make_hasher::<_, V, S>(self.hash_builder),
+        );
+        (k, v)
+    }
+
+    /// Sets the value of the entry with a custom hasher function.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::hash_map::{HashMap, RawEntryMut};
+    ///
+    /// fn make_hasher<K, S>(hash_builder: &S) -> impl Fn(&K) -> u64 + '_
+    /// where
+    ///     K: Hash + ?Sized,
+    ///     S: BuildHasher,
+    /// {
+    ///     move |key: &K| {
+    ///         use core::hash::Hasher;
+    ///         let mut state = hash_builder.build_hasher();
+    ///         key.hash(&mut state);
+    ///         state.finish()
+    ///     }
+    /// }
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let key = "a";
+    /// let hash_builder = map.hasher().clone();
+    /// let hash = make_hasher(&hash_builder)(&key);
+    ///
+    /// match map.raw_entry_mut().from_hash(hash, |q| q == &key) {
+    ///     RawEntryMut::Occupied(_) => panic!(),
+    ///     RawEntryMut::Vacant(v) => assert_eq!(
+    ///         v.insert_with_hasher(hash, key, 100, make_hasher(&hash_builder)),
+    ///         (&mut "a", &mut 100)
+    ///     ),
+    /// }
+    /// map.extend([("b", 200), ("c", 300), ("d", 400), ("e", 500), ("f", 600)]);
+    /// assert_eq!(map[&"a"], 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_with_hasher<H>(
+        self,
+        hash: u64,
+        key: K,
+        value: V,
+        hasher: H,
+    ) -> (&'a mut K, &'a mut V)
+    where
+        H: Fn(&K) -> u64,
+    {
+        let &mut (ref mut k, ref mut v) = self
+            .table
+            .insert_entry(hash, (key, value), |x| hasher(&x.0));
+        (k, v)
+    }
+
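+    // Inserts `(key, value)` into the table using the entry's hash builder and
+    // returns an occupied-entry view of the newly inserted element.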
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, S, A>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let hash = make_hash::<K, S>(self.hash_builder, &key);
+        let elem = self.table.insert(
+            hash,
+            (key, value),
+            make_hasher::<_, V, S>(self.hash_builder),
+        );
+        RawOccupiedEntryMut {
+            elem,
+            table: self.table,
+            hash_builder: self.hash_builder,
+        }
+    }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawEntryBuilderMut<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder").finish()
+    }
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawEntryMut<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(),
+            RawEntryMut::Occupied(ref o) => f.debug_tuple("RawEntry").field(o).finish(),
+        }
+    }
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawOccupiedEntryMut<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawOccupiedEntryMut")
+            .field("key", self.key())
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawVacantEntryMut<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawVacantEntryMut").finish()
+    }
+}
+
+impl<K, V, S, A: Allocator> Debug for RawEntryBuilder<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("RawEntryBuilder").finish()
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`entry`]: struct.HashMap.html#method.entry
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{Entry, HashMap, OccupiedEntry};
+///
+/// let mut map = HashMap::new();
+/// map.extend([("a", 10), ("b", 20), ("c", 30)]);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert)
+/// let entry: Entry<_, _, _> = map.entry("a");
+/// let _raw_o: OccupiedEntry<_, _, _> = entry.insert(1);
+/// assert_eq!(map.len(), 3);
+/// // Nonexistent key (insert)
+/// map.entry("d").insert(4);
+///
+/// // Existing key (or_insert)
+/// let v = map.entry("b").or_insert(2);
+/// assert_eq!(std::mem::replace(v, 2), 20);
+/// // Nonexistent key (or_insert)
+/// map.entry("e").or_insert(5);
+///
+/// // Existing key (or_insert_with)
+/// let v = map.entry("c").or_insert_with(|| 3);
+/// assert_eq!(std::mem::replace(v, 3), 30);
+/// // Nonexistent key (or_insert_with)
+/// map.entry("f").or_insert_with(|| 6);
+///
+/// println!("Our HashMap: {:?}", map);
+///
+/// let mut vec: Vec<_> = map.iter().map(|(&k, &v)| (k, v)).collect();
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3), ("d", 4), ("e", 5), ("f", 6)]);
+/// ```
+pub enum Entry<'a, K, V, S, A = Global>
+where
+    A: Allocator,
+{
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// let mut map: HashMap<_, _> = [("a", 100), ("b", 200)].into();
+    ///
+    /// match map.entry("a") {
+    ///     Entry::Vacant(_) => unreachable!(),
+    ///     Entry::Occupied(_) => { }
+    /// }
+    /// ```
+    Occupied(OccupiedEntry<'a, K, V, S, A>),
+
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// let mut map: HashMap<&str, i32> = HashMap::new();
+    ///
+    /// match map.entry("a") {
+    ///     Entry::Occupied(_) => unreachable!(),
+    ///     Entry::Vacant(_) => { }
+    /// }
+    /// ```
+    Vacant(VacantEntry<'a, K, V, S, A>),
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for Entry<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{Entry, HashMap, OccupiedEntry};
+///
+/// let mut map = HashMap::new();
+/// map.extend([("a", 10), ("b", 20), ("c", 30)]);
+///
+/// let _entry_o: OccupiedEntry<_, _, _> = map.entry("a").insert(100);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert and update)
+/// match map.entry("a") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(mut view) => {
+///         assert_eq!(view.get(), &100);
+///         let v = view.get_mut();
+///         *v *= 10;
+///         assert_eq!(view.insert(1111), 1000);
+///     }
+/// }
+///
+/// assert_eq!(map[&"a"], 1111);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (take)
+/// match map.entry("c") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.remove_entry(), ("c", 30));
+///     }
+/// }
+/// assert_eq!(map.get(&"c"), None);
+/// assert_eq!(map.len(), 2);
+/// ```
+pub struct OccupiedEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> {
+    hash: u64,
+    key: Option<K>,
+    elem: Bucket<(K, V)>,
+    table: &'a mut HashMap<K, V, S, A>,
+}
+
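+// An `OccupiedEntry` holds an optional owned key, a bucket pointer into the
+// exclusively borrowed table, and the `&mut` borrow of the map itself, so it can
+// be sent or shared between threads whenever `K`, `V`, `S`, and `A` can.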
+unsafe impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
+where
+    K: Send,
+    V: Send,
+    S: Send,
+    A: Send + Allocator,
+{
+}
+unsafe impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
+where
+    K: Sync,
+    V: Sync,
+    S: Sync,
+    A: Sync + Allocator,
+{
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("key", self.key())
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{Entry, HashMap, VacantEntry};
+///
+/// let mut map = HashMap::<&str, i32>::new();
+///
+/// let entry_v: VacantEntry<_, _, _> = match map.entry("a") {
+///     Entry::Vacant(view) => view,
+///     Entry::Occupied(_) => unreachable!(),
+/// };
+/// entry_v.insert(10);
+/// assert!(map[&"a"] == 10 && map.len() == 1);
+///
+/// // Nonexistent key (insert and update)
+/// match map.entry("b") {
+///     Entry::Occupied(_) => unreachable!(),
+///     Entry::Vacant(view) => {
+///         let value = view.insert(2);
+///         assert_eq!(*value, 2);
+///         *value = 20;
+///     }
+/// }
+/// assert!(map[&"b"] == 20 && map.len() == 2);
+/// ```
+pub struct VacantEntry<'a, K, V, S = DefaultHashBuilder, A: Allocator = Global> {
+    hash: u64,
+    key: K,
+    table: &'a mut HashMap<K, V, S, A>,
+}
+
+impl<K: Debug, V, S, A: Allocator> Debug for VacantEntry<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.key()).finish()
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied,
+/// with any borrowed form of the map's key type.
+///
+/// This `enum` is constructed from the [`entry_ref`] method on [`HashMap`].
+///
+/// [`Hash`] and [`Eq`] on the borrowed form of the map's key type *must* match those
+/// for the key type. It also requires that the key can be constructed from the borrowed
+/// form through the [`From`] trait.
+///
+/// [`HashMap`]: struct.HashMap.html
+/// [`entry_ref`]: struct.HashMap.html#method.entry_ref
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`From`]: https://doc.rust-lang.org/std/convert/trait.From.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{EntryRef, HashMap, OccupiedEntryRef};
+///
+/// let mut map = HashMap::new();
+/// map.extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)]);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert)
+/// let key = String::from("a");
+/// let entry: EntryRef<_, _, _, _> = map.entry_ref(&key);
+/// let _raw_o: OccupiedEntryRef<_, _, _, _> = entry.insert(1);
+/// assert_eq!(map.len(), 3);
+/// // Nonexistent key (insert)
+/// map.entry_ref("d").insert(4);
+///
+/// // Existing key (or_insert)
+/// let v = map.entry_ref("b").or_insert(2);
+/// assert_eq!(std::mem::replace(v, 2), 20);
+/// // Nonexistent key (or_insert)
+/// map.entry_ref("e").or_insert(5);
+///
+/// // Existing key (or_insert_with)
+/// let v = map.entry_ref("c").or_insert_with(|| 3);
+/// assert_eq!(std::mem::replace(v, 3), 30);
+/// // Nonexistent key (or_insert_with)
+/// map.entry_ref("f").or_insert_with(|| 6);
+///
+/// println!("Our HashMap: {:?}", map);
+///
+/// for (key, value) in ["a", "b", "c", "d", "e", "f"].into_iter().zip(1..=6) {
+///     assert_eq!(map[key], value)
+/// }
+/// assert_eq!(map.len(), 6);
+/// ```
+pub enum EntryRef<'a, 'b, K, Q: ?Sized, V, S, A = Global>
+where
+    A: Allocator,
+{
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    /// let mut map: HashMap<_, _> = [("a".to_owned(), 100), ("b".into(), 200)].into();
+    ///
+    /// match map.entry_ref("a") {
+    ///     EntryRef::Vacant(_) => unreachable!(),
+    ///     EntryRef::Occupied(_) => { }
+    /// }
+    /// ```
+    Occupied(OccupiedEntryRef<'a, 'b, K, Q, V, S, A>),
+
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    /// let mut map: HashMap<String, i32> = HashMap::new();
+    ///
+    /// match map.entry_ref("a") {
+    ///     EntryRef::Occupied(_) => unreachable!(),
+    ///     EntryRef::Vacant(_) => { }
+    /// }
+    /// ```
+    Vacant(VacantEntryRef<'a, 'b, K, Q, V, S, A>),
+}
+
+impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug
+    for EntryRef<'_, '_, K, Q, V, S, A>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            EntryRef::Vacant(ref v) => f.debug_tuple("EntryRef").field(v).finish(),
+            EntryRef::Occupied(ref o) => f.debug_tuple("EntryRef").field(o).finish(),
+        }
+    }
+}
+
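+// Internal helper for the `EntryRef` API: the entry starts out holding only the
+// borrowed form of the key and converts it into an owned `K` (via `From<&Q>`) only
+// when an insertion actually needs to store a key in the map.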
+enum KeyOrRef<'a, K, Q: ?Sized> {
+    Borrowed(&'a Q),
+    Owned(K),
+}
+
+impl<'a, K, Q: ?Sized> KeyOrRef<'a, K, Q> {
+    fn into_owned(self) -> K
+    where
+        K: From<&'a Q>,
+    {
+        match self {
+            Self::Borrowed(borrowed) => borrowed.into(),
+            Self::Owned(owned) => owned,
+        }
+    }
+}
+
+impl<'a, K: Borrow<Q>, Q: ?Sized> AsRef<Q> for KeyOrRef<'a, K, Q> {
+    fn as_ref(&self) -> &Q {
+        match self {
+            Self::Borrowed(borrowed) => borrowed,
+            Self::Owned(owned) => owned.borrow(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashMap`.
+/// It is part of the [`EntryRef`] enum.
+///
+/// [`EntryRef`]: enum.EntryRef.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{EntryRef, HashMap, OccupiedEntryRef};
+///
+/// let mut map = HashMap::new();
+/// map.extend([("a".to_owned(), 10), ("b".into(), 20), ("c".into(), 30)]);
+///
+/// let key = String::from("a");
+/// let _entry_o: OccupiedEntryRef<_, _, _, _> = map.entry_ref(&key).insert(100);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (insert and update)
+/// match map.entry_ref("a") {
+///     EntryRef::Vacant(_) => unreachable!(),
+///     EntryRef::Occupied(mut view) => {
+///         assert_eq!(view.get(), &100);
+///         let v = view.get_mut();
+///         *v *= 10;
+///         assert_eq!(view.insert(1111), 1000);
+///     }
+/// }
+///
+/// assert_eq!(map["a"], 1111);
+/// assert_eq!(map.len(), 3);
+///
+/// // Existing key (take)
+/// match map.entry_ref("c") {
+///     EntryRef::Vacant(_) => unreachable!(),
+///     EntryRef::Occupied(view) => {
+///         assert_eq!(view.remove_entry(), ("c".to_owned(), 30));
+///     }
+/// }
+/// assert_eq!(map.get("c"), None);
+/// assert_eq!(map.len(), 2);
+/// ```
+pub struct OccupiedEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> {
+    hash: u64,
+    key: Option<KeyOrRef<'b, K, Q>>,
+    elem: Bucket<(K, V)>,
+    table: &'a mut HashMap<K, V, S, A>,
+}
+
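+// As for `OccupiedEntry` above; `Q: Sync` is required even for `Send` because the
+// entry may still hold a shared `&Q` borrow of the lookup key.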
+unsafe impl<'a, 'b, K, Q, V, S, A> Send for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+where
+    K: Send,
+    Q: Sync + ?Sized,
+    V: Send,
+    S: Send,
+    A: Send + Allocator,
+{
+}
+unsafe impl<'a, 'b, K, Q, V, S, A> Sync for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+where
+    K: Sync,
+    Q: Sync + ?Sized,
+    V: Sync,
+    S: Sync,
+    A: Sync + Allocator,
+{
+}
+
+impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug
+    for OccupiedEntryRef<'_, '_, K, Q, V, S, A>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntryRef")
+            .field("key", &self.key().borrow())
+            .field("value", &self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashMap`.
+/// It is part of the [`EntryRef`] enum.
+///
+/// [`EntryRef`]: enum.EntryRef.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{EntryRef, HashMap, VacantEntryRef};
+///
+/// let mut map = HashMap::<String, i32>::new();
+///
+/// let entry_v: VacantEntryRef<_, _, _, _> = match map.entry_ref("a") {
+///     EntryRef::Vacant(view) => view,
+///     EntryRef::Occupied(_) => unreachable!(),
+/// };
+/// entry_v.insert(10);
+/// assert!(map["a"] == 10 && map.len() == 1);
+///
+/// // Nonexistent key (insert and update)
+/// match map.entry_ref("b") {
+///     EntryRef::Occupied(_) => unreachable!(),
+///     EntryRef::Vacant(view) => {
+///         let value = view.insert(2);
+///         assert_eq!(*value, 2);
+///         *value = 20;
+///     }
+/// }
+/// assert!(map["b"] == 20 && map.len() == 2);
+/// ```
+pub struct VacantEntryRef<'a, 'b, K, Q: ?Sized, V, S, A: Allocator = Global> {
+    hash: u64,
+    key: KeyOrRef<'b, K, Q>,
+    table: &'a mut HashMap<K, V, S, A>,
+}
+
+impl<K: Borrow<Q>, Q: ?Sized + Debug, V, S, A: Allocator> Debug
+    for VacantEntryRef<'_, '_, K, Q, V, S, A>
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntryRef").field(&self.key()).finish()
+    }
+}
+
+/// The error returned by [`try_insert`](HashMap::try_insert) when the key already exists.
+///
+/// Contains the occupied entry, and the value that was not inserted.
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_map::{HashMap, OccupiedError};
+///
+/// let mut map: HashMap<_, _> = [("a", 10), ("b", 20)].into();
+///
+/// // The try_insert method returns a mutable reference to the value if the key is vacant,
+/// // but if the map already had the key present, nothing is updated and the provided
+/// // value is returned inside the `Err(_)` variant.
+/// match map.try_insert("a", 100) {
+///     Err(OccupiedError { mut entry, value }) => {
+///         assert_eq!(entry.key(), &"a");
+///         assert_eq!(value, 100);
+///         assert_eq!(entry.insert(100), 10)
+///     }
+///     _ => unreachable!(),
+/// }
+/// assert_eq!(map[&"a"], 100);
+/// ```
+pub struct OccupiedError<'a, K, V, S, A: Allocator = Global> {
+    /// The entry in the map that was already occupied.
+    pub entry: OccupiedEntry<'a, K, V, S, A>,
+    /// The value which was not inserted, because the entry was already occupied.
+    pub value: V,
+}
+
+impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedError<'_, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedError")
+            .field("key", self.entry.key())
+            .field("old_value", self.entry.get())
+            .field("new_value", &self.value)
+            .finish()
+    }
+}
+
+impl<'a, K: Debug, V: Debug, S, A: Allocator> fmt::Display for OccupiedError<'a, K, V, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(
+            f,
+            "failed to insert {:?}, key {:?} already exists with value {:?}",
+            self.value,
+            self.entry.key(),
+            self.entry.get(),
+        )
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> IntoIterator for &'a HashMap<K, V, S, A> {
+    type Item = (&'a K, &'a V);
+    type IntoIter = Iter<'a, K, V>;
+
+    /// Creates an iterator over the entries of a `HashMap` in arbitrary order.
+    /// The iterator element type is `(&'a K, &'a V)`.
+    ///
+    /// Returns the same `Iter` struct as the [`iter`] method on [`HashMap`].
+    ///
+    /// [`iter`]: struct.HashMap.html#method.iter
+    /// [`HashMap`]: struct.HashMap.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let map_one: HashMap<_, _> = [(1, "a"), (2, "b"), (3, "c")].into();
+    /// let mut map_two = HashMap::new();
+    ///
+    /// for (key, value) in &map_one {
+    ///     println!("Key: {}, Value: {}", key, value);
+    ///     map_two.insert_unique_unchecked(*key, *value);
+    /// }
+    ///
+    /// assert_eq!(map_one, map_two);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> Iter<'a, K, V> {
+        self.iter()
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> IntoIterator for &'a mut HashMap<K, V, S, A> {
+    type Item = (&'a K, &'a mut V);
+    type IntoIter = IterMut<'a, K, V>;
+
+    /// Creates an iterator over the entries of a `HashMap` in arbitrary order
+    /// with mutable references to the values. The iterator element type is
+    /// `(&'a K, &'a mut V)`.
+    ///
+    /// Returns the same `IterMut` struct as the [`iter_mut`] method on
+    /// [`HashMap`].
+    ///
+    /// [`iter_mut`]: struct.HashMap.html#method.iter_mut
+    /// [`HashMap`]: struct.HashMap.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// let mut map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+    ///
+    /// for (key, value) in &mut map {
+    ///     println!("Key: {}, Value: {}", key, value);
+    ///     *value *= 2;
+    /// }
+    ///
+    /// let mut vec = map.iter().collect::<Vec<_>>();
+    /// // The `Iter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(&"a", &2), (&"b", &4), (&"c", &6)]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IterMut<'a, K, V> {
+        self.iter_mut()
+    }
+}
+
+impl<K, V, S, A: Allocator> IntoIterator for HashMap<K, V, S, A> {
+    type Item = (K, V);
+    type IntoIter = IntoIter<K, V, A>;
+
+    /// Creates a consuming iterator, that is, one that moves each key-value
+    /// pair out of the map in arbitrary order. The map cannot be used after
+    /// calling this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let map: HashMap<_, _> = [("a", 1), ("b", 2), ("c", 3)].into();
+    ///
+    /// // Not possible with .iter()
+    /// let mut vec: Vec<(&str, i32)> = map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so
+    /// // the items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [("a", 1), ("b", 2), ("c", 3)]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IntoIter<K, V, A> {
+        IntoIter {
+            inner: self.table.into_iter(),
+        }
+    }
+}
+
+impl<'a, K, V> Iterator for Iter<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(x) => unsafe {
+                let r = x.as_ref();
+                Some((&r.0, &r.1))
+            },
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, x| unsafe {
+            let (k, v) = x.as_ref();
+            f(acc, (k, v))
+        })
+    }
+}
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(x) => unsafe {
+                let r = x.as_mut();
+                Some((&r.0, &mut r.1))
+            },
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, x| unsafe {
+            let (k, v) = x.as_mut();
+            f(acc, (k, v))
+        })
+    }
+}
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+impl<K, V> fmt::Debug for IterMut<'_, K, V>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(K, V)> {
+        self.inner.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, f)
+    }
+}
+impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A> {}
+
+impl<K: Debug, V: Debug, A: Allocator> fmt::Debug for IntoIter<K, V, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, (k, _)| f(acc, k))
+    }
+}
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+    type Item = &'a V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a V> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, (_, v)| f(acc, v))
+    }
+}
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+    type Item = &'a mut V;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a mut V> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some((_, v)) => Some(v),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, |acc, (_, v)| f(acc, v))
+    }
+}
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<K, V: Debug> fmt::Debug for ValuesMut<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list()
+            .entries(self.inner.iter().map(|(_, val)| val))
+            .finish()
+    }
+}
+
+impl<'a, K, V, A: Allocator> Iterator for Drain<'a, K, V, A> {
+    type Item = (K, V);
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<(K, V)> {
+        self.inner.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, f)
+    }
+}
+impl<K, V, A: Allocator> ExactSizeIterator for Drain<'_, K, V, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<K, V, A: Allocator> FusedIterator for Drain<'_, K, V, A> {}
+
+impl<K, V, A> fmt::Debug for Drain<'_, K, V, A>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> Entry<'a, K, V, S, A> {
+    /// Sets the value of the entry, and returns an OccupiedEntry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// let entry = map.entry("horseyland").insert(37);
+    ///
+    /// assert_eq!(entry.key(), &"horseyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+            Entry::Vacant(entry) => entry.insert_entry(value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry("poneyland").or_insert(3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry("poneyland").or_insert(10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default: V) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry("poneyland").or_insert_with(|| 3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry("poneyland").or_insert_with(|| 10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
+    /// This method allows for generating key-derived values for insertion by providing the default
+    /// function a reference to the key that was moved during the `.entry(key)` method call.
+    ///
+    /// The reference to the moved key is provided so that cloning or copying the key is
+    /// unnecessary, unlike with `.or_insert_with(|| ... )`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, usize> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+    /// assert_eq!(map["poneyland"], 9);
+    ///
+    /// // existing key
+    /// *map.entry("poneyland").or_insert_with_key(|key| key.chars().count() * 10) *= 2;
+    /// assert_eq!(map["poneyland"], 18);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => {
+                let value = default(entry.key());
+                entry.insert(value)
+            }
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(3);
+    /// // existing key
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// // nonexistent key
+    /// assert_eq!(map.entry("horseland").key(), &"horseland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        match *self {
+            Entry::Occupied(ref entry) => entry.key(),
+            Entry::Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            Entry::Occupied(mut entry) => {
+                f(entry.get_mut());
+                Entry::Occupied(entry)
+            }
+            Entry::Vacant(entry) => Entry::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry, and allows replacing or removing it based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///     }
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, &"poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     Entry::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .entry("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => assert_eq!(e.key(), &"poneyland"),
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.replace_entry_with(f),
+            Entry::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, K, V: Default, S, A: Allocator> Entry<'a, K, V, S, A> {
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, Option<u32>> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry("poneyland").or_default();
+    /// assert_eq!(map["poneyland"], None);
+    ///
+    /// map.insert("horseland", Some(3));
+    ///
+    /// // existing key
+    /// assert_eq!(map.entry("horseland").or_default(), &mut Some(3));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_default(self) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry.into_mut(),
+            Entry::Vacant(entry) => entry.insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> OccupiedEntry<'a, K, V, S, A> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// match map.entry("poneyland") {
+    ///     Entry::Vacant(_) => panic!(),
+    ///     Entry::Occupied(entry) => assert_eq!(entry.key(), &"poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Takes ownership of the key and value from the map.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     assert_eq!(o.remove_entry(), ("poneyland", 12));
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.table.remove(self.elem).0 }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// match map.entry("poneyland") {
+    ///     Entry::Vacant(_) => panic!(),
+    ///     Entry::Occupied(entry) => assert_eq!(entry.get(), &12),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntry` which may outlive the
+    /// destruction of the `Entry` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 24);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    ///
+    /// let value: &mut u32;
+    /// match map.entry("poneyland") {
+    ///     Entry::Occupied(entry) => value = entry.into_mut(),
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    /// *value += 10;
+    ///
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 15);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+    /// the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntry was created through [`Entry::insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, u32> = HashMap::new();
+    /// let key_one = Rc::new("Stringthing".to_string());
+    /// let key_two = Rc::new("Stringthing".to_string());
+    ///
+    /// map.insert(key_one.clone(), 15);
+    /// assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    /// match map.entry(key_two.clone()) {
+    ///     Entry::Occupied(entry) => {
+    ///         let (old_key, old_value): (Rc<String>, u32) = entry.replace_entry(16);
+    ///         assert!(Rc::ptr_eq(&key_one, &old_key) && old_value == 15);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    /// assert_eq!(map[&"Stringthing".to_owned()], 16);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry(self, value: V) -> (K, V) {
+        let entry = unsafe { self.elem.as_mut() };
+
+        let old_key = mem::replace(&mut entry.0, self.key.unwrap());
+        let old_value = mem::replace(&mut entry.1, value);
+
+        (old_key, old_value)
+    }
+
+    /// Replaces the key in the hash map with the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntry was created through [`Entry::insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<String>, usize> = HashMap::with_capacity(6);
+    /// let mut keys_one: Vec<Rc<String>> = Vec::with_capacity(6);
+    /// let mut keys_two: Vec<Rc<String>> = Vec::with_capacity(6);
+    ///
+    /// for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+    ///     let rc_key = Rc::new(key.to_owned());
+    ///     keys_one.push(rc_key.clone());
+    ///     map.insert(rc_key.clone(), value);
+    ///     keys_two.push(Rc::new(key.to_owned()));
+    /// }
+    ///
+    /// assert!(
+    ///     keys_one.iter().all(|key| Rc::strong_count(key) == 2)
+    ///         && keys_two.iter().all(|key| Rc::strong_count(key) == 1)
+    /// );
+    ///
+    /// reclaim_memory(&mut map, &keys_two);
+    ///
+    /// assert!(
+    ///     keys_one.iter().all(|key| Rc::strong_count(key) == 1)
+    ///         && keys_two.iter().all(|key| Rc::strong_count(key) == 2)
+    /// );
+    ///
+    /// fn reclaim_memory(map: &mut HashMap<Rc<String>, usize>, keys: &[Rc<String>]) {
+    ///     for key in keys {
+    ///         if let Entry::Occupied(entry) = map.entry(key.clone()) {
+    ///             // Replaces the entry's key with our version of it in `keys`.
+    ///             entry.replace_key();
+    ///         }
+    ///     }
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_key(self) -> K {
+        let entry = unsafe { self.elem.as_mut() };
+        mem::replace(&mut entry.0, self.key.unwrap())
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry, and allows replacing or removing the entry based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// map.insert("poneyland", 42);
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => {
+    ///         e.replace_entry_with(|k, v| {
+    ///             assert_eq!(k, &"poneyland");
+    ///             assert_eq!(v, 42);
+    ///             Some(v + 1)
+    ///         })
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Occupied(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = match map.entry("poneyland") {
+    ///     Entry::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    ///     Entry::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     Entry::Vacant(e) => {
+    ///         assert_eq!(e.key(), &"poneyland");
+    ///     }
+    ///     Entry::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> Entry<'a, K, V, S, A>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
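+            // `replace_bucket_with` removes the bucket when the closure returns
+            // `None`; the key is stashed in `spare_key` so a `VacantEntry` for
+            // the same slot can be handed back to the caller.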
+            let mut spare_key = None;
+
+            self.table
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    if let Some(new_value) = f(&key, value) {
+                        Some((key, new_value))
+                    } else {
+                        spare_key = Some(key);
+                        None
+                    }
+                });
+
+            if let Some(key) = spare_key {
+                Entry::Vacant(VacantEntry {
+                    hash: self.hash,
+                    key,
+                    table: self.table,
+                })
+            } else {
+                Entry::Occupied(self)
+            }
+        }
+    }
+}
+
+impl<'a, K, V, S, A: Allocator> VacantEntry<'a, K, V, S, A> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `VacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    /// Take ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{Entry, HashMap};
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// match map.entry("poneyland") {
+    ///     Entry::Occupied(_) => panic!(),
+    ///     Entry::Vacant(v) => assert_eq!(v.into_key(), "poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K {
+        self.key
+    }
+
+    /// Sets the value of the entry with the VacantEntry's key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::Entry;
+    ///
+    /// let mut map: HashMap<&str, u32> = HashMap::new();
+    ///
+    /// if let Entry::Vacant(o) = map.entry("poneyland") {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> &'a mut V
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let table = &mut self.table.table;
+        let entry = table.insert_entry(
+            self.hash,
+            (self.key, value),
+            make_hasher::<_, V, S>(&self.table.hash_builder),
+        );
+        &mut entry.1
+    }
+
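+    /// Inserts the value with the `VacantEntry`'s key and returns an
+    /// `OccupiedEntry` pointing at the new element.
+    ///
+    /// The returned entry has `key: None`, so calling `replace_entry` or
+    /// `replace_key` on it will panic.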
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S, A>
+    where
+        K: Hash,
+        S: BuildHasher,
+    {
+        let elem = self.table.table.insert(
+            self.hash,
+            (self.key, value),
+            make_hasher::<_, V, S>(&self.table.hash_builder),
+        );
+        OccupiedEntry {
+            hash: self.hash,
+            key: None,
+            elem,
+            table: self.table,
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Sets the value of the entry, and returns an OccupiedEntryRef.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let entry = map.entry_ref("horseyland").insert(37);
+    ///
+    /// assert_eq!(entry.key(), "horseyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(mut entry) => {
+                entry.insert(value);
+                entry
+            }
+            EntryRef::Vacant(entry) => entry.insert_entry(value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_insert(3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_insert(10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self, default: V) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.into_mut(),
+            EntryRef::Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_insert_with(|| 3);
+    /// assert_eq!(map["poneyland"], 3);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_insert_with(|| 10) *= 2;
+    /// assert_eq!(map["poneyland"], 6);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.into_mut(),
+            EntryRef::Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting, if empty, the result of the default function.
+    /// This method allows for generating key-derived values for insertion by providing the default
+    /// function access to the borrowed form of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, usize> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_insert_with_key(|key| key.chars().count());
+    /// assert_eq!(map["poneyland"], 9);
+    ///
+    /// // existing key
+    /// *map.entry_ref("poneyland").or_insert_with_key(|key| key.chars().count() * 10) *= 2;
+    /// assert_eq!(map["poneyland"], 18);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert_with_key<F: FnOnce(&Q) -> V>(self, default: F) -> &'a mut V
+    where
+        K: Hash + Borrow<Q> + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.into_mut(),
+            EntryRef::Vacant(entry) => {
+                let value = default(entry.key.as_ref());
+                entry.insert(value)
+            }
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(3);
+    /// // existing key
+    /// assert_eq!(map.entry_ref("poneyland").key(), "poneyland");
+    /// // nonexistent key
+    /// assert_eq!(map.entry_ref("horseland").key(), "horseland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &Q
+    where
+        K: Borrow<Q>,
+    {
+        match *self {
+            EntryRef::Occupied(ref entry) => entry.key().borrow(),
+            EntryRef::Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// map.entry_ref("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.entry_ref("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            EntryRef::Occupied(mut entry) => {
+                f(entry.get_mut());
+                EntryRef::Occupied(entry)
+            }
+            EntryRef::Vacant(entry) => EntryRef::Vacant(entry),
+        }
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// an occupied entry, and allows replacing or removing the entry based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| panic!());
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///     }
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// map.insert("poneyland".to_string(), 42);
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|k, v| {
+    ///         assert_eq!(k, "poneyland");
+    ///         assert_eq!(v, 42);
+    ///         Some(v + 1)
+    ///     });
+    ///
+    /// match entry {
+    ///     EntryRef::Occupied(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = map
+    ///     .entry_ref("poneyland")
+    ///     .and_replace_entry_with(|_k, _v| None);
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => assert_eq!(e.key(), "poneyland"),
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn and_replace_entry_with<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.replace_entry_with(f),
+            EntryRef::Vacant(_) => self,
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V: Default, S, A: Allocator> EntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, Option<u32>> = HashMap::new();
+    ///
+    /// // nonexistent key
+    /// map.entry_ref("poneyland").or_default();
+    /// assert_eq!(map["poneyland"], None);
+    ///
+    /// map.insert("horseland".to_string(), Some(3));
+    ///
+    /// // existing key
+    /// assert_eq!(map.entry_ref("horseland").or_default(), &mut Some(3));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_default(self) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        match self {
+            EntryRef::Occupied(entry) => entry.into_mut(),
+            EntryRef::Vacant(entry) => entry.insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> OccupiedEntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Vacant(_) => panic!(),
+    ///     EntryRef::Occupied(entry) => assert_eq!(entry.key(), "poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &K {
+        unsafe { &self.elem.as_ref().0 }
+    }
+
+    /// Takes ownership of the key and value from the map.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     assert_eq!(o.remove_entry(), ("poneyland".to_owned(), 12));
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements, but the capacity equals the old one
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(self) -> (K, V) {
+        unsafe { self.table.table.remove(self.elem).0 }
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Vacant(_) => panic!(),
+    ///     EntryRef::Occupied(entry) => assert_eq!(entry.get(), &12),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &V {
+        unsafe { &self.elem.as_ref().1 }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntryRef` which may outlive the
+    /// destruction of the `EntryRef` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 24);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_mut(&mut self) -> &mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Converts the OccupiedEntryRef into a mutable reference to the value in the entry
+    /// with a lifetime bound to the map itself.
+    ///
+    /// If you need multiple references to the `OccupiedEntryRef`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// let value: &mut u32;
+    /// match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(entry) => value = entry.into_mut(),
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    /// *value += 10;
+    ///
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_mut(self) -> &'a mut V {
+        unsafe { &mut self.elem.as_mut().1 }
+    }
+
+    /// Sets the value of the entry, and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// if let EntryRef::Occupied(mut o) = map.entry_ref("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 15);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// // The map is empty
+    /// assert!(map.is_empty() && map.capacity() == 0);
+    ///
+    /// map.entry_ref("poneyland").or_insert(12);
+    ///
+    /// if let EntryRef::Occupied(o) = map.entry_ref("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    ///
+    /// assert_eq!(map.contains_key("poneyland"), false);
+    /// // Now the map holds no elements, but the capacity equals the old one
+    /// assert!(map.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> V {
+        self.remove_entry().1
+    }
+
+    /// Replaces the entry, returning the old key and value. The new key in the hash map will be
+    /// the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntryRef was created through [`EntryRef::insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<str>, u32> = HashMap::new();
+    /// let key: Rc<str> = Rc::from("Stringthing");
+    ///
+    /// map.insert(key.clone(), 15);
+    /// assert_eq!(Rc::strong_count(&key), 2);
+    ///
+    /// match map.entry_ref("Stringthing") {
+    ///     EntryRef::Occupied(entry) => {
+    ///         let (old_key, old_value): (Rc<str>, u32) = entry.replace_entry(16);
+    ///         assert!(Rc::ptr_eq(&key, &old_key) && old_value == 15);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(Rc::strong_count(&key), 1);
+    /// assert_eq!(map["Stringthing"], 16);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry(self, value: V) -> (K, V)
+    where
+        K: From<&'b Q>,
+    {
+        let entry = unsafe { self.elem.as_mut() };
+
+        let old_key = mem::replace(&mut entry.0, self.key.unwrap().into_owned());
+        let old_value = mem::replace(&mut entry.1, value);
+
+        (old_key, old_value)
+    }
+
+    /// Replaces the key in the hash map with the key used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntryRef was created through [`EntryRef::insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    /// use std::rc::Rc;
+    ///
+    /// let mut map: HashMap<Rc<str>, usize> = HashMap::with_capacity(6);
+    /// let mut keys: Vec<Rc<str>> = Vec::with_capacity(6);
+    ///
+    /// for (value, key) in ["a", "b", "c", "d", "e", "f"].into_iter().enumerate() {
+    ///     let rc_key: Rc<str> = Rc::from(key);
+    ///     keys.push(rc_key.clone());
+    ///     map.insert(rc_key.clone(), value);
+    /// }
+    ///
+    /// assert!(keys.iter().all(|key| Rc::strong_count(key) == 2));
+    ///
+    /// // It does not matter that `keys` holds clones of the map's keys, because
+    /// // the replacement keys are created fresh from the borrowed references.
+    /// reclaim_memory(&mut map, &keys);
+    ///
+    /// assert!(keys.iter().all(|key| Rc::strong_count(key) == 1));
+    ///
+    /// fn reclaim_memory(map: &mut HashMap<Rc<str>, usize>, keys: &[Rc<str>]) {
+    ///     for key in keys {
+    ///         if let EntryRef::Occupied(entry) = map.entry_ref(key.as_ref()) {
+    ///             // Replaces the entry's key with our version of it in `keys`.
+    ///             entry.replace_key();
+    ///         }
+    ///     }
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_key(self) -> K
+    where
+        K: From<&'b Q>,
+    {
+        let entry = unsafe { self.elem.as_mut() };
+        mem::replace(&mut entry.0, self.key.unwrap().into_owned())
+    }
+
+    /// Provides shared access to the key and owned access to the value of
+    /// the entry, and allows replacing or removing the entry based on the
+    /// value of the returned option.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// map.insert("poneyland".to_string(), 42);
+    ///
+    /// let entry = match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(e) => {
+    ///         e.replace_entry_with(|k, v| {
+    ///             assert_eq!(k, "poneyland");
+    ///             assert_eq!(v, 42);
+    ///             Some(v + 1)
+    ///         })
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     EntryRef::Occupied(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///         assert_eq!(e.get(), &43);
+    ///     }
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// }
+    ///
+    /// assert_eq!(map["poneyland"], 43);
+    ///
+    /// let entry = match map.entry_ref("poneyland") {
+    ///     EntryRef::Occupied(e) => e.replace_entry_with(|_k, _v| None),
+    ///     EntryRef::Vacant(_) => panic!(),
+    /// };
+    ///
+    /// match entry {
+    ///     EntryRef::Vacant(e) => {
+    ///         assert_eq!(e.key(), "poneyland");
+    ///     }
+    ///     EntryRef::Occupied(_) => panic!(),
+    /// }
+    ///
+    /// assert!(!map.contains_key("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace_entry_with<F>(self, f: F) -> EntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        F: FnOnce(&K, V) -> Option<V>,
+    {
+        unsafe {
+            let mut spare_key = None;
+
+            self.table
+                .table
+                .replace_bucket_with(self.elem.clone(), |(key, value)| {
+                    if let Some(new_value) = f(&key, value) {
+                        Some((key, new_value))
+                    } else {
+                        spare_key = Some(KeyOrRef::Owned(key));
+                        None
+                    }
+                });
+
+            if let Some(key) = spare_key {
+                EntryRef::Vacant(VacantEntryRef {
+                    hash: self.hash,
+                    key,
+                    table: self.table,
+                })
+            } else {
+                EntryRef::Occupied(self)
+            }
+        }
+    }
+}
+
+impl<'a, 'b, K, Q: ?Sized, V, S, A: Allocator> VacantEntryRef<'a, 'b, K, Q, V, S, A> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the `VacantEntryRef`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    /// assert_eq!(map.entry_ref(key).key(), "poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn key(&self) -> &Q
+    where
+        K: Borrow<Q>,
+    {
+        self.key.as_ref()
+    }
+
+    /// Take ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::{EntryRef, HashMap};
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    ///
+    /// match map.entry_ref(key) {
+    ///     EntryRef::Occupied(_) => panic!(),
+    ///     EntryRef::Vacant(v) => assert_eq!(v.into_key(), "poneyland".to_owned()),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_key(self) -> K
+    where
+        K: From<&'b Q>,
+    {
+        self.key.into_owned()
+    }
+
+    /// Sets the value of the entry with the VacantEntryRef's key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    /// use hashbrown::hash_map::EntryRef;
+    ///
+    /// let mut map: HashMap<String, u32> = HashMap::new();
+    /// let key: &str = "poneyland";
+    ///
+    /// if let EntryRef::Vacant(o) = map.entry_ref(key) {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self, value: V) -> &'a mut V
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        let table = &mut self.table.table;
+        let entry = table.insert_entry(
+            self.hash,
+            (self.key.into_owned(), value),
+            make_hasher::<_, V, S>(&self.table.hash_builder),
+        );
+        &mut entry.1
+    }
+
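+    /// Inserts the value and returns an `OccupiedEntryRef` pointing at the new
+    /// element. The returned entry has `key: None`, so calling `replace_entry`
+    /// or `replace_key` on it will panic.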
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn insert_entry(self, value: V) -> OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
+    where
+        K: Hash + From<&'b Q>,
+        S: BuildHasher,
+    {
+        let elem = self.table.table.insert(
+            self.hash,
+            (self.key.into_owned(), value),
+            make_hasher::<_, V, S>(&self.table.hash_builder),
+        );
+        OccupiedEntryRef {
+            hash: self.hash,
+            key: None,
+            elem,
+            table: self.table,
+        }
+    }
+}
+
+impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher + Default,
+    A: Default + Allocator,
+{
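+    /// Constructs a map from an iterator of key-value pairs, typically via
+    /// [`Iterator::collect`]. A minimal sketch:
+    ///
+    /// ```
+    /// use hashbrown::HashMap;
+    ///
+    /// let map: HashMap<i32, &str> = [(1, "one"), (2, "two")].into_iter().collect();
+    /// assert_eq!(map[&1], "one");
+    /// assert_eq!(map.len(), 2);
+    /// ```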
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
+        let iter = iter.into_iter();
+        let mut map =
+            Self::with_capacity_and_hasher_in(iter.size_hint().0, S::default(), A::default());
+        iter.for_each(|(k, v)| {
+            map.insert(k, v);
+        });
+        map
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing `HashMap<K, V, S, A>`.
+    /// Replaces the values of existing keys with the new values returned from the iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, 100);
+    ///
+    /// let some_iter = [(1, 1), (2, 2)].into_iter();
+    /// map.extend(some_iter);
+    /// // Values of existing keys are replaced with the new values from the iterator,
+    /// // so `map.get(&1)` no longer returns `Some(&100)`.
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+    /// map.extend(some_vec);
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.extend(some_arr);
+    /// let old_map_len = map.len();
+    ///
+    /// // You can also extend from another HashMap
+    /// let mut new_map = HashMap::new();
+    /// new_map.extend(map);
+    /// assert_eq!(new_map.len(), old_map_len);
+    ///
+    /// let mut vec: Vec<_> = new_map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        // Keys may already be present, or may appear multiple times in the iterator.
+        // Reserve the entire hint lower bound if the map is empty.
+        // Otherwise reserve half the hint (rounded up), so the map
+        // will only resize twice in the worst case.
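+        // For example, extending a non-empty map with a size hint of 100 reserves
+        // 50 slots up front; if all 100 keys turn out to be new, at most one more
+        // grow covers the remainder.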
+        let iter = iter.into_iter();
+        let reserve = if self.is_empty() {
+            iter.size_hint().0
+        } else {
+            (iter.size_hint().0 + 1) / 2
+        };
+        self.reserve(reserve);
+        iter.for_each(move |(k, v)| {
+            self.insert(k, v);
+        });
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, (k, v): (K, V)) {
+        self.insert(k, v);
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        // Keys may already be present, or may appear multiple times in the iterator.
+        // Reserve the entire hint lower bound if the map is empty.
+        // Otherwise reserve half the hint (rounded up), so the map
+        // will only resize twice in the worst case.
+        let reserve = if self.is_empty() {
+            additional
+        } else {
+            (additional + 1) / 2
+        };
+        self.reserve(reserve);
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing `HashMap<K, V, S, A>`.
+    /// Replaces the values of existing keys with the new values returned from the iterator.
+    /// The keys and values must implement the [`Copy`] trait.
+    ///
+    /// [`Copy`]: https://doc.rust-lang.org/core/marker/trait.Copy.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, 100);
+    ///
+    /// let arr = [(1, 1), (2, 2)];
+    /// let some_iter = arr.iter().map(|(k, v)| (k, v));
+    /// map.extend(some_iter);
+    /// // Values of existing keys are replaced with the new values from the iterator,
+    /// // so `map.get(&1)` no longer returns `Some(&100)`.
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+    /// map.extend(some_vec.iter().map(|(k, v)| (k, v)));
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.extend(some_arr.iter().map(|(k, v)| (k, v)));
+    ///
+    /// // You can also extend from another HashMap
+    /// let mut new_map = HashMap::new();
+    /// new_map.extend(&map);
+    /// assert_eq!(new_map, map);
+    ///
+    /// let mut vec: Vec<_> = new_map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<T: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: T) {
+        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
+        self.insert(*k, *v);
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(K, V)>::extend_reserve(self, additional);
+    }
+}
+
+/// Inserts all new key-value pairs from the iterator and replaces the values of
+/// existing keys with the new values returned from the iterator.
+impl<'a, K, V, S, A> Extend<&'a (K, V)> for HashMap<K, V, S, A>
+where
+    K: Eq + Hash + Copy,
+    V: Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Inserts all new key-value pairs from the iterator into the existing `HashMap<K, V, S, A>`.
+    /// Replaces the values of existing keys with the new values returned from the iterator.
+    /// The keys and values must implement the [`Copy`] trait.
+    ///
+    /// [`Copy`]: https://doc.rust-lang.org/core/marker/trait.Copy.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_map::HashMap;
+    ///
+    /// let mut map = HashMap::new();
+    /// map.insert(1, 100);
+    ///
+    /// let arr = [(1, 1), (2, 2)];
+    /// let some_iter = arr.iter();
+    /// map.extend(some_iter);
+    /// // Values of existing keys are replaced with the new values from the iterator,
+    /// // so `map.get(&1)` no longer returns `Some(&100)`.
+    /// assert_eq!(map.get(&1), Some(&1));
+    ///
+    /// let some_vec: Vec<_> = vec![(3, 3), (4, 4)];
+    /// map.extend(&some_vec);
+    ///
+    /// let some_arr = [(5, 5), (6, 6)];
+    /// map.extend(&some_arr);
+    ///
+    /// let mut vec: Vec<_> = map.into_iter().collect();
+    /// // The `IntoIter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<T: IntoIterator<Item = &'a (K, V)>>(&mut self, iter: T) {
+        self.extend(iter.into_iter().map(|&(key, value)| (key, value)));
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, &(k, v): &'a (K, V)) {
+        self.insert(k, v);
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(K, V)>::extend_reserve(self, additional);
+    }
+}
+
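+// Compile-time check that `HashMap` and its iterator types are covariant over
+// their borrowed key and value lifetimes: each identity function below only
+// compiles if the `'static` type coerces to the shorter `'new` lifetime.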
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> {
+        v
+    }
+    fn map_val<'new>(v: HashMap<u8, &'static str>) -> HashMap<u8, &'new str> {
+        v
+    }
+    fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> {
+        v
+    }
+    fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> {
+        v
+    }
+    fn into_iter_key<'new, A: Allocator>(
+        v: IntoIter<&'static str, u8, A>,
+    ) -> IntoIter<&'new str, u8, A> {
+        v
+    }
+    fn into_iter_val<'new, A: Allocator>(
+        v: IntoIter<u8, &'static str, A>,
+    ) -> IntoIter<u8, &'new str, A> {
+        v
+    }
+    fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> {
+        v
+    }
+    fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> {
+        v
+    }
+    fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> {
+        v
+    }
+    fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> {
+        v
+    }
+    fn drain<'new>(
+        d: Drain<'static, &'static str, &'static str>,
+    ) -> Drain<'new, &'new str, &'new str> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_map {
+    use super::DefaultHashBuilder;
+    use super::Entry::{Occupied, Vacant};
+    use super::EntryRef;
+    use super::{HashMap, RawEntryMut};
+    use alloc::string::{String, ToString};
+    use alloc::sync::Arc;
+    use allocator_api2::alloc::{AllocError, Allocator, Global};
+    use core::alloc::Layout;
+    use core::ptr::NonNull;
+    use core::sync::atomic::{AtomicI8, Ordering};
+    use rand::{rngs::SmallRng, Rng, SeedableRng};
+    use std::borrow::ToOwned;
+    use std::cell::RefCell;
+    use std::usize;
+    use std::vec::Vec;
+
+    #[test]
+    fn test_zero_capacities() {
+        type HM = HashMap<i32, i32>;
+
+        let m = HM::new();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::default();
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity(0);
+        assert_eq!(m.capacity(), 0);
+
+        let m = HM::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.insert(1, 1);
+        m.insert(2, 2);
+        m.remove(&1);
+        m.remove(&2);
+        m.shrink_to_fit();
+        assert_eq!(m.capacity(), 0);
+
+        let mut m = HM::new();
+        m.reserve(0);
+        assert_eq!(m.capacity(), 0);
+    }
+
+    #[test]
+    fn test_create_capacity_zero() {
+        let mut m = HashMap::with_capacity(0);
+
+        assert!(m.insert(1, 1).is_none());
+
+        assert!(m.contains_key(&1));
+        assert!(!m.contains_key(&0));
+    }
+
+    #[test]
+    fn test_insert() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&2).unwrap(), 4);
+    }
+
+    #[test]
+    fn test_clone() {
+        let mut m = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        #[allow(clippy::redundant_clone)]
+        let m2 = m.clone();
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
+    #[test]
+    fn test_clone_from() {
+        let mut m = HashMap::new();
+        let mut m2 = HashMap::new();
+        assert_eq!(m.len(), 0);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(m.len(), 1);
+        assert!(m.insert(2, 4).is_none());
+        assert_eq!(m.len(), 2);
+        m2.clone_from(&m);
+        assert_eq!(*m2.get(&1).unwrap(), 2);
+        assert_eq!(*m2.get(&2).unwrap(), 4);
+        assert_eq!(m2.len(), 2);
+    }
+
+    thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = const { RefCell::new(Vec::new()) } }
+
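+    // `Droppable` increments `DROP_VECTOR[k]` when constructed (or cloned) and
+    // decrements it when dropped, so each slot counts the live instances with
+    // that index.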
+    #[derive(Hash, PartialEq, Eq)]
+    struct Droppable {
+        k: usize,
+    }
+
+    impl Droppable {
+        fn new(k: usize) -> Droppable {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[k] += 1;
+            });
+
+            Droppable { k }
+        }
+    }
+
+    impl Drop for Droppable {
+        fn drop(&mut self) {
+            DROP_VECTOR.with(|slot| {
+                slot.borrow_mut()[self.k] -= 1;
+            });
+        }
+    }
+
+    impl Clone for Droppable {
+        fn clone(&self) -> Self {
+            Droppable::new(self.k)
+        }
+    }
+
+    #[test]
+    fn test_drops() {
+        DROP_VECTOR.with(|slot| {
+            *slot.borrow_mut() = vec![0; 200];
+        });
+
+        {
+            let mut m = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                m.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for i in 0..50 {
+                let k = Droppable::new(i);
+                let v = m.remove(&k);
+
+                assert!(v.is_some());
+
+                DROP_VECTOR.with(|v| {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                });
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..50 {
+                    assert_eq!(v.borrow()[i], 0);
+                    assert_eq!(v.borrow()[i + 100], 0);
+                }
+
+                for i in 50..100 {
+                    assert_eq!(v.borrow()[i], 1);
+                    assert_eq!(v.borrow()[i + 100], 1);
+                }
+            });
+        }
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_into_iter_drops() {
+        DROP_VECTOR.with(|v| {
+            *v.borrow_mut() = vec![0; 200];
+        });
+
+        let hm = {
+            let mut hm = HashMap::new();
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 0);
+                }
+            });
+
+            for i in 0..100 {
+                let d1 = Droppable::new(i);
+                let d2 = Droppable::new(i + 100);
+                hm.insert(d1, d2);
+            }
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            hm
+        };
+
+        // By the way, ensure that cloning doesn't screw up the dropping.
+        drop(hm.clone());
+
+        {
+            let mut half = hm.into_iter().take(50);
+
+            DROP_VECTOR.with(|v| {
+                for i in 0..200 {
+                    assert_eq!(v.borrow()[i], 1);
+                }
+            });
+
+            for _ in half.by_ref() {}
+
+            DROP_VECTOR.with(|v| {
+                let nk = (0..100).filter(|&i| v.borrow()[i] == 1).count();
+
+                let nv = (0..100).filter(|&i| v.borrow()[i + 100] == 1).count();
+
+                assert_eq!(nk, 50);
+                assert_eq!(nv, 50);
+            });
+        };
+
+        DROP_VECTOR.with(|v| {
+            for i in 0..200 {
+                assert_eq!(v.borrow()[i], 0);
+            }
+        });
+    }
+
+    #[test]
+    fn test_empty_remove() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.remove(&0), None);
+    }
+
+    #[test]
+    fn test_empty_entry() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        match m.entry(0) {
+            Occupied(_) => panic!(),
+            Vacant(_) => {}
+        }
+        assert!(*m.entry(0).or_insert(true));
+        assert_eq!(m.len(), 1);
+    }
+
+    #[test]
+    fn test_empty_entry_ref() {
+        let mut m: HashMap<std::string::String, bool> = HashMap::new();
+        match m.entry_ref("poneyland") {
+            EntryRef::Occupied(_) => panic!(),
+            EntryRef::Vacant(_) => {}
+        }
+        assert!(*m.entry_ref("poneyland").or_insert(true));
+        assert_eq!(m.len(), 1);
+    }
+
+    #[test]
+    fn test_empty_iter() {
+        let mut m: HashMap<i32, bool> = HashMap::new();
+        assert_eq!(m.drain().next(), None);
+        assert_eq!(m.keys().next(), None);
+        assert_eq!(m.values().next(), None);
+        assert_eq!(m.values_mut().next(), None);
+        assert_eq!(m.iter().next(), None);
+        assert_eq!(m.iter_mut().next(), None);
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+        assert_eq!(m.into_iter().next(), None);
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: takes too long
+    fn test_lots_of_insertions() {
+        let mut m = HashMap::new();
+
+        // Try this a few times to make sure we never screw up the hashmap's
+        // internal state.
+        for _ in 0..10 {
+            assert!(m.is_empty());
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+
+                for j in 1..=i {
+                    let r = m.get(&j);
+                    assert_eq!(r, Some(&j));
+                }
+
+                for j in i + 1..1001 {
+                    let r = m.get(&j);
+                    assert_eq!(r, None);
+                }
+            }
+
+            for i in 1001..2001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            // remove forwards
+            for i in 1..1001 {
+                assert!(m.remove(&i).is_some());
+
+                for j in 1..=i {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in i + 1..1001 {
+                    assert!(m.contains_key(&j));
+                }
+            }
+
+            for i in 1..1001 {
+                assert!(!m.contains_key(&i));
+            }
+
+            for i in 1..1001 {
+                assert!(m.insert(i, i).is_none());
+            }
+
+            // remove backwards
+            for i in (1..1001).rev() {
+                assert!(m.remove(&i).is_some());
+
+                for j in i..1001 {
+                    assert!(!m.contains_key(&j));
+                }
+
+                for j in 1..i {
+                    assert!(m.contains_key(&j));
+                }
+            }
+        }
+    }
+
+    #[test]
+    fn test_find_mut() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 12).is_none());
+        assert!(m.insert(2, 8).is_none());
+        assert!(m.insert(5, 14).is_none());
+        let new = 100;
+        match m.get_mut(&5) {
+            None => panic!(),
+            Some(x) => *x = new,
+        }
+        assert_eq!(m.get(&5), Some(&new));
+    }
+
+    #[test]
+    fn test_insert_overwrite() {
+        let mut m = HashMap::new();
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(1, 3).is_some());
+        assert_eq!(*m.get(&1).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_insert_conflicts() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(m.insert(5, 3).is_none());
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&1).unwrap(), 2);
+    }
+
+    #[test]
+    fn test_conflict_remove() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert!(m.insert(5, 3).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert!(m.insert(9, 4).is_none());
+        assert_eq!(*m.get(&1).unwrap(), 2);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert!(m.remove(&1).is_some());
+        assert_eq!(*m.get(&9).unwrap(), 4);
+        assert_eq!(*m.get(&5).unwrap(), 3);
+    }
+
+    #[test]
+    fn test_insert_unique_unchecked() {
+        let mut map = HashMap::new();
+        let (k1, v1) = map.insert_unique_unchecked(10, 11);
+        assert_eq!((&10, &mut 11), (k1, v1));
+        let (k2, v2) = map.insert_unique_unchecked(20, 21);
+        assert_eq!((&20, &mut 21), (k2, v2));
+        assert_eq!(Some(&11), map.get(&10));
+        assert_eq!(Some(&21), map.get(&20));
+        assert_eq!(None, map.get(&30));
+    }
+
+    #[test]
+    fn test_is_empty() {
+        let mut m = HashMap::with_capacity(4);
+        assert!(m.insert(1, 2).is_none());
+        assert!(!m.is_empty());
+        assert!(m.remove(&1).is_some());
+        assert!(m.is_empty());
+    }
+
+    #[test]
+    fn test_remove() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove(&1), Some(2));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_remove_entry() {
+        let mut m = HashMap::new();
+        m.insert(1, 2);
+        assert_eq!(m.remove_entry(&1), Some((1, 2)));
+        assert_eq!(m.remove(&1), None);
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut m = HashMap::with_capacity(4);
+        for i in 0..32 {
+            assert!(m.insert(i, i * 2).is_none());
+        }
+        assert_eq!(m.len(), 32);
+
+        let mut observed: u32 = 0;
+
+        for (k, v) in &m {
+            assert_eq!(*v, *k * 2);
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_keys() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let keys: Vec<_> = map.keys().copied().collect();
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_values() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let values: Vec<_> = map.values().copied().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_values_mut() {
+        let vec = vec![(1, 1), (2, 2), (3, 3)];
+        let mut map: HashMap<_, _> = vec.into_iter().collect();
+        for value in map.values_mut() {
+            *value *= 2;
+        }
+        let values: Vec<_> = map.values().copied().collect();
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&2));
+        assert!(values.contains(&4));
+        assert!(values.contains(&6));
+    }
+
+    #[test]
+    fn test_into_keys() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let keys: Vec<_> = map.into_keys().collect();
+
+        assert_eq!(keys.len(), 3);
+        assert!(keys.contains(&1));
+        assert!(keys.contains(&2));
+        assert!(keys.contains(&3));
+    }
+
+    #[test]
+    fn test_into_values() {
+        let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
+        let map: HashMap<_, _> = vec.into_iter().collect();
+        let values: Vec<_> = map.into_values().collect();
+
+        assert_eq!(values.len(), 3);
+        assert!(values.contains(&'a'));
+        assert!(values.contains(&'b'));
+        assert!(values.contains(&'c'));
+    }
+
+    #[test]
+    fn test_find() {
+        let mut m = HashMap::new();
+        assert!(m.get(&1).is_none());
+        m.insert(1, 2);
+        match m.get(&1) {
+            None => panic!(),
+            Some(v) => assert_eq!(*v, 2),
+        }
+    }
+
+    #[test]
+    fn test_eq() {
+        let mut m1 = HashMap::new();
+        m1.insert(1, 2);
+        m1.insert(2, 3);
+        m1.insert(3, 4);
+
+        let mut m2 = HashMap::new();
+        m2.insert(1, 2);
+        m2.insert(2, 3);
+
+        assert!(m1 != m2);
+
+        m2.insert(3, 4);
+
+        assert_eq!(m1, m2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut map = HashMap::new();
+        let empty: HashMap<i32, i32> = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(3, 4);
+
+        let map_str = format!("{map:?}");
+
+        assert!(map_str == "{1: 2, 3: 4}" || map_str == "{3: 4, 1: 2}");
+        assert_eq!(format!("{empty:?}"), "{}");
+    }
+
+    #[test]
+    fn test_expand() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert!(m.is_empty());
+
+        let mut i = 0;
+        let old_raw_cap = m.raw_capacity();
+        while old_raw_cap == m.raw_capacity() {
+            m.insert(i, i);
+            i += 1;
+        }
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+    }
+
+    #[test]
+    fn test_behavior_resize_policy() {
+        let mut m = HashMap::new();
+
+        assert_eq!(m.len(), 0);
+        assert_eq!(m.raw_capacity(), 1);
+        assert!(m.is_empty());
+
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.is_empty());
+        let initial_raw_cap = m.raw_capacity();
+        m.reserve(initial_raw_cap);
+        let raw_cap = m.raw_capacity();
+
+        assert_eq!(raw_cap, initial_raw_cap * 2);
+
+        let mut i = 0;
+        for _ in 0..raw_cap * 3 / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // three quarters full
+
+        assert_eq!(m.len(), i);
+        assert_eq!(m.raw_capacity(), raw_cap);
+
+        for _ in 0..raw_cap / 4 {
+            m.insert(i, i);
+            i += 1;
+        }
+        // half full
+
+        let new_raw_cap = m.raw_capacity();
+        assert_eq!(new_raw_cap, raw_cap * 2);
+
+        for _ in 0..raw_cap / 2 - 1 {
+            i -= 1;
+            m.remove(&i);
+            assert_eq!(m.raw_capacity(), new_raw_cap);
+        }
+        // A little more than one quarter full.
+        m.shrink_to_fit();
+        assert_eq!(m.raw_capacity(), raw_cap);
+        // again, a little more than half full
+        for _ in 0..raw_cap / 2 {
+            i -= 1;
+            m.remove(&i);
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), i);
+        assert!(!m.is_empty());
+        assert_eq!(m.raw_capacity(), initial_raw_cap);
+    }
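+
+    // A reading of the policy exercised above (a summary of the assertions,
+    // not a normative statement): `reserve(initial_raw_cap)` doubles the raw
+    // capacity, filling past the load factor doubles it again, and
+    // `shrink_to_fit` walks the raw capacity back down to the smallest size
+    // that still holds the remaining elements.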
+
+    #[test]
+    fn test_reserve_shrink_to_fit() {
+        let mut m = HashMap::new();
+        m.insert(0, 0);
+        m.remove(&0);
+        assert!(m.capacity() >= m.len());
+        for i in 0..128 {
+            m.insert(i, i);
+        }
+        m.reserve(256);
+
+        let usable_cap = m.capacity();
+        for i in 128..(128 + 256) {
+            m.insert(i, i);
+            assert_eq!(m.capacity(), usable_cap);
+        }
+
+        for i in 100..(128 + 256) {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+
+        assert_eq!(m.len(), 100);
+        assert!(!m.is_empty());
+        assert!(m.capacity() >= m.len());
+
+        for i in 0..100 {
+            assert_eq!(m.remove(&i), Some(i));
+        }
+        m.shrink_to_fit();
+        m.insert(0, 0);
+
+        assert_eq!(m.len(), 1);
+        assert!(m.capacity() >= m.len());
+        assert_eq!(m.remove(&0), Some(0));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [(1, 1), (2, 2), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        for &(k, v) in &xs {
+            assert_eq!(map.get(&k), Some(&v));
+        }
+
+        assert_eq!(map.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_mut_size_hint() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.size_hint(), (3, Some(3)));
+    }
+
+    #[test]
+    fn test_iter_mut_len() {
+        let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let mut iter = map.iter_mut();
+
+        for _ in iter.by_ref().take(3) {}
+
+        assert_eq!(iter.len(), 3);
+    }
+
+    #[test]
+    fn test_index() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        assert_eq!(map[&2], 1);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_index_nonexistent() {
+        let mut map = HashMap::new();
+
+        map.insert(1, 2);
+        map.insert(2, 1);
+        map.insert(3, 4);
+
+        #[allow(clippy::no_effect)] // false positive lint
+        map[&4];
+    }
+
+    #[test]
+    fn test_entry() {
+        let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        // Existing key (insert)
+        match map.entry(1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        assert_eq!(map.get(&1).unwrap(), &100);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.entry(2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        assert_eq!(map.get(&2).unwrap(), &200);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        match map.entry(3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove(), 30);
+            }
+        }
+        assert_eq!(map.get(&3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.entry(10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(*view.insert(1000), 1000);
+            }
+        }
+        assert_eq!(map.get(&10).unwrap(), &1000);
+        assert_eq!(map.len(), 6);
+    }
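+
+    // A hedged illustrative sketch: the same entry API is more commonly
+    // driven through `or_insert` / `and_modify` than by matching on the
+    // variants directly, as the test above does.
+    #[test]
+    fn test_entry_or_insert_sketch() {
+        let mut counts: HashMap<&str, i32> = HashMap::new();
+        // The first call inserts 1; the second bumps it to 2.
+        counts.entry("a").and_modify(|c| *c += 1).or_insert(1);
+        counts.entry("a").and_modify(|c| *c += 1).or_insert(1);
+        assert_eq!(counts["a"], 2);
+    }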
+
+    #[test]
+    fn test_entry_ref() {
+        let xs = [
+            ("One".to_owned(), 10),
+            ("Two".to_owned(), 20),
+            ("Three".to_owned(), 30),
+            ("Four".to_owned(), 40),
+            ("Five".to_owned(), 50),
+            ("Six".to_owned(), 60),
+        ];
+
+        let mut map: HashMap<_, _> = xs.iter().cloned().collect();
+
+        // Existing key (insert)
+        match map.entry_ref("One") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        assert_eq!(map.get("One").unwrap(), &100);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.entry_ref("Two") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        assert_eq!(map.get("Two").unwrap(), &200);
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        match map.entry_ref("Three") {
+            EntryRef::Vacant(_) => unreachable!(),
+            EntryRef::Occupied(view) => {
+                assert_eq!(view.remove(), 30);
+            }
+        }
+        assert_eq!(map.get("Three"), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.entry_ref("Ten") {
+            EntryRef::Occupied(_) => unreachable!(),
+            EntryRef::Vacant(view) => {
+                assert_eq!(*view.insert(1000), 1000);
+            }
+        }
+        assert_eq!(map.get("Ten").unwrap(), &1000);
+        assert_eq!(map.len(), 6);
+    }
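+
+    // A hedged note on `entry_ref`, mirroring the test above: the lookup only
+    // borrows a `&str`, and an owned `String` key is materialized only if the
+    // vacant arm actually inserts, e.g.
+    //
+    //     *map.entry_ref("Seven").or_insert(0) += 70;
+    //
+    // allocates the key "Seven" only on its first use.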
+
+    #[test]
+    fn test_entry_take_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10..10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10..10);
+            match m.entry(x) {
+                Vacant(_) => {}
+                Occupied(e) => {
+                    e.remove();
+                }
+            }
+
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_entry_ref_take_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+        // Test for #19292
+        fn check(m: &HashMap<std::string::String, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let mut x = std::string::String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let mut x = std::string::String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            match m.entry_ref(x.as_str()) {
+                EntryRef::Vacant(_) => {}
+                EntryRef::Occupied(e) => {
+                    e.remove();
+                }
+            }
+
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_extend_ref_k_ref_v() {
+        let mut a = HashMap::new();
+        a.insert(1, "one");
+        let mut b = HashMap::new();
+        b.insert(2, "two");
+        b.insert(3, "three");
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 3);
+        assert_eq!(a[&1], "one");
+        assert_eq!(a[&2], "two");
+        assert_eq!(a[&3], "three");
+    }
+
+    #[test]
+    #[allow(clippy::needless_borrow)]
+    fn test_extend_ref_kv_tuple() {
+        use std::ops::AddAssign;
+        let mut a = HashMap::new();
+        a.insert(0, 0);
+
+        fn create_arr<T: AddAssign<T> + Copy, const N: usize>(start: T, step: T) -> [(T, T); N] {
+            let mut outs: [(T, T); N] = [(start, start); N];
+            let mut element = step;
+            outs.iter_mut().skip(1).for_each(|(k, v)| {
+                *k += element;
+                *v += element;
+                element += step;
+            });
+            outs
+        }
+
+        let for_iter: Vec<_> = (0..100).map(|i| (i, i)).collect();
+        let iter = for_iter.iter();
+        let vec: Vec<_> = (100..200).map(|i| (i, i)).collect();
+        a.extend(iter);
+        a.extend(&vec);
+        a.extend(create_arr::<i32, 100>(200, 1));
+
+        assert_eq!(a.len(), 300);
+
+        for item in 0..300 {
+            assert_eq!(a[&item], item);
+        }
+    }
+
+    #[test]
+    fn test_capacity_not_less_than_len() {
+        let mut a = HashMap::new();
+        let mut item = 0;
+
+        for _ in 0..116 {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert!(a.capacity() > a.len());
+
+        let free = a.capacity() - a.len();
+        for _ in 0..free {
+            a.insert(item, 0);
+            item += 1;
+        }
+
+        assert_eq!(a.len(), a.capacity());
+
+        // Insert at capacity should cause allocation.
+        a.insert(item, 0);
+        assert!(a.capacity() > a.len());
+    }
+
+    #[test]
+    fn test_occupied_entry_key() {
+        let mut a = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+        assert!(a.is_empty());
+        a.insert(key, value);
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+
+        match a.entry(key) {
+            Vacant(_) => panic!(),
+            Occupied(e) => assert_eq!(key, *e.key()),
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_occupied_entry_ref_key() {
+        let mut a = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+        assert!(a.is_empty());
+        a.insert(key.to_owned(), value);
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+
+        match a.entry_ref(key) {
+            EntryRef::Vacant(_) => panic!(),
+            EntryRef::Occupied(e) => assert_eq!(key, e.key()),
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_vacant_entry_key() {
+        let mut a = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+
+        assert!(a.is_empty());
+        match a.entry(key) {
+            Occupied(_) => panic!(),
+            Vacant(e) => {
+                assert_eq!(key, *e.key());
+                e.insert(value);
+            }
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_vacant_entry_ref_key() {
+        let mut a: HashMap<std::string::String, &str> = HashMap::new();
+        let key = "hello there";
+        let value = "value goes here";
+
+        assert!(a.is_empty());
+        match a.entry_ref(key) {
+            EntryRef::Occupied(_) => panic!(),
+            EntryRef::Vacant(e) => {
+                assert_eq!(key, e.key());
+                e.insert(value);
+            }
+        }
+        assert_eq!(a.len(), 1);
+        assert_eq!(a[key], value);
+    }
+
+    #[test]
+    fn test_occupied_entry_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry(key).insert(value).replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = match a.entry(key) {
+            Occupied(e) => e.replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            }),
+            Vacant(_) => panic!(),
+        };
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_occupied_entry_ref_replace_entry_with() {
+        let mut a: HashMap<std::string::String, &str> = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry_ref(key).insert(value).replace_entry_with(|k, v| {
+            assert_eq!(k, key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            EntryRef::Occupied(e) => {
+                assert_eq!(e.key(), key);
+                assert_eq!(e.get(), &new_value);
+            }
+            EntryRef::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = match a.entry_ref(key) {
+            EntryRef::Occupied(e) => e.replace_entry_with(|k, v| {
+                assert_eq!(k, key);
+                assert_eq!(v, new_value);
+                None
+            }),
+            EntryRef::Vacant(_) => panic!(),
+        };
+
+        match entry {
+            EntryRef::Vacant(e) => assert_eq!(e.key(), key),
+            EntryRef::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_entry_and_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry(key).and_replace_entry_with(|_, _| panic!());
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        a.insert(key, value);
+
+        let entry = a.entry(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a.entry(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, &key);
+            assert_eq!(v, new_value);
+            None
+        });
+
+        match entry {
+            Vacant(e) => assert_eq!(e.key(), &key),
+            Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_entry_ref_and_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a.entry_ref(key).and_replace_entry_with(|_, _| panic!());
+
+        match entry {
+            EntryRef::Vacant(e) => assert_eq!(e.key(), key),
+            EntryRef::Occupied(_) => panic!(),
+        }
+
+        a.insert(key.to_owned(), value);
+
+        let entry = a.entry_ref(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, key);
+            assert_eq!(v, value);
+            Some(new_value)
+        });
+
+        match entry {
+            EntryRef::Occupied(e) => {
+                assert_eq!(e.key(), key);
+                assert_eq!(e.get(), &new_value);
+            }
+            EntryRef::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a.entry_ref(key).and_replace_entry_with(|k, v| {
+            assert_eq!(k, key);
+            assert_eq!(v, new_value);
+            None
+        });
+
+        match entry {
+            EntryRef::Vacant(e) => assert_eq!(e.key(), key),
+            EntryRef::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_raw_occupied_entry_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .insert(key, value)
+            .replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, value);
+                Some(new_value)
+            });
+
+        match entry {
+            RawEntryMut::Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            RawEntryMut::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = match a.raw_entry_mut().from_key(&key) {
+            RawEntryMut::Occupied(e) => e.replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            }),
+            RawEntryMut::Vacant(_) => panic!(),
+        };
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_raw_entry_and_replace_entry_with() {
+        let mut a = HashMap::new();
+
+        let key = "a key";
+        let value = "an initial value";
+        let new_value = "a new value";
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|_, _| panic!());
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        a.insert(key, value);
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, value);
+                Some(new_value)
+            });
+
+        match entry {
+            RawEntryMut::Occupied(e) => {
+                assert_eq!(e.key(), &key);
+                assert_eq!(e.get(), &new_value);
+            }
+            RawEntryMut::Vacant(_) => panic!(),
+        }
+
+        assert_eq!(a[key], new_value);
+        assert_eq!(a.len(), 1);
+
+        let entry = a
+            .raw_entry_mut()
+            .from_key(&key)
+            .and_replace_entry_with(|k, v| {
+                assert_eq!(k, &key);
+                assert_eq!(v, new_value);
+                None
+            });
+
+        match entry {
+            RawEntryMut::Vacant(_) => {}
+            RawEntryMut::Occupied(_) => panic!(),
+        }
+
+        assert!(!a.contains_key(key));
+        assert_eq!(a.len(), 0);
+    }
+
+    #[test]
+    fn test_replace_entry_with_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+        // Test for #19292
+        fn check(m: &HashMap<i32, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let x = rng.gen_range(-10..10);
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let x = rng.gen_range(-10..10);
+            m.entry(x).and_replace_entry_with(|_, _| None);
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_replace_entry_ref_with_doesnt_corrupt() {
+        #![allow(deprecated)] // rand
+        // Test for #19292
+        fn check(m: &HashMap<std::string::String, ()>) {
+            for k in m.keys() {
+                assert!(m.contains_key(k), "{k} is in keys() but not in the map?");
+            }
+        }
+
+        let mut m = HashMap::new();
+
+        let mut rng = {
+            let seed = u64::from_le_bytes(*b"testseed");
+            SmallRng::seed_from_u64(seed)
+        };
+
+        // Populate the map with some items.
+        for _ in 0..50 {
+            let mut x = std::string::String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.insert(x, ());
+        }
+
+        for _ in 0..1000 {
+            let mut x = std::string::String::with_capacity(1);
+            x.push(rng.gen_range('a'..='z'));
+            m.entry_ref(x.as_str()).and_replace_entry_with(|_, _| None);
+            check(&m);
+        }
+    }
+
+    #[test]
+    fn test_retain() {
+        let mut map: HashMap<i32, i32> = (0..100).map(|x| (x, x * 10)).collect();
+
+        map.retain(|&k, _| k % 2 == 0);
+        assert_eq!(map.len(), 50);
+        assert_eq!(map[&2], 20);
+        assert_eq!(map[&4], 40);
+        assert_eq!(map[&6], 60);
+    }
+
+    #[test]
+    fn test_extract_if() {
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            let drained = map.extract_if(|&k, _| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(vec![(0, 0), (2, 20), (4, 40), (6, 60)], out);
+            assert_eq!(map.len(), 4);
+        }
+        {
+            let mut map: HashMap<i32, i32> = (0..8).map(|x| (x, x * 10)).collect();
+            map.extract_if(|&k, _| k % 2 == 0).for_each(drop);
+            assert_eq!(map.len(), 4);
+        }
+    }
+
+    #[test]
+    #[cfg_attr(miri, ignore)] // FIXME: no OOM signalling (https://github.com/rust-lang/miri/issues/613)
+    fn test_try_reserve() {
+        use crate::TryReserveError::{AllocError, CapacityOverflow};
+
+        const MAX_ISIZE: usize = isize::MAX as usize;
+
+        let mut empty_bytes: HashMap<u8, u8> = HashMap::new();
+
+        if let Err(CapacityOverflow) = empty_bytes.try_reserve(usize::MAX) {
+        } else {
+            panic!("usize::MAX should trigger an overflow!");
+        }
+
+        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_ISIZE) {
+        } else {
+            panic!("isize::MAX should trigger an overflow!");
+        }
+
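+        // `CapacityOverflow` above means the requested capacity is so large
+        // that the allocation size cannot even be computed; `AllocError` below
+        // means the size was computable but the allocator refused the request.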
+        if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_ISIZE / 5) {
+        } else {
+            // This may succeed if there is enough free memory. Attempt to
+            // allocate a few more hashmaps to ensure the allocation will fail.
+            let mut empty_bytes2: HashMap<u8, u8> = HashMap::new();
+            let _ = empty_bytes2.try_reserve(MAX_ISIZE / 5);
+            let mut empty_bytes3: HashMap<u8, u8> = HashMap::new();
+            let _ = empty_bytes3.try_reserve(MAX_ISIZE / 5);
+            let mut empty_bytes4: HashMap<u8, u8> = HashMap::new();
+            if let Err(AllocError { .. }) = empty_bytes4.try_reserve(MAX_ISIZE / 5) {
+            } else {
+                panic!("isize::MAX / 5 should trigger an OOM!");
+            }
+        }
+    }
+
+    #[test]
+    fn test_raw_entry() {
+        use super::RawEntryMut::{Occupied, Vacant};
+
+        let xs = [(1_i32, 10_i32), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+        let mut map: HashMap<_, _> = xs.iter().copied().collect();
+
+        let compute_hash = |map: &HashMap<i32, i32>, k: i32| -> u64 {
+            super::make_hash::<i32, _>(map.hasher(), &k)
+        };
+
+        // Existing key (insert)
+        match map.raw_entry_mut().from_key(&1) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                assert_eq!(view.get(), &10);
+                assert_eq!(view.insert(100), 10);
+            }
+        }
+        let hash1 = compute_hash(&map, 1);
+        assert_eq!(map.raw_entry().from_key(&1).unwrap(), (&1, &100));
+        assert_eq!(
+            map.raw_entry().from_hash(hash1, |k| *k == 1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash1, &1).unwrap(),
+            (&1, &100)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (update)
+        match map.raw_entry_mut().from_key(&2) {
+            Vacant(_) => unreachable!(),
+            Occupied(mut view) => {
+                let v = view.get_mut();
+                let new_v = (*v) * 10;
+                *v = new_v;
+            }
+        }
+        let hash2 = compute_hash(&map, 2);
+        assert_eq!(map.raw_entry().from_key(&2).unwrap(), (&2, &200));
+        assert_eq!(
+            map.raw_entry().from_hash(hash2, |k| *k == 2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(
+            map.raw_entry().from_key_hashed_nocheck(hash2, &2).unwrap(),
+            (&2, &200)
+        );
+        assert_eq!(map.len(), 6);
+
+        // Existing key (take)
+        let hash3 = compute_hash(&map, 3);
+        match map.raw_entry_mut().from_key_hashed_nocheck(hash3, &3) {
+            Vacant(_) => unreachable!(),
+            Occupied(view) => {
+                assert_eq!(view.remove_entry(), (3, 30));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&3), None);
+        assert_eq!(map.raw_entry().from_hash(hash3, |k| *k == 3), None);
+        assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash3, &3), None);
+        assert_eq!(map.len(), 5);
+
+        // Nonexistent key (insert)
+        match map.raw_entry_mut().from_key(&10) {
+            Occupied(_) => unreachable!(),
+            Vacant(view) => {
+                assert_eq!(view.insert(10, 1000), (&mut 10, &mut 1000));
+            }
+        }
+        assert_eq!(map.raw_entry().from_key(&10).unwrap(), (&10, &1000));
+        assert_eq!(map.len(), 6);
+
+        // Ensure all lookup methods produce equivalent results.
+        for k in 0..12 {
+            let hash = compute_hash(&map, k);
+            let v = map.get(&k).copied();
+            let kv = v.as_ref().map(|v| (&k, v));
+
+            assert_eq!(map.raw_entry().from_key(&k), kv);
+            assert_eq!(map.raw_entry().from_hash(hash, |q| *q == k), kv);
+            assert_eq!(map.raw_entry().from_key_hashed_nocheck(hash, &k), kv);
+
+            match map.raw_entry_mut().from_key(&k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_key_hashed_nocheck(hash, &k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+            match map.raw_entry_mut().from_hash(hash, |q| *q == k) {
+                Occupied(o) => assert_eq!(Some(o.get_key_value()), kv),
+                Vacant(_) => assert_eq!(v, None),
+            }
+        }
+    }
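+
+    // Hedged sketch of why the raw-entry API exists, reusing the helper from
+    // the test above: callers that already hold a hash (for example from an
+    // external index) can skip re-hashing the key:
+    //
+    //     let hash = compute_hash(&map, 1);
+    //     let found = map.raw_entry().from_key_hashed_nocheck(hash, &1);
+    //
+    // `from_hash` goes further and matches on a predicate alone, which also
+    // works for key types without a `Hash` impl (see
+    // `test_key_without_hash_impl` below).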
+
+    #[test]
+    fn test_key_without_hash_impl() {
+        #[derive(Debug)]
+        struct IntWrapper(u64);
+
+        let mut m: HashMap<IntWrapper, (), ()> = HashMap::default();
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        }
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 0"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(0, IntWrapper(0), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_none());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let vacant_entry = match m.raw_entry_mut().from_hash(1, |k| k.0 == 1) {
+                RawEntryMut::Occupied(..) => panic!("Found entry for key 1"),
+                RawEntryMut::Vacant(e) => e,
+            };
+            vacant_entry.insert_with_hasher(1, IntWrapper(1), (), |k| k.0);
+        }
+        {
+            assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_some());
+            assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+            assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+        }
+        {
+            let occupied_entry = match m.raw_entry_mut().from_hash(0, |k| k.0 == 0) {
+                RawEntryMut::Occupied(e) => e,
+                RawEntryMut::Vacant(..) => panic!("Couldn't find entry for key 0"),
+            };
+            occupied_entry.remove();
+        }
+        assert!(m.raw_entry().from_hash(0, |k| k.0 == 0).is_none());
+        assert!(m.raw_entry().from_hash(1, |k| k.0 == 1).is_some());
+        assert!(m.raw_entry().from_hash(2, |k| k.0 == 2).is_none());
+    }
+
+    #[test]
+    #[cfg(feature = "raw")]
+    fn test_into_iter_refresh() {
+        #[cfg(miri)]
+        const N: usize = 32;
+        #[cfg(not(miri))]
+        const N: usize = 128;
+
+        let mut rng = rand::thread_rng();
+        for n in 0..N {
+            let mut map = HashMap::new();
+            for i in 0..n {
+                assert!(map.insert(i, 2 * i).is_none());
+            }
+            let hash_builder = map.hasher().clone();
+
+            let mut it = unsafe { map.table.iter() };
+            assert_eq!(it.len(), n);
+
+            let mut i = 0;
+            let mut left = n;
+            let mut removed = Vec::new();
+            loop {
+                // occasionally remove some elements
+                if i < n && rng.gen_bool(0.1) {
+                    let hash_value = super::make_hash(&hash_builder, &i);
+
+                    unsafe {
+                        let e = map.table.find(hash_value, |q| q.0.eq(&i));
+                        if let Some(e) = e {
+                            it.reflect_remove(&e);
+                            let t = map.table.remove(e).0;
+                            removed.push(t);
+                            left -= 1;
+                        } else {
+                            assert!(removed.contains(&(i, 2 * i)), "{i} not in {removed:?}");
+                            let e = map.table.insert(
+                                hash_value,
+                                (i, 2 * i),
+                                super::make_hasher::<_, usize, _>(&hash_builder),
+                            );
+                            it.reflect_insert(&e);
+                            if let Some(p) = removed.iter().position(|e| e == &(i, 2 * i)) {
+                                removed.swap_remove(p);
+                            }
+                            left += 1;
+                        }
+                    }
+                }
+
+                let e = it.next();
+                if e.is_none() {
+                    break;
+                }
+                assert!(i < n);
+                let t = unsafe { e.unwrap().as_ref() };
+                assert!(!removed.contains(t));
+                let (key, value) = t;
+                assert_eq!(*value, 2 * key);
+                i += 1;
+            }
+            assert!(i <= n);
+
+            // just for safety:
+            assert_eq!(map.table.len(), left);
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        use core::hash::BuildHasher;
+        use std::collections::hash_map::DefaultHasher;
+
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
+        const EMPTY_MAP: HashMap<u32, std::string::String, MyHasher> =
+            HashMap::with_hasher(MyHasher);
+
+        let mut map = EMPTY_MAP;
+        map.insert(17, "seventeen".to_owned());
+        assert_eq!("seventeen", map[&17]);
+    }
+
+    #[test]
+    fn test_get_each_mut() {
+        let mut map = HashMap::new();
+        map.insert("foo".to_owned(), 0);
+        map.insert("bar".to_owned(), 10);
+        map.insert("baz".to_owned(), 20);
+        map.insert("qux".to_owned(), 30);
+
+        let xs = map.get_many_mut(["foo", "qux"]);
+        assert_eq!(xs, Some([&mut 0, &mut 30]));
+
+        let xs = map.get_many_mut(["foo", "dud"]);
+        assert_eq!(xs, None);
+
+        let xs = map.get_many_mut(["foo", "foo"]);
+        assert_eq!(xs, None);
+
+        let ys = map.get_many_key_value_mut(["bar", "baz"]);
+        assert_eq!(
+            ys,
+            Some([(&"bar".to_owned(), &mut 10), (&"baz".to_owned(), &mut 20),]),
+        );
+
+        let ys = map.get_many_key_value_mut(["bar", "dip"]);
+        assert_eq!(ys, None);
+
+        let ys = map.get_many_key_value_mut(["baz", "baz"]);
+        assert_eq!(ys, None);
+    }
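+
+    // The `None` results above are deliberate: `get_many_mut` must refuse both
+    // missing keys and duplicate keys, since returning two `&mut` references
+    // to the same entry would alias mutable borrows.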
+
+    #[test]
+    #[should_panic = "panic in drop"]
+    fn test_clone_from_double_drop() {
+        #[derive(Clone)]
+        struct CheckedDrop {
+            panic_in_drop: bool,
+            dropped: bool,
+        }
+        impl Drop for CheckedDrop {
+            fn drop(&mut self) {
+                if self.panic_in_drop {
+                    self.dropped = true;
+                    panic!("panic in drop");
+                }
+                if self.dropped {
+                    panic!("double drop");
+                }
+                self.dropped = true;
+            }
+        }
+        const DISARMED: CheckedDrop = CheckedDrop {
+            panic_in_drop: false,
+            dropped: false,
+        };
+        const ARMED: CheckedDrop = CheckedDrop {
+            panic_in_drop: true,
+            dropped: false,
+        };
+
+        let mut map1 = HashMap::new();
+        map1.insert(1, DISARMED);
+        map1.insert(2, DISARMED);
+        map1.insert(3, DISARMED);
+        map1.insert(4, DISARMED);
+
+        let mut map2 = HashMap::new();
+        map2.insert(1, DISARMED);
+        map2.insert(2, ARMED);
+        map2.insert(3, DISARMED);
+        map2.insert(4, DISARMED);
+
+        map2.clone_from(&map1);
+    }
+
+    #[test]
+    #[should_panic = "panic in clone"]
+    fn test_clone_from_memory_leaks() {
+        use alloc::vec::Vec;
+
+        struct CheckedClone {
+            panic_in_clone: bool,
+            need_drop: Vec<i32>,
+        }
+        impl Clone for CheckedClone {
+            fn clone(&self) -> Self {
+                if self.panic_in_clone {
+                    panic!("panic in clone")
+                }
+                Self {
+                    panic_in_clone: self.panic_in_clone,
+                    need_drop: self.need_drop.clone(),
+                }
+            }
+        }
+        let mut map1 = HashMap::new();
+        map1.insert(
+            1,
+            CheckedClone {
+                panic_in_clone: false,
+                need_drop: vec![0, 1, 2],
+            },
+        );
+        map1.insert(
+            2,
+            CheckedClone {
+                panic_in_clone: false,
+                need_drop: vec![3, 4, 5],
+            },
+        );
+        map1.insert(
+            3,
+            CheckedClone {
+                panic_in_clone: true,
+                need_drop: vec![6, 7, 8],
+            },
+        );
+        let _map2 = map1.clone();
+    }
+
+    struct MyAllocInner {
+        drop_count: Arc<AtomicI8>,
+    }
+
+    #[derive(Clone)]
+    struct MyAlloc {
+        _inner: Arc<MyAllocInner>,
+    }
+
+    impl MyAlloc {
+        fn new(drop_count: Arc<AtomicI8>) -> Self {
+            MyAlloc {
+                _inner: Arc::new(MyAllocInner { drop_count }),
+            }
+        }
+    }
+
+    impl Drop for MyAllocInner {
+        fn drop(&mut self) {
+            println!("MyAlloc freed.");
+            self.drop_count.fetch_sub(1, Ordering::SeqCst);
+        }
+    }
+
+    unsafe impl Allocator for MyAlloc {
+        fn allocate(&self, layout: Layout) -> std::result::Result<NonNull<[u8]>, AllocError> {
+            let g = Global;
+            g.allocate(layout)
+        }
+
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            let g = Global;
+            g.deallocate(ptr, layout)
+        }
+    }
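+
+    // `MyAlloc` delegates all allocation to `Global`; its only purpose is to
+    // share a single `MyAllocInner` across clones so that `drop_count` is
+    // decremented exactly once, when the last clone held by a map (or one of
+    // its iterators) is dropped. The tests below assert on that counter.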
+
+    #[test]
+    fn test_hashmap_into_iter_bug() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(1));
+
+        {
+            let mut map = HashMap::with_capacity_in(10, MyAlloc::new(dropped.clone()));
+            for i in 0..10 {
+                map.entry(i).or_insert_with(|| "i".to_string());
+            }
+
+            for (k, v) in map {
+                println!("{}, {}", k, v);
+            }
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+
+    #[derive(Debug)]
+    struct CheckedCloneDrop<T> {
+        panic_in_clone: bool,
+        panic_in_drop: bool,
+        dropped: bool,
+        data: T,
+    }
+
+    impl<T> CheckedCloneDrop<T> {
+        fn new(panic_in_clone: bool, panic_in_drop: bool, data: T) -> Self {
+            CheckedCloneDrop {
+                panic_in_clone,
+                panic_in_drop,
+                dropped: false,
+                data,
+            }
+        }
+    }
+
+    impl<T: Clone> Clone for CheckedCloneDrop<T> {
+        fn clone(&self) -> Self {
+            if self.panic_in_clone {
+                panic!("panic in clone")
+            }
+            Self {
+                panic_in_clone: self.panic_in_clone,
+                panic_in_drop: self.panic_in_drop,
+                dropped: self.dropped,
+                data: self.data.clone(),
+            }
+        }
+    }
+
+    impl<T> Drop for CheckedCloneDrop<T> {
+        fn drop(&mut self) {
+            if self.panic_in_drop {
+                self.dropped = true;
+                panic!("panic in drop");
+            }
+            if self.dropped {
+                panic!("double drop");
+            }
+            self.dropped = true;
+        }
+    }
+
+    /// Returns a hashmap with a predefined distribution of elements, where
+    /// all elements are located in the same order as the elements returned
+    /// by the iterator.
+    ///
+    /// This function does not panic, but returns an error as a `String`
+    /// to distinguish between a test panic and an error in the input data.
+    fn get_test_map<I, T, A>(
+        iter: I,
+        mut fun: impl FnMut(u64) -> T,
+        alloc: A,
+    ) -> Result<HashMap<u64, CheckedCloneDrop<T>, DefaultHashBuilder, A>, String>
+    where
+        I: Iterator<Item = (bool, bool)> + Clone + ExactSizeIterator,
+        A: Allocator,
+        T: PartialEq + core::fmt::Debug,
+    {
+        use crate::scopeguard::guard;
+
+        let mut map: HashMap<u64, CheckedCloneDrop<T>, _, A> =
+            HashMap::with_capacity_in(iter.size_hint().0, alloc);
+        {
+            let mut guard = guard(&mut map, |map| {
+                for (_, value) in map.iter_mut() {
+                    value.panic_in_drop = false
+                }
+            });
+
+            let mut count = 0;
+            // Hash and key must be equal to each other to control where elements are placed.
+            for (panic_in_clone, panic_in_drop) in iter.clone() {
+                if core::mem::needs_drop::<T>() && panic_in_drop {
+                    return Err(String::from(
+                        "panic_in_drop can be set with a type that doesn't need to be dropped",
+                    ));
+                }
+                guard.table.insert(
+                    count,
+                    (
+                        count,
+                        CheckedCloneDrop::new(panic_in_clone, panic_in_drop, fun(count)),
+                    ),
+                    |(k, _)| *k,
+                );
+                count += 1;
+            }
+
+            // Let's check that all elements are located as we wanted
+            let mut check_count = 0;
+            for ((key, value), (panic_in_clone, panic_in_drop)) in guard.iter().zip(iter) {
+                if *key != check_count {
+                    return Err(format!(
+                        "key != check_count,\nkey: `{}`,\ncheck_count: `{}`",
+                        key, check_count
+                    ));
+                }
+                if value.dropped
+                    || value.panic_in_clone != panic_in_clone
+                    || value.panic_in_drop != panic_in_drop
+                    || value.data != fun(check_count)
+                {
+                    return Err(format!(
+                        "Value is not equal to expected,\nvalue: `{:?}`,\nexpected: \
+                        `CheckedCloneDrop {{ panic_in_clone: {}, panic_in_drop: {}, dropped: {}, data: {:?} }}`",
+                        value, panic_in_clone, panic_in_drop, false, fun(check_count)
+                    ));
+                }
+                check_count += 1;
+            }
+
+            if guard.len() != check_count as usize {
+                return Err(format!(
+                    "map.len() != check_count,\nmap.len(): `{}`,\ncheck_count: `{}`",
+                    guard.len(),
+                    check_count
+                ));
+            }
+
+            if count != check_count {
+                return Err(format!(
+                    "count != check_count,\ncount: `{}`,\ncheck_count: `{}`",
+                    count, check_count
+                ));
+            }
+            core::mem::forget(guard);
+        }
+        Ok(map)
+    }
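+
+    // Note on the scopeguard above: if construction bails out early, the guard
+    // disarms `panic_in_drop` on everything inserted so far, so cleaning up
+    // the partially built map cannot itself panic; on success the guard is
+    // defused with `mem::forget` and the armed flags stay in place for the
+    // caller.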
+
+    const DISARMED: bool = false;
+    const ARMED: bool = true;
+
+    const ARMED_FLAGS: [bool; 8] = [
+        DISARMED, DISARMED, DISARMED, ARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+    ];
+
+    const DISARMED_FLAGS: [bool; 8] = [
+        DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+    ];
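+
+    // Exactly one armed slot among eight lets the tests below trigger a panic
+    // at a deterministic element (the fourth) while the remaining seven clone
+    // and drop normally.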
+
+    #[test]
+    #[should_panic = "panic in clone"]
+    fn test_clone_memory_leaks_and_double_drop_one() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let map: HashMap<u64, CheckedCloneDrop<Vec<u64>>, DefaultHashBuilder, MyAlloc> =
+                match get_test_map(
+                    ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                    |n| vec![n],
+                    MyAlloc::new(dropped.clone()),
+                ) {
+                    Ok(map) => map,
+                    Err(msg) => panic!("{msg}"),
+                };
+
+            // Cloning should copy a few elements and then, when the clone
+            // function panics, deallocate its own memory, the memory of the
+            // `dropped: Arc<AtomicI8>` clone, and the memory of the already
+            // cloned elements (the `Vec<u64>` buffers inside `CheckedCloneDrop`).
+            let _map2 = map.clone();
+        }
+    }
+
+    #[test]
+    #[should_panic = "panic in drop"]
+    fn test_clone_memory_leaks_and_double_drop_two() {
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let map: HashMap<u64, CheckedCloneDrop<u64>, DefaultHashBuilder, _> = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                |n| n,
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            let mut map2 = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(ARMED_FLAGS),
+                |n| n,
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            // `clone_from` should drop the old elements of `map2` without
+            // double-dropping any of them and without leaking the allocator.
+            // Elements whose destructor never ran simply leak their memory.
+            map2.clone_from(&map);
+        }
+    }
+
+    /// We check that the table still works if a clone operation from another
+    /// thread ended in a panic (when the two maps have an equal number of buckets).
+    #[test]
+    fn test_catch_panic_clone_from_when_len_is_equal() {
+        use std::thread;
+
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let mut map = match get_test_map(
+                DISARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                |n| vec![n],
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            thread::scope(|s| {
+                let result: thread::ScopedJoinHandle<'_, String> = s.spawn(|| {
+                    let scope_map = match get_test_map(
+                        ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                        |n| vec![n * 2],
+                        MyAlloc::new(dropped.clone()),
+                    ) {
+                        Ok(map) => map,
+                        Err(msg) => return msg,
+                    };
+                    if map.table.buckets() != scope_map.table.buckets() {
+                        return format!(
+                            "map.table.buckets() != scope_map.table.buckets(),\nleft: `{}`,\nright: `{}`",
+                            map.table.buckets(), scope_map.table.buckets()
+                        );
+                    }
+                    map.clone_from(&scope_map);
+                    "We must fail the cloning!!!".to_owned()
+                });
+                if let Ok(msg) = result.join() {
+                    panic!("{msg}")
+                }
+            });
+
+            // Let's check that all iterators work fine and do not return elements
+            // (especially `RawIterRange`, which does not depend on the number of
+            // elements in the table, but looks directly at the control bytes)
+            //
+            // SAFETY: We know for sure that `RawTable` will outlive
+            // the returned `RawIter / RawIterRange` iterator.
+            assert_eq!(map.len(), 0);
+            assert_eq!(map.iter().count(), 0);
+            assert_eq!(unsafe { map.table.iter().count() }, 0);
+            assert_eq!(unsafe { map.table.iter().iter.count() }, 0);
+
+            for idx in 0..map.table.buckets() {
+                let idx = idx as u64;
+                assert!(
+                    map.table.find(idx, |(k, _)| *k == idx).is_none(),
+                    "Index: {idx}"
+                );
+            }
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+
+    /// We check that the table still works if a clone operation from another
+    /// thread ended in a panic (when the two maps have an unequal number of buckets).
+    #[test]
+    fn test_catch_panic_clone_from_when_len_is_not_equal() {
+        use std::thread;
+
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        {
+            assert_eq!(ARMED_FLAGS.len(), DISARMED_FLAGS.len());
+
+            let mut map = match get_test_map(
+                [DISARMED].into_iter().zip([DISARMED]),
+                |n| vec![n],
+                MyAlloc::new(dropped.clone()),
+            ) {
+                Ok(map) => map,
+                Err(msg) => panic!("{msg}"),
+            };
+
+            thread::scope(|s| {
+                let result: thread::ScopedJoinHandle<'_, String> = s.spawn(|| {
+                    let scope_map = match get_test_map(
+                        ARMED_FLAGS.into_iter().zip(DISARMED_FLAGS),
+                        |n| vec![n * 2],
+                        MyAlloc::new(dropped.clone()),
+                    ) {
+                        Ok(map) => map,
+                        Err(msg) => return msg,
+                    };
+                    if map.table.buckets() == scope_map.table.buckets() {
+                        return format!(
+                            "map.table.buckets() == scope_map.table.buckets(): `{}`",
+                            map.table.buckets()
+                        );
+                    }
+                    map.clone_from(&scope_map);
+                    "We must fail the cloning!!!".to_owned()
+                });
+                if let Ok(msg) = result.join() {
+                    panic!("{msg}")
+                }
+            });
+
+            // Let's check that all iterators work fine and do not return elements
+            // (especially `RawIterRange`, which does not depend on the number of
+            // elements in the table, but looks directly at the control bytes)
+            //
+            // SAFETY: We know for sure that `RawTable` will outlive
+            // the returned `RawIter / RawIterRange` iterator.
+            assert_eq!(map.len(), 0);
+            assert_eq!(map.iter().count(), 0);
+            assert_eq!(unsafe { map.table.iter().count() }, 0);
+            assert_eq!(unsafe { map.table.iter().iter.count() }, 0);
+
+            for idx in 0..map.table.buckets() {
+                let idx = idx as u64;
+                assert!(
+                    map.table.find(idx, |(k, _)| *k == idx).is_none(),
+                    "Index: {idx}"
+                );
+            }
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 0);
+    }
+}
+
\ No newline at end of file diff --git a/src/hashbrown/raw/alloc.rs.html b/src/hashbrown/raw/alloc.rs.html new file mode 100644 index 000000000..56d94a5f4 --- /dev/null +++ b/src/hashbrown/raw/alloc.rs.html @@ -0,0 +1,173 @@ +alloc.rs - source
+
pub(crate) use self::inner::{do_alloc, Allocator, Global};
+
+// Nightly case.
+// Uses the unstable `allocator_api` feature.
+// This is compatible with `allocator-api2`, which can be enabled or not.
+// This is the configuration used when building for `std`.
+#[cfg(feature = "nightly")]
+mod inner {
+    use crate::alloc::alloc::Layout;
+    pub use crate::alloc::alloc::{Allocator, Global};
+    use core::ptr::NonNull;
+
+    #[allow(clippy::map_err_ignore)]
+    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
+        match alloc.allocate(layout) {
+            Ok(ptr) => Ok(ptr.as_non_null_ptr()),
+            Err(_) => Err(()),
+        }
+    }
+}
+
+// Basic non-nightly case.
+// This uses `allocator-api2`, which is enabled by default.
+// If any crate enables "nightly" in `allocator-api2`,
+// this will be equivalent to the nightly case,
+// since `allocator_api2::alloc::Allocator` is then a re-export of
+// `core::alloc::Allocator`.
+#[cfg(all(not(feature = "nightly"), feature = "allocator-api2"))]
+mod inner {
+    use crate::alloc::alloc::Layout;
+    pub use allocator_api2::alloc::{Allocator, Global};
+    use core::ptr::NonNull;
+
+    #[allow(clippy::map_err_ignore)]
+    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
+        match alloc.allocate(layout) {
+            Ok(ptr) => Ok(ptr.cast()),
+            Err(_) => Err(()),
+        }
+    }
+}
+
+// No-defaults case.
+// This is used when building with default features turned off and
+// neither `nightly` nor `allocator-api2` is enabled,
+// which makes it impossible to use any custom allocator with the collections
+// defined in this crate.
+// Any crate in the build tree can enable `allocator-api2` or `nightly`
+// without disturbing users that don't want to use them.
+#[cfg(not(any(feature = "nightly", feature = "allocator-api2")))]
+mod inner {
+    use crate::alloc::alloc::{alloc, dealloc, Layout};
+    use core::ptr::NonNull;
+
+    #[allow(clippy::missing_safety_doc)] // not exposed outside of this crate
+    pub unsafe trait Allocator {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<u8>, ()>;
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout);
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct Global;
+
+    unsafe impl Allocator for Global {
+        #[inline]
+        fn allocate(&self, layout: Layout) -> Result<NonNull<u8>, ()> {
+            unsafe { NonNull::new(alloc(layout)).ok_or(()) }
+        }
+        #[inline]
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            dealloc(ptr.as_ptr(), layout);
+        }
+    }
+
+    impl Default for Global {
+        #[inline]
+        fn default() -> Self {
+            Global
+        }
+    }
+
+    pub(crate) fn do_alloc<A: Allocator>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
+        alloc.allocate(layout)
+    }
+}
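+
+// Illustrative sketch (not part of the upstream source): whichever `inner`
+// module is compiled in, the crate-facing surface is the same. `do_alloc`
+// turns a `Layout` into a `NonNull<u8>` or an opaque error, and the caller
+// frees the block through `Allocator::deallocate` with the same layout.
+#[cfg(test)]
+#[test]
+fn do_alloc_round_trip_sketch() {
+    use core::alloc::Layout;
+
+    let layout = Layout::from_size_align(64, 8).unwrap();
+    let ptr = do_alloc(&Global, layout).expect("allocation failed");
+    // SAFETY: `ptr` was just allocated by `Global` with this exact layout.
+    unsafe { Global.deallocate(ptr, layout) };
+}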
+
\ No newline at end of file diff --git a/src/hashbrown/raw/bitmask.rs.html b/src/hashbrown/raw/bitmask.rs.html new file mode 100644 index 000000000..ea481e511 --- /dev/null +++ b/src/hashbrown/raw/bitmask.rs.html @@ -0,0 +1,267 @@ +bitmask.rs - source
+use super::imp::{
+    BitMaskWord, NonZeroBitMaskWord, BITMASK_ITER_MASK, BITMASK_MASK, BITMASK_STRIDE,
+};
+
+/// A bit mask which contains the result of a `Match` operation on a `Group` and
+/// allows iterating through them.
+///
+/// The bit mask is arranged so that low-order bits represent lower memory
+/// addresses for group match results.
+///
+/// For implementation reasons, the bits in the set may be sparsely packed with
+/// groups of 8 bits representing one element. If any of these bits are non-zero
+/// then this element is considered to be true in the mask. If this is the
+/// case, `BITMASK_STRIDE` will be 8 to indicate a divide-by-8 should be
+/// performed on counts/indices to normalize this difference. `BITMASK_MASK` is
+/// similarly a mask of all the actually-used bits.
+///
+/// To iterate over a bit mask, it must be converted to a form where only 1 bit
+/// is set per element. This is done by applying `BITMASK_ITER_MASK` on the
+/// mask bits.
+#[derive(Copy, Clone)]
+pub(crate) struct BitMask(pub(crate) BitMaskWord);
+
+#[allow(clippy::use_self)]
+impl BitMask {
+    /// Returns a new `BitMask` with all bits inverted.
+    #[inline]
+    #[must_use]
+    #[allow(dead_code)]
+    pub(crate) fn invert(self) -> Self {
+        BitMask(self.0 ^ BITMASK_MASK)
+    }
+
+    /// Returns a new `BitMask` with the lowest bit removed.
+    #[inline]
+    #[must_use]
+    fn remove_lowest_bit(self) -> Self {
+        BitMask(self.0 & (self.0 - 1))
+    }
+
+    /// Returns whether the `BitMask` has at least one set bit.
+    #[inline]
+    pub(crate) fn any_bit_set(self) -> bool {
+        self.0 != 0
+    }
+
+    /// Returns the first set bit in the `BitMask`, if there is one.
+    #[inline]
+    pub(crate) fn lowest_set_bit(self) -> Option<usize> {
+        if let Some(nonzero) = NonZeroBitMaskWord::new(self.0) {
+            Some(Self::nonzero_trailing_zeros(nonzero))
+        } else {
+            None
+        }
+    }
+
+    /// Returns the number of trailing zeroes in the `BitMask`.
+    #[inline]
+    pub(crate) fn trailing_zeros(self) -> usize {
+        // ARM doesn't have a trailing_zeroes instruction, and instead uses
+        // reverse_bits (RBIT) + leading_zeroes (CLZ). However older ARM
+        // versions (pre-ARMv7) don't have RBIT and need to emulate it
+        // instead. Since we only have 1 bit set in each byte on ARM, we can
+        // use swap_bytes (REV) + leading_zeroes instead.
+        if cfg!(target_arch = "arm") && BITMASK_STRIDE % 8 == 0 {
+            self.0.swap_bytes().leading_zeros() as usize / BITMASK_STRIDE
+        } else {
+            self.0.trailing_zeros() as usize / BITMASK_STRIDE
+        }
+    }
+
+    /// Same as above but takes a `NonZeroBitMaskWord`.
+    #[inline]
+    fn nonzero_trailing_zeros(nonzero: NonZeroBitMaskWord) -> usize {
+        if cfg!(target_arch = "arm") && BITMASK_STRIDE % 8 == 0 {
+            // SAFETY: A byte-swapped non-zero value is still non-zero.
+            let swapped = unsafe { NonZeroBitMaskWord::new_unchecked(nonzero.get().swap_bytes()) };
+            swapped.leading_zeros() as usize / BITMASK_STRIDE
+        } else {
+            nonzero.trailing_zeros() as usize / BITMASK_STRIDE
+        }
+    }
+
+    /// Returns the number of leading zeroes in the `BitMask`.
+    #[inline]
+    pub(crate) fn leading_zeros(self) -> usize {
+        self.0.leading_zeros() as usize / BITMASK_STRIDE
+    }
+}
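+
+// Illustrative sketch (not part of the upstream source): a zero mask reports
+// no matches, while a mask whose lowest element has any bit set reports
+// index 0, regardless of whether the platform packs elements with a stride
+// of 1 or 8 bits.
+#[cfg(test)]
+#[test]
+fn bitmask_basics_sketch() {
+    let empty = BitMask(0);
+    assert!(!empty.any_bit_set());
+    assert_eq!(empty.lowest_set_bit(), None);
+    assert_eq!(empty.into_iter().count(), 0);
+
+    let lowest = BitMask(1);
+    assert!(lowest.any_bit_set());
+    assert_eq!(lowest.lowest_set_bit(), Some(0));
+    assert_eq!(lowest.trailing_zeros(), 0);
+}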
+
+impl IntoIterator for BitMask {
+    type Item = usize;
+    type IntoIter = BitMaskIter;
+
+    #[inline]
+    fn into_iter(self) -> BitMaskIter {
+        // A BitMask only requires each element (group of bits) to be non-zero.
+        // However for iteration we need each element to only contain 1 bit.
+        BitMaskIter(BitMask(self.0 & BITMASK_ITER_MASK))
+    }
+}
+
+/// Iterator over the contents of a `BitMask`, returning the indices of set
+/// bits.
+#[derive(Copy, Clone)]
+pub(crate) struct BitMaskIter(pub(crate) BitMask);
+
+impl BitMaskIter {
+    /// Flip the bit in the mask for the entry at the given index.
+    ///
+    /// Returns the bit's previous state.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    #[cfg(feature = "raw")]
+    pub(crate) unsafe fn flip(&mut self, index: usize) -> bool {
+        // NOTE: The + BITMASK_STRIDE - 1 is to set the high bit.
+        let mask = 1 << (index * BITMASK_STRIDE + BITMASK_STRIDE - 1);
+        self.0 .0 ^= mask;
+        // The bit was set if the bit is now 0.
+        self.0 .0 & mask == 0
+    }
+}
+
+impl Iterator for BitMaskIter {
+    type Item = usize;
+
+    #[inline]
+    fn next(&mut self) -> Option<usize> {
+        let bit = self.0.lowest_set_bit()?;
+        self.0 = self.0.remove_lowest_bit();
+        Some(bit)
+    }
+}
+
\ No newline at end of file diff --git a/src/hashbrown/raw/mod.rs.html b/src/hashbrown/raw/mod.rs.html new file mode 100644 index 000000000..ea0cdc297 --- /dev/null +++ b/src/hashbrown/raw/mod.rs.html @@ -0,0 +1,9635 @@ +mod.rs - source
+use crate::alloc::alloc::{handle_alloc_error, Layout};
+use crate::scopeguard::{guard, ScopeGuard};
+use crate::TryReserveError;
+use core::iter::FusedIterator;
+use core::marker::PhantomData;
+use core::mem;
+use core::mem::MaybeUninit;
+use core::ptr::NonNull;
+use core::{hint, ptr};
+
+cfg_if! {
+    // Use the SSE2 implementation if possible: it allows us to scan 16 buckets
+    // at once instead of 8. We don't bother with AVX since it would require
+    // runtime dispatch and wouldn't gain us much anyways: the probability of
+    // finding a match drops off drastically after the first few buckets.
+    //
+    // I attempted an implementation on ARM using NEON instructions, but it
+    // turns out that most NEON instructions have multi-cycle latency, which in
+    // the end outweighs any gains over the generic implementation.
+    if #[cfg(all(
+        target_feature = "sse2",
+        any(target_arch = "x86", target_arch = "x86_64"),
+        not(miri),
+    ))] {
+        mod sse2;
+        use sse2 as imp;
+    } else if #[cfg(all(
+        target_arch = "aarch64",
+        target_feature = "neon",
+        // NEON intrinsics are currently broken on big-endian targets.
+        // See https://github.com/rust-lang/stdarch/issues/1484.
+        target_endian = "little",
+        not(miri),
+    ))] {
+        mod neon;
+        use neon as imp;
+    } else {
+        mod generic;
+        use generic as imp;
+    }
+}
+
+mod alloc;
+pub(crate) use self::alloc::{do_alloc, Allocator, Global};
+
+mod bitmask;
+
+use self::bitmask::BitMaskIter;
+use self::imp::Group;
+
+// Branch prediction hint. This is currently only available on nightly but it
+// consistently improves performance by 10-15%.
+#[cfg(not(feature = "nightly"))]
+use core::convert::identity as likely;
+#[cfg(not(feature = "nightly"))]
+use core::convert::identity as unlikely;
+#[cfg(feature = "nightly")]
+use core::intrinsics::{likely, unlikely};
+
+// FIXME: use strict provenance functions once they are stable.
+// Implement it with a transmute for now.
+#[inline(always)]
+#[allow(clippy::useless_transmute)] // clippy is wrong, cast and transmute are different here
+fn invalid_mut<T>(addr: usize) -> *mut T {
+    unsafe { core::mem::transmute(addr) }
+}
+
+#[inline]
+unsafe fn offset_from<T>(to: *const T, from: *const T) -> usize {
+    to.offset_from(from) as usize
+}
+
+/// Whether memory allocation errors should return an error or abort.
+#[derive(Copy, Clone)]
+enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+impl Fallibility {
+    /// Error to return on capacity overflow.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn capacity_overflow(self) -> TryReserveError {
+        match self {
+            Fallibility::Fallible => TryReserveError::CapacityOverflow,
+            Fallibility::Infallible => panic!("Hash table capacity overflow"),
+        }
+    }
+
+    /// Error to return on allocation error.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn alloc_err(self, layout: Layout) -> TryReserveError {
+        match self {
+            Fallibility::Fallible => TryReserveError::AllocError { layout },
+            Fallibility::Infallible => handle_alloc_error(layout),
+        }
+    }
+}
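+
+// Illustrative sketch (not part of the upstream source): the fallible policy
+// surfaces errors as `TryReserveError` values, while the infallible policy
+// panics or aborts instead of returning.
+#[cfg(test)]
+#[test]
+fn fallibility_sketch() {
+    assert!(matches!(
+        Fallibility::Fallible.capacity_overflow(),
+        TryReserveError::CapacityOverflow
+    ));
+    assert!(matches!(
+        Fallibility::Fallible.alloc_err(Layout::new::<u64>()),
+        TryReserveError::AllocError { .. }
+    ));
+}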
+
+trait SizedTypeProperties: Sized {
+    const IS_ZERO_SIZED: bool = mem::size_of::<Self>() == 0;
+    const NEEDS_DROP: bool = mem::needs_drop::<Self>();
+}
+
+impl<T> SizedTypeProperties for T {}
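+
+// Illustrative sketch (not part of the upstream source): the blanket impl
+// above gives every type these two compile-time properties.
+#[cfg(test)]
+#[test]
+fn sized_type_properties_sketch() {
+    assert!(<() as SizedTypeProperties>::IS_ZERO_SIZED);
+    assert!(!<u64 as SizedTypeProperties>::IS_ZERO_SIZED);
+    // Plain integers have no destructor to run.
+    assert!(!<u64 as SizedTypeProperties>::NEEDS_DROP);
+}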
+
+/// Control byte value for an empty bucket.
+const EMPTY: u8 = 0b1111_1111;
+
+/// Control byte value for a deleted bucket.
+const DELETED: u8 = 0b1000_0000;
+
+/// Checks whether a control byte represents a full bucket (top bit is clear).
+#[inline]
+fn is_full(ctrl: u8) -> bool {
+    ctrl & 0x80 == 0
+}
+
+/// Checks whether a control byte represents a special value (top bit is set).
+#[inline]
+fn is_special(ctrl: u8) -> bool {
+    ctrl & 0x80 != 0
+}
+
+/// Checks whether a special control value is EMPTY (just check 1 bit).
+#[inline]
+fn special_is_empty(ctrl: u8) -> bool {
+    debug_assert!(is_special(ctrl));
+    ctrl & 0x01 != 0
+}
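+
+// Illustrative sketch (not part of the upstream source): EMPTY and DELETED
+// both have the top bit set (special), EMPTY additionally has the low bit
+// set, and any 7-bit `h2` value stored in a full bucket has the top bit clear.
+#[cfg(test)]
+#[test]
+fn control_byte_sketch() {
+    assert!(is_special(EMPTY) && special_is_empty(EMPTY));
+    assert!(is_special(DELETED) && !special_is_empty(DELETED));
+    assert!(is_full(0b0101_1010));
+    assert!(!is_full(EMPTY) && !is_full(DELETED));
+}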
+
+/// Primary hash function, used to select the initial bucket to probe from.
+#[inline]
+#[allow(clippy::cast_possible_truncation)]
+fn h1(hash: u64) -> usize {
+    // On 32-bit platforms we simply ignore the higher hash bits.
+    hash as usize
+}
+
+// Constant for the `h2` function that grabs the top 7 bits of the hash.
+const MIN_HASH_LEN: usize = if mem::size_of::<usize>() < mem::size_of::<u64>() {
+    mem::size_of::<usize>()
+} else {
+    mem::size_of::<u64>()
+};
+
+/// Secondary hash function, saved in the low 7 bits of the control byte.
+#[inline]
+#[allow(clippy::cast_possible_truncation)]
+fn h2(hash: u64) -> u8 {
+    // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit
+    // value, some hash functions (such as FxHash) produce a usize result
+    // instead, which means that the top 32 bits are 0 on 32-bit platforms.
+    // So we use MIN_HASH_LEN constant to handle this.
+    let top7 = hash >> (MIN_HASH_LEN * 8 - 7);
+    (top7 & 0x7f) as u8 // truncation
+}
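+
+// Illustrative sketch (not part of the upstream source), assuming a 64-bit
+// target: `h1` is the full hash truncated to `usize`, and `h2` keeps only the
+// top 7 bits.
+#[cfg(test)]
+#[test]
+fn h1_h2_sketch() {
+    if mem::size_of::<usize>() == 8 {
+        let hash: u64 = 0xABCD_0123_4567_89EF;
+        assert_eq!(h1(hash), hash as usize);
+        // Top byte is 0xAB = 0b1010_1011, so the top 7 bits are 0b101_0101.
+        assert_eq!(h2(hash), 0b101_0101);
+    }
+}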
+
+/// Probe sequence based on triangular numbers, which is guaranteed (since our
+/// table size is a power of two) to visit every group of elements exactly once.
+///
+/// A triangular probe has us jump by 1 more group every time. So first we
+/// jump by 1 group (meaning we just continue our linear scan), then 2 groups
+/// (skipping over 1 group), then 3 groups (skipping over 2 groups), and so on.
+///
+/// Proof that the probe will visit every group in the table:
+/// <https://fgiesen.wordpress.com/2015/02/22/triangular-numbers-mod-2n/>
+struct ProbeSeq {
+    pos: usize,
+    stride: usize,
+}
+
+impl ProbeSeq {
+    #[inline]
+    fn move_next(&mut self, bucket_mask: usize) {
+        // We should have found an empty bucket by now and ended the probe.
+        debug_assert!(
+            self.stride <= bucket_mask,
+            "Went past end of probe sequence"
+        );
+
+        self.stride += Group::WIDTH;
+        self.pos += self.stride;
+        self.pos &= bucket_mask;
+    }
+}
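+
+// Illustrative sketch (not part of the upstream source): for a table with
+// 8 groups worth of buckets, the triangular probe sequence starting at
+// position 0 visits every group exactly once before wrapping around.
+#[cfg(test)]
+#[test]
+fn probe_seq_sketch() {
+    let buckets = 8 * Group::WIDTH;
+    let bucket_mask = buckets - 1;
+    let mut seq = ProbeSeq { pos: 0, stride: 0 };
+    let mut visited = [false; 8];
+    visited[seq.pos / Group::WIDTH] = true;
+    for _ in 0..7 {
+        seq.move_next(bucket_mask);
+        visited[seq.pos / Group::WIDTH] = true;
+    }
+    assert!(visited.iter().all(|&v| v));
+}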
+
+/// Returns the number of buckets needed to hold the given number of items,
+/// taking the maximum load factor into account.
+///
+/// Returns `None` if an overflow occurs.
+// Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258
+#[cfg_attr(target_os = "emscripten", inline(never))]
+#[cfg_attr(not(target_os = "emscripten"), inline)]
+fn capacity_to_buckets(cap: usize) -> Option<usize> {
+    debug_assert_ne!(cap, 0);
+
+    // For small tables we require at least 1 empty bucket so that lookups are
+    // guaranteed to terminate if an element doesn't exist in the table.
+    if cap < 8 {
+        // We don't bother with a table size of 2 buckets since that can only
+        // hold a single element. Instead we skip directly to a 4 bucket table
+        // which can hold 3 elements.
+        return Some(if cap < 4 { 4 } else { 8 });
+    }
+
+    // Otherwise require 1/8 buckets to be empty (87.5% load)
+    //
+    // Be careful when modifying this, calculate_layout relies on the
+    // overflow check here.
+    let adjusted_cap = cap.checked_mul(8)? / 7;
+
+    // Any overflows will have been caught by the checked_mul. Also, any
+    // rounding errors from the division above will be cleaned up by
+    // next_power_of_two (which can't overflow because of the previous division).
+    Some(adjusted_cap.next_power_of_two())
+}
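+
+// Illustrative sketch (not part of the upstream source) of the mapping above:
+// small capacities round up to 4 or 8 buckets, larger ones to the next power
+// of two that can hold `cap` items at 87.5% load.
+#[cfg(test)]
+#[test]
+fn capacity_to_buckets_sketch() {
+    assert_eq!(capacity_to_buckets(1), Some(4));
+    assert_eq!(capacity_to_buckets(3), Some(4));
+    assert_eq!(capacity_to_buckets(4), Some(8));
+    assert_eq!(capacity_to_buckets(14), Some(16));
+    assert_eq!(capacity_to_buckets(15), Some(32));
+    // The `cap * 8` step overflows for huge requests.
+    assert_eq!(capacity_to_buckets(usize::MAX), None);
+}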
+
+/// Returns the maximum effective capacity for the given bucket mask, taking
+/// the maximum load factor into account.
+#[inline]
+fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
+    if bucket_mask < 8 {
+        // For tables with 1/2/4/8 buckets, we always reserve one empty slot.
+        // Keep in mind that the bucket mask is one less than the bucket count.
+        bucket_mask
+    } else {
+        // For larger tables we reserve 12.5% of the slots as empty.
+        ((bucket_mask + 1) / 8) * 7
+    }
+}
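+
+// Illustrative sketch (not part of the upstream source): this is the reverse
+// direction of `capacity_to_buckets`; small tables keep one slot free, larger
+// tables keep 1/8 of their slots free.
+#[cfg(test)]
+#[test]
+fn bucket_mask_to_capacity_sketch() {
+    assert_eq!(bucket_mask_to_capacity(3), 3); // 4 buckets hold 3 items
+    assert_eq!(bucket_mask_to_capacity(7), 7); // 8 buckets hold 7 items
+    assert_eq!(bucket_mask_to_capacity(15), 14); // 16 buckets hold 14 items
+    assert_eq!(bucket_mask_to_capacity(63), 56); // 64 buckets hold 56 items
+}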
+
+/// Helper which allows the max calculation for ctrl_align to be statically computed for each T
+/// while keeping the rest of `calculate_layout_for` independent of `T`
+#[derive(Copy, Clone)]
+struct TableLayout {
+    size: usize,
+    ctrl_align: usize,
+}
+
+impl TableLayout {
+    #[inline]
+    const fn new<T>() -> Self {
+        let layout = Layout::new::<T>();
+        Self {
+            size: layout.size(),
+            ctrl_align: if layout.align() > Group::WIDTH {
+                layout.align()
+            } else {
+                Group::WIDTH
+            },
+        }
+    }
+
+    #[inline]
+    fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> {
+        debug_assert!(buckets.is_power_of_two());
+
+        let TableLayout { size, ctrl_align } = self;
+        // Manual layout calculation since Layout methods are not yet stable.
+        let ctrl_offset =
+            size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1);
+        let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?;
+
+        // We need an additional check to ensure that the allocation doesn't
+        // exceed `isize::MAX` (https://github.com/rust-lang/rust/pull/95295).
+        if len > isize::MAX as usize - (ctrl_align - 1) {
+            return None;
+        }
+
+        Some((
+            unsafe { Layout::from_size_align_unchecked(len, ctrl_align) },
+            ctrl_offset,
+        ))
+    }
+}
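+
+// Illustrative sketch (not part of the upstream source): the data part comes
+// first, rounded up to the control-byte alignment, followed by
+// `buckets + Group::WIDTH` control bytes.
+#[cfg(test)]
+#[test]
+fn table_layout_sketch() {
+    let table_layout = TableLayout::new::<u64>();
+    let buckets = 8;
+    let (layout, ctrl_offset) = table_layout.calculate_layout_for(buckets).unwrap();
+    assert_eq!(ctrl_offset % table_layout.ctrl_align, 0);
+    assert!(ctrl_offset >= buckets * mem::size_of::<u64>());
+    assert_eq!(layout.size(), ctrl_offset + buckets + Group::WIDTH);
+}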
+
+/// A reference to an empty bucket into which an element can be inserted.
+pub struct InsertSlot {
+    index: usize,
+}
+
+/// A reference to a hash table bucket containing a `T`.
+///
+/// This is usually just a pointer to the element itself. However if the element
+/// is a ZST, then we instead track the index of the element in the table so
+/// that `erase` works properly.
+pub struct Bucket<T> {
+    // Actually it is a pointer to the next element rather than the element itself;
+    // this is needed to maintain pointer arithmetic invariants, since keeping a
+    // direct pointer to the element introduces difficulty.
+    // `NonNull` is used for variance and niche layout.
+    ptr: NonNull<T>,
+}
+
+// This Send impl is needed for rayon support. This is safe since Bucket is
+// never exposed in a public API.
+unsafe impl<T> Send for Bucket<T> {}
+
+impl<T> Clone for Bucket<T> {
+    #[inline]
+    fn clone(&self) -> Self {
+        Self { ptr: self.ptr }
+    }
+}
+
+impl<T> Bucket<T> {
+    /// Creates a [`Bucket`] that contains a pointer to the data.
+    /// The pointer is computed as an offset from the given `base`
+    /// pointer (a convenience for `base.as_ptr().sub(index)`).
+    ///
+    /// `index` is in units of `T`; e.g., an `index` of 3 represents a pointer
+    /// offset of `3 * size_of::<T>()` bytes.
+    ///
+    /// If the `T` is a ZST, then we instead track the index of the element
+    /// in the table so that `erase` works properly (return
+    /// `NonNull::new_unchecked((index + 1) as *mut T)`)
+    ///
+    /// # Safety
+    ///
+    /// If `mem::size_of::<T>() != 0`, then the safety rules are directly derived
+    /// from the safety rules for [`<*mut T>::sub`] method of `*mut T` and the safety
+    /// rules of [`NonNull::new_unchecked`] function.
+    ///
+    /// Thus, in order to uphold the safety contracts for the [`<*mut T>::sub`] method
+    /// and [`NonNull::new_unchecked`] function, as well as for the correct
+    /// logic of the work of this crate, the following rules are necessary and
+    /// sufficient:
+    ///
+    /// * the `base` pointer must not be `dangling` and must point to the
+    ///   end of the first `value element` from the `data part` of the table, i.e.
+    ///   must be the pointer that returned by [`RawTable::data_end`] or by
+    ///   [`RawTableInner::data_end<T>`];
+    ///
+    /// * `index` must not be greater than `RawTableInner.bucket_mask`, i.e.
+    ///   `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)`
+    ///   must be no greater than the number returned by the function
+    ///   [`RawTable::buckets`] or [`RawTableInner::buckets`].
+    ///
+    /// If `mem::size_of::<T>() == 0`, then the only requirement is that the
+    /// `index` must not be greater than `RawTableInner.bucket_mask`, i.e.
+    /// `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)`
+    /// must be no greater than the number returned by the function
+    /// [`RawTable::buckets`] or [`RawTableInner::buckets`].
+    ///
+    /// [`Bucket`]: crate::raw::Bucket
+    /// [`<*mut T>::sub`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.sub-1
+    /// [`NonNull::new_unchecked`]: https://doc.rust-lang.org/stable/std/ptr/struct.NonNull.html#method.new_unchecked
+    /// [`RawTable::data_end`]: crate::raw::RawTable::data_end
+    /// [`RawTableInner::data_end<T>`]: RawTableInner::data_end<T>
+    /// [`RawTable::buckets`]: crate::raw::RawTable::buckets
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    #[inline]
+    unsafe fn from_base_index(base: NonNull<T>, index: usize) -> Self {
+        // If mem::size_of::<T>() != 0 then return a pointer to an `element` in
+        // the data part of the table (we start counting from "0", so that
+        // in the expression T[last], the "last" index is actually one less than the
+        // "buckets" number in the table, i.e. "last = RawTableInner.bucket_mask"):
+        //
+        //                   `from_base_index(base, 1).as_ptr()` returns a pointer that
+        //                   points here in the data part of the table
+        //                   (to the start of T1)
+        //                        |
+        //                        |        `base: NonNull<T>` must point here
+        //                        |         (to the end of T0 or to the start of C0)
+        //                        v         v
+        // [Padding], Tlast, ..., |T1|, T0, |C0, C1, ..., Clast
+        //                           ^
+        //                           `from_base_index(base, 1)` returns a pointer
+        //                           that points here in the data part of the table
+        //                           (to the end of T1)
+        //
+        // where: T0...Tlast - our stored data; C0...Clast - control bytes
+        // or metadata for data.
+        let ptr = if T::IS_ZERO_SIZED {
+            // won't overflow because index must be less than length (bucket_mask)
+            // and bucket_mask is guaranteed to be less than `isize::MAX`
+            // (see TableLayout::calculate_layout_for method)
+            invalid_mut(index + 1)
+        } else {
+            base.as_ptr().sub(index)
+        };
+        Self {
+            ptr: NonNull::new_unchecked(ptr),
+        }
+    }
+
+    /// Calculates the index of a [`Bucket`] as the distance between two pointers
+    /// (convenience for `base.as_ptr().offset_from(self.ptr.as_ptr()) as usize`).
+    /// The returned value is in units of T: the distance in bytes divided by
+    /// [`core::mem::size_of::<T>()`].
+    ///
+    /// If the `T` is a ZST, then we return the index of the element in
+    /// the table so that `erase` works properly (return `self.ptr.as_ptr() as usize - 1`).
+    ///
+    /// This function is the inverse of [`from_base_index`].
+    ///
+    /// # Safety
+    ///
+    /// If `mem::size_of::<T>() != 0`, then the safety rules are directly derived
+    /// from the safety rules for [`<*const T>::offset_from`] method of `*const T`.
+    ///
+    /// Thus, in order to uphold the safety contracts for [`<*const T>::offset_from`]
+    /// method, as well as for the correct logic of the work of this crate, the
+    /// following rules are necessary and sufficient:
+    ///
+    /// * `base` contained pointer must not be `dangling` and must point to the
+    ///   end of the first `element` from the `data part` of the table, i.e.
+    ///   must be the pointer returned by [`RawTable::data_end`] or by
+    ///   [`RawTableInner::data_end<T>`];
+    ///
+    /// * `self` also must not contain a dangling pointer;
+    ///
+    /// * both `self` and `base` must be created from the same [`RawTable`]
+    ///   (or [`RawTableInner`]).
+    ///
+    /// If `mem::size_of::<T>() == 0`, this function is always safe.
+    ///
+    /// [`Bucket`]: crate::raw::Bucket
+    /// [`from_base_index`]: crate::raw::Bucket::from_base_index
+    /// [`RawTable::data_end`]: crate::raw::RawTable::data_end
+    /// [`RawTableInner::data_end<T>`]: RawTableInner::data_end<T>
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [`RawTableInner`]: RawTableInner
+    /// [`<*const T>::offset_from`]: https://doc.rust-lang.org/nightly/core/primitive.pointer.html#method.offset_from
+    #[inline]
+    unsafe fn to_base_index(&self, base: NonNull<T>) -> usize {
+        // If mem::size_of::<T>() != 0 then return an index under which we used to store the
+        // `element` in the data part of the table (we start counting from "0", so
+        // that in the expression T[last], the "last" index actually is one less than the
+        // "buckets" number in the table, i.e. "last = RawTableInner.bucket_mask").
+        // For example for 5th element in table calculation is performed like this:
+        //
+        //                        mem::size_of::<T>()
+        //                          |
+        //                          |         `self = from_base_index(base, 5)` that returns pointer
+        //                          |         that points here in the data part of the table
+        //                          |         (to the end of T5)
+        //                          |           |                    `base: NonNull<T>` must point here
+        //                          v           |                    (to the end of T0 or to the start of C0)
+        //                        /???\         v                      v
+        // [Padding], Tlast, ..., |T10|, ..., T5|, T4, T3, T2, T1, T0, |C0, C1, C2, C3, C4, C5, ..., C10, ..., Clast
+        //                                      \__________  __________/
+        //                                                 \/
+        //                                     `bucket.to_base_index(base)` = 5
+        //                                     (base.as_ptr() as usize - self.ptr.as_ptr() as usize) / mem::size_of::<T>()
+        //
+        // where: T0...Tlast - our stored data; C0...Clast - control bytes or metadata for data.
+        if T::IS_ZERO_SIZED {
+            // this can not be UB
+            self.ptr.as_ptr() as usize - 1
+        } else {
+            offset_from(base.as_ptr(), self.ptr.as_ptr())
+        }
+    }
+
+    /// Acquires the underlying raw pointer `*mut T` to `data`.
+    ///
+    /// # Note
+    ///
+    /// If `T` is not [`Copy`], do not use `*mut T` methods that can cause calling the
+    /// destructor of `T` (for example the [`<*mut T>::drop_in_place`] method), because
+    /// for properly dropping the data we also need to clear `data` control bytes. If we
+    /// drop data, but do not clear `data control byte` it leads to double drop when
+    /// [`RawTable`] goes out of scope.
+    ///
+    /// If you modify an already initialized `value`, then [`Hash`] and [`Eq`] on the new
+    /// `T` value and its borrowed form *must* match those for the old `T` value, as the map
+    /// will not re-evaluate where the new value should go, meaning the value may become
+    /// "lost" if its location does not reflect its state.
+    ///
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [`<*mut T>::drop_in_place`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.drop_in_place
+    /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html
+    /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::{Bucket, RawTable};
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let value = ("a", 100);
+    /// let hash = make_hash(&hash_builder, &value.0);
+    ///
+    /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// let bucket: Bucket<(&str, i32)> = table.find(hash, |(k1, _)| k1 == &value.0).unwrap();
+    ///
+    /// assert_eq!(unsafe { &*bucket.as_ptr() }, &("a", 100));
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub fn as_ptr(&self) -> *mut T {
+        if T::IS_ZERO_SIZED {
+            // Just return an arbitrary ZST pointer which is properly aligned
+            // invalid pointer is good enough for ZST
+            invalid_mut(mem::align_of::<T>())
+        } else {
+            unsafe { self.ptr.as_ptr().sub(1) }
+        }
+    }
+
+    /// Creates a new [`Bucket`] that is offset from `self` by the given
+    /// `offset`. The pointer is computed by subtracting `offset` from the `self`
+    /// pointer (a convenience for `self.ptr.as_ptr().sub(offset)`).
+    /// This function is used by iterators.
+    ///
+    /// `offset` is in units of `T`; e.g., an `offset` of 3 represents a pointer
+    /// offset of `3 * size_of::<T>()` bytes.
+    ///
+    /// # Safety
+    ///
+    /// If `mem::size_of::<T>() != 0`, then the safety rules are directly derived
+    /// from the safety rules for [`<*mut T>::sub`] method of `*mut T` and safety
+    /// rules of [`NonNull::new_unchecked`] function.
+    ///
+    /// Thus, in order to uphold the safety contracts for [`<*mut T>::sub`] method
+    /// and [`NonNull::new_unchecked`] function, as well as for the correct
+    /// logic of the work of this crate, the following rules are necessary and
+    /// sufficient:
+    ///
+    /// * The pointer contained in `self` must not be dangling;
+    ///
+    /// * `self.to_base_index() + offset` must not be greater than `RawTableInner.bucket_mask`,
+    ///   i.e. `(self.to_base_index() + offset) <= RawTableInner.bucket_mask` or, in other
+    ///   words, `self.to_base_index() + offset + 1` must be no greater than the number returned
+    ///   by the function [`RawTable::buckets`] or [`RawTableInner::buckets`].
+    ///
+    /// If `mem::size_of::<T>() == 0`, then the only requirement is that
+    /// `self.to_base_index() + offset` must not be greater than `RawTableInner.bucket_mask`,
+    /// i.e. `(self.to_base_index() + offset) <= RawTableInner.bucket_mask` or, in other words,
+    /// `self.to_base_index() + offset + 1` must be no greater than the number returned by the
+    /// function [`RawTable::buckets`] or [`RawTableInner::buckets`].
+    ///
+    /// [`Bucket`]: crate::raw::Bucket
+    /// [`<*mut T>::sub`]: https://doc.rust-lang.org/core/primitive.pointer.html#method.sub-1
+    /// [`NonNull::new_unchecked`]: https://doc.rust-lang.org/stable/std/ptr/struct.NonNull.html#method.new_unchecked
+    /// [`RawTable::buckets`]: crate::raw::RawTable::buckets
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    #[inline]
+    unsafe fn next_n(&self, offset: usize) -> Self {
+        let ptr = if T::IS_ZERO_SIZED {
+            // invalid pointer is good enough for ZST
+            invalid_mut(self.ptr.as_ptr() as usize + offset)
+        } else {
+            self.ptr.as_ptr().sub(offset)
+        };
+        Self {
+            ptr: NonNull::new_unchecked(ptr),
+        }
+    }
+
+    /// Executes the destructor (if any) of the pointed-to `data`.
+    ///
+    /// # Safety
+    ///
+    /// See [`ptr::drop_in_place`] for safety concerns.
+    ///
+    /// You should use [`RawTable::erase`] instead of this function,
+    /// or be careful when calling this function directly, because properly
+    /// dropping the data also requires clearing the corresponding `data`
+    /// control byte. If the data is dropped but the control byte is not
+    /// erased, a double drop occurs when [`RawTable`] goes out of scope.
+    ///
+    /// [`ptr::drop_in_place`]: https://doc.rust-lang.org/core/ptr/fn.drop_in_place.html
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [`RawTable::erase`]: crate::raw::RawTable::erase
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) unsafe fn drop(&self) {
+        self.as_ptr().drop_in_place();
+    }
+
+    /// Reads the `value` from `self` without moving it. This leaves the
+    /// memory in `self` unchanged.
+    ///
+    /// # Safety
+    ///
+    /// See [`ptr::read`] for safety concerns.
+    ///
+    /// You should use [`RawTable::remove`] instead of this function,
+    /// or be careful when calling this function directly, because the
+    /// destructor of `T` runs when the read `value` goes out of scope.
+    /// This can cause a double drop when [`RawTable`] goes out of scope,
+    /// because the `data` control byte has not been erased.
+    ///
+    /// [`ptr::read`]: https://doc.rust-lang.org/core/ptr/fn.read.html
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [`RawTable::remove`]: crate::raw::RawTable::remove
+    #[inline]
+    pub(crate) unsafe fn read(&self) -> T {
+        self.as_ptr().read()
+    }
+
+    /// Overwrites a memory location with the given `value` without reading
+    /// or dropping the old value (like [`ptr::write`] function).
+    ///
+    /// # Safety
+    ///
+    /// See [`ptr::write`] for safety concerns.
+    ///
+    /// # Note
+    ///
+    /// [`Hash`] and [`Eq`] on the new `T` value and its borrowed form *must* match
+    /// those for the old `T` value, as the map will not re-evaluate where the new
+    /// value should go, meaning the value may become "lost" if its location
+    /// does not reflect its state.
+    ///
+    /// [`ptr::write`]: https://doc.rust-lang.org/core/ptr/fn.write.html
+    /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html
+    /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html
+    #[inline]
+    pub(crate) unsafe fn write(&self, val: T) {
+        self.as_ptr().write(val);
+    }
+
+    /// Returns a shared immutable reference to the `value`.
+    ///
+    /// # Safety
+    ///
+    /// See [`NonNull::as_ref`] for safety concerns.
+    ///
+    /// [`NonNull::as_ref`]: https://doc.rust-lang.org/core/ptr/struct.NonNull.html#method.as_ref
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::{Bucket, RawTable};
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let value: (&str, String) = ("A pony", "is a small horse".to_owned());
+    /// let hash = make_hash(&hash_builder, &value.0);
+    ///
+    /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// let bucket: Bucket<(&str, String)> = table.find(hash, |(k, _)| k == &value.0).unwrap();
+    ///
+    /// assert_eq!(
+    ///     unsafe { bucket.as_ref() },
+    ///     &("A pony", "is a small horse".to_owned())
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub unsafe fn as_ref<'a>(&self) -> &'a T {
+        &*self.as_ptr()
+    }
+
+    /// Returns a unique mutable reference to the `value`.
+    ///
+    /// # Safety
+    ///
+    /// See [`NonNull::as_mut`] for safety concerns.
+    ///
+    /// # Note
+    ///
+    /// [`Hash`] and [`Eq`] on the new `T` value and its borrowed form *must* match
+    /// those for the old `T` value, as the map will not re-evaluate where the new
+    /// value should go, meaning the value may become "lost" if its location
+    /// does not reflect its state.
+    ///
+    /// [`NonNull::as_mut`]: https://doc.rust-lang.org/core/ptr/struct.NonNull.html#method.as_mut
+    /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html
+    /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::{Bucket, RawTable};
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let value: (&str, String) = ("A pony", "is a small horse".to_owned());
+    /// let hash = make_hash(&hash_builder, &value.0);
+    ///
+    /// table.insert(hash, value.clone(), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// let bucket: Bucket<(&str, String)> = table.find(hash, |(k, _)| k == &value.0).unwrap();
+    ///
+    /// unsafe {
+    ///     bucket
+    ///         .as_mut()
+    ///         .1
+    ///         .push_str(" less than 147 cm at the withers")
+    /// };
+    /// assert_eq!(
+    ///     unsafe { bucket.as_ref() },
+    ///     &(
+    ///         "A pony",
+    ///         "is a small horse less than 147 cm at the withers".to_owned()
+    ///     )
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub unsafe fn as_mut<'a>(&self) -> &'a mut T {
+        &mut *self.as_ptr()
+    }
+
+    /// Copies `size_of::<T>()` bytes from `other` to `self`. The source
+    /// and destination may *not* overlap.
+    ///
+    /// # Safety
+    ///
+    /// See [`ptr::copy_nonoverlapping`] for safety concerns.
+    ///
+    /// Like [`read`], `copy_nonoverlapping` creates a bitwise copy of `T`, regardless of
+    /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using *both* the values
+    /// in the region beginning at `*self` and the region beginning at `*other` can
+    /// [violate memory safety].
+    ///
+    /// # Note
+    ///
+    /// [`Hash`] and [`Eq`] on the new `T` value and its borrowed form *must* match
+    /// those for the old `T` value, as the map will not re-evaluate where the new
+    /// value should go, meaning the value may become "lost" if its location
+    /// does not reflect its state.
+    ///
+    /// [`ptr::copy_nonoverlapping`]: https://doc.rust-lang.org/core/ptr/fn.copy_nonoverlapping.html
+    /// [`read`]: https://doc.rust-lang.org/core/ptr/fn.read.html
+    /// [violate memory safety]: https://doc.rust-lang.org/std/ptr/fn.read.html#ownership-of-the-returned-value
+    /// [`Hash`]: https://doc.rust-lang.org/core/hash/trait.Hash.html
+    /// [`Eq`]: https://doc.rust-lang.org/core/cmp/trait.Eq.html
+    #[cfg(feature = "raw")]
+    #[inline]
+    pub unsafe fn copy_from_nonoverlapping(&self, other: &Self) {
+        self.as_ptr().copy_from_nonoverlapping(other.as_ptr(), 1);
+    }
+}
+
+/// A raw hash table with an unsafe API.
+pub struct RawTable<T, A: Allocator = Global> {
+    table: RawTableInner,
+    alloc: A,
+    // Tell dropck that we own instances of T.
+    marker: PhantomData<T>,
+}
+
+/// Non-generic part of `RawTable` which allows functions to be instantiated only once regardless
+/// of how many different key-value types are used.
+struct RawTableInner {
+    // Mask to get an index from a hash value. The value is one less than the
+    // number of buckets in the table.
+    bucket_mask: usize,
+
+    // [Padding], T1, T2, ..., Tlast, C1, C2, ...
+    //                                ^ points here
+    ctrl: NonNull<u8>,
+
+    // Number of elements that can be inserted before we need to grow the table
+    growth_left: usize,
+
+    // Number of elements in the table, only really used by len()
+    items: usize,
+}
+
+impl<T> RawTable<T, Global> {
+    /// Creates a new empty hash table without allocating any memory.
+    ///
+    /// In effect this returns a table with exactly 1 bucket. However we can
+    /// leave the data pointer dangling since that bucket is never written to
+    /// due to our load factor forcing us to always have at least 1 free bucket.
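+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature is enabled:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// let table: RawTable<(&str, i32)> = RawTable::new();
+    /// // Nothing has been allocated yet, so no elements can be held without growing.
+    /// assert!(table.is_empty());
+    /// assert_eq!(table.capacity(), 0);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```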
+    #[inline]
+    pub const fn new() -> Self {
+        Self {
+            table: RawTableInner::NEW,
+            alloc: Global,
+            marker: PhantomData,
+        }
+    }
+
+    /// Attempts to allocate a new hash table with at least enough capacity
+    /// for inserting the given number of elements without reallocating.
+    #[cfg(feature = "raw")]
+    pub fn try_with_capacity(capacity: usize) -> Result<Self, TryReserveError> {
+        Self::try_with_capacity_in(capacity, Global)
+    }
+
+    /// Allocates a new hash table with at least enough capacity for inserting
+    /// the given number of elements without reallocating.
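+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature is enabled:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// let table: RawTable<(&str, i32)> = RawTable::with_capacity(10);
+    /// assert!(table.is_empty());
+    /// assert!(table.capacity() >= 10);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```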
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_in(capacity, Global)
+    }
+}
+
+impl<T, A: Allocator> RawTable<T, A> {
+    const TABLE_LAYOUT: TableLayout = TableLayout::new::<T>();
+
+    /// Creates a new empty hash table without allocating any memory, using the
+    /// given allocator.
+    ///
+    /// In effect this returns a table with exactly 1 bucket. However we can
+    /// leave the data pointer dangling since that bucket is never written to
+    /// due to our load factor forcing us to always have at least 1 free bucket.
+    #[inline]
+    pub const fn new_in(alloc: A) -> Self {
+        Self {
+            table: RawTableInner::NEW,
+            alloc,
+            marker: PhantomData,
+        }
+    }
+
+    /// Allocates a new hash table with the given number of buckets.
+    ///
+    /// The control bytes are left uninitialized.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn new_uninitialized(
+        alloc: A,
+        buckets: usize,
+        fallibility: Fallibility,
+    ) -> Result<Self, TryReserveError> {
+        debug_assert!(buckets.is_power_of_two());
+
+        Ok(Self {
+            table: RawTableInner::new_uninitialized(
+                &alloc,
+                Self::TABLE_LAYOUT,
+                buckets,
+                fallibility,
+            )?,
+            alloc,
+            marker: PhantomData,
+        })
+    }
+
+    /// Attempts to allocate a new hash table using the given allocator, with at least enough
+    /// capacity for inserting the given number of elements without reallocating.
+    #[cfg(feature = "raw")]
+    pub fn try_with_capacity_in(capacity: usize, alloc: A) -> Result<Self, TryReserveError> {
+        Ok(Self {
+            table: RawTableInner::fallible_with_capacity(
+                &alloc,
+                Self::TABLE_LAYOUT,
+                capacity,
+                Fallibility::Fallible,
+            )?,
+            alloc,
+            marker: PhantomData,
+        })
+    }
+
+    /// Allocates a new hash table using the given allocator, with at least enough capacity for
+    /// inserting the given number of elements without reallocating.
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self {
+            table: RawTableInner::with_capacity(&alloc, Self::TABLE_LAYOUT, capacity),
+            alloc,
+            marker: PhantomData,
+        }
+    }
+
+    /// Returns a reference to the underlying allocator.
+    #[inline]
+    pub fn allocator(&self) -> &A {
+        &self.alloc
+    }
+
+    /// Returns a pointer to one past the last `data` element in the table, as viewed
+    /// from the start of the allocation.
+    ///
+    /// The caller must ensure that the `RawTable` outlives the returned [`NonNull<T>`],
+    /// otherwise using it may result in [`undefined behavior`].
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    pub fn data_end(&self) -> NonNull<T> {
+        //                        `self.table.ctrl.cast()` returns pointer that
+        //                        points here (to the end of `T0`)
+        //                          ∨
+        // [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, CTa_0, CTa_1, ..., CTa_m
+        //                           \________  ________/
+        //                                    \/
+        //       `n = buckets - 1`, i.e. `RawTable::buckets() - 1`
+        //
+        // where: T0...T_n  - our stored data;
+        //        CT0...CT_n - control bytes or metadata for `data`.
+        //        CTa_0...CTa_m - additional control bytes, where `m = Group::WIDTH - 1` (so that the search
+        //                        with loading `Group` bytes from the heap works properly, even if the result
+        //                        of `h1(hash) & self.bucket_mask` is equal to `self.bucket_mask`). See also
+        //                        `RawTableInner::set_ctrl` function.
+        //
+        // P.S. `h1(hash) & self.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+        // of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+        self.table.ctrl.cast()
+    }
+
+    /// Returns a pointer to the start of the data table.
+    #[inline]
+    #[cfg(any(feature = "raw", feature = "nightly"))]
+    pub unsafe fn data_start(&self) -> NonNull<T> {
+        NonNull::new_unchecked(self.data_end().as_ptr().wrapping_sub(self.buckets()))
+    }
+
+    /// Returns information about the memory allocated by the table.
+    ///
+    /// `RawTable` allocates a single memory block to store both data and metadata.
+    /// This function returns the size and alignment of that allocation and a pointer
+    /// to its beginning; these are the arguments that will be passed to `dealloc`
+    /// when the table is dropped.
+    ///
+    /// This function may be useful for memory profiling.
+    #[inline]
+    #[cfg(feature = "raw")]
+    pub fn allocation_info(&self) -> (NonNull<u8>, Layout) {
+        // SAFETY: We use the same `table_layout` that was used to allocate
+        // this table.
+        unsafe { self.table.allocation_info_or_zero(Self::TABLE_LAYOUT) }
+    }
+
+    /// Returns the index of a bucket from a `Bucket`.
+    #[inline]
+    pub unsafe fn bucket_index(&self, bucket: &Bucket<T>) -> usize {
+        bucket.to_base_index(self.data_end())
+    }
+
+    /// Returns a pointer to an element in the table.
+    ///
+    /// The caller must ensure that the `RawTable` outlives the returned [`Bucket<T>`],
+    /// otherwise using it may result in [`undefined behavior`].
+    ///
+    /// # Safety
+    ///
+    /// If `mem::size_of::<T>() != 0`, then the caller of this function must observe the
+    /// following safety rules:
+    ///
+    /// * The table must already be allocated;
+    ///
+    /// * The `index` must not be greater than the number returned by the [`RawTable::buckets`]
+    ///   function, i.e. `(index + 1) <= self.buckets()`.
+    ///
+    /// It is safe to call this function with an index of zero (`index == 0`) on a table that has
+    /// not been allocated, but using the returned [`Bucket`] results in [`undefined behavior`].
+    ///
+    /// If `mem::size_of::<T>() == 0`, then the only requirement is that the `index` must
+    /// not be greater than the number returned by the [`RawTable::buckets`] function, i.e.
+    /// `(index + 1) <= self.buckets()`.
+    ///
+    /// [`RawTable::buckets`]: RawTable::buckets
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    pub unsafe fn bucket(&self, index: usize) -> Bucket<T> {
+        // If mem::size_of::<T>() != 0, then return a pointer to the `element` in the `data part` of the table
+        // (we start counting from "0", so that in the expression T[n], the "n" index is actually one less than
+        // the number of "buckets" in our `RawTable`, i.e. "n = RawTable::buckets() - 1"):
+        //
+        //           `table.bucket(3).as_ptr()` returns a pointer that points here in the `data`
+        //           part of the `RawTable`, i.e. to the start of T3 (see `Bucket::as_ptr`)
+        //                  |
+        //                  |               `base = self.data_end()` points here
+        //                  |               (to the start of CT0 or to the end of T0)
+        //                  v                 v
+        // [Pad], T_n, ..., |T3|, T2, T1, T0, |CT0, CT1, CT2, CT3, ..., CT_n, CTa_0, CTa_1, ..., CTa_m
+        //                     ^                                              \__________  __________/
+        //        `table.bucket(3)` returns a pointer that points                        \/
+        //         here in the `data` part of the `RawTable` (to              additional control bytes
+        //         the end of T3)                                              `m = Group::WIDTH - 1`
+        //
+        // where: T0...T_n  - our stored data;
+        //        CT0...CT_n - control bytes or metadata for `data`;
+        //        CTa_0...CTa_m - additional control bytes (so that the search with loading `Group` bytes from
+        //                        the heap works properly, even if the result of `h1(hash) & self.table.bucket_mask`
+        //                        is equal to `self.table.bucket_mask`). See also `RawTableInner::set_ctrl` function.
+        //
+        // P.S. `h1(hash) & self.table.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+        // of buckets is a power of two, and `self.table.bucket_mask = self.buckets() - 1`.
+        debug_assert_ne!(self.table.bucket_mask, 0);
+        debug_assert!(index < self.buckets());
+        Bucket::from_base_index(self.data_end(), index)
+    }
+
+    /// Erases an element from the table without dropping it.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn erase_no_drop(&mut self, item: &Bucket<T>) {
+        let index = self.bucket_index(item);
+        self.table.erase(index);
+    }
+
+    /// Erases an element from the table, dropping it in place.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::needless_pass_by_value)]
+    pub unsafe fn erase(&mut self, item: Bucket<T>) {
+        // Erase the element from the table first since drop might panic.
+        self.erase_no_drop(&item);
+        item.drop();
+    }
+
+    /// Finds and erases an element from the table, dropping it in place.
+    /// Returns true if an element was found.
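+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// table.insert(hash, ("a", 10), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// // The first call finds and drops the element, the second finds nothing.
+    /// assert!(table.erase_entry(hash, |(k, _)| *k == "a"));
+    /// assert!(!table.erase_entry(hash, |(k, _)| *k == "a"));
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```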
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        if let Some(bucket) = self.find(hash, eq) {
+            unsafe {
+                self.erase(bucket);
+            }
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Removes an element from the table, returning it.
+    ///
+    /// This also returns an `InsertSlot` pointing to the newly free bucket.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[allow(clippy::needless_pass_by_value)]
+    pub unsafe fn remove(&mut self, item: Bucket<T>) -> (T, InsertSlot) {
+        self.erase_no_drop(&item);
+        (
+            item.read(),
+            InsertSlot {
+                index: self.bucket_index(&item),
+            },
+        )
+    }
+
+    /// Finds and removes an element from the table, returning it.
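+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// table.insert(hash, ("a", 10), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// assert_eq!(table.remove_entry(hash, |(k, _)| *k == "a"), Some(("a", 10)));
+    /// assert!(table.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```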
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { self.remove(bucket).0 }),
+            None => None,
+        }
+    }
+
+    /// Marks all table buckets as empty without dropping their contents.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear_no_drop(&mut self) {
+        self.table.clear_no_drop();
+    }
+
+    /// Removes all elements from the table without freeing the backing memory.
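+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// table.insert(hash, ("a", 10), |val| make_hash(&hash_builder, &val.0));
+    /// assert_eq!(table.len(), 1);
+    ///
+    /// // The elements are dropped, but the backing allocation is kept.
+    /// table.clear();
+    /// assert!(table.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```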
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        if self.is_empty() {
+            // Special case empty table to avoid surprising O(capacity) time.
+            return;
+        }
+        // Ensure that the table is reset even if one of the drops panic
+        let mut self_ = guard(self, |self_| self_.clear_no_drop());
+        unsafe {
+            // SAFETY: ScopeGuard sets to zero the `items` field of the table
+            // even in case of panic during the dropping of the elements so
+            // that there will be no double drop of the elements.
+            self_.table.drop_elements::<T>();
+        }
+    }
+
+    /// Shrinks the table to fit `max(self.len(), min_size)` elements.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
+        // Calculate the minimal number of elements that we need to reserve
+        // space for.
+        let min_size = usize::max(self.table.items, min_size);
+        if min_size == 0 {
+            let mut old_inner = mem::replace(&mut self.table, RawTableInner::NEW);
+            unsafe {
+                // SAFETY:
+                // 1. We call the function only once;
+                // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`]
+                //    and [`TableLayout`] that were used to allocate this table.
+                // 3. If any elements' drop function panics, then there will only be a memory leak,
+                //    because we have replaced the inner table with a new one.
+                old_inner.drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+            }
+            return;
+        }
+
+        // Calculate the number of buckets that we need for this number of
+        // elements. If the calculation overflows then the requested bucket
+        // count must be larger than what we have right now and nothing needs
+        // to be done.
+        let min_buckets = match capacity_to_buckets(min_size) {
+            Some(buckets) => buckets,
+            None => return,
+        };
+
+        // If we have more buckets than we need, shrink the table.
+        if min_buckets < self.buckets() {
+            // Fast path if the table is empty
+            if self.table.items == 0 {
+                let new_inner =
+                    RawTableInner::with_capacity(&self.alloc, Self::TABLE_LAYOUT, min_size);
+                let mut old_inner = mem::replace(&mut self.table, new_inner);
+                unsafe {
+                    // SAFETY:
+                    // 1. We call the function only once;
+                    // 2. We know for sure that `alloc` and `table_layout` matches the [`Allocator`]
+                    //    and [`TableLayout`] that were used to allocate this table.
+                    // 3. If any elements' drop function panics, then there will only be a memory leak,
+                    //    because we have replaced the inner table with a new one.
+                    old_inner.drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+                }
+            } else {
+                // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+                unsafe {
+                    // SAFETY:
+                    // 1. We know for sure that `min_size >= self.table.items`.
+                    // 2. The [`RawTableInner`] must already have properly initialized control bytes since
+                    //    we will never expose RawTable::new_uninitialized in a public API.
+                    if self
+                        .resize(min_size, hasher, Fallibility::Infallible)
+                        .is_err()
+                    {
+                        // SAFETY: The result of calling the `resize` function cannot be an error
+                        // because `fallibility == Fallibility::Infallible`.
+                        hint::unreachable_unchecked()
+                    }
+                }
+            }
+        }
+    }
+
+    /// Ensures that at least `additional` items can be inserted into the table
+    /// without reallocation.
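+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table: RawTable<(&str, i32)> = RawTable::new();
+    ///
+    /// table.reserve(10, |val| make_hash(&hash_builder, &val.0));
+    /// assert!(table.capacity() >= 10);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```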
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
+        if unlikely(additional > self.table.growth_left) {
+            // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+            unsafe {
+                // SAFETY: The [`RawTableInner`] must already have properly initialized control
+                // bytes since we will never expose RawTable::new_uninitialized in a public API.
+                if self
+                    .reserve_rehash(additional, hasher, Fallibility::Infallible)
+                    .is_err()
+                {
+                    // SAFETY: All allocation errors will be caught inside `RawTableInner::reserve_rehash`.
+                    hint::unreachable_unchecked()
+                }
+            }
+        }
+    }
+
+    /// Tries to ensure that at least `additional` items can be inserted into
+    /// the table without reallocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(
+        &mut self,
+        additional: usize,
+        hasher: impl Fn(&T) -> u64,
+    ) -> Result<(), TryReserveError> {
+        if additional > self.table.growth_left {
+            // SAFETY: The [`RawTableInner`] must already have properly initialized control
+            // bytes since we will never expose RawTable::new_uninitialized in a public API.
+            unsafe { self.reserve_rehash(additional, hasher, Fallibility::Fallible) }
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Out-of-line slow path for `reserve` and `try_reserve`.
+    ///
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes,
+    /// otherwise calling this function results in [`undefined behavior`]
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cold]
+    #[inline(never)]
+    unsafe fn reserve_rehash(
+        &mut self,
+        additional: usize,
+        hasher: impl Fn(&T) -> u64,
+        fallibility: Fallibility,
+    ) -> Result<(), TryReserveError> {
+        unsafe {
+            // SAFETY:
+            // 1. We know for sure that `alloc` and `layout` matches the [`Allocator`] and
+            //    [`TableLayout`] that were used to allocate this table.
+            // 2. The `drop` function is the actual drop function of the elements stored in
+            //    the table.
+            // 3. The caller ensures that the control bytes of the `RawTableInner`
+            //    are already initialized.
+            self.table.reserve_rehash_inner(
+                &self.alloc,
+                additional,
+                &|table, index| hasher(table.bucket::<T>(index).as_ref()),
+                fallibility,
+                Self::TABLE_LAYOUT,
+                if T::NEEDS_DROP {
+                    Some(mem::transmute(ptr::drop_in_place::<T> as unsafe fn(*mut T)))
+                } else {
+                    None
+                },
+            )
+        }
+    }
+
+    /// Allocates a new table of a different size and moves the contents of the
+    /// current table into it.
+    ///
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes,
+    /// otherwise calling this function results in [`undefined behavior`]
+    ///
+    /// The caller of this function must ensure that `capacity >= self.table.items`,
+    /// otherwise:
+    ///
+    /// * If `self.table.items != 0`, calling this function with `capacity`
+    ///   equal to 0 (`capacity == 0`) results in [`undefined behavior`].
+    ///
+    /// * If `capacity_to_buckets(capacity) < Group::WIDTH` and
+    ///   `self.table.items > capacity_to_buckets(capacity)`,
+    ///   calling this function results in [`undefined behavior`].
+    ///
+    /// * If `capacity_to_buckets(capacity) >= Group::WIDTH` and
+    ///   `self.table.items > capacity_to_buckets(capacity)`,
+    ///   this function never returns (it goes into an
+    ///   infinite loop).
+    ///
+    /// See [`RawTableInner::find_insert_slot`] for more information.
+    ///
+    /// [`RawTableInner::find_insert_slot`]: RawTableInner::find_insert_slot
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    unsafe fn resize(
+        &mut self,
+        capacity: usize,
+        hasher: impl Fn(&T) -> u64,
+        fallibility: Fallibility,
+    ) -> Result<(), TryReserveError> {
+        // SAFETY:
+        // 1. The caller of this function guarantees that `capacity >= self.table.items`.
+        // 2. We know for sure that `alloc` and `layout` matches the [`Allocator`] and
+        //    [`TableLayout`] that were used to allocate this table.
+        // 3. The caller ensures that the control bytes of the `RawTableInner`
+        //    are already initialized.
+        self.table.resize_inner(
+            &self.alloc,
+            capacity,
+            &|table, index| hasher(table.bucket::<T>(index).as_ref()),
+            fallibility,
+            Self::TABLE_LAYOUT,
+        )
+    }
+
+    /// Inserts a new element into the table, and returns its raw bucket.
+    ///
+    /// This does not check if the given element already exists in the table.
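+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let value = ("a", 10);
+    /// let hash = make_hash(&hash_builder, &value.0);
+    /// let bucket = table.insert(hash, value, |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// assert_eq!(unsafe { bucket.as_ref() }, &("a", 10));
+    /// assert_eq!(table.len(), 1);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```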
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket<T> {
+        unsafe {
+            // SAFETY:
+            // 1. The [`RawTableInner`] must already have properly initialized control bytes since
+            //    we will never expose `RawTable::new_uninitialized` in a public API.
+            //
+            // 2. We reserve additional space (if necessary) right after calling this function.
+            let mut slot = self.table.find_insert_slot(hash);
+
+            // We can avoid growing the table once we have reached our load factor if we are replacing
+            // a tombstone. This works since the number of EMPTY slots does not change in this case.
+            //
+            // SAFETY: The function is guaranteed to return [`InsertSlot`] that contains an index
+            // in the range `0..=self.buckets()`.
+            let old_ctrl = *self.table.ctrl(slot.index);
+            if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) {
+                self.reserve(1, hasher);
+                // SAFETY: We know for sure that `RawTableInner` has control bytes
+                // initialized and that there is extra space in the table.
+                slot = self.table.find_insert_slot(hash);
+            }
+
+            self.insert_in_slot(hash, slot, value)
+        }
+    }
+
+    /// Attempts to insert a new element without growing the table and return its raw bucket.
+    ///
+    /// Returns an `Err` containing the given element if inserting it would require growing the
+    /// table.
+    ///
+    /// This does not check if the given element already exists in the table.
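+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of the fallible path, assuming the `raw` feature and the same
+    /// `ahash`-based hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let hash = make_hash(&hash_builder, &"a");
+    ///
+    /// // A freshly created table has not allocated yet, so there is no room
+    /// // to insert without growing and the value is handed back.
+    /// let mut table: RawTable<(&str, i32)> = RawTable::new();
+    /// assert!(table.try_insert_no_grow(hash, ("a", 10)).is_err());
+    ///
+    /// // With preallocated capacity the insert succeeds.
+    /// let mut table: RawTable<(&str, i32)> = RawTable::with_capacity(1);
+    /// assert!(table.try_insert_no_grow(hash, ("a", 10)).is_ok());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```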
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_insert_no_grow(&mut self, hash: u64, value: T) -> Result<Bucket<T>, T> {
+        unsafe {
+            match self.table.prepare_insert_no_grow(hash) {
+                Ok(index) => {
+                    let bucket = self.bucket(index);
+                    bucket.write(value);
+                    Ok(bucket)
+                }
+                Err(()) => Err(value),
+            }
+        }
+    }
+
+    /// Inserts a new element into the table, and returns a mutable reference to it.
+    ///
+    /// This does not check if the given element already exists in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T {
+        unsafe { self.insert(hash, value, hasher).as_mut() }
+    }
+
+    /// Inserts a new element into the table, without growing the table.
+    ///
+    /// There must be enough space in the table to insert the new element.
+    ///
+    /// This does not check if the given element already exists in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(any(feature = "raw", feature = "rustc-internal-api"))]
+    pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket<T> {
+        let (index, old_ctrl) = self.table.prepare_insert_slot(hash);
+        let bucket = self.table.bucket(index);
+
+        // If we are replacing a DELETED entry then we don't need to update
+        // the load counter.
+        self.table.growth_left -= special_is_empty(old_ctrl) as usize;
+
+        bucket.write(value);
+        self.table.items += 1;
+        bucket
+    }
+
+    /// Temporarily removes a bucket, applying the given function to the removed
+    /// element and optionally putting the returned value back in the same bucket.
+    ///
+    /// Returns `true` if the bucket still contains an element.
+    ///
+    /// This does not check if the given bucket is actually occupied.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn replace_bucket_with<F>(&mut self, bucket: Bucket<T>, f: F) -> bool
+    where
+        F: FnOnce(T) -> Option<T>,
+    {
+        let index = self.bucket_index(&bucket);
+        let old_ctrl = *self.table.ctrl(index);
+        debug_assert!(self.is_bucket_full(index));
+        let old_growth_left = self.table.growth_left;
+        let item = self.remove(bucket).0;
+        if let Some(new_item) = f(item) {
+            self.table.growth_left = old_growth_left;
+            self.table.set_ctrl(index, old_ctrl);
+            self.table.items += 1;
+            self.bucket(index).write(new_item);
+            true
+        } else {
+            false
+        }
+    }
+
+    /// Searches for an element in the table. If the element is not found,
+    /// returns `Err` with the position of a slot where an element with the
+    /// same hash could be inserted.
+    ///
+    /// This function may resize the table if additional space is required for
+    /// inserting an element.
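+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of the find-or-insert pattern together with `insert_in_slot`,
+    /// assuming the `raw` feature and the same `ahash`-based hash builder used by the
+    /// other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table: RawTable<(&str, i32)> = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// let result = table.find_or_find_insert_slot(
+    ///     hash,
+    ///     |(k, _)| *k == "a",
+    ///     |val| make_hash(&hash_builder, &val.0),
+    /// );
+    /// match result {
+    ///     // The key is not present yet, so we get back a slot to insert into.
+    ///     Err(slot) => unsafe {
+    ///         table.insert_in_slot(hash, slot, ("a", 10));
+    ///     },
+    ///     Ok(_bucket) => unreachable!("the table starts out empty"),
+    /// }
+    /// assert_eq!(table.len(), 1);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```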
+    #[inline]
+    pub fn find_or_find_insert_slot(
+        &mut self,
+        hash: u64,
+        mut eq: impl FnMut(&T) -> bool,
+        hasher: impl Fn(&T) -> u64,
+    ) -> Result<Bucket<T>, InsertSlot> {
+        self.reserve(1, hasher);
+
+        unsafe {
+            // SAFETY:
+            // 1. We know for sure that there is at least one empty `bucket` in the table.
+            // 2. The [`RawTableInner`] must already have properly initialized control bytes since we will
+            //    never expose `RawTable::new_uninitialized` in a public API.
+            // 3. The `find_or_find_insert_slot_inner` function returns the `index` of only the full bucket,
+            //    which is in the range `0..self.buckets()` (since there is at least one empty `bucket` in
+            //    the table), so calling `self.bucket(index)` and `Bucket::as_ref` is safe.
+            match self
+                .table
+                .find_or_find_insert_slot_inner(hash, &mut |index| eq(self.bucket(index).as_ref()))
+            {
+                // SAFETY: See explanation above.
+                Ok(index) => Ok(self.bucket(index)),
+                Err(slot) => Err(slot),
+            }
+        }
+    }
+
+    /// Inserts a new element into the table in the given slot, and returns its
+    /// raw bucket.
+    ///
+    /// # Safety
+    ///
+    /// `slot` must point to a slot previously returned by
+    /// `find_or_find_insert_slot`, and no mutation of the table must have
+    /// occurred since that call.
+    #[inline]
+    pub unsafe fn insert_in_slot(&mut self, hash: u64, slot: InsertSlot, value: T) -> Bucket<T> {
+        let old_ctrl = *self.table.ctrl(slot.index);
+        self.table.record_item_insert_at(slot.index, old_ctrl, hash);
+
+        let bucket = self.bucket(slot.index);
+        bucket.write(value);
+        bucket
+    }
+
+    /// Searches for an element in the table.
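+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// table.insert(hash, ("a", 10), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// assert!(table.find(hash, |(k, _)| *k == "a").is_some());
+    /// assert!(table.find(hash, |(k, _)| *k == "missing").is_none());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```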
+    #[inline]
+    pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
+        unsafe {
+            // SAFETY:
+            // 1. The [`RawTableInner`] must already have properly initialized control bytes since we
+            //    will never expose `RawTable::new_uninitialized` in a public API.
+            // 2. The `find_inner` function returns the `index` of only the full bucket, which is in
+            //    the range `0..self.buckets()`, so calling `self.bucket(index)` and `Bucket::as_ref`
+            //    is safe.
+            let result = self
+                .table
+                .find_inner(hash, &mut |index| eq(self.bucket(index).as_ref()));
+
+            // Avoid `Option::map` because it bloats LLVM IR.
+            match result {
+                // SAFETY: See explanation above.
+                Some(index) => Some(self.bucket(index)),
+                None => None,
+            }
+        }
+    }
+
+    /// Gets a reference to an element in the table.
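+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// let hash = make_hash(&hash_builder, &"a");
+    /// table.insert(hash, ("a", 10), |val| make_hash(&hash_builder, &val.0));
+    ///
+    /// assert_eq!(table.get(hash, |(k, _)| *k == "a"), Some(&("a", 10)));
+    /// assert_eq!(table.get(hash, |(k, _)| *k == "b"), None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```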
+    #[inline]
+    pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { bucket.as_ref() }),
+            None => None,
+        }
+    }
+
+    /// Gets a mutable reference to an element in the table.
+    #[inline]
+    pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.find(hash, eq) {
+            Some(bucket) => Some(unsafe { bucket.as_mut() }),
+            None => None,
+        }
+    }
+
+    /// Attempts to get mutable references to `N` entries in the table at once.
+    ///
+    /// Returns an array of length `N` with the results of each query.
+    ///
+    /// At most one mutable reference will be returned to any entry. `None` will be returned
+    /// if any of the hashes are duplicates or if any of the hashes is not found.
+    ///
+    /// The `eq` argument should be a closure such that `eq(i, k)` returns true if `k` is equal to
+    /// the `i`th key to be looked up.
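+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// for &(k, v) in &[("a", 1), ("b", 2)] {
+    ///     let hash = make_hash(&hash_builder, &k);
+    ///     table.insert(hash, (k, v), |val| make_hash(&hash_builder, &val.0));
+    /// }
+    ///
+    /// let keys = ["a", "b"];
+    /// let hashes = [
+    ///     make_hash(&hash_builder, &keys[0]),
+    ///     make_hash(&hash_builder, &keys[1]),
+    /// ];
+    /// assert!(table.get_many_mut(hashes, |i, (k, _)| *k == keys[i]).is_some());
+    ///
+    /// // Looking up the same entry twice yields `None`.
+    /// assert!(table
+    ///     .get_many_mut([hashes[0], hashes[0]], |_, (k, _)| *k == "a")
+    ///     .is_none());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```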
+    pub fn get_many_mut<const N: usize>(
+        &mut self,
+        hashes: [u64; N],
+        eq: impl FnMut(usize, &T) -> bool,
+    ) -> Option<[&'_ mut T; N]> {
+        unsafe {
+            let ptrs = self.get_many_mut_pointers(hashes, eq)?;
+
+            for (i, &cur) in ptrs.iter().enumerate() {
+                if ptrs[..i].iter().any(|&prev| ptr::eq::<T>(prev, cur)) {
+                    return None;
+                }
+            }
+            // All buckets are distinct from all previous buckets, so we're clear to return
+            // the result of the lookup.
+
+            // TODO use `MaybeUninit::array_assume_init` here instead once that's stable.
+            Some(mem::transmute_copy(&ptrs))
+        }
+    }
+
+    pub unsafe fn get_many_unchecked_mut<const N: usize>(
+        &mut self,
+        hashes: [u64; N],
+        eq: impl FnMut(usize, &T) -> bool,
+    ) -> Option<[&'_ mut T; N]> {
+        let ptrs = self.get_many_mut_pointers(hashes, eq)?;
+        Some(mem::transmute_copy(&ptrs))
+    }
+
+    unsafe fn get_many_mut_pointers<const N: usize>(
+        &mut self,
+        hashes: [u64; N],
+        mut eq: impl FnMut(usize, &T) -> bool,
+    ) -> Option<[*mut T; N]> {
+        // TODO use `MaybeUninit::uninit_array` here instead once that's stable.
+        let mut outs: MaybeUninit<[*mut T; N]> = MaybeUninit::uninit();
+        let outs_ptr = outs.as_mut_ptr();
+
+        for (i, &hash) in hashes.iter().enumerate() {
+            let cur = self.find(hash, |k| eq(i, k))?;
+            *(*outs_ptr).get_unchecked_mut(i) = cur.as_mut();
+        }
+
+        // TODO use `MaybeUninit::array_assume_init` here instead once that's stable.
+        Some(outs.assume_init())
+    }
+
+    /// Returns the number of elements the map can hold without reallocating.
+    ///
+    /// This number is a lower bound; the table might be able to hold
+    /// more, but is guaranteed to be able to hold at least this many.
+    #[inline]
+    pub fn capacity(&self) -> usize {
+        self.table.items + self.table.growth_left
+    }
+
+    /// Returns the number of elements in the table.
+    #[inline]
+    pub fn len(&self) -> usize {
+        self.table.items
+    }
+
+    /// Returns `true` if the table contains no elements.
+    #[inline]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Returns the number of buckets in the table.
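+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature is enabled:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// // The unallocated empty table reports a single (never used) bucket.
+    /// let table: RawTable<(&str, i32)> = RawTable::new();
+    /// assert_eq!(table.buckets(), 1);
+    ///
+    /// // Allocated tables always have a power-of-two number of buckets.
+    /// let table: RawTable<(&str, i32)> = RawTable::with_capacity(10);
+    /// assert!(table.buckets().is_power_of_two());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```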
+    #[inline]
+    pub fn buckets(&self) -> usize {
+        self.table.bucket_mask + 1
+    }
+
+    /// Checks whether the bucket at `index` is full.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure `index` is less than the number of buckets.
+    #[inline]
+    pub unsafe fn is_bucket_full(&self, index: usize) -> bool {
+        self.table.is_bucket_full(index)
+    }
+
+    /// Returns an iterator over every element in the table. It is up to
+    /// the caller to ensure that the `RawTable` outlives the `RawIter`.
+    /// Because we cannot make the `next` method unsafe on the `RawIter`
+    /// struct, we have to make the `iter` method unsafe.
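+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// for &(k, v) in &[("a", 1), ("b", 2), ("c", 3)] {
+    ///     let hash = make_hash(&hash_builder, &k);
+    ///     table.insert(hash, (k, v), |val| make_hash(&hash_builder, &val.0));
+    /// }
+    ///
+    /// let mut sum = 0;
+    /// // SAFETY: `table` outlives the iterator and is not mutated during iteration.
+    /// unsafe {
+    ///     for bucket in table.iter() {
+    ///         sum += bucket.as_ref().1;
+    ///     }
+    /// }
+    /// assert_eq!(sum, 6);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```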
+    #[inline]
+    pub unsafe fn iter(&self) -> RawIter<T> {
+        // SAFETY:
+        // 1. The caller must uphold the safety contract for `iter` method.
+        // 2. The [`RawTableInner`] must already have properly initialized control bytes since
+        //    we will never expose RawTable::new_uninitialized in a public API.
+        self.table.iter()
+    }
+
+    /// Returns an iterator over occupied buckets that could match a given hash.
+    ///
+    /// `RawTable` only stores 7 bits of the hash value, so this iterator may
+    /// return items that have a hash value different than the one provided. You
+    /// should always validate the returned values before using them.
+    ///
+    /// It is up to the caller to ensure that the `RawTable` outlives the
+    /// `RawIterHash`. Because we cannot make the `next` method unsafe on the
+    /// `RawIterHash` struct, we have to make the `iter_hash` method unsafe.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
+    pub unsafe fn iter_hash(&self, hash: u64) -> RawIterHash<T> {
+        RawIterHash::new(self, hash)
+    }
+
+    /// Returns an iterator which removes all elements from the table without
+    /// freeing the memory.
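+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch, assuming the `raw` feature and the same `ahash`-based
+    /// hash builder used by the other examples in this module:
+    ///
+    /// ```
+    /// # #[cfg(feature = "raw")]
+    /// # fn test() {
+    /// use core::hash::{BuildHasher, Hash};
+    /// use hashbrown::raw::RawTable;
+    ///
+    /// type NewHashBuilder = core::hash::BuildHasherDefault<ahash::AHasher>;
+    ///
+    /// fn make_hash<K: Hash + ?Sized, S: BuildHasher>(hash_builder: &S, key: &K) -> u64 {
+    ///     use core::hash::Hasher;
+    ///     let mut state = hash_builder.build_hasher();
+    ///     key.hash(&mut state);
+    ///     state.finish()
+    /// }
+    ///
+    /// let hash_builder = NewHashBuilder::default();
+    /// let mut table = RawTable::new();
+    ///
+    /// for &(k, v) in &[("a", 1), ("b", 2), ("c", 3)] {
+    ///     let hash = make_hash(&hash_builder, &k);
+    ///     table.insert(hash, (k, v), |val| make_hash(&hash_builder, &val.0));
+    /// }
+    ///
+    /// let drained: Vec<(&str, i32)> = table.drain().collect();
+    /// assert_eq!(drained.len(), 3);
+    /// assert!(table.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "raw")]
+    /// #     test()
+    /// # }
+    /// ```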
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> RawDrain<'_, T, A> {
+        unsafe {
+            let iter = self.iter();
+            self.drain_iter_from(iter)
+        }
+    }
+
+    /// Returns an iterator which removes all elements from the table without
+    /// freeing the memory.
+    ///
+    /// Iteration starts at the provided iterator's current location.
+    ///
+    /// It is up to the caller to ensure that the iterator is valid for this
+    /// `RawTable` and covers all items that remain in the table.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub unsafe fn drain_iter_from(&mut self, iter: RawIter<T>) -> RawDrain<'_, T, A> {
+        debug_assert_eq!(iter.len(), self.len());
+        RawDrain {
+            iter,
+            table: mem::replace(&mut self.table, RawTableInner::NEW),
+            orig_table: NonNull::from(&mut self.table),
+            marker: PhantomData,
+        }
+    }
+
+    /// Returns an iterator which consumes all elements from the table.
+    ///
+    /// Iteration starts at the provided iterator's current location.
+    ///
+    /// It is up to the caller to ensure that the iterator is valid for this
+    /// `RawTable` and covers all items that remain in the table.
+    pub unsafe fn into_iter_from(self, iter: RawIter<T>) -> RawIntoIter<T, A> {
+        debug_assert_eq!(iter.len(), self.len());
+
+        let allocation = self.into_allocation();
+        RawIntoIter {
+            iter,
+            allocation,
+            marker: PhantomData,
+        }
+    }
+
+    /// Converts the table into a raw allocation. The contents of the table
+    /// should be dropped using a `RawIter` before freeing the allocation.
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) fn into_allocation(self) -> Option<(NonNull<u8>, Layout, A)> {
+        let alloc = if self.table.is_empty_singleton() {
+            None
+        } else {
+            // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+            let (layout, ctrl_offset) =
+                match Self::TABLE_LAYOUT.calculate_layout_for(self.table.buckets()) {
+                    Some(lco) => lco,
+                    None => unsafe { hint::unreachable_unchecked() },
+                };
+            Some((
+                unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) },
+                layout,
+                unsafe { ptr::read(&self.alloc) },
+            ))
+        };
+        mem::forget(self);
+        alloc
+    }
+}
+
+unsafe impl<T, A: Allocator> Send for RawTable<T, A>
+where
+    T: Send,
+    A: Send,
+{
+}
+unsafe impl<T, A: Allocator> Sync for RawTable<T, A>
+where
+    T: Sync,
+    A: Sync,
+{
+}
+
+impl RawTableInner {
+    const NEW: Self = RawTableInner::new();
+
+    /// Creates a new empty hash table without allocating any memory.
+    ///
+    /// In effect this returns a table with exactly 1 bucket. However we can
+    /// leave the data pointer dangling since that bucket is never accessed
+    /// due to our load factor forcing us to always have at least 1 free bucket.
+    #[inline]
+    const fn new() -> Self {
+        Self {
+            // Be careful to cast the entire slice to a raw pointer.
+            ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) },
+            bucket_mask: 0,
+            items: 0,
+            growth_left: 0,
+        }
+    }
+}
+
+impl RawTableInner {
+    /// Allocates a new [`RawTableInner`] with the given number of buckets.
+    /// The control bytes and buckets are left uninitialized.
+    ///
+    /// # Safety
+    ///
+    /// The caller of this function must ensure that `buckets` is a power of two,
+    /// and must also initialize all of the `self.bucket_mask + 1 + Group::WIDTH`
+    /// control bytes with the [`EMPTY`] byte.
+    ///
+    /// See also [`Allocator`] API for other safety concerns.
+    ///
+    /// [`Allocator`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn new_uninitialized<A>(
+        alloc: &A,
+        table_layout: TableLayout,
+        buckets: usize,
+        fallibility: Fallibility,
+    ) -> Result<Self, TryReserveError>
+    where
+        A: Allocator,
+    {
+        debug_assert!(buckets.is_power_of_two());
+
+        // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+        let (layout, ctrl_offset) = match table_layout.calculate_layout_for(buckets) {
+            Some(lco) => lco,
+            None => return Err(fallibility.capacity_overflow()),
+        };
+
+        let ptr: NonNull<u8> = match do_alloc(alloc, layout) {
+            Ok(block) => block.cast(),
+            Err(_) => return Err(fallibility.alloc_err(layout)),
+        };
+
+        // SAFETY: null pointer will be caught in above check
+        let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset));
+        Ok(Self {
+            ctrl,
+            bucket_mask: buckets - 1,
+            items: 0,
+            growth_left: bucket_mask_to_capacity(buckets - 1),
+        })
+    }
+
+    /// Attempts to allocate a new [`RawTableInner`] with at least enough
+    /// capacity for inserting the given number of elements without reallocating.
+    ///
+    /// All the control bytes are initialized with the [`EMPTY`] bytes.
+    #[inline]
+    fn fallible_with_capacity<A>(
+        alloc: &A,
+        table_layout: TableLayout,
+        capacity: usize,
+        fallibility: Fallibility,
+    ) -> Result<Self, TryReserveError>
+    where
+        A: Allocator,
+    {
+        if capacity == 0 {
+            Ok(Self::NEW)
+        } else {
+            // SAFETY: We checked that we could successfully allocate the new table, and then
+            // initialized all control bytes with the constant `EMPTY` byte.
+            unsafe {
+                let buckets =
+                    capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?;
+
+                let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?;
+                // SAFETY: We checked that the table is allocated and therefore the table already has
+                // `self.bucket_mask + 1 + Group::WIDTH` number of control bytes (see TableLayout::calculate_layout_for)
+                // so writing `self.num_ctrl_bytes() == bucket_mask + 1 + Group::WIDTH` bytes is safe.
+                result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());
+
+                Ok(result)
+            }
+        }
+    }
+
+    /// Allocates a new [`RawTableInner`] with at least enough capacity for inserting
+    /// the given number of elements without reallocating.
+    ///
+    /// Panics if the new capacity exceeds [`isize::MAX`] bytes and [`abort`]s the program
+    /// in case of an allocation error. Use [`fallible_with_capacity`] instead if you want
+    /// to handle memory allocation failures.
+    ///
+    /// All the control bytes are initialized with the [`EMPTY`] bytes.
+    ///
+    /// [`fallible_with_capacity`]: RawTableInner::fallible_with_capacity
+    /// [`abort`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
+    fn with_capacity<A>(alloc: &A, table_layout: TableLayout, capacity: usize) -> Self
+    where
+        A: Allocator,
+    {
+        // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
+        match Self::fallible_with_capacity(alloc, table_layout, capacity, Fallibility::Infallible) {
+            Ok(table_inner) => table_inner,
+            // SAFETY: All allocation errors will be caught inside `RawTableInner::new_uninitialized`.
+            Err(_) => unsafe { hint::unreachable_unchecked() },
+        }
+    }
+
+    /// Fixes up an insertion slot returned by the [`RawTableInner::find_insert_slot_in_group`] method.
+    ///
+    /// In tables smaller than the group width (`self.buckets() < Group::WIDTH`), trailing control
+    /// bytes outside the range of the table are filled with [`EMPTY`] entries. These will unfortunately
+    /// trigger a match in the [`RawTableInner::find_insert_slot_in_group`] function. This is because
+    /// the `Some(bit)` returned by `group.match_empty_or_deleted().lowest_set_bit()` after masking
+    /// (`(probe_seq.pos + bit) & self.bucket_mask`) may point to a full bucket that is already occupied.
+    /// We detect this situation here and perform a second scan starting at the beginning of the table.
+    /// This second scan is guaranteed to find an empty slot (due to the load factor) before hitting the
+    /// trailing control bytes (containing [`EMPTY`] bytes).
+    ///
+    /// If this function is called correctly, it is guaranteed to return [`InsertSlot`] with an
+    /// index of an empty or deleted bucket in the range `0..self.buckets()` (see `Warning` and
+    /// `Safety`).
+    ///
+    /// # Warning
+    ///
+    /// The table must have at least 1 empty or deleted `bucket`, otherwise, if the table is smaller
+    /// than the group width (`self.buckets() < Group::WIDTH`), this function returns an index outside
+    /// of the table's index range `0..self.buckets()` (`0..=self.bucket_mask`). Attempting to write
+    /// data at that index will cause immediate [`undefined behavior`].
+    ///
+    /// # Safety
+    ///
+    /// The safety rules are directly derived from the safety rules for the [`RawTableInner::ctrl`] method.
+    /// Thus, in order to uphold those safety contracts, as well as for the correct operation of this
+    /// crate, the following rules are necessary and sufficient:
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes otherwise calling this
+    ///   function results in [`undefined behavior`].
+    ///
+    /// * This function must only be used on insertion slots found by [`RawTableInner::find_insert_slot_in_group`]
+    ///   (after the `find_insert_slot_in_group` function, but before insertion into the table).
+    ///
+    /// * The `index` must not be greater than the `self.bucket_mask`, i.e. `(index + 1) <= self.buckets()`
+    ///   (this one is provided by the [`RawTableInner::find_insert_slot_in_group`] function).
+    ///
+    /// Calling this function with an index not provided by [`RawTableInner::find_insert_slot_in_group`]
+    /// may result in [`undefined behavior`] even if the index satisfies the safety rules of the
+    /// [`RawTableInner::ctrl`] function (`index < self.bucket_mask + 1 + Group::WIDTH`).
+    ///
+    /// [`RawTableInner::ctrl`]: RawTableInner::ctrl
+    /// [`RawTableInner::find_insert_slot_in_group`]: RawTableInner::find_insert_slot_in_group
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn fix_insert_slot(&self, mut index: usize) -> InsertSlot {
+        // SAFETY: The caller of this function ensures that `index` is in the range `0..=self.bucket_mask`.
+        if unlikely(self.is_bucket_full(index)) {
+            debug_assert!(self.bucket_mask < Group::WIDTH);
+            // SAFETY:
+            //
+            // * Since the caller of this function ensures that the control bytes are properly
+            //   initialized and `ptr = self.ctrl(0)` points to the start of the array of control
+            //   bytes, therefore: `ctrl` is valid for reads, properly aligned to `Group::WIDTH`
+            //   and points to the properly initialized control bytes (see also
+            //   `TableLayout::calculate_layout_for` and `ptr::read`);
+            //
+            // * Because the caller of this function ensures that the index was provided by the
+            //   `self.find_insert_slot_in_group()` function, so for tables larger than the
+            //   group width (self.buckets() >= Group::WIDTH), we will never end up in the given
+            //   branch, since `(probe_seq.pos + bit) & self.bucket_mask` in `find_insert_slot_in_group`
+            //   cannot return a full bucket index. For tables smaller than the group width, calling
+            //   the `unwrap_unchecked` function is also safe, as the trailing control bytes outside
+            //   the range of the table are filled with EMPTY bytes (and we know for sure that there
+            //   is at least one FULL bucket), so this second scan either finds an empty slot (due to
+            //   the load factor) or hits the trailing control bytes (containing EMPTY).
+            index = Group::load_aligned(self.ctrl(0))
+                .match_empty_or_deleted()
+                .lowest_set_bit()
+                .unwrap_unchecked();
+        }
+        InsertSlot { index }
+    }
+
+    /// Finds the position to insert something in a group.
+    ///
+    /// **This may have false positives and must be fixed up with `fix_insert_slot`
+    /// before it's used.**
+    ///
+    /// The returned index is always in the range `0..self.buckets()` (`0..=self.bucket_mask`),
+    /// but for tables smaller than the group width it may point to a [`Bucket`] that is
+    /// already full (hence the required fixup).
+    #[inline]
+    fn find_insert_slot_in_group(&self, group: &Group, probe_seq: &ProbeSeq) -> Option<usize> {
+        let bit = group.match_empty_or_deleted().lowest_set_bit();
+
+        if likely(bit.is_some()) {
+            // This is the same as `(probe_seq.pos + bit) % self.buckets()` because the number
+            // of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+            Some((probe_seq.pos + bit.unwrap()) & self.bucket_mask)
+        } else {
+            None
+        }
+    }
+
+    /// Searches for an element in the table, or a potential slot where that element could
+    /// be inserted (an empty or deleted [`Bucket`] index).
+    ///
+    /// This uses dynamic dispatch to reduce the amount of code generated, but that is
+    /// eliminated by LLVM optimizations.
+    ///
+    /// This function does not make any changes to the `data` part of the table, or any
+    /// changes to the `items` or `growth_left` field of the table.
+    ///
+    /// The table must have at least 1 empty or deleted `bucket`, otherwise, if the
+    /// `eq: &mut dyn FnMut(usize) -> bool` function does not return `true`, this function
+    /// will never return (will go into an infinite loop) for tables larger than the group
+    /// width, or return an index outside of the table indices range if the table is less
+    /// than the group width.
+    ///
+    /// This function is guaranteed to provide the `eq: &mut dyn FnMut(usize) -> bool`
+    /// function with only `FULL` buckets' indices and return the `index` of the found
+    /// element (as `Ok(index)`). If the element is not found and there is at least 1
+    /// empty or deleted [`Bucket`] in the table, the function is guaranteed to return
+    /// [`InsertSlot`] with an index in the range `0..self.buckets()`, but in any case,
+    /// if this function returns [`InsertSlot`], it will contain an index in the range
+    /// `0..=self.buckets()`.
+    ///
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes otherwise calling
+    /// this function results in [`undefined behavior`].
+    ///
+    /// Attempt to write data at the [`InsertSlot`] returned by this function when the table is
+    /// less than the group width and if there was not at least one empty or deleted bucket in
+    /// the table will cause immediate [`undefined behavior`]. This is because in this case the
+    /// function will return `self.bucket_mask + 1` as an index due to the trailing [`EMPTY`]
+    /// control bytes outside the table range.
+    ///
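+    /// A minimal sketch of how a caller consumes the result (editorial illustration;
+    /// the `eq` closure and the `write_at` step are assumed stand-ins, not part of
+    /// this module):
+    ///
+    /// ```ignore
+    /// match unsafe { self.find_or_find_insert_slot_inner(hash, &mut eq) } {
+    ///     // The element is already present at `index`.
+    ///     Ok(index) => index,
+    ///     // Otherwise `slot.index` names an empty or deleted bucket to fill.
+    ///     Err(slot) => write_at(slot),
+    /// }
+    /// ```
+    ///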
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn find_or_find_insert_slot_inner(
+        &self,
+        hash: u64,
+        eq: &mut dyn FnMut(usize) -> bool,
+    ) -> Result<usize, InsertSlot> {
+        let mut insert_slot = None;
+
+        let h2_hash = h2(hash);
+        let mut probe_seq = self.probe_seq(hash);
+
+        loop {
+            // SAFETY:
+            // * Caller of this function ensures that the control bytes are properly initialized.
+            //
+            // * `ProbeSeq.pos` cannot be greater than `self.bucket_mask = self.buckets() - 1`
+            //   of the table due to masking with `self.bucket_mask` and also because the number of
+            //   buckets is a power of two (see `self.probe_seq` function).
+            //
+            // * Even if `ProbeSeq.pos` returns `position == self.bucket_mask`, it is safe to
+            //   call `Group::load` due to the extended control bytes range, which is
+            //  `self.bucket_mask + 1 + Group::WIDTH` (in fact, this means that the last control
+            //   byte will never be read for the allocated table);
+            //
+            // * Also, even if `RawTableInner` is not already allocated, `ProbeSeq.pos` will
+            //   always return "0" (zero), so Group::load will read unaligned `Group::static_empty()`
+            //   bytes, which is safe (see RawTableInner::new).
+            let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
+
+            for bit in group.match_byte(h2_hash) {
+                let index = (probe_seq.pos + bit) & self.bucket_mask;
+
+                if likely(eq(index)) {
+                    return Ok(index);
+                }
+            }
+
+            // We didn't find the element we were looking for in the group, try to get an
+            // insertion slot from the group if we don't have one yet.
+            if likely(insert_slot.is_none()) {
+                insert_slot = self.find_insert_slot_in_group(&group, &probe_seq);
+            }
+
+            // Only stop the search if the group contains at least one empty element.
+            // Otherwise, the element that we are looking for might be in a following group.
+            if likely(group.match_empty().any_bit_set()) {
+                // We must have found an insert slot by now, since the current group contains at
+                // least one. For tables smaller than the group width, there will still be an
+                // empty element in the current (and only) group due to the load factor.
+                unsafe {
+                    // SAFETY:
+                    // * Caller of this function ensures that the control bytes are properly initialized.
+                    //
+                    // * We use this function with the slot / index found by `self.find_insert_slot_in_group`
+                    return Err(self.fix_insert_slot(insert_slot.unwrap_unchecked()));
+                }
+            }
+
+            probe_seq.move_next(self.bucket_mask);
+        }
+    }
+
+    /// Searches for an empty or deleted bucket which is suitable for inserting a new
+    /// element and sets the hash for that slot. Returns an index of that slot and the
+    /// old control byte stored in the found index.
+    ///
+    /// This function does not check if the given element exists in the table. Also,
+    /// this function does not check if there is enough space in the table to insert
+    /// a new element. The caller of the function must ensure that the table has at
+    /// least 1 empty or deleted `bucket`, otherwise this function will never return
+    /// (will go into an infinite loop) for tables larger than the group width, or
+    /// return an index outside of the table indices range if the table is less than
+    /// the group width.
+    ///
+    /// If there is at least 1 empty or deleted `bucket` in the table, the function is
+    /// guaranteed to return an `index` in the range `0..self.buckets()`, but in any case,
+    /// if this function returns an `index` it will be in the range `0..=self.buckets()`.
+    ///
+    /// This function does not make any changes to the `data` parts of the table,
+    /// or any changes to the `items` or `growth_left` field of the table.
+    ///
+    /// # Safety
+    ///
+    /// The safety rules are directly derived from the safety rules for the
+    /// [`RawTableInner::set_ctrl_h2`] and [`RawTableInner::find_insert_slot`] methods.
+    /// Thus, in order to uphold the safety contracts for those methods, as well as for
+    /// the correct operation of this crate, you must observe the following rules
+    /// when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated and has properly initialized
+    ///   control bytes otherwise calling this function results in [`undefined behavior`].
+    ///
+    /// * The caller of this function must ensure that the "data" parts of the table
+    ///   will have an entry at the returned index (matching the given hash) right
+    ///   after calling this function.
+    ///
+    /// Attempt to write data at the `index` returned by this function when the table is
+    /// less than the group width and if there was not at least one empty or deleted bucket in
+    /// the table will cause immediate [`undefined behavior`]. This is because in this case the
+    /// function will return `self.bucket_mask + 1` as an index due to the trailing [`EMPTY`]
+    /// control bytes outside the table range.
+    ///
+    /// The caller must independently increase the `items` field of the table, and also,
+    /// if the old control byte was [`EMPTY`], then decrease the table's `growth_left`
+    /// field, and do not change it if the old control byte was [`DELETED`].
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving an `element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
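+    /// A sketch of the caller's bookkeeping obligations described above (editorial
+    /// illustration only; the real insertion path also writes the matching data
+    /// element at the returned index):
+    ///
+    /// ```ignore
+    /// let (index, old_ctrl) = unsafe { self.prepare_insert_slot(hash) };
+    /// // ... the caller writes the data element for `index` ...
+    /// self.growth_left -= usize::from(special_is_empty(old_ctrl));
+    /// self.items += 1;
+    /// ```
+    ///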
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    /// [`RawTableInner::ctrl`]: RawTableInner::ctrl
+    /// [`RawTableInner::set_ctrl_h2`]: RawTableInner::set_ctrl_h2
+    /// [`RawTableInner::find_insert_slot`]: RawTableInner::find_insert_slot
+    #[inline]
+    unsafe fn prepare_insert_slot(&mut self, hash: u64) -> (usize, u8) {
+        // SAFETY: Caller of this function ensures that the control bytes are properly initialized.
+        let index: usize = self.find_insert_slot(hash).index;
+        // SAFETY:
+        // 1. The `find_insert_slot` function either returns an `index` less than or
+        //    equal to `self.buckets() = self.bucket_mask + 1` of the table, or never
+        //    returns if it cannot find an empty or deleted slot.
+        // 2. The caller of this function guarantees that the table has already been
+        //    allocated
+        let old_ctrl = *self.ctrl(index);
+        self.set_ctrl_h2(index, hash);
+        (index, old_ctrl)
+    }
+
+    /// Searches for an empty or deleted bucket which is suitable for inserting
+    /// a new element, returning the `index` for the new [`Bucket`].
+    ///
+    /// This function does not make any changes to the `data` part of the table, or any
+    /// changes to the `items` or `growth_left` field of the table.
+    ///
+    /// The table must have at least 1 empty or deleted `bucket`, otherwise this function
+    /// will never return (will go into an infinite loop) for tables larger than the group
+    /// width, or return an index outside of the table indices range if the table is less
+    /// than the group width.
+    ///
+    /// If there is at least 1 empty or deleted `bucket` in the table, the function is
+    /// guaranteed to return [`InsertSlot`] with an index in the range `0..self.buckets()`,
+    /// but in any case, if this function returns [`InsertSlot`], it will contain an index
+    /// in the range `0..=self.buckets()`.
+    ///
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes otherwise calling
+    /// this function results in [`undefined behavior`].
+    ///
+    /// Attempt to write data at the [`InsertSlot`] returned by this function when the table is
+    /// less than the group width and if there was not at least one empty or deleted bucket in
+    /// the table will cause immediate [`undefined behavior`]. This is because in this case the
+    /// function will return `self.bucket_mask + 1` as an index due to the trailing [`EMPTY`]
+    /// control bytes outside the table range.
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn find_insert_slot(&self, hash: u64) -> InsertSlot {
+        let mut probe_seq = self.probe_seq(hash);
+        loop {
+            // SAFETY:
+            // * Caller of this function ensures that the control bytes are properly initialized.
+            //
+            // * `ProbeSeq.pos` cannot be greater than `self.bucket_mask = self.buckets() - 1`
+            //   of the table due to masking with `self.bucket_mask` and also because the number of
+            //   buckets is a power of two (see `self.probe_seq` function).
+            //
+            // * Even if `ProbeSeq.pos` returns `position == self.bucket_mask`, it is safe to
+            //   call `Group::load` due to the extended control bytes range, which is
+            //  `self.bucket_mask + 1 + Group::WIDTH` (in fact, this means that the last control
+            //   byte will never be read for the allocated table);
+            //
+            // * Also, even if `RawTableInner` is not already allocated, `ProbeSeq.pos` will
+            //   always return "0" (zero), so Group::load will read unaligned `Group::static_empty()`
+            //   bytes, which is safe (see RawTableInner::new).
+            let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
+
+            let index = self.find_insert_slot_in_group(&group, &probe_seq);
+            if likely(index.is_some()) {
+                // SAFETY:
+                // * Caller of this function ensures that the control bytes are properly initialized.
+                //
+                // * We use this function with the slot / index found by `self.find_insert_slot_in_group`
+                unsafe {
+                    return self.fix_insert_slot(index.unwrap_unchecked());
+                }
+            }
+            probe_seq.move_next(self.bucket_mask);
+        }
+    }
+
+    /// Searches for an element in a table, returning the `index` of the found element.
+    /// This uses dynamic dispatch to reduce the amount of code generated, but it is
+    /// eliminated by LLVM optimizations.
+    ///
+    /// This function does not make any changes to the `data` part of the table, or any
+    /// changes to the `items` or `growth_left` field of the table.
+    ///
+    /// The table must have at least 1 empty `bucket`, otherwise, if the
+    /// `eq: &mut dyn FnMut(usize) -> bool` function does not return `true`,
+    /// this function will also never return (will go into an infinite loop).
+    ///
+    /// This function is guaranteed to provide the `eq: &mut dyn FnMut(usize) -> bool`
+    /// function with only `FULL` buckets' indices and return the `index` of the found
+    /// element as `Some(index)`, so the index will always be in the range
+    /// `0..self.buckets()`.
+    ///
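+    /// A sketch of how an outer, typed layer might drive this function (editorial
+    /// illustration; `table: &RawTable<(K, V)>`, `key: &K`, and `hash` are assumed
+    /// context, not part of this method):
+    ///
+    /// ```ignore
+    /// let mut eq = |index: usize| {
+    ///     // `find_inner` only calls `eq` with indices of FULL buckets.
+    ///     unsafe { table.bucket(index).as_ref().0 == *key }
+    /// };
+    /// let found: Option<usize> = unsafe { table.table.find_inner(hash, &mut eq) };
+    /// ```
+    ///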
+    /// # Safety
+    ///
+    /// The [`RawTableInner`] must have properly initialized control bytes otherwise calling
+    /// this function results in [`undefined behavior`].
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline(always)]
+    unsafe fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option<usize> {
+        let h2_hash = h2(hash);
+        let mut probe_seq = self.probe_seq(hash);
+
+        loop {
+            // SAFETY:
+            // * Caller of this function ensures that the control bytes are properly initialized.
+            //
+            // * `ProbeSeq.pos` cannot be greater than `self.bucket_mask = self.buckets() - 1`
+            //   of the table due to masking with `self.bucket_mask`.
+            //
+            // * Even if `ProbeSeq.pos` returns `position == self.bucket_mask`, it is safe to
+            //   call `Group::load` due to the extended control bytes range, which is
+            //  `self.bucket_mask + 1 + Group::WIDTH` (in fact, this means that the last control
+            //   byte will never be read for the allocated table);
+            //
+            // * Also, even if `RawTableInner` is not already allocated, `ProbeSeq.pos` will
+            //   always return "0" (zero), so Group::load will read unaligned `Group::static_empty()`
+            //   bytes, which is safe (see RawTableInner::new_in).
+            let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
+
+            for bit in group.match_byte(h2_hash) {
+                // This is the same as `(probe_seq.pos + bit) % self.buckets()` because the number
+                // of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+                let index = (probe_seq.pos + bit) & self.bucket_mask;
+
+                if likely(eq(index)) {
+                    return Some(index);
+                }
+            }
+
+            if likely(group.match_empty().any_bit_set()) {
+                return None;
+            }
+
+            probe_seq.move_next(self.bucket_mask);
+        }
+    }
+
+    /// Prepares for rehashing data in place (that is, without allocating new memory).
+    /// Converts all `FULL` control bytes to `DELETED` and all `DELETED` control
+    /// bytes to `EMPTY`, i.e. performs the following conversion:
+    ///
+    /// - `EMPTY` control bytes   -> `EMPTY`;
+    /// - `DELETED` control bytes -> `EMPTY`;
+    /// - `FULL` control bytes    -> `DELETED`.
+    ///
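+    /// A scalar sketch of that per-byte mapping (editorial illustration; the real code
+    /// performs it group-wise via `convert_special_to_empty_and_full_to_deleted`, and
+    /// `convert_one` is a hypothetical helper):
+    ///
+    /// ```ignore
+    /// fn convert_one(ctrl: u8) -> u8 {
+    ///     // EMPTY (0xFF) and DELETED (0x80) have the high bit set; FULL bytes do not.
+    ///     if ctrl & 0x80 != 0 { EMPTY } else { DELETED }
+    /// }
+    /// assert_eq!(convert_one(EMPTY), EMPTY);
+    /// assert_eq!(convert_one(DELETED), EMPTY);
+    /// assert_eq!(convert_one(0x1A), DELETED); // a FULL byte (h2 bits) becomes DELETED
+    /// ```
+    ///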
+    /// This function does not make any changes to the `data` parts of the table,
+    /// or any changes to the `items` or `growth_left` field of the table.
+    ///
+    /// # Safety
+    ///
+    /// You must observe the following safety rules when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The caller of this function must convert the `DELETED` bytes back to `FULL`
+    ///   bytes when re-inserting them into their ideal position (which was impossible
+    ///   to do during the first insert due to tombstones). If the caller does not do
+    ///   this, then calling this function may result in a memory leak.
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes otherwise
+    ///   calling this function results in [`undefined behavior`].
+    ///
+    /// Calling this function on a table that has not been allocated results in
+    /// [`undefined behavior`].
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving a `data element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::mut_mut)]
+    #[inline]
+    unsafe fn prepare_rehash_in_place(&mut self) {
+        // Bulk convert all full control bytes to DELETED, and all DELETED control bytes to EMPTY.
+        // This effectively frees up all buckets containing a DELETED entry.
+        //
+        // SAFETY:
+        // 1. `i` is guaranteed to be within bounds since we are iterating from zero to `buckets - 1`;
+        // 2. Even if `i` will be `i == self.bucket_mask`, it is safe to call `Group::load_aligned`
+        //    due to the extended control bytes range, which is `self.bucket_mask + 1 + Group::WIDTH`;
+        // 3. The caller of this function guarantees that [`RawTableInner`] has already been allocated;
+        // 4. We can use `Group::load_aligned` and `Group::store_aligned` here since we start from 0
+        //    and go to the end with a step equal to `Group::WIDTH` (see TableLayout::calculate_layout_for).
+        for i in (0..self.buckets()).step_by(Group::WIDTH) {
+            let group = Group::load_aligned(self.ctrl(i));
+            let group = group.convert_special_to_empty_and_full_to_deleted();
+            group.store_aligned(self.ctrl(i));
+        }
+
+        // Fix up the trailing control bytes. See the comments in set_ctrl
+        // for the handling of tables smaller than the group width.
+        //
+        // SAFETY: The caller of this function guarantees that [`RawTableInner`]
+        // has already been allocated
+        if unlikely(self.buckets() < Group::WIDTH) {
+            // SAFETY: We have `self.bucket_mask + 1 + Group::WIDTH` number of control bytes,
+            // so copying `self.buckets() == self.bucket_mask + 1` bytes with offset equal to
+            // `Group::WIDTH` is safe
+            self.ctrl(0)
+                .copy_to(self.ctrl(Group::WIDTH), self.buckets());
+        } else {
+            // SAFETY: We have `self.bucket_mask + 1 + Group::WIDTH` number of
+            // control bytes,so copying `Group::WIDTH` bytes with offset equal
+            // to `self.buckets() == self.bucket_mask + 1` is safe
+            self.ctrl(0)
+                .copy_to(self.ctrl(self.buckets()), Group::WIDTH);
+        }
+    }
+
+    /// Returns an iterator over every element in the table.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result
+    /// is [`undefined behavior`]:
+    ///
+    /// * The caller has to ensure that the `RawTableInner` outlives the
+    ///   `RawIter`. Because we cannot make the `next` method unsafe on
+    ///   the `RawIter` struct, we have to make the `iter` method unsafe.
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes.
+    ///
+    /// The type `T` must be the actual type of the elements stored in the table,
+    /// otherwise using the returned [`RawIter`] results in [`undefined behavior`].
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn iter<T>(&self) -> RawIter<T> {
+        // SAFETY:
+        // 1. Since the caller of this function ensures that the control bytes
+        //    are properly initialized and `self.data_end()` points to the start
+        //    of the array of control bytes, therefore: `ctrl` is valid for reads,
+        //    properly aligned to `Group::WIDTH` and points to the properly initialized
+        //    control bytes.
+        // 2. `data` bucket index in the table is equal to the `ctrl` index (i.e.
+        //    equal to zero).
+        // 3. We pass the exact value of buckets of the table to the function.
+        //
+        //                         `ctrl` points here (to the start
+        //                         of the first control byte `CT0`)
+        //                          ∨
+        // [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, CTa_0, CTa_1, ..., CTa_m
+        //                           \________  ________/
+        //                                    \/
+        //       `n = buckets - 1`, i.e. `RawTableInner::buckets() - 1`
+        //
+        // where: T0...T_n  - our stored data;
+        //        CT0...CT_n - control bytes or metadata for `data`.
+        //        CTa_0...CTa_m - additional control bytes, where `m = Group::WIDTH - 1` (so that the search
+        //                        with loading `Group` bytes from the heap works properly, even if the result
+        //                        of `h1(hash) & self.bucket_mask` is equal to `self.bucket_mask`). See also
+        //                        `RawTableInner::set_ctrl` function.
+        //
+        // P.S. `h1(hash) & self.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+        // of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+        let data = Bucket::from_base_index(self.data_end(), 0);
+        RawIter {
+            // SAFETY: See explanation above
+            iter: RawIterRange::new(self.ctrl.as_ptr(), data, self.buckets()),
+            items: self.items,
+        }
+    }
+
+    /// Executes the destructors (if any) of the values stored in the table.
+    ///
+    /// # Note
+    ///
+    /// This function does not erase the control bytes of the table and does
+    /// not make any changes to the `items` or `growth_left` fields of the
+    /// table. If necessary, the caller of this function must manually set
+    /// up these table fields, for example using the [`clear_no_drop`] function.
+    ///
+    /// Be careful when calling this function, because the drop function of
+    /// the elements can panic, and this can leave the table in an inconsistent
+    /// state.
+    ///
+    /// # Safety
+    ///
+    /// The type `T` must be the actual type of the elements stored in the table,
+    /// otherwise calling this function may result in [`undefined behavior`].
+    ///
+    /// If `T` is a type that should be dropped and **the table is not empty**,
+    /// calling this function more than once results in [`undefined behavior`].
+    ///
+    /// If `T` is not [`Copy`], attempting to use values stored in the table after
+    /// calling this function may result in [`undefined behavior`].
+    ///
+    /// It is safe to call this function on a table that has not been allocated,
+    /// on a table with uninitialized control bytes, and on a table with no actual
+    /// data but with `Full` control bytes if `self.items == 0`.
+    ///
+    /// See also the [`Bucket::drop`] / [`Bucket::as_ptr`] methods for more information
+    /// about properly removing or saving an `element` from / into the [`RawTable`] /
+    /// [`RawTableInner`].
+    ///
+    /// [`Bucket::drop`]: Bucket::drop
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`clear_no_drop`]: RawTableInner::clear_no_drop
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    unsafe fn drop_elements<T>(&mut self) {
+        // Check that `self.items != 0`. Protects against the possibility
+        // of creating an iterator on a table with uninitialized control bytes.
+        if T::NEEDS_DROP && self.items != 0 {
+            // SAFETY: We know for sure that RawTableInner will outlive the
+            // returned `RawIter` iterator, and the caller of this function
+            // must uphold the safety contract for `drop_elements` method.
+            for item in self.iter::<T>() {
+                // SAFETY: The caller must uphold the safety contract for
+                // `drop_elements` method.
+                item.drop();
+            }
+        }
+    }
+
+    /// Executes the destructors (if any) of the values stored in the table and then
+    /// deallocates the table.
+    ///
+    /// # Note
+    ///
+    /// Calling this function automatically makes invalid (dangling) all instances of
+    /// buckets ([`Bucket`]) and makes invalid (dangling) the `ctrl` field of the table.
+    ///
+    /// This function does not make any changes to the `bucket_mask`, `items` or `growth_left`
+    /// fields of the table. If necessary, the caller of this function must manually set
+    /// up these table fields.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is [`undefined behavior`]:
+    ///
+    /// * Calling this function more than once;
+    ///
+    /// * The type `T` must be the actual type of the elements stored in the table.
+    ///
+    /// * The `alloc` must be the same [`Allocator`] as the `Allocator` that was used
+    ///   to allocate this table.
+    ///
+    /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout` that
+    ///   was used to allocate this table.
+    ///
+    /// The caller of this function should pay attention to the possibility of the
+    /// elements' drop function panicking, because if it does:
+    ///
+    ///    * the table may be left in an inconsistent state;
+    ///
+    ///    * memory is never deallocated, so a memory leak may occur.
+    ///
+    /// Attempt to use the `ctrl` field of the table (dereference) after calling this
+    /// function results in [`undefined behavior`].
+    ///
+    /// It is safe to call this function on a table that has not been allocated,
+    /// on a table with uninitialized control bytes, and on a table with no actual
+    /// data but with `Full` control bytes if `self.items == 0`.
+    ///
+    /// See also [`RawTableInner::drop_elements`] or [`RawTableInner::free_buckets`]
+    /// for more information.
+    ///
+    /// [`RawTableInner::drop_elements`]: RawTableInner::drop_elements
+    /// [`RawTableInner::free_buckets`]: RawTableInner::free_buckets
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    unsafe fn drop_inner_table<T, A: Allocator>(&mut self, alloc: &A, table_layout: TableLayout) {
+        if !self.is_empty_singleton() {
+            unsafe {
+                // SAFETY: The caller must uphold the safety contract for `drop_inner_table` method.
+                self.drop_elements::<T>();
+                // SAFETY:
+                // 1. We have checked that our table is allocated.
+                // 2. The caller must uphold the safety contract for `drop_inner_table` method.
+                self.free_buckets(alloc, table_layout);
+            }
+        }
+    }
+
+    /// Returns a pointer to an element in the table (convenience for
+    /// `Bucket::from_base_index(self.data_end::<T>(), index)`).
+    ///
+    /// The caller must ensure that the `RawTableInner` outlives the returned [`Bucket<T>`],
+    /// otherwise using it may result in [`undefined behavior`].
+    ///
+    /// # Safety
+    ///
+    /// If `mem::size_of::<T>() != 0`, then the safety rules are directly derived from the
+    /// safety rules of the [`Bucket::from_base_index`] function. Therefore, when calling
+    /// this function, the following safety rules must be observed:
+    ///
+    /// * The table must already be allocated;
+    ///
+    /// * The `index` must not be greater than the number returned by the [`RawTableInner::buckets`]
+    ///   function, i.e. `(index + 1) <= self.buckets()`.
+    ///
+    /// * The type `T` must be the actual type of the elements stored in the table, otherwise
+    ///   using the returned [`Bucket`] may result in [`undefined behavior`].
+    ///
+    /// It is safe to call this function with index of zero (`index == 0`) on a table that has
+    /// not been allocated, but using the returned [`Bucket`] results in [`undefined behavior`].
+    ///
+    /// If `mem::size_of::<T>() == 0`, then the only requirement is that the `index` must
+    /// not be greater than the number returned by the [`RawTable::buckets`] function, i.e.
+    /// `(index + 1) <= self.buckets()`.
+    ///
+    /// ```none
+    /// If mem::size_of::<T>() != 0 then return a pointer to the `element` in the `data part` of the table
+    /// (we start counting from "0", so that in the expression T[n], the "n" index is actually one less than
+    /// the "buckets" number of our `RawTableInner`, i.e. "n = RawTableInner::buckets() - 1"):
+    ///
+    ///           `table.bucket(3).as_ptr()` returns a pointer that points here in the `data`
+    ///           part of the `RawTableInner`, i.e. to the start of T3 (see [`Bucket::as_ptr`])
+    ///                  |
+    ///                  |               `base = table.data_end::<T>()` points here
+    ///                  |               (to the start of CT0 or to the end of T0)
+    ///                  v                 v
+    /// [Pad], T_n, ..., |T3|, T2, T1, T0, |CT0, CT1, CT2, CT3, ..., CT_n, CTa_0, CTa_1, ..., CTa_m
+    ///                     ^                                              \__________  __________/
+    ///        `table.bucket(3)` returns a pointer that points                        \/
+    ///         here in the `data` part of the `RawTableInner`             additional control bytes
+    ///         (to the end of T3)                                          `m = Group::WIDTH - 1`
+    ///
+    /// where: T0...T_n  - our stored data;
+    ///        CT0...CT_n - control bytes or metadata for `data`;
+    ///        CTa_0...CTa_m - additional control bytes (so that the search with loading `Group` bytes from
+    ///                        the heap works properly, even if the result of `h1(hash) & self.bucket_mask`
+    ///                        is equal to `self.bucket_mask`). See also `RawTableInner::set_ctrl` function.
+    ///
+    /// P.S. `h1(hash) & self.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+    /// of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+    /// ```
+    ///
+    /// [`Bucket::from_base_index`]: Bucket::from_base_index
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn bucket<T>(&self, index: usize) -> Bucket<T> {
+        debug_assert_ne!(self.bucket_mask, 0);
+        debug_assert!(index < self.buckets());
+        Bucket::from_base_index(self.data_end(), index)
+    }
+
+    /// Returns a raw `*mut u8` pointer to the start of the `data` element in the table
+    /// (convenience for `self.data_end::<u8>().as_ptr().sub((index + 1) * size_of)`).
+    ///
+    /// The caller must ensure that the `RawTableInner` outlives the returned `*mut u8`,
+    /// otherwise using it may result in [`undefined behavior`].
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is [`undefined behavior`]:
+    ///
+    /// * The table must already be allocated;
+    ///
+    /// * The `index` must not be greater than the number returned by the [`RawTableInner::buckets`]
+    ///   function, i.e. `(index + 1) <= self.buckets()`;
+    ///
+    /// * The `size_of` must be equal to the size of the elements stored in the table;
+    ///
+    /// ```none
+    /// If mem::size_of::<T>() != 0 then return a pointer to the `element` in the `data part` of the table
+    /// (we start counting from "0", so that in the expression T[n], the "n" index is actually one less than
+    /// the "buckets" number of our `RawTableInner`, i.e. "n = RawTableInner::buckets() - 1"):
+    ///
+    ///           `table.bucket_ptr(3, mem::size_of::<T>())` returns a pointer that points here in the
+    ///           `data` part of the `RawTableInner`, i.e. to the start of T3
+    ///                  |
+    ///                  |               `base = table.data_end::<u8>()` points here
+    ///                  |               (to the start of CT0 or to the end of T0)
+    ///                  v                 v
+    /// [Pad], T_n, ..., |T3|, T2, T1, T0, |CT0, CT1, CT2, CT3, ..., CT_n, CTa_0, CTa_1, ..., CTa_m
+    ///                                                                    \__________  __________/
+    ///                                                                               \/
+    ///                                                                    additional control bytes
+    ///                                                                     `m = Group::WIDTH - 1`
+    ///
+    /// where: T0...T_n  - our stored data;
+    ///        CT0...CT_n - control bytes or metadata for `data`;
+    ///        CTa_0...CTa_m - additional control bytes (so that the search with loading `Group` bytes from
+    ///                        the heap works properly, even if the result of `h1(hash) & self.bucket_mask`
+    ///                        is equal to `self.bucket_mask`). See also `RawTableInner::set_ctrl` function.
+    ///
+    /// P.S. `h1(hash) & self.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+    /// of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+    /// ```
+    ///
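+    /// A sketch of the pointer arithmetic with made-up numbers (editorial illustration;
+    /// the address and element size below are assumptions, not real values):
+    ///
+    /// ```ignore
+    /// let size_of = 8usize;          // assumed element size
+    /// let base = 1_024usize;         // assumed address returned by data_end::<u8>()
+    /// let start_of = |index: usize| base - (index + 1) * size_of;
+    /// assert_eq!(start_of(0), 1_016); // T0 sits immediately below the control bytes
+    /// assert_eq!(start_of(3), 992);   // T3 starts (3 + 1) * 8 bytes below them
+    /// ```
+    ///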
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 {
+        debug_assert_ne!(self.bucket_mask, 0);
+        debug_assert!(index < self.buckets());
+        let base: *mut u8 = self.data_end().as_ptr();
+        base.sub((index + 1) * size_of)
+    }
+
+    /// Returns a pointer to one past the last `data` element in the table as viewed from
+    /// the start point of the allocation (convenience for `self.ctrl.cast()`).
+    ///
+    /// This function actually returns a pointer to the end of the `data element` at
+    /// index "0" (zero).
+    ///
+    /// The caller must ensure that the `RawTableInner` outlives the returned [`NonNull<T>`],
+    /// otherwise using it may result in [`undefined behavior`].
+    ///
+    /// # Note
+    ///
+    /// The type `T` must be the actual type of the elements stored in the table, otherwise
+    /// using the returned [`NonNull<T>`] may result in [`undefined behavior`].
+    ///
+    /// ```none
+    ///                        `table.data_end::<T>()` returns pointer that points here
+    ///                        (to the end of `T0`)
+    ///                          ∨
+    /// [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, CTa_0, CTa_1, ..., CTa_m
+    ///                           \________  ________/
+    ///                                    \/
+    ///       `n = buckets - 1`, i.e. `RawTableInner::buckets() - 1`
+    ///
+    /// where: T0...T_n  - our stored data;
+    ///        CT0...CT_n - control bytes or metadata for `data`.
+    ///        CTa_0...CTa_m - additional control bytes, where `m = Group::WIDTH - 1` (so that the search
+    ///                        with loading `Group` bytes from the heap works properly, even if the result
+    ///                        of `h1(hash) & self.bucket_mask` is equal to `self.bucket_mask`). See also
+    ///                        `RawTableInner::set_ctrl` function.
+    ///
+    /// P.S. `h1(hash) & self.bucket_mask` is the same as `hash as usize % self.buckets()` because the number
+    /// of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+    /// ```
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    fn data_end<T>(&self) -> NonNull<T> {
+        self.ctrl.cast()
+    }
+
+    /// Returns an iterator-like object for a probe sequence on the table.
+    ///
+    /// This iterator never terminates, but is guaranteed to visit each bucket
+    /// group exactly once. The loop using `probe_seq` must terminate upon
+    /// reaching a group containing an empty bucket.
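+    ///
+    /// A sketch of the resulting (triangular) probe sequence with made-up parameters
+    /// (editorial illustration; a hypothetical group width of 4 and a 16-bucket table):
+    ///
+    /// ```ignore
+    /// let (bucket_mask, group_width) = (15_usize, 4_usize);
+    /// let mut pos = 7; // h1(hash) & bucket_mask
+    /// let mut stride = 0;
+    /// let mut visited = vec![pos];
+    /// for _ in 0..3 {
+    ///     stride += group_width;            // see `ProbeSeq::move_next`
+    ///     pos = (pos + stride) & bucket_mask;
+    ///     visited.push(pos);
+    /// }
+    /// assert_eq!(visited, [7, 11, 3, 15]); // one probe start per group of 4 buckets
+    /// ```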
+    #[inline]
+    fn probe_seq(&self, hash: u64) -> ProbeSeq {
+        ProbeSeq {
+            // This is the same as `hash as usize % self.buckets()` because the number
+            // of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+            pos: h1(hash) & self.bucket_mask,
+            stride: 0,
+        }
+    }
+
+    /// Returns the index of a bucket into which a value can be inserted if there is enough room
+    /// in the table, otherwise returns an error.
+    #[cfg(feature = "raw")]
+    #[inline]
+    unsafe fn prepare_insert_no_grow(&mut self, hash: u64) -> Result<usize, ()> {
+        let index = self.find_insert_slot(hash).index;
+        let old_ctrl = *self.ctrl(index);
+        if unlikely(self.growth_left == 0 && special_is_empty(old_ctrl)) {
+            Err(())
+        } else {
+            self.record_item_insert_at(index, old_ctrl, hash);
+            Ok(index)
+        }
+    }
+
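+    /// Updates the bookkeeping for an insertion at `index`: decrements `growth_left`
+    /// if the old control byte was `EMPTY`, writes `h2(hash)` into the control byte
+    /// (replicating it if necessary), and increments `items`.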
+    #[inline]
+    unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) {
+        self.growth_left -= usize::from(special_is_empty(old_ctrl));
+        self.set_ctrl_h2(index, hash);
+        self.items += 1;
+    }
+
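+    /// Returns `true` if the buckets at `i` and `new_i` fall into the same probe group
+    /// for the probe sequence derived from `hash`, measured relative to the start of
+    /// that probe sequence.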
+    #[inline]
+    fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool {
+        let probe_seq_pos = self.probe_seq(hash).pos;
+        let probe_index =
+            |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH;
+        probe_index(i) == probe_index(new_i)
+    }
+
+    /// Sets a control byte to the hash, and possibly also the replicated control byte at
+    /// the end of the array.
+    ///
+    /// This function does not make any changes to the `data` parts of the table,
+    /// or any changes to the `items` or `growth_left` field of the table.
+    ///
+    /// # Safety
+    ///
+    /// The safety rules are directly derived from the safety rules for [`RawTableInner::set_ctrl`]
+    /// method. Thus, in order to uphold the safety contracts for the method, you must observe the
+    /// following rules when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The `index` must not be greater than the `RawTableInner.bucket_mask`, i.e.
+    ///   `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)` must
+    ///   be no greater than the number returned by the function [`RawTableInner::buckets`].
+    ///
+    /// Calling this function on a table that has not been allocated results in [`undefined behavior`].
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving a `data element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`RawTableInner::set_ctrl`]: RawTableInner::set_ctrl
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn set_ctrl_h2(&mut self, index: usize, hash: u64) {
+        // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::set_ctrl_h2`]
+        self.set_ctrl(index, h2(hash));
+    }
+
+    /// Replaces the hash in the control byte at the given index with the provided one,
+    /// and possibly also replicates the new control byte at the end of the array of control
+    /// bytes, returning the old control byte.
+    ///
+    /// This function does not make any changes to the `data` parts of the table,
+    /// or any changes to the `items` or `growth_left` field of the table.
+    ///
+    /// # Safety
+    ///
+    /// The safety rules are directly derived from the safety rules for [`RawTableInner::set_ctrl_h2`]
+    /// and [`RawTableInner::ctrl`] methods. Thus, in order to uphold the safety contracts for both
+    /// methods, you must observe the following rules when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The `index` must not be greater than the `RawTableInner.bucket_mask`, i.e.
+    ///   `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)` must
+    ///   be no greater than the number returned by the function [`RawTableInner::buckets`].
+    ///
+    /// Calling this function on a table that has not been allocated results in [`undefined behavior`].
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving a `data element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`RawTableInner::set_ctrl_h2`]: RawTableInner::set_ctrl_h2
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn replace_ctrl_h2(&mut self, index: usize, hash: u64) -> u8 {
+        // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::replace_ctrl_h2`]
+        let prev_ctrl = *self.ctrl(index);
+        self.set_ctrl_h2(index, hash);
+        prev_ctrl
+    }
+
+    /// Sets a control byte, and possibly also the replicated control byte at
+    /// the end of the array.
+    ///
+    /// This function does not make any changes to the `data` parts of the table,
+    /// or any changes to the `items` or `growth_left` field of the table.
+    ///
+    /// # Safety
+    ///
+    /// You must observe the following safety rules when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The `index` must not be greater than the `RawTableInner.bucket_mask`, i.e.
+    ///   `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)` must
+    ///   be no greater than the number returned by the function [`RawTableInner::buckets`].
+    ///
+    /// Calling this function on a table that has not been allocated results in [`undefined behavior`].
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving a `data element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn set_ctrl(&mut self, index: usize, ctrl: u8) {
+        // Replicate the first Group::WIDTH control bytes at the end of
+        // the array without using a branch. If the table is smaller than
+        // the group width (self.buckets() < Group::WIDTH),
+        // `index2 = Group::WIDTH + index`, otherwise `index2` is:
+        //
+        // - If index >= Group::WIDTH then index == index2.
+        // - Otherwise index2 == self.bucket_mask + 1 + index.
+        //
+        // The very last replicated control byte is never actually read because
+        // we mask the initial index for unaligned loads, but we write it
+        // anyway because it makes the set_ctrl implementation simpler.
+        //
+        // If there are fewer buckets than Group::WIDTH then this code will
+        // replicate the buckets at the end of the trailing group. For example
+        // with 2 buckets and a group size of 4, the control bytes will look
+        // like this:
+        //
+        //     Real    |             Replicated
+        // ---------------------------------------------
+        // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] |
+        // ---------------------------------------------
+
+        // This is the same as `(index.wrapping_sub(Group::WIDTH)) % self.buckets() + Group::WIDTH`
+        // because the number of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+        let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;
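+        // Worked example (editorial note, with a hypothetical Group::WIDTH of 4): a
+        // 2-bucket table (bucket_mask == 1) maps index 0 -> index2 == 4 and
+        // index 1 -> index2 == 5, while a 32-bucket table (bucket_mask == 31) maps
+        // every index >= 4 to itself.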
+
+        // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::set_ctrl`]
+        *self.ctrl(index) = ctrl;
+        *self.ctrl(index2) = ctrl;
+    }
+
+    /// Returns a pointer to a control byte.
+    ///
+    /// # Safety
+    ///
+    /// For an allocated [`RawTableInner`], the result is [`Undefined Behavior`]
+    /// if the `index` is greater than `self.bucket_mask + 1 + Group::WIDTH`.
+    /// Calling this function with `index == self.bucket_mask + 1 + Group::WIDTH`
+    /// returns a pointer to the end of the allocated table, which is useless on its own.
+    ///
+    /// Calling this function with `index >= self.bucket_mask + 1 + Group::WIDTH` on a
+    /// table that has not been allocated results in [`Undefined Behavior`].
+    ///
+    /// So to satisfy both requirements you should always follow the rule that
+    /// `index < self.bucket_mask + 1 + Group::WIDTH`.
+    ///
+    /// Calling this function on a [`RawTableInner`] that has not been allocated is safe
+    /// for read-only purposes.
+    ///
+    /// See also the [`Bucket::as_ptr()`] method for more information about properly removing
+    /// or saving a `data element` from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`Bucket::as_ptr()`]: Bucket::as_ptr()
+    /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn ctrl(&self, index: usize) -> *mut u8 {
+        debug_assert!(index < self.num_ctrl_bytes());
+        // SAFETY: The caller must uphold the safety rules for the [`RawTableInner::ctrl`]
+        self.ctrl.as_ptr().add(index)
+    }
+
+    #[inline]
+    fn buckets(&self) -> usize {
+        self.bucket_mask + 1
+    }
+
+    /// Checks whether the bucket at `index` is full.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure `index` is less than the number of buckets.
+    #[inline]
+    unsafe fn is_bucket_full(&self, index: usize) -> bool {
+        debug_assert!(index < self.buckets());
+        is_full(*self.ctrl(index))
+    }
+
+    #[inline]
+    fn num_ctrl_bytes(&self) -> usize {
+        self.bucket_mask + 1 + Group::WIDTH
+    }
+
+    #[inline]
+    fn is_empty_singleton(&self) -> bool {
+        self.bucket_mask == 0
+    }
+
+    /// Attempts to allocate a new hash table with at least enough capacity
+    /// for inserting the given number of elements without reallocating,
+    /// and returns it inside a `ScopeGuard` to protect against panics in the hash
+    /// function.
+    ///
+    /// # Note
+    ///
+    /// It is recommended (but not required) that:
+    ///
+    /// * the new table's `capacity` be greater than or equal to `self.items`;
+    ///
+    /// * the `alloc` be the same [`Allocator`] as the `Allocator` used
+    ///   to allocate this table;
+    ///
+    /// * the `table_layout` be the same [`TableLayout`] as the `TableLayout` used
+    ///   to allocate this table.
+    ///
+    /// If `table_layout` does not match the `TableLayout` that was used to allocate
+    /// this table, then using `mem::swap` with the `self` and the new table returned
+    /// by this function results in [`undefined behavior`].
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::mut_mut)]
+    #[inline]
+    fn prepare_resize<'a, A>(
+        &self,
+        alloc: &'a A,
+        table_layout: TableLayout,
+        capacity: usize,
+        fallibility: Fallibility,
+    ) -> Result<crate::scopeguard::ScopeGuard<Self, impl FnMut(&mut Self) + 'a>, TryReserveError>
+    where
+        A: Allocator,
+    {
+        debug_assert!(self.items <= capacity);
+
+        // Allocate and initialize the new table.
+        let new_table =
+            RawTableInner::fallible_with_capacity(alloc, table_layout, capacity, fallibility)?;
+
+        // The hash function may panic, in which case we simply free the new
+        // table without dropping any elements that may have been copied into
+        // it.
+        //
+        // This guard is also used to free the old table on success, see
+        // the comment at the bottom of this function.
+        Ok(guard(new_table, move |self_| {
+            if !self_.is_empty_singleton() {
+                // SAFETY:
+                // 1. We have checked that our table is allocated.
+                // 2. We know for sure that the `alloc` and `table_layout` matches the
+                //    [`Allocator`] and [`TableLayout`] used to allocate this table.
+                unsafe { self_.free_buckets(alloc, table_layout) };
+            }
+        }))
+    }
+
+    /// Reserves or rehashes to make room for `additional` more elements.
+    ///
+    /// This uses dynamic dispatch to reduce the amount of
+    /// code generated, but it is eliminated by LLVM optimizations when inlined.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`undefined behavior`]:
+    ///
+    /// * The `alloc` must be the same [`Allocator`] as the `Allocator` used
+    ///   to allocate this table.
+    ///
+    /// * The `layout` must be the same [`TableLayout`] as the `TableLayout`
+    ///   used to allocate this table.
+    ///
+    /// * The `drop` function (`fn(*mut u8)`) must be the actual drop function of
+    ///   the elements stored in the table.
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes.
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::inline_always)]
+    #[inline(always)]
+    unsafe fn reserve_rehash_inner<A>(
+        &mut self,
+        alloc: &A,
+        additional: usize,
+        hasher: &dyn Fn(&mut Self, usize) -> u64,
+        fallibility: Fallibility,
+        layout: TableLayout,
+        drop: Option<fn(*mut u8)>,
+    ) -> Result<(), TryReserveError>
+    where
+        A: Allocator,
+    {
+        // Avoid `Option::ok_or_else` because it bloats LLVM IR.
+        let new_items = match self.items.checked_add(additional) {
+            Some(new_items) => new_items,
+            None => return Err(fallibility.capacity_overflow()),
+        };
+        let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
+        if new_items <= full_capacity / 2 {
+            // Rehash in-place without re-allocating if we have plenty of spare
+            // capacity that is locked up due to DELETED entries.
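+            // (Editorial example: with 16 buckets the usable capacity is 14 at the
+            // 7/8 load factor, so this in-place rehash is chosen only while
+            // `new_items <= 7`.)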
+
+            // SAFETY:
+            // 1. We know for sure that `[`RawTableInner`]` has already been allocated
+            //    (since new_items <= full_capacity / 2);
+            // 2. The caller ensures that `drop` function is the actual drop function of
+            //    the elements stored in the table.
+            // 3. The caller ensures that `layout` matches the [`TableLayout`] that was
+            //    used to allocate this table.
+            // 4. The caller ensures that the control bytes of the `RawTableInner`
+            //    are already initialized.
+            self.rehash_in_place(hasher, layout.size, drop);
+            Ok(())
+        } else {
+            // Otherwise, conservatively resize to at least the next size up
+            // to avoid churning deletes into frequent rehashes.
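+            //
+            // Illustrative: continuing the example above, `new_items == 40 > 28`
+            // resizes to `usize::max(40, 57) == 57`, which maps to a larger
+            // power-of-two bucket count.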
+            //
+            // SAFETY:
+            // 1. We know for sure that `capacity >= self.items`.
+            // 2. The caller ensures that `alloc` and `layout` match the [`Allocator`] and
+            //    [`TableLayout`] that were used to allocate this table.
+            // 3. The caller ensures that the control bytes of the `RawTableInner`
+            //    are already initialized.
+            self.resize_inner(
+                alloc,
+                usize::max(new_items, full_capacity + 1),
+                hasher,
+                fallibility,
+                layout,
+            )
+        }
+    }
+
+    /// Returns an iterator over the indices of full buckets in the table.
+    ///
+    /// # Safety
+    ///
+    /// Behavior is undefined if any of the following conditions are violated:
+    ///
+    /// * The caller has to ensure that the `RawTableInner` outlives the
+    ///   `FullBucketsIndices`. Because we cannot make the `next` method
+    ///   unsafe on the `FullBucketsIndices` struct, we have to make the
+    ///   `full_buckets_indices` method unsafe.
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes.
+    #[inline(always)]
+    unsafe fn full_buckets_indices(&self) -> FullBucketsIndices {
+        // SAFETY:
+        // 1. The caller of this function ensures that the control bytes are
+        //    properly initialized and that `self.ctrl(0)` points to the start
+        //    of the array of control bytes. Therefore `ctrl` is valid for reads,
+        //    is properly aligned to `Group::WIDTH` and points to properly
+        //    initialized control bytes.
+        // 2. The value of `items` is equal to the number of elements (values)
+        //    added to the table.
+        //
+        //                         `ctrl` points here (to the start
+        //                         of the first control byte `CT0`)
+        //                          ∨
+        // [Pad], T_n, ..., T1, T0, |CT0, CT1, ..., CT_n|, Group::WIDTH
+        //                           \________  ________/
+        //                                    \/
+        //       `n = buckets - 1`, i.e. `RawTableInner::buckets() - 1`
+        //
+        // where: T0...T_n  - our stored data;
+        //        CT0...CT_n - control bytes or metadata for `data`.
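+        //
+        // Illustrative walk-through: starting at `CT0`, the returned iterator loads
+        // `Group::WIDTH` control bytes at a time, yields `group_first_index + bit`
+        // for every FULL byte in the loaded group, and stops once `items` indices
+        // have been yielded.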
+        let ctrl = NonNull::new_unchecked(self.ctrl(0));
+
+        FullBucketsIndices {
+            // Load the first group
+            // SAFETY: See explanation above.
+            current_group: Group::load_aligned(ctrl.as_ptr()).match_full().into_iter(),
+            group_first_index: 0,
+            ctrl,
+            items: self.items,
+        }
+    }
+
+    /// Allocates a new table of a different size and moves the contents of the
+    /// current table into it.
+    ///
+    /// This uses dynamic dispatch to reduce the amount of
+    /// code generated, but it is eliminated by LLVM optimizations when inlined.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`undefined behavior`]:
+    ///
+    /// * The `alloc` must be the same [`Allocator`] as the `Allocator` used
+    ///   to allocate this table;
+    ///
+    /// * The `layout` must be the same [`TableLayout`] as the `TableLayout`
+    ///   used to allocate this table;
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes.
+    ///
+    /// The caller of this function must ensure that `capacity >= self.items`,
+    /// otherwise:
+    ///
+    /// * If `self.items != 0`, calling this function with `capacity == 0`
+    ///   results in [`undefined behavior`].
+    ///
+    /// * If `capacity_to_buckets(capacity) < Group::WIDTH` and
+    ///   `self.items > capacity_to_buckets(capacity)`, calling this function
+    ///   results in [`undefined behavior`].
+    ///
+    /// * If `capacity_to_buckets(capacity) >= Group::WIDTH` and
+    ///   `self.items > capacity_to_buckets(capacity)`, this function will
+    ///   never return (it will go into an infinite loop).
+    ///
+    /// Note: It is recommended (but not required) that the new table's `capacity`
+    /// be greater than or equal to `self.items`. If `capacity <= self.items`,
+    /// this function may never return. See [`RawTableInner::find_insert_slot`] for
+    /// more information.
+    ///
+    /// [`RawTableInner::find_insert_slot`]: RawTableInner::find_insert_slot
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::inline_always)]
+    #[inline(always)]
+    unsafe fn resize_inner<A>(
+        &mut self,
+        alloc: &A,
+        capacity: usize,
+        hasher: &dyn Fn(&mut Self, usize) -> u64,
+        fallibility: Fallibility,
+        layout: TableLayout,
+    ) -> Result<(), TryReserveError>
+    where
+        A: Allocator,
+    {
+        // SAFETY: We know for sure that `alloc` and `layout` match the [`Allocator`] and [`TableLayout`]
+        // that were used to allocate this table.
+        let mut new_table = self.prepare_resize(alloc, layout, capacity, fallibility)?;
+
+        // SAFETY: We know for sure that RawTableInner will outlive the
+        // returned `FullBucketsIndices` iterator, and the caller of this
+        // function ensures that the control bytes are properly initialized.
+        for full_byte_index in self.full_buckets_indices() {
+            // This may panic.
+            let hash = hasher(self, full_byte_index);
+
+            // SAFETY:
+            // We can use a simpler version of insert() here since:
+            // 1. There are no DELETED entries.
+            // 2. We know there is enough space in the table.
+            // 3. All elements are unique.
+            // 4. The caller of this function guarantees that `capacity > 0`
+            //    so `new_table` must already have some allocated memory.
+            // 5. We set `growth_left` and `items` fields of the new table
+            //    after the loop.
+            // 6. We insert into the table, at the returned index, the data
+            //    matching the given hash immediately after calling this function.
+            let (new_index, _) = new_table.prepare_insert_slot(hash);
+
+            // SAFETY:
+            //
+            // * `src` is valid for reads of `layout.size` bytes, since the
+            //   table is alive and the `full_byte_index` is guaranteed to be
+            //   within bounds (see `FullBucketsIndices::next_impl`);
+            //
+            // * `dst` is valid for writes of `layout.size` bytes, since the
+            //   caller ensures that `layout` matches the [`TableLayout`]
+            //   that was used to allocate the old table and we have the `new_index`
+            //   returned by `prepare_insert_slot`.
+            //
+            // * Both `src` and `dst` are properly aligned.
+            //
+            // * `src` and `dst` point to different regions of memory.
+            ptr::copy_nonoverlapping(
+                self.bucket_ptr(full_byte_index, layout.size),
+                new_table.bucket_ptr(new_index, layout.size),
+                layout.size,
+            );
+        }
+
+        // The hash function didn't panic, so we can safely set the
+        // `growth_left` and `items` fields of the new table.
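+        //
+        // Illustrative: moving 7 items into a fresh 16-bucket table (capacity 14
+        // under the usual 7/8 load factor, all of it still `growth_left`) leaves
+        // `growth_left == 14 - 7 == 7` and `items == 7`.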
+        new_table.growth_left -= self.items;
+        new_table.items = self.items;
+
+        // We successfully copied all elements without panicking. Now replace
+        // self with the new table. The old table will have its memory freed but
+        // the items will not be dropped (since they have been moved into the
+        // new table).
+        // SAFETY: The caller ensures that `layout` matches the [`TableLayout`]
+        // that was used to allocate this table.
+        mem::swap(self, &mut new_table);
+
+        Ok(())
+    }
+
+    /// Rehashes the contents of the table in place (i.e. without changing the
+    /// allocation).
+    ///
+    /// If `hasher` panics, then some of the table's contents may be lost.
+    ///
+    /// This uses dynamic dispatch to reduce the amount of
+    /// code generated, but it is eliminated by LLVM optimizations when inlined.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is [`undefined behavior`]:
+    ///
+    /// * The `size_of` must be equal to the size of the elements stored in the table;
+    ///
+    /// * The `drop` function (`fn(*mut u8)`) must be the actual drop function of
+    ///   the elements stored in the table.
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The [`RawTableInner`] must have properly initialized control bytes.
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::inline_always)]
+    #[cfg_attr(feature = "inline-more", inline(always))]
+    #[cfg_attr(not(feature = "inline-more"), inline)]
+    unsafe fn rehash_in_place(
+        &mut self,
+        hasher: &dyn Fn(&mut Self, usize) -> u64,
+        size_of: usize,
+        drop: Option<fn(*mut u8)>,
+    ) {
+        // If the hash function panics, then properly clean up any elements
+        // that we haven't rehashed yet. We unfortunately can't preserve the
+        // elements since we have lost their hashes and have no way of recovering
+        // them without risking another panic.
+        self.prepare_rehash_in_place();
+
+        let mut guard = guard(self, move |self_| {
+            if let Some(drop) = drop {
+                for i in 0..self_.buckets() {
+                    if *self_.ctrl(i) == DELETED {
+                        self_.set_ctrl(i, EMPTY);
+                        drop(self_.bucket_ptr(i, size_of));
+                        self_.items -= 1;
+                    }
+                }
+            }
+            self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items;
+        });
+
+        // At this point, DELETED elements are elements that we haven't
+        // rehashed yet. Find them and re-insert them at their ideal
+        // position.
+        'outer: for i in 0..guard.buckets() {
+            if *guard.ctrl(i) != DELETED {
+                continue;
+            }
+
+            let i_p = guard.bucket_ptr(i, size_of);
+
+            'inner: loop {
+                // Hash the current item
+                let hash = hasher(*guard, i);
+
+                // Search for a suitable place to put it
+                //
+                // SAFETY: Caller of this function ensures that the control bytes
+                // are properly initialized.
+                let new_i = guard.find_insert_slot(hash).index;
+
+                // Probing works by scanning through all of the control
+                // bytes in groups, which may not be aligned to the group
+                // size. If both the new and old position fall within the
+                // same unaligned group, then there is no benefit in moving
+                // it and we can just continue to the next item.
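+                //
+                // Illustrative: if slot `i` already lies in the probe group where
+                // `find_insert_slot` found `new_i`, re-tagging `i` as FULL via
+                // `set_ctrl_h2` is enough and no bytes need to be moved.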
+                if likely(guard.is_in_same_group(i, new_i, hash)) {
+                    guard.set_ctrl_h2(i, hash);
+                    continue 'outer;
+                }
+
+                let new_i_p = guard.bucket_ptr(new_i, size_of);
+
+                // We are moving the current item to a new position. Write
+                // our H2 to the control byte of the new position.
+                let prev_ctrl = guard.replace_ctrl_h2(new_i, hash);
+                if prev_ctrl == EMPTY {
+                    guard.set_ctrl(i, EMPTY);
+                    // If the target slot is empty, simply move the current
+                    // element into the new slot and clear the old control
+                    // byte.
+                    ptr::copy_nonoverlapping(i_p, new_i_p, size_of);
+                    continue 'outer;
+                } else {
+                    // If the target slot is occupied, swap the two elements
+                    // and then continue processing the element that we just
+                    // swapped into the old slot.
+                    debug_assert_eq!(prev_ctrl, DELETED);
+                    ptr::swap_nonoverlapping(i_p, new_i_p, size_of);
+                    continue 'inner;
+                }
+            }
+        }
+
+        guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items;
+
+        mem::forget(guard);
+    }
+
+    /// Deallocates the table without dropping any entries.
+    ///
+    /// # Note
+    ///
+    /// This function must be called only after [`drop_elements`](RawTableInner::drop_elements),
+    /// otherwise it can lead to leaking memory. Calling this function also invalidates
+    /// (leaves dangling) all [`Bucket`] instances as well as the `ctrl` field of the
+    /// table.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is [`Undefined Behavior`]:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The `alloc` must be the same [`Allocator`] as the `Allocator` that was used
+    ///   to allocate this table.
+    ///
+    /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout` that was used
+    ///   to allocate this table.
+    ///
+    /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information.
+    ///
+    /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc
+    /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate
+    #[inline]
+    unsafe fn free_buckets<A>(&mut self, alloc: &A, table_layout: TableLayout)
+    where
+        A: Allocator,
+    {
+        // SAFETY: The caller must uphold the safety contract for `free_buckets`
+        // method.
+        let (ptr, layout) = self.allocation_info(table_layout);
+        alloc.deallocate(ptr, layout);
+    }
+
+    /// Returns a pointer to the allocated memory and the layout that was used to
+    /// allocate the table.
+    ///
+    /// # Safety
+    ///
+    /// Caller of this function must observe the following safety rules:
+    ///
+    /// * The [`RawTableInner`] has already been allocated, otherwise
+    ///   calling this function results in [`undefined behavior`]
+    ///
+    /// * The `table_layout` must be the same [`TableLayout`] as the `TableLayout`
+    ///   that was used to allocate this table. Failure to comply with this condition
+    ///   may result in [`undefined behavior`].
+    ///
+    /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information.
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc
+    /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate
+    #[inline]
+    unsafe fn allocation_info(&self, table_layout: TableLayout) -> (NonNull<u8>, Layout) {
+        debug_assert!(
+            !self.is_empty_singleton(),
+            "this function can only be called on non-empty tables"
+        );
+
+        // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
+        let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) {
+            Some(lco) => lco,
+            None => unsafe { hint::unreachable_unchecked() },
+        };
+        (
+            // SAFETY: The caller must uphold the safety contract for `allocation_info` method.
+            unsafe { NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)) },
+            layout,
+        )
+    }
+
+    /// Returns a pointer to the allocated memory and the layout that was used to
+    /// allocate the table. If the [`RawTableInner`] has not been allocated, this
+    /// function returns a dangling pointer and the layout of `()` (the unit type).
+    ///
+    /// # Safety
+    ///
+    /// The `table_layout` must be the same [`TableLayout`] as the `TableLayout`
+    /// that was used to allocate this table. Failure to comply with this condition
+    /// may result in [`undefined behavior`].
+    ///
+    /// See also [`GlobalAlloc::dealloc`] or [`Allocator::deallocate`] for more information.
+    ///
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    /// [`GlobalAlloc::dealloc`]: https://doc.rust-lang.org/alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc
+    /// [`Allocator::deallocate`]: https://doc.rust-lang.org/alloc/alloc/trait.Allocator.html#tymethod.deallocate
+    #[cfg(feature = "raw")]
+    unsafe fn allocation_info_or_zero(&self, table_layout: TableLayout) -> (NonNull<u8>, Layout) {
+        if self.is_empty_singleton() {
+            (NonNull::dangling(), Layout::new::<()>())
+        } else {
+            // SAFETY:
+            // 1. We have checked that our table is allocated.
+            // 2. The caller ensures that `table_layout` matches the [`TableLayout`]
+            // that was used to allocate this table.
+            unsafe { self.allocation_info(table_layout) }
+        }
+    }
+
+    /// Marks all table buckets as empty without dropping their contents.
+    #[inline]
+    fn clear_no_drop(&mut self) {
+        if !self.is_empty_singleton() {
+            unsafe {
+                self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes());
+            }
+        }
+        self.items = 0;
+        self.growth_left = bucket_mask_to_capacity(self.bucket_mask);
+    }
+
+    /// Erases the [`Bucket`]'s control byte at the given index so that the bucket is
+    /// no longer treated as full, decreases the table's `items` count and, when
+    /// possible, increases `self.growth_left`.
+    ///
+    /// This function does not actually erase / drop the [`Bucket`] itself, i.e. it
+    /// does not make any changes to the `data` parts of the table. The caller of this
+    /// function must take care to properly drop the `data`, otherwise calling this
+    /// function may result in a memory leak.
+    ///
+    /// # Safety
+    ///
+    /// You must observe the following safety rules when calling this function:
+    ///
+    /// * The [`RawTableInner`] has already been allocated;
+    ///
+    /// * The control byte at the given position must be full;
+    ///
+    /// * The `index` must not be greater than the `RawTableInner.bucket_mask`, i.e.
+    ///   `index <= RawTableInner.bucket_mask` or, in other words, `(index + 1)` must
+    ///   be no greater than the number returned by the function [`RawTableInner::buckets`].
+    ///
+    /// Calling this function on a table that has not been allocated results in [`undefined behavior`].
+    ///
+    /// Calling this function on a table with no elements is unspecified, but calling subsequent
+    /// functions is likely to result in [`undefined behavior`] due to a subtraction overflow
+    /// (`self.items -= 1` overflows when `self.items == 0`).
+    ///
+    /// See also the [`Bucket::as_ptr`] method for more information about properly removing
+    /// or saving a data element from / into the [`RawTable`] / [`RawTableInner`].
+    ///
+    /// [`RawTableInner::buckets`]: RawTableInner::buckets
+    /// [`Bucket::as_ptr`]: Bucket::as_ptr
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline]
+    unsafe fn erase(&mut self, index: usize) {
+        debug_assert!(self.is_bucket_full(index));
+
+        // This is the same as `index.wrapping_sub(Group::WIDTH) % self.buckets()` because
+        // the number of buckets is a power of two, and `self.bucket_mask = self.buckets() - 1`.
+        let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask;
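+        //
+        // Illustrative: with `Group::WIDTH == 16` and a 64-bucket table
+        // (`bucket_mask == 63`), erasing `index == 5` gives
+        // `index_before == (5 - 16) mod 64 == 53`.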
+        // SAFETY:
+        // - The caller must uphold the safety contract for `erase` method;
+        // - `index_before` is guaranteed to be in range due to masking with `self.bucket_mask`
+        let empty_before = Group::load(self.ctrl(index_before)).match_empty();
+        let empty_after = Group::load(self.ctrl(index)).match_empty();
+
+        // Inserting and searching in the map is performed by two key functions:
+        //
+        // - The `find_insert_slot` function that looks up the index of any `EMPTY` or `DELETED`
+        //   slot in a group to be able to insert. If it doesn't find an `EMPTY` or `DELETED`
+        //   slot immediately in the first group, it jumps to the next `Group` looking for it,
+        //   and so on until it has gone through all the groups in the control bytes.
+        //
+        // - The `find_inner` function that looks for the index of the desired element by looking
+        //   at all the `FULL` bytes in the group. If it did not find the element right away, and
+        //   there is no `EMPTY` byte in the group, then this means that the `find_insert_slot`
+        //   function may have found a suitable slot in the next group. Therefore, `find_inner`
+        //   jumps further, and if it does not find the desired element and again there is no `EMPTY`
+        //   byte, then it jumps further, and so on. The search stops only if `find_inner` function
+        //   finds the desired element or hits an `EMPTY` slot/byte.
+        //
+        // Accordingly, this leads to two consequences:
+        //
+        // - The map must have `EMPTY` slots (bytes);
+        //
+        // - You can't just mark the byte to be erased as `EMPTY`, because otherwise the `find_inner`
+        //   function may stumble upon an `EMPTY` byte before finding the desired element and stop
+        //   searching.
+        //
+        // Thus it is necessary to check all bytes after and before the erased element. If we are in
+        // a contiguous `Group` of `FULL` or `DELETED` bytes (the number of `FULL` or `DELETED` bytes
+        // before and after is greater than or equal to `Group::WIDTH`), then we must mark our byte as
+        // `DELETED` in order for the `find_inner` function to go further. On the other hand, if there
+        // is at least one `EMPTY` slot in the `Group`, then the `find_inner` function will still stumble
+        // upon an `EMPTY` byte, so we can safely mark our erased byte as `EMPTY` as well.
+        //
+        // Finally, for tables smaller than the group width (`self.buckets() < Group::WIDTH`),
+        // `index_before == (index.wrapping_sub(Group::WIDTH) & self.bucket_mask) == index`, so
+        // given all of the above, such tables cannot have `DELETED` bytes.
+        //
+        // Note that in this context `leading_zeros` refers to the bytes at the end of a group, while
+        // `trailing_zeros` refers to the bytes at the beginning of a group.
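+        //
+        // Illustrative: with `Group::WIDTH == 16`, `empty_before.leading_zeros() == 10`
+        // and `empty_after.trailing_zeros() == 7` sum to 17 >= 16, i.e. the run of
+        // FULL/DELETED bytes around `index` spans at least a whole group, so a probe
+        // may continue past this slot and it must become DELETED.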
+        let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH {
+            DELETED
+        } else {
+            self.growth_left += 1;
+            EMPTY
+        };
+        // SAFETY: the caller must uphold the safety contract for `erase` method.
+        self.set_ctrl(index, ctrl);
+        self.items -= 1;
+    }
+}
+
+impl<T: Clone, A: Allocator + Clone> Clone for RawTable<T, A> {
+    fn clone(&self) -> Self {
+        if self.table.is_empty_singleton() {
+            Self::new_in(self.alloc.clone())
+        } else {
+            unsafe {
+                // Avoid `Result::ok_or_else` because it bloats LLVM IR.
+                //
+                // SAFETY: This is safe as we are taking the size of an already allocated table
+                // and therefore capacity overflow cannot occur, `self.table.buckets()` is a power
+                // of two and all allocator errors will be caught inside `RawTableInner::new_uninitialized`.
+                let mut new_table = match Self::new_uninitialized(
+                    self.alloc.clone(),
+                    self.table.buckets(),
+                    Fallibility::Infallible,
+                ) {
+                    Ok(table) => table,
+                    Err(_) => hint::unreachable_unchecked(),
+                };
+
+                // Cloning elements may fail (the clone function may panic). But we don't
+                // need to worry about uninitialized control bits, since:
+                // 1. The number of items (elements) in the table is zero, which means that
+                //    the control bits will not be read by the `Drop` implementation.
+                // 2. The `clone_from_spec` method will first copy all control bits from
+                //    `self` (thus initializing them). But this will not affect the `Drop`
+                //    function, since the `clone_from_spec` function sets `items` only after
+                //    successfully cloning all elements.
+                new_table.clone_from_spec(self);
+                new_table
+            }
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        if source.table.is_empty_singleton() {
+            let mut old_inner = mem::replace(&mut self.table, RawTableInner::NEW);
+            unsafe {
+                // SAFETY:
+                // 1. We call the function only once;
+                // 2. We know for sure that `alloc` and `table_layout` match the [`Allocator`]
+                //    and [`TableLayout`] that were used to allocate this table.
+                // 3. If any elements' drop function panics, then there will only be a memory leak,
+                //    because we have replaced the inner table with a new one.
+                old_inner.drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+            }
+        } else {
+            unsafe {
+                // Make sure that if any panic occurs, we clear the table and
+                // leave it in an empty state.
+                let mut self_ = guard(self, |self_| {
+                    self_.clear_no_drop();
+                });
+
+                // First, drop all our elements without clearing the control
+                // bytes. If this panics then the scope guard will clear the
+                // table, leaking any elements that were not dropped yet.
+                //
+                // This leak is unavoidable: we can't try dropping more elements
+                // since this could lead to another panic and abort the process.
+                //
+                // SAFETY: If something goes wrong, we clear our table right after
+                // dropping the elements, so there is no double drop, since `items`
+                // will be equal to zero.
+                self_.table.drop_elements::<T>();
+
+                // If necessary, resize our table to match the source.
+                if self_.buckets() != source.buckets() {
+                    let new_inner = match RawTableInner::new_uninitialized(
+                        &self_.alloc,
+                        Self::TABLE_LAYOUT,
+                        source.buckets(),
+                        Fallibility::Infallible,
+                    ) {
+                        Ok(table) => table,
+                        Err(_) => hint::unreachable_unchecked(),
+                    };
+                    // Replace the old inner with the new uninitialized one. This is ok since,
+                    // if something goes wrong, the `ScopeGuard` will initialize all control
+                    // bytes and leave an empty table.
+                    let mut old_inner = mem::replace(&mut self_.table, new_inner);
+                    if !old_inner.is_empty_singleton() {
+                        // SAFETY:
+                        // 1. We have checked that our table is allocated.
+                        // 2. We know for sure that `alloc` and `table_layout` match
+                        // the [`Allocator`] and [`TableLayout`] that were used to allocate this table.
+                        old_inner.free_buckets(&self_.alloc, Self::TABLE_LAYOUT);
+                    }
+                }
+
+                // Cloning elements may fail (the clone function may panic), but the `ScopeGuard`
+                // inside the `clone_from_impl` function will take care of that, dropping all
+                // cloned elements if necessary. Our `ScopeGuard` will clear the table.
+                self_.clone_from_spec(source);
+
+                // Disarm the scope guard if cloning was successful.
+                ScopeGuard::into_inner(self_);
+            }
+        }
+    }
+}
+
+/// Specialization of `clone_from` for `Copy` types
+trait RawTableClone {
+    unsafe fn clone_from_spec(&mut self, source: &Self);
+}
+impl<T: Clone, A: Allocator + Clone> RawTableClone for RawTable<T, A> {
+    default_fn! {
+        #[cfg_attr(feature = "inline-more", inline)]
+        unsafe fn clone_from_spec(&mut self, source: &Self) {
+            self.clone_from_impl(source);
+        }
+    }
+}
+#[cfg(feature = "nightly")]
+impl<T: Copy, A: Allocator + Clone> RawTableClone for RawTable<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn clone_from_spec(&mut self, source: &Self) {
+        source
+            .table
+            .ctrl(0)
+            .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes());
+        source
+            .data_start()
+            .as_ptr()
+            .copy_to_nonoverlapping(self.data_start().as_ptr(), self.table.buckets());
+
+        self.table.items = source.table.items;
+        self.table.growth_left = source.table.growth_left;
+    }
+}
+
+impl<T: Clone, A: Allocator + Clone> RawTable<T, A> {
+    /// Common code for clone and clone_from. Assumes:
+    /// - `self.buckets() == source.buckets()`.
+    /// - Any existing elements have been dropped.
+    /// - The control bytes are not initialized yet.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn clone_from_impl(&mut self, source: &Self) {
+        // Copy the control bytes unchanged. We do this in a single bulk copy.
+        source
+            .table
+            .ctrl(0)
+            .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes());
+
+        // The cloning of elements may panic, in which case we need
+        // to make sure we drop only the elements that have been
+        // cloned so far.
+        let mut guard = guard((0, &mut *self), |(index, self_)| {
+            if T::NEEDS_DROP {
+                for i in 0..*index {
+                    if self_.is_bucket_full(i) {
+                        self_.bucket(i).drop();
+                    }
+                }
+            }
+        });
+
+        for from in source.iter() {
+            let index = source.bucket_index(&from);
+            let to = guard.1.bucket(index);
+            to.write(from.as_ref().clone());
+
+            // Update the index in case we need to unwind.
+            guard.0 = index + 1;
+        }
+
+        // Successfully cloned all items, no need to clean up.
+        mem::forget(guard);
+
+        self.table.items = source.table.items;
+        self.table.growth_left = source.table.growth_left;
+    }
+
+    /// Variant of `clone_from` to use when a hasher is available.
+    #[cfg(feature = "raw")]
+    pub fn clone_from_with_hasher(&mut self, source: &Self, hasher: impl Fn(&T) -> u64) {
+        // If we have enough capacity in the table, just clear it and insert
+        // elements one by one. We don't do this if we have the same number of
+        // buckets as the source since we can just copy the contents directly
+        // in that case.
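+        //
+        // Illustrative: if `self` already has 512 buckets (capacity 448 under the
+        // usual 7/8 load factor) and `source` holds 100 elements in a different
+        // number of buckets, the existing allocation is reused: `self` is cleared
+        // and the 100 elements are re-hashed and inserted one by one.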
+        if self.table.buckets() != source.table.buckets()
+            && bucket_mask_to_capacity(self.table.bucket_mask) >= source.len()
+        {
+            self.clear();
+
+            let mut guard_self = guard(&mut *self, |self_| {
+                // Clear the partially copied table if a panic occurs, otherwise
+                // items and growth_left will be out of sync with the contents
+                // of the table.
+                self_.clear();
+            });
+
+            unsafe {
+                for item in source.iter() {
+                    // This may panic.
+                    let item = item.as_ref().clone();
+                    let hash = hasher(&item);
+
+                    // We can use a simpler version of insert() here since:
+                    // - there are no DELETED entries.
+                    // - we know there is enough space in the table.
+                    // - all elements are unique.
+                    let (index, _) = guard_self.table.prepare_insert_slot(hash);
+                    guard_self.bucket(index).write(item);
+                }
+            }
+
+            // Successfully cloned all items, no need to clean up.
+            mem::forget(guard_self);
+
+            self.table.items = source.table.items;
+            self.table.growth_left -= source.table.items;
+        } else {
+            self.clone_from(source);
+        }
+    }
+}
+
+impl<T, A: Allocator + Default> Default for RawTable<T, A> {
+    #[inline]
+    fn default() -> Self {
+        Self::new_in(Default::default())
+    }
+}
+
+#[cfg(feature = "nightly")]
+unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawTable<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // SAFETY:
+            // 1. We call the function only once;
+            // 2. We know for sure that `alloc` and `table_layout` match the [`Allocator`]
+            //    and [`TableLayout`] that were used to allocate this table.
+            // 3. If the drop function of any elements fails, then only a memory leak will occur,
+            //    and we don't care because we are inside the `Drop` function of the `RawTable`,
+            //    so there won't be any table left in an inconsistent state.
+            self.table
+                .drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+        }
+    }
+}
+#[cfg(not(feature = "nightly"))]
+impl<T, A: Allocator> Drop for RawTable<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // SAFETY:
+            // 1. We call the function only once;
+            // 2. We know for sure that `alloc` and `table_layout` match the [`Allocator`]
+            //    and [`TableLayout`] that were used to allocate this table.
+            // 3. If the drop function of any elements fails, then only a memory leak will occur,
+            //    and we don't care because we are inside the `Drop` function of the `RawTable`,
+            //    so there won't be any table left in an inconsistent state.
+            self.table
+                .drop_inner_table::<T, _>(&self.alloc, Self::TABLE_LAYOUT);
+        }
+    }
+}
+
+impl<T, A: Allocator> IntoIterator for RawTable<T, A> {
+    type Item = T;
+    type IntoIter = RawIntoIter<T, A>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> RawIntoIter<T, A> {
+        unsafe {
+            let iter = self.iter();
+            self.into_iter_from(iter)
+        }
+    }
+}
+
+/// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does
+/// not track an item count.
+pub(crate) struct RawIterRange<T> {
+    // Mask of full buckets in the current group. Bits are cleared from this
+    // mask as each element is processed.
+    current_group: BitMaskIter,
+
+    // Pointer to the buckets for the current group.
+    data: Bucket<T>,
+
+    // Pointer to the next group of control bytes,
+    // must be aligned to the group size.
+    next_ctrl: *const u8,
+
+    // Pointer one past the last control byte of this range.
+    end: *const u8,
+}
+
+impl<T> RawIterRange<T> {
+    /// Returns a `RawIterRange` covering a subset of a table.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`undefined behavior`]:
+    ///
+    /// * `ctrl` must be [valid] for reads, i.e. table outlives the `RawIterRange`;
+    ///
+    /// * `ctrl` must be properly aligned to the group size (Group::WIDTH);
+    ///
+    /// * `ctrl` must point to the array of properly initialized control bytes;
+    ///
+    /// * `data` must be the [`Bucket`] at the `ctrl` index in the table;
+    ///
+    /// * the value of `len` must be less than or equal to the number of table buckets,
+    ///   and the returned value of `ctrl.as_ptr().add(len).offset_from(ctrl.as_ptr())`
+    ///   must be positive.
+    ///
+    /// * The `ctrl.add(len)` pointer must be either in bounds or one
+    ///   byte past the end of the same [allocated table].
+    ///
+    /// * The `len` must be a power of two.
+    ///
+    /// [valid]: https://doc.rust-lang.org/std/ptr/index.html#safety
+    /// [`undefined behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn new(ctrl: *const u8, data: Bucket<T>, len: usize) -> Self {
+        debug_assert_ne!(len, 0);
+        debug_assert_eq!(ctrl as usize % Group::WIDTH, 0);
+        // SAFETY: The caller must uphold the safety rules for the [`RawIterRange::new`]
+        let end = ctrl.add(len);
+
+        // Load the first group and advance ctrl to point to the next group
+        // SAFETY: The caller must uphold the safety rules for the [`RawIterRange::new`]
+        let current_group = Group::load_aligned(ctrl).match_full();
+        let next_ctrl = ctrl.add(Group::WIDTH);
+
+        Self {
+            current_group: current_group.into_iter(),
+            data,
+            next_ctrl,
+            end,
+        }
+    }
+
+    /// Splits a `RawIterRange` into two halves.
+    ///
+    /// Returns `None` if the remaining range is smaller than or equal to the
+    /// group width.
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "rayon")]
+    pub(crate) fn split(mut self) -> (Self, Option<RawIterRange<T>>) {
+        unsafe {
+            if self.end <= self.next_ctrl {
+                // Nothing to split if the group that we are currently processing
+                // is the last one.
+                (self, None)
+            } else {
+                // len is the remaining number of elements after the group that
+                // we are currently processing. It must be a multiple of the
+                // group size (small tables are caught by the check above).
+                let len = offset_from(self.end, self.next_ctrl);
+                debug_assert_eq!(len % Group::WIDTH, 0);
+
+                // Split the remaining elements into two halves, but round the
+                // midpoint down in case there is an odd number of groups
+                // remaining. This ensures that:
+                // - The tail is at least 1 group long.
+                // - The split is roughly even considering we still have the
+                //   current group to process.
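+                //
+                // Illustrative: with `Group::WIDTH == 8` and `len == 40`,
+                // `mid == (40 / 2) & !7 == 16`, so the tail starts 16 control bytes
+                // past `next_ctrl` and covers the remaining 24.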
+                let mid = (len / 2) & !(Group::WIDTH - 1);
+
+                let tail = Self::new(
+                    self.next_ctrl.add(mid),
+                    self.data.next_n(Group::WIDTH).next_n(mid),
+                    len - mid,
+                );
+                debug_assert_eq!(
+                    self.data.next_n(Group::WIDTH).next_n(mid).ptr,
+                    tail.data.ptr
+                );
+                debug_assert_eq!(self.end, tail.end);
+                self.end = self.next_ctrl.add(mid);
+                debug_assert_eq!(self.end.add(Group::WIDTH), tail.next_ctrl);
+                (self, Some(tail))
+            }
+        }
+    }
+
+    /// # Safety
+    /// If DO_CHECK_PTR_RANGE is false, caller must ensure that we never try to iterate
+    /// after yielding all elements.
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn next_impl<const DO_CHECK_PTR_RANGE: bool>(&mut self) -> Option<Bucket<T>> {
+        loop {
+            if let Some(index) = self.current_group.next() {
+                return Some(self.data.next_n(index));
+            }
+
+            if DO_CHECK_PTR_RANGE && self.next_ctrl >= self.end {
+                return None;
+            }
+
+            // We might read past self.end up to the next group boundary,
+            // but this is fine because it only occurs on tables smaller
+            // than the group size where the trailing control bytes are all
+            // EMPTY. On larger tables self.end is guaranteed to be aligned
+            // to the group size (since tables are power-of-two sized).
+            self.current_group = Group::load_aligned(self.next_ctrl).match_full().into_iter();
+            self.data = self.data.next_n(Group::WIDTH);
+            self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
+        }
+    }
+
+    /// Folds every element into an accumulator by applying an operation,
+    /// returning the final result.
+    ///
+    /// `fold_impl()` takes three arguments: the number of items remaining in
+    /// the iterator, an initial value, and a closure with two arguments: an
+    /// 'accumulator', and an element. The closure returns the value that the
+    /// accumulator should have for the next iteration.
+    ///
+    /// The initial value is the value the accumulator will have on the first call.
+    ///
+    /// After applying this closure to every element of the iterator, `fold_impl()`
+    /// returns the accumulator.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`Undefined Behavior`]:
+    ///
+    /// * The [`RawTableInner`] / [`RawTable`] must be alive and not moved,
+    ///   i.e. table outlives the `RawIterRange`;
+    ///
+    /// * The provided `n` value must match the actual number of items
+    ///   in the table.
+    ///
+    /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[allow(clippy::while_let_on_iterator)]
+    #[cfg_attr(feature = "inline-more", inline)]
+    unsafe fn fold_impl<F, B>(mut self, mut n: usize, mut acc: B, mut f: F) -> B
+    where
+        F: FnMut(B, Bucket<T>) -> B,
+    {
+        loop {
+            while let Some(index) = self.current_group.next() {
+                // The returned `index` will always be in the range `0..Group::WIDTH`,
+                // so that calling `self.data.next_n(index)` is safe (see detailed explanation below).
+                debug_assert!(n != 0);
+                let bucket = self.data.next_n(index);
+                acc = f(acc, bucket);
+                n -= 1;
+            }
+
+            if n == 0 {
+                return acc;
+            }
+
+            // SAFETY: The caller of this function ensures that:
+            //
+            // 1. The provided `n` value matches the actual number of items in the table;
+            // 2. The table is alive and has not moved.
+            //
+            // Taking the above into account, we always stay within the bounds, because:
+            //
+            // 1. For tables smaller than the group width (self.buckets() <= Group::WIDTH),
+            //    we will never end up in the given branch, since we should have already
+            //    yielded all the elements of the table.
+            //
+            // 2. For tables larger than the group width: the number of buckets is a
+            //    power of two (2 ^ n) and Group::WIDTH is also a power of two (2 ^ k).
+            //    Since `(2 ^ n) > (2 ^ k)`, it follows that `(2 ^ n) % (2 ^ k) == 0`. As we
+            //    start from the start of the array of control bytes, and never try to
+            //    iterate after getting all the elements, the last `self.current_group`
+            //    will read bytes from the `self.buckets() - Group::WIDTH` index. We also
+            //    know that `self.current_group.next()` will always return indices within
+            //    the range `0..Group::WIDTH`.
+            //
+            //    Knowing all of the above and taking into account that we are synchronizing
+            //    the `self.data` index with the index we used to read the `self.current_group`,
+            //    the subsequent `self.data.next_n(index)` will always return a bucket with
+            //    an index number less than `self.buckets()`.
+            //
+            //    The last `self.next_ctrl`, whose index would be `self.buckets()`, will never
+            //    actually be read, since we should have already yielded all the elements of
+            //    the table.
+            self.current_group = Group::load_aligned(self.next_ctrl).match_full().into_iter();
+            self.data = self.data.next_n(Group::WIDTH);
+            self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
+        }
+    }
+}
+
+// We make raw iterators unconditionally Send and Sync, and let the PhantomData
+// in the actual iterator implementations determine the real Send/Sync bounds.
+unsafe impl<T> Send for RawIterRange<T> {}
+unsafe impl<T> Sync for RawIterRange<T> {}
+
+impl<T> Clone for RawIterRange<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Self {
+            data: self.data.clone(),
+            next_ctrl: self.next_ctrl,
+            current_group: self.current_group,
+            end: self.end,
+        }
+    }
+}
+
+impl<T> Iterator for RawIterRange<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Bucket<T>> {
+        unsafe {
+            // SAFETY: We set the `DO_CHECK_PTR_RANGE` flag to true.
+            self.next_impl::<true>()
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        // We don't have an item count, so just guess based on the range size.
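+        //
+        // Illustrative: with `Group::WIDTH == 8` and 24 control bytes left between
+        // `next_ctrl` and `end`, the hint is `(0, Some(8 + 24)) == (0, Some(32))`.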
+        let remaining_buckets = if self.end > self.next_ctrl {
+            unsafe { offset_from(self.end, self.next_ctrl) }
+        } else {
+            0
+        };
+
+        // Add a group width to include the group we are currently processing.
+        (0, Some(Group::WIDTH + remaining_buckets))
+    }
+}
+
+impl<T> FusedIterator for RawIterRange<T> {}
+
+/// Iterator which returns a raw pointer to every full bucket in the table.
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+///   result in the iterator yielding that bucket (unless `reflect_remove` is called).
+/// - It is unspecified whether an element inserted after the iterator was
+///   created will be yielded by that iterator (unless `reflect_insert` is called).
+/// - The order in which the iterator yields buckets is unspecified and may
+///   change in the future.
+pub struct RawIter<T> {
+    pub(crate) iter: RawIterRange<T>,
+    items: usize,
+}
+
+impl<T> RawIter<T> {
+    /// Refresh the iterator so that it reflects a removal from the given bucket.
+    ///
+    /// For the iterator to remain valid, this method must be called once
+    /// for each removed bucket before `next` is called again.
+    ///
+    /// This method should be called _before_ the removal is made. It is not necessary to call this
+    /// method if you are removing an item that this iterator yielded in the past.
+    #[cfg(feature = "raw")]
+    pub unsafe fn reflect_remove(&mut self, b: &Bucket<T>) {
+        self.reflect_toggle_full(b, false);
+    }
+
+    /// Refresh the iterator so that it reflects an insertion into the given bucket.
+    ///
+    /// For the iterator to remain valid, this method must be called once
+    /// for each insert before `next` is called again.
+    ///
+    /// This method does not guarantee that an insertion of a bucket with a greater
+    /// index than the last one yielded will be reflected in the iterator.
+    ///
+    /// This method should be called _after_ the given insert is made.
+    #[cfg(feature = "raw")]
+    pub unsafe fn reflect_insert(&mut self, b: &Bucket<T>) {
+        self.reflect_toggle_full(b, true);
+    }
+
+    /// Refresh the iterator so that it reflects a change to the state of the given bucket.
+    #[cfg(feature = "raw")]
+    unsafe fn reflect_toggle_full(&mut self, b: &Bucket<T>, is_insert: bool) {
+        if b.as_ptr() > self.iter.data.as_ptr() {
+            // The iterator has already passed the bucket's group.
+            // So the toggle isn't relevant to this iterator.
+            return;
+        }
+
+        if self.iter.next_ctrl < self.iter.end
+            && b.as_ptr() <= self.iter.data.next_n(Group::WIDTH).as_ptr()
+        {
+            // The iterator has not yet reached the bucket's group.
+            // We don't need to reload anything, but we do need to adjust the item count.
+
+            if cfg!(debug_assertions) {
+                // Double-check that the user isn't lying to us by checking the bucket state.
+                // To do that, we need to find its control byte. We know that self.iter.data is
+                // at self.iter.next_ctrl - Group::WIDTH, so we work from there:
+                let offset = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+                let ctrl = self.iter.next_ctrl.sub(Group::WIDTH).add(offset);
+                // This method should be called _before_ a removal, or _after_ an insert,
+                // so in both cases the ctrl byte should indicate that the bucket is full.
+                assert!(is_full(*ctrl));
+            }
+
+            if is_insert {
+                self.items += 1;
+            } else {
+                self.items -= 1;
+            }
+
+            return;
+        }
+
+        // The iterator is at the bucket group that the toggled bucket is in.
+        // We need to do two things:
+        //
+        //  - Determine if the iterator already yielded the toggled bucket.
+        //    If it did, we're done.
+        //  - Otherwise, update the iterator cached group so that it won't
+        //    yield a to-be-removed bucket, or _will_ yield a to-be-added bucket.
+        //    We'll also need to update the item count accordingly.
+        if let Some(index) = self.iter.current_group.0.lowest_set_bit() {
+            let next_bucket = self.iter.data.next_n(index);
+            if b.as_ptr() > next_bucket.as_ptr() {
+                // The toggled bucket is "before" the bucket the iterator would yield next. We
+                // therefore don't need to do anything --- the iterator has already passed the
+                // bucket in question.
+                //
+                // The item count must already be correct, since a removal or insert "prior" to
+                // the iterator's position wouldn't affect the item count.
+            } else {
+                // The removed bucket is an upcoming bucket. We need to make sure it does _not_
+                // get yielded, and also that it's no longer included in the item count.
+                //
+                // NOTE: We can't just reload the group here, both since that might reflect
+                // inserts we've already passed, and because that might inadvertently unset the
+                // bits for _other_ removals. If we do that, we'd have to also decrement the
+                // item count for those other bits that we unset. But the presumably subsequent
+                // call to reflect for those buckets might _also_ decrement the item count.
+                // Instead, we _just_ flip the bit for the particular bucket the caller asked
+                // us to reflect.
+                let our_bit = offset_from(self.iter.data.as_ptr(), b.as_ptr());
+                let was_full = self.iter.current_group.flip(our_bit);
+                debug_assert_ne!(was_full, is_insert);
+
+                if is_insert {
+                    self.items += 1;
+                } else {
+                    self.items -= 1;
+                }
+
+                if cfg!(debug_assertions) {
+                    if b.as_ptr() == next_bucket.as_ptr() {
+                        // The removed bucket should no longer be next
+                        debug_assert_ne!(self.iter.current_group.0.lowest_set_bit(), Some(index));
+                    } else {
+                        // We should not have changed what bucket comes next.
+                        debug_assert_eq!(self.iter.current_group.0.lowest_set_bit(), Some(index));
+                    }
+                }
+            }
+        } else {
+            // We must have already iterated past the removed item.
+        }
+    }
+
+    unsafe fn drop_elements(&mut self) {
+        if T::NEEDS_DROP && self.items != 0 {
+            for item in self {
+                item.drop();
+            }
+        }
+    }
+}
+
+impl<T> Clone for RawIter<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Self {
+            iter: self.iter.clone(),
+            items: self.items,
+        }
+    }
+}
+
+impl<T> Iterator for RawIter<T> {
+    type Item = Bucket<T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Bucket<T>> {
+        // The inner iterator iterates over buckets,
+        // so it can do unnecessary work if we have already yielded all items.
+        if self.items == 0 {
+            return None;
+        }
+
+        let nxt = unsafe {
+            // SAFETY: We check the number of items to yield using the `items` field.
+            self.iter.next_impl::<false>()
+        };
+
+        debug_assert!(nxt.is_some());
+        self.items -= 1;
+
+        nxt
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.items, Some(self.items))
+    }
+
+    #[inline]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        unsafe { self.iter.fold_impl(self.items, init, f) }
+    }
+}
+
+impl<T> ExactSizeIterator for RawIter<T> {}
+impl<T> FusedIterator for RawIter<T> {}
+
+/// Iterator which returns an index of every full bucket in the table.
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+///   result in the iterator yielding index of that bucket.
+/// - It is unspecified whether an element inserted after the iterator was
+///   created will be yielded by that iterator.
+/// - The order in which the iterator yields indices of the buckets is unspecified
+///   and may change in the future.
+pub(crate) struct FullBucketsIndices {
+    // Mask of full buckets in the current group. Bits are cleared from this
+    // mask as each element is processed.
+    current_group: BitMaskIter,
+
+    // Initial value of the bytes' indices of the current group (relative
+    // to the start of the control bytes).
+    group_first_index: usize,
+
+    // Pointer to the current group of control bytes,
+    // must be aligned to the group size (Group::WIDTH).
+    ctrl: NonNull<u8>,
+
+    // Number of elements in the table.
+    items: usize,
+}
+
+impl FullBucketsIndices {
+    /// Advances the iterator and returns the next value.
+    ///
+    /// # Safety
+    ///
+    /// If any of the following conditions are violated, the result is
+    /// [`Undefined Behavior`]:
+    ///
+    /// * The [`RawTableInner`] / [`RawTable`] must be alive and not moved,
+    ///   i.e. table outlives the `FullBucketsIndices`;
+    ///
+    /// * It never tries to iterate after getting all elements.
+    ///
+    /// [`Undefined Behavior`]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[inline(always)]
+    unsafe fn next_impl(&mut self) -> Option<usize> {
+        loop {
+            if let Some(index) = self.current_group.next() {
+                // The returned `self.group_first_index + index` will always
+                // be in the range `0..self.buckets()`. See explanation below.
+                return Some(self.group_first_index + index);
+            }
+
+            // SAFETY: The caller of this function ensures that:
+            //
+            // 1. It never tries to iterate after getting all the elements;
+            // 2. The table is alive and has not been moved;
+            // 3. The first `self.ctrl` pointed to the start of the array of control bytes.
+            //
+            // Taking the above into account, we always stay within the bounds, because:
+            //
+            // 1. For tables smaller than the group width (self.buckets() <= Group::WIDTH),
+            //    we will never end up in the given branch, since we should have already
+            //    yielded all the elements of the table.
+            //
+            // 2. For tables larger than the group width: the number of buckets is a
+            //    power of two (2 ^ n) and Group::WIDTH is also a power of two (2 ^ k). Since
+            //    `(2 ^ n) > (2 ^ k)`, it follows that `(2 ^ n) % (2 ^ k) = 0`. As we start from
+            //    the start of the array of control bytes and never try to iterate after
+            //    getting all the elements, the last `self.ctrl` will be at offset
+            //    `self.buckets() - Group::WIDTH`, so `self.current_group.next()`
+            //    will always contain indices within the range `0..Group::WIDTH`,
+            //    and the subsequent `self.group_first_index + index` will always return a
+            //    number less than `self.buckets()`.
+            self.ctrl = NonNull::new_unchecked(self.ctrl.as_ptr().add(Group::WIDTH));
+
+            // SAFETY: See explanation above.
+            self.current_group = Group::load_aligned(self.ctrl.as_ptr())
+                .match_full()
+                .into_iter();
+            self.group_first_index += Group::WIDTH;
+        }
+    }
+}
+
+impl Iterator for FullBucketsIndices {
+    type Item = usize;
+
+    /// Advances the iterator and returns the next value. It is up to
+    /// the caller to ensure that the `RawTable` outlives the `FullBucketsIndices`,
+    /// because we cannot make the `next` method unsafe.
+    #[inline(always)]
+    fn next(&mut self) -> Option<usize> {
+        // Return if we already yielded all items.
+        if self.items == 0 {
+            return None;
+        }
+
+        let nxt = unsafe {
+            // SAFETY:
+            // 1. We check number of items to yield using `items` field.
+            // 2. The caller ensures that the table is alive and has not moved.
+            self.next_impl()
+        };
+
+        debug_assert!(nxt.is_some());
+        self.items -= 1;
+
+        nxt
+    }
+
+    #[inline(always)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.items, Some(self.items))
+    }
+}
+
+impl ExactSizeIterator for FullBucketsIndices {}
+impl FusedIterator for FullBucketsIndices {}
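+
+// Sketch of the usage contract documented on `FullBucketsIndices` above
+// (kept as a comment; `indices` stands in for however the iterator is
+// obtained, which is crate-internal): the table must stay allocated and
+// un-resized while the indices are consumed, and each yielded index
+// addresses a full bucket within `0..table.buckets()`.
+//
+// for index in indices {
+//     debug_assert!(index < table.buckets());
+//     let bucket = unsafe { table.bucket(index) }; // still valid: table not freed/grown
+// }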
+
+/// Iterator which consumes a table and returns elements.
+pub struct RawIntoIter<T, A: Allocator = Global> {
+    iter: RawIter<T>,
+    allocation: Option<(NonNull<u8>, Layout, A)>,
+    marker: PhantomData<T>,
+}
+
+impl<T, A: Allocator> RawIntoIter<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> RawIter<T> {
+        self.iter.clone()
+    }
+}
+
+unsafe impl<T, A: Allocator> Send for RawIntoIter<T, A>
+where
+    T: Send,
+    A: Send,
+{
+}
+unsafe impl<T, A: Allocator> Sync for RawIntoIter<T, A>
+where
+    T: Sync,
+    A: Sync,
+{
+}
+
+#[cfg(feature = "nightly")]
+unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawIntoIter<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements
+            self.iter.drop_elements();
+
+            // Free the table
+            if let Some((ptr, layout, ref alloc)) = self.allocation {
+                alloc.deallocate(ptr, layout);
+            }
+        }
+    }
+}
+#[cfg(not(feature = "nightly"))]
+impl<T, A: Allocator> Drop for RawIntoIter<T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements
+            self.iter.drop_elements();
+
+            // Free the table
+            if let Some((ptr, layout, ref alloc)) = self.allocation {
+                alloc.deallocate(ptr, layout);
+            }
+        }
+    }
+}
+
+impl<T, A: Allocator> Iterator for RawIntoIter<T, A> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<T> {
+        unsafe { Some(self.iter.next()?.read()) }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T, A: Allocator> ExactSizeIterator for RawIntoIter<T, A> {}
+impl<T, A: Allocator> FusedIterator for RawIntoIter<T, A> {}
+
+/// Iterator which consumes elements without freeing the table storage.
+pub struct RawDrain<'a, T, A: Allocator = Global> {
+    iter: RawIter<T>,
+
+    // The table is moved into the iterator for the duration of the drain. This
+    // ensures that an empty table is left if the drain iterator is leaked
+    // without dropping.
+    table: RawTableInner,
+    orig_table: NonNull<RawTableInner>,
+
+    // We don't use a &'a mut RawTable<T> because we want RawDrain to be
+    // covariant over T.
+    marker: PhantomData<&'a RawTable<T, A>>,
+}
+
+impl<T, A: Allocator> RawDrain<'_, T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> RawIter<T> {
+        self.iter.clone()
+    }
+}
+
+unsafe impl<T, A: Allocator> Send for RawDrain<'_, T, A>
+where
+    T: Send,
+    A: Send,
+{
+}
+unsafe impl<T, A: Allocator> Sync for RawDrain<'_, T, A>
+where
+    T: Sync,
+    A: Sync,
+{
+}
+
+impl<T, A: Allocator> Drop for RawDrain<'_, T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn drop(&mut self) {
+        unsafe {
+            // Drop all remaining elements. Note that this may panic.
+            self.iter.drop_elements();
+
+            // Reset the contents of the table now that all elements have been
+            // dropped.
+            self.table.clear_no_drop();
+
+            // Move the now empty table back to its original location.
+            self.orig_table
+                .as_ptr()
+                .copy_from_nonoverlapping(&self.table, 1);
+        }
+    }
+}
+
+impl<T, A: Allocator> Iterator for RawDrain<'_, T, A> {
+    type Item = T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            let item = self.iter.next()?;
+            Some(item.read())
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+impl<T, A: Allocator> ExactSizeIterator for RawDrain<'_, T, A> {}
+impl<T, A: Allocator> FusedIterator for RawDrain<'_, T, A> {}
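+
+// A public-level illustration of the leak behavior noted on `RawDrain::table`
+// above, kept as a comment (it uses the crate's `HashMap` wrapper rather than
+// the raw API): leaking the drain iterator leaks the elements, but the map
+// itself is left empty and usable.
+//
+// let mut map = hashbrown::HashMap::new();
+// map.insert(1, "a");
+// core::mem::forget(map.drain());
+// assert!(map.is_empty());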
+
+/// Iterator over occupied buckets that could match a given hash.
+///
+/// `RawTable` only stores 7 bits of the hash value, so this iterator may return
+/// items that have a hash value different than the one provided. You should
+/// always validate the returned values before using them.
+///
+/// For maximum flexibility this iterator is not bound by a lifetime, but you
+/// must observe several rules when using it:
+/// - You must not free the hash table while iterating (including via growing/shrinking).
+/// - It is fine to erase a bucket that has been yielded by the iterator.
+/// - Erasing a bucket that has not yet been yielded by the iterator may still
+///   result in the iterator yielding that bucket.
+/// - It is unspecified whether an element inserted after the iterator was
+///   created will be yielded by that iterator.
+/// - The order in which the iterator yields buckets is unspecified and may
+///   change in the future.
+pub struct RawIterHash<T> {
+    inner: RawIterHashInner,
+    _marker: PhantomData<T>,
+}
+
+struct RawIterHashInner {
+    // See `RawTableInner`'s corresponding fields for details.
+    // We can't store a `*const RawTableInner` as it would get
+    // invalidated by the user calling `&mut` methods on `RawTable`.
+    bucket_mask: usize,
+    ctrl: NonNull<u8>,
+
+    // The top 7 bits of the hash.
+    h2_hash: u8,
+
+    // The sequence of groups to probe in the search.
+    probe_seq: ProbeSeq,
+
+    group: Group,
+
+    // The elements within the group with a matching h2-hash.
+    bitmask: BitMaskIter,
+}
+
+impl<T> RawIterHash<T> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
+    unsafe fn new<A: Allocator>(table: &RawTable<T, A>, hash: u64) -> Self {
+        RawIterHash {
+            inner: RawIterHashInner::new(&table.table, hash),
+            _marker: PhantomData,
+        }
+    }
+}
+impl RawIterHashInner {
+    #[cfg_attr(feature = "inline-more", inline)]
+    #[cfg(feature = "raw")]
+    unsafe fn new(table: &RawTableInner, hash: u64) -> Self {
+        let h2_hash = h2(hash);
+        let probe_seq = table.probe_seq(hash);
+        let group = Group::load(table.ctrl(probe_seq.pos));
+        let bitmask = group.match_byte(h2_hash).into_iter();
+
+        RawIterHashInner {
+            bucket_mask: table.bucket_mask,
+            ctrl: table.ctrl,
+            h2_hash,
+            probe_seq,
+            group,
+            bitmask,
+        }
+    }
+}
+
+impl<T> Iterator for RawIterHash<T> {
+    type Item = Bucket<T>;
+
+    fn next(&mut self) -> Option<Bucket<T>> {
+        unsafe {
+            match self.inner.next() {
+                Some(index) => {
+                    // Can't use `RawTable::bucket` here as we don't have
+                    // an actual `RawTable` reference to use.
+                    debug_assert!(index <= self.inner.bucket_mask);
+                    let bucket = Bucket::from_base_index(self.inner.ctrl.cast(), index);
+                    Some(bucket)
+                }
+                None => None,
+            }
+        }
+    }
+}
+
+impl Iterator for RawIterHashInner {
+    type Item = usize;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        unsafe {
+            loop {
+                if let Some(bit) = self.bitmask.next() {
+                    let index = (self.probe_seq.pos + bit) & self.bucket_mask;
+                    return Some(index);
+                }
+                if likely(self.group.match_empty().any_bit_set()) {
+                    return None;
+                }
+                self.probe_seq.move_next(self.bucket_mask);
+
+                // Can't use `RawTableInner::ctrl` here as we don't have
+                // an actual `RawTableInner` reference to use.
+                let index = self.probe_seq.pos;
+                debug_assert!(index < self.bucket_mask + 1 + Group::WIDTH);
+                let group_ctrl = self.ctrl.as_ptr().add(index);
+
+                self.group = Group::load(group_ctrl);
+                self.bitmask = self.group.match_byte(self.h2_hash).into_iter();
+            }
+        }
+    }
+}
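+
+// Sketch of the validation rule in the `RawIterHash` documentation above,
+// kept as a comment (`iter_hash` and `eq` are illustrative names, not exact
+// crate APIs): because only the 7-bit `h2` tag is compared, every yielded
+// bucket must be re-checked with full key equality before it is trusted.
+//
+// unsafe {
+//     for bucket in table.iter_hash(hash) {
+//         if eq(bucket.as_ref()) {
+//             return Some(bucket); // verified match
+//         }
+//     }
+// }
+// None // only tag collisions were found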
+
+pub(crate) struct RawExtractIf<'a, T, A: Allocator> {
+    pub iter: RawIter<T>,
+    pub table: &'a mut RawTable<T, A>,
+}
+
+impl<T, A: Allocator> RawExtractIf<'_, T, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub(crate) fn next<F>(&mut self, mut f: F) -> Option<T>
+    where
+        F: FnMut(&mut T) -> bool,
+    {
+        unsafe {
+            for item in &mut self.iter {
+                if f(item.as_mut()) {
+                    return Some(self.table.remove(item).0);
+                }
+            }
+        }
+        None
+    }
+}
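+
+// Sketch of how a wrapper iterator is expected to drive `RawExtractIf`
+// (illustrative names only, not the crate's actual wrapper): each call to
+// `next` forwards a predicate over the raw entry and removes the first match.
+//
+// impl<T, A: Allocator> Iterator for ExtractIfWrapper<'_, T, A> {
+//     type Item = T;
+//     fn next(&mut self) -> Option<T> {
+//         self.inner.next(|entry| (self.pred)(entry))
+//     }
+// }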
+
+#[cfg(test)]
+mod test_map {
+    use super::*;
+
+    fn rehash_in_place<T>(table: &mut RawTable<T>, hasher: impl Fn(&T) -> u64) {
+        unsafe {
+            table.table.rehash_in_place(
+                &|table, index| hasher(table.bucket::<T>(index).as_ref()),
+                mem::size_of::<T>(),
+                if mem::needs_drop::<T>() {
+                    Some(mem::transmute(ptr::drop_in_place::<T> as unsafe fn(*mut T)))
+                } else {
+                    None
+                },
+            );
+        }
+    }
+
+    #[test]
+    fn rehash() {
+        let mut table = RawTable::new();
+        let hasher = |i: &u64| *i;
+        for i in 0..100 {
+            table.insert(i, i, hasher);
+        }
+
+        for i in 0..100 {
+            unsafe {
+                assert_eq!(table.find(i, |x| *x == i).map(|b| b.read()), Some(i));
+            }
+            assert!(table.find(i + 100, |x| *x == i + 100).is_none());
+        }
+
+        rehash_in_place(&mut table, hasher);
+
+        for i in 0..100 {
+            unsafe {
+                assert_eq!(table.find(i, |x| *x == i).map(|b| b.read()), Some(i));
+            }
+            assert!(table.find(i + 100, |x| *x == i + 100).is_none());
+        }
+    }
+
+    /// CHECKING THAT WE ARE NOT TRYING TO READ THE MEMORY OF
+    /// AN UNINITIALIZED TABLE DURING THE DROP
+    #[test]
+    fn test_drop_uninitialized() {
+        use ::alloc::vec::Vec;
+
+        let table = unsafe {
+            // SAFETY: The `buckets` is power of two and we're not
+            // trying to actually use the returned RawTable.
+            RawTable::<(u64, Vec<i32>)>::new_uninitialized(Global, 8, Fallibility::Infallible)
+                .unwrap()
+        };
+        drop(table);
+    }
+
+    /// CHECKING THAT WE DON'T TRY TO DROP DATA IF THE `ITEMS`
+    /// ARE ZERO, EVEN IF WE HAVE `FULL` CONTROL BYTES.
+    #[test]
+    fn test_drop_zero_items() {
+        use ::alloc::vec::Vec;
+        unsafe {
+            // SAFETY: The `buckets` is power of two and we're not
+            // trying to actually use the returned RawTable.
+            let table =
+                RawTable::<(u64, Vec<i32>)>::new_uninitialized(Global, 8, Fallibility::Infallible)
+                    .unwrap();
+
+            // WE SIMULATE, AS IT WERE, A FULL TABLE.
+
+            // SAFETY: We checked that the table is allocated and therefore the table already has
+            // `self.bucket_mask + 1 + Group::WIDTH` number of control bytes (see TableLayout::calculate_layout_for)
+            // so writing `table.table.num_ctrl_bytes() == bucket_mask + 1 + Group::WIDTH` bytes is safe.
+            table
+                .table
+                .ctrl(0)
+                .write_bytes(EMPTY, table.table.num_ctrl_bytes());
+
+            // SAFETY: table.capacity() is guaranteed to be smaller than table.buckets()
+            table.table.ctrl(0).write_bytes(0, table.capacity());
+
+            // Fix up the trailing control bytes. See the comments in set_ctrl
+            // for the handling of tables smaller than the group width.
+            if table.buckets() < Group::WIDTH {
+                // SAFETY: We have `self.bucket_mask + 1 + Group::WIDTH` number of control bytes,
+                // so copying `self.buckets() == self.bucket_mask + 1` bytes with offset equal to
+                // `Group::WIDTH` is safe
+                table
+                    .table
+                    .ctrl(0)
+                    .copy_to(table.table.ctrl(Group::WIDTH), table.table.buckets());
+            } else {
+                // SAFETY: We have `self.bucket_mask + 1 + Group::WIDTH` number of
+                // control bytes, so copying `Group::WIDTH` bytes with offset equal
+                // to `self.buckets() == self.bucket_mask + 1` is safe
+                table
+                    .table
+                    .ctrl(0)
+                    .copy_to(table.table.ctrl(table.table.buckets()), Group::WIDTH);
+            }
+            drop(table);
+        }
+    }
+
+    /// CHECKING THAT A PANIC DURING `clone_from` LEAVES THE DESTINATION TABLE
+    /// EMPTY AND DOES NOT LEAK OR DOUBLE-DROP ELEMENTS OR ALLOCATIONS.
+    #[test]
+    fn test_catch_panic_clone_from() {
+        use ::alloc::sync::Arc;
+        use ::alloc::vec::Vec;
+        use allocator_api2::alloc::{AllocError, Allocator, Global};
+        use core::sync::atomic::{AtomicI8, Ordering};
+        use std::thread;
+
+        struct MyAllocInner {
+            drop_count: Arc<AtomicI8>,
+        }
+
+        #[derive(Clone)]
+        struct MyAlloc {
+            _inner: Arc<MyAllocInner>,
+        }
+
+        impl Drop for MyAllocInner {
+            fn drop(&mut self) {
+                println!("MyAlloc freed.");
+                self.drop_count.fetch_sub(1, Ordering::SeqCst);
+            }
+        }
+
+        unsafe impl Allocator for MyAlloc {
+            fn allocate(&self, layout: Layout) -> std::result::Result<NonNull<[u8]>, AllocError> {
+                let g = Global;
+                g.allocate(layout)
+            }
+
+            unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+                let g = Global;
+                g.deallocate(ptr, layout)
+            }
+        }
+
+        const DISARMED: bool = false;
+        const ARMED: bool = true;
+
+        struct CheckedCloneDrop {
+            panic_in_clone: bool,
+            dropped: bool,
+            need_drop: Vec<u64>,
+        }
+
+        impl Clone for CheckedCloneDrop {
+            fn clone(&self) -> Self {
+                if self.panic_in_clone {
+                    panic!("panic in clone")
+                }
+                Self {
+                    panic_in_clone: self.panic_in_clone,
+                    dropped: self.dropped,
+                    need_drop: self.need_drop.clone(),
+                }
+            }
+        }
+
+        impl Drop for CheckedCloneDrop {
+            fn drop(&mut self) {
+                if self.dropped {
+                    panic!("double drop");
+                }
+                self.dropped = true;
+            }
+        }
+
+        let dropped: Arc<AtomicI8> = Arc::new(AtomicI8::new(2));
+
+        let mut table = RawTable::new_in(MyAlloc {
+            _inner: Arc::new(MyAllocInner {
+                drop_count: dropped.clone(),
+            }),
+        });
+
+        for (idx, panic_in_clone) in core::iter::repeat(DISARMED).take(7).enumerate() {
+            let idx = idx as u64;
+            table.insert(
+                idx,
+                (
+                    idx,
+                    CheckedCloneDrop {
+                        panic_in_clone,
+                        dropped: false,
+                        need_drop: vec![idx],
+                    },
+                ),
+                |(k, _)| *k,
+            );
+        }
+
+        assert_eq!(table.len(), 7);
+
+        thread::scope(|s| {
+            let result = s.spawn(|| {
+                let armed_flags = [
+                    DISARMED, DISARMED, ARMED, DISARMED, DISARMED, DISARMED, DISARMED,
+                ];
+                let mut scope_table = RawTable::new_in(MyAlloc {
+                    _inner: Arc::new(MyAllocInner {
+                        drop_count: dropped.clone(),
+                    }),
+                });
+                for (idx, &panic_in_clone) in armed_flags.iter().enumerate() {
+                    let idx = idx as u64;
+                    scope_table.insert(
+                        idx,
+                        (
+                            idx,
+                            CheckedCloneDrop {
+                                panic_in_clone,
+                                dropped: false,
+                                need_drop: vec![idx + 100],
+                            },
+                        ),
+                        |(k, _)| *k,
+                    );
+                }
+                table.clone_from(&scope_table);
+            });
+            assert!(result.join().is_err());
+        });
+
+        // Let's check that all iterators work fine and do not return elements
+        // (especially `RawIterRange`, which does not depend on the number of
+        // elements in the table, but looks directly at the control bytes)
+        //
+        // SAFETY: We know for sure that `RawTable` will outlive
+        // the returned `RawIter / RawIterRange` iterator.
+        assert_eq!(table.len(), 0);
+        assert_eq!(unsafe { table.iter().count() }, 0);
+        assert_eq!(unsafe { table.iter().iter.count() }, 0);
+
+        for idx in 0..table.buckets() {
+            let idx = idx as u64;
+            assert!(
+                table.find(idx, |(k, _)| *k == idx).is_none(),
+                "Index: {idx}"
+            );
+        }
+
+        // All allocator clones should already be dropped.
+        assert_eq!(dropped.load(Ordering::SeqCst), 1);
+    }
+}
+
\ No newline at end of file diff --git a/src/hashbrown/raw/sse2.rs.html b/src/hashbrown/raw/sse2.rs.html new file mode 100644 index 000000000..6d0282b3a --- /dev/null +++ b/src/hashbrown/raw/sse2.rs.html @@ -0,0 +1,299 @@ +sse2.rs - source
use super::bitmask::BitMask;
+use super::EMPTY;
+use core::mem;
+use core::num::NonZeroU16;
+
+#[cfg(target_arch = "x86")]
+use core::arch::x86;
+#[cfg(target_arch = "x86_64")]
+use core::arch::x86_64 as x86;
+
+pub(crate) type BitMaskWord = u16;
+pub(crate) type NonZeroBitMaskWord = NonZeroU16;
+pub(crate) const BITMASK_STRIDE: usize = 1;
+pub(crate) const BITMASK_MASK: BitMaskWord = 0xffff;
+pub(crate) const BITMASK_ITER_MASK: BitMaskWord = !0;
+
+/// Abstraction over a group of control bytes which can be scanned in
+/// parallel.
+///
+/// This implementation uses a 128-bit SSE value.
+#[derive(Copy, Clone)]
+pub(crate) struct Group(x86::__m128i);
+
+// FIXME: https://github.com/rust-lang/rust-clippy/issues/3859
+#[allow(clippy::use_self)]
+impl Group {
+    /// Number of bytes in the group.
+    pub(crate) const WIDTH: usize = mem::size_of::<Self>();
+
+    /// Returns a full group of empty bytes, suitable for use as the initial
+    /// value for an empty hash table.
+    ///
+    /// This is guaranteed to be aligned to the group size.
+    #[inline]
+    #[allow(clippy::items_after_statements)]
+    pub(crate) const fn static_empty() -> &'static [u8; Group::WIDTH] {
+        #[repr(C)]
+        struct AlignedBytes {
+            _align: [Group; 0],
+            bytes: [u8; Group::WIDTH],
+        }
+        const ALIGNED_BYTES: AlignedBytes = AlignedBytes {
+            _align: [],
+            bytes: [EMPTY; Group::WIDTH],
+        };
+        &ALIGNED_BYTES.bytes
+    }
+
+    /// Loads a group of bytes starting at the given address.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)] // unaligned load
+    pub(crate) unsafe fn load(ptr: *const u8) -> Self {
+        Group(x86::_mm_loadu_si128(ptr.cast()))
+    }
+
+    /// Loads a group of bytes starting at the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub(crate) unsafe fn load_aligned(ptr: *const u8) -> Self {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        Group(x86::_mm_load_si128(ptr.cast()))
+    }
+
+    /// Stores the group of bytes to the given address, which must be
+    /// aligned to `mem::align_of::<Group>()`.
+    #[inline]
+    #[allow(clippy::cast_ptr_alignment)]
+    pub(crate) unsafe fn store_aligned(self, ptr: *mut u8) {
+        // FIXME: use align_offset once it stabilizes
+        debug_assert_eq!(ptr as usize & (mem::align_of::<Self>() - 1), 0);
+        x86::_mm_store_si128(ptr.cast(), self.0);
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which have
+    /// the given value.
+    #[inline]
+    pub(crate) fn match_byte(self, byte: u8) -> BitMask {
+        #[allow(
+            clippy::cast_possible_wrap, // byte: u8 as i8
+            // byte: i32 as u16
+            //   note: _mm_movemask_epi8 returns a 16-bit mask in a i32, the
+            //   upper 16-bits of the i32 are zeroed:
+            clippy::cast_sign_loss,
+            clippy::cast_possible_truncation
+        )]
+        unsafe {
+            let cmp = x86::_mm_cmpeq_epi8(self.0, x86::_mm_set1_epi8(byte as i8));
+            BitMask(x86::_mm_movemask_epi8(cmp) as u16)
+        }
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY`.
+    #[inline]
+    pub(crate) fn match_empty(self) -> BitMask {
+        self.match_byte(EMPTY)
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are
+    /// `EMPTY` or `DELETED`.
+    #[inline]
+    pub(crate) fn match_empty_or_deleted(self) -> BitMask {
+        #[allow(
+            // byte: i32 as u16
+            //   note: _mm_movemask_epi8 returns a 16-bit mask in a i32, the
+            //   upper 16-bits of the i32 are zeroed:
+            clippy::cast_sign_loss,
+            clippy::cast_possible_truncation
+        )]
+        unsafe {
+            // A byte is EMPTY or DELETED iff the high bit is set
+            BitMask(x86::_mm_movemask_epi8(self.0) as u16)
+        }
+    }
+
+    /// Returns a `BitMask` indicating all bytes in the group which are full.
+    #[inline]
+    pub(crate) fn match_full(&self) -> BitMask {
+        self.match_empty_or_deleted().invert()
+    }
+
+    /// Performs the following transformation on all bytes in the group:
+    /// - `EMPTY => EMPTY`
+    /// - `DELETED => EMPTY`
+    /// - `FULL => DELETED`
+    #[inline]
+    pub(crate) fn convert_special_to_empty_and_full_to_deleted(self) -> Self {
+        // Map high_bit = 1 (EMPTY or DELETED) to 1111_1111
+        // and high_bit = 0 (FULL) to 1000_0000
+        //
+        // Here's this logic expanded to concrete values:
+        //   let special = 0 > byte = 1111_1111 (true) or 0000_0000 (false)
+        //   1111_1111 | 1000_0000 = 1111_1111
+        //   0000_0000 | 1000_0000 = 1000_0000
+        #[allow(
+            clippy::cast_possible_wrap, // byte: 0x80_u8 as i8
+        )]
+        unsafe {
+            let zero = x86::_mm_setzero_si128();
+            let special = x86::_mm_cmpgt_epi8(zero, self.0);
+            Group(x86::_mm_or_si128(
+                special,
+                x86::_mm_set1_epi8(0x80_u8 as i8),
+            ))
+        }
+    }
+}
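+
+// Illustrative sketch, kept as a comment and not part of the upstream file:
+// loading the static empty group and probing it with the `match_*` helpers.
+// `any_bit_set` is the `BitMask` helper already used elsewhere in this crate.
+//
+// let group = unsafe { Group::load_aligned(Group::static_empty().as_ptr()) };
+// assert!(group.match_empty().any_bit_set());     // every byte is EMPTY
+// assert!(!group.match_full().any_bit_set());     // so no byte is FULL
+// assert!(group.match_byte(EMPTY).any_bit_set()); // same mask as match_empty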
+
\ No newline at end of file diff --git a/src/hashbrown/scopeguard.rs.html b/src/hashbrown/scopeguard.rs.html new file mode 100644 index 000000000..0c932e24e --- /dev/null +++ b/src/hashbrown/scopeguard.rs.html @@ -0,0 +1,145 @@ +scopeguard.rs - source
// Extracted from the scopeguard crate
+use core::{
+    mem::ManuallyDrop,
+    ops::{Deref, DerefMut},
+    ptr,
+};
+
+pub struct ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    dropfn: F,
+    value: T,
+}
+
+#[inline]
+pub fn guard<T, F>(value: T, dropfn: F) -> ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    ScopeGuard { dropfn, value }
+}
+
+impl<T, F> ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    #[inline]
+    pub fn into_inner(guard: Self) -> T {
+        // Cannot move out of Drop-implementing types, so
+        // ptr::read the value out of a ManuallyDrop<Self>
+        // Don't use mem::forget as that might invalidate value
+        let guard = ManuallyDrop::new(guard);
+        unsafe {
+            let value = ptr::read(&guard.value);
+            // read the closure so that it is dropped
+            let _ = ptr::read(&guard.dropfn);
+            value
+        }
+    }
+}
+
+impl<T, F> Deref for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    type Target = T;
+    #[inline]
+    fn deref(&self) -> &T {
+        &self.value
+    }
+}
+
+impl<T, F> DerefMut for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    #[inline]
+    fn deref_mut(&mut self) -> &mut T {
+        &mut self.value
+    }
+}
+
+impl<T, F> Drop for ScopeGuard<T, F>
+where
+    F: FnMut(&mut T),
+{
+    #[inline]
+    fn drop(&mut self) {
+        (self.dropfn)(&mut self.value);
+    }
+}
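+
+// A minimal usage sketch for this module (an addition for illustration, not
+// part of the extracted upstream file): the drop closure runs when the guard
+// goes out of scope, unless the value is recovered first with
+// `ScopeGuard::into_inner`.
+#[cfg(test)]
+mod usage_sketch {
+    use super::guard;
+
+    #[test]
+    fn drop_closure_runs_on_scope_exit() {
+        let mut dropped = false;
+        {
+            // `T` is `&mut bool` here, so the closure receives `&mut &mut bool`.
+            let _guard = guard(&mut dropped, |flag| **flag = true);
+        }
+        assert!(dropped);
+    }
+}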
+
\ No newline at end of file diff --git a/src/hashbrown/set.rs.html b/src/hashbrown/set.rs.html new file mode 100644 index 000000000..0313a77bb --- /dev/null +++ b/src/hashbrown/set.rs.html @@ -0,0 +1,5941 @@ +set.rs - source
#[cfg(feature = "raw")]
+use crate::raw::RawTable;
+use crate::{Equivalent, TryReserveError};
+use alloc::borrow::ToOwned;
+use core::fmt;
+use core::hash::{BuildHasher, Hash};
+use core::iter::{Chain, FusedIterator};
+use core::ops::{BitAnd, BitOr, BitXor, Sub};
+
+use super::map::{self, DefaultHashBuilder, HashMap, Keys};
+use crate::raw::{Allocator, Global, RawExtractIf};
+
+// Future Optimization (FIXME!)
+// =============================
+//
+// Iteration over zero sized values is a noop. There is no need
+// for `bucket.val` in the case of HashSet. I suppose we would need HKT
+// to get rid of it properly.
+
+/// A hash set implemented as a `HashMap` where the value is `()`.
+///
+/// As with the [`HashMap`] type, a `HashSet` requires that the elements
+/// implement the [`Eq`] and [`Hash`] traits. This can frequently be achieved by
+/// using `#[derive(PartialEq, Eq, Hash)]`. If you implement these yourself,
+/// it is important that the following property holds:
+///
+/// ```text
+/// k1 == k2 -> hash(k1) == hash(k2)
+/// ```
+///
+/// In other words, if two keys are equal, their hashes must be equal.
+///
+///
+/// It is a logic error for an item to be modified in such a way that the
+/// item's hash, as determined by the [`Hash`] trait, or its equality, as
+/// determined by the [`Eq`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or
+/// unsafe code.
+///
+/// It is also a logic error for the [`Hash`] implementation of a key to panic.
+/// This is generally only possible if the trait is implemented manually. If a
+/// panic does occur then the contents of the `HashSet` may become corrupted and
+/// some items may be dropped from the table.
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::HashSet;
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `HashSet<String>` in this example).
+/// let mut books = HashSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons".to_string());
+/// books.insert("To Kill a Mockingbird".to_string());
+/// books.insert("The Odyssey".to_string());
+/// books.insert("The Great Gatsby".to_string());
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+///     println!("We have {} books, but The Winds of Winter ain't one.",
+///              books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+///     println!("{}", book);
+/// }
+/// ```
+///
+/// The easiest way to use `HashSet` with a custom type is to derive
+/// [`Eq`] and [`Hash`]. We must also derive [`PartialEq`]. This will in the
+/// future be implied by [`Eq`].
+///
+/// ```
+/// use hashbrown::HashSet;
+/// #[derive(Hash, Eq, PartialEq, Debug)]
+/// struct Viking {
+///     name: String,
+///     power: usize,
+/// }
+///
+/// let mut vikings = HashSet::new();
+///
+/// vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+/// vikings.insert(Viking { name: "Einar".to_string(), power: 9 });
+/// vikings.insert(Viking { name: "Olaf".to_string(), power: 4 });
+/// vikings.insert(Viking { name: "Harald".to_string(), power: 8 });
+///
+/// // Use derived implementation to print the vikings.
+/// for x in &vikings {
+///     println!("{:?}", x);
+/// }
+/// ```
+///
+/// A `HashSet` with a fixed list of elements can be initialized from an array:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let viking_names: HashSet<&'static str> =
+///     [ "Einar", "Olaf", "Harald" ].into_iter().collect();
+/// // use the values stored in the set
+/// ```
+///
+/// [`Cell`]: https://doc.rust-lang.org/std/cell/struct.Cell.html
+/// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+/// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+/// [`HashMap`]: struct.HashMap.html
+/// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html
+/// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html
+pub struct HashSet<T, S = DefaultHashBuilder, A: Allocator = Global> {
+    pub(crate) map: HashMap<T, (), S, A>,
+}
+
+impl<T: Clone, S: Clone, A: Allocator + Clone> Clone for HashSet<T, S, A> {
+    fn clone(&self) -> Self {
+        HashSet {
+            map: self.map.clone(),
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.map.clone_from(&source.map);
+    }
+}
+
+#[cfg(feature = "ahash")]
+impl<T> HashSet<T, DefaultHashBuilder> {
+    /// Creates an empty `HashSet`.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`], for example with
+    /// the [`with_hasher`](HashSet::with_hasher) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::new();
+    /// ```
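+    ///
+    /// A randomized hasher can be supplied explicitly instead, as recommended
+    /// above; this sketch assumes the standard library's `RandomState` is
+    /// available in the build:
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use std::collections::hash_map::RandomState;
+    ///
+    /// let set: HashSet<i32, RandomState> = HashSet::with_hasher(RandomState::new());
+    /// assert!(set.is_empty());
+    /// ```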
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new() -> Self {
+        Self {
+            map: HashMap::new(),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`], for example with
+    /// the [`with_capacity_and_hasher`](HashSet::with_capacity_and_hasher) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::with_capacity(10);
+    /// assert!(set.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            map: HashMap::with_capacity(capacity),
+        }
+    }
+}
+
+#[cfg(feature = "ahash")]
+impl<T: Hash + Eq, A: Allocator> HashSet<T, DefaultHashBuilder, A> {
+    /// Creates an empty `HashSet`.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`], for example with
+    /// the [`with_hasher_in`](HashSet::with_hasher_in) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::new();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn new_in(alloc: A) -> Self {
+        Self {
+            map: HashMap::new_in(alloc),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does not
+    /// protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`], for example with
+    /// the [`with_capacity_and_hasher_in`](HashSet::with_capacity_and_hasher_in) method.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::with_capacity(10);
+    /// assert!(set.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self {
+            map: HashMap::with_capacity_in(capacity, alloc),
+        }
+    }
+}
+
+impl<T, S, A: Allocator> HashSet<T, S, A> {
+    /// Returns the number of elements the set can hold without reallocating.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let set: HashSet<i32> = HashSet::with_capacity(100);
+    /// assert!(set.capacity() >= 100);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn capacity(&self) -> usize {
+        self.map.capacity()
+    }
+
+    /// An iterator visiting all elements in arbitrary order.
+    /// The iterator element type is `&'a T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set = HashSet::new();
+    /// set.insert("a");
+    /// set.insert("b");
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in set.iter() {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter {
+            iter: self.map.keys(),
+        }
+    }
+
+    /// Returns the number of elements in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.insert(1);
+    /// assert_eq!(v.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn len(&self) -> usize {
+        self.map.len()
+    }
+
+    /// Returns `true` if the set contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// assert!(v.is_empty());
+    /// v.insert(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_empty(&self) -> bool {
+        self.map.is_empty()
+    }
+
+    /// Clears the set, returning all elements in an iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// assert!(!set.is_empty());
+    ///
+    /// // print 1, 2, 3 in an arbitrary order
+    /// for i in set.drain() {
+    ///     println!("{}", i);
+    /// }
+    ///
+    /// assert!(set.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn drain(&mut self) -> Drain<'_, T, A> {
+        Drain {
+            iter: self.map.drain(),
+        }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let xs = [1,2,3,4,5,6];
+    /// let mut set: HashSet<i32> = xs.into_iter().collect();
+    /// set.retain(|&k| k % 2 == 0);
+    /// assert_eq!(set.len(), 3);
+    /// ```
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
+        self.map.retain(|k, _| f(k));
+    }
+
+    /// Drains elements for which the given predicate returns `true`,
+    /// and returns an iterator over the removed items.
+    ///
+    /// In other words, move all elements `e` such that `f(&e)` returns `true` out
+    /// into another iterator.
+    ///
+    /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+    /// or the iteration short-circuits, then the remaining elements will be retained.
+    /// Use [`retain()`] with a negated predicate if you do not need the returned iterator.
+    ///
+    /// [`retain()`]: HashSet::retain
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<i32> = (0..8).collect();
+    /// let drained: HashSet<i32> = set.extract_if(|v| v % 2 == 0).collect();
+    ///
+    /// let mut evens = drained.into_iter().collect::<Vec<_>>();
+    /// let mut odds = set.into_iter().collect::<Vec<_>>();
+    /// evens.sort();
+    /// odds.sort();
+    ///
+    /// assert_eq!(evens, vec![0, 2, 4, 6]);
+    /// assert_eq!(odds, vec![1, 3, 5, 7]);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
+    where
+        F: FnMut(&T) -> bool,
+    {
+        ExtractIf {
+            f,
+            inner: RawExtractIf {
+                iter: unsafe { self.map.table.iter() },
+                table: &mut self.map.table,
+            },
+        }
+    }
+
+    /// Clears the set, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut v = HashSet::new();
+    /// v.insert(1);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn clear(&mut self) {
+        self.map.clear();
+    }
+}
+
+impl<T, S> HashSet<T, S, Global> {
+    /// Creates a new empty hash set which will use the given hasher to hash
+    /// keys.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not
+    /// allocate until it is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does
+    /// not protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_hasher(s);
+    /// set.insert(2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher(hasher: S) -> Self {
+        Self {
+            map: HashMap::with_hasher(hasher),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity, using
+    /// `hasher` to hash the keys.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does
+    /// not protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_capacity_and_hasher(10, s);
+    /// set.insert(1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
+        Self {
+            map: HashMap::with_capacity_and_hasher(capacity, hasher),
+        }
+    }
+}
+
+impl<T, S, A> HashSet<T, S, A>
+where
+    A: Allocator,
+{
+    /// Returns a reference to the underlying allocator.
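+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch; with the default `Global` allocator this simply hands
+    /// back a reference to that (zero-sized) allocator value.
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set: HashSet<i32> = HashSet::new();
+    /// let _alloc = set.allocator();
+    /// ```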
+    #[inline]
+    pub fn allocator(&self) -> &A {
+        self.map.allocator()
+    }
+
+    /// Creates a new empty hash set which will use the given hasher to hash
+    /// keys.
+    ///
+    /// The hash set is initially created with a capacity of 0, so it will not
+    /// allocate until it is first inserted into.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does
+    /// not protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_hasher(s);
+    /// set.insert(2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub const fn with_hasher_in(hasher: S, alloc: A) -> Self {
+        Self {
+            map: HashMap::with_hasher_in(hasher, alloc),
+        }
+    }
+
+    /// Creates an empty `HashSet` with the specified capacity, using
+    /// `hasher` to hash the keys.
+    ///
+    /// The hash set will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash set will not allocate.
+    ///
+    /// # HashDoS resistance
+    ///
+    /// The `hash_builder` normally uses a fixed key by default, which does
+    /// not protect the `HashSet` against attacks such as [`HashDoS`].
+    /// Users who require HashDoS resistance should explicitly use
+    /// [`ahash::RandomState`] or [`std::collections::hash_map::RandomState`]
+    /// as the hasher when creating a [`HashSet`].
+    ///
+    /// The `hash_builder` passed should implement the [`BuildHasher`] trait for
+    /// the `HashSet` to be useful; see its documentation for details.
+    ///
+    /// [`HashDoS`]: https://en.wikipedia.org/wiki/Collision_attack
+    /// [`std::collections::hash_map::RandomState`]: https://doc.rust-lang.org/std/collections/hash_map/struct.RandomState.html
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let s = DefaultHashBuilder::default();
+    /// let mut set = HashSet::with_capacity_and_hasher(10, s);
+    /// set.insert(1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn with_capacity_and_hasher_in(capacity: usize, hasher: S, alloc: A) -> Self {
+        Self {
+            map: HashMap::with_capacity_and_hasher_in(capacity, hasher, alloc),
+        }
+    }
+
+    /// Returns a reference to the set's [`BuildHasher`].
+    ///
+    /// [`BuildHasher`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_map::DefaultHashBuilder;
+    ///
+    /// let hasher = DefaultHashBuilder::default();
+    /// let set: HashSet<i32> = HashSet::with_hasher(hasher);
+    /// let hasher: &DefaultHashBuilder = set.hasher();
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn hasher(&self) -> &S {
+        self.map.hasher()
+    }
+}
+
+impl<T, S, A> HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the `HashSet`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds [`isize::MAX`] bytes, and [`abort`]s the program
+    /// in case of allocation error. Use [`try_reserve`](HashSet::try_reserve) instead
+    /// if you want to handle memory allocation failure.
+    ///
+    /// [`isize::MAX`]: https://doc.rust-lang.org/std/primitive.isize.html
+    /// [`abort`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set: HashSet<i32> = HashSet::new();
+    /// set.reserve(10);
+    /// assert!(set.capacity() >= 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn reserve(&mut self, additional: usize) {
+        self.map.reserve(additional);
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `HashSet`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set: HashSet<i32> = HashSet::new();
+    /// set.try_reserve(10).expect("why is the test harness OOMing on 10 bytes?");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.map.try_reserve(additional)
+    }
+
+    /// Shrinks the capacity of the set as much as possible. It will drop
+    /// down as much as possible while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::with_capacity(100);
+    /// set.insert(1);
+    /// set.insert(2);
+    /// assert!(set.capacity() >= 100);
+    /// set.shrink_to_fit();
+    /// assert!(set.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to_fit(&mut self) {
+        self.map.shrink_to_fit();
+    }
+
+    /// Shrinks the capacity of the set with a lower limit. It will drop
+    /// down no lower than the supplied limit while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::with_capacity(100);
+    /// set.insert(1);
+    /// set.insert(2);
+    /// assert!(set.capacity() >= 100);
+    /// set.shrink_to(10);
+    /// assert!(set.capacity() >= 10);
+    /// set.shrink_to(0);
+    /// assert!(set.capacity() >= 2);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.map.shrink_to(min_capacity);
+    }
+
+    /// Visits the values representing the difference,
+    /// i.e., the values that are in `self` but not in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+    ///
+    /// // Can be seen as `a - b`.
+    /// for x in a.difference(&b) {
+    ///     println!("{}", x); // Print 1
+    /// }
+    ///
+    /// let diff: HashSet<_> = a.difference(&b).collect();
+    /// assert_eq!(diff, [1].iter().collect());
+    ///
+    /// // Note that difference is not symmetric,
+    /// // and `b - a` means something else:
+    /// let diff: HashSet<_> = b.difference(&a).collect();
+    /// assert_eq!(diff, [4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S, A> {
+        Difference {
+            iter: self.iter(),
+            other,
+        }
+    }
+
+    /// Visits the values representing the symmetric difference,
+    /// i.e., the values that are in `self` or in `other` but not in both.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+    ///
+    /// // Print 1, 4 in arbitrary order.
+    /// for x in a.symmetric_difference(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let diff1: HashSet<_> = a.symmetric_difference(&b).collect();
+    /// let diff2: HashSet<_> = b.symmetric_difference(&a).collect();
+    ///
+    /// assert_eq!(diff1, diff2);
+    /// assert_eq!(diff1, [1, 4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn symmetric_difference<'a>(&'a self, other: &'a Self) -> SymmetricDifference<'a, T, S, A> {
+        SymmetricDifference {
+            iter: self.difference(other).chain(other.difference(self)),
+        }
+    }
+
+    /// Visits the values representing the intersection,
+    /// i.e., the values that are both in `self` and `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+    ///
+    /// // Print 2, 3 in arbitrary order.
+    /// for x in a.intersection(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let intersection: HashSet<_> = a.intersection(&b).collect();
+    /// assert_eq!(intersection, [2, 3].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S, A> {
+        let (smaller, larger) = if self.len() <= other.len() {
+            (self, other)
+        } else {
+            (other, self)
+        };
+        Intersection {
+            iter: smaller.iter(),
+            other: larger,
+        }
+    }
+
+    /// Visits the values representing the union,
+    /// i.e., all the values in `self` or `other`, without duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = [4, 2, 3, 4].into_iter().collect();
+    ///
+    /// // Print 1, 2, 3, 4 in arbitrary order.
+    /// for x in a.union(&b) {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// let union: HashSet<_> = a.union(&b).collect();
+    /// assert_eq!(union, [1, 2, 3, 4].iter().collect());
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S, A> {
+        // We'll iterate one set in full, and only the remaining difference from the other.
+        // Use the smaller set for the difference in order to reduce hash lookups.
+        let (smaller, larger) = if self.len() <= other.len() {
+            (self, other)
+        } else {
+            (other, self)
+        };
+        Union {
+            iter: larger.iter().chain(smaller.difference(larger)),
+        }
+    }
+
+    /// Returns `true` if the set contains a value.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// assert_eq!(set.contains(&1), true);
+    /// assert_eq!(set.contains(&4), false);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+    where
+        Q: Hash + Equivalent<T>,
+    {
+        self.map.contains_key(value)
+    }
+
+    /// Returns a reference to the value in the set, if any, that is equal to the given value.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// assert_eq!(set.get(&2), Some(&2));
+    /// assert_eq!(set.get(&4), None);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+    where
+        Q: Hash + Equivalent<T>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.map.get_key_value(value) {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+
+    /// Inserts the given `value` into the set if it is not present, then
+    /// returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// assert_eq!(set.len(), 3);
+    /// assert_eq!(set.get_or_insert(2), &2);
+    /// assert_eq!(set.get_or_insert(100), &100);
+    /// assert_eq!(set.len(), 4); // 100 was inserted
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_or_insert(&mut self, value: T) -> &T {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(&value)
+            .or_insert(value, ())
+            .0
+    }
+
+    /// Inserts an owned copy of the given `value` into the set if it is not
+    /// present, then returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    ///     .iter().map(|&pet| pet.to_owned()).collect();
+    ///
+    /// assert_eq!(set.len(), 3);
+    /// for &pet in &["cat", "dog", "fish"] {
+    ///     let value = set.get_or_insert_owned(pet);
+    ///     assert_eq!(value, pet);
+    /// }
+    /// assert_eq!(set.len(), 4); // a new "fish" was inserted
+    /// ```
+    #[inline]
+    pub fn get_or_insert_owned<Q: ?Sized>(&mut self, value: &Q) -> &T
+    where
+        Q: Hash + Equivalent<T> + ToOwned<Owned = T>,
+    {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(value)
+            .or_insert_with(|| (value.to_owned(), ()))
+            .0
+    }
+
+    /// Inserts a value computed from `f` into the set if the given `value` is
+    /// not present, then returns a reference to the value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<String> = ["cat", "dog", "horse"]
+    ///     .iter().map(|&pet| pet.to_owned()).collect();
+    ///
+    /// assert_eq!(set.len(), 3);
+    /// for &pet in &["cat", "dog", "fish"] {
+    ///     let value = set.get_or_insert_with(pet, str::to_owned);
+    ///     assert_eq!(value, pet);
+    /// }
+    /// assert_eq!(set.len(), 4); // a new "fish" was inserted
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get_or_insert_with<Q: ?Sized, F>(&mut self, value: &Q, f: F) -> &T
+    where
+        Q: Hash + Equivalent<T>,
+        F: FnOnce(&Q) -> T,
+    {
+        // Although the raw entry gives us `&mut T`, we only return `&T` to be consistent with
+        // `get`. Key mutation is "raw" because you're not supposed to affect `Eq` or `Hash`.
+        self.map
+            .raw_entry_mut()
+            .from_key(value)
+            .or_insert_with(|| (f(value), ()))
+            .0
+    }
+
+    /// Gets the given value's corresponding entry in the set for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_set::Entry::*;
+    ///
+    /// let mut singles = HashSet::new();
+    /// let mut dupes = HashSet::new();
+    ///
+    /// for ch in "a short treatise on fungi".chars() {
+    ///     if let Vacant(dupe_entry) = dupes.entry(ch) {
+    ///         // We haven't already seen a duplicate, so
+    ///         // check if we've at least seen it once.
+    ///         match singles.entry(ch) {
+    ///             Vacant(single_entry) => {
+    ///                 // We found a new character for the first time.
+    ///                 single_entry.insert()
+    ///             }
+    ///             Occupied(single_entry) => {
+    ///                 // We've already seen this once, "move" it to dupes.
+    ///                 single_entry.remove();
+    ///                 dupe_entry.insert();
+    ///             }
+    ///         }
+    ///     }
+    /// }
+    ///
+    /// assert!(!singles.contains(&'t') && dupes.contains(&'t'));
+    /// assert!(singles.contains(&'u') && !dupes.contains(&'u'));
+    /// assert!(!singles.contains(&'v') && !dupes.contains(&'v'));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn entry(&mut self, value: T) -> Entry<'_, T, S, A> {
+        match self.map.entry(value) {
+            map::Entry::Occupied(entry) => Entry::Occupied(OccupiedEntry { inner: entry }),
+            map::Entry::Vacant(entry) => Entry::Vacant(VacantEntry { inner: entry }),
+        }
+    }
+
+    /// Returns `true` if `self` has no elements in common with `other`.
+    /// This is equivalent to checking for an empty intersection.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let mut b = HashSet::new();
+    ///
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(4);
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(1);
+    /// assert_eq!(a.is_disjoint(&b), false);
+    /// ```
+    pub fn is_disjoint(&self, other: &Self) -> bool {
+        self.iter().all(|v| !other.contains(v))
+    }
+
+    /// Returns `true` if the set is a subset of another,
+    /// i.e., `other` contains at least all the values in `self`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let sup: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(2);
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(4);
+    /// assert_eq!(set.is_subset(&sup), false);
+    /// ```
+    pub fn is_subset(&self, other: &Self) -> bool {
+        self.len() <= other.len() && self.iter().all(|v| other.contains(v))
+    }
+
+    /// Returns `true` if the set is a superset of another,
+    /// i.e., `self` contains at least all the values in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let sub: HashSet<_> = [1, 2].into_iter().collect();
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(0);
+    /// set.insert(1);
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.is_superset(&sub), true);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn is_superset(&self, other: &Self) -> bool {
+        other.is_subset(self)
+    }
+
+    /// Adds a value to the set.
+    ///
+    /// If the set did not have this value present, `true` is returned.
+    ///
+    /// If the set did have this value present, `false` is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// assert_eq!(set.insert(2), true);
+    /// assert_eq!(set.insert(2), false);
+    /// assert_eq!(set.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(&mut self, value: T) -> bool {
+        self.map.insert(value, ()).is_none()
+    }
+
+    /// Inserts a value into the set without checking whether it already exists in the set.
+    ///
+    /// Returns a reference to the value just inserted.
+    ///
+    /// This operation is safe if a value does not exist in the set.
+    ///
+    /// However, if a value already exists in the set, the behavior is unspecified:
+    /// this operation may panic or loop forever, and any following operation on the set
+    /// may panic, loop forever, or return an arbitrary result.
+    ///
+    /// That said, this operation (and any following operations) is guaranteed
+    /// not to violate memory safety.
+    ///
+    /// This operation is faster than a regular insert because it does not perform
+    /// a lookup before insertion.
+    ///
+    /// This operation is useful during initial population of the set.
+    /// For example, when constructing a set from another set, we know
+    /// that values are unique.
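+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch of the initial-population use case described above; the
+    /// values come from an existing set, so they are already known to be unique.
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let source: HashSet<i32> = [1, 2, 3].into_iter().collect();
+    /// let mut copy = HashSet::with_capacity(source.len());
+    /// for value in &source {
+    ///     // No lookup is performed here; `source` cannot contain duplicates.
+    ///     copy.insert_unique_unchecked(*value);
+    /// }
+    /// assert_eq!(source, copy);
+    /// ```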
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert_unique_unchecked(&mut self, value: T) -> &T {
+        self.map.insert_unique_unchecked(value, ()).0
+    }
+
+    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+    /// one. Returns the replaced value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    /// set.insert(Vec::<i32>::new());
+    ///
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+    /// set.replace(Vec::with_capacity(10));
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace(&mut self, value: T) -> Option<T> {
+        match self.map.entry(value) {
+            map::Entry::Occupied(occupied) => Some(occupied.replace_key()),
+            map::Entry::Vacant(vacant) => {
+                vacant.insert(());
+                None
+            }
+        }
+    }
+
+    /// Removes a value from the set. Returns whether the value was
+    /// present in the set.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set = HashSet::new();
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.remove(&2), true);
+    /// assert_eq!(set.remove(&2), false);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+    where
+        Q: Hash + Equivalent<T>,
+    {
+        self.map.remove(value).is_some()
+    }
+
+    /// Removes and returns the value in the set, if any, that is equal to the given one.
+    ///
+    /// The value may be any borrowed form of the set's value type, but
+    /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
+    /// the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<_> = [1, 2, 3].into_iter().collect();
+    /// assert_eq!(set.take(&2), Some(2));
+    /// assert_eq!(set.take(&2), None);
+    /// ```
+    ///
+    /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
+    /// [`Hash`]: https://doc.rust-lang.org/std/hash/trait.Hash.html
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+    where
+        Q: Hash + Equivalent<T>,
+    {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.map.remove_entry(value) {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+}
+
+impl<T, S, A: Allocator> HashSet<T, S, A> {
+    /// Returns a reference to the [`RawTable`] used underneath [`HashSet`].
+    /// This function is only available if the `raw` feature of the crate is enabled.
+    ///
+    /// # Note
+    ///
+    /// Calling this function is safe, but using the raw hash table API may require
+    /// unsafe functions or blocks.
+    ///
+    /// The `RawTable` API gives the lowest level of control over the set, which can be
+    /// useful for extending the `HashSet` API, but may lead to *[undefined behavior]*.
+    ///
+    /// [`HashSet`]: struct.HashSet.html
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table(&self) -> &RawTable<(T, ()), A> {
+        self.map.raw_table()
+    }
+
+    /// Returns a mutable reference to the [`RawTable`] used underneath [`HashSet`].
+    /// This function is only available if the `raw` feature of the crate is enabled.
+    ///
+    /// # Note
+    ///
+    /// Calling this function is safe, but using the raw hash table API may require
+    /// unsafe functions or blocks.
+    ///
+    /// The `RawTable` API gives the lowest level of control over the set, which can be
+    /// useful for extending the `HashSet` API, but may lead to *[undefined behavior]*.
+    ///
+    /// [`HashSet`]: struct.HashSet.html
+    /// [`RawTable`]: crate::raw::RawTable
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    #[cfg(feature = "raw")]
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn raw_table_mut(&mut self) -> &mut RawTable<(T, ()), A> {
+        self.map.raw_table_mut()
+    }
+}
+
+impl<T, S, A> PartialEq for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn eq(&self, other: &Self) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+
+        self.iter().all(|key| other.contains(key))
+    }
+}
+
+impl<T, S, A> Eq for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for HashSet<T, S, A>
+where
+    T: fmt::Debug,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
+where
+    A: Allocator,
+{
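+    /// Converts a `HashMap<T, ()>` into the corresponding `HashSet<T>`,
+    /// reusing the map's storage.
+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use hashbrown::{HashMap, HashSet};
+    ///
+    /// let map: HashMap<&str, ()> = [("a", ()), ("b", ())].into_iter().collect();
+    /// let set = HashSet::from(map);
+    /// assert!(set.contains("a") && set.contains("b"));
+    /// ```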
+    fn from(map: HashMap<T, (), S, A>) -> Self {
+        Self { map }
+    }
+}
+
+impl<T, S, A> FromIterator<T> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher + Default,
+    A: Default + Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let mut set = Self::with_hasher_in(Default::default(), Default::default());
+        set.extend(iter);
+        set
+    }
+}
+
+// The default hasher is used to match the std implementation signature
+#[cfg(feature = "ahash")]
+impl<T, A, const N: usize> From<[T; N]> for HashSet<T, DefaultHashBuilder, A>
+where
+    T: Eq + Hash,
+    A: Default + Allocator,
+{
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let set1 = HashSet::from([1, 2, 3, 4]);
+    /// let set2: HashSet<_> = [1, 2, 3, 4].into();
+    /// assert_eq!(set1, set2);
+    /// ```
+    fn from(arr: [T; N]) -> Self {
+        arr.into_iter().collect()
+    }
+}
+
+impl<T, S, A> Extend<T> for HashSet<T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
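+    /// Extends the set with the contents of an iterator; duplicate values
+    /// collapse to a single element.
+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<i32> = HashSet::new();
+    /// set.extend([1, 2, 2, 3]);
+    /// assert_eq!(set.len(), 3);
+    /// ```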
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        self.map.extend(iter.into_iter().map(|k| (k, ())));
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, k: T) {
+        self.map.insert(k, ());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+    }
+}
+
+impl<'a, T, S, A> Extend<&'a T> for HashSet<T, S, A>
+where
+    T: 'a + Eq + Hash + Copy,
+    S: BuildHasher,
+    A: Allocator,
+{
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().copied());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_one(&mut self, k: &'a T) {
+        self.map.insert(*k, ());
+    }
+
+    #[inline]
+    #[cfg(feature = "nightly")]
+    fn extend_reserve(&mut self, additional: usize) {
+        Extend::<(T, ())>::extend_reserve(&mut self.map, additional);
+    }
+}
+
+impl<T, S, A> Default for HashSet<T, S, A>
+where
+    S: Default,
+    A: Default + Allocator,
+{
+    /// Creates an empty `HashSet<T, S, A>` with the `Default` values for the hasher and allocator.
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn default() -> Self {
+        Self {
+            map: HashMap::default(),
+        }
+    }
+}
+
+impl<T, S, A> BitOr<&HashSet<T, S, A>> for &HashSet<T, S, A>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+    A: Allocator,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the union of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a | &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2, 3, 4, 5];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitor(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S> {
+        self.union(rhs).cloned().collect()
+    }
+}
+
+impl<T, S, A> BitAnd<&HashSet<T, S, A>> for &HashSet<T, S, A>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+    A: Allocator,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the intersection of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let set = &a & &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [2, 3];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitand(self, rhs: &HashSet<T, S, A>) -> HashSet<T, S> {
+        self.intersection(rhs).cloned().collect()
+    }
+}
+
+impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the symmetric difference of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a ^ &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2, 4, 5];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn bitxor(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.symmetric_difference(rhs).cloned().collect()
+    }
+}
+
+impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
+where
+    T: Eq + Hash + Clone,
+    S: BuildHasher + Default,
+{
+    type Output = HashSet<T, S>;
+
+    /// Returns the difference of `self` and `rhs` as a new `HashSet<T, S>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let set = &a - &b;
+    ///
+    /// let mut i = 0;
+    /// let expected = [1, 2];
+    /// for x in &set {
+    ///     assert!(expected.contains(x));
+    ///     i += 1;
+    /// }
+    /// assert_eq!(i, expected.len());
+    /// ```
+    fn sub(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
+        self.difference(rhs).cloned().collect()
+    }
+}
+
+/// An iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`iter`]: struct.HashSet.html#method.iter
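+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let set: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let iter = set.iter();
+/// assert_eq!(iter.count(), 3);
+/// ```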
+pub struct Iter<'a, K> {
+    iter: Keys<'a, K, ()>,
+}
+
+/// An owning iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`into_iter`]: struct.HashSet.html#method.into_iter
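+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let set: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let into_iter = set.into_iter();
+/// assert_eq!(into_iter.len(), 3);
+/// ```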
+pub struct IntoIter<K, A: Allocator = Global> {
+    iter: map::IntoIter<K, (), A>,
+}
+
+/// A draining iterator over the items of a `HashSet`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`drain`]: struct.HashSet.html#method.drain
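+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let mut set: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let drain = set.drain();
+/// assert_eq!(drain.len(), 3);
+/// ```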
+pub struct Drain<'a, K, A: Allocator = Global> {
+    iter: map::Drain<'a, K, (), A>,
+}
+
+/// A draining iterator over entries of a `HashSet` which don't satisfy the predicate `f`.
+///
+/// This `struct` is created by the [`extract_if`] method on [`HashSet`]. See its
+/// documentation for more.
+///
+/// [`extract_if`]: struct.HashSet.html#method.extract_if
+/// [`HashSet`]: struct.HashSet.html
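+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let mut set: HashSet<i32> = (0..8).collect();
+/// let evens: Vec<i32> = set.extract_if(|v| v % 2 == 0).collect();
+/// assert_eq!(evens.len(), 4);
+/// assert_eq!(set.len(), 4);
+/// ```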
+#[must_use = "Iterators are lazy unless consumed"]
+pub struct ExtractIf<'a, K, F, A: Allocator = Global>
+where
+    F: FnMut(&K) -> bool,
+{
+    f: F,
+    inner: RawExtractIf<'a, (K, ()), A>,
+}
+
+/// A lazy iterator producing elements in the intersection of `HashSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`intersection`]: struct.HashSet.html#method.intersection
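+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let a: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let b: HashSet<u32> = [2, 3, 4].into_iter().collect();
+/// let intersection = a.intersection(&b);
+/// assert_eq!(intersection.count(), 2);
+/// ```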
+pub struct Intersection<'a, T, S, A: Allocator = Global> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S, A>,
+}
+
+/// A lazy iterator producing elements in the difference of `HashSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`difference`]: struct.HashSet.html#method.difference
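+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let a: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let b: HashSet<u32> = [2, 3, 4].into_iter().collect();
+/// let difference = a.difference(&b);
+/// assert_eq!(difference.count(), 1);
+/// ```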
+pub struct Difference<'a, T, S, A: Allocator = Global> {
+    // iterator of the first set
+    iter: Iter<'a, T>,
+    // the second set
+    other: &'a HashSet<T, S, A>,
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `HashSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`HashSet`]. See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
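+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let a: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let b: HashSet<u32> = [2, 3, 4].into_iter().collect();
+/// let symmetric_difference = a.symmetric_difference(&b);
+/// assert_eq!(symmetric_difference.count(), 2);
+/// ```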
+pub struct SymmetricDifference<'a, T, S, A: Allocator = Global> {
+    iter: Chain<Difference<'a, T, S, A>, Difference<'a, T, S, A>>,
+}
+
+/// A lazy iterator producing elements in the union of `HashSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`HashSet`].
+/// See its documentation for more.
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`union`]: struct.HashSet.html#method.union
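+///
+/// # Examples
+///
+/// A minimal sketch of obtaining this iterator:
+///
+/// ```
+/// use hashbrown::HashSet;
+///
+/// let a: HashSet<u32> = [1, 2, 3].into_iter().collect();
+/// let b: HashSet<u32> = [2, 3, 4].into_iter().collect();
+/// let union = a.union(&b);
+/// assert_eq!(union.count(), 4);
+/// ```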
+pub struct Union<'a, T, S, A: Allocator = Global> {
+    iter: Chain<Iter<'a, T>, Difference<'a, T, S, A>>,
+}
+
+impl<'a, T, S, A: Allocator> IntoIterator for &'a HashSet<T, S, A> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+impl<T, S, A: Allocator> IntoIterator for HashSet<T, S, A> {
+    type Item = T;
+    type IntoIter = IntoIter<T, A>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out
+    /// of the set in arbitrary order. The set cannot be used after calling
+    /// this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// let mut set = HashSet::new();
+    /// set.insert("a".to_string());
+    /// set.insert("b".to_string());
+    ///
+    /// // Not possible to collect to a Vec<String> with a regular `.iter()`.
+    /// let v: Vec<String> = set.into_iter().collect();
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in &v {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn into_iter(self) -> IntoIter<T, A> {
+        IntoIter {
+            iter: self.map.into_iter(),
+        }
+    }
+}
+
+impl<K> Clone for Iter<'_, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Iter {
+            iter: self.iter.clone(),
+        }
+    }
+}
+impl<'a, K> Iterator for Iter<'a, K> {
+    type Item = &'a K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a K> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, f)
+    }
+}
+impl<'a, K> ExactSizeIterator for Iter<'a, K> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K> FusedIterator for Iter<'_, K> {}
+
+impl<K: fmt::Debug> fmt::Debug for Iter<'_, K> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<K, A: Allocator> Iterator for IntoIter<K, A> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, |acc, (k, ())| f(acc, k))
+    }
+}
+impl<K, A: Allocator> ExactSizeIterator for IntoIter<K, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K, A: Allocator> FusedIterator for IntoIter<K, A> {}
+
+impl<K: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<K, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<K, A: Allocator> Iterator for Drain<'_, K, A> {
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<K> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.iter.next() {
+            Some((k, _)) => Some(k),
+            None => None,
+        }
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, |acc, (k, ())| f(acc, k))
+    }
+}
+impl<K, A: Allocator> ExactSizeIterator for Drain<'_, K, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+impl<K, A: Allocator> FusedIterator for Drain<'_, K, A> {}
+
+impl<K: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, K, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let entries_iter = self.iter.iter().map(|(k, _)| k);
+        f.debug_list().entries(entries_iter).finish()
+    }
+}
+
+impl<K, F, A: Allocator> Iterator for ExtractIf<'_, K, F, A>
+where
+    F: FnMut(&K) -> bool,
+{
+    type Item = K;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner
+            .next(|&mut (ref k, ())| (self.f)(k))
+            .map(|(k, ())| k)
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<K, F, A: Allocator> FusedIterator for ExtractIf<'_, K, F, A> where F: FnMut(&K) -> bool {}
+
+impl<T, S, A: Allocator> Clone for Intersection<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Intersection {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for Intersection<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, |acc, elt| {
+            if self.other.contains(elt) {
+                f(acc, elt)
+            } else {
+                acc
+            }
+        })
+    }
+}
+
+impl<T, S, A> fmt::Debug for Intersection<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A> FusedIterator for Intersection<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A: Allocator> Clone for Difference<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Difference {
+            iter: self.iter.clone(),
+            ..*self
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for Difference<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let elt = self.iter.next()?;
+            if !self.other.contains(elt) {
+                return Some(elt);
+            }
+        }
+    }
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (_, upper) = self.iter.size_hint();
+        (0, upper)
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, |acc, elt| {
+            if self.other.contains(elt) {
+                acc
+            } else {
+                f(acc, elt)
+            }
+        })
+    }
+}
+
+impl<T, S, A> FusedIterator for Difference<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for Difference<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A: Allocator> Clone for SymmetricDifference<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        SymmetricDifference {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<'a, T, S, A> Iterator for SymmetricDifference<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, f)
+    }
+}
+
+impl<T, S, A> FusedIterator for SymmetricDifference<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for SymmetricDifference<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<T, S, A: Allocator> Clone for Union<'_, T, S, A> {
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn clone(&self) -> Self {
+        Union {
+            iter: self.iter.clone(),
+        }
+    }
+}
+
+impl<T, S, A> FusedIterator for Union<'_, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+}
+
+impl<T, S, A> fmt::Debug for Union<'_, T, S, A>
+where
+    T: fmt::Debug + Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+impl<'a, T, S, A> Iterator for Union<'a, T, S, A>
+where
+    T: Eq + Hash,
+    S: BuildHasher,
+    A: Allocator,
+{
+    type Item = &'a T;
+
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.iter.fold(init, f)
+    }
+}
+
+/// A view into a single entry in a set, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashSet`].
+///
+/// [`HashSet`]: struct.HashSet.html
+/// [`entry`]: struct.HashSet.html#method.entry
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_set::{Entry, HashSet, OccupiedEntry};
+///
+/// let mut set = HashSet::new();
+/// set.extend(["a", "b", "c"]);
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing value (insert)
+/// let entry: Entry<_, _> = set.entry("a");
+/// let _raw_o: OccupiedEntry<_, _> = entry.insert();
+/// assert_eq!(set.len(), 3);
+/// // Nonexistent value (insert)
+/// set.entry("d").insert();
+///
+/// // Existing value (or_insert)
+/// set.entry("b").or_insert();
+/// // Nonexistent value (or_insert)
+/// set.entry("e").or_insert();
+///
+/// println!("Our HashSet: {:?}", set);
+///
+/// let mut vec: Vec<_> = set.iter().copied().collect();
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, ["a", "b", "c", "d", "e"]);
+/// ```
+pub enum Entry<'a, T, S, A = Global>
+where
+    A: Allocator,
+{
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_set::{Entry, HashSet};
+    /// let mut set: HashSet<_> = ["a", "b"].into();
+    ///
+    /// match set.entry("a") {
+    ///     Entry::Vacant(_) => unreachable!(),
+    ///     Entry::Occupied(_) => { }
+    /// }
+    /// ```
+    Occupied(OccupiedEntry<'a, T, S, A>),
+
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_set::{Entry, HashSet};
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// match set.entry("a") {
+    ///     Entry::Occupied(_) => unreachable!(),
+    ///     Entry::Vacant(_) => { }
+    /// }
+    /// ```
+    Vacant(VacantEntry<'a, T, S, A>),
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for Entry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashSet`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_set::{Entry, HashSet, OccupiedEntry};
+///
+/// let mut set = HashSet::new();
+/// set.extend(["a", "b", "c"]);
+///
+/// let _entry_o: OccupiedEntry<_, _> = set.entry("a").insert();
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing key
+/// match set.entry("a") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.get(), &"a");
+///     }
+/// }
+///
+/// assert_eq!(set.len(), 3);
+///
+/// // Existing key (take)
+/// match set.entry("c") {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.remove(), "c");
+///     }
+/// }
+/// assert_eq!(set.get(&"c"), None);
+/// assert_eq!(set.len(), 2);
+/// ```
+pub struct OccupiedEntry<'a, T, S, A: Allocator = Global> {
+    inner: map::OccupiedEntry<'a, T, (), S, A>,
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for OccupiedEntry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+/// A view into a vacant entry in a `HashSet`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// use hashbrown::hash_set::{Entry, HashSet, VacantEntry};
+///
+/// let mut set = HashSet::<&str>::new();
+///
+/// let entry_v: VacantEntry<_, _> = match set.entry("a") {
+///     Entry::Vacant(view) => view,
+///     Entry::Occupied(_) => unreachable!(),
+/// };
+/// entry_v.insert();
+/// assert!(set.contains("a") && set.len() == 1);
+///
+/// // Nonexistent key (insert)
+/// match set.entry("b") {
+///     Entry::Vacant(view) => view.insert(),
+///     Entry::Occupied(_) => unreachable!(),
+/// }
+/// assert!(set.contains("b") && set.len() == 2);
+/// ```
+pub struct VacantEntry<'a, T, S, A: Allocator = Global> {
+    inner: map::VacantEntry<'a, T, (), S, A>,
+}
+
+impl<T: fmt::Debug, S, A: Allocator> fmt::Debug for VacantEntry<'_, T, S, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.get()).finish()
+    }
+}
+
+impl<'a, T, S, A: Allocator> Entry<'a, T, S, A> {
+    /// Sets the value of the entry, and returns an OccupiedEntry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// let entry = set.entry("horseyland").insert();
+    ///
+    /// assert_eq!(entry.get(), &"horseyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self) -> OccupiedEntry<'a, T, S, A>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        match self {
+            Entry::Occupied(entry) => entry,
+            Entry::Vacant(entry) => entry.insert_entry(),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting if it was vacant.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// // nonexistent key
+    /// set.entry("poneyland").or_insert();
+    /// assert!(set.contains("poneyland"));
+    ///
+    /// // existing key
+    /// set.entry("poneyland").or_insert();
+    /// assert!(set.contains("poneyland"));
+    /// assert_eq!(set.len(), 1);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn or_insert(self)
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        if let Entry::Vacant(entry) = self {
+            entry.insert();
+        }
+    }
+
+    /// Returns a reference to this entry's value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// set.entry("poneyland").or_insert();
+    /// // existing key
+    /// assert_eq!(set.entry("poneyland").get(), &"poneyland");
+    /// // nonexistent key
+    /// assert_eq!(set.entry("horseland").get(), &"horseland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        match *self {
+            Entry::Occupied(ref entry) => entry.get(),
+            Entry::Vacant(ref entry) => entry.get(),
+        }
+    }
+}
+
+impl<T, S, A: Allocator> OccupiedEntry<'_, T, S, A> {
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_set::{Entry, HashSet};
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// set.entry("poneyland").or_insert();
+    ///
+    /// match set.entry("poneyland") {
+    ///     Entry::Vacant(_) => panic!(),
+    ///     Entry::Occupied(entry) => assert_eq!(entry.get(), &"poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        self.inner.key()
+    }
+
+    /// Takes the value out of the entry, and returns it.
+    /// Keeps the allocated memory for reuse.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_set::Entry;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// // The set is empty
+    /// assert!(set.is_empty() && set.capacity() == 0);
+    ///
+    /// set.entry("poneyland").or_insert();
+    /// let capacity_before_remove = set.capacity();
+    ///
+    /// if let Entry::Occupied(o) = set.entry("poneyland") {
+    ///     assert_eq!(o.remove(), "poneyland");
+    /// }
+    ///
+    /// assert_eq!(set.contains("poneyland"), false);
+    /// // Now the set holds no elements but its capacity is equal to the old one
+    /// assert!(set.len() == 0 && set.capacity() == capacity_before_remove);
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> T {
+        self.inner.remove_entry().0
+    }
+
+    /// Replaces the entry, returning the old value. The new value in the hash set will be
+    /// the value used to create this entry.
+    ///
+    /// # Panics
+    ///
+    /// Will panic if this OccupiedEntry was created through [`Entry::insert`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    ///  use hashbrown::hash_set::{Entry, HashSet};
+    ///  use std::rc::Rc;
+    ///
+    ///  let mut set: HashSet<Rc<String>> = HashSet::new();
+    ///  let key_one = Rc::new("Stringthing".to_string());
+    ///  let key_two = Rc::new("Stringthing".to_string());
+    ///
+    ///  set.insert(key_one.clone());
+    ///  assert!(Rc::strong_count(&key_one) == 2 && Rc::strong_count(&key_two) == 1);
+    ///
+    ///  match set.entry(key_two.clone()) {
+    ///      Entry::Occupied(entry) => {
+    ///          let old_key: Rc<String> = entry.replace();
+    ///          assert!(Rc::ptr_eq(&key_one, &old_key));
+    ///      }
+    ///      Entry::Vacant(_) => panic!(),
+    ///  }
+    ///
+    ///  assert!(Rc::strong_count(&key_one) == 1 && Rc::strong_count(&key_two) == 2);
+    ///  assert!(set.contains(&"Stringthing".to_owned()));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn replace(self) -> T {
+        self.inner.replace_key()
+    }
+}
+
+impl<'a, T, S, A: Allocator> VacantEntry<'a, T, S, A> {
+    /// Gets a reference to the value that would be used when inserting
+    /// through the `VacantEntry`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    /// assert_eq!(set.entry("poneyland").get(), &"poneyland");
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn get(&self) -> &T {
+        self.inner.key()
+    }
+
+    /// Takes ownership of the value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::hash_set::{Entry, HashSet};
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// match set.entry("poneyland") {
+    ///     Entry::Occupied(_) => panic!(),
+    ///     Entry::Vacant(v) => assert_eq!(v.into_value(), "poneyland"),
+    /// }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn into_value(self) -> T {
+        self.inner.into_key()
+    }
+
+    /// Sets the value of the entry with the VacantEntry's value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashSet;
+    /// use hashbrown::hash_set::Entry;
+    ///
+    /// let mut set: HashSet<&str> = HashSet::new();
+    ///
+    /// if let Entry::Vacant(o) = set.entry("poneyland") {
+    ///     o.insert();
+    /// }
+    /// assert!(set.contains("poneyland"));
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn insert(self)
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        self.inner.insert(());
+    }
+
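+    // Internal helper for `Entry::insert`: inserts the unit value `()` into the
+    // wrapped map entry and re-wraps the result as a set `OccupiedEntry`.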
+    #[cfg_attr(feature = "inline-more", inline)]
+    fn insert_entry(self) -> OccupiedEntry<'a, T, S, A>
+    where
+        T: Hash,
+        S: BuildHasher,
+    {
+        OccupiedEntry {
+            inner: self.inner.insert_entry(()),
+        }
+    }
+}
+
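+// Compile-time covariance check: each body is an implicit coercion from a
+// `&'static str` element type to an arbitrary shorter lifetime, so these
+// functions only need to type-check and are never called.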
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> {
+        v
+    }
+    fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
+        v
+    }
+    fn into_iter<'new, A: Allocator>(v: IntoIter<&'static str, A>) -> IntoIter<&'new str, A> {
+        v
+    }
+    fn difference<'a, 'new, A: Allocator>(
+        v: Difference<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Difference<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn symmetric_difference<'a, 'new, A: Allocator>(
+        v: SymmetricDifference<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> SymmetricDifference<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn intersection<'a, 'new, A: Allocator>(
+        v: Intersection<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Intersection<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn union<'a, 'new, A: Allocator>(
+        v: Union<'a, &'static str, DefaultHashBuilder, A>,
+    ) -> Union<'a, &'new str, DefaultHashBuilder, A> {
+        v
+    }
+    fn drain<'new, A: Allocator>(d: Drain<'static, &'static str, A>) -> Drain<'new, &'new str, A> {
+        d
+    }
+}
+
+#[cfg(test)]
+mod test_set {
+    use super::super::map::DefaultHashBuilder;
+    use super::HashSet;
+    use std::vec::Vec;
+
+    #[test]
+    fn test_zero_capacities() {
+        type HS = HashSet<i32>;
+
+        let s = HS::new();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::default();
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_hasher(DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity(0);
+        assert_eq!(s.capacity(), 0);
+
+        let s = HS::with_capacity_and_hasher(0, DefaultHashBuilder::default());
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.insert(1);
+        s.insert(2);
+        s.remove(&1);
+        s.remove(&2);
+        s.shrink_to_fit();
+        assert_eq!(s.capacity(), 0);
+
+        let mut s = HS::new();
+        s.reserve(0);
+        assert_eq!(s.capacity(), 0);
+    }
+
+    #[test]
+    fn test_disjoint() {
+        let mut xs = HashSet::new();
+        let mut ys = HashSet::new();
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(5));
+        assert!(ys.insert(11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(xs.insert(7));
+        assert!(xs.insert(19));
+        assert!(xs.insert(4));
+        assert!(ys.insert(2));
+        assert!(ys.insert(-11));
+        assert!(xs.is_disjoint(&ys));
+        assert!(ys.is_disjoint(&xs));
+        assert!(ys.insert(7));
+        assert!(!xs.is_disjoint(&ys));
+        assert!(!ys.is_disjoint(&xs));
+    }
+
+    #[test]
+    fn test_subset_and_superset() {
+        let mut a = HashSet::new();
+        assert!(a.insert(0));
+        assert!(a.insert(5));
+        assert!(a.insert(11));
+        assert!(a.insert(7));
+
+        let mut b = HashSet::new();
+        assert!(b.insert(0));
+        assert!(b.insert(7));
+        assert!(b.insert(19));
+        assert!(b.insert(250));
+        assert!(b.insert(11));
+        assert!(b.insert(200));
+
+        assert!(!a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(!b.is_superset(&a));
+
+        assert!(b.insert(5));
+
+        assert!(a.is_subset(&b));
+        assert!(!a.is_superset(&b));
+        assert!(!b.is_subset(&a));
+        assert!(b.is_superset(&a));
+    }
+
+    #[test]
+    fn test_iterate() {
+        let mut a = HashSet::new();
+        for i in 0..32 {
+            assert!(a.insert(i));
+        }
+        let mut observed: u32 = 0;
+        for k in &a {
+            observed |= 1 << *k;
+        }
+        assert_eq!(observed, 0xFFFF_FFFF);
+    }
+
+    #[test]
+    fn test_intersection() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(11));
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(77));
+        assert!(a.insert(103));
+        assert!(a.insert(5));
+        assert!(a.insert(-5));
+
+        assert!(b.insert(2));
+        assert!(b.insert(11));
+        assert!(b.insert(77));
+        assert!(b.insert(-9));
+        assert!(b.insert(-42));
+        assert!(b.insert(5));
+        assert!(b.insert(3));
+
+        let mut i = 0;
+        let expected = [3, 5, 11, 77];
+        for x in a.intersection(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+
+        let mut i = 0;
+        let expected = [1, 5, 11];
+        for x in a.difference(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_symmetric_difference() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(3));
+        assert!(b.insert(9));
+        assert!(b.insert(14));
+        assert!(b.insert(22));
+
+        let mut i = 0;
+        let expected = [-2, 1, 5, 11, 14, 22];
+        for x in a.symmetric_difference(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_union() {
+        let mut a = HashSet::new();
+        let mut b = HashSet::new();
+
+        assert!(a.insert(1));
+        assert!(a.insert(3));
+        assert!(a.insert(5));
+        assert!(a.insert(9));
+        assert!(a.insert(11));
+        assert!(a.insert(16));
+        assert!(a.insert(19));
+        assert!(a.insert(24));
+
+        assert!(b.insert(-2));
+        assert!(b.insert(1));
+        assert!(b.insert(5));
+        assert!(b.insert(9));
+        assert!(b.insert(13));
+        assert!(b.insert(19));
+
+        let mut i = 0;
+        let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
+        for x in a.union(&b) {
+            assert!(expected.contains(x));
+            i += 1;
+        }
+        assert_eq!(i, expected.len());
+    }
+
+    #[test]
+    fn test_from_map() {
+        let mut a = crate::HashMap::new();
+        a.insert(1, ());
+        a.insert(2, ());
+        a.insert(3, ());
+        a.insert(4, ());
+
+        let a: HashSet<_> = a.into();
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+    }
+
+    #[test]
+    fn test_from_iter() {
+        let xs = [1, 2, 2, 3, 4, 5, 6, 7, 8, 9];
+
+        let set: HashSet<_> = xs.iter().copied().collect();
+
+        for x in &xs {
+            assert!(set.contains(x));
+        }
+
+        assert_eq!(set.iter().len(), xs.len() - 1);
+    }
+
+    #[test]
+    fn test_move_iter() {
+        let hs = {
+            let mut hs = HashSet::new();
+
+            hs.insert('a');
+            hs.insert('b');
+
+            hs
+        };
+
+        let v = hs.into_iter().collect::<Vec<char>>();
+        assert!(v == ['a', 'b'] || v == ['b', 'a']);
+    }
+
+    #[test]
+    fn test_eq() {
+        // These constants once happened to expose a bug in insert().
+        // I'm keeping them around to prevent a regression.
+        let mut s1 = HashSet::new();
+
+        s1.insert(1);
+        s1.insert(2);
+        s1.insert(3);
+
+        let mut s2 = HashSet::new();
+
+        s2.insert(1);
+        s2.insert(2);
+
+        assert!(s1 != s2);
+
+        s2.insert(3);
+
+        assert_eq!(s1, s2);
+    }
+
+    #[test]
+    fn test_show() {
+        let mut set = HashSet::new();
+        let empty = HashSet::<i32>::new();
+
+        set.insert(1);
+        set.insert(2);
+
+        let set_str = format!("{set:?}");
+
+        assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
+        assert_eq!(format!("{empty:?}"), "{}");
+    }
+
+    #[test]
+    fn test_trivial_drain() {
+        let mut s = HashSet::<i32>::new();
+        for _ in s.drain() {}
+        assert!(s.is_empty());
+        drop(s);
+
+        let mut s = HashSet::<i32>::new();
+        drop(s.drain());
+        assert!(s.is_empty());
+    }
+
+    #[test]
+    fn test_drain() {
+        let mut s: HashSet<_> = (1..100).collect();
+
+        // try this a bunch of times to make sure we don't screw up internal state.
+        for _ in 0..20 {
+            assert_eq!(s.len(), 99);
+
+            {
+                let mut last_i = 0;
+                let mut d = s.drain();
+                for (i, x) in d.by_ref().take(50).enumerate() {
+                    last_i = i;
+                    assert!(x != 0);
+                }
+                assert_eq!(last_i, 49);
+            }
+
+            if !s.is_empty() {
+                panic!("s should be empty!");
+            }
+
+            // reset to try again.
+            s.extend(1..100);
+        }
+    }
+
+    #[test]
+    fn test_replace() {
+        use core::hash;
+
+        #[derive(Debug)]
+        #[allow(dead_code)]
+        struct Foo(&'static str, i32);
+
+        impl PartialEq for Foo {
+            fn eq(&self, other: &Self) -> bool {
+                self.0 == other.0
+            }
+        }
+
+        impl Eq for Foo {}
+
+        impl hash::Hash for Foo {
+            fn hash<H: hash::Hasher>(&self, h: &mut H) {
+                self.0.hash(h);
+            }
+        }
+
+        let mut s = HashSet::new();
+        assert_eq!(s.replace(Foo("a", 1)), None);
+        assert_eq!(s.len(), 1);
+        assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+        assert_eq!(s.len(), 1);
+
+        let mut it = s.iter();
+        assert_eq!(it.next(), Some(&Foo("a", 2)));
+        assert_eq!(it.next(), None);
+    }
+
+    #[test]
+    #[allow(clippy::needless_borrow)]
+    fn test_extend_ref() {
+        let mut a = HashSet::new();
+        a.insert(1);
+
+        a.extend([2, 3, 4]);
+
+        assert_eq!(a.len(), 4);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+
+        let mut b = HashSet::new();
+        b.insert(5);
+        b.insert(6);
+
+        a.extend(&b);
+
+        assert_eq!(a.len(), 6);
+        assert!(a.contains(&1));
+        assert!(a.contains(&2));
+        assert!(a.contains(&3));
+        assert!(a.contains(&4));
+        assert!(a.contains(&5));
+        assert!(a.contains(&6));
+    }
+
+    #[test]
+    fn test_retain() {
+        let xs = [1, 2, 3, 4, 5, 6];
+        let mut set: HashSet<i32> = xs.iter().copied().collect();
+        set.retain(|&k| k % 2 == 0);
+        assert_eq!(set.len(), 3);
+        assert!(set.contains(&2));
+        assert!(set.contains(&4));
+        assert!(set.contains(&6));
+    }
+
+    #[test]
+    fn test_extract_if() {
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            let drained = set.extract_if(|&k| k % 2 == 0);
+            let mut out = drained.collect::<Vec<_>>();
+            out.sort_unstable();
+            assert_eq!(vec![0, 2, 4, 6], out);
+            assert_eq!(set.len(), 4);
+        }
+        {
+            let mut set: HashSet<i32> = (0..8).collect();
+            set.extract_if(|&k| k % 2 == 0).for_each(drop);
+            assert_eq!(set.len(), 4, "all matching items removed; non-matching items remain");
+        }
+    }
+
+    #[test]
+    fn test_const_with_hasher() {
+        use core::hash::BuildHasher;
+        use std::collections::hash_map::DefaultHasher;
+
+        #[derive(Clone)]
+        struct MyHasher;
+        impl BuildHasher for MyHasher {
+            type Hasher = DefaultHasher;
+
+            fn build_hasher(&self) -> DefaultHasher {
+                DefaultHasher::new()
+            }
+        }
+
+        const EMPTY_SET: HashSet<u32, MyHasher> = HashSet::with_hasher(MyHasher);
+
+        let mut set = EMPTY_SET;
+        set.insert(19);
+        assert!(set.contains(&19));
+    }
+
+    #[test]
+    fn rehash_in_place() {
+        let mut set = HashSet::new();
+
+        for i in 0..224 {
+            set.insert(i);
+        }
+
+        assert_eq!(
+            set.capacity(),
+            224,
+            "The set must be at or close to capacity to trigger a re hashing"
+        );
+
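+        // Churn at a constant length: removals leave tombstones, so staying at
+        // capacity repeatedly triggers the in-place rehash path this test is
+        // meant to exercise.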
+        for i in 100..1400 {
+            set.remove(&(i - 100));
+            set.insert(i);
+        }
+    }
+
+    #[test]
+    fn collect() {
+        // At the time of writing, this hits the ZST case in from_base_index
+        // (and without the `map`, it does not).
+        let mut _set: HashSet<_> = (0..3).map(|_| ()).collect();
+    }
+}
+
\ No newline at end of file diff --git a/src/hashbrown/table.rs.html b/src/hashbrown/table.rs.html new file mode 100644 index 000000000..2a03f6b18 --- /dev/null +++ b/src/hashbrown/table.rs.html @@ -0,0 +1,4141 @@ +table.rs - source
use core::{fmt, iter::FusedIterator, marker::PhantomData};
+
+use crate::{
+    raw::{
+        Allocator, Bucket, Global, InsertSlot, RawDrain, RawExtractIf, RawIntoIter, RawIter,
+        RawTable,
+    },
+    TryReserveError,
+};
+
+/// Low-level hash table with explicit hashing.
+///
+/// The primary use case for this type over [`HashMap`] or [`HashSet`] is to
+/// support types that do not implement the [`Hash`] and [`Eq`] traits, but
+/// instead require additional data not contained in the key itself to compute a
+/// hash and compare two elements for equality.
+///
+/// Examples of when this can be useful include:
+/// - An `IndexMap` implementation where indices into a `Vec` are stored as
+///   elements in a `HashTable<usize>`. Hashing and comparing the elements
+///   requires indexing the associated `Vec` to get the actual value referred to
+///   by the index.
+/// - Avoiding re-computing a hash when it is already known.
+/// - Mutating the key of an element in a way that doesn't affect its hash.
+///
+/// To achieve this, `HashTable` methods that search for an element in the table
+/// require a hash value and equality function to be explicitly passed in as
+/// arguments. The method will then iterate over the elements with the given
+/// hash and call the equality function on each of them, until a match is found.
+///
+/// In most cases, a `HashTable` will not be exposed directly in an API. It will
+/// instead be wrapped in a helper type which handles the work of calculating
+/// hash values and comparing elements.
+///
+/// Due to its low-level nature, this type provides fewer guarantees than
+/// [`HashMap`] and [`HashSet`]. Specifically, the API allows you to shoot
+/// yourself in the foot by having multiple elements with identical keys in the
+/// table. The table itself will still function correctly and lookups will
+/// arbitrarily return one of the matching elements. However you should avoid
+/// doing this because it changes the runtime of hash table operations from
+/// `O(1)` to `O(k)` where `k` is the number of duplicate entries.
+///
+/// [`HashMap`]: super::HashMap
+/// [`HashSet`]: super::HashSet
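+///
+/// # Examples
+///
+/// A minimal usage sketch with the standard library's `RandomState` as the hash
+/// source (purely illustrative; any `BuildHasher` or hand-rolled hashing works):
+///
+/// ```
+/// use std::collections::hash_map::RandomState;
+/// use std::hash::BuildHasher;
+///
+/// use hashbrown::HashTable;
+///
+/// let state = RandomState::new();
+/// let hash = |val: &i32| state.hash_one(val);
+///
+/// // The table never hashes elements itself: every call is handed the hash
+/// // and an equality closure explicitly.
+/// let mut table = HashTable::new();
+/// table.insert_unique(hash(&42), 42, hash);
+/// assert_eq!(table.find(hash(&42), |&v| v == 42), Some(&42));
+/// assert_eq!(table.find(hash(&7), |&v| v == 7), None);
+/// ```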
+pub struct HashTable<T, A = Global>
+where
+    A: Allocator,
+{
+    pub(crate) raw: RawTable<T, A>,
+}
+
+impl<T> HashTable<T, Global> {
+    /// Creates an empty `HashTable`.
+    ///
+    /// The hash table is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashTable;
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// assert_eq!(table.len(), 0);
+    /// assert_eq!(table.capacity(), 0);
+    /// ```
+    pub const fn new() -> Self {
+        Self {
+            raw: RawTable::new(),
+        }
+    }
+
+    /// Creates an empty `HashTable` with the specified capacity.
+    ///
+    /// The hash table will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash table will not allocate.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashTable;
+    /// let mut table: HashTable<&str> = HashTable::with_capacity(10);
+    /// assert_eq!(table.len(), 0);
+    /// assert!(table.capacity() >= 10);
+    /// ```
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self {
+            raw: RawTable::with_capacity(capacity),
+        }
+    }
+}
+
+impl<T, A> HashTable<T, A>
+where
+    A: Allocator,
+{
+    /// Creates an empty `HashTable` using the given allocator.
+    ///
+    /// The hash table is initially created with a capacity of 0, so it will not allocate until it
+    /// is first inserted into.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use bumpalo::Bump;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let bump = Bump::new();
+    /// let mut table = HashTable::new_in(&bump);
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// // The created HashTable holds no elements
+    /// assert_eq!(table.len(), 0);
+    ///
+    /// // The created HashTable also doesn't allocate memory
+    /// assert_eq!(table.capacity(), 0);
+    ///
+    /// // Now we insert an element into the created HashTable
+    /// table.insert_unique(hasher(&"One"), "One", hasher);
+    /// // We can see that the HashTable holds 1 element
+    /// assert_eq!(table.len(), 1);
+    /// // And it also allocates some capacity
+    /// assert!(table.capacity() > 1);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub const fn new_in(alloc: A) -> Self {
+        Self {
+            raw: RawTable::new_in(alloc),
+        }
+    }
+
+    /// Creates an empty `HashTable` with the specified capacity using the given allocator.
+    ///
+    /// The hash table will be able to hold at least `capacity` elements without
+    /// reallocating. If `capacity` is 0, the hash table will not allocate.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use bumpalo::Bump;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let bump = Bump::new();
+    /// let mut table = HashTable::with_capacity_in(5, &bump);
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// // The created HashTable holds no elements
+    /// assert_eq!(table.len(), 0);
+    /// // But it can hold at least 5 elements without reallocating
+    /// let empty_map_capacity = table.capacity();
+    /// assert!(empty_map_capacity >= 5);
+    ///
+    /// // Now we insert 5 elements into the created HashTable
+    /// table.insert_unique(hasher(&"One"), "One", hasher);
+    /// table.insert_unique(hasher(&"Two"), "Two", hasher);
+    /// table.insert_unique(hasher(&"Three"), "Three", hasher);
+    /// table.insert_unique(hasher(&"Four"), "Four", hasher);
+    /// table.insert_unique(hasher(&"Five"), "Five", hasher);
+    ///
+    /// // We can see that the HashTable holds 5 elements
+    /// assert_eq!(table.len(), 5);
+    /// // But its capacity hasn't changed
+    /// assert_eq!(table.capacity(), empty_map_capacity)
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self {
+            raw: RawTable::with_capacity_in(capacity, alloc),
+        }
+    }
+
+    /// Returns a reference to the underlying allocator.
+    pub fn allocator(&self) -> &A {
+        self.raw.allocator()
+    }
+
+    /// Returns a reference to an entry in the table with the given hash and
+    /// which satisfies the equality function passed.
+    ///
+    /// This method will call `eq` for all entries with the given hash, but may
+    /// also call it for entries with a different hash. `eq` should only return
+    /// true for the desired entry, at which point the search is stopped.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), 1, hasher);
+    /// table.insert_unique(hasher(&2), 2, hasher);
+    /// table.insert_unique(hasher(&3), 3, hasher);
+    /// assert_eq!(table.find(hasher(&2), |&val| val == 2), Some(&2));
+    /// assert_eq!(table.find(hasher(&4), |&val| val == 4), None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn find(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> {
+        self.raw.get(hash, eq)
+    }
+
+    /// Returns a mutable reference to an entry in the table with the given hash
+    /// and which satisfies the equality function passed.
+    ///
+    /// This method will call `eq` for all entries with the given hash, but may
+    /// also call it for entries with a different hash. `eq` should only return
+    /// true for the desired entry, at which point the search is stopped.
+    ///
+    /// When mutating an entry, you should ensure that it still retains the same
+    /// hash value as when it was inserted, otherwise lookups of that entry may
+    /// fail to find it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+    /// if let Some(val) = table.find_mut(hasher(&1), |val| val.0 == 1) {
+    ///     val.1 = "b";
+    /// }
+    /// assert_eq!(table.find(hasher(&1), |val| val.0 == 1), Some(&(1, "b")));
+    /// assert_eq!(table.find(hasher(&2), |val| val.0 == 2), None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn find_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> {
+        self.raw.get_mut(hash, eq)
+    }
+
+    /// Returns an `OccupiedEntry` for an entry in the table with the given hash
+    /// and which satisfies the equality function passed.
+    ///
+    /// This can be used to remove the entry from the table. Call
+    /// [`HashTable::entry`] instead if you wish to insert an entry if the
+    /// lookup fails.
+    ///
+    /// This method will call `eq` for all entries with the given hash, but may
+    /// also call it for entries with a different hash. `eq` should only return
+    /// true for the desired entry, at which point the search is stopped.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+    /// if let Ok(entry) = table.find_entry(hasher(&1), |val| val.0 == 1) {
+    ///     entry.remove();
+    /// }
+    /// assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn find_entry(
+        &mut self,
+        hash: u64,
+        eq: impl FnMut(&T) -> bool,
+    ) -> Result<OccupiedEntry<'_, T, A>, AbsentEntry<'_, T, A>> {
+        match self.raw.find(hash, eq) {
+            Some(bucket) => Ok(OccupiedEntry {
+                hash,
+                bucket,
+                table: self,
+            }),
+            None => Err(AbsentEntry { table: self }),
+        }
+    }
+
+    /// Returns an `Entry` for an entry in the table with the given hash
+    /// and which satisfies the equality function passed.
+    ///
+    /// This can be used to remove the entry from the table, or insert a new
+    /// entry with the given hash if one doesn't already exist.
+    ///
+    /// This method will call `eq` for all entries with the given hash, but may
+    /// also call it for entries with a different hash. `eq` should only return
+    /// true for the desired entry, at which point the search is stopped.
+    ///
+    /// This method may grow the table in preparation for an insertion. Call
+    /// [`HashTable::find_entry`] if this is undesirable.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), (1, "a"), |val| hasher(&val.0));
+    /// if let Entry::Occupied(entry) = table.entry(hasher(&1), |val| val.0 == 1, |val| hasher(&val.0))
+    /// {
+    ///     entry.remove();
+    /// }
+    /// if let Entry::Vacant(entry) = table.entry(hasher(&2), |val| val.0 == 2, |val| hasher(&val.0)) {
+    ///     entry.insert((2, "b"));
+    /// }
+    /// assert_eq!(table.find(hasher(&1), |val| val.0 == 1), None);
+    /// assert_eq!(table.find(hasher(&2), |val| val.0 == 2), Some(&(2, "b")));
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn entry(
+        &mut self,
+        hash: u64,
+        eq: impl FnMut(&T) -> bool,
+        hasher: impl Fn(&T) -> u64,
+    ) -> Entry<'_, T, A> {
+        match self.raw.find_or_find_insert_slot(hash, eq, hasher) {
+            Ok(bucket) => Entry::Occupied(OccupiedEntry {
+                hash,
+                bucket,
+                table: self,
+            }),
+            Err(insert_slot) => Entry::Vacant(VacantEntry {
+                hash,
+                insert_slot,
+                table: self,
+            }),
+        }
+    }
+
+    /// Inserts an element into the `HashTable` with the given hash value, but
+    /// without checking whether an equivalent element already exists within the
+    /// table.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut v = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// v.insert_unique(hasher(&1), 1, hasher);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn insert_unique(
+        &mut self,
+        hash: u64,
+        value: T,
+        hasher: impl Fn(&T) -> u64,
+    ) -> OccupiedEntry<'_, T, A> {
+        let bucket = self.raw.insert(hash, value, hasher);
+        OccupiedEntry {
+            hash,
+            bucket,
+            table: self,
+        }
+    }
+
+    /// Clears the table, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut v = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// v.insert_unique(hasher(&1), 1, hasher);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn clear(&mut self) {
+        self.raw.clear();
+    }
+
+    /// Shrinks the capacity of the table as much as possible. It will drop
+    /// down as much as possible while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::with_capacity(100);
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), 1, hasher);
+    /// table.insert_unique(hasher(&2), 2, hasher);
+    /// assert!(table.capacity() >= 100);
+    /// table.shrink_to_fit(hasher);
+    /// assert!(table.capacity() >= 2);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn shrink_to_fit(&mut self, hasher: impl Fn(&T) -> u64) {
+        self.raw.shrink_to(self.len(), hasher)
+    }
+
+    /// Shrinks the capacity of the table with a lower limit. It will drop
+    /// down no lower than the supplied limit while maintaining the internal rules
+    /// and possibly leaving some space in accordance with the resize policy.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::with_capacity(100);
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), 1, hasher);
+    /// table.insert_unique(hasher(&2), 2, hasher);
+    /// assert!(table.capacity() >= 100);
+    /// table.shrink_to(10, hasher);
+    /// assert!(table.capacity() >= 10);
+    /// table.shrink_to(0, hasher);
+    /// assert!(table.capacity() >= 2);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn shrink_to(&mut self, min_capacity: usize, hasher: impl Fn(&T) -> u64) {
+        self.raw.shrink_to(min_capacity, hasher);
+    }
+
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the `HashTable`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds [`isize::MAX`] bytes, and [`abort`]s the program
+    /// in case of allocation error. Use [`try_reserve`](HashTable::try_reserve) instead
+    /// if you want to handle memory allocation failure.
+    ///
+    /// [`isize::MAX`]: https://doc.rust-lang.org/std/primitive.isize.html
+    /// [`abort`]: https://doc.rust-lang.org/alloc/alloc/fn.handle_alloc_error.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<i32> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.reserve(10, hasher);
+    /// assert!(table.capacity() >= 10);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
+        self.raw.reserve(additional, hasher)
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `HashTable`. The collection may reserve more space to avoid
+    /// frequent reallocations.
+    ///
+    /// `hasher` is called if entries need to be moved or copied to a new table.
+    /// This must return the same hash value that each entry was inserted with.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<i32> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table
+    ///     .try_reserve(10, hasher)
+    ///     .expect("why is the test harness OOMing on 10 bytes?");
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn try_reserve(
+        &mut self,
+        additional: usize,
+        hasher: impl Fn(&T) -> u64,
+    ) -> Result<(), TryReserveError> {
+        self.raw.try_reserve(additional, hasher)
+    }
+
+    /// Returns the number of elements the table can hold without reallocating.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use hashbrown::HashTable;
+    /// let table: HashTable<i32> = HashTable::with_capacity(100);
+    /// assert!(table.capacity() >= 100);
+    /// ```
+    pub fn capacity(&self) -> usize {
+        self.raw.capacity()
+    }
+
+    /// Returns the number of elements in the table.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// let mut v = HashTable::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.insert_unique(hasher(&1), 1, hasher);
+    /// assert_eq!(v.len(), 1);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn len(&self) -> usize {
+        self.raw.len()
+    }
+
+    /// Returns `true` if the table contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// let mut v = HashTable::new();
+    /// assert!(v.is_empty());
+    /// v.insert_unique(hasher(&1), 1, hasher);
+    /// assert!(!v.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn is_empty(&self) -> bool {
+        self.raw.is_empty()
+    }
+
+    /// An iterator visiting all elements in arbitrary order.
+    /// The iterator element type is `&'a T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&"a"), "b", hasher);
+    /// table.insert_unique(hasher(&"b"), "b", hasher);
+    ///
+    /// // Will print in an arbitrary order.
+    /// for x in table.iter() {
+    ///     println!("{}", x);
+    /// }
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter {
+            inner: unsafe { self.raw.iter() },
+            marker: PhantomData,
+        }
+    }
+
+    /// An iterator visiting all elements in arbitrary order,
+    /// with mutable references to the elements.
+    /// The iterator element type is `&'a mut T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&1), 1, hasher);
+    /// table.insert_unique(hasher(&2), 2, hasher);
+    /// table.insert_unique(hasher(&3), 3, hasher);
+    ///
+    /// // Update all values
+    /// for val in table.iter_mut() {
+    ///     *val *= 2;
+    /// }
+    ///
+    /// assert_eq!(table.len(), 3);
+    /// let mut vec: Vec<i32> = Vec::new();
+    ///
+    /// for val in &table {
+    ///     println!("val: {}", val);
+    ///     vec.push(*val);
+    /// }
+    ///
+    /// // The `Iter` iterator produces items in arbitrary order, so the
+    /// // items must be sorted to test them against a sorted array.
+    /// vec.sort_unstable();
+    /// assert_eq!(vec, [2, 4, 6]);
+    ///
+    /// assert_eq!(table.len(), 3);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+        IterMut {
+            inner: unsafe { self.raw.iter() },
+            marker: PhantomData,
+        }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for x in 1..=6 {
+    ///     table.insert_unique(hasher(&x), x, hasher);
+    /// }
+    /// table.retain(|&mut x| x % 2 == 0);
+    /// assert_eq!(table.len(), 3);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn retain(&mut self, mut f: impl FnMut(&mut T) -> bool) {
+        // Here we only use `iter` as a temporary, preventing use-after-free
+        unsafe {
+            for item in self.raw.iter() {
+                if !f(item.as_mut()) {
+                    self.raw.erase(item);
+                }
+            }
+        }
+    }
+
+    /// Clears the table, returning all elements in an iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for x in 1..=3 {
+    ///     table.insert_unique(hasher(&x), x, hasher);
+    /// }
+    /// assert!(!table.is_empty());
+    ///
+    /// // print 1, 2, 3 in an arbitrary order
+    /// for i in table.drain() {
+    ///     println!("{}", i);
+    /// }
+    ///
+    /// assert!(table.is_empty());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn drain(&mut self) -> Drain<'_, T, A> {
+        Drain {
+            inner: self.raw.drain(),
+        }
+    }
+
+    /// Drains all elements for which the given predicate returns `true`, and
+    /// returns an iterator over the removed items.
+    ///
+    /// In other words, moves all elements `e` such that `f(&e)` returns `true`
+    /// out into another iterator.
+    ///
+    /// If the returned `ExtractIf` is not exhausted, e.g. because it is dropped without iterating
+    /// or the iteration short-circuits, then the remaining elements will be retained.
+    /// Use [`retain()`] with a negated predicate if you do not need the returned iterator.
+    ///
+    /// [`retain()`]: HashTable::retain
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for x in 0..8 {
+    ///     table.insert_unique(hasher(&x), x, hasher);
+    /// }
+    /// let drained: Vec<i32> = table.extract_if(|&mut v| v % 2 == 0).collect();
+    ///
+    /// let mut evens = drained.into_iter().collect::<Vec<_>>();
+    /// let mut odds = table.into_iter().collect::<Vec<_>>();
+    /// evens.sort();
+    /// odds.sort();
+    ///
+    /// assert_eq!(evens, vec![0, 2, 4, 6]);
+    /// assert_eq!(odds, vec![1, 3, 5, 7]);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
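+    ///
+    /// Dropping the iterator early keeps the unvisited elements. A minimal sketch
+    /// of that behavior, reusing the nightly doctest scaffolding from the example
+    /// above:
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for x in 0..8 {
+    ///     table.insert_unique(hasher(&x), x, hasher);
+    /// }
+    ///
+    /// // Take only the first matching element, then drop the iterator.
+    /// let first_even = table.extract_if(|&mut v| v % 2 == 0).next();
+    /// assert!(first_even.is_some());
+    /// // The remaining seven elements (including the other evens) are retained.
+    /// assert_eq!(table.len(), 7);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```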
+    pub fn extract_if<F>(&mut self, f: F) -> ExtractIf<'_, T, F, A>
+    where
+        F: FnMut(&mut T) -> bool,
+    {
+        ExtractIf {
+            f,
+            inner: RawExtractIf {
+                iter: unsafe { self.raw.iter() },
+                table: &mut self.raw,
+            },
+        }
+    }
+
+    /// Attempts to get mutable references to `N` values in the table at once.
+    ///
+    /// The `eq` argument should be a closure such that `eq(i, k)` returns true if `k` is equal to
+    /// the `i`th key to be looked up.
+    ///
+    /// Returns an array of length `N` with the results of each query. For soundness, at most one
+    /// mutable reference will be returned to any value. `None` will be returned if any of the
+    /// keys are duplicates or missing.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for (k, v) in [
+    ///     ("Bodleian Library", 1602),
+    ///     ("Athenæum", 1807),
+    ///     ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ///     ("Library of Congress", 1800),
+    /// ] {
+    ///     libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+    /// }
+    ///
+    /// let keys = ["Athenæum", "Library of Congress"];
+    /// let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+    /// assert_eq!(
+    ///     got,
+    ///     Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+    /// );
+    ///
+    /// // Missing keys result in None
+    /// let keys = ["Athenæum", "New York Public Library"];
+    /// let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+    /// assert_eq!(got, None);
+    ///
+    /// // Duplicate keys result in None
+    /// let keys = ["Athenæum", "Athenæum"];
+    /// let got = libraries.get_many_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0);
+    /// assert_eq!(got, None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn get_many_mut<const N: usize>(
+        &mut self,
+        hashes: [u64; N],
+        eq: impl FnMut(usize, &T) -> bool,
+    ) -> Option<[&'_ mut T; N]> {
+        self.raw.get_many_mut(hashes, eq)
+    }
+
+    /// Attempts to get mutable references to `N` values in the table at once, without
+    /// validating that the values are unique.
+    ///
+    /// The `eq` argument should be a closure such that `eq(i, k)` returns true if `k` is equal to
+    /// the `i`th key to be looked up.
+    ///
+    /// Returns an array of length `N` with the results of each query. `None` will be returned if
+    /// any of the keys are missing.
+    ///
+    /// For a safe alternative see [`get_many_mut`](`HashTable::get_many_mut`).
+    ///
+    /// # Safety
+    ///
+    /// Calling this method with overlapping keys is *[undefined behavior]* even if the resulting
+    /// references are not used.
+    ///
+    /// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut libraries: HashTable<(&str, u32)> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for (k, v) in [
+    ///     ("Bodleian Library", 1602),
+    ///     ("Athenæum", 1807),
+    ///     ("Herzogin-Anna-Amalia-Bibliothek", 1691),
+    ///     ("Library of Congress", 1800),
+    /// ] {
+    ///     libraries.insert_unique(hasher(&k), (k, v), |(k, _)| hasher(&k));
+    /// }
+    ///
+    /// let keys = ["Athenæum", "Library of Congress"];
+    /// let got = unsafe {
+    ///     libraries.get_many_unchecked_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0)
+    /// };
+    /// assert_eq!(
+    ///     got,
+    ///     Some([&mut ("Athenæum", 1807), &mut ("Library of Congress", 1800),]),
+    /// );
+    ///
+    /// // Missing keys result in None
+    /// let keys = ["Athenæum", "New York Public Library"];
+    /// let got = unsafe {
+    ///     libraries.get_many_unchecked_mut(keys.map(|k| hasher(&k)), |i, val| keys[i] == val.0)
+    /// };
+    /// assert_eq!(got, None);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub unsafe fn get_many_unchecked_mut<const N: usize>(
+        &mut self,
+        hashes: [u64; N],
+        eq: impl FnMut(usize, &T) -> bool,
+    ) -> Option<[&'_ mut T; N]> {
+        self.raw.get_many_unchecked_mut(hashes, eq)
+    }
+}
+
+impl<T, A> IntoIterator for HashTable<T, A>
+where
+    A: Allocator,
+{
+    type Item = T;
+    type IntoIter = IntoIter<T, A>;
+
+    fn into_iter(self) -> IntoIter<T, A> {
+        IntoIter {
+            inner: self.raw.into_iter(),
+        }
+    }
+}
+
+impl<'a, T, A> IntoIterator for &'a HashTable<T, A>
+where
+    A: Allocator,
+{
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+impl<'a, T, A> IntoIterator for &'a mut HashTable<T, A>
+where
+    A: Allocator,
+{
+    type Item = &'a mut T;
+    type IntoIter = IterMut<'a, T>;
+
+    fn into_iter(self) -> IterMut<'a, T> {
+        self.iter_mut()
+    }
+}
+
+impl<T, A> Default for HashTable<T, A>
+where
+    A: Allocator + Default,
+{
+    fn default() -> Self {
+        Self {
+            raw: Default::default(),
+        }
+    }
+}
+
+impl<T, A> Clone for HashTable<T, A>
+where
+    T: Clone,
+    A: Allocator + Clone,
+{
+    fn clone(&self) -> Self {
+        Self {
+            raw: self.raw.clone(),
+        }
+    }
+}
+
+impl<T, A> fmt::Debug for HashTable<T, A>
+where
+    T: fmt::Debug,
+    A: Allocator,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+/// A view into a single entry in a table, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`HashTable`].
+///
+/// [`HashTable`]: struct.HashTable.html
+/// [`entry`]: struct.HashTable.html#method.entry
+///
+/// # Examples
+///
+/// ```
+/// # #[cfg(feature = "nightly")]
+/// # fn test() {
+/// use ahash::AHasher;
+/// use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+/// use std::hash::{BuildHasher, BuildHasherDefault};
+///
+/// let mut table = HashTable::new();
+/// let hasher = BuildHasherDefault::<AHasher>::default();
+/// let hasher = |val: &_| hasher.hash_one(val);
+/// for x in ["a", "b", "c"] {
+///     table.insert_unique(hasher(&x), x, hasher);
+/// }
+/// assert_eq!(table.len(), 3);
+///
+/// // Existing value (insert)
+/// let entry: Entry<_> = table.entry(hasher(&"a"), |&x| x == "a", hasher);
+/// let _raw_o: OccupiedEntry<_, _> = entry.insert("a");
+/// assert_eq!(table.len(), 3);
+/// // Nonexistent value (insert)
+/// table.entry(hasher(&"d"), |&x| x == "d", hasher).insert("d");
+///
+/// // Existing value (or_insert)
+/// table
+///     .entry(hasher(&"b"), |&x| x == "b", hasher)
+///     .or_insert("b");
+/// // Nonexistent value (or_insert)
+/// table
+///     .entry(hasher(&"e"), |&x| x == "e", hasher)
+///     .or_insert("e");
+///
+/// println!("Our HashTable: {:?}", table);
+///
+/// let mut vec: Vec<_> = table.iter().copied().collect();
+/// // The `Iter` iterator produces items in arbitrary order, so the
+/// // items must be sorted to test them against a sorted array.
+/// vec.sort_unstable();
+/// assert_eq!(vec, ["a", "b", "c", "d", "e"]);
+/// # }
+/// # fn main() {
+/// #     #[cfg(feature = "nightly")]
+/// #     test()
+/// # }
+/// ```
+pub enum Entry<'a, T, A = Global>
+where
+    A: Allocator,
+{
+    /// An occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// for x in ["a", "b"] {
+    ///     table.insert_unique(hasher(&x), x, hasher);
+    /// }
+    ///
+    /// match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    ///     Entry::Vacant(_) => unreachable!(),
+    ///     Entry::Occupied(_) => {}
+    /// }
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    Occupied(OccupiedEntry<'a, T, A>),
+
+    /// A vacant entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table = HashTable::<&str>::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    ///     Entry::Vacant(_) => {}
+    ///     Entry::Occupied(_) => unreachable!(),
+    /// }
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    Vacant(VacantEntry<'a, T, A>),
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for Entry<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Entry::Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+impl<'a, T, A> Entry<'a, T, A>
+where
+    A: Allocator,
+{
+    /// Sets the value of the entry, replacing any existing value if there is
+    /// one, and returns an [`OccupiedEntry`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// let entry = table
+    ///     .entry(hasher(&"horseyland"), |&x| x == "horseyland", hasher)
+    ///     .insert("horseyland");
+    ///
+    /// assert_eq!(entry.get(), &"horseyland");
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn insert(self, value: T) -> OccupiedEntry<'a, T, A> {
+        match self {
+            Entry::Occupied(mut entry) => {
+                *entry.get_mut() = value;
+                entry
+            }
+            Entry::Vacant(entry) => entry.insert(value),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting if it was vacant.
+    ///
+    /// Returns an [`OccupiedEntry`] pointing to the now-occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// // nonexistent key
+    /// table
+    ///     .entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher)
+    ///     .or_insert("poneyland");
+    /// assert!(table
+    ///     .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    ///     .is_some());
+    ///
+    /// // existing key
+    /// table
+    ///     .entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher)
+    ///     .or_insert("poneyland");
+    /// assert!(table
+    ///     .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    ///     .is_some());
+    /// assert_eq!(table.len(), 1);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn or_insert(self, default: T) -> OccupiedEntry<'a, T, A> {
+        match self {
+            Entry::Occupied(entry) => entry,
+            Entry::Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty.
+    ///
+    /// Returns an [`OccupiedEntry`] pointing to the now-occupied entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<String> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// table
+    ///     .entry(hasher("poneyland"), |x| x == "poneyland", |val| hasher(val))
+    ///     .or_insert_with(|| "poneyland".to_string());
+    ///
+    /// assert!(table
+    ///     .find(hasher(&"poneyland"), |x| x == "poneyland")
+    ///     .is_some());
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn or_insert_with(self, default: impl FnOnce() -> T) -> OccupiedEntry<'a, T, A> {
+        match self {
+            Entry::Occupied(entry) => entry,
+            Entry::Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the table.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<(&str, u32)> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// table
+    ///     .entry(
+    ///         hasher(&"poneyland"),
+    ///         |&(x, _)| x == "poneyland",
+    ///         |(k, _)| hasher(&k),
+    ///     )
+    ///     .and_modify(|(_, v)| *v += 1)
+    ///     .or_insert(("poneyland", 42));
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(k, _)| k == "poneyland"),
+    ///     Some(&("poneyland", 42))
+    /// );
+    ///
+    /// table
+    ///     .entry(
+    ///         hasher(&"poneyland"),
+    ///         |&(x, _)| x == "poneyland",
+    ///         |(k, _)| hasher(&k),
+    ///     )
+    ///     .and_modify(|(_, v)| *v += 1)
+    ///     .or_insert(("poneyland", 42));
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(k, _)| k == "poneyland"),
+    ///     Some(&("poneyland", 43))
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn and_modify(self, f: impl FnOnce(&mut T)) -> Self {
+        match self {
+            Entry::Occupied(mut entry) => {
+                f(entry.get_mut());
+                Entry::Occupied(entry)
+            }
+            Entry::Vacant(entry) => Entry::Vacant(entry),
+        }
+    }
+}
+
+/// A view into an occupied entry in a `HashTable`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// # #[cfg(feature = "nightly")]
+/// # fn test() {
+/// use ahash::AHasher;
+/// use hashbrown::hash_table::{Entry, HashTable, OccupiedEntry};
+/// use std::hash::{BuildHasher, BuildHasherDefault};
+///
+/// let mut table = HashTable::new();
+/// let hasher = BuildHasherDefault::<AHasher>::default();
+/// let hasher = |val: &_| hasher.hash_one(val);
+/// for x in ["a", "b", "c"] {
+///     table.insert_unique(hasher(&x), x, hasher);
+/// }
+/// assert_eq!(table.len(), 3);
+///
+/// let _entry_o: OccupiedEntry<_, _> = table.find_entry(hasher(&"a"), |&x| x == "a").unwrap();
+/// assert_eq!(table.len(), 3);
+///
+/// // Existing key
+/// match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.get(), &"a");
+///     }
+/// }
+///
+/// assert_eq!(table.len(), 3);
+///
+/// // Existing key (take)
+/// match table.entry(hasher(&"c"), |&x| x == "c", hasher) {
+///     Entry::Vacant(_) => unreachable!(),
+///     Entry::Occupied(view) => {
+///         assert_eq!(view.remove().0, "c");
+///     }
+/// }
+/// assert_eq!(table.find(hasher(&"c"), |&x| x == "c"), None);
+/// assert_eq!(table.len(), 2);
+/// # }
+/// # fn main() {
+/// #     #[cfg(feature = "nightly")]
+/// #     test()
+/// # }
+/// ```
+pub struct OccupiedEntry<'a, T, A = Global>
+where
+    A: Allocator,
+{
+    hash: u64,
+    bucket: Bucket<T>,
+    table: &'a mut HashTable<T, A>,
+}
+
+unsafe impl<T, A> Send for OccupiedEntry<'_, T, A>
+where
+    T: Send,
+    A: Send + Allocator,
+{
+}
+unsafe impl<T, A> Sync for OccupiedEntry<'_, T, A>
+where
+    T: Sync,
+    A: Sync + Allocator,
+{
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for OccupiedEntry<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry")
+            .field("value", self.get())
+            .finish()
+    }
+}
+
+impl<'a, T, A> OccupiedEntry<'a, T, A>
+where
+    A: Allocator,
+{
+    /// Takes the value out of the entry, and returns it along with a
+    /// `VacantEntry` that can be used to insert another value with the same
+    /// hash as the one that was just removed.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// // The table is empty
+    /// assert!(table.is_empty() && table.capacity() == 0);
+    ///
+    /// table.insert_unique(hasher(&"poneyland"), "poneyland", hasher);
+    /// let capacity_before_remove = table.capacity();
+    ///
+    /// if let Entry::Occupied(o) = table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    ///     assert_eq!(o.remove().0, "poneyland");
+    /// }
+    ///
+    /// assert!(table
+    ///     .find(hasher(&"poneyland"), |&x| x == "poneyland")
+    ///     .is_none());
+    /// // Now the table holds no elements, but its capacity equals the old one
+    /// assert!(table.len() == 0 && table.capacity() == capacity_before_remove);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[cfg_attr(feature = "inline-more", inline)]
+    pub fn remove(self) -> (T, VacantEntry<'a, T, A>) {
+        let (val, slot) = unsafe { self.table.raw.remove(self.bucket) };
+        (
+            val,
+            VacantEntry {
+                hash: self.hash,
+                insert_slot: slot,
+                table: self.table,
+            },
+        )
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&"poneyland"), "poneyland", hasher);
+    ///
+    /// match table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    ///     Entry::Vacant(_) => panic!(),
+    ///     Entry::Occupied(entry) => assert_eq!(entry.get(), &"poneyland"),
+    /// }
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub fn get(&self) -> &T {
+        unsafe { self.bucket.as_ref() }
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntry` which may outlive the
+    /// destruction of the `Entry` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<(&str, u32)> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&"poneyland"), ("poneyland", 12), |(k, _)| hasher(&k));
+    ///
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    ///     Some(&("poneyland", 12))
+    /// );
+    ///
+    /// if let Entry::Occupied(mut o) = table.entry(
+    ///     hasher(&"poneyland"),
+    ///     |&(x, _)| x == "poneyland",
+    ///     |(k, _)| hasher(&k),
+    /// ) {
+    ///     o.get_mut().1 += 10;
+    ///     assert_eq!(o.get().1, 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     o.get_mut().1 += 2;
+    /// }
+    ///
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    ///     Some(&("poneyland", 24))
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub fn get_mut(&mut self) -> &mut T {
+        unsafe { self.bucket.as_mut() }
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the value in the entry
+    /// with a lifetime bound to the table itself.
+    ///
+    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<(&str, u32)> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&"poneyland"), ("poneyland", 12), |(k, _)| hasher(&k));
+    ///
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    ///     Some(&("poneyland", 12))
+    /// );
+    ///
+    /// let value: &mut (&str, u32);
+    /// match table.entry(
+    ///     hasher(&"poneyland"),
+    ///     |&(x, _)| x == "poneyland",
+    ///     |(k, _)| hasher(&k),
+    /// ) {
+    ///     Entry::Occupied(entry) => value = entry.into_mut(),
+    ///     Entry::Vacant(_) => panic!(),
+    /// }
+    /// value.1 += 10;
+    ///
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&(x, _)| x == "poneyland",),
+    ///     Some(&("poneyland", 22))
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    pub fn into_mut(self) -> &'a mut T {
+        unsafe { self.bucket.as_mut() }
+    }
+
+    /// Converts the OccupiedEntry into a mutable reference to the underlying
+    /// table.
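+    ///
+    /// A minimal usage sketch (hypothetical keys, reusing the nightly doctest
+    /// scaffolding from the examples above):
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    /// table.insert_unique(hasher(&"a"), "a", hasher);
+    ///
+    /// if let Entry::Occupied(o) = table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+    ///     // Give up the entry view and keep working with the whole table.
+    ///     let table_ref = o.into_table();
+    ///     table_ref.insert_unique(hasher(&"b"), "b", hasher);
+    /// }
+    /// assert_eq!(table.len(), 2);
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```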
+    pub fn into_table(self) -> &'a mut HashTable<T, A> {
+        self.table
+    }
+}
+
+/// A view into a vacant entry in a `HashTable`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+///
+/// # Examples
+///
+/// ```
+/// # #[cfg(feature = "nightly")]
+/// # fn test() {
+/// use ahash::AHasher;
+/// use hashbrown::hash_table::{Entry, HashTable, VacantEntry};
+/// use std::hash::{BuildHasher, BuildHasherDefault};
+///
+/// let mut table: HashTable<&str> = HashTable::new();
+/// let hasher = BuildHasherDefault::<AHasher>::default();
+/// let hasher = |val: &_| hasher.hash_one(val);
+///
+/// let entry_v: VacantEntry<_, _> = match table.entry(hasher(&"a"), |&x| x == "a", hasher) {
+///     Entry::Vacant(view) => view,
+///     Entry::Occupied(_) => unreachable!(),
+/// };
+/// entry_v.insert("a");
+/// assert!(table.find(hasher(&"a"), |&x| x == "a").is_some() && table.len() == 1);
+///
+/// // Nonexistent key (insert)
+/// match table.entry(hasher(&"b"), |&x| x == "b", hasher) {
+///     Entry::Vacant(view) => {
+///         view.insert("b");
+///     }
+///     Entry::Occupied(_) => unreachable!(),
+/// }
+/// assert!(table.find(hasher(&"b"), |&x| x == "b").is_some() && table.len() == 2);
+/// # }
+/// # fn main() {
+/// #     #[cfg(feature = "nightly")]
+/// #     test()
+/// # }
+/// ```
+pub struct VacantEntry<'a, T, A = Global>
+where
+    A: Allocator,
+{
+    hash: u64,
+    insert_slot: InsertSlot,
+    table: &'a mut HashTable<T, A>,
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for VacantEntry<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("VacantEntry")
+    }
+}
+
+impl<'a, T, A> VacantEntry<'a, T, A>
+where
+    A: Allocator,
+{
+    /// Inserts a new element into the table with the hash that was used to
+    /// obtain the `VacantEntry`.
+    ///
+    /// An `OccupiedEntry` is returned for the newly inserted element.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #[cfg(feature = "nightly")]
+    /// # fn test() {
+    /// use ahash::AHasher;
+    /// use hashbrown::hash_table::Entry;
+    /// use hashbrown::HashTable;
+    /// use std::hash::{BuildHasher, BuildHasherDefault};
+    ///
+    /// let mut table: HashTable<&str> = HashTable::new();
+    /// let hasher = BuildHasherDefault::<AHasher>::default();
+    /// let hasher = |val: &_| hasher.hash_one(val);
+    ///
+    /// if let Entry::Vacant(o) = table.entry(hasher(&"poneyland"), |&x| x == "poneyland", hasher) {
+    ///     o.insert("poneyland");
+    /// }
+    /// assert_eq!(
+    ///     table.find(hasher(&"poneyland"), |&x| x == "poneyland"),
+    ///     Some(&"poneyland")
+    /// );
+    /// # }
+    /// # fn main() {
+    /// #     #[cfg(feature = "nightly")]
+    /// #     test()
+    /// # }
+    /// ```
+    #[inline]
+    pub fn insert(self, value: T) -> OccupiedEntry<'a, T, A> {
+        let bucket = unsafe {
+            self.table
+                .raw
+                .insert_in_slot(self.hash, self.insert_slot, value)
+        };
+        OccupiedEntry {
+            hash: self.hash,
+            bucket,
+            table: self.table,
+        }
+    }
+
+    /// Converts the VacantEntry into a mutable reference to the underlying
+    /// table.
+    pub fn into_table(self) -> &'a mut HashTable<T, A> {
+        self.table
+    }
+}
+
+/// Type representing the absence of an entry, as returned by [`HashTable::find_entry`].
+///
+/// This type only exists due to [limitations] in Rust's NLL borrow checker. In
+/// the future, `find_entry` will return an `Option<OccupiedEntry>` and this
+/// type will be removed.
+///
+/// [limitations]: https://smallcultfollowing.com/babysteps/blog/2018/06/15/mir-based-borrow-check-nll-status-update/#polonius
+///
+/// # Examples
+///
+/// ```
+/// # #[cfg(feature = "nightly")]
+/// # fn test() {
+/// use ahash::AHasher;
+/// use hashbrown::hash_table::{AbsentEntry, Entry, HashTable};
+/// use std::hash::{BuildHasher, BuildHasherDefault};
+///
+/// let mut table: HashTable<&str> = HashTable::new();
+/// let hasher = BuildHasherDefault::<AHasher>::default();
+/// let hasher = |val: &_| hasher.hash_one(val);
+///
+/// let entry_v: AbsentEntry<_, _> = table.find_entry(hasher(&"a"), |&x| x == "a").unwrap_err();
+/// entry_v
+///     .into_table()
+///     .insert_unique(hasher(&"a"), "a", hasher);
+/// assert!(table.find(hasher(&"a"), |&x| x == "a").is_some() && table.len() == 1);
+///
+/// // Nonexistent key (insert)
+/// match table.entry(hasher(&"b"), |&x| x == "b", hasher) {
+///     Entry::Vacant(view) => {
+///         view.insert("b");
+///     }
+///     Entry::Occupied(_) => unreachable!(),
+/// }
+/// assert!(table.find(hasher(&"b"), |&x| x == "b").is_some() && table.len() == 2);
+/// # }
+/// # fn main() {
+/// #     #[cfg(feature = "nightly")]
+/// #     test()
+/// # }
+/// ```
+pub struct AbsentEntry<'a, T, A = Global>
+where
+    A: Allocator,
+{
+    table: &'a mut HashTable<T, A>,
+}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for AbsentEntry<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("AbsentEntry")
+    }
+}
+
+impl<'a, T, A> AbsentEntry<'a, T, A>
+where
+    A: Allocator,
+{
+    /// Converts the AbsentEntry into a mutable reference to the underlying
+    /// table.
+    pub fn into_table(self) -> &'a mut HashTable<T, A> {
+        self.table
+    }
+}
+
+/// An iterator over the entries of a `HashTable` in arbitrary order.
+/// The iterator element type is `&'a T`.
+///
+/// This `struct` is created by the [`iter`] method on [`HashTable`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.HashTable.html#method.iter
+/// [`HashTable`]: struct.HashTable.html
+pub struct Iter<'a, T> {
+    inner: RawIter<T>,
+    marker: PhantomData<&'a T>,
+}
+
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(bucket) => Some(unsafe { bucket.as_ref() }),
+            None => None,
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner
+            .fold(init, |acc, bucket| unsafe { f(acc, bucket.as_ref()) })
+    }
+}
+
+impl<T> ExactSizeIterator for Iter<'_, T> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<T> FusedIterator for Iter<'_, T> {}
+
+/// A mutable iterator over the entries of a `HashTable` in arbitrary order.
+/// The iterator element type is `&'a mut T`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`HashTable`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.HashTable.html#method.iter_mut
+/// [`HashTable`]: struct.HashTable.html
+pub struct IterMut<'a, T> {
+    inner: RawIter<T>,
+    marker: PhantomData<&'a mut T>,
+}
+
+impl<'a, T> Iterator for IterMut<'a, T> {
+    type Item = &'a mut T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        // Avoid `Option::map` because it bloats LLVM IR.
+        match self.inner.next() {
+            Some(bucket) => Some(unsafe { bucket.as_mut() }),
+            None => None,
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn fold<B, F>(self, init: B, mut f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner
+            .fold(init, |acc, bucket| unsafe { f(acc, bucket.as_mut()) })
+    }
+}
+
+impl<T> ExactSizeIterator for IterMut<'_, T> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<T> FusedIterator for IterMut<'_, T> {}
+
+/// An owning iterator over the entries of a `HashTable` in arbitrary order.
+/// The iterator element type is `T`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`HashTable`]
+/// (provided by the [`IntoIterator`] trait). See its documentation for more.
+/// The table cannot be used after calling that method.
+///
+/// [`into_iter`]: struct.HashTable.html#method.into_iter
+/// [`HashTable`]: struct.HashTable.html
+/// [`IntoIterator`]: https://doc.rust-lang.org/core/iter/trait.IntoIterator.html
+pub struct IntoIter<T, A = Global>
+where
+    A: Allocator,
+{
+    inner: RawIntoIter<T, A>,
+}
+
+impl<T, A> Iterator for IntoIter<T, A>
+where
+    A: Allocator,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn fold<B, F>(self, init: B, f: F) -> B
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> B,
+    {
+        self.inner.fold(init, f)
+    }
+}
+
+impl<T, A> ExactSizeIterator for IntoIter<T, A>
+where
+    A: Allocator,
+{
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+impl<T, A> FusedIterator for IntoIter<T, A> where A: Allocator {}
+
+/// A draining iterator over the items of a `HashTable`.
+///
+/// This `struct` is created by the [`drain`] method on [`HashTable`].
+/// See its documentation for more.
+///
+/// [`HashTable`]: struct.HashTable.html
+/// [`drain`]: struct.HashTable.html#method.drain
+pub struct Drain<'a, T, A: Allocator = Global> {
+    inner: RawDrain<'a, T, A>,
+}
+
+impl<T, A: Allocator> Drain<'_, T, A> {
+    /// Returns a iterator of references over the remaining items.
+    fn iter(&self) -> Iter<'_, T> {
+        Iter {
+            inner: self.inner.iter(),
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        self.inner.next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
+
+impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+/// A draining iterator over entries of a `HashTable` that satisfy the predicate `f`.
+///
+/// This `struct` is created by [`HashTable::extract_if`]. See its
+/// documentation for more.
+#[must_use = "Iterators are lazy unless consumed"]
+pub struct ExtractIf<'a, T, F, A: Allocator = Global>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    f: F,
+    inner: RawExtractIf<'a, T, A>,
+}
+
+impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.inner.next(|val| (self.f)(val))
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, self.inner.iter.size_hint().1)
+    }
+}
+
+impl<T, F, A: Allocator> FusedIterator for ExtractIf<'_, T, F, A> where F: FnMut(&mut T) -> bool {}
+
\ No newline at end of file diff --git a/src/log/__private_api.rs.html b/src/log/__private_api.rs.html new file mode 100644 index 000000000..ffa9629b4 --- /dev/null +++ b/src/log/__private_api.rs.html @@ -0,0 +1,251 @@ +__private_api.rs - source
//! WARNING: this is not part of the crate's public API and is subject to change at any time
+
+use self::sealed::KVs;
+use crate::{Level, Metadata, Record};
+use std::fmt::Arguments;
+pub use std::{file, format_args, line, module_path, stringify};
+
+#[cfg(not(feature = "kv"))]
+pub type Value<'a> = &'a str;
+
+mod sealed {
+    /// Types for the `kv` argument.
+    pub trait KVs<'a> {
+        fn into_kvs(self) -> Option<&'a [(&'a str, super::Value<'a>)]>;
+    }
+}
+
+// Types for the `kv` argument.
+
+impl<'a> KVs<'a> for &'a [(&'a str, Value<'a>)] {
+    #[inline]
+    fn into_kvs(self) -> Option<&'a [(&'a str, Value<'a>)]> {
+        Some(self)
+    }
+}
+
+impl<'a> KVs<'a> for () {
+    #[inline]
+    fn into_kvs(self) -> Option<&'a [(&'a str, Value<'a>)]> {
+        None
+    }
+}
+
+// Log implementation.
+
+fn log_impl(
+    args: Arguments,
+    level: Level,
+    &(target, module_path, file): &(&str, &'static str, &'static str),
+    line: u32,
+    kvs: Option<&[(&str, Value)]>,
+) {
+    #[cfg(not(feature = "kv"))]
+    if kvs.is_some() {
+        panic!("key-value support is experimental and must be enabled using the `kv` feature")
+    }
+
+    let mut builder = Record::builder();
+
+    builder
+        .args(args)
+        .level(level)
+        .target(target)
+        .module_path_static(Some(module_path))
+        .file_static(Some(file))
+        .line(Some(line));
+
+    #[cfg(feature = "kv")]
+    builder.key_values(&kvs);
+
+    crate::logger().log(&builder.build());
+}
+
+pub fn log<'a, K>(
+    args: Arguments,
+    level: Level,
+    target_module_path_and_file: &(&str, &'static str, &'static str),
+    line: u32,
+    kvs: K,
+) where
+    K: KVs<'a>,
+{
+    log_impl(
+        args,
+        level,
+        target_module_path_and_file,
+        line,
+        kvs.into_kvs(),
+    )
+}
+
+pub fn enabled(level: Level, target: &str) -> bool {
+    crate::logger().enabled(&Metadata::builder().level(level).target(target).build())
+}
+
+#[cfg(feature = "kv")]
+mod kv_support {
+    use crate::kv;
+
+    pub type Value<'a> = kv::Value<'a>;
+
+    // NOTE: Many functions here accept a double reference &&V
+    // This is so V itself can be ?Sized, while still letting us
+    // erase it to some dyn Trait (because &T is sized)
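+    //
+    // For example (hypothetical call, for illustration only): given `msg: &str`,
+    // passing `&msg` here means `V = str` (unsized) while the argument `&&str`
+    // itself is sized, so it can still be erased to some `dyn Trait` internally.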
+
+    pub fn capture_to_value<'a, V: kv::ToValue + ?Sized>(v: &'a &'a V) -> Value<'a> {
+        v.to_value()
+    }
+
+    pub fn capture_debug<'a, V: core::fmt::Debug + ?Sized>(v: &'a &'a V) -> Value<'a> {
+        Value::from_debug(v)
+    }
+
+    pub fn capture_display<'a, V: core::fmt::Display + ?Sized>(v: &'a &'a V) -> Value<'a> {
+        Value::from_display(v)
+    }
+
+    #[cfg(feature = "kv_std")]
+    pub fn capture_error<'a>(v: &'a (dyn std::error::Error + 'static)) -> Value<'a> {
+        Value::from_dyn_error(v)
+    }
+
+    #[cfg(feature = "kv_sval")]
+    pub fn capture_sval<'a, V: sval::Value + ?Sized>(v: &'a &'a V) -> Value<'a> {
+        Value::from_sval(v)
+    }
+
+    #[cfg(feature = "kv_serde")]
+    pub fn capture_serde<'a, V: serde::Serialize + ?Sized>(v: &'a &'a V) -> Value<'a> {
+        Value::from_serde(v)
+    }
+}
+
+#[cfg(feature = "kv")]
+pub use self::kv_support::*;
+
\ No newline at end of file diff --git a/src/log/lib.rs.html b/src/log/lib.rs.html new file mode 100644 index 000000000..2bf1c7cae --- /dev/null +++ b/src/log/lib.rs.html @@ -0,0 +1,3679 @@ +lib.rs - source
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A lightweight logging facade.
+//!
+//! The `log` crate provides a single logging API that abstracts over the
+//! actual logging implementation. Libraries can use the logging API provided
+//! by this crate, and the consumer of those libraries can choose the logging
+//! implementation that is most suitable for its use case.
+//!
+//! If no logging implementation is selected, the facade falls back to a "noop"
+//! implementation that ignores all log messages. The overhead in this case
+//! is very small - just an integer load, comparison and jump.
+//!
+//! A log request consists of a _target_, a _level_, and a _body_. A target is a
+//! string which defaults to the module path of the location of the log request,
+//! though that default may be overridden. Logger implementations typically use
+//! the target to filter requests based on some user configuration.
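+//!
+//! For example, a library can group its records under an explicit target
+//! instead of its module path (the target name here is illustrative):
+//!
+//! ```
+//! use log::info;
+//!
+//! info!(target: "connection_events", "successfully opened connection");
+//! ```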
+//!
+//! # Usage
+//!
+//! The basic use of the log crate is through the five logging macros: [`error!`],
+//! [`warn!`], [`info!`], [`debug!`] and [`trace!`]
+//! where `error!` represents the highest-priority log messages
+//! and `trace!` the lowest. The log messages are filtered by configuring
+//! the log level to exclude messages with a lower priority.
+//! Each of these macros accepts format strings similarly to [`println!`].
+//!
+//!
+//! [`error!`]: ./macro.error.html
+//! [`warn!`]: ./macro.warn.html
+//! [`info!`]: ./macro.info.html
+//! [`debug!`]: ./macro.debug.html
+//! [`trace!`]: ./macro.trace.html
+//! [`println!`]: https://doc.rust-lang.org/stable/std/macro.println.html
+//!
+//! ## In libraries
+//!
+//! Libraries should link only to the `log` crate, and use the provided
+//! macros to log whatever information will be useful to downstream consumers.
+//!
+//! ### Examples
+//!
+//! ```
+//! # #[derive(Debug)] pub struct Yak(String);
+//! # impl Yak { fn shave(&mut self, _: u32) {} }
+//! # fn find_a_razor() -> Result<u32, u32> { Ok(1) }
+//! use log::{info, warn};
+//!
+//! pub fn shave_the_yak(yak: &mut Yak) {
+//!     info!(target: "yak_events", "Commencing yak shaving for {yak:?}");
+//!
+//!     loop {
+//!         match find_a_razor() {
+//!             Ok(razor) => {
+//!                 info!("Razor located: {razor}");
+//!                 yak.shave(razor);
+//!                 break;
+//!             }
+//!             Err(err) => {
+//!                 warn!("Unable to locate a razor: {err}, retrying");
+//!             }
+//!         }
+//!     }
+//! }
+//! # fn main() {}
+//! ```
+//!
+//! ## In executables
+//!
+//! Executables should choose a logging implementation and initialize it early in the
+//! runtime of the program. Logging implementations will typically include a
+//! function to do this. Any log messages generated before
+//! the implementation is initialized will be ignored.
+//!
+//! The executable itself may use the `log` crate to log as well.
+//!
+//! ### Warning
+//!
+//! The logging system may only be initialized once.
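+//!
+//! For example, a second attempt to install a logger is rejected with a
+//! `SetLoggerError` (a minimal sketch; the hidden `SimpleLogger` is the same
+//! kind of no-frills logger shown elsewhere in these docs):
+//!
+//! ```
+//! # use log::{Metadata, Record};
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! #     fn enabled(&self, _: &Metadata) -> bool { false }
+//! #     fn log(&self, _: &Record) {}
+//! #     fn flush(&self) {}
+//! # }
+//! static LOGGER: SimpleLogger = SimpleLogger;
+//!
+//! # fn main() {
+//! assert!(log::set_logger(&LOGGER).is_ok());
+//! // The second call fails because the logging system is already initialized.
+//! assert!(log::set_logger(&LOGGER).is_err());
+//! # }
+//! ```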
+//!
+//! ## Structured logging
+//!
+//! If you enable the `kv` feature, you can associate structured values
+//! with your log records. If we take the example from before, we can include
+//! some additional context besides what's in the formatted message:
+//!
+//! ```
+//! # use serde::Serialize;
+//! # #[derive(Debug, Serialize)] pub struct Yak(String);
+//! # impl Yak { fn shave(&mut self, _: u32) {} }
+//! # fn find_a_razor() -> Result<u32, std::io::Error> { Ok(1) }
+//! # #[cfg(feature = "kv_serde")]
+//! # fn main() {
+//! use log::{info, warn};
+//!
+//! pub fn shave_the_yak(yak: &mut Yak) {
+//!     info!(target: "yak_events", yak:serde; "Commencing yak shaving");
+//!
+//!     loop {
+//!         match find_a_razor() {
+//!             Ok(razor) => {
+//!                 info!(razor; "Razor located");
+//!                 yak.shave(razor);
+//!                 break;
+//!             }
+//!             Err(e) => {
+//!                 warn!(e:err; "Unable to locate a razor, retrying");
+//!             }
+//!         }
+//!     }
+//! }
+//! # }
+//! # #[cfg(not(feature = "kv_serde"))]
+//! # fn main() {}
+//! ```
+//!
+//! See the [`kv`] module documentation for more details.
+//!
+//! # Available logging implementations
+//!
+//! In order to produce log output, executables have to use
+//! a logger implementation compatible with the facade.
+//! There are many available implementations to choose from;
+//! here are some of the most popular ones:
+//!
+//! * Simple minimal loggers:
+//!     * [env_logger]
+//!     * [simple_logger]
+//!     * [simplelog]
+//!     * [pretty_env_logger]
+//!     * [stderrlog]
+//!     * [flexi_logger]
+//!     * [call_logger]
+//!     * [structured-logger]
+//! * Complex configurable frameworks:
+//!     * [log4rs]
+//!     * [fern]
+//! * Adaptors for other facilities:
+//!     * [syslog]
+//!     * [slog-stdlog]
+//!     * [systemd-journal-logger]
+//!     * [android_log]
+//!     * [win_dbg_logger]
+//!     * [db_logger]
+//!     * [log-to-defmt]
+//!     * [logcontrol-log]
+//! * For WebAssembly binaries:
+//!     * [console_log]
+//! * For dynamic libraries:
+//!     * You may need to construct an FFI-safe wrapper over `log` to initialize it in your libraries
+//! * Utilities:
+//!     * [log_err]
+//!     * [log-reload]
+//!
+//! # Implementing a Logger
+//!
+//! Loggers implement the [`Log`] trait. Here's a very basic example that simply
+//! logs all messages at the [`Error`][level_link], [`Warn`][level_link] or
+//! [`Info`][level_link] levels to stdout:
+//!
+//! ```
+//! use log::{Record, Level, Metadata};
+//!
+//! struct SimpleLogger;
+//!
+//! impl log::Log for SimpleLogger {
+//!     fn enabled(&self, metadata: &Metadata) -> bool {
+//!         metadata.level() <= Level::Info
+//!     }
+//!
+//!     fn log(&self, record: &Record) {
+//!         if self.enabled(record.metadata()) {
+//!             println!("{} - {}", record.level(), record.args());
+//!         }
+//!     }
+//!
+//!     fn flush(&self) {}
+//! }
+//!
+//! # fn main() {}
+//! ```
+//!
+//! Loggers are installed by calling the [`set_logger`] function. The maximum
+//! log level also needs to be adjusted via the [`set_max_level`] function. The
+//! logging facade uses this as an optimization to improve performance of log
+//! messages at levels that are disabled. It's important to set it, as it
+//! defaults to [`Off`][filter_link], so no log messages will ever be captured!
+//! In the case of our example logger, we'll want to set the maximum log level
+//! to [`Info`][filter_link], since we ignore any [`Debug`][level_link] or
+//! [`Trace`][level_link] level log messages. A logging implementation should
+//! provide a function that wraps a call to [`set_logger`] and
+//! [`set_max_level`], handling initialization of the logger:
+//!
+//! ```
+//! # use log::{Level, Metadata};
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! #   fn enabled(&self, _: &Metadata) -> bool { false }
+//! #   fn log(&self, _: &log::Record) {}
+//! #   fn flush(&self) {}
+//! # }
+//! # fn main() {}
+//! use log::{SetLoggerError, LevelFilter};
+//!
+//! static LOGGER: SimpleLogger = SimpleLogger;
+//!
+//! pub fn init() -> Result<(), SetLoggerError> {
+//!     log::set_logger(&LOGGER)
+//!         .map(|()| log::set_max_level(LevelFilter::Info))
+//! }
+//! ```
+//!
+//! Implementations that adjust their configurations at runtime should take care
+//! to adjust the maximum log level as well.
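+//!
+//! For example, a sketch of a runtime "verbose mode" toggle (the function name
+//! is illustrative):
+//!
+//! ```
+//! use log::LevelFilter;
+//!
+//! fn enable_verbose_logging() {
+//!     // Without raising the maximum level, `debug!` records would be filtered
+//!     // out before they ever reach the installed logger.
+//!     log::set_max_level(LevelFilter::Debug);
+//! }
+//! # fn main() { enable_verbose_logging(); }
+//! ```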
+//!
+//! # Use with `std`
+//!
+//! `set_logger` requires you to provide a `&'static Log`, which can be hard to
+//! obtain if your logger depends on some runtime configuration. The
+//! `set_boxed_logger` function is available with the `std` Cargo feature. It is
+//! identical to `set_logger` except that it takes a `Box<Log>` rather than a
+//! `&'static Log`:
+//!
+//! ```
+//! # use log::{Level, LevelFilter, Log, SetLoggerError, Metadata};
+//! # struct SimpleLogger;
+//! # impl log::Log for SimpleLogger {
+//! #   fn enabled(&self, _: &Metadata) -> bool { false }
+//! #   fn log(&self, _: &log::Record) {}
+//! #   fn flush(&self) {}
+//! # }
+//! # fn main() {}
+//! # #[cfg(feature = "std")]
+//! pub fn init() -> Result<(), SetLoggerError> {
+//!     log::set_boxed_logger(Box::new(SimpleLogger))
+//!         .map(|()| log::set_max_level(LevelFilter::Info))
+//! }
+//! ```
+//!
+//! # Compile time filters
+//!
+//! Log levels can be statically disabled at compile time by enabling one of these Cargo features:
+//!
+//! * `max_level_off`
+//! * `max_level_error`
+//! * `max_level_warn`
+//! * `max_level_info`
+//! * `max_level_debug`
+//! * `max_level_trace`
+//!
+//! Log invocations at disabled levels will be skipped and will not even be present in the
+//! resulting binary. These features control the value of the `STATIC_MAX_LEVEL` constant. The
+//! logging macros check this value before logging a message. By default, no levels are disabled.
+//!
+//! It is possible to override this level for release builds only with the following features:
+//!
+//! * `release_max_level_off`
+//! * `release_max_level_error`
+//! * `release_max_level_warn`
+//! * `release_max_level_info`
+//! * `release_max_level_debug`
+//! * `release_max_level_trace`
+//!
+//! Libraries should avoid using the max level features because they're global and can't be changed
+//! once they're set.
+//!
+//! For example, a crate can disable trace level logs in debug builds and trace, debug, and info
+//! level logs in release builds with the following configuration:
+//!
+//! ```toml
+//! [dependencies]
+//! log = { version = "0.4", features = ["max_level_debug", "release_max_level_warn"] }
+//! ```
+//! # Crate Feature Flags
+//!
+//! The following crate feature flags are available in addition to the filters. They are
+//! configured in your `Cargo.toml`.
+//!
+//! * `std` allows use of the `std` crate instead of the default `core`. Enables using `std::error` and
+//!   `set_boxed_logger` functionality.
+//! * `serde` enables support for serialization and deserialization of `Level` and `LevelFilter`.
+//!
+//! ```toml
+//! [dependencies]
+//! log = { version = "0.4", features = ["std", "serde"] }
+//! ```
+//!
+//! # Version compatibility
+//!
+//! The 0.3 and 0.4 versions of the `log` crate are almost entirely compatible. Log messages
+//! made using `log` 0.3 will forward transparently to a logger implementation using `log` 0.4. Log
+//! messages made using `log` 0.4 will forward to a logger implementation using `log` 0.3, but the
+//! module path and file name information associated with the message will unfortunately be lost.
+//!
+//! [`Log`]: trait.Log.html
+//! [level_link]: enum.Level.html
+//! [filter_link]: enum.LevelFilter.html
+//! [`set_logger`]: fn.set_logger.html
+//! [`set_max_level`]: fn.set_max_level.html
+//! [`try_set_logger_raw`]: fn.try_set_logger_raw.html
+//! [`shutdown_logger_raw`]: fn.shutdown_logger_raw.html
+//! [env_logger]: https://docs.rs/env_logger/*/env_logger/
+//! [simple_logger]: https://github.com/borntyping/rust-simple_logger
+//! [simplelog]: https://github.com/drakulix/simplelog.rs
+//! [pretty_env_logger]: https://docs.rs/pretty_env_logger/*/pretty_env_logger/
+//! [stderrlog]: https://docs.rs/stderrlog/*/stderrlog/
+//! [flexi_logger]: https://docs.rs/flexi_logger/*/flexi_logger/
+//! [call_logger]: https://docs.rs/call_logger/*/call_logger/
+//! [syslog]: https://docs.rs/syslog/*/syslog/
+//! [slog-stdlog]: https://docs.rs/slog-stdlog/*/slog_stdlog/
+//! [log4rs]: https://docs.rs/log4rs/*/log4rs/
+//! [fern]: https://docs.rs/fern/*/fern/
+//! [systemd-journal-logger]: https://docs.rs/systemd-journal-logger/*/systemd_journal_logger/
+//! [android_log]: https://docs.rs/android_log/*/android_log/
+//! [win_dbg_logger]: https://docs.rs/win_dbg_logger/*/win_dbg_logger/
+//! [db_logger]: https://docs.rs/db_logger/*/db_logger/
+//! [log-to-defmt]: https://docs.rs/log-to-defmt/*/log_to_defmt/
+//! [console_log]: https://docs.rs/console_log/*/console_log/
+//! [structured-logger]: https://docs.rs/structured-logger/latest/structured_logger/
+//! [logcontrol-log]: https://docs.rs/logcontrol-log/*/logcontrol_log/
+//! [log_err]: https://docs.rs/log_err/*/log_err/
+//! [log-reload]: https://docs.rs/log-reload/*/log_reload/
+
+#![doc(
+    html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+    html_favicon_url = "https://www.rust-lang.org/favicon.ico",
+    html_root_url = "https://docs.rs/log/0.4.21"
+)]
+#![warn(missing_docs)]
+#![deny(missing_debug_implementations, unconditional_recursion)]
+#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
+// When compiled for the rustc compiler itself we want to make sure that this is
+// an unstable crate
+#![cfg_attr(rustbuild, feature(staged_api, rustc_private))]
+#![cfg_attr(rustbuild, unstable(feature = "rustc_private", issue = "27812"))]
+
+#[cfg(all(not(feature = "std"), not(test)))]
+extern crate core as std;
+
+use std::cfg;
+#[cfg(feature = "std")]
+use std::error;
+use std::str::FromStr;
+use std::{cmp, fmt, mem};
+
+#[macro_use]
+mod macros;
+mod serde;
+
+#[cfg(feature = "kv")]
+pub mod kv;
+
+#[cfg(target_has_atomic = "ptr")]
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+#[cfg(not(target_has_atomic = "ptr"))]
+use std::cell::Cell;
+#[cfg(not(target_has_atomic = "ptr"))]
+use std::sync::atomic::Ordering;
+
+#[cfg(not(target_has_atomic = "ptr"))]
+struct AtomicUsize {
+    v: Cell<usize>,
+}
+
+#[cfg(not(target_has_atomic = "ptr"))]
+impl AtomicUsize {
+    const fn new(v: usize) -> AtomicUsize {
+        AtomicUsize { v: Cell::new(v) }
+    }
+
+    fn load(&self, _order: Ordering) -> usize {
+        self.v.get()
+    }
+
+    fn store(&self, val: usize, _order: Ordering) {
+        self.v.set(val)
+    }
+
+    #[cfg(target_has_atomic = "ptr")]
+    fn compare_exchange(
+        &self,
+        current: usize,
+        new: usize,
+        _success: Ordering,
+        _failure: Ordering,
+    ) -> Result<usize, usize> {
+        let prev = self.v.get();
+        if current == prev {
+            self.v.set(new);
+        }
+        Ok(prev)
+    }
+}
+
+// Any platform without atomics is unlikely to have multiple cores, so
+// writing via Cell will not be a race condition.
+#[cfg(not(target_has_atomic = "ptr"))]
+unsafe impl Sync for AtomicUsize {}
+
+// The LOGGER static holds a pointer to the global logger. It is protected by
+// the STATE static which determines whether LOGGER has been initialized yet.
+static mut LOGGER: &dyn Log = &NopLogger;
+
+static STATE: AtomicUsize = AtomicUsize::new(0);
+
+// There are three different states that we care about: the logger's
+// uninitialized, the logger's initializing (set_logger's been called but
+// LOGGER hasn't actually been set yet), or the logger's active.
+const UNINITIALIZED: usize = 0;
+const INITIALIZING: usize = 1;
+const INITIALIZED: usize = 2;
+
+static MAX_LOG_LEVEL_FILTER: AtomicUsize = AtomicUsize::new(0);
+
+static LOG_LEVEL_NAMES: [&str; 6] = ["OFF", "ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
+
+static SET_LOGGER_ERROR: &str = "attempted to set a logger after the logging system \
+                                 was already initialized";
+static LEVEL_PARSE_ERROR: &str =
+    "attempted to convert a string that doesn't match an existing log level";
+
+/// An enum representing the available verbosity levels of the logger.
+///
+/// Typical usage includes: checking if a certain `Level` is enabled with
+/// [`log_enabled!`](macro.log_enabled.html), specifying the `Level` of
+/// [`log!`](macro.log.html), and comparing a `Level` directly to a
+/// [`LevelFilter`](enum.LevelFilter.html).
+#[repr(usize)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub enum Level {
+    /// The "error" level.
+    ///
+    /// Designates very serious errors.
+    // This way these line up with the discriminants for LevelFilter below
+    // This works because Rust treats field-less enums the same way as C does:
+    // https://doc.rust-lang.org/reference/items/enumerations.html#custom-discriminant-values-for-field-less-enumerations
+    Error = 1,
+    /// The "warn" level.
+    ///
+    /// Designates hazardous situations.
+    Warn,
+    /// The "info" level.
+    ///
+    /// Designates useful information.
+    Info,
+    /// The "debug" level.
+    ///
+    /// Designates lower priority information.
+    Debug,
+    /// The "trace" level.
+    ///
+    /// Designates very low priority, often extremely verbose, information.
+    Trace,
+}
+
+impl PartialEq<LevelFilter> for Level {
+    #[inline]
+    fn eq(&self, other: &LevelFilter) -> bool {
+        *self as usize == *other as usize
+    }
+}
+
+impl PartialOrd<LevelFilter> for Level {
+    #[inline]
+    fn partial_cmp(&self, other: &LevelFilter) -> Option<cmp::Ordering> {
+        Some((*self as usize).cmp(&(*other as usize)))
+    }
+}
+
+impl FromStr for Level {
+    type Err = ParseLevelError;
+    fn from_str(level: &str) -> Result<Level, Self::Err> {
+        LOG_LEVEL_NAMES
+            .iter()
+            .position(|&name| name.eq_ignore_ascii_case(level))
+            .into_iter()
+            .filter(|&idx| idx != 0)
+            .map(|idx| Level::from_usize(idx).unwrap())
+            .next()
+            .ok_or(ParseLevelError(()))
+    }
+}
+
+impl fmt::Display for Level {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.pad(self.as_str())
+    }
+}
+
+impl Level {
+    fn from_usize(u: usize) -> Option<Level> {
+        match u {
+            1 => Some(Level::Error),
+            2 => Some(Level::Warn),
+            3 => Some(Level::Info),
+            4 => Some(Level::Debug),
+            5 => Some(Level::Trace),
+            _ => None,
+        }
+    }
+
+    /// Returns the most verbose logging level.
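+    ///
+    /// # Examples
+    ///
+    /// `Trace` is currently the most verbose level:
+    ///
+    /// ```
+    /// use log::Level;
+    ///
+    /// assert_eq!(Level::max(), Level::Trace);
+    /// ```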
+    #[inline]
+    pub fn max() -> Level {
+        Level::Trace
+    }
+
+    /// Converts the `Level` to the equivalent `LevelFilter`.
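+    ///
+    /// # Examples
+    ///
+    /// Each level maps to the filter of the same name:
+    ///
+    /// ```
+    /// use log::{Level, LevelFilter};
+    ///
+    /// assert_eq!(Level::Warn.to_level_filter(), LevelFilter::Warn);
+    /// ```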
+    #[inline]
+    pub fn to_level_filter(&self) -> LevelFilter {
+        LevelFilter::from_usize(*self as usize).unwrap()
+    }
+
+    /// Returns the string representation of the `Level`.
+    ///
+    /// This returns the same string as the `fmt::Display` implementation.
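+    ///
+    /// # Examples
+    ///
+    /// The returned name is upper-case and matches the `Display` output:
+    ///
+    /// ```
+    /// use log::Level;
+    ///
+    /// assert_eq!(Level::Info.as_str(), "INFO");
+    /// assert_eq!(Level::Info.as_str(), Level::Info.to_string());
+    /// ```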
+    pub fn as_str(&self) -> &'static str {
+        LOG_LEVEL_NAMES[*self as usize]
+    }
+
+    /// Iterate through all supported logging levels.
+    ///
+    /// The order of iteration is from more severe to less severe log messages.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use log::Level;
+    ///
+    /// let mut levels = Level::iter();
+    ///
+    /// assert_eq!(Some(Level::Error), levels.next());
+    /// assert_eq!(Some(Level::Trace), levels.last());
+    /// ```
+    pub fn iter() -> impl Iterator<Item = Self> {
+        (1..6).map(|i| Self::from_usize(i).unwrap())
+    }
+}
+
+/// An enum representing the available verbosity level filters of the logger.
+///
+/// A `LevelFilter` may be compared directly to a [`Level`]. Use this type
+/// to get and set the maximum log level with [`max_level()`] and [`set_max_level`].
+///
+/// [`Level`]: enum.Level.html
+/// [`max_level()`]: fn.max_level.html
+/// [`set_max_level`]: fn.set_max_level.html
+#[repr(usize)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub enum LevelFilter {
+    /// A level lower than all log levels.
+    Off,
+    /// Corresponds to the `Error` log level.
+    Error,
+    /// Corresponds to the `Warn` log level.
+    Warn,
+    /// Corresponds to the `Info` log level.
+    Info,
+    /// Corresponds to the `Debug` log level.
+    Debug,
+    /// Corresponds to the `Trace` log level.
+    Trace,
+}
+
+impl PartialEq<Level> for LevelFilter {
+    #[inline]
+    fn eq(&self, other: &Level) -> bool {
+        other.eq(self)
+    }
+}
+
+impl PartialOrd<Level> for LevelFilter {
+    #[inline]
+    fn partial_cmp(&self, other: &Level) -> Option<cmp::Ordering> {
+        Some((*self as usize).cmp(&(*other as usize)))
+    }
+}
+
+impl FromStr for LevelFilter {
+    type Err = ParseLevelError;
+    fn from_str(level: &str) -> Result<LevelFilter, Self::Err> {
+        LOG_LEVEL_NAMES
+            .iter()
+            .position(|&name| name.eq_ignore_ascii_case(level))
+            .map(|p| LevelFilter::from_usize(p).unwrap())
+            .ok_or(ParseLevelError(()))
+    }
+}
+
+impl fmt::Display for LevelFilter {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.pad(self.as_str())
+    }
+}
+
+impl LevelFilter {
+    fn from_usize(u: usize) -> Option<LevelFilter> {
+        match u {
+            0 => Some(LevelFilter::Off),
+            1 => Some(LevelFilter::Error),
+            2 => Some(LevelFilter::Warn),
+            3 => Some(LevelFilter::Info),
+            4 => Some(LevelFilter::Debug),
+            5 => Some(LevelFilter::Trace),
+            _ => None,
+        }
+    }
+
+    /// Returns the most verbose logging level filter.
+    #[inline]
+    pub fn max() -> LevelFilter {
+        LevelFilter::Trace
+    }
+
+    /// Converts `self` to the equivalent `Level`.
+    ///
+    /// Returns `None` if `self` is `LevelFilter::Off`.
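+    ///
+    /// # Examples
+    ///
+    /// Every filter except `Off` has a corresponding level:
+    ///
+    /// ```
+    /// use log::{Level, LevelFilter};
+    ///
+    /// assert_eq!(LevelFilter::Warn.to_level(), Some(Level::Warn));
+    /// assert_eq!(LevelFilter::Off.to_level(), None);
+    /// ```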
+    #[inline]
+    pub fn to_level(&self) -> Option<Level> {
+        Level::from_usize(*self as usize)
+    }
+
+    /// Returns the string representation of the `LevelFilter`.
+    ///
+    /// This returns the same string as the `fmt::Display` implementation.
+    pub fn as_str(&self) -> &'static str {
+        LOG_LEVEL_NAMES[*self as usize]
+    }
+
+    /// Iterate through all supported filtering levels.
+    ///
+    /// The order of iteration is from less to more verbose filtering.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use log::LevelFilter;
+    ///
+    /// let mut levels = LevelFilter::iter();
+    ///
+    /// assert_eq!(Some(LevelFilter::Off), levels.next());
+    /// assert_eq!(Some(LevelFilter::Trace), levels.last());
+    /// ```
+    pub fn iter() -> impl Iterator<Item = Self> {
+        (0..6).map(|i| Self::from_usize(i).unwrap())
+    }
+}
+
+#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+enum MaybeStaticStr<'a> {
+    Static(&'static str),
+    Borrowed(&'a str),
+}
+
+impl<'a> MaybeStaticStr<'a> {
+    #[inline]
+    fn get(&self) -> &'a str {
+        match *self {
+            MaybeStaticStr::Static(s) => s,
+            MaybeStaticStr::Borrowed(s) => s,
+        }
+    }
+}
+
+/// The "payload" of a log message.
+///
+/// # Use
+///
+/// `Record` structures are passed as parameters to the [`log`][method.log]
+/// method of the [`Log`] trait. Logger implementors manipulate these
+/// structures in order to display log messages. `Record`s are automatically
+/// created by the [`log!`] macro and so are not seen by log users.
+///
+/// Note that the [`level()`] and [`target()`] accessors are equivalent to
+/// `self.metadata().level()` and `self.metadata().target()` respectively.
+/// These methods are provided as a convenience for users of this structure.
+///
+/// # Example
+///
+/// The following example shows a simple logger that displays the level,
+/// module path, and message of any `Record` that is passed to it.
+///
+/// ```
+/// struct SimpleLogger;
+///
+/// impl log::Log for SimpleLogger {
+///    fn enabled(&self, _metadata: &log::Metadata) -> bool {
+///        true
+///    }
+///
+///    fn log(&self, record: &log::Record) {
+///        if !self.enabled(record.metadata()) {
+///            return;
+///        }
+///
+///        println!("{}:{} -- {}",
+///                 record.level(),
+///                 record.target(),
+///                 record.args());
+///    }
+///    fn flush(&self) {}
+/// }
+/// ```
+///
+/// [method.log]: trait.Log.html#tymethod.log
+/// [`Log`]: trait.Log.html
+/// [`log!`]: macro.log.html
+/// [`level()`]: struct.Record.html#method.level
+/// [`target()`]: struct.Record.html#method.target
+#[derive(Clone, Debug)]
+pub struct Record<'a> {
+    metadata: Metadata<'a>,
+    args: fmt::Arguments<'a>,
+    module_path: Option<MaybeStaticStr<'a>>,
+    file: Option<MaybeStaticStr<'a>>,
+    line: Option<u32>,
+    #[cfg(feature = "kv")]
+    key_values: KeyValues<'a>,
+}
+
+// This wrapper type is only needed so we can
+// `#[derive(Debug)]` on `Record`. It also
+// provides a useful `Debug` implementation for
+// the underlying `Source`.
+#[cfg(feature = "kv")]
+#[derive(Clone)]
+struct KeyValues<'a>(&'a dyn kv::Source);
+
+#[cfg(feature = "kv")]
+impl<'a> fmt::Debug for KeyValues<'a> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut visitor = f.debug_map();
+        self.0.visit(&mut visitor).map_err(|_| fmt::Error)?;
+        visitor.finish()
+    }
+}
+
+impl<'a> Record<'a> {
+    /// Returns a new builder.
+    #[inline]
+    pub fn builder() -> RecordBuilder<'a> {
+        RecordBuilder::new()
+    }
+
+    /// The message body.
+    #[inline]
+    pub fn args(&self) -> &fmt::Arguments<'a> {
+        &self.args
+    }
+
+    /// Metadata about the log directive.
+    #[inline]
+    pub fn metadata(&self) -> &Metadata<'a> {
+        &self.metadata
+    }
+
+    /// The verbosity level of the message.
+    #[inline]
+    pub fn level(&self) -> Level {
+        self.metadata.level()
+    }
+
+    /// The name of the target of the directive.
+    #[inline]
+    pub fn target(&self) -> &'a str {
+        self.metadata.target()
+    }
+
+    /// The module path of the message.
+    #[inline]
+    pub fn module_path(&self) -> Option<&'a str> {
+        self.module_path.map(|s| s.get())
+    }
+
+    /// The module path of the message, if it is a `'static` string.
+    #[inline]
+    pub fn module_path_static(&self) -> Option<&'static str> {
+        match self.module_path {
+            Some(MaybeStaticStr::Static(s)) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// The source file containing the message.
+    #[inline]
+    pub fn file(&self) -> Option<&'a str> {
+        self.file.map(|s| s.get())
+    }
+
+    /// The source file containing the message, if it is a `'static` string.
+    #[inline]
+    pub fn file_static(&self) -> Option<&'static str> {
+        match self.file {
+            Some(MaybeStaticStr::Static(s)) => Some(s),
+            _ => None,
+        }
+    }
+
+    /// The line containing the message.
+    #[inline]
+    pub fn line(&self) -> Option<u32> {
+        self.line
+    }
+
+    /// The structured key-value pairs associated with the message.
+    #[cfg(feature = "kv")]
+    #[inline]
+    pub fn key_values(&self) -> &dyn kv::Source {
+        self.key_values.0
+    }
+
+    /// Create a new [`RecordBuilder`](struct.RecordBuilder.html) based on this record.
+    #[cfg(feature = "kv")]
+    #[inline]
+    pub fn to_builder(&self) -> RecordBuilder {
+        RecordBuilder {
+            record: Record {
+                metadata: Metadata {
+                    level: self.metadata.level,
+                    target: self.metadata.target,
+                },
+                args: self.args,
+                module_path: self.module_path,
+                file: self.file,
+                line: self.line,
+                key_values: self.key_values.clone(),
+            },
+        }
+    }
+}
+
+/// Builder for [`Record`](struct.Record.html).
+///
+/// Typically should only be used by log library creators or for testing and "shim loggers".
+/// The `RecordBuilder` can set the different parameters of a `Record` object, and returns
+/// the created object when `build` is called.
+///
+/// # Examples
+///
+/// ```
+/// use log::{Level, Record};
+///
+/// let record = Record::builder()
+///                 .args(format_args!("Error!"))
+///                 .level(Level::Error)
+///                 .target("myApp")
+///                 .file(Some("server.rs"))
+///                 .line(Some(144))
+///                 .module_path(Some("server"))
+///                 .build();
+/// ```
+///
+/// Alternatively, use [`MetadataBuilder`](struct.MetadataBuilder.html):
+///
+/// ```
+/// use log::{Record, Level, MetadataBuilder};
+///
+/// let error_metadata = MetadataBuilder::new()
+///                         .target("myApp")
+///                         .level(Level::Error)
+///                         .build();
+///
+/// let record = Record::builder()
+///                 .metadata(error_metadata)
+///                 .args(format_args!("Error!"))
+///                 .line(Some(433))
+///                 .file(Some("app.rs"))
+///                 .module_path(Some("server"))
+///                 .build();
+/// ```
+#[derive(Debug)]
+pub struct RecordBuilder<'a> {
+    record: Record<'a>,
+}
+
+impl<'a> RecordBuilder<'a> {
+    /// Construct new `RecordBuilder`.
+    ///
+    /// The default options are:
+    ///
+    /// - `args`: [`format_args!("")`]
+    /// - `metadata`: [`Metadata::builder().build()`]
+    /// - `module_path`: `None`
+    /// - `file`: `None`
+    /// - `line`: `None`
+    ///
+    /// [`format_args!("")`]: https://doc.rust-lang.org/std/macro.format_args.html
+    /// [`Metadata::builder().build()`]: struct.MetadataBuilder.html#method.build
+    #[inline]
+    pub fn new() -> RecordBuilder<'a> {
+        RecordBuilder {
+            record: Record {
+                args: format_args!(""),
+                metadata: Metadata::builder().build(),
+                module_path: None,
+                file: None,
+                line: None,
+                #[cfg(feature = "kv")]
+                key_values: KeyValues(&None::<(kv::Key, kv::Value)>),
+            },
+        }
+    }
+
+    /// Set [`args`](struct.Record.html#method.args).
+    #[inline]
+    pub fn args(&mut self, args: fmt::Arguments<'a>) -> &mut RecordBuilder<'a> {
+        self.record.args = args;
+        self
+    }
+
+    /// Set [`metadata`](struct.Record.html#method.metadata). Construct a `Metadata` object with [`MetadataBuilder`](struct.MetadataBuilder.html).
+    #[inline]
+    pub fn metadata(&mut self, metadata: Metadata<'a>) -> &mut RecordBuilder<'a> {
+        self.record.metadata = metadata;
+        self
+    }
+
+    /// Set [`Metadata::level`](struct.Metadata.html#method.level).
+    #[inline]
+    pub fn level(&mut self, level: Level) -> &mut RecordBuilder<'a> {
+        self.record.metadata.level = level;
+        self
+    }
+
+    /// Set [`Metadata::target`](struct.Metadata.html#method.target)
+    #[inline]
+    pub fn target(&mut self, target: &'a str) -> &mut RecordBuilder<'a> {
+        self.record.metadata.target = target;
+        self
+    }
+
+    /// Set [`module_path`](struct.Record.html#method.module_path)
+    #[inline]
+    pub fn module_path(&mut self, path: Option<&'a str>) -> &mut RecordBuilder<'a> {
+        self.record.module_path = path.map(MaybeStaticStr::Borrowed);
+        self
+    }
+
+    /// Set [`module_path`](struct.Record.html#method.module_path) to a `'static` string
+    #[inline]
+    pub fn module_path_static(&mut self, path: Option<&'static str>) -> &mut RecordBuilder<'a> {
+        self.record.module_path = path.map(MaybeStaticStr::Static);
+        self
+    }
+
+    /// Set [`file`](struct.Record.html#method.file)
+    #[inline]
+    pub fn file(&mut self, file: Option<&'a str>) -> &mut RecordBuilder<'a> {
+        self.record.file = file.map(MaybeStaticStr::Borrowed);
+        self
+    }
+
+    /// Set [`file`](struct.Record.html#method.file) to a `'static` string.
+    #[inline]
+    pub fn file_static(&mut self, file: Option<&'static str>) -> &mut RecordBuilder<'a> {
+        self.record.file = file.map(MaybeStaticStr::Static);
+        self
+    }
+
+    /// Set [`line`](struct.Record.html#method.line)
+    #[inline]
+    pub fn line(&mut self, line: Option<u32>) -> &mut RecordBuilder<'a> {
+        self.record.line = line;
+        self
+    }
+
+    /// Set [`key_values`](struct.Record.html#method.key_values)
+    #[cfg(feature = "kv")]
+    #[inline]
+    pub fn key_values(&mut self, kvs: &'a dyn kv::Source) -> &mut RecordBuilder<'a> {
+        self.record.key_values = KeyValues(kvs);
+        self
+    }
+
+    /// Invoke the builder and return a `Record`
+    #[inline]
+    pub fn build(&self) -> Record<'a> {
+        self.record.clone()
+    }
+}
+
+impl<'a> Default for RecordBuilder<'a> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// Metadata about a log message.
+///
+/// # Use
+///
+/// `Metadata` structs are created when users of the library use
+/// logging macros.
+///
+/// They are consumed by implementations of the `Log` trait in the
+/// `enabled` method.
+///
+/// `Record`s use `Metadata` to determine the log message's severity
+/// and target.
+///
+/// Users should use the `log_enabled!` macro in their code to avoid
+/// constructing expensive log messages.
+///
+/// # Examples
+///
+/// ```
+/// use log::{Record, Level, Metadata};
+///
+/// struct MyLogger;
+///
+/// impl log::Log for MyLogger {
+///     fn enabled(&self, metadata: &Metadata) -> bool {
+///         metadata.level() <= Level::Info
+///     }
+///
+///     fn log(&self, record: &Record) {
+///         if self.enabled(record.metadata()) {
+///             println!("{} - {}", record.level(), record.args());
+///         }
+///     }
+///     fn flush(&self) {}
+/// }
+///
+/// # fn main(){}
+/// ```
+#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+pub struct Metadata<'a> {
+    level: Level,
+    target: &'a str,
+}
+
+impl<'a> Metadata<'a> {
+    /// Returns a new builder.
+    #[inline]
+    pub fn builder() -> MetadataBuilder<'a> {
+        MetadataBuilder::new()
+    }
+
+    /// The verbosity level of the message.
+    #[inline]
+    pub fn level(&self) -> Level {
+        self.level
+    }
+
+    /// The name of the target of the directive.
+    #[inline]
+    pub fn target(&self) -> &'a str {
+        self.target
+    }
+}
+
+/// Builder for [`Metadata`](struct.Metadata.html).
+///
+/// Typically should only be used by log library creators or for testing and "shim loggers".
+/// The `MetadataBuilder` can set the different parameters of a `Metadata` object, and returns
+/// the created object when `build` is called.
+///
+/// # Example
+///
+/// ```
+/// let target = "myApp";
+/// use log::{Level, MetadataBuilder};
+/// let metadata = MetadataBuilder::new()
+///                     .level(Level::Debug)
+///                     .target(target)
+///                     .build();
+/// ```
+#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug)]
+pub struct MetadataBuilder<'a> {
+    metadata: Metadata<'a>,
+}
+
+impl<'a> MetadataBuilder<'a> {
+    /// Construct a new `MetadataBuilder`.
+    ///
+    /// The default options are:
+    ///
+    /// - `level`: `Level::Info`
+    /// - `target`: `""`
+    #[inline]
+    pub fn new() -> MetadataBuilder<'a> {
+        MetadataBuilder {
+            metadata: Metadata {
+                level: Level::Info,
+                target: "",
+            },
+        }
+    }
+
+    /// Setter for [`level`](struct.Metadata.html#method.level).
+    #[inline]
+    pub fn level(&mut self, arg: Level) -> &mut MetadataBuilder<'a> {
+        self.metadata.level = arg;
+        self
+    }
+
+    /// Setter for [`target`](struct.Metadata.html#method.target).
+    #[inline]
+    pub fn target(&mut self, target: &'a str) -> &mut MetadataBuilder<'a> {
+        self.metadata.target = target;
+        self
+    }
+
+    /// Returns a `Metadata` object.
+    #[inline]
+    pub fn build(&self) -> Metadata<'a> {
+        self.metadata.clone()
+    }
+}
+
+impl<'a> Default for MetadataBuilder<'a> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// A trait encapsulating the operations required of a logger.
+pub trait Log: Sync + Send {
+    /// Determines if a log message with the specified metadata would be
+    /// logged.
+    ///
+    /// This is used by the `log_enabled!` macro to allow callers to avoid
+    /// expensive computation of log message arguments if the message would be
+    /// discarded anyway.
+    ///
+    /// # For implementors
+    ///
+    /// This method isn't called automatically by the `log!` macros.
+    /// It's up to an implementation of the `Log` trait to call `enabled` in its own
+    /// `log` method implementation to guarantee that filtering is applied.
+    fn enabled(&self, metadata: &Metadata) -> bool;
+
+    /// Logs the `Record`.
+    ///
+    /// # For implementors
+    ///
+    /// Note that `enabled` is *not* necessarily called before this method.
+    /// Implementations of `log` should perform all necessary filtering
+    /// internally.
+    fn log(&self, record: &Record);
+
+    /// Flushes any buffered records.
+    fn flush(&self);
+}
+
+// Just used as a dummy initial value for LOGGER
+struct NopLogger;
+
+impl Log for NopLogger {
+    fn enabled(&self, _: &Metadata) -> bool {
+        false
+    }
+
+    fn log(&self, _: &Record) {}
+    fn flush(&self) {}
+}
+
+impl<T> Log for &'_ T
+where
+    T: ?Sized + Log,
+{
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        (**self).enabled(metadata)
+    }
+
+    fn log(&self, record: &Record) {
+        (**self).log(record);
+    }
+    fn flush(&self) {
+        (**self).flush();
+    }
+}
+
+#[cfg(feature = "std")]
+impl<T> Log for std::boxed::Box<T>
+where
+    T: ?Sized + Log,
+{
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        self.as_ref().enabled(metadata)
+    }
+
+    fn log(&self, record: &Record) {
+        self.as_ref().log(record);
+    }
+    fn flush(&self) {
+        self.as_ref().flush();
+    }
+}
+
+#[cfg(feature = "std")]
+impl<T> Log for std::sync::Arc<T>
+where
+    T: ?Sized + Log,
+{
+    fn enabled(&self, metadata: &Metadata) -> bool {
+        self.as_ref().enabled(metadata)
+    }
+
+    fn log(&self, record: &Record) {
+        self.as_ref().log(record);
+    }
+    fn flush(&self) {
+        self.as_ref().flush();
+    }
+}
+
+/// Sets the global maximum log level.
+///
+/// Generally, this should only be called by the active logging implementation.
+///
+/// Note that `Trace` is the maximum level, because it provides the maximum amount of detail in the emitted logs.
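+///
+/// # Examples
+///
+/// A logging implementation would typically call this right after installing
+/// itself; a minimal sketch:
+///
+/// ```
+/// use log::LevelFilter;
+///
+/// log::set_max_level(LevelFilter::Info);
+/// assert_eq!(log::max_level(), LevelFilter::Info);
+/// ```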
+#[inline]
+#[cfg(target_has_atomic = "ptr")]
+pub fn set_max_level(level: LevelFilter) {
+    MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::Relaxed);
+}
+
+/// A thread-unsafe version of [`set_max_level`].
+///
+/// This function is available on all platforms, even those that do not have
+/// support for atomics that is needed by [`set_max_level`].
+///
+/// In almost all cases, [`set_max_level`] should be preferred.
+///
+/// # Safety
+///
+/// This function is only safe to call when no other level setting function is
+/// called while this function still executes.
+///
+/// This can be upheld by (for example) making sure that **there are no other
+/// threads**, and (on embedded) that **interrupts are disabled**.
+///
+/// It is safe to use all other logging functions while this function runs
+/// (including all logging macros).
+///
+/// [`set_max_level`]: fn.set_max_level.html
+#[inline]
+pub unsafe fn set_max_level_racy(level: LevelFilter) {
+    // `MAX_LOG_LEVEL_FILTER` uses a `Cell` as the underlying primitive when a
+    // platform doesn't support `target_has_atomic = "ptr"`, so even though this looks the same
+    // as `set_max_level` it may have different safety properties.
+    MAX_LOG_LEVEL_FILTER.store(level as usize, Ordering::Relaxed);
+}
+
+/// Returns the current maximum log level.
+///
+/// The [`log!`], [`error!`], [`warn!`], [`info!`], [`debug!`], and [`trace!`] macros check
+/// this value and discard any message logged at a higher level. The maximum
+/// log level is set by the [`set_max_level`] function.
+///
+/// [`log!`]: macro.log.html
+/// [`error!`]: macro.error.html
+/// [`warn!`]: macro.warn.html
+/// [`info!`]: macro.info.html
+/// [`debug!`]: macro.debug.html
+/// [`trace!`]: macro.trace.html
+/// [`set_max_level`]: fn.set_max_level.html
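+///
+/// # Examples
+///
+/// A sketch of skipping an expensive computation when the record would be
+/// discarded anyway (`expensive_report` is a hypothetical helper):
+///
+/// ```
+/// use log::{debug, Level};
+///
+/// # fn expensive_report() -> String { String::new() }
+/// if Level::Debug <= log::max_level() {
+///     debug!("diagnostics: {}", expensive_report());
+/// }
+/// ```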
+#[inline(always)]
+pub fn max_level() -> LevelFilter {
+    // Since `LevelFilter` is `repr(usize)`,
+    // this transmute is sound if and only if `MAX_LOG_LEVEL_FILTER`
+    // is set to a usize that is a valid discriminant for `LevelFilter`.
+    // Since `MAX_LOG_LEVEL_FILTER` is private, the only time it's set
+    // is by `set_max_level` above, i.e. by casting a `LevelFilter` to `usize`.
+    // So any usize stored in `MAX_LOG_LEVEL_FILTER` is a valid discriminant.
+    unsafe { mem::transmute(MAX_LOG_LEVEL_FILTER.load(Ordering::Relaxed)) }
+}
+
+/// Sets the global logger to a `Box<Log>`.
+///
+/// This is a simple convenience wrapper over `set_logger`, which takes a
+/// `Box<Log>` rather than a `&'static Log`. See the documentation for
+/// [`set_logger`] for more details.
+///
+/// Requires the `std` feature.
+///
+/// # Errors
+///
+/// An error is returned if a logger has already been set.
+///
+/// [`set_logger`]: fn.set_logger.html
+#[cfg(all(feature = "std", target_has_atomic = "ptr"))]
+pub fn set_boxed_logger(logger: Box<dyn Log>) -> Result<(), SetLoggerError> {
+    set_logger_inner(|| Box::leak(logger))
+}
+
+/// Sets the global logger to a `&'static Log`.
+///
+/// This function may only be called once in the lifetime of a program. Any log
+/// events that occur before the call to `set_logger` completes will be ignored.
+///
+/// This function does not typically need to be called manually. Logger
+/// implementations should provide an initialization method that installs the
+/// logger internally.
+///
+/// # Availability
+///
+/// This method is available even when the `std` feature is disabled. However,
+/// it is currently unavailable on `thumbv6` targets, which lack support for
+/// some of the atomic operations used by this function. Even on those
+/// targets, [`set_logger_racy`] will be available.
+///
+/// # Errors
+///
+/// An error is returned if a logger has already been set.
+///
+/// # Examples
+///
+/// ```
+/// use log::{error, info, warn, Record, Level, Metadata, LevelFilter};
+///
+/// static MY_LOGGER: MyLogger = MyLogger;
+///
+/// struct MyLogger;
+///
+/// impl log::Log for MyLogger {
+///     fn enabled(&self, metadata: &Metadata) -> bool {
+///         metadata.level() <= Level::Info
+///     }
+///
+///     fn log(&self, record: &Record) {
+///         if self.enabled(record.metadata()) {
+///             println!("{} - {}", record.level(), record.args());
+///         }
+///     }
+///     fn flush(&self) {}
+/// }
+///
+/// # fn main(){
+/// log::set_logger(&MY_LOGGER).unwrap();
+/// log::set_max_level(LevelFilter::Info);
+///
+/// info!("hello log");
+/// warn!("warning");
+/// error!("oops");
+/// # }
+/// ```
+///
+/// [`set_logger_racy`]: fn.set_logger_racy.html
+#[cfg(target_has_atomic = "ptr")]
+pub fn set_logger(logger: &'static dyn Log) -> Result<(), SetLoggerError> {
+    set_logger_inner(|| logger)
+}
+
+#[cfg(target_has_atomic = "ptr")]
+fn set_logger_inner<F>(make_logger: F) -> Result<(), SetLoggerError>
+where
+    F: FnOnce() -> &'static dyn Log,
+{
+    let old_state = match STATE.compare_exchange(
+        UNINITIALIZED,
+        INITIALIZING,
+        Ordering::SeqCst,
+        Ordering::SeqCst,
+    ) {
+        Ok(s) | Err(s) => s,
+    };
+    match old_state {
+        UNINITIALIZED => {
+            unsafe {
+                LOGGER = make_logger();
+            }
+            STATE.store(INITIALIZED, Ordering::SeqCst);
+            Ok(())
+        }
+        INITIALIZING => {
+            while STATE.load(Ordering::SeqCst) == INITIALIZING {
+                // TODO: replace with `hint::spin_loop` once MSRV is 1.49.0.
+                #[allow(deprecated)]
+                std::sync::atomic::spin_loop_hint();
+            }
+            Err(SetLoggerError(()))
+        }
+        _ => Err(SetLoggerError(())),
+    }
+}
+
+/// A thread-unsafe version of [`set_logger`].
+///
+/// This function is available on all platforms, even those that do not have
+/// support for atomics that is needed by [`set_logger`].
+///
+/// In almost all cases, [`set_logger`] should be preferred.
+///
+/// # Safety
+///
+/// This function is only safe to call when no other logger initialization
+/// function is called while this function still executes.
+///
+/// This can be upheld by (for example) making sure that **there are no other
+/// threads**, and (on embedded) that **interrupts are disabled**.
+///
+/// It is safe to use other logging functions while this function runs
+/// (including all logging macros).
+///
+/// [`set_logger`]: fn.set_logger.html
+pub unsafe fn set_logger_racy(logger: &'static dyn Log) -> Result<(), SetLoggerError> {
+    match STATE.load(Ordering::SeqCst) {
+        UNINITIALIZED => {
+            LOGGER = logger;
+            STATE.store(INITIALIZED, Ordering::SeqCst);
+            Ok(())
+        }
+        INITIALIZING => {
+            // This is just plain UB, since we were racing another initialization function
+            unreachable!("set_logger_racy must not be used with other initialization functions")
+        }
+        _ => Err(SetLoggerError(())),
+    }
+}
+
+/// The type returned by [`set_logger`] if [`set_logger`] has already been called.
+///
+/// [`set_logger`]: fn.set_logger.html
+#[allow(missing_copy_implementations)]
+#[derive(Debug)]
+pub struct SetLoggerError(());
+
+impl fmt::Display for SetLoggerError {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.write_str(SET_LOGGER_ERROR)
+    }
+}
+
+// The Error trait is not available in libcore
+#[cfg(feature = "std")]
+impl error::Error for SetLoggerError {}
+
+/// The type returned by [`from_str`] when the string doesn't match any of the log levels.
+///
+/// [`from_str`]: https://doc.rust-lang.org/std/str/trait.FromStr.html#tymethod.from_str
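+///
+/// # Examples
+///
+/// Parsing succeeds only for known level names (case-insensitive); note that
+/// `Level` has no `Off` variant, so `"off"` parses as a `LevelFilter` but not
+/// as a `Level`:
+///
+/// ```
+/// use log::{Level, LevelFilter};
+///
+/// assert!("warn".parse::<Level>().is_ok());
+/// assert!("off".parse::<LevelFilter>().is_ok());
+/// assert!("off".parse::<Level>().is_err());
+/// assert!("not-a-level".parse::<Level>().is_err());
+/// ```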
+#[allow(missing_copy_implementations)]
+#[derive(Debug, PartialEq, Eq)]
+pub struct ParseLevelError(());
+
+impl fmt::Display for ParseLevelError {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.write_str(LEVEL_PARSE_ERROR)
+    }
+}
+
+// The Error trait is not available in libcore
+#[cfg(feature = "std")]
+impl error::Error for ParseLevelError {}
+
+/// Returns a reference to the logger.
+///
+/// If a logger has not been set, a no-op implementation is returned.
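+///
+/// # Examples
+///
+/// Calling through the returned handle is valid even before initialization;
+/// the no-op logger simply discards everything:
+///
+/// ```
+/// log::logger().flush();
+/// ```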
+pub fn logger() -> &'static dyn Log {
+    // Acquire memory ordering guarantees that the current thread sees any
+    // memory writes that happened before the value was stored
+    // into `STATE` with memory ordering `Release` or stronger.
+    //
+    // Since the value `INITIALIZED` is written only after `LOGGER` was
+    // initialized, observing it after `Acquire` load here makes both
+    // write to the `LOGGER` static and initialization of the logger
+    // internal state synchronized with current thread.
+    if STATE.load(Ordering::Acquire) != INITIALIZED {
+        static NOP: NopLogger = NopLogger;
+        &NOP
+    } else {
+        unsafe { LOGGER }
+    }
+}
+
+// WARNING: this is not part of the crate's public API and is subject to change at any time
+#[doc(hidden)]
+pub mod __private_api;
+
+/// The statically resolved maximum log level.
+///
+/// See the crate level documentation for information on how to configure this.
+///
+/// This value is checked by the log macros, but not by the `Log`ger returned by
+/// the [`logger`] function. Code that manually calls functions on that value
+/// should compare the level against this value.
+///
+/// [`logger`]: fn.logger.html
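+///
+/// # Examples
+///
+/// A sketch of honoring this constant when bypassing the macros and calling
+/// the logger directly (the `"example"` target is arbitrary):
+///
+/// ```
+/// use log::{Level, STATIC_MAX_LEVEL};
+///
+/// if Level::Info <= STATIC_MAX_LEVEL && Level::Info <= log::max_level() {
+///     log::logger().log(
+///         &log::Record::builder()
+///             .args(format_args!("manual record"))
+///             .level(Level::Info)
+///             .target("example")
+///             .build(),
+///     );
+/// }
+/// ```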
+pub const STATIC_MAX_LEVEL: LevelFilter = match cfg!(debug_assertions) {
+    false if cfg!(feature = "release_max_level_off") => LevelFilter::Off,
+    false if cfg!(feature = "release_max_level_error") => LevelFilter::Error,
+    false if cfg!(feature = "release_max_level_warn") => LevelFilter::Warn,
+    false if cfg!(feature = "release_max_level_info") => LevelFilter::Info,
+    false if cfg!(feature = "release_max_level_debug") => LevelFilter::Debug,
+    false if cfg!(feature = "release_max_level_trace") => LevelFilter::Trace,
+    _ if cfg!(feature = "max_level_off") => LevelFilter::Off,
+    _ if cfg!(feature = "max_level_error") => LevelFilter::Error,
+    _ if cfg!(feature = "max_level_warn") => LevelFilter::Warn,
+    _ if cfg!(feature = "max_level_info") => LevelFilter::Info,
+    _ if cfg!(feature = "max_level_debug") => LevelFilter::Debug,
+    _ => LevelFilter::Trace,
+};
+
+#[cfg(test)]
+mod tests {
+    use super::{Level, LevelFilter, ParseLevelError, STATIC_MAX_LEVEL};
+
+    #[test]
+    fn test_levelfilter_from_str() {
+        let tests = [
+            ("off", Ok(LevelFilter::Off)),
+            ("error", Ok(LevelFilter::Error)),
+            ("warn", Ok(LevelFilter::Warn)),
+            ("info", Ok(LevelFilter::Info)),
+            ("debug", Ok(LevelFilter::Debug)),
+            ("trace", Ok(LevelFilter::Trace)),
+            ("OFF", Ok(LevelFilter::Off)),
+            ("ERROR", Ok(LevelFilter::Error)),
+            ("WARN", Ok(LevelFilter::Warn)),
+            ("INFO", Ok(LevelFilter::Info)),
+            ("DEBUG", Ok(LevelFilter::Debug)),
+            ("TRACE", Ok(LevelFilter::Trace)),
+            ("asdf", Err(ParseLevelError(()))),
+        ];
+        for &(s, ref expected) in &tests {
+            assert_eq!(expected, &s.parse());
+        }
+    }
+
+    #[test]
+    fn test_level_from_str() {
+        let tests = [
+            ("OFF", Err(ParseLevelError(()))),
+            ("error", Ok(Level::Error)),
+            ("warn", Ok(Level::Warn)),
+            ("info", Ok(Level::Info)),
+            ("debug", Ok(Level::Debug)),
+            ("trace", Ok(Level::Trace)),
+            ("ERROR", Ok(Level::Error)),
+            ("WARN", Ok(Level::Warn)),
+            ("INFO", Ok(Level::Info)),
+            ("DEBUG", Ok(Level::Debug)),
+            ("TRACE", Ok(Level::Trace)),
+            ("asdf", Err(ParseLevelError(()))),
+        ];
+        for &(s, ref expected) in &tests {
+            assert_eq!(expected, &s.parse());
+        }
+    }
+
+    #[test]
+    fn test_level_as_str() {
+        let tests = &[
+            (Level::Error, "ERROR"),
+            (Level::Warn, "WARN"),
+            (Level::Info, "INFO"),
+            (Level::Debug, "DEBUG"),
+            (Level::Trace, "TRACE"),
+        ];
+        for (input, expected) in tests {
+            assert_eq!(*expected, input.as_str());
+        }
+    }
+
+    #[test]
+    fn test_level_show() {
+        assert_eq!("INFO", Level::Info.to_string());
+        assert_eq!("ERROR", Level::Error.to_string());
+    }
+
+    #[test]
+    fn test_levelfilter_show() {
+        assert_eq!("OFF", LevelFilter::Off.to_string());
+        assert_eq!("ERROR", LevelFilter::Error.to_string());
+    }
+
+    #[test]
+    fn test_cross_cmp() {
+        assert!(Level::Debug > LevelFilter::Error);
+        assert!(LevelFilter::Warn < Level::Trace);
+        assert!(LevelFilter::Off < Level::Error);
+    }
+
+    #[test]
+    fn test_cross_eq() {
+        assert!(Level::Error == LevelFilter::Error);
+        assert!(LevelFilter::Off != Level::Error);
+        assert!(Level::Trace == LevelFilter::Trace);
+    }
+
+    #[test]
+    fn test_to_level() {
+        assert_eq!(Some(Level::Error), LevelFilter::Error.to_level());
+        assert_eq!(None, LevelFilter::Off.to_level());
+        assert_eq!(Some(Level::Debug), LevelFilter::Debug.to_level());
+    }
+
+    #[test]
+    fn test_to_level_filter() {
+        assert_eq!(LevelFilter::Error, Level::Error.to_level_filter());
+        assert_eq!(LevelFilter::Trace, Level::Trace.to_level_filter());
+    }
+
+    #[test]
+    fn test_level_filter_as_str() {
+        let tests = &[
+            (LevelFilter::Off, "OFF"),
+            (LevelFilter::Error, "ERROR"),
+            (LevelFilter::Warn, "WARN"),
+            (LevelFilter::Info, "INFO"),
+            (LevelFilter::Debug, "DEBUG"),
+            (LevelFilter::Trace, "TRACE"),
+        ];
+        for (input, expected) in tests {
+            assert_eq!(*expected, input.as_str());
+        }
+    }
+
+    #[test]
+    #[cfg_attr(not(debug_assertions), ignore)]
+    fn test_static_max_level_debug() {
+        if cfg!(feature = "max_level_off") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+        } else if cfg!(feature = "max_level_error") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+        } else if cfg!(feature = "max_level_warn") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+        } else if cfg!(feature = "max_level_info") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+        } else if cfg!(feature = "max_level_debug") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+        } else {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+        }
+    }
+
+    #[test]
+    #[cfg_attr(debug_assertions, ignore)]
+    fn test_static_max_level_release() {
+        if cfg!(feature = "release_max_level_off") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+        } else if cfg!(feature = "release_max_level_error") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+        } else if cfg!(feature = "release_max_level_warn") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+        } else if cfg!(feature = "release_max_level_info") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+        } else if cfg!(feature = "release_max_level_debug") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+        } else if cfg!(feature = "release_max_level_trace") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+        } else if cfg!(feature = "max_level_off") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Off);
+        } else if cfg!(feature = "max_level_error") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Error);
+        } else if cfg!(feature = "max_level_warn") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Warn);
+        } else if cfg!(feature = "max_level_info") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Info);
+        } else if cfg!(feature = "max_level_debug") {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Debug);
+        } else {
+            assert_eq!(STATIC_MAX_LEVEL, LevelFilter::Trace);
+        }
+    }
+
+    #[test]
+    #[cfg(feature = "std")]
+    fn test_error_trait() {
+        use super::SetLoggerError;
+        let e = SetLoggerError(());
+        assert_eq!(
+            &e.to_string(),
+            "attempted to set a logger after the logging system \
+             was already initialized"
+        );
+    }
+
+    #[test]
+    fn test_metadata_builder() {
+        use super::MetadataBuilder;
+        let target = "myApp";
+        let metadata_test = MetadataBuilder::new()
+            .level(Level::Debug)
+            .target(target)
+            .build();
+        assert_eq!(metadata_test.level(), Level::Debug);
+        assert_eq!(metadata_test.target(), "myApp");
+    }
+
+    #[test]
+    fn test_metadata_convenience_builder() {
+        use super::Metadata;
+        let target = "myApp";
+        let metadata_test = Metadata::builder()
+            .level(Level::Debug)
+            .target(target)
+            .build();
+        assert_eq!(metadata_test.level(), Level::Debug);
+        assert_eq!(metadata_test.target(), "myApp");
+    }
+
+    #[test]
+    fn test_record_builder() {
+        use super::{MetadataBuilder, RecordBuilder};
+        let target = "myApp";
+        let metadata = MetadataBuilder::new().target(target).build();
+        let fmt_args = format_args!("hello");
+        let record_test = RecordBuilder::new()
+            .args(fmt_args)
+            .metadata(metadata)
+            .module_path(Some("foo"))
+            .file(Some("bar"))
+            .line(Some(30))
+            .build();
+        assert_eq!(record_test.metadata().target(), "myApp");
+        assert_eq!(record_test.module_path(), Some("foo"));
+        assert_eq!(record_test.file(), Some("bar"));
+        assert_eq!(record_test.line(), Some(30));
+    }
+
+    #[test]
+    fn test_record_convenience_builder() {
+        use super::{Metadata, Record};
+        let target = "myApp";
+        let metadata = Metadata::builder().target(target).build();
+        let fmt_args = format_args!("hello");
+        let record_test = Record::builder()
+            .args(fmt_args)
+            .metadata(metadata)
+            .module_path(Some("foo"))
+            .file(Some("bar"))
+            .line(Some(30))
+            .build();
+        assert_eq!(record_test.target(), "myApp");
+        assert_eq!(record_test.module_path(), Some("foo"));
+        assert_eq!(record_test.file(), Some("bar"));
+        assert_eq!(record_test.line(), Some(30));
+    }
+
+    #[test]
+    fn test_record_complete_builder() {
+        use super::{Level, Record};
+        let target = "myApp";
+        let record_test = Record::builder()
+            .module_path(Some("foo"))
+            .file(Some("bar"))
+            .line(Some(30))
+            .target(target)
+            .level(Level::Error)
+            .build();
+        assert_eq!(record_test.target(), "myApp");
+        assert_eq!(record_test.level(), Level::Error);
+        assert_eq!(record_test.module_path(), Some("foo"));
+        assert_eq!(record_test.file(), Some("bar"));
+        assert_eq!(record_test.line(), Some(30));
+    }
+
+    #[test]
+    #[cfg(feature = "kv")]
+    fn test_record_key_values_builder() {
+        use super::Record;
+        use crate::kv::{self, VisitSource};
+
+        struct TestVisitSource {
+            seen_pairs: usize,
+        }
+
+        impl<'kvs> VisitSource<'kvs> for TestVisitSource {
+            fn visit_pair(
+                &mut self,
+                _: kv::Key<'kvs>,
+                _: kv::Value<'kvs>,
+            ) -> Result<(), kv::Error> {
+                self.seen_pairs += 1;
+                Ok(())
+            }
+        }
+
+        let kvs: &[(&str, i32)] = &[("a", 1), ("b", 2)];
+        let record_test = Record::builder().key_values(&kvs).build();
+
+        let mut visitor = TestVisitSource { seen_pairs: 0 };
+
+        record_test.key_values().visit(&mut visitor).unwrap();
+
+        assert_eq!(2, visitor.seen_pairs);
+    }
+
+    #[test]
+    #[cfg(feature = "kv")]
+    fn test_record_key_values_get_coerce() {
+        use super::Record;
+
+        let kvs: &[(&str, &str)] = &[("a", "1"), ("b", "2")];
+        let record = Record::builder().key_values(&kvs).build();
+
+        assert_eq!(
+            "2",
+            record
+                .key_values()
+                .get("b".into())
+                .expect("missing key")
+                .to_borrowed_str()
+                .expect("invalid value")
+        );
+    }
+
+    // Test that the `impl Log for Foo` blocks work
+    // This test mostly operates on a type level, so failures will be compile errors
+    #[test]
+    fn test_foreign_impl() {
+        use super::Log;
+        #[cfg(feature = "std")]
+        use std::sync::Arc;
+
+        fn assert_is_log<T: Log + ?Sized>() {}
+
+        assert_is_log::<&dyn Log>();
+
+        #[cfg(feature = "std")]
+        assert_is_log::<Box<dyn Log>>();
+
+        #[cfg(feature = "std")]
+        assert_is_log::<Arc<dyn Log>>();
+
+        // Assert these statements for all T: Log + ?Sized
+        #[allow(unused)]
+        fn forall<T: Log + ?Sized>() {
+            #[cfg(feature = "std")]
+            assert_is_log::<Box<T>>();
+
+            assert_is_log::<&T>();
+
+            #[cfg(feature = "std")]
+            assert_is_log::<Arc<T>>();
+        }
+    }
+}
+
\ No newline at end of file diff --git a/src/log/macros.rs.html b/src/log/macros.rs.html new file mode 100644 index 000000000..d0fedf9d1 --- /dev/null +++ b/src/log/macros.rs.html @@ -0,0 +1,739 @@ +macros.rs - source
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// The standard logging macro.
+///
+/// This macro will generically log with the specified `Level` and a
+/// `format!`-based argument list.
+///
+/// # Examples
+///
+/// ```
+/// use log::{log, Level};
+///
+/// # fn main() {
+/// let data = (42, "Forty-two");
+/// let private_data = "private";
+///
+/// log!(Level::Error, "Received errors: {}, {}", data.0, data.1);
+/// log!(target: "app_events", Level::Warn, "App warning: {}, {}, {}",
+///     data.0, data.1, private_data);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! log {
+    // log!(target: "my_target", Level::Info, key1:? = 42, key2 = true; "a {} event", "log");
+    (target: $target:expr, $lvl:expr, $($key:tt $(:$capture:tt)? $(= $value:expr)?),+; $($arg:tt)+) => ({
+        let lvl = $lvl;
+        if lvl <= $crate::STATIC_MAX_LEVEL && lvl <= $crate::max_level() {
+            $crate::__private_api::log::<&_>(
+                $crate::__private_api::format_args!($($arg)+),
+                lvl,
+                &($target, $crate::__private_api::module_path!(), $crate::__private_api::file!()),
+                $crate::__private_api::line!(),
+                &[$(($crate::__log_key!($key), $crate::__log_value!($key $(:$capture)* = $($value)*))),+]
+            );
+        }
+    });
+
+    // log!(target: "my_target", Level::Info, "a {} event", "log");
+    (target: $target:expr, $lvl:expr, $($arg:tt)+) => ({
+        let lvl = $lvl;
+        if lvl <= $crate::STATIC_MAX_LEVEL && lvl <= $crate::max_level() {
+            $crate::__private_api::log(
+                $crate::__private_api::format_args!($($arg)+),
+                lvl,
+                &($target, $crate::__private_api::module_path!(), $crate::__private_api::file!()),
+                $crate::__private_api::line!(),
+                (),
+            );
+        }
+    });
+
+    // log!(Level::Info, "a log event")
+    ($lvl:expr, $($arg:tt)+) => ($crate::log!(target: $crate::__private_api::module_path!(), $lvl, $($arg)+));
+}
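
A minimal usage sketch for the structured key-value arm matched first above; it assumes `log` is built with the `kv` feature and that some logger implementation has been installed, and the `target`, key names, and values are illustrative only:

use log::{log, Level};

fn main() {
    let user = "alice";
    let attempts = 3_u32;
    // `user:%` captures the value via Display, `attempts` via its default
    // ToValue conversion; everything after the `;` is an ordinary
    // format!-style argument list, exactly as in the non-kv arm.
    log!(target: "auth", Level::Warn, user:% = user, attempts = attempts;
        "login failed after {attempts} tries");
}

Without the `kv` feature, the same call falls through to the `__log_key!` / `__log_value!` stubs further down and stops at their `compile_error!` messages instead.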
+
+/// Logs a message at the error level.
+///
+/// # Examples
+///
+/// ```
+/// use log::error;
+///
+/// # fn main() {
+/// let (err_info, port) = ("No connection", 22);
+///
+/// error!("Error: {err_info} on port {port}");
+/// error!(target: "app_events", "App Error: {err_info}, Port: {port}");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! error {
+    // error!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+    // error!(target: "my_target", "a {} event", "log")
+    (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Error, $($arg)+));
+
+    // error!("a {} event", "log")
+    ($($arg:tt)+) => ($crate::log!($crate::Level::Error, $($arg)+))
+}
+
+/// Logs a message at the warn level.
+///
+/// # Examples
+///
+/// ```
+/// use log::warn;
+///
+/// # fn main() {
+/// let warn_description = "Invalid Input";
+///
+/// warn!("Warning! {warn_description}!");
+/// warn!(target: "input_events", "App received warning: {warn_description}");
+/// # }
+/// ```
+#[macro_export]
+macro_rules! warn {
+    // warn!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+    // warn!(target: "my_target", "a {} event", "log")
+    (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Warn, $($arg)+));
+
+    // warn!("a {} event", "log")
+    ($($arg:tt)+) => ($crate::log!($crate::Level::Warn, $($arg)+))
+}
+
+/// Logs a message at the info level.
+///
+/// # Examples
+///
+/// ```
+/// use log::info;
+///
+/// # fn main() {
+/// # struct Connection { port: u32, speed: f32 }
+/// let conn_info = Connection { port: 40, speed: 3.20 };
+///
+/// info!("Connected to port {} at {} Mb/s", conn_info.port, conn_info.speed);
+/// info!(target: "connection_events", "Successful connection, port: {}, speed: {}",
+///       conn_info.port, conn_info.speed);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! info {
+    // info!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+    // info!(target: "my_target", "a {} event", "log")
+    (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Info, $($arg)+));
+
+    // info!("a {} event", "log")
+    ($($arg:tt)+) => ($crate::log!($crate::Level::Info, $($arg)+))
+}
+
+/// Logs a message at the debug level.
+///
+/// # Examples
+///
+/// ```
+/// use log::debug;
+///
+/// # fn main() {
+/// # struct Position { x: f32, y: f32 }
+/// let pos = Position { x: 3.234, y: -1.223 };
+///
+/// debug!("New position: x: {}, y: {}", pos.x, pos.y);
+/// debug!(target: "app_events", "New position: x: {}, y: {}", pos.x, pos.y);
+/// # }
+/// ```
+#[macro_export]
+macro_rules! debug {
+    // debug!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+    // debug!(target: "my_target", "a {} event", "log")
+    (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Debug, $($arg)+));
+
+    // debug!("a {} event", "log")
+    ($($arg:tt)+) => ($crate::log!($crate::Level::Debug, $($arg)+))
+}
+
+/// Logs a message at the trace level.
+///
+/// # Examples
+///
+/// ```
+/// use log::trace;
+///
+/// # fn main() {
+/// # struct Position { x: f32, y: f32 }
+/// let pos = Position { x: 3.234, y: -1.223 };
+///
+/// trace!("Position is: x: {}, y: {}", pos.x, pos.y);
+/// trace!(target: "app_events", "x is {} and y is {}",
+///        if pos.x >= 0.0 { "positive" } else { "negative" },
+///        if pos.y >= 0.0 { "positive" } else { "negative" });
+/// # }
+/// ```
+#[macro_export]
+macro_rules! trace {
+    // trace!(target: "my_target", key1 = 42, key2 = true; "a {} event", "log")
+    // trace!(target: "my_target", "a {} event", "log")
+    (target: $target:expr, $($arg:tt)+) => ($crate::log!(target: $target, $crate::Level::Trace, $($arg)+));
+
+    // trace!("a {} event", "log")
+    ($($arg:tt)+) => ($crate::log!($crate::Level::Trace, $($arg)+))
+}
+
+/// Determines if a message logged at the specified level in that module will
+/// be logged.
+///
+/// This can be used to avoid expensive computation of log message arguments if
+/// the message would be ignored anyway.
+///
+/// # Examples
+///
+/// ```
+/// use log::Level::Debug;
+/// use log::{debug, log_enabled};
+///
+/// # fn foo() {
+/// if log_enabled!(Debug) {
+///     let data = expensive_call();
+///     debug!("expensive debug data: {} {}", data.x, data.y);
+/// }
+/// if log_enabled!(target: "Global", Debug) {
+///    let data = expensive_call();
+///    debug!(target: "Global", "expensive debug data: {} {}", data.x, data.y);
+/// }
+/// # }
+/// # struct Data { x: u32, y: u32 }
+/// # fn expensive_call() -> Data { Data { x: 0, y: 0 } }
+/// # fn main() {}
+/// ```
+#[macro_export]
+macro_rules! log_enabled {
+    (target: $target:expr, $lvl:expr) => {{
+        let lvl = $lvl;
+        lvl <= $crate::STATIC_MAX_LEVEL
+            && lvl <= $crate::max_level()
+            && $crate::__private_api::enabled(lvl, $target)
+    }};
+    ($lvl:expr) => {
+        $crate::log_enabled!(target: $crate::__private_api::module_path!(), $lvl)
+    };
+}
+
+// These macros use a pattern of #[cfg]s to produce nicer error
+// messages when log features aren't available
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv")]
+macro_rules! __log_key {
+    // key1 = 42
+    ($($args:ident)*) => {
+        $crate::__private_api::stringify!($($args)*)
+    };
+    // "key1" = 42
+    ($($args:expr)*) => {
+        $($args)*
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv"))]
+macro_rules! __log_key {
+    ($($args:tt)*) => {
+        compile_error!("key value support requires the `kv` feature of `log`")
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv")]
+macro_rules! __log_value {
+    // Entrypoint
+    ($key:tt = $args:expr) => {
+        $crate::__log_value!(($args):value)
+    };
+    ($key:tt :$capture:tt = $args:expr) => {
+        $crate::__log_value!(($args):$capture)
+    };
+    ($key:ident =) => {
+        $crate::__log_value!(($key):value)
+    };
+    ($key:ident :$capture:tt =) => {
+        $crate::__log_value!(($key):$capture)
+    };
+    // ToValue
+    (($args:expr):value) => {
+        $crate::__private_api::capture_to_value(&&$args)
+    };
+    // Debug
+    (($args:expr):?) => {
+        $crate::__private_api::capture_debug(&&$args)
+    };
+    (($args:expr):debug) => {
+        $crate::__private_api::capture_debug(&&$args)
+    };
+    // Display
+    (($args:expr):%) => {
+        $crate::__private_api::capture_display(&&$args)
+    };
+    (($args:expr):display) => {
+        $crate::__private_api::capture_display(&&$args)
+    };
+    // Error
+    (($args:expr):err) => {
+        $crate::__log_value_error!($args)
+    };
+    // sval::Value
+    (($args:expr):sval) => {
+        $crate::__log_value_sval!($args)
+    };
+    // serde::Serialize
+    (($args:expr):serde) => {
+        $crate::__log_value_serde!($args)
+    };
+}
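
To make the capture modifiers dispatched above concrete, here is a hedged sketch (the key names and values are made up; `:?` and `:%` only need the `kv` feature, while the `:err`, `:sval`, and `:serde` arms additionally require `kv_std`, `kv_sval`, or `kv_serde`):

use log::info;

fn main() {
    let path = "/health";
    let status = 503_u16;
    // `status:?` routes through capture_debug, `path:%` through
    // capture_display, and the bare `attempt = 1` through capture_to_value.
    info!(status:? = status, path:% = path, attempt = 1; "handled request");
}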
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv"))]
+macro_rules! __log_value {
+    ($($args:tt)*) => {
+        compile_error!("key value support requires the `kv` feature of `log`")
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_sval")]
+macro_rules! __log_value_sval {
+    ($args:expr) => {
+        $crate::__private_api::capture_sval(&&$args)
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_sval"))]
+macro_rules! __log_value_sval {
+    ($args:expr) => {
+        compile_error!("capturing values as `sval::Value` requires the `kv_sval` feature of `log`")
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_serde")]
+macro_rules! __log_value_serde {
+    ($args:expr) => {
+        $crate::__private_api::capture_serde(&&$args)
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_serde"))]
+macro_rules! __log_value_serde {
+    ($args:expr) => {
+        compile_error!(
+            "capturing values as `serde::Serialize` requites the `kv_serde` feature of `log`"
+        )
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(feature = "kv_std")]
+macro_rules! __log_value_error {
+    ($args:expr) => {
+        $crate::__private_api::capture_error(&$args)
+    };
+}
+
+#[doc(hidden)]
+#[macro_export]
+#[cfg(not(feature = "kv_std"))]
+macro_rules! __log_value_error {
+    ($args:expr) => {
+        compile_error!(
+            "capturing values as `std::error::Error` requites the `kv_std` feature of `log`"
+        )
+    };
+}
+
\ No newline at end of file diff --git a/src/rustc_hash/lib.rs.html b/src/rustc_hash/lib.rs.html new file mode 100644 index 000000000..4afae0d56 --- /dev/null +++ b/src/rustc_hash/lib.rs.html @@ -0,0 +1,297 @@ +lib.rs - source
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Fast, non-cryptographic hash used by rustc and Firefox.
+//!
+//! # Example
+//!
+//! ```rust
+//! # #[cfg(feature = "std")]
+//! # fn main() {
+//! use rustc_hash::FxHashMap;
+//! let mut map: FxHashMap<u32, u32> = FxHashMap::default();
+//! map.insert(22, 44);
+//! # }
+//! # #[cfg(not(feature = "std"))]
+//! # fn main() { }
+//! ```
+
+#![no_std]
+
+#[cfg(feature = "std")]
+extern crate std;
+
+use core::convert::TryInto;
+use core::default::Default;
+#[cfg(feature = "std")]
+use core::hash::BuildHasherDefault;
+use core::hash::Hasher;
+use core::mem::size_of;
+use core::ops::BitXor;
+#[cfg(feature = "std")]
+use std::collections::{HashMap, HashSet};
+
+/// Type alias for a hashmap using the `fx` hash algorithm.
+#[cfg(feature = "std")]
+pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
+
+/// Type alias for a hashmap using the `fx` hash algorithm.
+#[cfg(feature = "std")]
+pub type FxHashSet<V> = HashSet<V, BuildHasherDefault<FxHasher>>;
+
+/// A speedy hash algorithm for use within rustc. The standard library's
+/// hashmap defaults to SipHash, which isn't quite as speedy as we want. In the
+/// compiler we're not really worried about DOS attempts, so we use a fast
+/// non-cryptographic hash.
+///
+/// This is the same as the algorithm used by Firefox -- which is a homespun
+/// one not based on any widely-known algorithm -- though modified to produce
+/// 64-bit hash values instead of 32-bit hash values. It consistently
+/// out-performs an FNV-based hash within rustc itself -- the collision rate is
+/// similar or slightly worse than FNV, but the speed of the hash function
+/// itself is much higher because it works on up to 8 bytes at a time.
+pub struct FxHasher {
+    hash: usize,
+}
+
+#[cfg(target_pointer_width = "32")]
+const K: usize = 0x9e3779b9;
+#[cfg(target_pointer_width = "64")]
+const K: usize = 0x517cc1b727220a95;
+
+impl Default for FxHasher {
+    #[inline]
+    fn default() -> FxHasher {
+        FxHasher { hash: 0 }
+    }
+}
+
+impl FxHasher {
+    #[inline]
+    fn add_to_hash(&mut self, i: usize) {
+        self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K);
+    }
+}
+
+impl Hasher for FxHasher {
+    #[inline]
+    fn write(&mut self, mut bytes: &[u8]) {
+        #[cfg(target_pointer_width = "32")]
+        let read_usize = |bytes: &[u8]| u32::from_ne_bytes(bytes[..4].try_into().unwrap());
+        #[cfg(target_pointer_width = "64")]
+        let read_usize = |bytes: &[u8]| u64::from_ne_bytes(bytes[..8].try_into().unwrap());
+
+        let mut hash = FxHasher { hash: self.hash };
+        assert!(size_of::<usize>() <= 8);
+        while bytes.len() >= size_of::<usize>() {
+            hash.add_to_hash(read_usize(bytes) as usize);
+            bytes = &bytes[size_of::<usize>()..];
+        }
+        if (size_of::<usize>() > 4) && (bytes.len() >= 4) {
+            hash.add_to_hash(u32::from_ne_bytes(bytes[..4].try_into().unwrap()) as usize);
+            bytes = &bytes[4..];
+        }
+        if (size_of::<usize>() > 2) && bytes.len() >= 2 {
+            hash.add_to_hash(u16::from_ne_bytes(bytes[..2].try_into().unwrap()) as usize);
+            bytes = &bytes[2..];
+        }
+        if (size_of::<usize>() > 1) && bytes.len() >= 1 {
+            hash.add_to_hash(bytes[0] as usize);
+        }
+        self.hash = hash.hash;
+    }
+
+    #[inline]
+    fn write_u8(&mut self, i: u8) {
+        self.add_to_hash(i as usize);
+    }
+
+    #[inline]
+    fn write_u16(&mut self, i: u16) {
+        self.add_to_hash(i as usize);
+    }
+
+    #[inline]
+    fn write_u32(&mut self, i: u32) {
+        self.add_to_hash(i as usize);
+    }
+
+    #[cfg(target_pointer_width = "32")]
+    #[inline]
+    fn write_u64(&mut self, i: u64) {
+        self.add_to_hash(i as usize);
+        self.add_to_hash((i >> 32) as usize);
+    }
+
+    #[cfg(target_pointer_width = "64")]
+    #[inline]
+    fn write_u64(&mut self, i: u64) {
+        self.add_to_hash(i as usize);
+    }
+
+    #[inline]
+    fn write_usize(&mut self, i: usize) {
+        self.add_to_hash(i);
+    }
+
+    #[inline]
+    fn finish(&self) -> u64 {
+        self.hash as u64
+    }
+}
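
A usage sketch, assuming the crate's `std` feature (enabled by default) for the `FxHashSet` alias; the hasher itself only needs `core`:

use std::hash::{Hash, Hasher};

use rustc_hash::{FxHashSet, FxHasher};

fn main() {
    // Drive the hasher directly through the Hasher trait: write() mixes
    // usize-sized chunks first, then the 4-, 2- and 1-byte tails above.
    let mut hasher = FxHasher::default();
    "hello world".hash(&mut hasher);
    println!("fx hash = {:#x}", hasher.finish());

    // The same algorithm backs the FxHashMap / FxHashSet aliases via
    // BuildHasherDefault<FxHasher>.
    let mut seen: FxHashSet<&str> = FxHashSet::default();
    seen.insert("hello world");
    assert!(seen.contains("hello world"));
}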
+
\ No newline at end of file diff --git a/static.files/COPYRIGHT-23e9bde6c69aea69.txt b/static.files/COPYRIGHT-23e9bde6c69aea69.txt new file mode 100644 index 000000000..1447df792 --- /dev/null +++ b/static.files/COPYRIGHT-23e9bde6c69aea69.txt @@ -0,0 +1,50 @@ +# REUSE-IgnoreStart + +These documentation pages include resources by third parties. This copyright +file applies only to those resources. The following third party resources are +included, and carry their own copyright notices and license terms: + +* Fira Sans (FiraSans-Regular.woff2, FiraSans-Medium.woff2): + + Copyright (c) 2014, Mozilla Foundation https://mozilla.org/ + with Reserved Font Name Fira Sans. + + Copyright (c) 2014, Telefonica S.A. + + Licensed under the SIL Open Font License, Version 1.1. + See FiraSans-LICENSE.txt. + +* rustdoc.css, main.js, and playpen.js: + + Copyright 2015 The Rust Developers. + Licensed under the Apache License, Version 2.0 (see LICENSE-APACHE.txt) or + the MIT license (LICENSE-MIT.txt) at your option. + +* normalize.css: + + Copyright (c) Nicolas Gallagher and Jonathan Neal. + Licensed under the MIT license (see LICENSE-MIT.txt). + +* Source Code Pro (SourceCodePro-Regular.ttf.woff2, + SourceCodePro-Semibold.ttf.woff2, SourceCodePro-It.ttf.woff2): + + Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), + with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark + of Adobe Systems Incorporated in the United States and/or other countries. + + Licensed under the SIL Open Font License, Version 1.1. + See SourceCodePro-LICENSE.txt. + +* Source Serif 4 (SourceSerif4-Regular.ttf.woff2, SourceSerif4-Bold.ttf.woff2, + SourceSerif4-It.ttf.woff2): + + Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name + 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United + States and/or other countries. + + Licensed under the SIL Open Font License, Version 1.1. + See SourceSerif4-LICENSE.md. + +This copyright file is intended to be distributed with rustdoc output. + +# REUSE-IgnoreEnd diff --git a/static.files/FiraSans-LICENSE-db4b642586e02d97.txt b/static.files/FiraSans-LICENSE-db4b642586e02d97.txt new file mode 100644 index 000000000..d7e9c149b --- /dev/null +++ b/static.files/FiraSans-LICENSE-db4b642586e02d97.txt @@ -0,0 +1,98 @@ +// REUSE-IgnoreStart + +Digitized data copyright (c) 2012-2015, The Mozilla Foundation and Telefonica S.A. +with Reserved Font Name < Fira >, + +This Font Software is licensed under the SIL Open Font License, Version 1.1. +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. 
The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
+ +// REUSE-IgnoreEnd diff --git a/static.files/FiraSans-Medium-8f9a781e4970d388.woff2 b/static.files/FiraSans-Medium-8f9a781e4970d388.woff2 new file mode 100644 index 000000000..7a1e5fc54 Binary files /dev/null and b/static.files/FiraSans-Medium-8f9a781e4970d388.woff2 differ diff --git a/static.files/FiraSans-Regular-018c141bf0843ffd.woff2 b/static.files/FiraSans-Regular-018c141bf0843ffd.woff2 new file mode 100644 index 000000000..e766e06cc Binary files /dev/null and b/static.files/FiraSans-Regular-018c141bf0843ffd.woff2 differ diff --git a/static.files/LICENSE-APACHE-b91fa81cba47b86a.txt b/static.files/LICENSE-APACHE-b91fa81cba47b86a.txt new file mode 100644 index 000000000..16fe87b06 --- /dev/null +++ b/static.files/LICENSE-APACHE-b91fa81cba47b86a.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/static.files/LICENSE-MIT-65090b722b3f6c56.txt b/static.files/LICENSE-MIT-65090b722b3f6c56.txt new file mode 100644 index 000000000..31aa79387 --- /dev/null +++ b/static.files/LICENSE-MIT-65090b722b3f6c56.txt @@ -0,0 +1,23 @@ +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/static.files/NanumBarunGothic-0f09457c7a19b7c6.ttf.woff2 b/static.files/NanumBarunGothic-0f09457c7a19b7c6.ttf.woff2 new file mode 100644 index 000000000..1866ad4bc Binary files /dev/null and b/static.files/NanumBarunGothic-0f09457c7a19b7c6.ttf.woff2 differ diff --git a/static.files/NanumBarunGothic-LICENSE-18c5adf4b52b4041.txt b/static.files/NanumBarunGothic-LICENSE-18c5adf4b52b4041.txt new file mode 100644 index 000000000..4b3edc29e --- /dev/null +++ b/static.files/NanumBarunGothic-LICENSE-18c5adf4b52b4041.txt @@ -0,0 +1,103 @@ +// REUSE-IgnoreStart + +Copyright (c) 2010, NAVER Corporation (https://www.navercorp.com/), + +with Reserved Font Name Nanum, Naver Nanum, NanumGothic, Naver NanumGothic, +NanumMyeongjo, Naver NanumMyeongjo, NanumBrush, Naver NanumBrush, NanumPen, +Naver NanumPen, Naver NanumGothicEco, NanumGothicEco, Naver NanumMyeongjoEco, +NanumMyeongjoEco, Naver NanumGothicLight, NanumGothicLight, NanumBarunGothic, +Naver NanumBarunGothic, NanumSquareRound, NanumBarunPen, MaruBuri + +This Font Software is licensed under the SIL Open Font License, Version 1.1. 
+This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. 
The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. + +// REUSE-IgnoreEnd diff --git a/static.files/SourceCodePro-It-1cc31594bf4f1f79.ttf.woff2 b/static.files/SourceCodePro-It-1cc31594bf4f1f79.ttf.woff2 new file mode 100644 index 000000000..462c34efc Binary files /dev/null and b/static.files/SourceCodePro-It-1cc31594bf4f1f79.ttf.woff2 differ diff --git a/static.files/SourceCodePro-LICENSE-d180d465a756484a.txt b/static.files/SourceCodePro-LICENSE-d180d465a756484a.txt new file mode 100644 index 000000000..0d2941e14 --- /dev/null +++ b/static.files/SourceCodePro-LICENSE-d180d465a756484a.txt @@ -0,0 +1,97 @@ +// REUSE-IgnoreStart + +Copyright 2010, 2012 Adobe Systems Incorporated (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe Systems Incorporated in the United States and/or other countries. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. + +This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. 
+ +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
+ +// REUSE-IgnoreEnd diff --git a/static.files/SourceCodePro-Regular-562dcc5011b6de7d.ttf.woff2 b/static.files/SourceCodePro-Regular-562dcc5011b6de7d.ttf.woff2 new file mode 100644 index 000000000..10b558e0b Binary files /dev/null and b/static.files/SourceCodePro-Regular-562dcc5011b6de7d.ttf.woff2 differ diff --git a/static.files/SourceCodePro-Semibold-d899c5a5c4aeb14a.ttf.woff2 b/static.files/SourceCodePro-Semibold-d899c5a5c4aeb14a.ttf.woff2 new file mode 100644 index 000000000..5ec64eef0 Binary files /dev/null and b/static.files/SourceCodePro-Semibold-d899c5a5c4aeb14a.ttf.woff2 differ diff --git a/static.files/SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2 b/static.files/SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2 new file mode 100644 index 000000000..181a07f63 Binary files /dev/null and b/static.files/SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2 differ diff --git a/static.files/SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2 b/static.files/SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2 new file mode 100644 index 000000000..2ae08a7be Binary files /dev/null and b/static.files/SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2 differ diff --git a/static.files/SourceSerif4-LICENSE-3bb119e13b1258b7.md b/static.files/SourceSerif4-LICENSE-3bb119e13b1258b7.md new file mode 100644 index 000000000..175fa4f47 --- /dev/null +++ b/static.files/SourceSerif4-LICENSE-3bb119e13b1258b7.md @@ -0,0 +1,98 @@ + + +Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries. +Copyright 2014 - 2023 Adobe (http://www.adobe.com/), with Reserved Font Name ‘Source’. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries. + +This Font Software is licensed under the SIL Open Font License, Version 1.1. + +This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL + + +----------------------------------------------------------- +SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 +----------------------------------------------------------- + +PREAMBLE +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + +DEFINITIONS +"Font Software" refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. + +"Reserved Font Name" refers to any names specified as such after the +copyright statement(s). + +"Original Version" refers to the collection of Font Software components as +distributed by the Copyright Holder(s). 
+ +"Modified Version" refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +"Author" refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + +PERMISSION & CONDITIONS +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1) Neither the Font Software nor any of its individual components, +in Original or Modified Versions, may be sold by itself. + +2) Original or Modified Versions of the Font Software may be bundled, +redistributed and/or sold with any software, provided that each copy +contains the above copyright notice and this license. These can be +included either as stand-alone text files, human-readable headers or +in the appropriate machine-readable metadata fields within text or +binary files as long as those fields can be easily viewed by the user. + +3) No Modified Version of the Font Software may use the Reserved Font +Name(s) unless explicit written permission is granted by the corresponding +Copyright Holder. This restriction only applies to the primary font name as +presented to the users. + +4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font +Software shall not be used to promote, endorse or advertise any +Modified Version, except to acknowledge the contribution(s) of the +Copyright Holder(s) and the Author(s) or with their explicit written +permission. + +5) The Font Software, modified or unmodified, in part or in whole, +must be distributed entirely under this license, and must not be +distributed under any other license. The requirement for fonts to +remain under this license does not apply to any document created +using the Font Software. + +TERMINATION +This license becomes null and void if any of the above conditions are +not met. + +DISCLAIMER +THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT +OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL +DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM +OTHER DEALINGS IN THE FONT SOFTWARE. 
+ + diff --git a/static.files/SourceSerif4-Regular-46f98efaafac5295.ttf.woff2 b/static.files/SourceSerif4-Regular-46f98efaafac5295.ttf.woff2 new file mode 100644 index 000000000..0263fc304 Binary files /dev/null and b/static.files/SourceSerif4-Regular-46f98efaafac5295.ttf.woff2 differ diff --git a/static.files/favicon-2c020d218678b618.svg b/static.files/favicon-2c020d218678b618.svg new file mode 100644 index 000000000..8b34b5119 --- /dev/null +++ b/static.files/favicon-2c020d218678b618.svg @@ -0,0 +1,24 @@ + + + + + diff --git a/static.files/favicon-32x32-422f7d1d52889060.png b/static.files/favicon-32x32-422f7d1d52889060.png new file mode 100644 index 000000000..69b8613ce Binary files /dev/null and b/static.files/favicon-32x32-422f7d1d52889060.png differ diff --git a/static.files/main-20a3ad099b048cf2.js b/static.files/main-20a3ad099b048cf2.js new file mode 100644 index 000000000..133116e4d --- /dev/null +++ b/static.files/main-20a3ad099b048cf2.js @@ -0,0 +1,11 @@ +"use strict";window.RUSTDOC_TOOLTIP_HOVER_MS=300;window.RUSTDOC_TOOLTIP_HOVER_EXIT_MS=450;function resourcePath(basename,extension){return getVar("root-path")+basename+getVar("resource-suffix")+extension}function hideMain(){addClass(document.getElementById(MAIN_ID),"hidden")}function showMain(){removeClass(document.getElementById(MAIN_ID),"hidden")}function blurHandler(event,parentElem,hideCallback){if(!parentElem.contains(document.activeElement)&&!parentElem.contains(event.relatedTarget)){hideCallback()}}window.rootPath=getVar("root-path");window.currentCrate=getVar("current-crate");function setMobileTopbar(){const mobileTopbar=document.querySelector(".mobile-topbar");const locationTitle=document.querySelector(".sidebar h2.location");if(mobileTopbar){const mobileTitle=document.createElement("h2");mobileTitle.className="location";if(hasClass(document.querySelector(".rustdoc"),"crate")){mobileTitle.innerHTML=`Crate ${window.currentCrate}`}else if(locationTitle){mobileTitle.innerHTML=locationTitle.innerHTML}mobileTopbar.appendChild(mobileTitle)}}function getVirtualKey(ev){if("key"in ev&&typeof ev.key!=="undefined"){return ev.key}const c=ev.charCode||ev.keyCode;if(c===27){return"Escape"}return String.fromCharCode(c)}const MAIN_ID="main-content";const SETTINGS_BUTTON_ID="settings-menu";const ALTERNATIVE_DISPLAY_ID="alternative-display";const NOT_DISPLAYED_ID="not-displayed";const HELP_BUTTON_ID="help-button";function getSettingsButton(){return document.getElementById(SETTINGS_BUTTON_ID)}function getHelpButton(){return document.getElementById(HELP_BUTTON_ID)}function getNakedUrl(){return window.location.href.split("?")[0].split("#")[0]}function insertAfter(newNode,referenceNode){referenceNode.parentNode.insertBefore(newNode,referenceNode.nextSibling)}function getOrCreateSection(id,classes){let el=document.getElementById(id);if(!el){el=document.createElement("section");el.id=id;el.className=classes;insertAfter(el,document.getElementById(MAIN_ID))}return el}function getAlternativeDisplayElem(){return getOrCreateSection(ALTERNATIVE_DISPLAY_ID,"content hidden")}function getNotDisplayedElem(){return getOrCreateSection(NOT_DISPLAYED_ID,"hidden")}function switchDisplayedElement(elemToDisplay){const el=getAlternativeDisplayElem();if(el.children.length>0){getNotDisplayedElem().appendChild(el.firstElementChild)}if(elemToDisplay===null){addClass(el,"hidden");showMain();return}el.appendChild(elemToDisplay);hideMain();removeClass(el,"hidden")}function browserSupportsHistoryApi(){return window.history&&typeof 
window.history.pushState==="function"}function preLoadCss(cssUrl){const link=document.createElement("link");link.href=cssUrl;link.rel="preload";link.as="style";document.getElementsByTagName("head")[0].appendChild(link)}(function(){const isHelpPage=window.location.pathname.endsWith("/help.html");function loadScript(url,errorCallback){const script=document.createElement("script");script.src=url;if(errorCallback!==undefined){script.onerror=errorCallback}document.head.append(script)}getSettingsButton().onclick=event=>{if(event.ctrlKey||event.altKey||event.metaKey){return}window.hideAllModals(false);addClass(getSettingsButton(),"rotate");event.preventDefault();loadScript(getVar("static-root-path")+getVar("settings-js"));setTimeout(()=>{const themes=getVar("themes").split(",");for(const theme of themes){if(theme!==""){preLoadCss(getVar("root-path")+theme+".css")}}},0)};window.searchState={loadingText:"Loading search results...",input:document.getElementsByClassName("search-input")[0],outputElement:()=>{let el=document.getElementById("search");if(!el){el=document.createElement("section");el.id="search";getNotDisplayedElem().appendChild(el)}return el},title:document.title,titleBeforeSearch:document.title,timeout:null,currentTab:0,focusedByTab:[null,null,null],clearInputTimeout:()=>{if(searchState.timeout!==null){clearTimeout(searchState.timeout);searchState.timeout=null}},isDisplayed:()=>searchState.outputElement().parentElement.id===ALTERNATIVE_DISPLAY_ID,focus:()=>{searchState.input.focus()},defocus:()=>{searchState.input.blur()},showResults:search=>{if(search===null||typeof search==="undefined"){search=searchState.outputElement()}switchDisplayedElement(search);searchState.mouseMovedAfterSearch=false;document.title=searchState.title},removeQueryParameters:()=>{document.title=searchState.titleBeforeSearch;if(browserSupportsHistoryApi()){history.replaceState(null,"",getNakedUrl()+window.location.hash)}},hideResults:()=>{switchDisplayedElement(null);searchState.removeQueryParameters()},getQueryStringParams:()=>{const params={};window.location.search.substring(1).split("&").map(s=>{const pair=s.split("=").map(x=>x.replace(/\+/g," "));params[decodeURIComponent(pair[0])]=typeof pair[1]==="undefined"?null:decodeURIComponent(pair[1])});return params},setup:()=>{const search_input=searchState.input;if(!searchState.input){return}let searchLoaded=false;function sendSearchForm(){document.getElementsByClassName("search-form")[0].submit()}function loadSearch(){if(!searchLoaded){searchLoaded=true;loadScript(getVar("static-root-path")+getVar("search-js"),sendSearchForm);loadScript(resourcePath("search-index",".js"),sendSearchForm)}}search_input.addEventListener("focus",()=>{search_input.origPlaceholder=search_input.placeholder;search_input.placeholder="Type your search here.";loadSearch()});if(search_input.value!==""){loadSearch()}const params=searchState.getQueryStringParams();if(params.search!==undefined){searchState.setLoadingSearch();loadSearch()}},setLoadingSearch:()=>{const search=searchState.outputElement();search.innerHTML="

"+searchState.loadingText+"

";searchState.showResults(search)},descShards:new Map(),loadDesc:async function({descShard,descIndex}){if(descShard.promise===null){descShard.promise=new Promise((resolve,reject)=>{descShard.resolve=resolve;const ds=descShard;const fname=`${ds.crate}-desc-${ds.shard}-`;const url=resourcePath(`search.desc/${descShard.crate}/${fname}`,".js",);loadScript(url,reject)})}const list=await descShard.promise;return list[descIndex]},loadedDescShard:function(crate,shard,data){this.descShards.get(crate)[shard].resolve(data.split("\n"))},};const toggleAllDocsId="toggle-all-docs";let savedHash="";function handleHashes(ev){if(ev!==null&&searchState.isDisplayed()&&ev.newURL){switchDisplayedElement(null);const hash=ev.newURL.slice(ev.newURL.indexOf("#")+1);if(browserSupportsHistoryApi()){history.replaceState(null,"",getNakedUrl()+window.location.search+"#"+hash)}const elem=document.getElementById(hash);if(elem){elem.scrollIntoView()}}const pageId=window.location.hash.replace(/^#/,"");if(savedHash!==pageId){savedHash=pageId;if(pageId!==""){expandSection(pageId)}}if(savedHash.startsWith("impl-")){const splitAt=savedHash.indexOf("/");if(splitAt!==-1){const implId=savedHash.slice(0,splitAt);const assocId=savedHash.slice(splitAt+1);const implElem=document.getElementById(implId);if(implElem&&implElem.parentElement.tagName==="SUMMARY"&&implElem.parentElement.parentElement.tagName==="DETAILS"){onEachLazy(implElem.parentElement.parentElement.querySelectorAll(`[id^="${assocId}"]`),item=>{const numbered=/([^-]+)-([0-9]+)/.exec(item.id);if(item.id===assocId||(numbered&&numbered[1]===assocId)){openParentDetails(item);item.scrollIntoView();setTimeout(()=>{window.location.replace("#"+item.id)},0)}},)}}}}function onHashChange(ev){hideSidebar();handleHashes(ev)}function openParentDetails(elem){while(elem){if(elem.tagName==="DETAILS"){elem.open=true}elem=elem.parentNode}}function expandSection(id){openParentDetails(document.getElementById(id))}function handleEscape(ev){searchState.clearInputTimeout();searchState.hideResults();ev.preventDefault();searchState.defocus();window.hideAllModals(true)}function handleShortcut(ev){const disableShortcuts=getSettingValue("disable-shortcuts")==="true";if(ev.ctrlKey||ev.altKey||ev.metaKey||disableShortcuts){return}if(document.activeElement.tagName==="INPUT"&&document.activeElement.type!=="checkbox"&&document.activeElement.type!=="radio"){switch(getVirtualKey(ev)){case"Escape":handleEscape(ev);break}}else{switch(getVirtualKey(ev)){case"Escape":handleEscape(ev);break;case"s":case"S":case"/":ev.preventDefault();searchState.focus();break;case"+":ev.preventDefault();expandAllDocs();break;case"-":ev.preventDefault();collapseAllDocs();break;case"?":showHelp();break;default:break}}}document.addEventListener("keypress",handleShortcut);document.addEventListener("keydown",handleShortcut);function addSidebarItems(){if(!window.SIDEBAR_ITEMS){return}const sidebar=document.getElementsByClassName("sidebar-elems")[0];function block(shortty,id,longty){const filtered=window.SIDEBAR_ITEMS[shortty];if(!filtered){return}const modpath=hasClass(document.querySelector(".rustdoc"),"mod")?"../":"";const h3=document.createElement("h3");h3.innerHTML=`${longty}`;const ul=document.createElement("ul");ul.className="block "+shortty;for(const name of filtered){let path;if(shortty==="mod"){path=`${modpath}${name}/index.html`}else{path=`${modpath}${shortty}.${name}.html`}let current_page=document.location.href.toString();if(current_page.endsWith("/")){current_page+="index.html"}const 
link=document.createElement("a");link.href=path;if(path===current_page){link.className="current"}link.textContent=name;const li=document.createElement("li");li.appendChild(link);ul.appendChild(li)}sidebar.appendChild(h3);sidebar.appendChild(ul)}if(sidebar){block("primitive","primitives","Primitive Types");block("mod","modules","Modules");block("macro","macros","Macros");block("struct","structs","Structs");block("enum","enums","Enums");block("constant","constants","Constants");block("static","static","Statics");block("trait","traits","Traits");block("fn","functions","Functions");block("type","types","Type Aliases");block("union","unions","Unions");block("foreigntype","foreign-types","Foreign Types");block("keyword","keywords","Keywords");block("opaque","opaque-types","Opaque Types");block("attr","attributes","Attribute Macros");block("derive","derives","Derive Macros");block("traitalias","trait-aliases","Trait Aliases")}}window.register_implementors=imp=>{const implementors=document.getElementById("implementors-list");const synthetic_implementors=document.getElementById("synthetic-implementors-list");const inlined_types=new Set();const TEXT_IDX=0;const SYNTHETIC_IDX=1;const TYPES_IDX=2;if(synthetic_implementors){onEachLazy(synthetic_implementors.getElementsByClassName("impl"),el=>{const aliases=el.getAttribute("data-aliases");if(!aliases){return}aliases.split(",").forEach(alias=>{inlined_types.add(alias)})})}let currentNbImpls=implementors.getElementsByClassName("impl").length;const traitName=document.querySelector(".main-heading h1 > .trait").textContent;const baseIdName="impl-"+traitName+"-";const libs=Object.getOwnPropertyNames(imp);const script=document.querySelector("script[data-ignore-extern-crates]");const ignoreExternCrates=new Set((script?script.getAttribute("data-ignore-extern-crates"):"").split(","),);for(const lib of libs){if(lib===window.currentCrate||ignoreExternCrates.has(lib)){continue}const structs=imp[lib];struct_loop:for(const struct of structs){const list=struct[SYNTHETIC_IDX]?synthetic_implementors:implementors;if(struct[SYNTHETIC_IDX]){for(const struct_type of struct[TYPES_IDX]){if(inlined_types.has(struct_type)){continue struct_loop}inlined_types.add(struct_type)}}const code=document.createElement("h3");code.innerHTML=struct[TEXT_IDX];addClass(code,"code-header");onEachLazy(code.getElementsByTagName("a"),elem=>{const href=elem.getAttribute("href");if(href&&!href.startsWith("#")&&!/^(?:[a-z+]+:)?\/\//.test(href)){elem.setAttribute("href",window.rootPath+href)}});const currentId=baseIdName+currentNbImpls;const anchor=document.createElement("a");anchor.href="#"+currentId;addClass(anchor,"anchor");const display=document.createElement("div");display.id=currentId;addClass(display,"impl");display.appendChild(anchor);display.appendChild(code);list.appendChild(display);currentNbImpls+=1}}};if(window.pending_implementors){window.register_implementors(window.pending_implementors)}window.register_type_impls=imp=>{if(!imp||!imp[window.currentCrate]){return}window.pending_type_impls=null;const idMap=new Map();let implementations=document.getElementById("implementations-list");let trait_implementations=document.getElementById("trait-implementations-list");let trait_implementations_header=document.getElementById("trait-implementations");const script=document.querySelector("script[data-self-path]");const selfPath=script?script.getAttribute("data-self-path"):null;const mainContent=document.querySelector("#main-content");const sidebarSection=document.querySelector(".sidebar 
section");let methods=document.querySelector(".sidebar .block.method");let associatedTypes=document.querySelector(".sidebar .block.associatedtype");let associatedConstants=document.querySelector(".sidebar .block.associatedconstant");let sidebarTraitList=document.querySelector(".sidebar .block.trait-implementation");for(const impList of imp[window.currentCrate]){const types=impList.slice(2);const text=impList[0];const isTrait=impList[1]!==0;const traitName=impList[1];if(types.indexOf(selfPath)===-1){continue}let outputList=isTrait?trait_implementations:implementations;if(outputList===null){const outputListName=isTrait?"Trait Implementations":"Implementations";const outputListId=isTrait?"trait-implementations-list":"implementations-list";const outputListHeaderId=isTrait?"trait-implementations":"implementations";const outputListHeader=document.createElement("h2");outputListHeader.id=outputListHeaderId;outputListHeader.innerText=outputListName;outputList=document.createElement("div");outputList.id=outputListId;if(isTrait){const link=document.createElement("a");link.href=`#${outputListHeaderId}`;link.innerText="Trait Implementations";const h=document.createElement("h3");h.appendChild(link);trait_implementations=outputList;trait_implementations_header=outputListHeader;sidebarSection.appendChild(h);sidebarTraitList=document.createElement("ul");sidebarTraitList.className="block trait-implementation";sidebarSection.appendChild(sidebarTraitList);mainContent.appendChild(outputListHeader);mainContent.appendChild(outputList)}else{implementations=outputList;if(trait_implementations){mainContent.insertBefore(outputListHeader,trait_implementations_header);mainContent.insertBefore(outputList,trait_implementations_header)}else{const mainContent=document.querySelector("#main-content");mainContent.appendChild(outputListHeader);mainContent.appendChild(outputList)}}}const template=document.createElement("template");template.innerHTML=text;onEachLazy(template.content.querySelectorAll("a"),elem=>{const href=elem.getAttribute("href");if(href&&!href.startsWith("#")&&!/^(?:[a-z+]+:)?\/\//.test(href)){elem.setAttribute("href",window.rootPath+href)}});onEachLazy(template.content.querySelectorAll("[id]"),el=>{let i=0;if(idMap.has(el.id)){i=idMap.get(el.id)}else if(document.getElementById(el.id)){i=1;while(document.getElementById(`${el.id}-${2 * i}`)){i=2*i}while(document.getElementById(`${el.id}-${i}`)){i+=1}}if(i!==0){const oldHref=`#${el.id}`;const newHref=`#${el.id}-${i}`;el.id=`${el.id}-${i}`;onEachLazy(template.content.querySelectorAll("a[href]"),link=>{if(link.getAttribute("href")===oldHref){link.href=newHref}})}idMap.set(el.id,i+1)});const templateAssocItems=template.content.querySelectorAll("section.tymethod, "+"section.method, section.associatedtype, section.associatedconstant");if(isTrait){const li=document.createElement("li");const a=document.createElement("a");a.href=`#${template.content.querySelector(".impl").id}`;a.textContent=traitName;li.appendChild(a);sidebarTraitList.append(li)}else{onEachLazy(templateAssocItems,item=>{let block=hasClass(item,"associatedtype")?associatedTypes:(hasClass(item,"associatedconstant")?associatedConstants:(methods));if(!block){const blockTitle=hasClass(item,"associatedtype")?"Associated Types":(hasClass(item,"associatedconstant")?"Associated Constants":("Methods"));const blockClass=hasClass(item,"associatedtype")?"associatedtype":(hasClass(item,"associatedconstant")?"associatedconstant":("method"));const blockHeader=document.createElement("h3");const 
blockLink=document.createElement("a");blockLink.href="#implementations";blockLink.innerText=blockTitle;blockHeader.appendChild(blockLink);block=document.createElement("ul");block.className=`block ${blockClass}`;const insertionReference=methods||sidebarTraitList;if(insertionReference){const insertionReferenceH=insertionReference.previousElementSibling;sidebarSection.insertBefore(blockHeader,insertionReferenceH);sidebarSection.insertBefore(block,insertionReferenceH)}else{sidebarSection.appendChild(blockHeader);sidebarSection.appendChild(block)}if(hasClass(item,"associatedtype")){associatedTypes=block}else if(hasClass(item,"associatedconstant")){associatedConstants=block}else{methods=block}}const li=document.createElement("li");const a=document.createElement("a");a.innerText=item.id.split("-")[0].split(".")[1];a.href=`#${item.id}`;li.appendChild(a);block.appendChild(li)})}outputList.appendChild(template.content)}for(const list of[methods,associatedTypes,associatedConstants,sidebarTraitList]){if(!list){continue}const newChildren=Array.prototype.slice.call(list.children);newChildren.sort((a,b)=>{const aI=a.innerText;const bI=b.innerText;return aIbI?1:0});list.replaceChildren(...newChildren)}};if(window.pending_type_impls){window.register_type_impls(window.pending_type_impls)}function addSidebarCrates(){if(!window.ALL_CRATES){return}const sidebarElems=document.getElementsByClassName("sidebar-elems")[0];if(!sidebarElems){return}const h3=document.createElement("h3");h3.innerHTML="Crates";const ul=document.createElement("ul");ul.className="block crate";for(const crate of window.ALL_CRATES){const link=document.createElement("a");link.href=window.rootPath+crate+"/index.html";link.textContent=crate;const li=document.createElement("li");if(window.rootPath!=="./"&&crate===window.currentCrate){li.className="current"}li.appendChild(link);ul.appendChild(li)}sidebarElems.appendChild(h3);sidebarElems.appendChild(ul)}function expandAllDocs(){const innerToggle=document.getElementById(toggleAllDocsId);removeClass(innerToggle,"will-expand");onEachLazy(document.getElementsByClassName("toggle"),e=>{if(!hasClass(e,"type-contents-toggle")&&!hasClass(e,"more-examples-toggle")){e.open=true}});innerToggle.title="collapse all docs";innerToggle.children[0].innerText="\u2212"}function collapseAllDocs(){const innerToggle=document.getElementById(toggleAllDocsId);addClass(innerToggle,"will-expand");onEachLazy(document.getElementsByClassName("toggle"),e=>{if(e.parentNode.id!=="implementations-list"||(!hasClass(e,"implementors-toggle")&&!hasClass(e,"type-contents-toggle"))){e.open=false}});innerToggle.title="expand all docs";innerToggle.children[0].innerText="+"}function toggleAllDocs(){const innerToggle=document.getElementById(toggleAllDocsId);if(!innerToggle){return}if(hasClass(innerToggle,"will-expand")){expandAllDocs()}else{collapseAllDocs()}}(function(){const toggles=document.getElementById(toggleAllDocsId);if(toggles){toggles.onclick=toggleAllDocs}const hideMethodDocs=getSettingValue("auto-hide-method-docs")==="true";const hideImplementations=getSettingValue("auto-hide-trait-implementations")==="true";const hideLargeItemContents=getSettingValue("auto-hide-large-items")!=="false";function setImplementorsTogglesOpen(id,open){const 
list=document.getElementById(id);if(list!==null){onEachLazy(list.getElementsByClassName("implementors-toggle"),e=>{e.open=open})}}if(hideImplementations){setImplementorsTogglesOpen("trait-implementations-list",false);setImplementorsTogglesOpen("blanket-implementations-list",false)}onEachLazy(document.getElementsByClassName("toggle"),e=>{if(!hideLargeItemContents&&hasClass(e,"type-contents-toggle")){e.open=true}if(hideMethodDocs&&hasClass(e,"method-toggle")){e.open=false}})}());window.rustdoc_add_line_numbers_to_examples=()=>{onEachLazy(document.getElementsByClassName("rust-example-rendered"),x=>{const parent=x.parentNode;const line_numbers=parent.querySelectorAll(".example-line-numbers");if(line_numbers.length>0){return}const count=x.textContent.split("\n").length;const elems=[];for(let i=0;i{onEachLazy(document.getElementsByClassName("rust-example-rendered"),x=>{const parent=x.parentNode;const line_numbers=parent.querySelectorAll(".example-line-numbers");for(const node of line_numbers){parent.removeChild(node)}})};if(getSettingValue("line-numbers")==="true"){window.rustdoc_add_line_numbers_to_examples()}function showSidebar(){window.hideAllModals(false);const sidebar=document.getElementsByClassName("sidebar")[0];addClass(sidebar,"shown")}function hideSidebar(){const sidebar=document.getElementsByClassName("sidebar")[0];removeClass(sidebar,"shown")}window.addEventListener("resize",()=>{if(window.CURRENT_TOOLTIP_ELEMENT){const base=window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE;const force_visible=base.TOOLTIP_FORCE_VISIBLE;hideTooltip(false);if(force_visible){showTooltip(base);base.TOOLTIP_FORCE_VISIBLE=true}}});const mainElem=document.getElementById(MAIN_ID);if(mainElem){mainElem.addEventListener("click",hideSidebar)}onEachLazy(document.querySelectorAll("a[href^='#']"),el=>{el.addEventListener("click",()=>{expandSection(el.hash.slice(1));hideSidebar()})});onEachLazy(document.querySelectorAll(".toggle > summary:not(.hideme)"),el=>{el.addEventListener("click",e=>{if(e.target.tagName!=="SUMMARY"&&e.target.tagName!=="A"){e.preventDefault()}})});function showTooltip(e){const notable_ty=e.getAttribute("data-notable-ty");if(!window.NOTABLE_TRAITS&¬able_ty){const data=document.getElementById("notable-traits-data");if(data){window.NOTABLE_TRAITS=JSON.parse(data.innerText)}else{throw new Error("showTooltip() called with notable without any notable traits!")}}if(window.CURRENT_TOOLTIP_ELEMENT&&window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE===e){clearTooltipHoverTimeout(window.CURRENT_TOOLTIP_ELEMENT);return}window.hideAllModals(false);const wrapper=document.createElement("div");if(notable_ty){wrapper.innerHTML="
"+window.NOTABLE_TRAITS[notable_ty]+"
"}else{if(e.getAttribute("title")!==null){e.setAttribute("data-title",e.getAttribute("title"));e.removeAttribute("title")}if(e.getAttribute("data-title")!==null){const titleContent=document.createElement("div");titleContent.className="content";titleContent.appendChild(document.createTextNode(e.getAttribute("data-title")));wrapper.appendChild(titleContent)}}wrapper.className="tooltip popover";const focusCatcher=document.createElement("div");focusCatcher.setAttribute("tabindex","0");focusCatcher.onfocus=hideTooltip;wrapper.appendChild(focusCatcher);const pos=e.getBoundingClientRect();wrapper.style.top=(pos.top+window.scrollY+pos.height)+"px";wrapper.style.left=0;wrapper.style.right="auto";wrapper.style.visibility="hidden";const body=document.getElementsByTagName("body")[0];body.appendChild(wrapper);const wrapperPos=wrapper.getBoundingClientRect();const finalPos=pos.left+window.scrollX-wrapperPos.width+24;if(finalPos>0){wrapper.style.left=finalPos+"px"}else{wrapper.style.setProperty("--popover-arrow-offset",(wrapperPos.right-pos.right+4)+"px",)}wrapper.style.visibility="";window.CURRENT_TOOLTIP_ELEMENT=wrapper;window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE=e;clearTooltipHoverTimeout(window.CURRENT_TOOLTIP_ELEMENT);wrapper.onpointerenter=ev=>{if(ev.pointerType!=="mouse"){return}clearTooltipHoverTimeout(e)};wrapper.onpointerleave=ev=>{if(ev.pointerType!=="mouse"){return}if(!e.TOOLTIP_FORCE_VISIBLE&&!e.contains(ev.relatedTarget)){setTooltipHoverTimeout(e,false);addClass(wrapper,"fade-out")}}}function setTooltipHoverTimeout(element,show){clearTooltipHoverTimeout(element);if(!show&&!window.CURRENT_TOOLTIP_ELEMENT){return}if(show&&window.CURRENT_TOOLTIP_ELEMENT){return}if(window.CURRENT_TOOLTIP_ELEMENT&&window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE!==element){return}element.TOOLTIP_HOVER_TIMEOUT=setTimeout(()=>{if(show){showTooltip(element)}else if(!element.TOOLTIP_FORCE_VISIBLE){hideTooltip(false)}},show?window.RUSTDOC_TOOLTIP_HOVER_MS:window.RUSTDOC_TOOLTIP_HOVER_EXIT_MS)}function clearTooltipHoverTimeout(element){if(element.TOOLTIP_HOVER_TIMEOUT!==undefined){removeClass(window.CURRENT_TOOLTIP_ELEMENT,"fade-out");clearTimeout(element.TOOLTIP_HOVER_TIMEOUT);delete element.TOOLTIP_HOVER_TIMEOUT}}function tooltipBlurHandler(event){if(window.CURRENT_TOOLTIP_ELEMENT&&!window.CURRENT_TOOLTIP_ELEMENT.contains(document.activeElement)&&!window.CURRENT_TOOLTIP_ELEMENT.contains(event.relatedTarget)&&!window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(document.activeElement)&&!window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.contains(event.relatedTarget)){setTimeout(()=>hideTooltip(false),0)}}function hideTooltip(focus){if(window.CURRENT_TOOLTIP_ELEMENT){if(window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE){if(focus){window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.focus()}window.CURRENT_TOOLTIP_ELEMENT.TOOLTIP_BASE.TOOLTIP_FORCE_VISIBLE=false}const body=document.getElementsByTagName("body")[0];body.removeChild(window.CURRENT_TOOLTIP_ELEMENT);clearTooltipHoverTimeout(window.CURRENT_TOOLTIP_ELEMENT);window.CURRENT_TOOLTIP_ELEMENT=null}}onEachLazy(document.getElementsByClassName("tooltip"),e=>{e.onclick=()=>{e.TOOLTIP_FORCE_VISIBLE=e.TOOLTIP_FORCE_VISIBLE?false:true;if(window.CURRENT_TOOLTIP_ELEMENT&&!e.TOOLTIP_FORCE_VISIBLE){hideTooltip(true)}else{showTooltip(e);window.CURRENT_TOOLTIP_ELEMENT.setAttribute("tabindex","0");window.CURRENT_TOOLTIP_ELEMENT.focus();window.CURRENT_TOOLTIP_ELEMENT.onblur=tooltipBlurHandler}return 
false};e.onpointerenter=ev=>{if(ev.pointerType!=="mouse"){return}setTooltipHoverTimeout(e,true)};e.onpointermove=ev=>{if(ev.pointerType!=="mouse"){return}setTooltipHoverTimeout(e,true)};e.onpointerleave=ev=>{if(ev.pointerType!=="mouse"){return}if(!e.TOOLTIP_FORCE_VISIBLE&&window.CURRENT_TOOLTIP_ELEMENT&&!window.CURRENT_TOOLTIP_ELEMENT.contains(ev.relatedTarget)){setTooltipHoverTimeout(e,false);addClass(window.CURRENT_TOOLTIP_ELEMENT,"fade-out")}}});const sidebar_menu_toggle=document.getElementsByClassName("sidebar-menu-toggle")[0];if(sidebar_menu_toggle){sidebar_menu_toggle.addEventListener("click",()=>{const sidebar=document.getElementsByClassName("sidebar")[0];if(!hasClass(sidebar,"shown")){showSidebar()}else{hideSidebar()}})}function helpBlurHandler(event){blurHandler(event,getHelpButton(),window.hidePopoverMenus)}function buildHelpMenu(){const book_info=document.createElement("span");const channel=getVar("channel");book_info.className="top";book_info.innerHTML=`You can find more information in \ +the rustdoc book.`;const shortcuts=[["?","Show this help dialog"],["S / /","Focus the search field"],["↑","Move up in search results"],["↓","Move down in search results"],["← / →","Switch result tab (when results focused)"],["⏎","Go to active search result"],["+","Expand all sections"],["-","Collapse all sections"],].map(x=>"
"+x[0].split(" ").map((y,index)=>((index&1)===0?""+y+"":" "+y+" ")).join("")+"
"+x[1]+"
").join("");const div_shortcuts=document.createElement("div");addClass(div_shortcuts,"shortcuts");div_shortcuts.innerHTML="

Keyboard Shortcuts

"+shortcuts+"
";const infos=[`For a full list of all search features, take a look here.`,"Prefix searches with a type followed by a colon (e.g., fn:) to \ + restrict the search to a given item kind.","Accepted kinds are: fn, mod, struct, \ + enum, trait, type, macro, \ + and const.","Search functions by type signature (e.g., vec -> usize or \ + -> vec or String, enum:Cow -> bool)","You can look for items with an exact name by putting double quotes around \ + your request: \"string\"","Look for functions that accept or return \ + slices and \ + arrays by writing \ + square brackets (e.g., -> [u8] or [] -> Option)","Look for items inside another one by searching for a path: vec::Vec",].map(x=>"

"+x+"

").join("");const div_infos=document.createElement("div");addClass(div_infos,"infos");div_infos.innerHTML="

Search Tricks

"+infos;const rustdoc_version=document.createElement("span");rustdoc_version.className="bottom";const rustdoc_version_code=document.createElement("code");rustdoc_version_code.innerText="rustdoc "+getVar("rustdoc-version");rustdoc_version.appendChild(rustdoc_version_code);const container=document.createElement("div");if(!isHelpPage){container.className="popover"}container.id="help";container.style.display="none";const side_by_side=document.createElement("div");side_by_side.className="side-by-side";side_by_side.appendChild(div_shortcuts);side_by_side.appendChild(div_infos);container.appendChild(book_info);container.appendChild(side_by_side);container.appendChild(rustdoc_version);if(isHelpPage){const help_section=document.createElement("section");help_section.appendChild(container);document.getElementById("main-content").appendChild(help_section);container.style.display="block"}else{const help_button=getHelpButton();help_button.appendChild(container);container.onblur=helpBlurHandler;help_button.onblur=helpBlurHandler;help_button.children[0].onblur=helpBlurHandler}return container}window.hideAllModals=switchFocus=>{hideSidebar();window.hidePopoverMenus();hideTooltip(switchFocus)};window.hidePopoverMenus=()=>{onEachLazy(document.querySelectorAll(".search-form .popover"),elem=>{elem.style.display="none"})};function getHelpMenu(buildNeeded){let menu=getHelpButton().querySelector(".popover");if(!menu&&buildNeeded){menu=buildHelpMenu()}return menu}function showHelp(){getHelpButton().querySelector("a").focus();const menu=getHelpMenu(true);if(menu.style.display==="none"){window.hideAllModals();menu.style.display=""}}if(isHelpPage){showHelp();document.querySelector(`#${HELP_BUTTON_ID} > a`).addEventListener("click",event=>{const target=event.target;if(target.tagName!=="A"||target.parentElement.id!==HELP_BUTTON_ID||event.ctrlKey||event.altKey||event.metaKey){return}event.preventDefault()})}else{document.querySelector(`#${HELP_BUTTON_ID} > a`).addEventListener("click",event=>{const target=event.target;if(target.tagName!=="A"||target.parentElement.id!==HELP_BUTTON_ID||event.ctrlKey||event.altKey||event.metaKey){return}event.preventDefault();const menu=getHelpMenu(true);const shouldShowHelp=menu.style.display==="none";if(shouldShowHelp){showHelp()}else{window.hidePopoverMenus()}})}setMobileTopbar();addSidebarItems();addSidebarCrates();onHashChange(null);window.addEventListener("hashchange",onHashChange);searchState.setup()}());(function(){const SIDEBAR_MIN=100;const SIDEBAR_MAX=500;const RUSTDOC_MOBILE_BREAKPOINT=700;const BODY_MIN=400;const SIDEBAR_VANISH_THRESHOLD=SIDEBAR_MIN/2;const sidebarButton=document.getElementById("sidebar-button");if(sidebarButton){sidebarButton.addEventListener("click",e=>{removeClass(document.documentElement,"hide-sidebar");updateLocalStorage("hide-sidebar","false");if(document.querySelector(".rustdoc.src")){window.rustdocToggleSrcSidebar()}e.preventDefault()})}let currentPointerId=null;let desiredSidebarSize=null;let pendingSidebarResizingFrame=false;const resizer=document.querySelector(".sidebar-resizer");const sidebar=document.querySelector(".sidebar");if(!resizer||!sidebar){return}const isSrcPage=hasClass(document.body,"src");function 
hideSidebar(){if(isSrcPage){window.rustdocCloseSourceSidebar();updateLocalStorage("src-sidebar-width",null);document.documentElement.style.removeProperty("--src-sidebar-width");sidebar.style.removeProperty("--src-sidebar-width");resizer.style.removeProperty("--src-sidebar-width")}else{addClass(document.documentElement,"hide-sidebar");updateLocalStorage("hide-sidebar","true");updateLocalStorage("desktop-sidebar-width",null);document.documentElement.style.removeProperty("--desktop-sidebar-width");sidebar.style.removeProperty("--desktop-sidebar-width");resizer.style.removeProperty("--desktop-sidebar-width")}}function showSidebar(){if(isSrcPage){window.rustdocShowSourceSidebar()}else{removeClass(document.documentElement,"hide-sidebar");updateLocalStorage("hide-sidebar","false")}}function changeSidebarSize(size){if(isSrcPage){updateLocalStorage("src-sidebar-width",size);sidebar.style.setProperty("--src-sidebar-width",size+"px");resizer.style.setProperty("--src-sidebar-width",size+"px")}else{updateLocalStorage("desktop-sidebar-width",size);sidebar.style.setProperty("--desktop-sidebar-width",size+"px");resizer.style.setProperty("--desktop-sidebar-width",size+"px")}}function isSidebarHidden(){return isSrcPage?!hasClass(document.documentElement,"src-sidebar-expanded"):hasClass(document.documentElement,"hide-sidebar")}function resize(e){if(currentPointerId===null||currentPointerId!==e.pointerId){return}e.preventDefault();const pos=e.clientX-3;if(pos=SIDEBAR_MIN){if(isSidebarHidden()){showSidebar()}const constrainedPos=Math.min(pos,window.innerWidth-BODY_MIN,SIDEBAR_MAX);changeSidebarSize(constrainedPos);desiredSidebarSize=constrainedPos;if(pendingSidebarResizingFrame!==false){clearTimeout(pendingSidebarResizingFrame)}pendingSidebarResizingFrame=setTimeout(()=>{if(currentPointerId===null||pendingSidebarResizingFrame===false){return}pendingSidebarResizingFrame=false;document.documentElement.style.setProperty("--resizing-sidebar-width",desiredSidebarSize+"px",)},100)}}window.addEventListener("resize",()=>{if(window.innerWidth=(window.innerWidth-BODY_MIN)){changeSidebarSize(window.innerWidth-BODY_MIN)}else if(desiredSidebarSize!==null&&desiredSidebarSize>SIDEBAR_MIN){changeSidebarSize(desiredSidebarSize)}});function stopResize(e){if(currentPointerId===null){return}if(e){e.preventDefault()}desiredSidebarSize=sidebar.getBoundingClientRect().width;removeClass(resizer,"active");window.removeEventListener("pointermove",resize,false);window.removeEventListener("pointerup",stopResize,false);removeClass(document.documentElement,"sidebar-resizing");document.documentElement.style.removeProperty("--resizing-sidebar-width");if(resizer.releasePointerCapture){resizer.releasePointerCapture(currentPointerId);currentPointerId=null}}function initResize(e){if(currentPointerId!==null||e.altKey||e.ctrlKey||e.metaKey||e.button!==0){return}if(resizer.setPointerCapture){resizer.setPointerCapture(e.pointerId);if(!resizer.hasPointerCapture(e.pointerId)){resizer.releasePointerCapture(e.pointerId);return}currentPointerId=e.pointerId}window.hideAllModals(false);e.preventDefault();window.addEventListener("pointermove",resize,false);window.addEventListener("pointercancel",stopResize,false);window.addEventListener("pointerup",stopResize,false);addClass(resizer,"active");addClass(document.documentElement,"sidebar-resizing");const 
pos=e.clientX-sidebar.offsetLeft-3;document.documentElement.style.setProperty("--resizing-sidebar-width",pos+"px");desiredSidebarSize=null}resizer.addEventListener("pointerdown",initResize,false)}());(function(){let reset_button_timeout=null;const but=document.getElementById("copy-path");if(!but){return}but.onclick=()=>{const parent=but.parentElement;const path=[];onEach(parent.childNodes,child=>{if(child.tagName==="A"){path.push(child.textContent)}});const el=document.createElement("textarea");el.value=path.join("::");el.setAttribute("readonly","");el.style.position="absolute";el.style.left="-9999px";document.body.appendChild(el);el.select();document.execCommand("copy");document.body.removeChild(el);but.classList.add("clicked");if(reset_button_timeout!==null){window.clearTimeout(reset_button_timeout)}function reset_button(){reset_button_timeout=null;but.classList.remove("clicked")}reset_button_timeout=window.setTimeout(reset_button,1000)}}()) \ No newline at end of file diff --git a/static.files/normalize-76eba96aa4d2e634.css b/static.files/normalize-76eba96aa4d2e634.css new file mode 100644 index 000000000..469959f13 --- /dev/null +++ b/static.files/normalize-76eba96aa4d2e634.css @@ -0,0 +1,2 @@ + /*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */ +html{line-height:1.15;-webkit-text-size-adjust:100%}body{margin:0}main{display:block}h1{font-size:2em;margin:0.67em 0}hr{box-sizing:content-box;height:0;overflow:visible}pre{font-family:monospace,monospace;font-size:1em}a{background-color:transparent}abbr[title]{border-bottom:none;text-decoration:underline;text-decoration:underline dotted}b,strong{font-weight:bolder}code,kbd,samp{font-family:monospace,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-0.25em}sup{top:-0.5em}img{border-style:none}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;line-height:1.15;margin:0}button,input{overflow:visible}button,select{text-transform:none}[type="button"],[type="reset"],[type="submit"],button{-webkit-appearance:button}[type="button"]::-moz-focus-inner,[type="reset"]::-moz-focus-inner,[type="submit"]::-moz-focus-inner,button::-moz-focus-inner{border-style:none;padding:0}[type="button"]:-moz-focusring,[type="reset"]:-moz-focusring,[type="submit"]:-moz-focusring,button:-moz-focusring{outline:1px dotted ButtonText}fieldset{padding:0.35em 0.75em 0.625em}legend{box-sizing:border-box;color:inherit;display:table;max-width:100%;padding:0;white-space:normal}progress{vertical-align:baseline}textarea{overflow:auto}[type="checkbox"],[type="radio"]{box-sizing:border-box;padding:0}[type="number"]::-webkit-inner-spin-button,[type="number"]::-webkit-outer-spin-button{height:auto}[type="search"]{-webkit-appearance:textfield;outline-offset:-2px}[type="search"]::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}details{display:block}summary{display:list-item}template{display:none}[hidden]{display:none} \ No newline at end of file diff --git a/static.files/noscript-df360f571f6edeae.css b/static.files/noscript-df360f571f6edeae.css new file mode 100644 index 000000000..4c310ae52 --- /dev/null +++ b/static.files/noscript-df360f571f6edeae.css @@ -0,0 +1 @@ + #main-content .attributes{margin-left:0 !important;}#copy-path,#sidebar-button,.sidebar-resizer{display:none !important;}nav.sub{display:none;}.src 
.sidebar{display:none;}.notable-traits{display:none;}:root,:root:not([data-theme]){--main-background-color:white;--main-color:black;--settings-input-color:#2196f3;--settings-input-border-color:#717171;--settings-button-color:#000;--settings-button-border-focus:#717171;--sidebar-background-color:#f5f5f5;--sidebar-background-color-hover:#e0e0e0;--code-block-background-color:#f5f5f5;--scrollbar-track-background-color:#dcdcdc;--scrollbar-thumb-background-color:rgba(36,37,39,0.6);--scrollbar-color:rgba(36,37,39,0.6) #d9d9d9;--headings-border-bottom-color:#ddd;--border-color:#e0e0e0;--button-background-color:#fff;--right-side-color:grey;--code-attribute-color:#999;--toggles-color:#999;--toggle-filter:none;--mobile-sidebar-menu-filter:none;--search-input-focused-border-color:#66afe9;--copy-path-button-color:#999;--copy-path-img-filter:invert(50%);--copy-path-img-hover-filter:invert(35%);--codeblock-error-hover-color:rgb(255,0,0);--codeblock-error-color:rgba(255,0,0,.5);--codeblock-ignore-hover-color:rgb(255,142,0);--codeblock-ignore-color:rgba(255,142,0,.6);--warning-border-color:#ff8e00;--type-link-color:#ad378a;--trait-link-color:#6e4fc9;--assoc-item-link-color:#3873ad;--function-link-color:#ad7c37;--macro-link-color:#068000;--keyword-link-color:#3873ad;--mod-link-color:#3873ad;--link-color:#3873ad;--sidebar-link-color:#356da4;--sidebar-current-link-background-color:#fff;--search-result-link-focus-background-color:#ccc;--search-result-border-color:#aaa3;--search-color:#000;--search-error-code-background-color:#d0cccc;--search-results-alias-color:#000;--search-results-grey-color:#999;--search-tab-title-count-color:#888;--search-tab-button-not-selected-border-top-color:#e6e6e6;--search-tab-button-not-selected-background:#e6e6e6;--search-tab-button-selected-border-top-color:#0089ff;--search-tab-button-selected-background:#fff;--settings-menu-filter:none;--stab-background-color:#fff5d6;--stab-code-color:#000;--code-highlight-kw-color:#8959a8;--code-highlight-kw-2-color:#4271ae;--code-highlight-lifetime-color:#b76514;--code-highlight-prelude-color:#4271ae;--code-highlight-prelude-val-color:#c82829;--code-highlight-number-color:#718c00;--code-highlight-string-color:#718c00;--code-highlight-literal-color:#c82829;--code-highlight-attribute-color:#c82829;--code-highlight-self-color:#c82829;--code-highlight-macro-color:#3e999f;--code-highlight-question-mark-color:#ff9011;--code-highlight-comment-color:#8e908c;--code-highlight-doc-comment-color:#4d4d4c;--src-line-numbers-span-color:#c67e2d;--src-line-number-highlighted-background-color:#fdffd3;--test-arrow-color:#f5f5f5;--test-arrow-background-color:rgba(78,139,202,0.2);--test-arrow-hover-color:#f5f5f5;--test-arrow-hover-background-color:rgb(78,139,202);--target-background-color:#fdffd3;--target-border-color:#ad7c37;--kbd-color:#000;--kbd-background:#fafbfc;--kbd-box-shadow-color:#c6cbd1;--rust-logo-filter:initial;--crate-search-div-filter:invert(100%) sepia(0%) saturate(4223%) hue-rotate(289deg) brightness(114%) contrast(76%);--crate-search-div-hover-filter:invert(44%) sepia(18%) saturate(23%) hue-rotate(317deg) brightness(96%) 
contrast(93%);--crate-search-hover-border:#717171;--src-sidebar-background-selected:#fff;--src-sidebar-background-hover:#e0e0e0;--table-alt-row-background-color:#f5f5f5;--codeblock-link-background:#eee;--scrape-example-toggle-line-background:#ccc;--scrape-example-toggle-line-hover-background:#999;--scrape-example-code-line-highlight:#fcffd6;--scrape-example-code-line-highlight-focus:#f6fdb0;--scrape-example-help-border-color:#555;--scrape-example-help-color:#333;--scrape-example-help-hover-border-color:#000;--scrape-example-help-hover-color:#000;--scrape-example-code-wrapper-background-start:rgba(255,255,255,1);--scrape-example-code-wrapper-background-end:rgba(255,255,255,0);--sidebar-resizer-hover:hsl(207,90%,66%);--sidebar-resizer-active:hsl(207,90%,54%);}@media (prefers-color-scheme:dark){:root,:root:not([data-theme]){--main-background-color:#353535;--main-color:#ddd;--settings-input-color:#2196f3;--settings-input-border-color:#999;--settings-button-color:#000;--settings-button-border-focus:#ffb900;--sidebar-background-color:#505050;--sidebar-background-color-hover:#676767;--code-block-background-color:#2A2A2A;--scrollbar-track-background-color:#717171;--scrollbar-thumb-background-color:rgba(32,34,37,.6);--scrollbar-color:rgba(32,34,37,.6) #5a5a5a;--headings-border-bottom-color:#d2d2d2;--border-color:#e0e0e0;--button-background-color:#f0f0f0;--right-side-color:grey;--code-attribute-color:#999;--toggles-color:#999;--toggle-filter:invert(100%);--mobile-sidebar-menu-filter:invert(100%);--search-input-focused-border-color:#008dfd;--copy-path-button-color:#999;--copy-path-img-filter:invert(50%);--copy-path-img-hover-filter:invert(65%);--codeblock-error-hover-color:rgb(255,0,0);--codeblock-error-color:rgba(255,0,0,.5);--codeblock-ignore-hover-color:rgb(255,142,0);--codeblock-ignore-color:rgba(255,142,0,.6);--warning-border-color:#ff8e00;--type-link-color:#2dbfb8;--trait-link-color:#b78cf2;--assoc-item-link-color:#d2991d;--function-link-color:#2bab63;--macro-link-color:#09bd00;--keyword-link-color:#d2991d;--mod-link-color:#d2991d;--link-color:#d2991d;--sidebar-link-color:#fdbf35;--sidebar-current-link-background-color:#444;--search-result-link-focus-background-color:#616161;--search-result-border-color:#aaa3;--search-color:#111;--search-error-code-background-color:#484848;--search-results-alias-color:#fff;--search-results-grey-color:#ccc;--search-tab-title-count-color:#888;--search-tab-button-not-selected-border-top-color:#252525;--search-tab-button-not-selected-background:#252525;--search-tab-button-selected-border-top-color:#0089ff;--search-tab-button-selected-background:#353535;--stab-background-color:#314559;--stab-code-color:#e6e1cf;--code-highlight-kw-color:#ab8ac1;--code-highlight-kw-2-color:#769acb;--code-highlight-lifetime-color:#d97f26;--code-highlight-prelude-color:#769acb;--code-highlight-prelude-val-color:#ee6868;--code-highlight-number-color:#83a300;--code-highlight-string-color:#83a300;--code-highlight-literal-color:#ee6868;--code-highlight-attribute-color:#ee6868;--code-highlight-self-color:#ee6868;--code-highlight-macro-color:#3e999f;--code-highlight-question-mark-color:#ff9011;--code-highlight-comment-color:#8d8d8b;--code-highlight-doc-comment-color:#8ca375;--src-line-numbers-span-color:#3b91e2;--src-line-number-highlighted-background-color:#0a042f;--test-arrow-color:#dedede;--test-arrow-background-color:rgba(78,139,202,0.2);--test-arrow-hover-color:#dedede;--test-arrow-hover-background-color:#4e8bca;--target-background-color:#494a3d;--target-border-color:#bb7410;--kbd-color:#
000;--kbd-background:#fafbfc;--kbd-box-shadow-color:#c6cbd1;--rust-logo-filter:drop-shadow(1px 0 0px #fff) drop-shadow(0 1px 0 #fff) drop-shadow(-1px 0 0 #fff) drop-shadow(0 -1px 0 #fff);--crate-search-div-filter:invert(94%) sepia(0%) saturate(721%) hue-rotate(255deg) brightness(90%) contrast(90%);--crate-search-div-hover-filter:invert(69%) sepia(60%) saturate(6613%) hue-rotate(184deg) brightness(100%) contrast(91%);--crate-search-hover-border:#2196f3;--src-sidebar-background-selected:#333;--src-sidebar-background-hover:#444;--table-alt-row-background-color:#2a2a2a;--codeblock-link-background:#333;--scrape-example-toggle-line-background:#999;--scrape-example-toggle-line-hover-background:#c5c5c5;--scrape-example-code-line-highlight:#5b3b01;--scrape-example-code-line-highlight-focus:#7c4b0f;--scrape-example-help-border-color:#aaa;--scrape-example-help-color:#eee;--scrape-example-help-hover-border-color:#fff;--scrape-example-help-hover-color:#fff;--scrape-example-code-wrapper-background-start:rgba(53,53,53,1);--scrape-example-code-wrapper-background-end:rgba(53,53,53,0);--sidebar-resizer-hover:hsl(207,30%,54%);--sidebar-resizer-active:hsl(207,90%,54%);}} \ No newline at end of file diff --git a/static.files/rust-logo-151179464ae7ed46.svg b/static.files/rust-logo-151179464ae7ed46.svg new file mode 100644 index 000000000..62424d8ff --- /dev/null +++ b/static.files/rust-logo-151179464ae7ed46.svg @@ -0,0 +1,61 @@ + + + diff --git a/static.files/rustdoc-dd39b87e5fcfba68.css b/static.files/rustdoc-dd39b87e5fcfba68.css new file mode 100644 index 000000000..77f898322 --- /dev/null +++ b/static.files/rustdoc-dd39b87e5fcfba68.css @@ -0,0 +1,46 @@ + :root{--nav-sub-mobile-padding:8px;--search-typename-width:6.75rem;--desktop-sidebar-width:200px;--src-sidebar-width:300px;--desktop-sidebar-z-index:100;}@font-face {font-family:'Fira Sans';font-style:normal;font-weight:400;src:local('Fira Sans'),url("FiraSans-Regular-018c141bf0843ffd.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Fira Sans';font-style:normal;font-weight:500;src:local('Fira Sans Medium'),url("FiraSans-Medium-8f9a781e4970d388.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Serif 4';font-style:normal;font-weight:400;src:local('Source Serif 4'),url("SourceSerif4-Regular-46f98efaafac5295.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Serif 4';font-style:italic;font-weight:400;src:local('Source Serif 4 Italic'),url("SourceSerif4-It-acdfaf1a8af734b1.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Serif 4';font-style:normal;font-weight:700;src:local('Source Serif 4 Bold'),url("SourceSerif4-Bold-a2c9cd1067f8b328.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Code Pro';font-style:normal;font-weight:400;src:url("SourceCodePro-Regular-562dcc5011b6de7d.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Code Pro';font-style:italic;font-weight:400;src:url("SourceCodePro-It-1cc31594bf4f1f79.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'Source Code Pro';font-style:normal;font-weight:600;src:url("SourceCodePro-Semibold-d899c5a5c4aeb14a.ttf.woff2") format("woff2");font-display:swap;}@font-face {font-family:'NanumBarunGothic';src:url("NanumBarunGothic-0f09457c7a19b7c6.ttf.woff2") format("woff2");font-display:swap;unicode-range:U+AC00-D7AF,U+1100-11FF,U+3130-318F,U+A960-A97F,U+D7B0-D7FF;}*{box-sizing:border-box;}body{font:1rem/1.5 "Source Serif 
4",NanumBarunGothic,serif;margin:0;position:relative;overflow-wrap:break-word;overflow-wrap:anywhere;font-feature-settings:"kern","liga";background-color:var(--main-background-color);color:var(--main-color);}h1{font-size:1.5rem;}h2{font-size:1.375rem;}h3{font-size:1.25rem;}h1,h2,h3,h4,h5,h6{font-weight:500;}h1,h2,h3,h4{margin:25px 0 15px 0;padding-bottom:6px;}.docblock h3,.docblock h4,h5,h6{margin:15px 0 5px 0;}.docblock>h2:first-child,.docblock>h3:first-child,.docblock>h4:first-child,.docblock>h5:first-child,.docblock>h6:first-child{margin-top:0;}.main-heading h1{margin:0;padding:0;flex-grow:1;overflow-wrap:break-word;overflow-wrap:anywhere;}.main-heading{display:flex;flex-wrap:wrap;padding-bottom:6px;margin-bottom:15px;}.content h2,.top-doc .docblock>h3,.top-doc .docblock>h4{border-bottom:1px solid var(--headings-border-bottom-color);}h1,h2{line-height:1.25;padding-top:3px;padding-bottom:9px;}h3.code-header{font-size:1.125rem;}h4.code-header{font-size:1rem;}.code-header{font-weight:600;margin:0;padding:0;white-space:pre-wrap;}#crate-search,h1,h2,h3,h4,h5,h6,.sidebar,.mobile-topbar,.search-input,.search-results .result-name,.item-name>a,.out-of-band,span.since,a.src,#help-button>a,summary.hideme,.scraped-example-list,ul.all-items{font-family:"Fira Sans",Arial,NanumBarunGothic,sans-serif;}#toggle-all-docs,a.anchor,.section-header a,#src-sidebar a,.rust a,.sidebar h2 a,.sidebar h3 a,.mobile-topbar h2 a,h1 a,.search-results a,.stab,.result-name i{color:var(--main-color);}span.enum,a.enum,span.struct,a.struct,span.union,a.union,span.primitive,a.primitive,span.type,a.type,span.foreigntype,a.foreigntype{color:var(--type-link-color);}span.trait,a.trait,span.traitalias,a.traitalias{color:var(--trait-link-color);}span.associatedtype,a.associatedtype,span.constant,a.constant,span.static,a.static{color:var(--assoc-item-link-color);}span.fn,a.fn,span.method,a.method,span.tymethod,a.tymethod{color:var(--function-link-color);}span.attr,a.attr,span.derive,a.derive,span.macro,a.macro{color:var(--macro-link-color);}span.mod,a.mod{color:var(--mod-link-color);}span.keyword,a.keyword{color:var(--keyword-link-color);}a{color:var(--link-color);text-decoration:none;}ol,ul{padding-left:24px;}ul ul,ol ul,ul ol,ol ol{margin-bottom:.625em;}p,.docblock>.warning{margin:0 0 .75em 0;}p:last-child,.docblock>.warning:last-child{margin:0;}button{padding:1px 6px;cursor:pointer;}button#toggle-all-docs{padding:0;background:none;border:none;-webkit-appearance:none;opacity:1;}.rustdoc{display:flex;flex-direction:row;flex-wrap:nowrap;}main{position:relative;flex-grow:1;padding:10px 15px 40px 45px;min-width:0;}.src main{padding:15px;}.width-limiter{max-width:960px;margin-right:auto;}details:not(.toggle) summary{margin-bottom:.6em;}code,pre,a.test-arrow,.code-header{font-family:"Source Code Pro",monospace;}.docblock code,.docblock-short code{border-radius:3px;padding:0 0.125em;}.docblock pre code,.docblock-short pre code{padding:0;}pre{padding:14px;line-height:1.5;}pre.item-decl{overflow-x:auto;}.item-decl .type-contents-toggle{contain:initial;}.src .content pre{padding:20px;}.rustdoc.src .example-wrap pre.src-line-numbers{padding:20px 0 20px 4px;}img{max-width:100%;}.logo-container{line-height:0;display:block;}.rust-logo{filter:var(--rust-logo-filter);}.sidebar{font-size:0.875rem;flex:0 0 var(--desktop-sidebar-width);width:var(--desktop-sidebar-width);overflow-y:scroll;overscroll-behavior:contain;position:sticky;height:100vh;top:0;left:0;z-index:var(--desktop-sidebar-z-index);}.rustdoc.src 
.sidebar{flex-basis:50px;width:50px;border-right:1px solid;overflow-x:hidden;overflow-y:hidden;}.hide-sidebar .sidebar,.hide-sidebar .sidebar-resizer{display:none;}.sidebar-resizer{touch-action:none;width:9px;cursor:col-resize;z-index:calc(var(--desktop-sidebar-z-index) + 1);position:fixed;height:100%;left:calc(var(--desktop-sidebar-width) + 1px);}.rustdoc.src .sidebar-resizer{left:49px;}.src-sidebar-expanded .src .sidebar-resizer{left:var(--src-sidebar-width);}.sidebar-resizing{-moz-user-select:none;-webkit-user-select:none;-ms-user-select:none;user-select:none;}.sidebar-resizing*{cursor:col-resize !important;}.sidebar-resizing .sidebar{position:fixed;}.sidebar-resizing>body{padding-left:var(--resizing-sidebar-width);}.sidebar-resizer:hover,.sidebar-resizer:active,.sidebar-resizer:focus,.sidebar-resizer.active{width:10px;margin:0;left:var(--desktop-sidebar-width);border-left:solid 1px var(--sidebar-resizer-hover);}.src-sidebar-expanded .rustdoc.src .sidebar-resizer:hover,.src-sidebar-expanded .rustdoc.src .sidebar-resizer:active,.src-sidebar-expanded .rustdoc.src .sidebar-resizer:focus,.src-sidebar-expanded .rustdoc.src .sidebar-resizer.active{left:calc(var(--src-sidebar-width) - 1px);}@media (pointer:coarse){.sidebar-resizer{display:none !important;}}.sidebar-resizer.active{padding:0 140px;width:2px;margin-left:-140px;border-left:none;}.sidebar-resizer.active:before{border-left:solid 2px var(--sidebar-resizer-active);display:block;height:100%;content:"";}.sidebar,.mobile-topbar,.sidebar-menu-toggle,#src-sidebar{background-color:var(--sidebar-background-color);}.src .sidebar>*{visibility:hidden;}.src-sidebar-expanded .src .sidebar{overflow-y:auto;flex-basis:var(--src-sidebar-width);width:var(--src-sidebar-width);}.src-sidebar-expanded .src .sidebar>*{visibility:visible;}#all-types{margin-top:1em;}*{scrollbar-width:initial;scrollbar-color:var(--scrollbar-color);}.sidebar{scrollbar-width:thin;scrollbar-color:var(--scrollbar-color);}::-webkit-scrollbar{width:12px;}.sidebar::-webkit-scrollbar{width:8px;}::-webkit-scrollbar-track{-webkit-box-shadow:inset 0;background-color:var(--scrollbar-track-background-color);}.sidebar::-webkit-scrollbar-track{background-color:var(--scrollbar-track-background-color);}::-webkit-scrollbar-thumb,.sidebar::-webkit-scrollbar-thumb{background-color:var(--scrollbar-thumb-background-color);}.hidden{display:none !important;}.logo-container>img{height:48px;width:48px;}ul.block,.block li{padding:0;margin:0;list-style:none;}.sidebar-elems a,.sidebar>h2 a{display:block;padding:0.25rem;margin-left:-0.25rem;margin-right:0.25rem;}.sidebar h2{overflow-wrap:anywhere;padding:0;margin:0.7rem 0;}.sidebar h3{font-size:1.125rem;padding:0;margin:0;}.sidebar-elems,.sidebar>.version,.sidebar>h2{padding-left:24px;}.sidebar a{color:var(--sidebar-link-color);}.sidebar .current,.sidebar .current a,.sidebar-crate a.logo-container:hover+h2 a,.sidebar a:hover:not(.logo-container){background-color:var(--sidebar-current-link-background-color);}.sidebar-elems .block{margin-bottom:2em;}.sidebar-elems .block li a{white-space:nowrap;text-overflow:ellipsis;overflow:hidden;}.sidebar-crate{display:flex;align-items:center;justify-content:center;margin:14px 32px 1rem;row-gap:10px;column-gap:32px;flex-wrap:wrap;}.sidebar-crate h2{flex-grow:1;margin:0 -8px;align-self:start;}.sidebar-crate .logo-container{margin:0 -16px 0 -16px;text-align:center;}.sidebar-crate h2 a{display:block;margin:0 calc(-24px + 0.25rem) 0 -0.2rem;padding:calc((16px - 0.57rem ) / 2 ) 0.25rem;padding-left:0.2rem;}.sidebar-crate h2 
.version{display:block;font-weight:normal;font-size:1rem;overflow-wrap:break-word;}.sidebar-crate+.version{margin-top:-1rem;margin-bottom:1rem;}.mobile-topbar{display:none;}.rustdoc .example-wrap{display:flex;position:relative;margin-bottom:10px;}.rustdoc .example-wrap:last-child{margin-bottom:0px;}.rustdoc .example-wrap pre{margin:0;flex-grow:1;}.rustdoc:not(.src) .example-wrap pre{overflow:auto hidden;}.rustdoc .example-wrap pre.example-line-numbers,.rustdoc .example-wrap pre.src-line-numbers{flex-grow:0;min-width:fit-content;overflow:initial;text-align:right;-webkit-user-select:none;user-select:none;padding:14px 8px;color:var(--src-line-numbers-span-color);}.rustdoc .example-wrap pre.src-line-numbers{padding:14px 0;}.src-line-numbers a,.src-line-numbers span{color:var(--src-line-numbers-span-color);padding:0 8px;}.src-line-numbers :target{background-color:transparent;border-right:none;padding:0 8px;}.src-line-numbers .line-highlighted{background-color:var(--src-line-number-highlighted-background-color);}.search-loading{text-align:center;}.docblock-short{overflow-wrap:break-word;overflow-wrap:anywhere;}.docblock :not(pre)>code,.docblock-short code{white-space:pre-wrap;}.top-doc .docblock h2{font-size:1.375rem;}.top-doc .docblock h3{font-size:1.25rem;}.top-doc .docblock h4,.top-doc .docblock h5{font-size:1.125rem;}.top-doc .docblock h6{font-size:1rem;}.docblock h5{font-size:1rem;}.docblock h6{font-size:0.875rem;}.docblock{margin-left:24px;position:relative;}.docblock>:not(.more-examples-toggle):not(.example-wrap){max-width:100%;overflow-x:auto;}.out-of-band{flex-grow:0;font-size:1.125rem;}.docblock code,.docblock-short code,pre,.rustdoc.src .example-wrap{background-color:var(--code-block-background-color);}#main-content{position:relative;}.docblock table{margin:.5em 0;border-collapse:collapse;}.docblock table td,.docblock table th{padding:.5em;border:1px solid var(--border-color);}.docblock table tbody tr:nth-child(2n){background:var(--table-alt-row-background-color);}div.where{white-space:pre-wrap;font-size:0.875rem;}.item-info{display:block;margin-left:24px;}.item-info code{font-size:0.875rem;}#main-content>.item-info{margin-left:0;}nav.sub{flex-grow:1;flex-flow:row nowrap;margin:4px 0 25px 0;display:flex;align-items:center;}.search-form{position:relative;display:flex;height:34px;flex-grow:1;}.src nav.sub{margin:0 0 15px 0;}.section-header{display:block;position:relative;}.section-header:hover>.anchor,.impl:hover>.anchor,.trait-impl:hover>.anchor,.variant:hover>.anchor{display:initial;}.anchor{display:none;position:absolute;left:-0.5em;background:none !important;}.anchor.field{left:-5px;}.section-header>.anchor{left:-15px;padding-right:8px;}h2.section-header>.anchor{padding-right:6px;}a.doc-anchor{color:var(--main-color);display:none;position:absolute;left:-17px;padding-right:5px;padding-left:3px;}*:hover>.doc-anchor{display:block;}.top-doc>.docblock>*:first-child>.doc-anchor{display:none !important;}.main-heading a:hover,.example-wrap .rust a:hover,.all-items a:hover,.docblock a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover:not(.doc-anchor),.docblock-short a:not(.test-arrow):not(.scrape-help):not(.tooltip):hover,.item-info a{text-decoration:underline;}.crate.block li.current 
a{font-weight:500;}table,.item-table{overflow-wrap:break-word;}.item-table{display:table;padding:0;margin:0;}.item-table>li{display:table-row;}.item-table>li>div{display:table-cell;}.item-table>li>.item-name{padding-right:1.25rem;}.search-results-title{margin-top:0;white-space:nowrap;display:flex;align-items:baseline;}#crate-search-div{position:relative;min-width:5em;}#crate-search{min-width:115px;padding:0 23px 0 4px;max-width:100%;text-overflow:ellipsis;border:1px solid var(--border-color);border-radius:4px;outline:none;cursor:pointer;-moz-appearance:none;-webkit-appearance:none;text-indent:0.01px;background-color:var(--main-background-color);color:inherit;line-height:1.5;font-weight:500;}#crate-search:hover,#crate-search:focus{border-color:var(--crate-search-hover-border);}#crate-search-div::after{pointer-events:none;width:100%;height:100%;position:absolute;top:0;left:0;content:"";background-repeat:no-repeat;background-size:20px;background-position:calc(100% - 2px) 56%;background-image:url('data:image/svg+xml, \ + ');filter:var(--crate-search-div-filter);}#crate-search-div:hover::after,#crate-search-div:focus-within::after{filter:var(--crate-search-div-hover-filter);}#crate-search>option{font-size:1rem;}.search-input{-webkit-appearance:none;outline:none;border:1px solid var(--border-color);border-radius:2px;padding:8px;font-size:1rem;flex-grow:1;background-color:var(--button-background-color);color:var(--search-color);}.search-input:focus{border-color:var(--search-input-focused-border-color);}.search-results{display:none;}.search-results.active{display:block;}.search-results>a{display:flex;margin-left:2px;margin-right:2px;border-bottom:1px solid var(--search-result-border-color);gap:1em;}.search-results>a>div.desc{white-space:nowrap;text-overflow:ellipsis;overflow:hidden;flex:2;}.search-results a:hover,.search-results a:focus{background-color:var(--search-result-link-focus-background-color);}.search-results .result-name{display:flex;align-items:center;justify-content:start;flex:3;}.search-results .result-name .alias{color:var(--search-results-alias-color);}.search-results .result-name .grey{color:var(--search-results-grey-color);}.search-results .result-name .typename{color:var(--search-results-grey-color);font-size:0.875rem;width:var(--search-typename-width);}.search-results .result-name .path{word-break:break-all;max-width:calc(100% - var(--search-typename-width));display:inline-block;}.search-results .result-name .path>*{display:inline;}.popover{position:absolute;top:100%;right:0;z-index:calc(var(--desktop-sidebar-z-index) + 1);margin-top:7px;border-radius:3px;border:1px solid var(--border-color);background-color:var(--main-background-color);color:var(--main-color);--popover-arrow-offset:11px;}.popover::before{content:'';position:absolute;right:var(--popover-arrow-offset);border:solid var(--border-color);border-width:1px 1px 0 0;background-color:var(--main-background-color);padding:4px;transform:rotate(-45deg);top:-5px;}.setting-line{margin:1.2em 0.6em;}.setting-radio input,.setting-check input{margin-right:0.3em;height:1.2rem;width:1.2rem;border:2px solid var(--settings-input-border-color);outline:none;-webkit-appearance:none;cursor:pointer;}.setting-radio input{border-radius:50%;}.setting-radio span,.setting-check 
span{padding-bottom:1px;}.setting-radio{margin-top:0.1em;margin-bottom:0.1em;min-width:3.8em;padding:0.3em;display:inline-flex;align-items:center;cursor:pointer;}.setting-radio+.setting-radio{margin-left:0.5em;}.setting-check{margin-right:20px;display:flex;align-items:center;cursor:pointer;}.setting-radio input:checked{box-shadow:inset 0 0 0 3px var(--main-background-color);background-color:var(--settings-input-color);}.setting-check input:checked{background-color:var(--settings-input-color);border-width:1px;content:url('data:image/svg+xml,\ + \ + ');}.setting-radio input:focus,.setting-check input:focus{box-shadow:0 0 1px 1px var(--settings-input-color);}.setting-radio input:checked:focus{box-shadow:inset 0 0 0 3px var(--main-background-color),0 0 2px 2px var(--settings-input-color);}.setting-radio input:hover,.setting-check input:hover{border-color:var(--settings-input-color) !important;}#help.popover{max-width:600px;--popover-arrow-offset:48px;}#help dt{float:left;clear:left;margin-right:0.5rem;}#help span.top,#help span.bottom{text-align:center;display:block;font-size:1.125rem;}#help span.top{margin:10px 0;border-bottom:1px solid var(--border-color);padding-bottom:4px;margin-bottom:6px;}#help span.bottom{clear:both;border-top:1px solid var(--border-color);}.side-by-side>div{width:50%;float:left;padding:0 20px 20px 17px;}.item-info .stab{display:block;padding:3px;margin-bottom:5px;}.item-name .stab{margin-left:0.3125em;}.stab{padding:0 2px;font-size:0.875rem;font-weight:normal;color:var(--main-color);background-color:var(--stab-background-color);width:fit-content;white-space:pre-wrap;border-radius:3px;display:inline;vertical-align:baseline;}.stab.portability>code{background:none;color:var(--stab-code-color);}.stab .emoji,.item-info .stab::before{font-size:1.25rem;}.stab .emoji{margin-right:0.3rem;}.item-info .stab::before{content:"\0";width:0;display:inline-block;color:transparent;}.emoji{text-shadow:1px 0 0 black,-1px 0 0 black,0 1px 0 black,0 -1px 0 black;}.since{font-weight:normal;font-size:initial;}.rightside{padding-left:12px;float:right;}.rightside:not(a),.out-of-band{color:var(--right-side-color);}pre.rust{tab-size:4;-moz-tab-size:4;}pre.rust .kw{color:var(--code-highlight-kw-color);}pre.rust .kw-2{color:var(--code-highlight-kw-2-color);}pre.rust .lifetime{color:var(--code-highlight-lifetime-color);}pre.rust .prelude-ty{color:var(--code-highlight-prelude-color);}pre.rust .prelude-val{color:var(--code-highlight-prelude-val-color);}pre.rust .string{color:var(--code-highlight-string-color);}pre.rust .number{color:var(--code-highlight-number-color);}pre.rust .bool-val{color:var(--code-highlight-literal-color);}pre.rust .self{color:var(--code-highlight-self-color);}pre.rust .attr{color:var(--code-highlight-attribute-color);}pre.rust .macro,pre.rust .macro-nonterminal{color:var(--code-highlight-macro-color);}pre.rust .question-mark{font-weight:bold;color:var(--code-highlight-question-mark-color);}pre.rust .comment{color:var(--code-highlight-comment-color);}pre.rust .doccomment{color:var(--code-highlight-doc-comment-color);}.rustdoc.src .example-wrap pre.rust a{background:var(--codeblock-link-background);}.example-wrap.compile_fail,.example-wrap.should_panic{border-left:2px solid var(--codeblock-error-color);}.ignore.example-wrap{border-left:2px solid var(--codeblock-ignore-color);}.example-wrap.compile_fail:hover,.example-wrap.should_panic:hover{border-left:2px solid var(--codeblock-error-hover-color);}.example-wrap.ignore:hover{border-left:2px solid 
var(--codeblock-ignore-hover-color);}.example-wrap.compile_fail .tooltip,.example-wrap.should_panic .tooltip{color:var(--codeblock-error-color);}.example-wrap.ignore .tooltip{color:var(--codeblock-ignore-color);}.example-wrap.compile_fail:hover .tooltip,.example-wrap.should_panic:hover .tooltip{color:var(--codeblock-error-hover-color);}.example-wrap.ignore:hover .tooltip{color:var(--codeblock-ignore-hover-color);}.example-wrap .tooltip{position:absolute;display:block;left:-25px;top:5px;margin:0;line-height:1;}.example-wrap.compile_fail .tooltip,.example-wrap.should_panic .tooltip,.example-wrap.ignore .tooltip{font-weight:bold;font-size:1.25rem;}.content .docblock .warning{border-left:2px solid var(--warning-border-color);padding:14px;position:relative;overflow-x:visible !important;}.content .docblock .warning::before{color:var(--warning-border-color);content:"ⓘ";position:absolute;left:-25px;top:5px;font-weight:bold;font-size:1.25rem;}.top-doc>.docblock>.warning:first-child::before{top:20px;}a.test-arrow{visibility:hidden;position:absolute;padding:5px 10px 5px 10px;border-radius:5px;font-size:1.375rem;top:5px;right:5px;z-index:1;color:var(--test-arrow-color);background-color:var(--test-arrow-background-color);}a.test-arrow:hover{color:var(--test-arrow-hover-color);background-color:var(--test-arrow-hover-background-color);}.example-wrap:hover .test-arrow{visibility:visible;}.code-attribute{font-weight:300;color:var(--code-attribute-color);}.item-spacer{width:100%;height:12px;display:block;}.out-of-band>span.since{font-size:1.25rem;}.sub-variant h4{font-size:1rem;font-weight:400;margin-top:0;margin-bottom:0;}.sub-variant{margin-left:24px;margin-bottom:40px;}.sub-variant>.sub-variant-field{margin-left:24px;}:target{padding-right:3px;background-color:var(--target-background-color);border-right:3px solid var(--target-border-color);}.code-header a.tooltip{color:inherit;margin-right:15px;position:relative;}.code-header a.tooltip:hover{color:var(--link-color);}a.tooltip:hover::after{position:absolute;top:calc(100% - 10px);left:-15px;right:-15px;height:20px;content:"\00a0";}.fade-out{opacity:0;transition:opacity 0.45s cubic-bezier(0,0,0.1,1.0);}.popover.tooltip .content{margin:0.25em 0.5em;}.popover.tooltip .content pre,.popover.tooltip .content code{background:transparent;margin:0;padding:0;font-size:1.25rem;white-space:pre-wrap;}.popover.tooltip .content>h3:first-child{margin:0 0 5px 0;}.search-failed{text-align:center;margin-top:20px;display:none;}.search-failed.active{display:block;}.search-failed>ul{text-align:left;max-width:570px;margin-left:auto;margin-right:auto;}#search-tabs{display:flex;flex-direction:row;gap:1px;margin-bottom:4px;}#search-tabs button{text-align:center;font-size:1.125rem;border:0;border-top:2px solid;flex:1;line-height:1.5;color:inherit;}#search-tabs button:not(.selected){background-color:var(--search-tab-button-not-selected-background);border-top-color:var(--search-tab-button-not-selected-border-top-color);}#search-tabs button:hover,#search-tabs button.selected{background-color:var(--search-tab-button-selected-background);border-top-color:var(--search-tab-button-selected-border-top-color);}#search-tabs .count{font-size:1rem;font-variant-numeric:tabular-nums;color:var(--search-tab-title-count-color);}#search .error code{border-radius:3px;background-color:var(--search-error-code-background-color);}.search-corrections{font-weight:normal;}#src-sidebar{width:100%;overflow:auto;}#src-sidebar div.files>a:hover,details.dir-entry summary:hover,#src-sidebar 
div.files>a:focus,details.dir-entry summary:focus{background-color:var(--src-sidebar-background-hover);}#src-sidebar div.files>a.selected{background-color:var(--src-sidebar-background-selected);}.src-sidebar-title{position:sticky;top:0;display:flex;padding:8px 8px 0 48px;margin-bottom:7px;background:var(--sidebar-background-color);border-bottom:1px solid var(--border-color);}#settings-menu,#help-button{margin-left:4px;display:flex;}#sidebar-button{display:none;line-height:0;}.hide-sidebar #sidebar-button,.src #sidebar-button{display:flex;margin-right:4px;position:fixed;left:6px;height:34px;width:34px;background-color:var(--main-background-color);z-index:1;}.src #sidebar-button{left:8px;z-index:calc(var(--desktop-sidebar-z-index) + 1);}.hide-sidebar .src #sidebar-button{position:static;}#settings-menu>a,#help-button>a,#sidebar-button>a{display:flex;align-items:center;justify-content:center;background-color:var(--button-background-color);border:1px solid var(--border-color);border-radius:2px;color:var(--settings-button-color);font-size:20px;width:33px;}#settings-menu>a:hover,#settings-menu>a:focus,#help-button>a:hover,#help-button>a:focus,#sidebar-button>a:hover,#sidebar-button>a:focus{border-color:var(--settings-button-border-focus);}#settings-menu>a{line-height:0;font-size:0;}#settings-menu>a:before{content:url('data:image/svg+xml,\ + ');width:22px;height:22px;filter:var(--settings-menu-filter);}#sidebar-button>a:before{content:url('data:image/svg+xml,\ + \ + \ + ');width:22px;height:22px;}#copy-path{color:var(--copy-path-button-color);background:var(--main-background-color);height:34px;width:33px;margin-left:10px;padding:0;padding-left:2px;border:0;font-size:0;}#copy-path::before{filter:var(--copy-path-img-filter);content:url('data:image/svg+xml,\ +\ +\ +');width:19px;height:18px;}#copy-path:hover::before{filter:var(--copy-path-img-hover-filter);}#copy-path.clicked::before{content:url('data:image/svg+xml,\ + \ + ');}@keyframes rotating{from{transform:rotate(0deg);}to{transform:rotate(360deg);}}#settings-menu.rotate>a img{animation:rotating 2s linear infinite;}kbd{display:inline-block;padding:3px 5px;font:15px monospace;line-height:10px;vertical-align:middle;border:solid 1px var(--border-color);border-radius:3px;color:var(--kbd-color);background-color:var(--kbd-background);box-shadow:inset 0 -1px 0 var(--kbd-box-shadow-color);}ul.all-items>li{list-style:none;}details.dir-entry{padding-left:4px;}details.dir-entry>summary{margin:0 0 0 -4px;padding:0 0 0 4px;cursor:pointer;}details.dir-entry div.folders,details.dir-entry div.files{padding-left:23px;}details.dir-entry a{display:block;}details.toggle{contain:layout;position:relative;}details.toggle>summary.hideme{cursor:pointer;font-size:1rem;}details.toggle>summary{list-style:none;outline:none;}details.toggle>summary::-webkit-details-marker,details.toggle>summary::marker{display:none;}details.toggle>summary.hideme>span{margin-left:9px;}details.toggle>summary::before{background:url('data:image/svg+xml,') no-repeat top left;content:"";cursor:pointer;width:16px;height:16px;display:inline-block;vertical-align:middle;opacity:.5;filter:var(--toggle-filter);}details.toggle>summary.hideme>span,.more-examples-toggle summary,.more-examples-toggle 
.hide-more{color:var(--toggles-color);}details.toggle>summary::after{content:"Expand";overflow:hidden;width:0;height:0;position:absolute;}details.toggle>summary.hideme::after{content:"";}details.toggle>summary:focus::before,details.toggle>summary:hover::before{opacity:1;}details.toggle>summary:focus-visible::before{outline:1px dotted #000;outline-offset:1px;}details.non-exhaustive{margin-bottom:8px;}details.toggle>summary.hideme::before{position:relative;}details.toggle>summary:not(.hideme)::before{position:absolute;left:-24px;top:4px;}.impl-items>details.toggle>summary:not(.hideme)::before{position:absolute;left:-24px;}details.toggle[open] >summary.hideme{position:absolute;}details.toggle[open] >summary.hideme>span{display:none;}details.toggle[open] >summary::before{background:url('data:image/svg+xml,') no-repeat top left;}details.toggle[open] >summary::after{content:"Collapse";}.docblock summary>*{display:inline-block;}.docblock>.example-wrap:first-child .tooltip{margin-top:16px;}.src #sidebar-button>a:before,.sidebar-menu-toggle:before{content:url('data:image/svg+xml,\ + ');opacity:0.75;}.sidebar-menu-toggle:hover:before,.sidebar-menu-toggle:active:before,.sidebar-menu-toggle:focus:before{opacity:1;}.src #sidebar-button>a:before{content:url('data:image/svg+xml,\ + \ + \ + ');opacity:0.75;}@media (max-width:850px){#search-tabs .count{display:block;}}@media (max-width:700px){*[id]{scroll-margin-top:45px;}.rustdoc{display:block;}main{padding-left:15px;padding-top:0px;}.main-heading{flex-direction:column;}.out-of-band{text-align:left;margin-left:initial;padding:initial;}.out-of-band .since::before{content:"Since ";}.sidebar .logo-container,.sidebar .location,.sidebar-resizer{display:none;}.sidebar{position:fixed;top:45px;left:-1000px;z-index:11;height:calc(100vh - 45px);width:200px;}.src main,.rustdoc.src .sidebar{top:0;padding:0;height:100vh;border:0;}.src .search-form{margin-left:40px;}.hide-sidebar .search-form{margin-left:32px;}.hide-sidebar .src .search-form{margin-left:0;}.sidebar.shown,.src-sidebar-expanded .src .sidebar,.rustdoc:not(.src) .sidebar:focus-within{left:0;}.mobile-topbar h2{padding-bottom:0;margin:auto 0.5em auto auto;overflow:hidden;font-size:24px;white-space:nowrap;text-overflow:ellipsis;}.mobile-topbar .logo-container>img{max-width:35px;max-height:35px;margin:5px 0 5px 20px;}.mobile-topbar{display:flex;flex-direction:row;position:sticky;z-index:10;font-size:2rem;height:45px;width:100%;left:0;top:0;}.hide-sidebar .mobile-topbar{display:none;}.sidebar-menu-toggle{width:45px;border:none;line-height:0;}.hide-sidebar .sidebar-menu-toggle{display:none;}.sidebar-elems{margin-top:1em;}.anchor{display:none !important;}#main-content>details.toggle>summary::before,#main-content>div>details.toggle>summary::before{left:-11px;}#copy-path,#help-button{display:none;}#sidebar-button>a:before{content:url('data:image/svg+xml,\ + \ + \ + ');width:22px;height:22px;}.sidebar-menu-toggle:before{filter:var(--mobile-sidebar-menu-filter);}.sidebar-menu-toggle:hover{background:var(--main-background-color);}.item-table,.item-row,.item-table>li,.item-table>li>div,.search-results>a,.search-results>a>div{display:block;}.search-results>a{padding:5px 0px;}.search-results>a>div.desc,.item-table>li>div.desc{padding-left:2em;}.search-results .result-name{display:block;}.search-results .result-name .typename{width:initial;margin-right:0;}.search-results .result-name .typename,.search-results .result-name .path{display:inline;}.src-sidebar-expanded .src 
.sidebar{position:fixed;max-width:100vw;width:100vw;}.src .src-sidebar-title{padding-top:0;}details.toggle:not(.top-doc)>summary{margin-left:10px;}.impl-items>details.toggle>summary:not(.hideme)::before,#main-content>details.toggle:not(.top-doc)>summary::before,#main-content>div>details.toggle>summary::before{left:-11px;}.impl-items>.item-info{margin-left:34px;}.src nav.sub{margin:0;padding:var(--nav-sub-mobile-padding);}}@media (min-width:701px){.scraped-example-title{position:absolute;z-index:10;background:var(--main-background-color);bottom:8px;right:5px;padding:2px 4px;box-shadow:0 0 4px var(--main-background-color);}}@media print{nav.sidebar,nav.sub,.out-of-band,a.src,#copy-path,details.toggle[open] >summary::before,details.toggle>summary::before,details.toggle.top-doc>summary{display:none;}.docblock{margin-left:0;}main{padding:10px;}}@media (max-width:464px){.docblock{margin-left:12px;}.docblock code{overflow-wrap:break-word;overflow-wrap:anywhere;}nav.sub{flex-direction:column;}.search-form{align-self:stretch;}}.variant,.implementors-toggle>summary,.impl,#implementors-list>.docblock,.impl-items>section,.impl-items>.toggle>summary,.methods>section,.methods>.toggle>summary{margin-bottom:0.75em;}.variants>.docblock,.implementors-toggle>.docblock,.impl-items>.toggle[open]:not(:last-child),.methods>.toggle[open]:not(:last-child),.implementors-toggle[open]:not(:last-child){margin-bottom:2em;}#trait-implementations-list .impl-items>.toggle:not(:last-child),#synthetic-implementations-list .impl-items>.toggle:not(:last-child),#blanket-implementations-list .impl-items>.toggle:not(:last-child){margin-bottom:1em;}.scraped-example-list .scrape-help{margin-left:10px;padding:0 4px;font-weight:normal;font-size:12px;position:relative;bottom:1px;border:1px solid var(--scrape-example-help-border-color);border-radius:50px;color:var(--scrape-example-help-color);}.scraped-example-list .scrape-help:hover{border-color:var(--scrape-example-help-hover-border-color);color:var(--scrape-example-help-hover-color);}.scraped-example{position:relative;}.scraped-example .code-wrapper{position:relative;display:flex;flex-direction:row;flex-wrap:wrap;width:100%;}.scraped-example:not(.expanded) .code-wrapper{max-height:calc(1.5em * 5 + 10px);}.scraped-example:not(.expanded) .code-wrapper pre{overflow-y:hidden;padding-bottom:0;max-height:calc(1.5em * 5 + 10px);}.more-scraped-examples .scraped-example:not(.expanded) .code-wrapper,.more-scraped-examples .scraped-example:not(.expanded) .code-wrapper pre{max-height:calc(1.5em * 10 + 10px);}.scraped-example .code-wrapper .next,.scraped-example .code-wrapper .prev,.scraped-example .code-wrapper .expand{color:var(--main-color);position:absolute;top:0.25em;z-index:1;padding:0;background:none;border:none;-webkit-appearance:none;opacity:1;}.scraped-example .code-wrapper .prev{right:2.25em;}.scraped-example .code-wrapper .next{right:1.25em;}.scraped-example .code-wrapper .expand{right:0.25em;}.scraped-example:not(.expanded) .code-wrapper::before,.scraped-example:not(.expanded) .code-wrapper::after{content:" ";width:100%;height:5px;position:absolute;z-index:1;}.scraped-example:not(.expanded) .code-wrapper::before{top:0;background:linear-gradient(to bottom,var(--scrape-example-code-wrapper-background-start),var(--scrape-example-code-wrapper-background-end));}.scraped-example:not(.expanded) .code-wrapper::after{bottom:0;background:linear-gradient(to top,var(--scrape-example-code-wrapper-background-start),var(--scrape-example-code-wrapper-background-end));}.scraped-example 
.code-wrapper .example-wrap{width:100%;overflow-y:hidden;margin-bottom:0;}.scraped-example:not(.expanded) .code-wrapper .example-wrap{overflow-x:hidden;}.scraped-example .example-wrap .rust span.highlight{background:var(--scrape-example-code-line-highlight);}.scraped-example .example-wrap .rust span.highlight.focus{background:var(--scrape-example-code-line-highlight-focus);}.more-examples-toggle{max-width:calc(100% + 25px);margin-top:10px;margin-left:-25px;}.more-examples-toggle .hide-more{margin-left:25px;cursor:pointer;}.more-scraped-examples{margin-left:25px;position:relative;}.toggle-line{position:absolute;top:5px;bottom:0;right:calc(100% + 10px);padding:0 4px;cursor:pointer;}.toggle-line-inner{min-width:2px;height:100%;background:var(--scrape-example-toggle-line-background);}.toggle-line:hover .toggle-line-inner{background:var(--scrape-example-toggle-line-hover-background);}.more-scraped-examples .scraped-example,.example-links{margin-top:20px;}.more-scraped-examples .scraped-example:first-child{margin-top:5px;}.example-links ul{margin-bottom:0;}:root[data-theme="light"],:root:not([data-theme]){--main-background-color:white;--main-color:black;--settings-input-color:#2196f3;--settings-input-border-color:#717171;--settings-button-color:#000;--settings-button-border-focus:#717171;--sidebar-background-color:#f5f5f5;--sidebar-background-color-hover:#e0e0e0;--code-block-background-color:#f5f5f5;--scrollbar-track-background-color:#dcdcdc;--scrollbar-thumb-background-color:rgba(36,37,39,0.6);--scrollbar-color:rgba(36,37,39,0.6) #d9d9d9;--headings-border-bottom-color:#ddd;--border-color:#e0e0e0;--button-background-color:#fff;--right-side-color:grey;--code-attribute-color:#999;--toggles-color:#999;--toggle-filter:none;--mobile-sidebar-menu-filter:none;--search-input-focused-border-color:#66afe9;--copy-path-button-color:#999;--copy-path-img-filter:invert(50%);--copy-path-img-hover-filter:invert(35%);--codeblock-error-hover-color:rgb(255,0,0);--codeblock-error-color:rgba(255,0,0,.5);--codeblock-ignore-hover-color:rgb(255,142,0);--codeblock-ignore-color:rgba(255,142,0,.6);--warning-border-color:#ff8e00;--type-link-color:#ad378a;--trait-link-color:#6e4fc9;--assoc-item-link-color:#3873ad;--function-link-color:#ad7c37;--macro-link-color:#068000;--keyword-link-color:#3873ad;--mod-link-color:#3873ad;--link-color:#3873ad;--sidebar-link-color:#356da4;--sidebar-current-link-background-color:#fff;--search-result-link-focus-background-color:#ccc;--search-result-border-color:#aaa3;--search-color:#000;--search-error-code-background-color:#d0cccc;--search-results-alias-color:#000;--search-results-grey-color:#999;--search-tab-title-count-color:#888;--search-tab-button-not-selected-border-top-color:#e6e6e6;--search-tab-button-not-selected-background:#e6e6e6;--search-tab-button-selected-border-top-color:#0089ff;--search-tab-button-selected-background:#fff;--settings-menu-filter:none;--stab-background-color:#fff5d6;--stab-code-color:#000;--code-highlight-kw-color:#8959a8;--code-highlight-kw-2-color:#4271ae;--code-highlight-lifetime-color:#b76514;--code-highlight-prelude-color:#4271ae;--code-highlight-prelude-val-color:#c82829;--code-highlight-number-color:#718c00;--code-highlight-string-color:#718c00;--code-highlight-literal-color:#c82829;--code-highlight-attribute-color:#c82829;--code-highlight-self-color:#c82829;--code-highlight-macro-color:#3e999f;--code-highlight-question-mark-color:#ff9011;--code-highlight-comment-color:#8e908c;--code-highlight-doc-comment-color:#4d4d4c;--src-line-numbers-span-color:#c67e2d;--
src-line-number-highlighted-background-color:#fdffd3;--test-arrow-color:#f5f5f5;--test-arrow-background-color:rgba(78,139,202,0.2);--test-arrow-hover-color:#f5f5f5;--test-arrow-hover-background-color:rgb(78,139,202);--target-background-color:#fdffd3;--target-border-color:#ad7c37;--kbd-color:#000;--kbd-background:#fafbfc;--kbd-box-shadow-color:#c6cbd1;--rust-logo-filter:initial;--crate-search-div-filter:invert(100%) sepia(0%) saturate(4223%) hue-rotate(289deg) brightness(114%) contrast(76%);--crate-search-div-hover-filter:invert(44%) sepia(18%) saturate(23%) hue-rotate(317deg) brightness(96%) contrast(93%);--crate-search-hover-border:#717171;--src-sidebar-background-selected:#fff;--src-sidebar-background-hover:#e0e0e0;--table-alt-row-background-color:#f5f5f5;--codeblock-link-background:#eee;--scrape-example-toggle-line-background:#ccc;--scrape-example-toggle-line-hover-background:#999;--scrape-example-code-line-highlight:#fcffd6;--scrape-example-code-line-highlight-focus:#f6fdb0;--scrape-example-help-border-color:#555;--scrape-example-help-color:#333;--scrape-example-help-hover-border-color:#000;--scrape-example-help-hover-color:#000;--scrape-example-code-wrapper-background-start:rgba(255,255,255,1);--scrape-example-code-wrapper-background-end:rgba(255,255,255,0);--sidebar-resizer-hover:hsl(207,90%,66%);--sidebar-resizer-active:hsl(207,90%,54%);}:root[data-theme="dark"]{--main-background-color:#353535;--main-color:#ddd;--settings-input-color:#2196f3;--settings-input-border-color:#999;--settings-button-color:#000;--settings-button-border-focus:#ffb900;--sidebar-background-color:#505050;--sidebar-background-color-hover:#676767;--code-block-background-color:#2A2A2A;--scrollbar-track-background-color:#717171;--scrollbar-thumb-background-color:rgba(32,34,37,.6);--scrollbar-color:rgba(32,34,37,.6) 
#5a5a5a;--headings-border-bottom-color:#d2d2d2;--border-color:#e0e0e0;--button-background-color:#f0f0f0;--right-side-color:grey;--code-attribute-color:#999;--toggles-color:#999;--toggle-filter:invert(100%);--mobile-sidebar-menu-filter:invert(100%);--search-input-focused-border-color:#008dfd;--copy-path-button-color:#999;--copy-path-img-filter:invert(50%);--copy-path-img-hover-filter:invert(65%);--codeblock-error-hover-color:rgb(255,0,0);--codeblock-error-color:rgba(255,0,0,.5);--codeblock-ignore-hover-color:rgb(255,142,0);--codeblock-ignore-color:rgba(255,142,0,.6);--warning-border-color:#ff8e00;--type-link-color:#2dbfb8;--trait-link-color:#b78cf2;--assoc-item-link-color:#d2991d;--function-link-color:#2bab63;--macro-link-color:#09bd00;--keyword-link-color:#d2991d;--mod-link-color:#d2991d;--link-color:#d2991d;--sidebar-link-color:#fdbf35;--sidebar-current-link-background-color:#444;--search-result-link-focus-background-color:#616161;--search-result-border-color:#aaa3;--search-color:#111;--search-error-code-background-color:#484848;--search-results-alias-color:#fff;--search-results-grey-color:#ccc;--search-tab-title-count-color:#888;--search-tab-button-not-selected-border-top-color:#252525;--search-tab-button-not-selected-background:#252525;--search-tab-button-selected-border-top-color:#0089ff;--search-tab-button-selected-background:#353535;--settings-menu-filter:none;--stab-background-color:#314559;--stab-code-color:#e6e1cf;--code-highlight-kw-color:#ab8ac1;--code-highlight-kw-2-color:#769acb;--code-highlight-lifetime-color:#d97f26;--code-highlight-prelude-color:#769acb;--code-highlight-prelude-val-color:#ee6868;--code-highlight-number-color:#83a300;--code-highlight-string-color:#83a300;--code-highlight-literal-color:#ee6868;--code-highlight-attribute-color:#ee6868;--code-highlight-self-color:#ee6868;--code-highlight-macro-color:#3e999f;--code-highlight-question-mark-color:#ff9011;--code-highlight-comment-color:#8d8d8b;--code-highlight-doc-comment-color:#8ca375;--src-line-numbers-span-color:#3b91e2;--src-line-number-highlighted-background-color:#0a042f;--test-arrow-color:#dedede;--test-arrow-background-color:rgba(78,139,202,0.2);--test-arrow-hover-color:#dedede;--test-arrow-hover-background-color:#4e8bca;--target-background-color:#494a3d;--target-border-color:#bb7410;--kbd-color:#000;--kbd-background:#fafbfc;--kbd-box-shadow-color:#c6cbd1;--rust-logo-filter:drop-shadow(1px 0 0px #fff) drop-shadow(0 1px 0 #fff) drop-shadow(-1px 0 0 #fff) drop-shadow(0 -1px 0 #fff);--crate-search-div-filter:invert(94%) sepia(0%) saturate(721%) hue-rotate(255deg) brightness(90%) contrast(90%);--crate-search-div-hover-filter:invert(69%) sepia(60%) saturate(6613%) hue-rotate(184deg) brightness(100%) 
contrast(91%);--crate-search-hover-border:#2196f3;--src-sidebar-background-selected:#333;--src-sidebar-background-hover:#444;--table-alt-row-background-color:#2a2a2a;--codeblock-link-background:#333;--scrape-example-toggle-line-background:#999;--scrape-example-toggle-line-hover-background:#c5c5c5;--scrape-example-code-line-highlight:#5b3b01;--scrape-example-code-line-highlight-focus:#7c4b0f;--scrape-example-help-border-color:#aaa;--scrape-example-help-color:#eee;--scrape-example-help-hover-border-color:#fff;--scrape-example-help-hover-color:#fff;--scrape-example-code-wrapper-background-start:rgba(53,53,53,1);--scrape-example-code-wrapper-background-end:rgba(53,53,53,0);--sidebar-resizer-hover:hsl(207,30%,54%);--sidebar-resizer-active:hsl(207,90%,54%);}:root[data-theme="ayu"]{--main-background-color:#0f1419;--main-color:#c5c5c5;--settings-input-color:#ffb454;--settings-input-border-color:#999;--settings-button-color:#fff;--settings-button-border-focus:#e0e0e0;--sidebar-background-color:#14191f;--sidebar-background-color-hover:rgba(70,70,70,0.33);--code-block-background-color:#191f26;--scrollbar-track-background-color:transparent;--scrollbar-thumb-background-color:#5c6773;--scrollbar-color:#5c6773 #24292f;--headings-border-bottom-color:#5c6773;--border-color:#5c6773;--button-background-color:#141920;--right-side-color:grey;--code-attribute-color:#999;--toggles-color:#999;--toggle-filter:invert(100%);--mobile-sidebar-menu-filter:invert(100%);--search-input-focused-border-color:#5c6773;--copy-path-button-color:#fff;--copy-path-img-filter:invert(70%);--copy-path-img-hover-filter:invert(100%);--codeblock-error-hover-color:rgb(255,0,0);--codeblock-error-color:rgba(255,0,0,.5);--codeblock-ignore-hover-color:rgb(255,142,0);--codeblock-ignore-color:rgba(255,142,0,.6);--warning-border-color:#ff8e00;--type-link-color:#ffa0a5;--trait-link-color:#39afd7;--assoc-item-link-color:#39afd7;--function-link-color:#fdd687;--macro-link-color:#a37acc;--keyword-link-color:#39afd7;--mod-link-color:#39afd7;--link-color:#39afd7;--sidebar-link-color:#53b1db;--sidebar-current-link-background-color:transparent;--search-result-link-focus-background-color:#3c3c3c;--search-result-border-color:#aaa3;--search-color:#fff;--search-error-code-background-color:#4f4c4c;--search-results-alias-color:#c5c5c5;--search-results-grey-color:#999;--search-tab-title-count-color:#888;--search-tab-button-not-selected-border-top-color:none;--search-tab-button-not-selected-background:transparent !important;--search-tab-button-selected-border-top-color:none;--search-tab-button-selected-background:#141920 
!important;--settings-menu-filter:invert(100%);--stab-background-color:#314559;--stab-code-color:#e6e1cf;--code-highlight-kw-color:#ff7733;--code-highlight-kw-2-color:#ff7733;--code-highlight-lifetime-color:#ff7733;--code-highlight-prelude-color:#69f2df;--code-highlight-prelude-val-color:#ff7733;--code-highlight-number-color:#b8cc52;--code-highlight-string-color:#b8cc52;--code-highlight-literal-color:#ff7733;--code-highlight-attribute-color:#e6e1cf;--code-highlight-self-color:#36a3d9;--code-highlight-macro-color:#a37acc;--code-highlight-question-mark-color:#ff9011;--code-highlight-comment-color:#788797;--code-highlight-doc-comment-color:#a1ac88;--src-line-numbers-span-color:#5c6773;--src-line-number-highlighted-background-color:rgba(255,236,164,0.06);--test-arrow-color:#788797;--test-arrow-background-color:rgba(57,175,215,0.09);--test-arrow-hover-color:#c5c5c5;--test-arrow-hover-background-color:rgba(57,175,215,0.368);--target-background-color:rgba(255,236,164,0.06);--target-border-color:rgba(255,180,76,0.85);--kbd-color:#c5c5c5;--kbd-background:#314559;--kbd-box-shadow-color:#5c6773;--rust-logo-filter:drop-shadow(1px 0 0px #fff) drop-shadow(0 1px 0 #fff) drop-shadow(-1px 0 0 #fff) drop-shadow(0 -1px 0 #fff);--crate-search-div-filter:invert(41%) sepia(12%) saturate(487%) hue-rotate(171deg) brightness(94%) contrast(94%);--crate-search-div-hover-filter:invert(98%) sepia(12%) saturate(81%) hue-rotate(343deg) brightness(113%) contrast(76%);--crate-search-hover-border:#e0e0e0;--src-sidebar-background-selected:#14191f;--src-sidebar-background-hover:#14191f;--table-alt-row-background-color:#191f26;--codeblock-link-background:#333;--scrape-example-toggle-line-background:#999;--scrape-example-toggle-line-hover-background:#c5c5c5;--scrape-example-code-line-highlight:#5b3b01;--scrape-example-code-line-highlight-focus:#7c4b0f;--scrape-example-help-border-color:#aaa;--scrape-example-help-color:#eee;--scrape-example-help-hover-border-color:#fff;--scrape-example-help-hover-color:#fff;--scrape-example-code-wrapper-background-start:rgba(15,20,25,1);--scrape-example-code-wrapper-background-end:rgba(15,20,25,0);--sidebar-resizer-hover:hsl(34,50%,33%);--sidebar-resizer-active:hsl(34,100%,66%);}:root[data-theme="ayu"] h1,:root[data-theme="ayu"] h2,:root[data-theme="ayu"] h3,:root[data-theme="ayu"] h4,:where(:root[data-theme="ayu"]) h1 a,:root[data-theme="ayu"] .sidebar h2 a,:root[data-theme="ayu"] .sidebar h3 a{color:#fff;}:root[data-theme="ayu"] .docblock code{color:#ffb454;}:root[data-theme="ayu"] .docblock a>code{color:#39AFD7 !important;}:root[data-theme="ayu"] .code-header,:root[data-theme="ayu"] .docblock pre>code,:root[data-theme="ayu"] pre,:root[data-theme="ayu"] pre>code,:root[data-theme="ayu"] .item-info code,:root[data-theme="ayu"] .rustdoc.source .example-wrap{color:#e6e1cf;}:root[data-theme="ayu"] .sidebar .current,:root[data-theme="ayu"] .sidebar .current a,:root[data-theme="ayu"] .sidebar a:hover,:root[data-theme="ayu"] #src-sidebar div.files>a:hover,:root[data-theme="ayu"] details.dir-entry summary:hover,:root[data-theme="ayu"] #src-sidebar div.files>a:focus,:root[data-theme="ayu"] details.dir-entry summary:focus,:root[data-theme="ayu"] #src-sidebar div.files>a.selected{color:#ffb44c;}:root[data-theme="ayu"] .sidebar-elems .location{color:#ff7733;}:root[data-theme="ayu"] .src-line-numbers .line-highlighted{color:#708090;padding-right:7px;border-right:1px solid #ffb44c;}:root[data-theme="ayu"] .search-results a:hover,:root[data-theme="ayu"] .search-results a:focus{color:#fff 
!important;background-color:#3c3c3c;}:root[data-theme="ayu"] .search-results a{color:#0096cf;}:root[data-theme="ayu"] .search-results a div.desc{color:#c5c5c5;}:root[data-theme="ayu"] .result-name .primitive>i,:root[data-theme="ayu"] .result-name .keyword>i{color:#788797;}:root[data-theme="ayu"] #search-tabs>button.selected{border-bottom:1px solid #ffb44c !important;border-top:none;}:root[data-theme="ayu"] #search-tabs>button:not(.selected){border:none;background-color:transparent !important;}:root[data-theme="ayu"] #search-tabs>button:hover{border-bottom:1px solid rgba(242,151,24,0.3);}:root[data-theme="ayu"] #settings-menu>a img,:root[data-theme="ayu"] #sidebar-button>a:before{filter:invert(100);} \ No newline at end of file diff --git a/static.files/scrape-examples-ef1e698c1d417c0c.js b/static.files/scrape-examples-ef1e698c1d417c0c.js new file mode 100644 index 000000000..ba830e374 --- /dev/null +++ b/static.files/scrape-examples-ef1e698c1d417c0c.js @@ -0,0 +1 @@ +"use strict";(function(){const DEFAULT_MAX_LINES=5;const HIDDEN_MAX_LINES=10;function scrollToLoc(elt,loc,isHidden){const lines=elt.querySelector(".src-line-numbers");let scrollOffset;const maxLines=isHidden?HIDDEN_MAX_LINES:DEFAULT_MAX_LINES;if(loc[1]-loc[0]>maxLines){const line=Math.max(0,loc[0]-1);scrollOffset=lines.children[line].offsetTop}else{const wrapper=elt.querySelector(".code-wrapper");const halfHeight=wrapper.offsetHeight/2;const offsetTop=lines.children[loc[0]].offsetTop;const lastLine=lines.children[loc[1]];const offsetBot=lastLine.offsetTop+lastLine.offsetHeight;const offsetMid=(offsetTop+offsetBot)/2;scrollOffset=offsetMid-halfHeight}lines.scrollTo(0,scrollOffset);elt.querySelector(".rust").scrollTo(0,scrollOffset)}function updateScrapedExample(example,isHidden){const locs=JSON.parse(example.attributes.getNamedItem("data-locs").textContent);let locIndex=0;const highlights=Array.prototype.slice.call(example.querySelectorAll(".highlight"));const link=example.querySelector(".scraped-example-title a");if(locs.length>1){const onChangeLoc=changeIndex=>{removeClass(highlights[locIndex],"focus");changeIndex();scrollToLoc(example,locs[locIndex][0],isHidden);addClass(highlights[locIndex],"focus");const url=locs[locIndex][1];const title=locs[locIndex][2];link.href=url;link.innerHTML=title};example.querySelector(".prev").addEventListener("click",()=>{onChangeLoc(()=>{locIndex=(locIndex-1+locs.length)%locs.length})});example.querySelector(".next").addEventListener("click",()=>{onChangeLoc(()=>{locIndex=(locIndex+1)%locs.length})})}const expandButton=example.querySelector(".expand");if(expandButton){expandButton.addEventListener("click",()=>{if(hasClass(example,"expanded")){removeClass(example,"expanded");scrollToLoc(example,locs[0][0],isHidden)}else{addClass(example,"expanded")}})}scrollToLoc(example,locs[0][0],isHidden)}const firstExamples=document.querySelectorAll(".scraped-example-list > .scraped-example");onEachLazy(firstExamples,el=>updateScrapedExample(el,false));onEachLazy(document.querySelectorAll(".more-examples-toggle"),toggle=>{onEachLazy(toggle.querySelectorAll(".toggle-line, .hide-more"),button=>{button.addEventListener("click",()=>{toggle.open=false})});const moreExamples=toggle.querySelectorAll(".scraped-example");toggle.querySelector("summary").addEventListener("click",()=>{setTimeout(()=>{onEachLazy(moreExamples,el=>updateScrapedExample(el,true))})},{once:true})})})() \ No newline at end of file diff --git a/static.files/search-d52510db62a78183.js b/static.files/search-d52510db62a78183.js new file mode 
100644 index 000000000..a2824f297 --- /dev/null +++ b/static.files/search-d52510db62a78183.js @@ -0,0 +1,5 @@ +"use strict";if(!Array.prototype.toSpliced){Array.prototype.toSpliced=function(){const me=this.slice();Array.prototype.splice.apply(me,arguments);return me}}(function(){const itemTypes=["keyword","primitive","mod","externcrate","import","struct","enum","fn","type","static","trait","impl","tymethod","method","structfield","variant","macro","associatedtype","constant","associatedconstant","union","foreigntype","existential","attr","derive","traitalias","generic",];const longItemTypes=["keyword","primitive type","module","extern crate","re-export","struct","enum","function","type alias","static","trait","","trait method","method","struct field","enum variant","macro","assoc type","constant","assoc const","union","foreign type","existential type","attribute macro","derive macro","trait alias",];const TY_GENERIC=itemTypes.indexOf("generic");const TY_IMPORT=itemTypes.indexOf("import");const ROOT_PATH=typeof window!=="undefined"?window.rootPath:"../";const UNBOXING_LIMIT=5;function printTab(nb){let iter=0;let foundCurrentTab=false;let foundCurrentResultSet=false;onEachLazy(document.getElementById("search-tabs").childNodes,elem=>{if(nb===iter){addClass(elem,"selected");foundCurrentTab=true}else{removeClass(elem,"selected")}iter+=1});const isTypeSearch=(nb>0||iter===1);iter=0;onEachLazy(document.getElementById("results").childNodes,elem=>{if(nb===iter){addClass(elem,"active");foundCurrentResultSet=true}else{removeClass(elem,"active")}iter+=1});if(foundCurrentTab&&foundCurrentResultSet){searchState.currentTab=nb;const correctionsElem=document.getElementsByClassName("search-corrections");if(isTypeSearch){removeClass(correctionsElem[0],"hidden")}else{addClass(correctionsElem[0],"hidden")}}else if(nb!==0){printTab(0)}}const editDistanceState={current:[],prev:[],prevPrev:[],calculate:function calculate(a,b,limit){if(a.lengthlimit){return limit+1}while(b.length>0&&b[0]===a[0]){a=a.substring(1);b=b.substring(1)}while(b.length>0&&b[b.length-1]===a[a.length-1]){a=a.substring(0,a.length-1);b=b.substring(0,b.length-1)}if(b.length===0){return minDist}const aLength=a.length;const bLength=b.length;for(let i=0;i<=bLength;++i){this.current[i]=0;this.prev[i]=i;this.prevPrev[i]=Number.MAX_VALUE}for(let i=1;i<=aLength;++i){this.current[0]=i;const aIdx=i-1;for(let j=1;j<=bLength;++j){const bIdx=j-1;const substitutionCost=a[aIdx]===b[bIdx]?0:1;this.current[j]=Math.min(this.prev[j]+1,this.current[j-1]+1,this.prev[j-1]+substitutionCost,);if((i>1)&&(j>1)&&(a[aIdx]===b[bIdx-1])&&(a[aIdx-1]===b[bIdx])){this.current[j]=Math.min(this.current[j],this.prevPrev[j-2]+1,)}}const prevPrevTmp=this.prevPrev;this.prevPrev=this.prev;this.prev=this.current;this.current=prevPrevTmp}const distance=this.prev[bLength];return distance<=limit?distance:(limit+1)},};function editDistance(a,b,limit){return editDistanceState.calculate(a,b,limit)}function initSearch(rawSearchIndex){const MAX_RESULTS=200;const NO_TYPE_FILTER=-1;let searchIndex;let searchIndexDeprecated;let searchIndexEmptyDesc;let functionTypeFingerprint;let currentResults;const typeNameIdMap=new Map();const ALIASES=new Map();const typeNameIdOfArray=buildTypeMapIndex("array");const typeNameIdOfSlice=buildTypeMapIndex("slice");const typeNameIdOfArrayOrSlice=buildTypeMapIndex("[]");const typeNameIdOfTuple=buildTypeMapIndex("tuple");const typeNameIdOfUnit=buildTypeMapIndex("unit");const typeNameIdOfTupleOrUnit=buildTypeMapIndex("()");const 
typeNameIdOfFn=buildTypeMapIndex("fn");const typeNameIdOfFnMut=buildTypeMapIndex("fnmut");const typeNameIdOfFnOnce=buildTypeMapIndex("fnonce");const typeNameIdOfHof=buildTypeMapIndex("->");function buildTypeMapIndex(name,isAssocType){if(name===""||name===null){return null}if(typeNameIdMap.has(name)){const obj=typeNameIdMap.get(name);obj.assocOnly=isAssocType&&obj.assocOnly;return obj.id}else{const id=typeNameIdMap.size;typeNameIdMap.set(name,{id,assocOnly:isAssocType});return id}}function isSpecialStartCharacter(c){return"<\"".indexOf(c)!==-1}function isEndCharacter(c){return"=,>-])".indexOf(c)!==-1}function itemTypeFromName(typename){const index=itemTypes.findIndex(i=>i===typename);if(index<0){throw["Unknown type filter ",typename]}return index}function getStringElem(query,parserState,isInGenerics){if(isInGenerics){throw["Unexpected ","\""," in generics"]}else if(query.literalSearch){throw["Cannot have more than one literal search element"]}else if(parserState.totalElems-parserState.genericsElems>0){throw["Cannot use literal search when there is more than one element"]}parserState.pos+=1;const start=parserState.pos;const end=getIdentEndPosition(parserState);if(parserState.pos>=parserState.length){throw["Unclosed ","\""]}else if(parserState.userQuery[end]!=="\""){throw["Unexpected ",parserState.userQuery[end]," in a string element"]}else if(start===end){throw["Cannot have empty string element"]}parserState.pos+=1;query.literalSearch=true}function isPathStart(parserState){return parserState.userQuery.slice(parserState.pos,parserState.pos+2)==="::"}function isReturnArrow(parserState){return parserState.userQuery.slice(parserState.pos,parserState.pos+2)==="->"}function isIdentCharacter(c){return(c==="_"||(c>="0"&&c<="9")||(c>="a"&&c<="z")||(c>="A"&&c<="Z"))}function isSeparatorCharacter(c){return c===","||c==="="}function isPathSeparator(c){return c===":"||c===" "}function prevIs(parserState,lookingFor){let pos=parserState.pos;while(pos>0){const c=parserState.userQuery[pos-1];if(c===lookingFor){return true}else if(c!==" "){break}pos-=1}return false}function isLastElemGeneric(elems,parserState){return(elems.length>0&&elems[elems.length-1].generics.length>0)||prevIs(parserState,">")}function skipWhitespace(parserState){while(parserState.pos0){throw["Cannot have more than one element if you use quotes"]}const typeFilter=parserState.typeFilter;parserState.typeFilter=null;if(name==="!"){if(typeFilter!==null&&typeFilter!=="primitive"){throw["Invalid search type: primitive never type ","!"," and ",typeFilter," both specified",]}if(generics.length!==0){throw["Never type ","!"," does not accept generic parameters",]}const bindingName=parserState.isInBinding;parserState.isInBinding=null;return makePrimitiveElement("never",{bindingName})}const quadcolon=/::\s*::/.exec(path);if(path.startsWith("::")){throw["Paths cannot start with ","::"]}else if(path.endsWith("::")){throw["Paths cannot end with ","::"]}else if(quadcolon!==null){throw["Unexpected ",quadcolon[0]]}const pathSegments=path.split(/(?:::\s*)|(?:\s+(?:::\s*)?)/);if(pathSegments.length===0||(pathSegments.length===1&&pathSegments[0]==="")){if(generics.length>0||prevIs(parserState,">")){throw["Found generics without a path"]}else{throw["Unexpected ",parserState.userQuery[parserState.pos]]}}for(const[i,pathSegment]of pathSegments.entries()){if(pathSegment==="!"){if(i!==0){throw["Never type ","!"," is not associated item"]}pathSegments[i]="never"}}parserState.totalElems+=1;if(isInGenerics){parserState.genericsElems+=1}const 
bindingName=parserState.isInBinding;parserState.isInBinding=null;const bindings=new Map();const pathLast=pathSegments[pathSegments.length-1];return{name:name.trim(),id:null,fullPath:pathSegments,pathWithoutLast:pathSegments.slice(0,pathSegments.length-1),pathLast,normalizedPathLast:pathLast.replace(/_/g,""),generics:generics.filter(gen=>{if(gen.bindingName!==null){if(gen.name!==null){gen.bindingName.generics.unshift(gen)}bindings.set(gen.bindingName.name,gen.bindingName.generics);return false}return true}),bindings,typeFilter,bindingName,}}function getIdentEndPosition(parserState){const start=parserState.pos;let end=parserState.pos;let foundExclamation=-1;while(parserState.pos0){throw["Unexpected ",c," after ",parserState.userQuery[parserState.pos-1]]}else{throw["Unexpected ",c]}}parserState.pos+=1;end=parserState.pos}if(foundExclamation!==-1&&foundExclamation!==start&&isIdentCharacter(parserState.userQuery[foundExclamation-1])){if(parserState.typeFilter===null){parserState.typeFilter="macro"}else if(parserState.typeFilter!=="macro"){throw["Invalid search type: macro ","!"," and ",parserState.typeFilter," both specified",]}end=foundExclamation}return end}function getFilteredNextElem(query,parserState,elems,isInGenerics){const start=parserState.pos;if(parserState.userQuery[parserState.pos]===":"&&!isPathStart(parserState)){throw["Expected type filter before ",":"]}getNextElem(query,parserState,elems,isInGenerics);if(parserState.userQuery[parserState.pos]===":"&&!isPathStart(parserState)){if(parserState.typeFilter!==null){throw["Unexpected ",":"," (expected path after type filter ",parserState.typeFilter+":",")",]}if(elems.length===0){throw["Expected type filter before ",":"]}else if(query.literalSearch){throw["Cannot use quotes on type filter"]}const typeFilterElem=elems.pop();checkExtraTypeFilterCharacters(start,parserState);parserState.typeFilter=typeFilterElem.name;parserState.pos+=1;parserState.totalElems-=1;query.literalSearch=false;getNextElem(query,parserState,elems,isInGenerics)}}function getNextElem(query,parserState,elems,isInGenerics){const generics=[];skipWhitespace(parserState);let start=parserState.pos;let end;if("[(".indexOf(parserState.userQuery[parserState.pos])!==-1){let endChar=")";let name="()";let friendlyName="tuple";if(parserState.userQuery[parserState.pos]==="["){endChar="]";name="[]";friendlyName="slice"}parserState.pos+=1;const{foundSeparator}=getItemsBefore(query,parserState,generics,endChar);const typeFilter=parserState.typeFilter;const bindingName=parserState.isInBinding;parserState.typeFilter=null;parserState.isInBinding=null;for(const gen of generics){if(gen.bindingName!==null){throw["Type parameter ","=",` cannot be within ${friendlyName} `,name]}}if(name==="()"&&!foundSeparator&&generics.length===1&&typeFilter===null){elems.push(generics[0])}else if(name==="()"&&generics.length===1&&generics[0].name==="->"){generics[0].typeFilter=typeFilter;elems.push(generics[0])}else{if(typeFilter!==null&&typeFilter!=="primitive"){throw["Invalid search type: primitive ",name," and ",typeFilter," both specified",]}parserState.totalElems+=1;if(isInGenerics){parserState.genericsElems+=1}elems.push(makePrimitiveElement(name,{bindingName,generics}))}}else if(parserState.userQuery[parserState.pos]==="&"){if(parserState.typeFilter!==null&&parserState.typeFilter!=="primitive"){throw["Invalid search type: primitive ","&"," and ",parserState.typeFilter," both specified",]}parserState.typeFilter=null;parserState.pos+=1;let c=parserState.userQuery[parserState.pos];while(c===" 
"&&parserState.pos=end){throw["Found generics without a path"]}parserState.pos+=1;getItemsBefore(query,parserState,generics,">")}else if(parserState.pos=end){throw["Found generics without a path"]}if(parserState.isInBinding){throw["Unexpected ","("," after ","="]}parserState.pos+=1;const typeFilter=parserState.typeFilter;parserState.typeFilter=null;getItemsBefore(query,parserState,generics,")");skipWhitespace(parserState);if(isReturnArrow(parserState)){parserState.pos+=2;skipWhitespace(parserState);getFilteredNextElem(query,parserState,generics,isInGenerics);generics[generics.length-1].bindingName=makePrimitiveElement("output")}else{generics.push(makePrimitiveElement(null,{bindingName:makePrimitiveElement("output"),typeFilter:null,}))}parserState.typeFilter=typeFilter}if(isStringElem){skipWhitespace(parserState)}if(start>=end&&generics.length===0){return}if(parserState.userQuery[parserState.pos]==="="){if(parserState.isInBinding){throw["Cannot write ","="," twice in a binding"]}if(!isInGenerics){throw["Type parameter ","="," must be within generics list"]}const name=parserState.userQuery.slice(start,end).trim();if(name==="!"){throw["Type parameter ","="," key cannot be ","!"," never type"]}if(name.includes("!")){throw["Type parameter ","="," key cannot be ","!"," macro"]}if(name.includes("::")){throw["Type parameter ","="," key cannot contain ","::"," path"]}if(name.includes(":")){throw["Type parameter ","="," key cannot contain ",":"," type"]}parserState.isInBinding={name,generics}}else{elems.push(createQueryElement(query,parserState,parserState.userQuery.slice(start,end),generics,isInGenerics,),)}}}function getItemsBefore(query,parserState,elems,endChar){let foundStopChar=true;let foundSeparator=false;const oldTypeFilter=parserState.typeFilter;parserState.typeFilter=null;const oldIsInBinding=parserState.isInBinding;parserState.isInBinding=null;let hofParameters=null;let extra="";if(endChar===">"){extra="<"}else if(endChar==="]"){extra="["}else if(endChar===")"){extra="("}else if(endChar===""){extra="->"}else{extra=endChar}while(parserState.pos"," after ","="]}hofParameters=[...elems];elems.length=0;parserState.pos+=2;foundStopChar=true;foundSeparator=false;continue}else if(c===" "){parserState.pos+=1;continue}else if(isSeparatorCharacter(c)){parserState.pos+=1;foundStopChar=true;foundSeparator=true;continue}else if(c===":"&&isPathStart(parserState)){throw["Unexpected ","::",": paths cannot start with ","::"]}else if(isEndCharacter(c)){throw["Unexpected ",c," after ",extra]}if(!foundStopChar){let extra=[];if(isLastElemGeneric(query.elems,parserState)){extra=[" after ",">"]}else if(prevIs(parserState,"\"")){throw["Cannot have more than one element if you use quotes"]}if(endChar!==""){throw["Expected ",",",", ","=",", or ",endChar,...extra,", found ",c,]}throw["Expected ",","," or ","=",...extra,", found ",c,]}const posBefore=parserState.pos;getFilteredNextElem(query,parserState,elems,endChar!=="");if(endChar!==""&&parserState.pos>=parserState.length){throw["Unclosed ",extra]}if(posBefore===parserState.pos){parserState.pos+=1}foundStopChar=false}if(parserState.pos>=parserState.length&&endChar!==""){throw["Unclosed ",extra]}parserState.pos+=1;if(hofParameters){foundSeparator=false;if([...elems,...hofParameters].some(x=>x.bindingName)||parserState.isInBinding){throw["Unexpected ","="," within ","->"]}const hofElem=makePrimitiveElement("->",{generics:hofParameters,bindings:new 
Map([["output",[...elems]]]),typeFilter:null,});elems.length=0;elems[0]=hofElem}parserState.typeFilter=oldTypeFilter;parserState.isInBinding=oldIsInBinding;return{foundSeparator}}function checkExtraTypeFilterCharacters(start,parserState){const query=parserState.userQuery.slice(start,parserState.pos).trim();for(const c in query){if(!isIdentCharacter(query[c])){throw["Unexpected ",query[c]," in type filter (before ",":",")",]}}}function parseInput(query,parserState){let foundStopChar=true;while(parserState.pos"){if(isReturnArrow(parserState)){break}throw["Unexpected ",c," (did you mean ","->","?)"]}else if(parserState.pos>0){throw["Unexpected ",c," after ",parserState.userQuery[parserState.pos-1]]}throw["Unexpected ",c]}else if(c===" "){skipWhitespace(parserState);continue}if(!foundStopChar){let extra="";if(isLastElemGeneric(query.elems,parserState)){extra=[" after ",">"]}else if(prevIs(parserState,"\"")){throw["Cannot have more than one element if you use quotes"]}if(parserState.typeFilter!==null){throw["Expected ",","," or ","->",...extra,", found ",c,]}throw["Expected ",",",", ",":"," or ","->",...extra,", found ",c,]}const before=query.elems.length;getFilteredNextElem(query,parserState,query.elems,false);if(query.elems.length===before){parserState.pos+=1}foundStopChar=false}if(parserState.typeFilter!==null){throw["Unexpected ",":"," (expected path after type filter ",parserState.typeFilter+":",")",]}while(parserState.pos"]}break}else{parserState.pos+=1}}}function newParsedQuery(userQuery){return{original:userQuery,userQuery:userQuery.toLowerCase(),elems:[],returned:[],foundElems:0,totalElems:0,literalSearch:false,error:null,correction:null,proposeCorrectionFrom:null,proposeCorrectionTo:null,typeFingerprint:new Uint32Array(4),}}function buildUrl(search,filterCrates){let extra="?search="+encodeURIComponent(search);if(filterCrates!==null){extra+="&filter-crate="+encodeURIComponent(filterCrates)}return getNakedUrl()+extra+window.location.hash}function getFilterCrates(){const elem=document.getElementById("crate-search");if(elem&&elem.value!=="all crates"&&rawSearchIndex.has(elem.value)){return elem.value}return null}function parseQuery(userQuery){function convertTypeFilterOnElem(elem){if(elem.typeFilter!==null){let typeFilter=elem.typeFilter;if(typeFilter==="const"){typeFilter="constant"}elem.typeFilter=itemTypeFromName(typeFilter)}else{elem.typeFilter=NO_TYPE_FILTER}for(const elem2 of elem.generics){convertTypeFilterOnElem(elem2)}for(const constraints of elem.bindings.values()){for(const constraint of constraints){convertTypeFilterOnElem(constraint)}}}userQuery=userQuery.trim().replace(/\r|\n|\t/g," ");const parserState={length:userQuery.length,pos:0,totalElems:0,genericsElems:0,typeFilter:null,isInBinding:null,userQuery:userQuery.toLowerCase(),};let query=newParsedQuery(userQuery);try{parseInput(query,parserState);for(const elem of query.elems){convertTypeFilterOnElem(elem)}for(const elem of query.returned){convertTypeFilterOnElem(elem)}}catch(err){query=newParsedQuery(userQuery);query.error=err;return query}if(!query.literalSearch){query.literalSearch=parserState.totalElems>1}query.foundElems=query.elems.length+query.returned.length;query.totalElems=parserState.totalElems;return query}function createQueryResults(results_in_args,results_returned,results_others,parsedQuery){return{"in_args":results_in_args,"returned":results_returned,"others":results_others,"query":parsedQuery,}}async function execQuery(parsedQuery,filterCrates,currentCrate){const results_others=new Map(),results_in_args=new 
Map(),results_returned=new Map();function transformResults(results){const duplicates=new Set();const out=[];for(const result of results){if(result.id!==-1){const obj=searchIndex[result.id];obj.dist=result.dist;const res=buildHrefAndPath(obj);obj.displayPath=pathSplitter(res[0]);obj.fullPath=res[2]+"|"+obj.ty;if(duplicates.has(obj.fullPath)){continue}if(obj.ty===TY_IMPORT&&duplicates.has(res[2])){continue}if(duplicates.has(res[2]+"|"+TY_IMPORT)){continue}duplicates.add(obj.fullPath);duplicates.add(res[2]);obj.href=res[1];out.push(obj);if(out.length>=MAX_RESULTS){break}}}return out}async function sortResults(results,isType,preferredCrate){const userQuery=parsedQuery.userQuery;const result_list=[];for(const result of results.values()){result.item=searchIndex[result.id];result.word=searchIndex[result.id].word;result_list.push(result)}result_list.sort((aaa,bbb)=>{let a,b;a=(aaa.word!==userQuery);b=(bbb.word!==userQuery);if(a!==b){return a-b}a=(aaa.index<0);b=(bbb.index<0);if(a!==b){return a-b}a=aaa.path_dist;b=bbb.path_dist;if(a!==b){return a-b}a=aaa.index;b=bbb.index;if(a!==b){return a-b}a=(aaa.dist);b=(bbb.dist);if(a!==b){return a-b}a=searchIndexDeprecated.get(aaa.item.crate).contains(aaa.item.bitIndex);b=searchIndexDeprecated.get(bbb.item.crate).contains(bbb.item.bitIndex);if(a!==b){return a-b}a=(aaa.item.crate!==preferredCrate);b=(bbb.item.crate!==preferredCrate);if(a!==b){return a-b}a=aaa.word.length;b=bbb.word.length;if(a!==b){return a-b}a=aaa.word;b=bbb.word;if(a!==b){return(a>b?+1:-1)}a=searchIndexEmptyDesc.get(aaa.item.crate).contains(aaa.item.bitIndex);b=searchIndexEmptyDesc.get(bbb.item.crate).contains(bbb.item.bitIndex);if(a!==b){return a-b}a=aaa.item.ty;b=bbb.item.ty;if(a!==b){return a-b}a=aaa.item.path;b=bbb.item.path;if(a!==b){return(a>b?+1:-1)}return 0});return transformResults(result_list)}function unifyFunctionTypes(fnTypesIn,queryElems,whereClause,mgensIn,solutionCb,unboxingDepth,){if(unboxingDepth>=UNBOXING_LIMIT){return false}const mgens=mgensIn===null?null:new Map(mgensIn);if(queryElems.length===0){return!solutionCb||solutionCb(mgens)}if(!fnTypesIn||fnTypesIn.length===0){return false}const ql=queryElems.length;const fl=fnTypesIn.length;if(ql===1&&queryElems[0].generics.length===0&&queryElems[0].bindings.size===0){const queryElem=queryElems[0];for(const fnType of fnTypesIn){if(!unifyFunctionTypeIsMatchCandidate(fnType,queryElem,mgens)){continue}if(fnType.id<0&&queryElem.id<0){if(mgens&&mgens.has(fnType.id)&&mgens.get(fnType.id)!==queryElem.id){continue}const mgensScratch=new Map(mgens);mgensScratch.set(fnType.id,queryElem.id);if(!solutionCb||solutionCb(mgensScratch)){return true}}else if(!solutionCb||solutionCb(mgens?new Map(mgens):null)){return true}}for(const fnType of fnTypesIn){if(!unifyFunctionTypeIsUnboxCandidate(fnType,queryElem,whereClause,mgens,unboxingDepth+1,)){continue}if(fnType.id<0){if(mgens&&mgens.has(fnType.id)&&mgens.get(fnType.id)!==0){continue}const mgensScratch=new Map(mgens);mgensScratch.set(fnType.id,0);if(unifyFunctionTypes(whereClause[(-fnType.id)-1],queryElems,whereClause,mgensScratch,solutionCb,unboxingDepth+1,)){return true}}else if(unifyFunctionTypes([...fnType.generics,...Array.from(fnType.bindings.values()).flat()],queryElems,whereClause,mgens?new Map(mgens):null,solutionCb,unboxingDepth+1,)){return true}}return false}const fnTypes=fnTypesIn.slice();const flast=fl-1;const qlast=ql-1;const queryElem=queryElems[qlast];let queryElemsTmp=null;for(let i=flast;i>=0;i-=1){const 
fnType=fnTypes[i];if(!unifyFunctionTypeIsMatchCandidate(fnType,queryElem,mgens)){continue}let mgensScratch;if(fnType.id<0){mgensScratch=new Map(mgens);if(mgensScratch.has(fnType.id)&&mgensScratch.get(fnType.id)!==queryElem.id){continue}mgensScratch.set(fnType.id,queryElem.id)}else{mgensScratch=mgens}fnTypes[i]=fnTypes[flast];fnTypes.length=flast;if(!queryElemsTmp){queryElemsTmp=queryElems.slice(0,qlast)}const passesUnification=unifyFunctionTypes(fnTypes,queryElemsTmp,whereClause,mgensScratch,mgensScratch=>{if(fnType.generics.length===0&&queryElem.generics.length===0&&fnType.bindings.size===0&&queryElem.bindings.size===0){return!solutionCb||solutionCb(mgensScratch)}const solution=unifyFunctionTypeCheckBindings(fnType,queryElem,whereClause,mgensScratch,unboxingDepth,);if(!solution){return false}const simplifiedGenerics=solution.simplifiedGenerics;for(const simplifiedMgens of solution.mgens){const passesUnification=unifyFunctionTypes(simplifiedGenerics,queryElem.generics,whereClause,simplifiedMgens,solutionCb,unboxingDepth,);if(passesUnification){return true}}return false},unboxingDepth,);if(passesUnification){return true}fnTypes[flast]=fnTypes[i];fnTypes[i]=fnType;fnTypes.length=fl}for(let i=flast;i>=0;i-=1){const fnType=fnTypes[i];if(!unifyFunctionTypeIsUnboxCandidate(fnType,queryElem,whereClause,mgens,unboxingDepth+1,)){continue}let mgensScratch;if(fnType.id<0){mgensScratch=new Map(mgens);if(mgensScratch.has(fnType.id)&&mgensScratch.get(fnType.id)!==0){continue}mgensScratch.set(fnType.id,0)}else{mgensScratch=mgens}const generics=fnType.id<0?whereClause[(-fnType.id)-1]:fnType.generics;const bindings=fnType.bindings?Array.from(fnType.bindings.values()).flat():[];const passesUnification=unifyFunctionTypes(fnTypes.toSpliced(i,1,...generics,...bindings),queryElems,whereClause,mgensScratch,solutionCb,unboxingDepth+1,);if(passesUnification){return true}}return false}function unifyFunctionTypeIsMatchCandidate(fnType,queryElem,mgensIn){if(!typePassesFilter(queryElem.typeFilter,fnType.ty)){return false}if(fnType.id<0&&queryElem.id<0){if(mgensIn){if(mgensIn.has(fnType.id)&&mgensIn.get(fnType.id)!==queryElem.id){return false}for(const[fid,qid]of mgensIn.entries()){if(fnType.id!==fid&&queryElem.id===qid){return false}if(fnType.id===fid&&queryElem.id!==qid){return false}}}return true}else{if(queryElem.id===typeNameIdOfArrayOrSlice&&(fnType.id===typeNameIdOfSlice||fnType.id===typeNameIdOfArray)){}else if(queryElem.id===typeNameIdOfTupleOrUnit&&(fnType.id===typeNameIdOfTuple||fnType.id===typeNameIdOfUnit)){}else if(queryElem.id===typeNameIdOfHof&&(fnType.id===typeNameIdOfFn||fnType.id===typeNameIdOfFnMut||fnType.id===typeNameIdOfFnOnce)){}else if(fnType.id!==queryElem.id||queryElem.id===null){return false}if((fnType.generics.length+fnType.bindings.size)===0&&queryElem.generics.length!==0){return false}if(fnType.bindings.size0){const fnTypePath=fnType.path!==undefined&&fnType.path!==null?fnType.path.split("::"):[];if(queryElemPathLength>fnTypePath.length){return false}let i=0;for(const path of fnTypePath){if(path===queryElem.pathWithoutLast[i]){i+=1;if(i>=queryElemPathLength){break}}}if(i0){let mgensSolutionSet=[mgensIn];for(const[name,constraints]of queryElem.bindings.entries()){if(mgensSolutionSet.length===0){return false}if(!fnType.bindings.has(name)){return false}const fnTypeBindings=fnType.bindings.get(name);mgensSolutionSet=mgensSolutionSet.flatMap(mgens=>{const newSolutions=[];unifyFunctionTypes(fnTypeBindings,constraints,whereClause,mgens,newMgens=>{newSolutions.push(newMgens);return 
false},unboxingDepth,);return newSolutions})}if(mgensSolutionSet.length===0){return false}const binds=Array.from(fnType.bindings.entries()).flatMap(entry=>{const[name,constraints]=entry;if(queryElem.bindings.has(name)){return[]}else{return constraints}});if(simplifiedGenerics.length>0){simplifiedGenerics=[...simplifiedGenerics,...binds]}else{simplifiedGenerics=binds}return{simplifiedGenerics,mgens:mgensSolutionSet}}return{simplifiedGenerics,mgens:[mgensIn]}}function unifyFunctionTypeIsUnboxCandidate(fnType,queryElem,whereClause,mgens,unboxingDepth,){if(unboxingDepth>=UNBOXING_LIMIT){return false}if(fnType.id<0&&queryElem.id>=0){if(!whereClause){return false}if(mgens&&mgens.has(fnType.id)&&mgens.get(fnType.id)!==0){return false}const mgensTmp=new Map(mgens);mgensTmp.set(fnType.id,null);return checkIfInList(whereClause[(-fnType.id)-1],queryElem,whereClause,mgensTmp,unboxingDepth,)}else if(fnType.generics.length>0||fnType.bindings.size>0){const simplifiedGenerics=[...fnType.generics,...Array.from(fnType.bindings.values()).flat(),];return checkIfInList(simplifiedGenerics,queryElem,whereClause,mgens,unboxingDepth,)}return false}function checkIfInList(list,elem,whereClause,mgens,unboxingDepth){for(const entry of list){if(checkType(entry,elem,whereClause,mgens,unboxingDepth)){return true}}return false}function checkType(row,elem,whereClause,mgens,unboxingDepth){if(unboxingDepth>=UNBOXING_LIMIT){return false}if(row.bindings.size===0&&elem.bindings.size===0){if(elem.id<0&&mgens===null){return row.id<0||checkIfInList(row.generics,elem,whereClause,mgens,unboxingDepth+1,)}if(row.id>0&&elem.id>0&&elem.pathWithoutLast.length===0&&typePassesFilter(elem.typeFilter,row.ty)&&elem.generics.length===0&&elem.id!==typeNameIdOfArrayOrSlice&&elem.id!==typeNameIdOfTupleOrUnit&&elem.id!==typeNameIdOfHof){return row.id===elem.id||checkIfInList(row.generics,elem,whereClause,mgens,unboxingDepth,)}}return unifyFunctionTypes([row],[elem],whereClause,mgens,null,unboxingDepth)}function checkPath(contains,ty){if(contains.length===0){return 0}const maxPathEditDistance=Math.floor(contains.reduce((acc,next)=>acc+next.length,0)/3,);let ret_dist=maxPathEditDistance+1;const path=ty.path.split("::");if(ty.parent&&ty.parent.name){path.push(ty.parent.name.toLowerCase())}const length=path.length;const clength=contains.length;pathiter:for(let i=length-clength;i>=0;i-=1){let dist_total=0;for(let x=0;xmaxPathEditDistance){continue pathiter}dist_total+=dist}}ret_dist=Math.min(ret_dist,Math.round(dist_total/clength))}return ret_dist>maxPathEditDistance?null:ret_dist}function typePassesFilter(filter,type){if(filter<=NO_TYPE_FILTER||filter===type)return true;const name=itemTypes[type];switch(itemTypes[filter]){case"constant":return name==="associatedconstant";case"fn":return name==="method"||name==="tymethod";case"type":return name==="primitive"||name==="associatedtype";case"trait":return name==="traitalias"}return false}function createAliasFromItem(item){return{crate:item.crate,name:item.name,path:item.path,descShard:item.descShard,descIndex:item.descIndex,exactPath:item.exactPath,ty:item.ty,parent:item.parent,type:item.type,is_alias:true,bitIndex:item.bitIndex,implDisambiguator:item.implDisambiguator,}}function handleAliases(ret,query,filterCrates,currentCrate){const lowerQuery=query.toLowerCase();const aliases=[];const crateAliases=[];if(filterCrates!==null){if(ALIASES.has(filterCrates)&&ALIASES.get(filterCrates).has(lowerQuery)){const query_aliases=ALIASES.get(filterCrates).get(lowerQuery);for(const alias of 
query_aliases){aliases.push(createAliasFromItem(searchIndex[alias]))}}}else{for(const[crate,crateAliasesIndex]of ALIASES){if(crateAliasesIndex.has(lowerQuery)){const pushTo=crate===currentCrate?crateAliases:aliases;const query_aliases=crateAliasesIndex.get(lowerQuery);for(const alias of query_aliases){pushTo.push(createAliasFromItem(searchIndex[alias]))}}}}const sortFunc=(aaa,bbb)=>{if(aaa.path{alias.alias=query;const res=buildHrefAndPath(alias);alias.displayPath=pathSplitter(res[0]);alias.fullPath=alias.displayPath+alias.name;alias.href=res[1];ret.others.unshift(alias);if(ret.others.length>MAX_RESULTS){ret.others.pop()}};aliases.forEach(pushFunc);crateAliases.forEach(pushFunc)}function addIntoResults(results,fullId,id,index,dist,path_dist,maxEditDistance){if(dist<=maxEditDistance||index!==-1){if(results.has(fullId)){const result=results.get(fullId);if(result.dontValidate||result.dist<=dist){return}}results.set(fullId,{id:id,index:index,dontValidate:parsedQuery.literalSearch,dist:dist,path_dist:path_dist,})}}function handleSingleArg(row,pos,elem,results_others,results_in_args,results_returned,maxEditDistance,){if(!row||(filterCrates!==null&&row.crate!==filterCrates)){return}let path_dist=0;const fullId=row.id;const tfpDist=compareTypeFingerprints(fullId,parsedQuery.typeFingerprint,);if(tfpDist!==null){const in_args=row.type&&row.type.inputs&&checkIfInList(row.type.inputs,elem,row.type.where_clause,null,0);const returned=row.type&&row.type.output&&checkIfInList(row.type.output,elem,row.type.where_clause,null,0);if(in_args){results_in_args.max_dist=Math.max(results_in_args.max_dist||0,tfpDist);const maxDist=results_in_args.sizenormalizedIndex&&normalizedIndex!==-1)){index=normalizedIndex}if(elem.fullPath.length>1){path_dist=checkPath(elem.pathWithoutLast,row);if(path_dist===null){return}}if(parsedQuery.literalSearch){if(row.word===elem.pathLast){addIntoResults(results_others,fullId,pos,index,0,path_dist)}return}const dist=editDistance(row.normalizedName,elem.normalizedPathLast,maxEditDistance);if(index===-1&&dist>maxEditDistance){return}addIntoResults(results_others,fullId,pos,index,dist,path_dist,maxEditDistance)}function handleArgs(row,pos,results){if(!row||(filterCrates!==null&&row.crate!==filterCrates)||!row.type){return}const tfpDist=compareTypeFingerprints(row.id,parsedQuery.typeFingerprint,);if(tfpDist===null){return}if(results.size>=MAX_RESULTS&&tfpDist>results.max_dist){return}if(!unifyFunctionTypes(row.type.inputs,parsedQuery.elems,row.type.where_clause,null,mgens=>{return unifyFunctionTypes(row.type.output,parsedQuery.returned,row.type.where_clause,mgens,null,0,)},0,)){return}results.max_dist=Math.max(results.max_dist||0,tfpDist);addIntoResults(results,row.id,pos,0,tfpDist,0,Number.MAX_VALUE)}function innerRunQuery(){const queryLen=parsedQuery.elems.reduce((acc,next)=>acc+next.pathLast.length,0)+parsedQuery.returned.reduce((acc,next)=>acc+next.pathLast.length,0);const maxEditDistance=Math.floor(queryLen/3);const genericSymbols=new Map();function convertNameToId(elem,isAssocType){if(typeNameIdMap.has(elem.normalizedPathLast)&&(isAssocType||!typeNameIdMap.get(elem.normalizedPathLast).assocOnly)){elem.id=typeNameIdMap.get(elem.normalizedPathLast).id}else if(!parsedQuery.literalSearch){let match=null;let matchDist=maxEditDistance+1;let matchName="";for(const[name,{id,assocOnly}]of typeNameIdMap){const 
dist=editDistance(name,elem.normalizedPathLast,maxEditDistance);if(dist<=matchDist&&dist<=maxEditDistance&&(isAssocType||!assocOnly)){if(dist===matchDist&&matchName>name){continue}match=id;matchDist=dist;matchName=name}}if(match!==null){parsedQuery.correction=matchName}elem.id=match}if((elem.id===null&&parsedQuery.totalElems>1&&elem.typeFilter===-1&&elem.generics.length===0&&elem.bindings.size===0)||elem.typeFilter===TY_GENERIC){if(genericSymbols.has(elem.name)){elem.id=genericSymbols.get(elem.name)}else{elem.id=-(genericSymbols.size+1);genericSymbols.set(elem.name,elem.id)}if(elem.typeFilter===-1&&elem.name.length>=3){const maxPartDistance=Math.floor(elem.name.length/3);let matchDist=maxPartDistance+1;let matchName="";for(const name of typeNameIdMap.keys()){const dist=editDistance(name,elem.name,maxPartDistance);if(dist<=matchDist&&dist<=maxPartDistance){if(dist===matchDist&&matchName>name){continue}matchDist=dist;matchName=name}}if(matchName!==""){parsedQuery.proposeCorrectionFrom=elem.name;parsedQuery.proposeCorrectionTo=matchName}}elem.typeFilter=TY_GENERIC}if(elem.generics.length>0&&elem.typeFilter===TY_GENERIC){parsedQuery.error=["Generic type parameter ",elem.name," does not accept generic parameters",]}for(const elem2 of elem.generics){convertNameToId(elem2)}elem.bindings=new Map(Array.from(elem.bindings.entries()).map(entry=>{const[name,constraints]=entry;if(!typeNameIdMap.has(name)){parsedQuery.error=["Type parameter ",name," does not exist",];return[null,[]]}for(const elem2 of constraints){convertNameToId(elem2)}return[typeNameIdMap.get(name).id,constraints]}),)}const fps=new Set();for(const elem of parsedQuery.elems){convertNameToId(elem);buildFunctionTypeFingerprint(elem,parsedQuery.typeFingerprint,fps)}for(const elem of parsedQuery.returned){convertNameToId(elem);buildFunctionTypeFingerprint(elem,parsedQuery.typeFingerprint,fps)}if(parsedQuery.foundElems===1&&parsedQuery.returned.length===0){if(parsedQuery.elems.length===1){const elem=parsedQuery.elems[0];for(let i=0,nSearchIndex=searchIndex.length;i0){const sortQ=(a,b)=>{const ag=a.generics.length===0&&a.bindings.size===0;const bg=b.generics.length===0&&b.bindings.size===0;if(ag!==bg){return ag-bg}const ai=a.id>0;const bi=b.id>0;return ai-bi};parsedQuery.elems.sort(sortQ);parsedQuery.returned.sort(sortQ);for(let i=0,nSearchIndex=searchIndex.length;i{const descs=await Promise.all(list.map(result=>{return searchIndexEmptyDesc.get(result.crate).contains(result.bitIndex)?"":searchState.loadDesc(result)}));for(const[i,result]of list.entries()){result.desc=descs[i]}}));if(parsedQuery.error!==null&&ret.others.length!==0){ret.query.error=null}return ret}function nextTab(direction){const next=(searchState.currentTab+direction+3)%searchState.focusedByTab.length;searchState.focusedByTab[searchState.currentTab]=document.activeElement;printTab(next);focusSearchResult()}function focusSearchResult(){const target=searchState.focusedByTab[searchState.currentTab]||document.querySelectorAll(".search-results.active a").item(0)||document.querySelectorAll("#search-tabs button").item(searchState.currentTab);searchState.focusedByTab[searchState.currentTab]=null;if(target){target.focus()}}function buildHrefAndPath(item){let displayPath;let href;const type=itemTypes[item.ty];const name=item.name;let path=item.path;let exactPath=item.exactPath;if(type==="mod"){displayPath=path+"::";href=ROOT_PATH+path.replace(/::/g,"/")+"/"+name+"/index.html"}else 
if(type==="import"){displayPath=item.path+"::";href=ROOT_PATH+item.path.replace(/::/g,"/")+"/index.html#reexport."+name}else if(type==="primitive"||type==="keyword"){displayPath="";href=ROOT_PATH+path.replace(/::/g,"/")+"/"+type+"."+name+".html"}else if(type==="externcrate"){displayPath="";href=ROOT_PATH+name+"/index.html"}else if(item.parent!==undefined){const myparent=item.parent;let anchor=type+"."+name;const parentType=itemTypes[myparent.ty];let pageType=parentType;let pageName=myparent.name;exactPath=`${myparent.exactPath}::${myparent.name}`;if(parentType==="primitive"){displayPath=myparent.name+"::"}else if(type==="structfield"&&parentType==="variant"){const enumNameIdx=item.path.lastIndexOf("::");const enumName=item.path.substr(enumNameIdx+2);path=item.path.substr(0,enumNameIdx);displayPath=path+"::"+enumName+"::"+myparent.name+"::";anchor="variant."+myparent.name+".field."+name;pageType="enum";pageName=enumName}else{displayPath=path+"::"+myparent.name+"::"}if(item.implDisambiguator!==null){anchor=item.implDisambiguator+"/"+anchor}href=ROOT_PATH+path.replace(/::/g,"/")+"/"+pageType+"."+pageName+".html#"+anchor}else{displayPath=item.path+"::";href=ROOT_PATH+item.path.replace(/::/g,"/")+"/"+type+"."+name+".html"}return[displayPath,href,`${exactPath}::${name}`]}function pathSplitter(path){const tmp=""+path.replace(/::/g,"::");if(tmp.endsWith("")){return tmp.slice(0,tmp.length-6)}return tmp}async function addTab(array,query,display){const extraClass=display?" active":"";const output=document.createElement("div");if(array.length>0){output.className="search-results "+extraClass;for(const item of array){const name=item.name;const type=itemTypes[item.ty];const longType=longItemTypes[item.ty];const typeName=longType.length!==0?`${longType}`:"?";const link=document.createElement("a");link.className="result-"+type;link.href=item.href;const resultName=document.createElement("div");resultName.className="result-name";resultName.insertAdjacentHTML("beforeend",`${typeName}`);link.appendChild(resultName);let alias=" ";if(item.is_alias){alias=`
\ +${item.alias} - see \ +
`}resultName.insertAdjacentHTML("beforeend",`
${alias}\ +${item.displayPath}${name}\ +
`);const description=document.createElement("div");description.className="desc";description.insertAdjacentHTML("beforeend",item.desc);link.appendChild(description);output.appendChild(link)}}else if(query.error===null){output.className="search-failed"+extraClass;output.innerHTML="No results :(
"+"Try on DuckDuckGo?

"+"Or try looking in one of these:"}return[output,array.length]}function makeTabHeader(tabNb,text,nbElems){const fmtNbElems=nbElems<10?`\u{2007}(${nbElems})\u{2007}\u{2007}`:nbElems<100?`\u{2007}(${nbElems})\u{2007}`:`\u{2007}(${nbElems})`;if(searchState.currentTab===tabNb){return""}return""}async function showResults(results,go_to_first,filterCrates){const search=searchState.outputElement();if(go_to_first||(results.others.length===1&&getSettingValue("go-to-only-result")==="true")){window.onunload=()=>{};searchState.removeQueryParameters();const elem=document.createElement("a");elem.href=results.others[0].href;removeClass(elem,"active");document.body.appendChild(elem);elem.click();return}if(results.query===undefined){results.query=parseQuery(searchState.input.value)}currentResults=results.query.userQuery;const[ret_others,ret_in_args,ret_returned]=await Promise.all([addTab(results.others,results.query,true),addTab(results.in_args,results.query,false),addTab(results.returned,results.query,false),]);let currentTab=searchState.currentTab;if((currentTab===0&&ret_others[1]===0)||(currentTab===1&&ret_in_args[1]===0)||(currentTab===2&&ret_returned[1]===0)){if(ret_others[1]!==0){currentTab=0}else if(ret_in_args[1]!==0){currentTab=1}else if(ret_returned[1]!==0){currentTab=2}}let crates="";if(rawSearchIndex.size>1){crates=" in 
"}let output=`

Results${crates}

`;if(results.query.error!==null){const error=results.query.error;error.forEach((value,index)=>{value=value.split("<").join("<").split(">").join(">");if(index%2!==0){error[index]=`${value.replaceAll(" ", " ")}`}else{error[index]=value}});output+=`

Query parser error: "${error.join("")}".

`;output+="
"+makeTabHeader(0,"In Names",ret_others[1])+"
";currentTab=0}else if(results.query.foundElems<=1&&results.query.returned.length===0){output+="
"+makeTabHeader(0,"In Names",ret_others[1])+makeTabHeader(1,"In Parameters",ret_in_args[1])+makeTabHeader(2,"In Return Types",ret_returned[1])+"
"}else{const signatureTabTitle=results.query.elems.length===0?"In Function Return Types":results.query.returned.length===0?"In Function Parameters":"In Function Signatures";output+="
"+makeTabHeader(0,signatureTabTitle,ret_others[1])+"
";currentTab=0}if(results.query.correction!==null){const orig=results.query.returned.length>0?results.query.returned[0].name:results.query.elems[0].name;output+="

"+`Type "${orig}" not found. `+"Showing results for closest type name "+`"${results.query.correction}" instead.

`}if(results.query.proposeCorrectionFrom!==null){const orig=results.query.proposeCorrectionFrom;const targ=results.query.proposeCorrectionTo;output+="

"+`Type "${orig}" not found and used as generic parameter. `+`Consider searching for "${targ}" instead.

`}const resultsElem=document.createElement("div");resultsElem.id="results";resultsElem.appendChild(ret_others[0]);resultsElem.appendChild(ret_in_args[0]);resultsElem.appendChild(ret_returned[0]);search.innerHTML=output;const crateSearch=document.getElementById("crate-search");if(crateSearch){crateSearch.addEventListener("input",updateCrate)}search.appendChild(resultsElem);searchState.showResults(search);const elems=document.getElementById("search-tabs").childNodes;searchState.focusedByTab=[];let i=0;for(const elem of elems){const j=i;elem.onclick=()=>printTab(j);searchState.focusedByTab.push(null);i+=1}printTab(currentTab)}function updateSearchHistory(url){if(!browserSupportsHistoryApi()){return}const params=searchState.getQueryStringParams();if(!history.state&&!params.search){history.pushState(null,"",url)}else{history.replaceState(null,"",url)}}async function search(forced){const query=parseQuery(searchState.input.value.trim());let filterCrates=getFilterCrates();if(!forced&&query.userQuery===currentResults){if(query.userQuery.length>0){putBackSearch()}return}searchState.setLoadingSearch();const params=searchState.getQueryStringParams();if(filterCrates===null&¶ms["filter-crate"]!==undefined){filterCrates=params["filter-crate"]}searchState.title="Results for "+query.original+" - Rust";updateSearchHistory(buildUrl(query.original,filterCrates));await showResults(await execQuery(query,filterCrates,window.currentCrate),params.go_to_first,filterCrates)}function buildItemSearchTypeAll(types,lowercasePaths){return types.length>0?types.map(type=>buildItemSearchType(type,lowercasePaths)):EMPTY_GENERICS_ARRAY}const EMPTY_BINDINGS_MAP=new Map();const EMPTY_GENERICS_ARRAY=[];let TYPES_POOL=new Map();function buildItemSearchType(type,lowercasePaths,isAssocType){const PATH_INDEX_DATA=0;const GENERICS_DATA=1;const BINDINGS_DATA=2;let pathIndex,generics,bindings;if(typeof type==="number"){pathIndex=type;generics=EMPTY_GENERICS_ARRAY;bindings=EMPTY_BINDINGS_MAP}else{pathIndex=type[PATH_INDEX_DATA];generics=buildItemSearchTypeAll(type[GENERICS_DATA],lowercasePaths,);if(type.length>BINDINGS_DATA&&type[BINDINGS_DATA].length>0){bindings=new Map(type[BINDINGS_DATA].map(binding=>{const[assocType,constraints]=binding;return[buildItemSearchType(assocType,lowercasePaths,true).id,buildItemSearchTypeAll(constraints,lowercasePaths),]}))}else{bindings=EMPTY_BINDINGS_MAP}}let result;if(pathIndex<0){result={id:pathIndex,ty:TY_GENERIC,path:null,exactPath:null,generics,bindings,}}else if(pathIndex===0){result={id:null,ty:null,path:null,exactPath:null,generics,bindings,}}else{const item=lowercasePaths[pathIndex-1];result={id:buildTypeMapIndex(item.name,isAssocType),ty:item.ty,path:item.path,exactPath:item.exactPath,generics,bindings,}}const cr=TYPES_POOL.get(result.id);if(cr){if(cr.generics.length===result.generics.length&&cr.generics!==result.generics&&cr.generics.every((x,i)=>result.generics[i]===x)){result.generics=cr.generics}if(cr.bindings.size===result.bindings.size&&cr.bindings!==result.bindings){let ok=true;for(const[k,v]of cr.bindings.entries()){const v2=result.bindings.get(v);if(!v2){ok=false;break}if(v!==v2&&v.length===v2.length&&v.every((x,i)=>v2[i]===x)){result.bindings.set(k,v)}else if(v!==v2){ok=false;break}}if(ok){result.bindings=cr.bindings}}if(cr.ty===result.ty&&cr.path===result.path&&cr.bindings===result.bindings&&cr.generics===result.generics&&cr.ty===result.ty){return cr}}TYPES_POOL.set(result.id,result);return result}function buildFunctionSearchTypeCallback(lowercasePaths){return 
functionSearchType=>{if(functionSearchType===0){return null}const INPUTS_DATA=0;const OUTPUT_DATA=1;let inputs,output;if(typeof functionSearchType[INPUTS_DATA]==="number"){inputs=[buildItemSearchType(functionSearchType[INPUTS_DATA],lowercasePaths)]}else{inputs=buildItemSearchTypeAll(functionSearchType[INPUTS_DATA],lowercasePaths,)}if(functionSearchType.length>1){if(typeof functionSearchType[OUTPUT_DATA]==="number"){output=[buildItemSearchType(functionSearchType[OUTPUT_DATA],lowercasePaths)]}else{output=buildItemSearchTypeAll(functionSearchType[OUTPUT_DATA],lowercasePaths,)}}else{output=[]}const where_clause=[];const l=functionSearchType.length;for(let i=2;i{k=(~~k+0x7ed55d16)+(k<<12);k=(k ^ 0xc761c23c)^(k>>>19);k=(~~k+0x165667b1)+(k<<5);k=(~~k+0xd3a2646c)^(k<<9);k=(~~k+0xfd7046c5)+(k<<3);return(k ^ 0xb55a4f09)^(k>>>16)};const hashint2=k=>{k=~k+(k<<15);k ^=k>>>12;k+=k<<2;k ^=k>>>4;k=Math.imul(k,2057);return k ^(k>>16)};if(input!==null){const h0a=hashint1(input);const h0b=hashint2(input);const h1a=~~(h0a+Math.imul(h0b,2));const h1b=~~(h0a+Math.imul(h0b,3));const h2a=~~(h0a+Math.imul(h0b,4));const h2b=~~(h0a+Math.imul(h0b,5));output[0]|=(1<<(h0a%32))|(1<<(h1b%32));output[1]|=(1<<(h1a%32))|(1<<(h2b%32));output[2]|=(1<<(h2a%32))|(1<<(h0b%32));fps.add(input)}for(const g of type.generics){buildFunctionTypeFingerprint(g,output,fps)}const fb={id:null,ty:0,generics:EMPTY_GENERICS_ARRAY,bindings:EMPTY_BINDINGS_MAP,};for(const[k,v]of type.bindings.entries()){fb.id=k;fb.generics=v;buildFunctionTypeFingerprint(fb,output,fps)}output[3]=fps.size}function compareTypeFingerprints(fullId,queryFingerprint){const fh0=functionTypeFingerprint[fullId*4];const fh1=functionTypeFingerprint[(fullId*4)+1];const fh2=functionTypeFingerprint[(fullId*4)+2];const[qh0,qh1,qh2]=queryFingerprint;const[in0,in1,in2]=[fh0&qh0,fh1&qh1,fh2&qh2];if((in0 ^ qh0)||(in1 ^ qh1)||(in2 ^ qh2)){return null}return functionTypeFingerprint[(fullId*4)+3]}class VlqHexDecoder{constructor(string,cons){this.string=string;this.cons=cons;this.offset=0;this.backrefQueue=[]}decodeList(){const cb="}".charCodeAt(0);let c=this.string.charCodeAt(this.offset);const ret=[];while(c!==cb){ret.push(this.decode());c=this.string.charCodeAt(this.offset)}this.offset+=1;return ret}decode(){const[ob,la]=["{","`"].map(c=>c.charCodeAt(0));let n=0;let c=this.string.charCodeAt(this.offset);if(c===ob){this.offset+=1;return this.decodeList()}while(c>1];this.offset+=1;return sign?-value:value}next(){const c=this.string.charCodeAt(this.offset);const[zero,ua,la]=["0","@","`"].map(c=>c.charCodeAt(0));if(c>=zero&&c16){this.backrefQueue.pop()}return result}}class RoaringBitmap{constructor(str){const strdecoded=atob(str);const u8array=new Uint8Array(strdecoded.length);for(let j=0;j=4){offsets=[];for(let j=0;j>3]&(1<<(j&0x7))){const runcount=(u8array[i]|(u8array[i+1]<<8));i+=2;this.containers.push(new RoaringBitmapRun(runcount,u8array.slice(i,i+(runcount*4)),));i+=runcount*4}else if(this.cardinalities[j]>=4096){this.containers.push(new RoaringBitmapBits(u8array.slice(i,i+8192)));i+=8192}else{const end=this.cardinalities[j]*2;this.containers.push(new RoaringBitmapArray(this.cardinalities[j],u8array.slice(i,i+end),));i+=end}}}contains(keyvalue){const key=keyvalue>>16;const value=keyvalue&0xFFFF;for(let i=0;i=start&&value<=(start+lenm1)){return true}}return false}}class RoaringBitmapArray{constructor(cardinality,array){this.cardinality=cardinality;this.array=array}contains(value){const l=this.cardinality*2;for(let i=0;i>3]&(1<<(value&7)))}}function 
buildIndex(rawSearchIndex){searchIndex=[];searchIndexDeprecated=new Map();searchIndexEmptyDesc=new Map();const charA="A".charCodeAt(0);let currentIndex=0;let id=0;for(const crate of rawSearchIndex.values()){id+=crate.t.length+1}functionTypeFingerprint=new Uint32Array((id+1)*4);id=0;for(const[crate,crateCorpus]of rawSearchIndex){const itemDescShardDecoder=new VlqHexDecoder(crateCorpus.D,noop=>noop);let descShard={crate,shard:0,start:0,len:itemDescShardDecoder.next(),promise:null,resolve:null,};const descShardList=[descShard];searchIndexDeprecated.set(crate,new RoaringBitmap(crateCorpus.c));searchIndexEmptyDesc.set(crate,new RoaringBitmap(crateCorpus.e));let descIndex=0;const crateRow={crate,ty:3,name:crate,path:"",descShard,descIndex,exactPath:"",desc:crateCorpus.doc,parent:undefined,type:null,id,word:crate,normalizedName:crate.indexOf("_")===-1?crate:crate.replace(/_/g,""),bitIndex:0,implDisambiguator:null,};id+=1;searchIndex.push(crateRow);currentIndex+=1;if(!searchIndexEmptyDesc.get(crate).contains(0)){descIndex+=1}const itemTypes=crateCorpus.t;const itemNames=crateCorpus.n;const itemPaths=new Map(crateCorpus.q);const itemReexports=new Map(crateCorpus.r);const itemParentIdxs=crateCorpus.i;const implDisambiguator=new Map(crateCorpus.b);const paths=crateCorpus.p;const aliases=crateCorpus.a;const lowercasePaths=[];const itemFunctionDecoder=new VlqHexDecoder(crateCorpus.f,buildFunctionSearchTypeCallback(lowercasePaths),);let len=paths.length;let lastPath=itemPaths.get(0);for(let i=0;i2){path=itemPaths.has(elem[2])?itemPaths.get(elem[2]):lastPath;lastPath=path}const exactPath=elem.length>3?itemPaths.get(elem[3]):path;lowercasePaths.push({ty,name:name.toLowerCase(),path,exactPath});paths[i]={ty,name,path,exactPath}}lastPath="";len=itemTypes.length;for(let i=0;i=descShard.len&&!searchIndexEmptyDesc.get(crate).contains(bitIndex)){descShard={crate,shard:descShard.shard+1,start:descShard.start+descShard.len,len:itemDescShardDecoder.next(),promise:null,resolve:null,};descIndex=0;descShardList.push(descShard)}let word="";if(typeof itemNames[i]==="string"){word=itemNames[i].toLowerCase()}const path=itemPaths.has(i)?itemPaths.get(i):lastPath;const type=itemFunctionDecoder.next();if(type!==null){if(type){const fp=functionTypeFingerprint.subarray(id*4,(id+1)*4);const fps=new Set();for(const t of type.inputs){buildFunctionTypeFingerprint(t,fp,fps)}for(const t of type.output){buildFunctionTypeFingerprint(t,fp,fps)}for(const w of type.where_clause){for(const t of w){buildFunctionTypeFingerprint(t,fp,fps)}}}}const row={crate,ty:itemTypes.charCodeAt(i)-charA,name:itemNames[i],path,descShard,descIndex,exactPath:itemReexports.has(i)?itemPaths.get(itemReexports.get(i)):path,parent:itemParentIdxs[i]>0?paths[itemParentIdxs[i]-1]:undefined,type,id,word,normalizedName:word.indexOf("_")===-1?word:word.replace(/_/g,""),bitIndex,implDisambiguator:implDisambiguator.has(i)?implDisambiguator.get(i):null,};id+=1;searchIndex.push(row);lastPath=row.path;if(!searchIndexEmptyDesc.get(crate).contains(bitIndex)){descIndex+=1}}if(aliases){const currentCrateAliases=new Map();ALIASES.set(crate,currentCrateAliases);for(const alias_name in aliases){if(!Object.prototype.hasOwnProperty.call(aliases,alias_name)){continue}let currentNameAliases;if(currentCrateAliases.has(alias_name)){currentNameAliases=currentCrateAliases.get(alias_name)}else{currentNameAliases=[];currentCrateAliases.set(alias_name,currentNameAliases)}for(const local_alias of 
aliases[alias_name]){currentNameAliases.push(local_alias+currentIndex)}}}currentIndex+=itemTypes.length;searchState.descShards.set(crate,descShardList)}TYPES_POOL=new Map()}function onSearchSubmit(e){e.preventDefault();searchState.clearInputTimeout();search()}function putBackSearch(){const search_input=searchState.input;if(!searchState.input){return}if(search_input.value!==""&&!searchState.isDisplayed()){searchState.showResults();if(browserSupportsHistoryApi()){history.replaceState(null,"",buildUrl(search_input.value,getFilterCrates()))}document.title=searchState.title}}function registerSearchEvents(){const params=searchState.getQueryStringParams();if(searchState.input.value===""){searchState.input.value=params.search||""}const searchAfter500ms=()=>{searchState.clearInputTimeout();if(searchState.input.value.length===0){searchState.hideResults()}else{searchState.timeout=setTimeout(search,500)}};searchState.input.onkeyup=searchAfter500ms;searchState.input.oninput=searchAfter500ms;document.getElementsByClassName("search-form")[0].onsubmit=onSearchSubmit;searchState.input.onchange=e=>{if(e.target!==document.activeElement){return}searchState.clearInputTimeout();setTimeout(search,0)};searchState.input.onpaste=searchState.input.onchange;searchState.outputElement().addEventListener("keydown",e=>{if(e.altKey||e.ctrlKey||e.shiftKey||e.metaKey){return}if(e.which===38){const previous=document.activeElement.previousElementSibling;if(previous){previous.focus()}else{searchState.focus()}e.preventDefault()}else if(e.which===40){const next=document.activeElement.nextElementSibling;if(next){next.focus()}const rect=document.activeElement.getBoundingClientRect();if(window.innerHeight-rect.bottom{if(e.which===40){focusSearchResult();e.preventDefault()}});searchState.input.addEventListener("focus",()=>{putBackSearch()});searchState.input.addEventListener("blur",()=>{searchState.input.placeholder=searchState.input.origPlaceholder});if(browserSupportsHistoryApi()){const previousTitle=document.title;window.addEventListener("popstate",e=>{const params=searchState.getQueryStringParams();document.title=previousTitle;currentResults=null;if(params.search&¶ms.search.length>0){searchState.input.value=params.search;e.preventDefault();search()}else{searchState.input.value="";searchState.hideResults()}})}window.onpageshow=()=>{const qSearch=searchState.getQueryStringParams().search;if(searchState.input.value===""&&qSearch){searchState.input.value=qSearch}search()}}function updateCrate(ev){if(ev.target.value==="all crates"){const query=searchState.input.value.trim();updateSearchHistory(buildUrl(query,null))}currentResults=null;search(true)}buildIndex(rawSearchIndex);if(typeof window!=="undefined"){registerSearchEvents();if(window.searchState.getQueryStringParams().search){search()}}if(typeof exports!=="undefined"){exports.initSearch=initSearch;exports.execQuery=execQuery;exports.parseQuery=parseQuery}}if(typeof window!=="undefined"){window.initSearch=initSearch;if(window.searchIndex!==undefined){initSearch(window.searchIndex)}}else{initSearch(new Map())}})() \ No newline at end of file diff --git a/static.files/settings-4313503d2e1961c2.js b/static.files/settings-4313503d2e1961c2.js new file mode 100644 index 000000000..ab425fe49 --- /dev/null +++ b/static.files/settings-4313503d2e1961c2.js @@ -0,0 +1,17 @@ +"use strict";(function(){const isSettingsPage=window.location.pathname.endsWith("/settings.html");function changeSetting(settingName,value){if(settingName==="theme"){const useSystem=value==="system 
preference"?"true":"false";updateLocalStorage("use-system-theme",useSystem)}updateLocalStorage(settingName,value);switch(settingName){case"theme":case"preferred-dark-theme":case"preferred-light-theme":updateTheme();updateLightAndDark();break;case"line-numbers":if(value===true){window.rustdoc_add_line_numbers_to_examples()}else{window.rustdoc_remove_line_numbers_from_examples()}break;case"hide-sidebar":if(value===true){addClass(document.documentElement,"hide-sidebar")}else{removeClass(document.documentElement,"hide-sidebar")}break}}function showLightAndDark(){removeClass(document.getElementById("preferred-light-theme"),"hidden");removeClass(document.getElementById("preferred-dark-theme"),"hidden")}function hideLightAndDark(){addClass(document.getElementById("preferred-light-theme"),"hidden");addClass(document.getElementById("preferred-dark-theme"),"hidden")}function updateLightAndDark(){const useSystem=getSettingValue("use-system-theme");if(useSystem==="true"||(useSystem===null&&getSettingValue("theme")===null)){showLightAndDark()}else{hideLightAndDark()}}function setEvents(settingsElement){updateLightAndDark();onEachLazy(settingsElement.querySelectorAll("input[type=\"checkbox\"]"),toggle=>{const settingId=toggle.id;const settingValue=getSettingValue(settingId);if(settingValue!==null){toggle.checked=settingValue==="true"}toggle.onchange=()=>{changeSetting(toggle.id,toggle.checked)}});onEachLazy(settingsElement.querySelectorAll("input[type=\"radio\"]"),elem=>{const settingId=elem.name;let settingValue=getSettingValue(settingId);if(settingId==="theme"){const useSystem=getSettingValue("use-system-theme");if(useSystem==="true"||settingValue===null){settingValue=useSystem==="false"?"light":"system preference"}}if(settingValue!==null&&settingValue!=="null"){elem.checked=settingValue===elem.value}elem.addEventListener("change",ev=>{changeSetting(ev.target.name,ev.target.value)})})}function buildSettingsPageSections(settings){let output="";for(const setting of settings){const js_data_name=setting["js_name"];const setting_name=setting["name"];if(setting["options"]!==undefined){output+=`\ +
+
${setting_name}
+
`;onEach(setting["options"],option=>{const checked=option===setting["default"]?" checked":"";const full=`${js_data_name}-${option.replace(/ /g,"-")}`;output+=`\ + `});output+=`\ +
+
`}else{const checked=setting["default"]===true?" checked":"";output+=`\ +
\ + \ +
`}}return output}function buildSettingsPage(){const theme_names=getVar("themes").split(",").filter(t=>t);theme_names.push("light","dark","ayu");const settings=[{"name":"Theme","js_name":"theme","default":"system preference","options":theme_names.concat("system preference"),},{"name":"Preferred light theme","js_name":"preferred-light-theme","default":"light","options":theme_names,},{"name":"Preferred dark theme","js_name":"preferred-dark-theme","default":"dark","options":theme_names,},{"name":"Auto-hide item contents for large items","js_name":"auto-hide-large-items","default":true,},{"name":"Auto-hide item methods' documentation","js_name":"auto-hide-method-docs","default":false,},{"name":"Auto-hide trait implementation documentation","js_name":"auto-hide-trait-implementations","default":false,},{"name":"Directly go to item in search if there is only one result","js_name":"go-to-only-result","default":false,},{"name":"Show line numbers on code examples","js_name":"line-numbers","default":false,},{"name":"Hide persistent navigation bar","js_name":"hide-sidebar","default":false,},{"name":"Disable keyboard shortcuts","js_name":"disable-shortcuts","default":false,},];const elementKind=isSettingsPage?"section":"div";const innerHTML=`
${buildSettingsPageSections(settings)}
`;const el=document.createElement(elementKind);el.id="settings";if(!isSettingsPage){el.className="popover"}el.innerHTML=innerHTML;if(isSettingsPage){document.getElementById(MAIN_ID).appendChild(el)}else{el.setAttribute("tabindex","-1");getSettingsButton().appendChild(el)}return el}const settingsMenu=buildSettingsPage();function displaySettings(){settingsMenu.style.display="";onEachLazy(settingsMenu.querySelectorAll("input[type='checkbox']"),el=>{const val=getSettingValue(el.id);const checked=val==="true";if(checked!==el.checked&&val!==null){el.checked=checked}})}function settingsBlurHandler(event){blurHandler(event,getSettingsButton(),window.hidePopoverMenus)}if(isSettingsPage){getSettingsButton().onclick=event=>{event.preventDefault()}}else{const settingsButton=getSettingsButton();const settingsMenu=document.getElementById("settings");settingsButton.onclick=event=>{if(settingsMenu.contains(event.target)){return}event.preventDefault();const shouldDisplaySettings=settingsMenu.style.display==="none";window.hideAllModals();if(shouldDisplaySettings){displaySettings()}};settingsButton.onblur=settingsBlurHandler;settingsButton.querySelector("a").onblur=settingsBlurHandler;onEachLazy(settingsMenu.querySelectorAll("input"),el=>{el.onblur=settingsBlurHandler});settingsMenu.onblur=settingsBlurHandler}setTimeout(()=>{setEvents(settingsMenu);if(!isSettingsPage){displaySettings()}removeClass(getSettingsButton(),"rotate")},0)})() \ No newline at end of file diff --git a/static.files/src-script-e66d777a5a92e9b2.js b/static.files/src-script-e66d777a5a92e9b2.js new file mode 100644 index 000000000..d0aebb851 --- /dev/null +++ b/static.files/src-script-e66d777a5a92e9b2.js @@ -0,0 +1 @@ +"use strict";(function(){const rootPath=getVar("root-path");const NAME_OFFSET=0;const DIRS_OFFSET=1;const FILES_OFFSET=2;const RUSTDOC_MOBILE_BREAKPOINT=700;function closeSidebarIfMobile(){if(window.innerWidth{removeClass(document.documentElement,"src-sidebar-expanded");updateLocalStorage("source-sidebar-show","false")};window.rustdocShowSourceSidebar=()=>{addClass(document.documentElement,"src-sidebar-expanded");updateLocalStorage("source-sidebar-show","true")};window.rustdocToggleSrcSidebar=()=>{if(document.documentElement.classList.contains("src-sidebar-expanded")){window.rustdocCloseSourceSidebar()}else{window.rustdocShowSourceSidebar()}};function createSrcSidebar(){const container=document.querySelector("nav.sidebar");const sidebar=document.createElement("div");sidebar.id="src-sidebar";let hasFoundFile=false;for(const[key,source]of srcIndex){source[NAME_OFFSET]=key;hasFoundFile=createDirEntry(source,sidebar,"",hasFoundFile)}container.appendChild(sidebar);const selected_elem=sidebar.getElementsByClassName("selected")[0];if(typeof selected_elem!=="undefined"){selected_elem.focus()}}function highlightSrcLines(){const match=window.location.hash.match(/^#?(\d+)(?:-(\d+))?$/);if(!match){return}let from=parseInt(match[1],10);let to=from;if(typeof match[2]!=="undefined"){to=parseInt(match[2],10)}if(to{onEachLazy(e.getElementsByTagName("a"),i_e=>{removeClass(i_e,"line-highlighted")})});for(let i=from;i<=to;++i){elem=document.getElementById(i);if(!elem){break}addClass(elem,"line-highlighted")}}const handleSrcHighlight=(function(){let prev_line_id=0;const set_fragment=name=>{const x=window.scrollX,y=window.scrollY;if(browserSupportsHistoryApi()){history.replaceState(null,null,"#"+name);highlightSrcLines()}else{location.replace("#"+name)}window.scrollTo(x,y)};return ev=>{let 
cur_line_id=parseInt(ev.target.id,10);if(isNaN(cur_line_id)||ev.ctrlKey||ev.altKey||ev.metaKey){return}ev.preventDefault();if(ev.shiftKey&&prev_line_id){if(prev_line_id>cur_line_id){const tmp=prev_line_id;prev_line_id=cur_line_id;cur_line_id=tmp}set_fragment(prev_line_id+"-"+cur_line_id)}else{prev_line_id=cur_line_id;set_fragment(cur_line_id)}}}());window.addEventListener("hashchange",highlightSrcLines);onEachLazy(document.getElementsByClassName("src-line-numbers"),el=>{el.addEventListener("click",handleSrcHighlight)});highlightSrcLines();window.createSrcSidebar=createSrcSidebar})() \ No newline at end of file diff --git a/static.files/storage-118b08c4c78b968e.js b/static.files/storage-118b08c4c78b968e.js new file mode 100644 index 000000000..981894677 --- /dev/null +++ b/static.files/storage-118b08c4c78b968e.js @@ -0,0 +1,24 @@ +"use strict";const builtinThemes=["light","dark","ayu"];const darkThemes=["dark","ayu"];window.currentTheme=document.getElementById("themeStyle");const settingsDataset=(function(){const settingsElement=document.getElementById("default-settings");return settingsElement&&settingsElement.dataset?settingsElement.dataset:null})();function getSettingValue(settingName){const current=getCurrentValue(settingName);if(current===null&&settingsDataset!==null){const def=settingsDataset[settingName.replace(/-/g,"_")];if(def!==undefined){return def}}return current}const localStoredTheme=getSettingValue("theme");function hasClass(elem,className){return elem&&elem.classList&&elem.classList.contains(className)}function addClass(elem,className){if(elem&&elem.classList){elem.classList.add(className)}}function removeClass(elem,className){if(elem&&elem.classList){elem.classList.remove(className)}}function onEach(arr,func){for(const elem of arr){if(func(elem)){return true}}return false}function onEachLazy(lazyArray,func){return onEach(Array.prototype.slice.call(lazyArray),func)}function updateLocalStorage(name,value){try{window.localStorage.setItem("rustdoc-"+name,value)}catch(e){}}function getCurrentValue(name){try{return window.localStorage.getItem("rustdoc-"+name)}catch(e){return null}}const getVar=(function getVar(name){const el=document.querySelector("head > meta[name='rustdoc-vars']");return el?el.attributes["data-"+name].value:null});function switchTheme(newThemeName,saveTheme){const themeNames=getVar("themes").split(",").filter(t=>t);themeNames.push(...builtinThemes);if(themeNames.indexOf(newThemeName)===-1){return}if(saveTheme){updateLocalStorage("theme",newThemeName)}document.documentElement.setAttribute("data-theme",newThemeName);if(builtinThemes.indexOf(newThemeName)!==-1){if(window.currentTheme){window.currentTheme.parentNode.removeChild(window.currentTheme);window.currentTheme=null}}else{const newHref=getVar("root-path")+encodeURIComponent(newThemeName)+getVar("resource-suffix")+".css";if(!window.currentTheme){if(document.readyState==="loading"){document.write(``);window.currentTheme=document.getElementById("themeStyle")}else{window.currentTheme=document.createElement("link");window.currentTheme.rel="stylesheet";window.currentTheme.id="themeStyle";window.currentTheme.href=newHref;document.documentElement.appendChild(window.currentTheme)}}else if(newHref!==window.currentTheme.href){window.currentTheme.href=newHref}}}const updateTheme=(function(){const mql=window.matchMedia("(prefers-color-scheme: dark)");function updateTheme(){if(getSettingValue("use-system-theme")!=="false"){const lightTheme=getSettingValue("preferred-light-theme")||"light";const 
darkTheme=getSettingValue("preferred-dark-theme")||"dark";updateLocalStorage("use-system-theme","true");switchTheme(mql.matches?darkTheme:lightTheme,true)}else{switchTheme(getSettingValue("theme"),false)}}mql.addEventListener("change",updateTheme);return updateTheme})();if(getSettingValue("use-system-theme")!=="false"&&window.matchMedia){if(getSettingValue("use-system-theme")===null&&getSettingValue("preferred-dark-theme")===null&&darkThemes.indexOf(localStoredTheme)>=0){updateLocalStorage("preferred-dark-theme",localStoredTheme)}}updateTheme();if(getSettingValue("source-sidebar-show")==="true"){addClass(document.documentElement,"src-sidebar-expanded")}if(getSettingValue("hide-sidebar")==="true"){addClass(document.documentElement,"hide-sidebar")}function updateSidebarWidth(){const desktopSidebarWidth=getSettingValue("desktop-sidebar-width");if(desktopSidebarWidth&&desktopSidebarWidth!=="null"){document.documentElement.style.setProperty("--desktop-sidebar-width",desktopSidebarWidth+"px",)}const srcSidebarWidth=getSettingValue("src-sidebar-width");if(srcSidebarWidth&&srcSidebarWidth!=="null"){document.documentElement.style.setProperty("--src-sidebar-width",srcSidebarWidth+"px",)}}updateSidebarWidth();window.addEventListener("pageshow",ev=>{if(ev.persisted){setTimeout(updateTheme,0);setTimeout(updateSidebarWidth,0)}});class RustdocSearchElement extends HTMLElement{constructor(){super()}connectedCallback(){const rootPath=getVar("root-path");const currentCrate=getVar("current-crate");this.innerHTML=``}}window.customElements.define("rustdoc-search",RustdocSearchElement) \ No newline at end of file diff --git a/trait.impl/cactusref/adopt/trait.Adopt.js b/trait.impl/cactusref/adopt/trait.Adopt.js new file mode 100644 index 000000000..f1cf52cf5 --- /dev/null +++ b/trait.impl/cactusref/adopt/trait.Adopt.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/borrow/trait.Borrow.js b/trait.impl/core/borrow/trait.Borrow.js new file mode 100644 index 000000000..0f19197a8 --- /dev/null +++ b/trait.impl/core/borrow/trait.Borrow.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Borrow<T> for Rc<T>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/clone/trait.Clone.js b/trait.impl/core/clone/trait.Clone.js new file mode 100644 index 000000000..dcc9e498d --- /dev/null +++ b/trait.impl/core/clone/trait.Clone.js @@ -0,0 +1,5 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Clone for Rc<T>"],["impl<T> Clone for Weak<T>"]], +"hashbrown":[["impl Clone for TryReserveError"],["impl<K> Clone for Iter<'_, K>"],["impl<K, V> Clone for Iter<'_, K, V>"],["impl<K, V> Clone for Keys<'_, K, V>"],["impl<K, V> Clone for Values<'_, K, V>"],["impl<K: Clone, V: Clone, S: Clone, A: Allocator + Clone> Clone for HashMap<K, V, S, A>"],["impl<T, A> Clone for HashTable<T, A>
where\n T: Clone,\n A: Allocator + Clone,
"],["impl<T, S, A: Allocator> Clone for Difference<'_, T, S, A>"],["impl<T, S, A: Allocator> Clone for Intersection<'_, T, S, A>"],["impl<T, S, A: Allocator> Clone for SymmetricDifference<'_, T, S, A>"],["impl<T, S, A: Allocator> Clone for Union<'_, T, S, A>"],["impl<T: Clone, S: Clone, A: Allocator + Clone> Clone for HashSet<T, S, A>"]], +"log":[["impl Clone for Level"],["impl Clone for LevelFilter"],["impl<'a> Clone for Metadata<'a>"],["impl<'a> Clone for Record<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/cmp/trait.Eq.js b/trait.impl/core/cmp/trait.Eq.js new file mode 100644 index 000000000..5fb94c330 --- /dev/null +++ b/trait.impl/core/cmp/trait.Eq.js @@ -0,0 +1,5 @@ +(function() {var implementors = { +"cactusref":[["impl<T: Eq> Eq for Rc<T>"]], +"hashbrown":[["impl Eq for TryReserveError"],["impl<K, V, S, A> Eq for HashMap<K, V, S, A>
where\n K: Eq + Hash,\n V: Eq,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> Eq for HashSet<T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"]], +"log":[["impl Eq for Level"],["impl Eq for LevelFilter"],["impl Eq for ParseLevelError"],["impl<'a> Eq for Metadata<'a>"],["impl<'a> Eq for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/cmp/trait.Ord.js b/trait.impl/core/cmp/trait.Ord.js new file mode 100644 index 000000000..ca285cef5 --- /dev/null +++ b/trait.impl/core/cmp/trait.Ord.js @@ -0,0 +1,4 @@ +(function() {var implementors = { +"cactusref":[["impl<T: Ord> Ord for Rc<T>"]], +"log":[["impl Ord for Level"],["impl Ord for LevelFilter"],["impl<'a> Ord for Metadata<'a>"],["impl<'a> Ord for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/cmp/trait.PartialEq.js b/trait.impl/core/cmp/trait.PartialEq.js new file mode 100644 index 000000000..8ac3a9d9c --- /dev/null +++ b/trait.impl/core/cmp/trait.PartialEq.js @@ -0,0 +1,5 @@ +(function() {var implementors = { +"cactusref":[["impl<T: PartialEq> PartialEq for Rc<T>"]], +"hashbrown":[["impl PartialEq for TryReserveError"],["impl<K, V, S, A> PartialEq for HashMap<K, V, S, A>
where\n K: Eq + Hash,\n V: PartialEq,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> PartialEq for HashSet<T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"]], +"log":[["impl PartialEq for Level"],["impl PartialEq for LevelFilter"],["impl PartialEq for ParseLevelError"],["impl PartialEq<Level> for LevelFilter"],["impl PartialEq<LevelFilter> for Level"],["impl<'a> PartialEq for Metadata<'a>"],["impl<'a> PartialEq for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/cmp/trait.PartialOrd.js b/trait.impl/core/cmp/trait.PartialOrd.js new file mode 100644 index 000000000..a59dae7ba --- /dev/null +++ b/trait.impl/core/cmp/trait.PartialOrd.js @@ -0,0 +1,4 @@ +(function() {var implementors = { +"cactusref":[["impl<T: PartialOrd> PartialOrd for Rc<T>"]], +"log":[["impl PartialOrd for Level"],["impl PartialOrd for LevelFilter"],["impl PartialOrd<Level> for LevelFilter"],["impl PartialOrd<LevelFilter> for Level"],["impl<'a> PartialOrd for Metadata<'a>"],["impl<'a> PartialOrd for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/convert/trait.AsRef.js b/trait.impl/core/convert/trait.AsRef.js new file mode 100644 index 000000000..83f1a1ab1 --- /dev/null +++ b/trait.impl/core/convert/trait.AsRef.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[["impl<T> AsRef<T> for Rc<T>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/convert/trait.From.js b/trait.impl/core/convert/trait.From.js new file mode 100644 index 000000000..3abdca9b9 --- /dev/null +++ b/trait.impl/core/convert/trait.From.js @@ -0,0 +1,4 @@ +(function() {var implementors = { +"cactusref":[["impl<T> From<Box<T>> for Rc<T>"],["impl<T> From<T> for Rc<T>"]], +"hashbrown":[["impl<T, S, A> From<HashMap<T, (), S, A>> for HashSet<T, S, A>
where\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/default/trait.Default.js b/trait.impl/core/default/trait.Default.js new file mode 100644 index 000000000..57f81391b --- /dev/null +++ b/trait.impl/core/default/trait.Default.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Default for Weak<T>"],["impl<T: Default> Default for Rc<T>"]], +"hashbrown":[["impl<K, V, S, A> Default for HashMap<K, V, S, A>
where\n S: Default,\n A: Default + Allocator,
"],["impl<T, A> Default for HashTable<T, A>
where\n A: Allocator + Default,
"],["impl<T, S, A> Default for HashSet<T, S, A>
where\n S: Default,\n A: Default + Allocator,
"]], +"log":[["impl<'a> Default for MetadataBuilder<'a>"],["impl<'a> Default for RecordBuilder<'a>"]], +"rustc_hash":[["impl Default for FxHasher"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/fmt/trait.Debug.js b/trait.impl/core/fmt/trait.Debug.js new file mode 100644 index 000000000..0e07e2a0c --- /dev/null +++ b/trait.impl/core/fmt/trait.Debug.js @@ -0,0 +1,5 @@ +(function() {var implementors = { +"cactusref":[["impl<T: Debug> Debug for Rc<T>"],["impl<T: Debug> Debug for Weak<T>"]], +"hashbrown":[["impl Debug for TryReserveError"],["impl<K, V> Debug for IterMut<'_, K, V>
where\n K: Debug,\n V: Debug,
"],["impl<K, V, A> Debug for Drain<'_, K, V, A>
where\n K: Debug,\n V: Debug,\n A: Allocator,
"],["impl<K, V, S, A> Debug for HashMap<K, V, S, A>
where\n K: Debug,\n V: Debug,\n A: Allocator,
"],["impl<K, V, S, A: Allocator> Debug for RawEntryBuilder<'_, K, V, S, A>"],["impl<K, V, S, A: Allocator> Debug for RawEntryBuilderMut<'_, K, V, S, A>"],["impl<K, V, S, A: Allocator> Debug for RawVacantEntryMut<'_, K, V, S, A>"],["impl<K, V: Debug> Debug for Values<'_, K, V>"],["impl<K, V: Debug> Debug for ValuesMut<'_, K, V>"],["impl<K, V: Debug, A: Allocator> Debug for IntoValues<K, V, A>"],["impl<K: Borrow<Q>, Q: ?Sized + Debug, V, S, A: Allocator> Debug for VacantEntryRef<'_, '_, K, Q, V, S, A>"],["impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug for EntryRef<'_, '_, K, Q, V, S, A>"],["impl<K: Borrow<Q>, Q: ?Sized + Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntryRef<'_, '_, K, Q, V, S, A>"],["impl<K: Debug> Debug for Iter<'_, K>"],["impl<K: Debug, A: Allocator> Debug for Drain<'_, K, A>"],["impl<K: Debug, A: Allocator> Debug for IntoIter<K, A>"],["impl<K: Debug, V> Debug for Keys<'_, K, V>"],["impl<K: Debug, V, S, A: Allocator> Debug for VacantEntry<'_, K, V, S, A>"],["impl<K: Debug, V: Debug> Debug for Iter<'_, K, V>"],["impl<K: Debug, V: Debug, A: Allocator> Debug for IntoIter<K, V, A>"],["impl<K: Debug, V: Debug, A: Allocator> Debug for IntoKeys<K, V, A>"],["impl<K: Debug, V: Debug, S, A: Allocator> Debug for Entry<'_, K, V, S, A>"],["impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawEntryMut<'_, K, V, S, A>"],["impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, K, V, S, A>"],["impl<K: Debug, V: Debug, S, A: Allocator> Debug for OccupiedError<'_, K, V, S, A>"],["impl<K: Debug, V: Debug, S, A: Allocator> Debug for RawOccupiedEntryMut<'_, K, V, S, A>"],["impl<T, A> Debug for HashTable<T, A>
where\n T: Debug,\n A: Allocator,
"],["impl<T, S, A> Debug for Difference<'_, T, S, A>
where\n T: Debug + Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> Debug for HashSet<T, S, A>
where\n T: Debug,\n A: Allocator,
"],["impl<T, S, A> Debug for Intersection<'_, T, S, A>
where\n T: Debug + Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> Debug for SymmetricDifference<'_, T, S, A>
where\n T: Debug + Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> Debug for Union<'_, T, S, A>
where\n T: Debug + Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T: Debug, A: Allocator> Debug for Entry<'_, T, A>"],["impl<T: Debug, A: Allocator> Debug for AbsentEntry<'_, T, A>"],["impl<T: Debug, A: Allocator> Debug for Drain<'_, T, A>"],["impl<T: Debug, A: Allocator> Debug for OccupiedEntry<'_, T, A>"],["impl<T: Debug, A: Allocator> Debug for VacantEntry<'_, T, A>"],["impl<T: Debug, S, A: Allocator> Debug for Entry<'_, T, S, A>"],["impl<T: Debug, S, A: Allocator> Debug for OccupiedEntry<'_, T, S, A>"],["impl<T: Debug, S, A: Allocator> Debug for VacantEntry<'_, T, S, A>"]], +"log":[["impl Debug for Level"],["impl Debug for LevelFilter"],["impl Debug for ParseLevelError"],["impl Debug for SetLoggerError"],["impl<'a> Debug for Metadata<'a>"],["impl<'a> Debug for MetadataBuilder<'a>"],["impl<'a> Debug for Record<'a>"],["impl<'a> Debug for RecordBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/fmt/trait.Display.js b/trait.impl/core/fmt/trait.Display.js new file mode 100644 index 000000000..a8479b300 --- /dev/null +++ b/trait.impl/core/fmt/trait.Display.js @@ -0,0 +1,5 @@ +(function() {var implementors = { +"cactusref":[["impl<T: Display> Display for Rc<T>"]], +"hashbrown":[["impl<'a, K: Debug, V: Debug, S, A: Allocator> Display for OccupiedError<'a, K, V, S, A>"]], +"log":[["impl Display for Level"],["impl Display for LevelFilter"],["impl Display for ParseLevelError"],["impl Display for SetLoggerError"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/fmt/trait.Pointer.js b/trait.impl/core/fmt/trait.Pointer.js new file mode 100644 index 000000000..b9d15a21a --- /dev/null +++ b/trait.impl/core/fmt/trait.Pointer.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Pointer for Rc<T>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/hash/trait.Hash.js b/trait.impl/core/hash/trait.Hash.js new file mode 100644 index 000000000..9a6733107 --- /dev/null +++ b/trait.impl/core/hash/trait.Hash.js @@ -0,0 +1,4 @@ +(function() {var implementors = { +"cactusref":[["impl<T: Hash> Hash for Rc<T>"]], +"log":[["impl Hash for Level"],["impl Hash for LevelFilter"],["impl<'a> Hash for Metadata<'a>"],["impl<'a> Hash for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/hash/trait.Hasher.js b/trait.impl/core/hash/trait.Hasher.js new file mode 100644 index 000000000..c4f74f7ef --- /dev/null +++ b/trait.impl/core/hash/trait.Hasher.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"rustc_hash":[["impl Hasher for FxHasher"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/collect/trait.Extend.js b/trait.impl/core/iter/traits/collect/trait.Extend.js new file mode 100644 index 000000000..fd0082620 --- /dev/null +++ b/trait.impl/core/iter/traits/collect/trait.Extend.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<'a, K, V, S, A> Extend<&'a (K, V)> for 
HashMap<K, V, S, A>
where\n K: Eq + Hash + Copy,\n V: Copy,\n S: BuildHasher,\n A: Allocator,
"],["impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap<K, V, S, A>
where\n K: Eq + Hash + Copy,\n V: Copy,\n S: BuildHasher,\n A: Allocator,
"],["impl<'a, T, S, A> Extend<&'a T> for HashSet<T, S, A>
where\n T: 'a + Eq + Hash + Copy,\n S: BuildHasher,\n A: Allocator,
"],["impl<K, V, S, A> Extend<(K, V)> for HashMap<K, V, S, A>
where\n K: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> Extend<T> for HashSet<T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/collect/trait.FromIterator.js b/trait.impl/core/iter/traits/collect/trait.FromIterator.js new file mode 100644 index 000000000..9cf007d2b --- /dev/null +++ b/trait.impl/core/iter/traits/collect/trait.FromIterator.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<K, V, S, A> FromIterator<(K, V)> for HashMap<K, V, S, A>
where\n K: Eq + Hash,\n S: BuildHasher + Default,\n A: Default + Allocator,
"],["impl<T, S, A> FromIterator<T> for HashSet<T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher + Default,\n A: Default + Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/collect/trait.IntoIterator.js b/trait.impl/core/iter/traits/collect/trait.IntoIterator.js new file mode 100644 index 000000000..476256ee5 --- /dev/null +++ b/trait.impl/core/iter/traits/collect/trait.IntoIterator.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<'a, K, V, S, A: Allocator> IntoIterator for &'a HashMap<K, V, S, A>"],["impl<'a, K, V, S, A: Allocator> IntoIterator for &'a mut HashMap<K, V, S, A>"],["impl<'a, T, A> IntoIterator for &'a HashTable<T, A>
where\n A: Allocator,
"],["impl<'a, T, A> IntoIterator for &'a mut HashTable<T, A>
where\n A: Allocator,
"],["impl<'a, T, S, A: Allocator> IntoIterator for &'a HashSet<T, S, A>"],["impl<K, V, S, A: Allocator> IntoIterator for HashMap<K, V, S, A>"],["impl<T, A> IntoIterator for HashTable<T, A>
where\n A: Allocator,
"],["impl<T, S, A: Allocator> IntoIterator for HashSet<T, S, A>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/exact_size/trait.ExactSizeIterator.js b/trait.impl/core/iter/traits/exact_size/trait.ExactSizeIterator.js new file mode 100644 index 000000000..879a43da4 --- /dev/null +++ b/trait.impl/core/iter/traits/exact_size/trait.ExactSizeIterator.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<'a, K> ExactSizeIterator for Iter<'a, K>"],["impl<K, A: Allocator> ExactSizeIterator for Drain<'_, K, A>"],["impl<K, A: Allocator> ExactSizeIterator for IntoIter<K, A>"],["impl<K, V> ExactSizeIterator for Iter<'_, K, V>"],["impl<K, V> ExactSizeIterator for IterMut<'_, K, V>"],["impl<K, V> ExactSizeIterator for Keys<'_, K, V>"],["impl<K, V> ExactSizeIterator for Values<'_, K, V>"],["impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V>"],["impl<K, V, A: Allocator> ExactSizeIterator for Drain<'_, K, V, A>"],["impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A>"],["impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A>"],["impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A>"],["impl<T> ExactSizeIterator for Iter<'_, T>"],["impl<T> ExactSizeIterator for IterMut<'_, T>"],["impl<T, A> ExactSizeIterator for IntoIter<T, A>
where\n A: Allocator,
"],["impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/iterator/trait.Iterator.js b/trait.impl/core/iter/traits/iterator/trait.Iterator.js new file mode 100644 index 000000000..53ada74d3 --- /dev/null +++ b/trait.impl/core/iter/traits/iterator/trait.Iterator.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<'a, K> Iterator for Iter<'a, K>"],["impl<'a, K, V> Iterator for Iter<'a, K, V>"],["impl<'a, K, V> Iterator for IterMut<'a, K, V>"],["impl<'a, K, V> Iterator for Keys<'a, K, V>"],["impl<'a, K, V> Iterator for Values<'a, K, V>"],["impl<'a, K, V> Iterator for ValuesMut<'a, K, V>"],["impl<'a, K, V, A: Allocator> Iterator for Drain<'a, K, V, A>"],["impl<'a, T> Iterator for Iter<'a, T>"],["impl<'a, T> Iterator for IterMut<'a, T>"],["impl<'a, T, S, A> Iterator for Difference<'a, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<'a, T, S, A> Iterator for Intersection<'a, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<'a, T, S, A> Iterator for SymmetricDifference<'a, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<'a, T, S, A> Iterator for Union<'a, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<K, A: Allocator> Iterator for Drain<'_, K, A>"],["impl<K, A: Allocator> Iterator for IntoIter<K, A>"],["impl<K, F, A: Allocator> Iterator for ExtractIf<'_, K, F, A>
where\n F: FnMut(&K) -> bool,
"],["impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A>"],["impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A>"],["impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A>"],["impl<K, V, F, A> Iterator for ExtractIf<'_, K, V, F, A>
where\n F: FnMut(&K, &mut V) -> bool,\n A: Allocator,
"],["impl<T, A> Iterator for IntoIter<T, A>
where\n A: Allocator,
"],["impl<T, A: Allocator> Iterator for Drain<'_, T, A>"],["impl<T, F, A: Allocator> Iterator for ExtractIf<'_, T, F, A>
where\n F: FnMut(&mut T) -> bool,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/iter/traits/marker/trait.FusedIterator.js b/trait.impl/core/iter/traits/marker/trait.FusedIterator.js new file mode 100644 index 000000000..96769af4f --- /dev/null +++ b/trait.impl/core/iter/traits/marker/trait.FusedIterator.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<K> FusedIterator for Iter<'_, K>"],["impl<K, A: Allocator> FusedIterator for Drain<'_, K, A>"],["impl<K, A: Allocator> FusedIterator for IntoIter<K, A>"],["impl<K, F, A: Allocator> FusedIterator for ExtractIf<'_, K, F, A>
where\n F: FnMut(&K) -> bool,
"],["impl<K, V> FusedIterator for Iter<'_, K, V>"],["impl<K, V> FusedIterator for IterMut<'_, K, V>"],["impl<K, V> FusedIterator for Keys<'_, K, V>"],["impl<K, V> FusedIterator for Values<'_, K, V>"],["impl<K, V> FusedIterator for ValuesMut<'_, K, V>"],["impl<K, V, A: Allocator> FusedIterator for Drain<'_, K, V, A>"],["impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A>"],["impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A>"],["impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A>"],["impl<K, V, F> FusedIterator for ExtractIf<'_, K, V, F>
where\n F: FnMut(&K, &mut V) -> bool,
"],["impl<T> FusedIterator for Iter<'_, T>"],["impl<T> FusedIterator for IterMut<'_, T>"],["impl<T, A> FusedIterator for IntoIter<T, A>
where\n A: Allocator,
"],["impl<T, A: Allocator> FusedIterator for Drain<'_, T, A>"],["impl<T, F, A: Allocator> FusedIterator for ExtractIf<'_, T, F, A>
where\n F: FnMut(&mut T) -> bool,
"],["impl<T, S, A> FusedIterator for Difference<'_, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> FusedIterator for Intersection<'_, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> FusedIterator for SymmetricDifference<'_, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"],["impl<T, S, A> FusedIterator for Union<'_, T, S, A>
where\n T: Eq + Hash,\n S: BuildHasher,\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.Copy.js b/trait.impl/core/marker/trait.Copy.js new file mode 100644 index 000000000..e786ea1c2 --- /dev/null +++ b/trait.impl/core/marker/trait.Copy.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"log":[["impl Copy for Level"],["impl Copy for LevelFilter"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.Freeze.js b/trait.impl/core/marker/trait.Freeze.js new file mode 100644 index 000000000..79c440604 --- /dev/null +++ b/trait.impl/core/marker/trait.Freeze.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Freeze for Rc<T>",1,["cactusref::rc::Rc"]],["impl<T> Freeze for Weak<T>",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl Freeze for TryReserveError",1,["hashbrown::TryReserveError"]],["impl Freeze for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A> Freeze for EntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Freeze,\n Q: ?Sized,
",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Freeze for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Freeze,\n Q: ?Sized,
",1,["hashbrown::map::OccupiedEntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Freeze for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Freeze,\n Q: ?Sized,
",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> Freeze for Iter<'a, K>",1,["hashbrown::set::Iter"]],["impl<'a, K, A> Freeze for Drain<'a, K, A>",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A> Freeze for ExtractIf<'a, K, F, A>
where\n F: Freeze,
",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> Freeze for Iter<'a, K, V>",1,["hashbrown::map::Iter"]],["impl<'a, K, V> Freeze for IterMut<'a, K, V>",1,["hashbrown::map::IterMut"]],["impl<'a, K, V> Freeze for Keys<'a, K, V>",1,["hashbrown::map::Keys"]],["impl<'a, K, V> Freeze for Values<'a, K, V>",1,["hashbrown::map::Values"]],["impl<'a, K, V> Freeze for ValuesMut<'a, K, V>",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V, A> Freeze for Drain<'a, K, V, A>",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A> Freeze for ExtractIf<'a, K, V, F, A>
where\n F: Freeze,
",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S, A> Freeze for Entry<'a, K, V, S, A>
where\n K: Freeze,
",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A> Freeze for RawEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A> Freeze for OccupiedEntry<'a, K, V, S, A>
where\n K: Freeze,
",1,["hashbrown::map::OccupiedEntry"]],["impl<'a, K, V, S, A> Freeze for OccupiedError<'a, K, V, S, A>
where\n V: Freeze,\n K: Freeze,
",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A> Freeze for RawEntryBuilder<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, K, V, S, A> Freeze for RawEntryBuilderMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A> Freeze for RawOccupiedEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawOccupiedEntryMut"]],["impl<'a, K, V, S, A> Freeze for RawVacantEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> Freeze for VacantEntry<'a, K, V, S, A>
where\n K: Freeze,
",1,["hashbrown::map::VacantEntry"]],["impl<'a, T> Freeze for Iter<'a, T>",1,["hashbrown::table::Iter"]],["impl<'a, T> Freeze for IterMut<'a, T>",1,["hashbrown::table::IterMut"]],["impl<'a, T, A> Freeze for Entry<'a, T, A>",1,["hashbrown::table::Entry"]],["impl<'a, T, A> Freeze for AbsentEntry<'a, T, A>",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A> Freeze for Drain<'a, T, A>",1,["hashbrown::table::Drain"]],["impl<'a, T, A> Freeze for OccupiedEntry<'a, T, A>",1,["hashbrown::table::OccupiedEntry"]],["impl<'a, T, A> Freeze for VacantEntry<'a, T, A>",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, F, A> Freeze for ExtractIf<'a, T, F, A>
where\n F: Freeze,
",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A> Freeze for Entry<'a, T, S, A>
where\n T: Freeze,
",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A> Freeze for Difference<'a, T, S, A>",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> Freeze for Intersection<'a, T, S, A>",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> Freeze for OccupiedEntry<'a, T, S, A>
where\n T: Freeze,
",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A> Freeze for SymmetricDifference<'a, T, S, A>",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> Freeze for Union<'a, T, S, A>",1,["hashbrown::set::Union"]],["impl<'a, T, S, A> Freeze for VacantEntry<'a, T, S, A>
where\n T: Freeze,
",1,["hashbrown::set::VacantEntry"]],["impl<K, A> Freeze for IntoIter<K, A>
where\n A: Freeze,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> Freeze for IntoIter<K, V, A>
where\n A: Freeze,
",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> Freeze for IntoKeys<K, V, A>
where\n A: Freeze,
",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> Freeze for IntoValues<K, V, A>
where\n A: Freeze,
",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> Freeze for HashMap<K, V, S, A>
where\n S: Freeze,\n A: Freeze,
",1,["hashbrown::map::HashMap"]],["impl<T, A> Freeze for HashTable<T, A>
where\n A: Freeze,
",1,["hashbrown::table::HashTable"]],["impl<T, A> Freeze for IntoIter<T, A>
where\n A: Freeze,
",1,["hashbrown::table::IntoIter"]],["impl<T, S, A> Freeze for HashSet<T, S, A>
where\n S: Freeze,\n A: Freeze,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl Freeze for Level",1,["log::Level"]],["impl Freeze for LevelFilter",1,["log::LevelFilter"]],["impl Freeze for ParseLevelError",1,["log::ParseLevelError"]],["impl Freeze for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> Freeze for Metadata<'a>",1,["log::Metadata"]],["impl<'a> Freeze for MetadataBuilder<'a>",1,["log::MetadataBuilder"]],["impl<'a> Freeze for Record<'a>",1,["log::Record"]],["impl<'a> Freeze for RecordBuilder<'a>",1,["log::RecordBuilder"]]], +"rustc_hash":[["impl Freeze for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.Send.js b/trait.impl/core/marker/trait.Send.js new file mode 100644 index 000000000..9bf37cf84 --- /dev/null +++ b/trait.impl/core/marker/trait.Send.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> !Send for Rc<T>",1,["cactusref::rc::Rc"]],["impl<T> !Send for Weak<T>",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl Send for TryReserveError",1,["hashbrown::TryReserveError"]],["impl Send for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A> Send for EntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Send,\n Q: Sync + ?Sized,\n V: Send,\n S: Send,\n A: Send,
",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Send for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Send,\n Q: Sync + ?Sized,\n V: Send,\n S: Send,\n A: Send + Allocator,
"],["impl<'a, 'b, K, Q, V, S, A> Send for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Send,\n Q: Sync + ?Sized,\n S: Send,\n A: Send,\n V: Send,
",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> Send for Iter<'a, K>
where\n K: Sync,
",1,["hashbrown::set::Iter"]],["impl<'a, K, A> Send for Drain<'a, K, A>
where\n A: Send,\n K: Send,
",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A> Send for ExtractIf<'a, K, F, A>
where\n F: Send,\n A: Send,\n K: Send,
",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> Send for Iter<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Iter"]],["impl<'a, K, V> Send for Keys<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Keys"]],["impl<'a, K, V> Send for Values<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Values"]],["impl<'a, K, V> Send for ValuesMut<'a, K, V>
where\n K: Send,\n V: Send,
",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V, A> Send for Drain<'a, K, V, A>
where\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A> Send for ExtractIf<'a, K, V, F, A>
where\n F: Send,\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S, A> Send for Entry<'a, K, V, S, A>
where\n K: Send,\n V: Send,\n S: Send,\n A: Send,
",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A> Send for RawEntryMut<'a, K, V, S, A>
where\n K: Send,\n V: Send,\n S: Send + Sync,\n A: Send,
",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A> Send for OccupiedError<'a, K, V, S, A>
where\n V: Send,\n K: Send,\n S: Send,\n A: Send,
",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A> Send for RawEntryBuilder<'a, K, V, S, A>
where\n S: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, K, V, S, A> Send for RawEntryBuilderMut<'a, K, V, S, A>
where\n S: Send,\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A> Send for RawVacantEntryMut<'a, K, V, S, A>
where\n S: Sync,\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> Send for VacantEntry<'a, K, V, S, A>
where\n K: Send,\n S: Send,\n A: Send,\n V: Send,
",1,["hashbrown::map::VacantEntry"]],["impl<'a, T> Send for Iter<'a, T>
where\n T: Sync,
",1,["hashbrown::table::Iter"]],["impl<'a, T> Send for IterMut<'a, T>
where\n T: Send,
",1,["hashbrown::table::IterMut"]],["impl<'a, T, A> Send for Entry<'a, T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::Entry"]],["impl<'a, T, A> Send for AbsentEntry<'a, T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A> Send for Drain<'a, T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::Drain"]],["impl<'a, T, A> Send for VacantEntry<'a, T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, F, A> Send for ExtractIf<'a, T, F, A>
where\n F: Send,\n T: Send,\n A: Send,
",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A> Send for Entry<'a, T, S, A>
where\n T: Send,\n S: Send,\n A: Send,
",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A> Send for Difference<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> Send for Intersection<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> Send for OccupiedEntry<'a, T, S, A>
where\n T: Send,\n S: Send,\n A: Send,
",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A> Send for SymmetricDifference<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> Send for Union<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Union"]],["impl<'a, T, S, A> Send for VacantEntry<'a, T, S, A>
where\n T: Send,\n S: Send,\n A: Send,
",1,["hashbrown::set::VacantEntry"]],["impl<K, A> Send for IntoIter<K, A>
where\n A: Send,\n K: Send,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> Send for IntoIter<K, V, A>
where\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> Send for IntoKeys<K, V, A>
where\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> Send for IntoValues<K, V, A>
where\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> Send for HashMap<K, V, S, A>
where\n S: Send,\n A: Send,\n K: Send,\n V: Send,
",1,["hashbrown::map::HashMap"]],["impl<K, V, S, A> Send for OccupiedEntry<'_, K, V, S, A>
where\n K: Send,\n V: Send,\n S: Send,\n A: Send + Allocator,
"],["impl<K, V, S, A> Send for RawOccupiedEntryMut<'_, K, V, S, A>
where\n K: Send,\n V: Send,\n S: Send,\n A: Send + Allocator,
"],["impl<K: Send, V: Send> Send for IterMut<'_, K, V>"],["impl<T, A> Send for HashTable<T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::HashTable"]],["impl<T, A> Send for IntoIter<T, A>
where\n T: Send,\n A: Send,
",1,["hashbrown::table::IntoIter"]],["impl<T, A> Send for OccupiedEntry<'_, T, A>
where\n T: Send,\n A: Send + Allocator,
"],["impl<T, S, A> Send for HashSet<T, S, A>
where\n S: Send,\n A: Send,\n T: Send,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl Send for Level",1,["log::Level"]],["impl Send for LevelFilter",1,["log::LevelFilter"]],["impl Send for ParseLevelError",1,["log::ParseLevelError"]],["impl Send for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> !Send for Record<'a>",1,["log::Record"]],["impl<'a> !Send for RecordBuilder<'a>",1,["log::RecordBuilder"]],["impl<'a> Send for Metadata<'a>",1,["log::Metadata"]],["impl<'a> Send for MetadataBuilder<'a>",1,["log::MetadataBuilder"]]], +"rustc_hash":[["impl Send for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.StructuralPartialEq.js b/trait.impl/core/marker/trait.StructuralPartialEq.js new file mode 100644 index 000000000..e8eb4a893 --- /dev/null +++ b/trait.impl/core/marker/trait.StructuralPartialEq.js @@ -0,0 +1,4 @@ +(function() {var implementors = { +"hashbrown":[["impl StructuralPartialEq for TryReserveError"]], +"log":[["impl StructuralPartialEq for Level"],["impl StructuralPartialEq for LevelFilter"],["impl StructuralPartialEq for ParseLevelError"],["impl<'a> StructuralPartialEq for Metadata<'a>"],["impl<'a> StructuralPartialEq for MetadataBuilder<'a>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.Sync.js b/trait.impl/core/marker/trait.Sync.js new file mode 100644 index 000000000..2c3365768 --- /dev/null +++ b/trait.impl/core/marker/trait.Sync.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> !Sync for Rc<T>",1,["cactusref::rc::Rc"]],["impl<T> !Sync for Weak<T>",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl Sync for TryReserveError",1,["hashbrown::TryReserveError"]],["impl Sync for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A> Sync for EntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Sync,\n Q: Sync + ?Sized,\n V: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Sync for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Sync,\n Q: Sync + ?Sized,\n V: Sync,\n S: Sync,\n A: Sync + Allocator,
"],["impl<'a, 'b, K, Q, V, S, A> Sync for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Sync,\n Q: Sync + ?Sized,\n S: Sync,\n A: Sync,\n V: Sync,
",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> Sync for Iter<'a, K>
where\n K: Sync,
",1,["hashbrown::set::Iter"]],["impl<'a, K, A> Sync for Drain<'a, K, A>
where\n A: Sync,\n K: Sync,
",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A> Sync for ExtractIf<'a, K, F, A>
where\n F: Sync,\n A: Sync,\n K: Sync,
",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> Sync for Iter<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Iter"]],["impl<'a, K, V> Sync for IterMut<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::IterMut"]],["impl<'a, K, V> Sync for Keys<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Keys"]],["impl<'a, K, V> Sync for Values<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Values"]],["impl<'a, K, V> Sync for ValuesMut<'a, K, V>
where\n K: Sync,\n V: Sync,
",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V, A> Sync for Drain<'a, K, V, A>
where\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A> Sync for ExtractIf<'a, K, V, F, A>
where\n F: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S, A> Sync for Entry<'a, K, V, S, A>
where\n K: Sync,\n V: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A> Sync for RawEntryMut<'a, K, V, S, A>
where\n K: Sync,\n V: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A> Sync for OccupiedError<'a, K, V, S, A>
where\n V: Sync,\n K: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A> Sync for RawEntryBuilder<'a, K, V, S, A>
where\n S: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, K, V, S, A> Sync for RawEntryBuilderMut<'a, K, V, S, A>
where\n S: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A> Sync for RawVacantEntryMut<'a, K, V, S, A>
where\n S: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> Sync for VacantEntry<'a, K, V, S, A>
where\n K: Sync,\n S: Sync,\n A: Sync,\n V: Sync,
",1,["hashbrown::map::VacantEntry"]],["impl<'a, T> Sync for Iter<'a, T>
where\n T: Sync,
",1,["hashbrown::table::Iter"]],["impl<'a, T> Sync for IterMut<'a, T>
where\n T: Sync,
",1,["hashbrown::table::IterMut"]],["impl<'a, T, A> Sync for Entry<'a, T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::Entry"]],["impl<'a, T, A> Sync for AbsentEntry<'a, T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A> Sync for Drain<'a, T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::Drain"]],["impl<'a, T, A> Sync for VacantEntry<'a, T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, F, A> Sync for ExtractIf<'a, T, F, A>
where\n F: Sync,\n T: Sync,\n A: Sync,
",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A> Sync for Entry<'a, T, S, A>
where\n T: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A> Sync for Difference<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> Sync for Intersection<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> Sync for OccupiedEntry<'a, T, S, A>
where\n T: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A> Sync for SymmetricDifference<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> Sync for Union<'a, T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::Union"]],["impl<'a, T, S, A> Sync for VacantEntry<'a, T, S, A>
where\n T: Sync,\n S: Sync,\n A: Sync,
",1,["hashbrown::set::VacantEntry"]],["impl<K, A> Sync for IntoIter<K, A>
where\n A: Sync,\n K: Sync,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> Sync for IntoIter<K, V, A>
where\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> Sync for IntoKeys<K, V, A>
where\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> Sync for IntoValues<K, V, A>
where\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> Sync for HashMap<K, V, S, A>
where\n S: Sync,\n A: Sync,\n K: Sync,\n V: Sync,
",1,["hashbrown::map::HashMap"]],["impl<K, V, S, A> Sync for OccupiedEntry<'_, K, V, S, A>
where\n K: Sync,\n V: Sync,\n S: Sync,\n A: Sync + Allocator,
"],["impl<K, V, S, A> Sync for RawOccupiedEntryMut<'_, K, V, S, A>
where\n K: Sync,\n V: Sync,\n S: Sync,\n A: Sync + Allocator,
"],["impl<T, A> Sync for HashTable<T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::HashTable"]],["impl<T, A> Sync for IntoIter<T, A>
where\n T: Sync,\n A: Sync,
",1,["hashbrown::table::IntoIter"]],["impl<T, A> Sync for OccupiedEntry<'_, T, A>
where\n T: Sync,\n A: Sync + Allocator,
"],["impl<T, S, A> Sync for HashSet<T, S, A>
where\n S: Sync,\n A: Sync,\n T: Sync,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl Sync for Level",1,["log::Level"]],["impl Sync for LevelFilter",1,["log::LevelFilter"]],["impl Sync for ParseLevelError",1,["log::ParseLevelError"]],["impl Sync for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> !Sync for Record<'a>",1,["log::Record"]],["impl<'a> !Sync for RecordBuilder<'a>",1,["log::RecordBuilder"]],["impl<'a> Sync for Metadata<'a>",1,["log::Metadata"]],["impl<'a> Sync for MetadataBuilder<'a>",1,["log::MetadataBuilder"]]], +"rustc_hash":[["impl Sync for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/marker/trait.Unpin.js b/trait.impl/core/marker/trait.Unpin.js new file mode 100644 index 000000000..69c139002 --- /dev/null +++ b/trait.impl/core/marker/trait.Unpin.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Unpin for Rc<T>"],["impl<T> Unpin for Weak<T>
where\n T: Unpin,
",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl Unpin for TryReserveError",1,["hashbrown::TryReserveError"]],["impl Unpin for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A> Unpin for EntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Unpin,\n Q: ?Sized,
",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Unpin for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Unpin,\n Q: ?Sized,
",1,["hashbrown::map::OccupiedEntryRef"]],["impl<'a, 'b, K, Q, V, S, A> Unpin for VacantEntryRef<'a, 'b, K, Q, V, S, A>
where\n K: Unpin,\n Q: ?Sized,
",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> Unpin for Iter<'a, K>",1,["hashbrown::set::Iter"]],["impl<'a, K, A> Unpin for Drain<'a, K, A>",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A> Unpin for ExtractIf<'a, K, F, A>
where\n F: Unpin,
",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> Unpin for Iter<'a, K, V>",1,["hashbrown::map::Iter"]],["impl<'a, K, V> Unpin for IterMut<'a, K, V>",1,["hashbrown::map::IterMut"]],["impl<'a, K, V> Unpin for Keys<'a, K, V>",1,["hashbrown::map::Keys"]],["impl<'a, K, V> Unpin for Values<'a, K, V>",1,["hashbrown::map::Values"]],["impl<'a, K, V> Unpin for ValuesMut<'a, K, V>",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V, A> Unpin for Drain<'a, K, V, A>",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A> Unpin for ExtractIf<'a, K, V, F, A>
where\n F: Unpin,
",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S, A> Unpin for Entry<'a, K, V, S, A>
where\n K: Unpin,
",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A> Unpin for RawEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A> Unpin for OccupiedEntry<'a, K, V, S, A>
where\n K: Unpin,
",1,["hashbrown::map::OccupiedEntry"]],["impl<'a, K, V, S, A> Unpin for OccupiedError<'a, K, V, S, A>
where\n V: Unpin,\n K: Unpin,
",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A> Unpin for RawEntryBuilder<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, K, V, S, A> Unpin for RawEntryBuilderMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A> Unpin for RawOccupiedEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawOccupiedEntryMut"]],["impl<'a, K, V, S, A> Unpin for RawVacantEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> Unpin for VacantEntry<'a, K, V, S, A>
where\n K: Unpin,
",1,["hashbrown::map::VacantEntry"]],["impl<'a, T> Unpin for Iter<'a, T>",1,["hashbrown::table::Iter"]],["impl<'a, T> Unpin for IterMut<'a, T>",1,["hashbrown::table::IterMut"]],["impl<'a, T, A> Unpin for Entry<'a, T, A>",1,["hashbrown::table::Entry"]],["impl<'a, T, A> Unpin for AbsentEntry<'a, T, A>",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A> Unpin for Drain<'a, T, A>",1,["hashbrown::table::Drain"]],["impl<'a, T, A> Unpin for OccupiedEntry<'a, T, A>",1,["hashbrown::table::OccupiedEntry"]],["impl<'a, T, A> Unpin for VacantEntry<'a, T, A>",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, F, A> Unpin for ExtractIf<'a, T, F, A>
where\n F: Unpin,
",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A> Unpin for Entry<'a, T, S, A>
where\n T: Unpin,
",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A> Unpin for Difference<'a, T, S, A>",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> Unpin for Intersection<'a, T, S, A>",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> Unpin for OccupiedEntry<'a, T, S, A>
where\n T: Unpin,
",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A> Unpin for SymmetricDifference<'a, T, S, A>",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> Unpin for Union<'a, T, S, A>",1,["hashbrown::set::Union"]],["impl<'a, T, S, A> Unpin for VacantEntry<'a, T, S, A>
where\n T: Unpin,
",1,["hashbrown::set::VacantEntry"]],["impl<K, A> Unpin for IntoIter<K, A>
where\n A: Unpin,\n K: Unpin,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> Unpin for IntoIter<K, V, A>
where\n A: Unpin,\n K: Unpin,\n V: Unpin,
",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> Unpin for IntoKeys<K, V, A>
where\n A: Unpin,\n K: Unpin,\n V: Unpin,
",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> Unpin for IntoValues<K, V, A>
where\n A: Unpin,\n K: Unpin,\n V: Unpin,
",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> Unpin for HashMap<K, V, S, A>
where\n S: Unpin,\n A: Unpin,\n K: Unpin,\n V: Unpin,
",1,["hashbrown::map::HashMap"]],["impl<T, A> Unpin for HashTable<T, A>
where\n A: Unpin,\n T: Unpin,
",1,["hashbrown::table::HashTable"]],["impl<T, A> Unpin for IntoIter<T, A>
where\n T: Unpin,\n A: Unpin,
",1,["hashbrown::table::IntoIter"]],["impl<T, S, A> Unpin for HashSet<T, S, A>
where\n S: Unpin,\n A: Unpin,\n T: Unpin,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl Unpin for Level",1,["log::Level"]],["impl Unpin for LevelFilter",1,["log::LevelFilter"]],["impl Unpin for ParseLevelError",1,["log::ParseLevelError"]],["impl Unpin for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> Unpin for Metadata<'a>",1,["log::Metadata"]],["impl<'a> Unpin for MetadataBuilder<'a>",1,["log::MetadataBuilder"]],["impl<'a> Unpin for Record<'a>",1,["log::Record"]],["impl<'a> Unpin for RecordBuilder<'a>",1,["log::RecordBuilder"]]], +"rustc_hash":[["impl Unpin for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/arith/trait.Sub.js b/trait.impl/core/ops/arith/trait.Sub.js new file mode 100644 index 000000000..752da7d7f --- /dev/null +++ b/trait.impl/core/ops/arith/trait.Sub.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<T, S> Sub<&HashSet<T, S>> for &HashSet<T, S>
where\n T: Eq + Hash + Clone,\n S: BuildHasher + Default,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/bit/trait.BitAnd.js b/trait.impl/core/ops/bit/trait.BitAnd.js new file mode 100644 index 000000000..f9754999b --- /dev/null +++ b/trait.impl/core/ops/bit/trait.BitAnd.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<T, S, A> BitAnd<&HashSet<T, S, A>> for &HashSet<T, S, A>
where\n T: Eq + Hash + Clone,\n S: BuildHasher + Default,\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/bit/trait.BitOr.js b/trait.impl/core/ops/bit/trait.BitOr.js new file mode 100644 index 000000000..e2d594444 --- /dev/null +++ b/trait.impl/core/ops/bit/trait.BitOr.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<T, S, A> BitOr<&HashSet<T, S, A>> for &HashSet<T, S, A>
where\n T: Eq + Hash + Clone,\n S: BuildHasher + Default,\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/bit/trait.BitXor.js b/trait.impl/core/ops/bit/trait.BitXor.js new file mode 100644 index 000000000..6d6fdc77d --- /dev/null +++ b/trait.impl/core/ops/bit/trait.BitXor.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<T, S> BitXor<&HashSet<T, S>> for &HashSet<T, S>
where\n T: Eq + Hash + Clone,\n S: BuildHasher + Default,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/deref/trait.Deref.js b/trait.impl/core/ops/deref/trait.Deref.js new file mode 100644 index 000000000..0e4f12d37 --- /dev/null +++ b/trait.impl/core/ops/deref/trait.Deref.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Deref for Rc<T>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/drop/trait.Drop.js b/trait.impl/core/ops/drop/trait.Drop.js new file mode 100644 index 000000000..7fdfb8ead --- /dev/null +++ b/trait.impl/core/ops/drop/trait.Drop.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"cactusref":[["impl<T> Drop for Rc<T>"],["impl<T> Drop for Weak<T>"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/ops/index/trait.Index.js b/trait.impl/core/ops/index/trait.Index.js new file mode 100644 index 000000000..4267b373e --- /dev/null +++ b/trait.impl/core/ops/index/trait.Index.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[["impl<K, Q, V, S, A> Index<&Q> for HashMap<K, V, S, A>
where\n K: Eq + Hash,\n Q: Hash + Equivalent<K> + ?Sized,\n S: BuildHasher,\n A: Allocator,
"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js b/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js new file mode 100644 index 000000000..e8a255316 --- /dev/null +++ b/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> !RefUnwindSafe for Rc<T>",1,["cactusref::rc::Rc"]],["impl<T> !RefUnwindSafe for Weak<T>",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl RefUnwindSafe for TryReserveError",1,["hashbrown::TryReserveError"]],["impl RefUnwindSafe for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for EntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::OccupiedEntryRef"]],["impl<'a, 'b, K, Q, V, S, A> RefUnwindSafe for VacantEntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> RefUnwindSafe for Iter<'a, K>
where\n K: RefUnwindSafe,
",1,["hashbrown::set::Iter"]],["impl<'a, K, A> RefUnwindSafe for Drain<'a, K, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,
",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A> RefUnwindSafe for ExtractIf<'a, K, F, A>
where\n F: RefUnwindSafe,\n A: RefUnwindSafe,\n K: RefUnwindSafe,
",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> RefUnwindSafe for Iter<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Iter"]],["impl<'a, K, V> RefUnwindSafe for IterMut<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::IterMut"]],["impl<'a, K, V> RefUnwindSafe for Keys<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Keys"]],["impl<'a, K, V> RefUnwindSafe for Values<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Values"]],["impl<'a, K, V> RefUnwindSafe for ValuesMut<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V, A> RefUnwindSafe for Drain<'a, K, V, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A> RefUnwindSafe for ExtractIf<'a, K, V, F, A>",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S, A> RefUnwindSafe for Entry<'a, K, V, S, A>",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A> RefUnwindSafe for RawEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A> RefUnwindSafe for OccupiedEntry<'a, K, V, S, A>",1,["hashbrown::map::OccupiedEntry"]],["impl<'a, K, V, S, A> RefUnwindSafe for OccupiedError<'a, K, V, S, A>",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A> RefUnwindSafe for RawEntryBuilder<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, K, V, S, A> RefUnwindSafe for RawEntryBuilderMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A> RefUnwindSafe for RawOccupiedEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawOccupiedEntryMut"]],["impl<'a, K, V, S, A> RefUnwindSafe for RawVacantEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> RefUnwindSafe for VacantEntry<'a, K, V, S, A>",1,["hashbrown::map::VacantEntry"]],["impl<'a, T> RefUnwindSafe for Iter<'a, T>
where\n T: RefUnwindSafe,
",1,["hashbrown::table::Iter"]],["impl<'a, T> RefUnwindSafe for IterMut<'a, T>
where\n T: RefUnwindSafe,
",1,["hashbrown::table::IterMut"]],["impl<'a, T, A> RefUnwindSafe for Entry<'a, T, A>
where\n T: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::table::Entry"]],["impl<'a, T, A> RefUnwindSafe for AbsentEntry<'a, T, A>
where\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A> RefUnwindSafe for Drain<'a, T, A>
where\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::Drain"]],["impl<'a, T, A> RefUnwindSafe for OccupiedEntry<'a, T, A>
where\n T: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::table::OccupiedEntry"]],["impl<'a, T, A> RefUnwindSafe for VacantEntry<'a, T, A>
where\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, F, A> RefUnwindSafe for ExtractIf<'a, T, F, A>
where\n F: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A> RefUnwindSafe for Entry<'a, T, S, A>
where\n T: RefUnwindSafe,\n S: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A> RefUnwindSafe for Difference<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> RefUnwindSafe for Intersection<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> RefUnwindSafe for OccupiedEntry<'a, T, S, A>
where\n T: RefUnwindSafe,\n S: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A> RefUnwindSafe for SymmetricDifference<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> RefUnwindSafe for Union<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Union"]],["impl<'a, T, S, A> RefUnwindSafe for VacantEntry<'a, T, S, A>
where\n T: RefUnwindSafe,\n S: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::set::VacantEntry"]],["impl<K, A> RefUnwindSafe for IntoIter<K, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> RefUnwindSafe for IntoIter<K, V, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> RefUnwindSafe for IntoKeys<K, V, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> RefUnwindSafe for IntoValues<K, V, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> RefUnwindSafe for HashMap<K, V, S, A>",1,["hashbrown::map::HashMap"]],["impl<T, A> RefUnwindSafe for HashTable<T, A>
where\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::HashTable"]],["impl<T, A> RefUnwindSafe for IntoIter<T, A>
where\n T: RefUnwindSafe,\n A: RefUnwindSafe,
",1,["hashbrown::table::IntoIter"]],["impl<T, S, A> RefUnwindSafe for HashSet<T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl RefUnwindSafe for Level",1,["log::Level"]],["impl RefUnwindSafe for LevelFilter",1,["log::LevelFilter"]],["impl RefUnwindSafe for ParseLevelError",1,["log::ParseLevelError"]],["impl RefUnwindSafe for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> !RefUnwindSafe for Record<'a>",1,["log::Record"]],["impl<'a> !RefUnwindSafe for RecordBuilder<'a>",1,["log::RecordBuilder"]],["impl<'a> RefUnwindSafe for Metadata<'a>",1,["log::Metadata"]],["impl<'a> RefUnwindSafe for MetadataBuilder<'a>",1,["log::MetadataBuilder"]]], +"rustc_hash":[["impl RefUnwindSafe for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js b/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js new file mode 100644 index 000000000..58149d753 --- /dev/null +++ b/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js @@ -0,0 +1,6 @@ +(function() {var implementors = { +"cactusref":[["impl<T> !UnwindSafe for Rc<T>",1,["cactusref::rc::Rc"]],["impl<T> !UnwindSafe for Weak<T>",1,["cactusref::rc::Weak"]]], +"hashbrown":[["impl UnwindSafe for TryReserveError",1,["hashbrown::TryReserveError"]],["impl UnwindSafe for DefaultHashBuilder",1,["hashbrown::map::DefaultHashBuilder"]],["impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for EntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::EntryRef"]],["impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for OccupiedEntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::OccupiedEntryRef"]],["impl<'a, 'b, K, Q, V, S, A = Global> !UnwindSafe for VacantEntryRef<'a, 'b, K, Q, V, S, A>",1,["hashbrown::map::VacantEntryRef"]],["impl<'a, K> UnwindSafe for Iter<'a, K>
where\n K: RefUnwindSafe,
",1,["hashbrown::set::Iter"]],["impl<'a, K, A> UnwindSafe for Drain<'a, K, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,
",1,["hashbrown::set::Drain"]],["impl<'a, K, F, A = Global> !UnwindSafe for ExtractIf<'a, K, F, A>",1,["hashbrown::set::ExtractIf"]],["impl<'a, K, V> !UnwindSafe for IterMut<'a, K, V>",1,["hashbrown::map::IterMut"]],["impl<'a, K, V> !UnwindSafe for ValuesMut<'a, K, V>",1,["hashbrown::map::ValuesMut"]],["impl<'a, K, V> UnwindSafe for Iter<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Iter"]],["impl<'a, K, V> UnwindSafe for Keys<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Keys"]],["impl<'a, K, V> UnwindSafe for Values<'a, K, V>
where\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Values"]],["impl<'a, K, V, A> UnwindSafe for Drain<'a, K, V, A>
where\n A: RefUnwindSafe,\n K: RefUnwindSafe,\n V: RefUnwindSafe,
",1,["hashbrown::map::Drain"]],["impl<'a, K, V, F, A = Global> !UnwindSafe for ExtractIf<'a, K, V, F, A>",1,["hashbrown::map::ExtractIf"]],["impl<'a, K, V, S = DefaultHashBuilder, A = Global> !UnwindSafe for OccupiedEntry<'a, K, V, S, A>",1,["hashbrown::map::OccupiedEntry"]],["impl<'a, K, V, S = DefaultHashBuilder, A = Global> !UnwindSafe for VacantEntry<'a, K, V, S, A>",1,["hashbrown::map::VacantEntry"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for Entry<'a, K, V, S, A>",1,["hashbrown::map::Entry"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for RawEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryMut"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for OccupiedError<'a, K, V, S, A>",1,["hashbrown::map::OccupiedError"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for RawEntryBuilderMut<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilderMut"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for RawOccupiedEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawOccupiedEntryMut"]],["impl<'a, K, V, S, A = Global> !UnwindSafe for RawVacantEntryMut<'a, K, V, S, A>",1,["hashbrown::map::RawVacantEntryMut"]],["impl<'a, K, V, S, A> UnwindSafe for RawEntryBuilder<'a, K, V, S, A>",1,["hashbrown::map::RawEntryBuilder"]],["impl<'a, T> !UnwindSafe for IterMut<'a, T>",1,["hashbrown::table::IterMut"]],["impl<'a, T> UnwindSafe for Iter<'a, T>
where\n T: RefUnwindSafe,
",1,["hashbrown::table::Iter"]],["impl<'a, T, A = Global> !UnwindSafe for Entry<'a, T, A>",1,["hashbrown::table::Entry"]],["impl<'a, T, A = Global> !UnwindSafe for AbsentEntry<'a, T, A>",1,["hashbrown::table::AbsentEntry"]],["impl<'a, T, A = Global> !UnwindSafe for OccupiedEntry<'a, T, A>",1,["hashbrown::table::OccupiedEntry"]],["impl<'a, T, A = Global> !UnwindSafe for VacantEntry<'a, T, A>",1,["hashbrown::table::VacantEntry"]],["impl<'a, T, A> UnwindSafe for Drain<'a, T, A>
where\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::table::Drain"]],["impl<'a, T, F, A = Global> !UnwindSafe for ExtractIf<'a, T, F, A>",1,["hashbrown::table::ExtractIf"]],["impl<'a, T, S, A = Global> !UnwindSafe for Entry<'a, T, S, A>",1,["hashbrown::set::Entry"]],["impl<'a, T, S, A = Global> !UnwindSafe for OccupiedEntry<'a, T, S, A>",1,["hashbrown::set::OccupiedEntry"]],["impl<'a, T, S, A = Global> !UnwindSafe for VacantEntry<'a, T, S, A>",1,["hashbrown::set::VacantEntry"]],["impl<'a, T, S, A> UnwindSafe for Difference<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Difference"]],["impl<'a, T, S, A> UnwindSafe for Intersection<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Intersection"]],["impl<'a, T, S, A> UnwindSafe for SymmetricDifference<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::SymmetricDifference"]],["impl<'a, T, S, A> UnwindSafe for Union<'a, T, S, A>
where\n S: RefUnwindSafe,\n A: RefUnwindSafe,\n T: RefUnwindSafe,
",1,["hashbrown::set::Union"]],["impl<K, A> UnwindSafe for IntoIter<K, A>
where\n A: UnwindSafe,\n K: UnwindSafe + RefUnwindSafe,
",1,["hashbrown::set::IntoIter"]],["impl<K, V, A> UnwindSafe for IntoIter<K, V, A>",1,["hashbrown::map::IntoIter"]],["impl<K, V, A> UnwindSafe for IntoKeys<K, V, A>",1,["hashbrown::map::IntoKeys"]],["impl<K, V, A> UnwindSafe for IntoValues<K, V, A>",1,["hashbrown::map::IntoValues"]],["impl<K, V, S, A> UnwindSafe for HashMap<K, V, S, A>
where\n S: UnwindSafe,\n A: UnwindSafe,\n K: UnwindSafe,\n V: UnwindSafe,
",1,["hashbrown::map::HashMap"]],["impl<T, A> UnwindSafe for HashTable<T, A>
where\n A: UnwindSafe,\n T: UnwindSafe,
",1,["hashbrown::table::HashTable"]],["impl<T, A> UnwindSafe for IntoIter<T, A>
where\n T: UnwindSafe + RefUnwindSafe,\n A: UnwindSafe,
",1,["hashbrown::table::IntoIter"]],["impl<T, S, A> UnwindSafe for HashSet<T, S, A>
where\n S: UnwindSafe,\n A: UnwindSafe,\n T: UnwindSafe,
",1,["hashbrown::set::HashSet"]]], +"log":[["impl UnwindSafe for Level",1,["log::Level"]],["impl UnwindSafe for LevelFilter",1,["log::LevelFilter"]],["impl UnwindSafe for ParseLevelError",1,["log::ParseLevelError"]],["impl UnwindSafe for SetLoggerError",1,["log::SetLoggerError"]],["impl<'a> !UnwindSafe for Record<'a>",1,["log::Record"]],["impl<'a> !UnwindSafe for RecordBuilder<'a>",1,["log::RecordBuilder"]],["impl<'a> UnwindSafe for Metadata<'a>",1,["log::Metadata"]],["impl<'a> UnwindSafe for MetadataBuilder<'a>",1,["log::MetadataBuilder"]]], +"rustc_hash":[["impl UnwindSafe for FxHasher",1,["rustc_hash::FxHasher"]]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/core/str/traits/trait.FromStr.js b/trait.impl/core/str/traits/trait.FromStr.js new file mode 100644 index 000000000..a590da9b8 --- /dev/null +++ b/trait.impl/core/str/traits/trait.FromStr.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"log":[["impl FromStr for Level"],["impl FromStr for LevelFilter"]] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/hashbrown/trait.Equivalent.js b/trait.impl/hashbrown/trait.Equivalent.js new file mode 100644 index 000000000..667ea3fcb --- /dev/null +++ b/trait.impl/hashbrown/trait.Equivalent.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"hashbrown":[] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/trait.impl/log/trait.Log.js b/trait.impl/log/trait.Log.js new file mode 100644 index 000000000..e35e11d84 --- /dev/null +++ b/trait.impl/log/trait.Log.js @@ -0,0 +1,3 @@ +(function() {var implementors = { +"log":[] +};if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/type.impl/cactusref/struct.Rc.js b/type.impl/cactusref/struct.Rc.js new file mode 100644 index 000000000..c3529cc7a --- /dev/null +++ b/type.impl/cactusref/struct.Rc.js @@ -0,0 +1,3 @@ +(function() {var type_impls = { +"cactusref":[["
impl<T> Adopt for Rc<T>

Implementation of Adopt for Rc which enables Rcs to form a cycle of strong references that are reaped by Rc’s Drop implementation.

unsafe fn adopt_unchecked(this: &Self, other: &Self)

Perform bookkeeping to record that this has an owned reference to other.

Adoption is a one-way link, or a directed edge in the object graph which means “this owns other”.

adopt can be called multiple times for a pair of Rcs. Each call to adopt indicates that this owns one distinct clone of other.

This is an associated function that needs to be used as Rc::adopt_unchecked(...). A method would interfere with methods of the same name on the contents of a Rc used through Deref.

§Safety

Callers must ensure that this owns a strong reference to other.

Callers should call unadopt when this no longer holds a strong reference to other to avoid memory leaks, but this is not required for soundness.

Calling adopt does not increment the strong count of other. Callers must ensure that other has been cloned and stored in the T contained by this.

§Examples

The following implements a self-referential array.

use cactusref::{Adopt, Rc};
use std::cell::RefCell;

#[derive(Default)]
struct Array {
    buffer: Vec<Rc<RefCell<Self>>>,
}

let array = Rc::new(RefCell::new(Array::default()));
for _ in 0..10 {
    let item = Rc::clone(&array);
    unsafe {
        Rc::adopt_unchecked(&array, &item);
    }
    array.borrow_mut().buffer.push(item);
}
let weak = Rc::downgrade(&array);
// 1 for the array binding, 10 for the `Rc`s in buffer
assert_eq!(Rc::strong_count(&array), 11);
drop(array);
assert!(weak.upgrade().is_none());
assert_eq!(weak.weak_count(), 0);

fn unadopt(this: &Self, other: &Self)

Perform bookkeeping to record that this has removed an owned reference to other.

Adoption is a one-way link, or a directed edge in the object graph which means “this owns other”.

This is an associated function that needs to be used as Adopt::unadopt(...). A method would interfere with methods of the same name on the contents of a Rc used through Deref.

§Memory Leaks

Failure to call this function when removing an owned Rc from this is safe, but may result in a memory leak.

§Examples

The following implements a self-referential array.

use cactusref::{Adopt, Rc};
use std::cell::RefCell;

#[derive(Default)]
struct Array {
    buffer: Vec<Rc<RefCell<Self>>>,
}

let array = Rc::new(RefCell::new(Array::default()));
for _ in 0..10 {
    let item = Rc::clone(&array);
    unsafe {
        Rc::adopt_unchecked(&array, &item);
    }
    array.borrow_mut().buffer.push(item);
}
let weak = Rc::downgrade(&array);
// 1 for the array binding, 10 for the `Rc`s in buffer
assert_eq!(Rc::strong_count(&array), 11);

let head = array.borrow_mut().buffer.pop().unwrap();
Rc::unadopt(&array, &head);

drop(head);
assert_eq!(Rc::strong_count(&array), 10);
drop(array);
assert!(weak.upgrade().is_none());
assert_eq!(weak.weak_count(), 0);
","Adopt","cactusref::CactusRef"],["
source§

impl<T> AsRef<T> for Rc<T>

source§

fn as_ref(&self) -> &T

Converts this type into a shared reference of the (usually inferred) input type.
","AsRef","cactusref::CactusRef"],["
source§

impl<T> Borrow<T> for Rc<T>

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
","Borrow","cactusref::CactusRef"],["
source§

impl<T> Clone for Rc<T>

source§

fn clone(&self) -> Rc<T>

Makes a clone of the Rc pointer.

\n

This creates another pointer to the same allocation, increasing the\nstrong reference count.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nlet _ = Rc::clone(&five);
\n
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
","Clone","cactusref::CactusRef"],["
source§

impl<T: Debug> Debug for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
","Debug","cactusref::CactusRef"],["
source§

impl<T: Default> Default for Rc<T>

source§

fn default() -> Rc<T>

Creates a new Rc<T>, with the Default value for T.

\n
§Examples
\n
use cactusref::Rc;\n\nlet x: Rc<i32> = Default::default();\nassert_eq!(*x, 0);
\n
","Default","cactusref::CactusRef"],["
source§

impl<T> Deref for Rc<T>

§

type Target = T

The resulting type after dereferencing.
source§

fn deref(&self) -> &T

Dereferences the value.
","Deref","cactusref::CactusRef"],["
source§

impl<T: Display> Display for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
","Display","cactusref::CactusRef"],["
impl<T> Drop for Rc<T>

fn drop(&mut self)

Drops the Rc.

This will decrement the strong reference count. If the strong reference count reaches zero then the only other references (if any) are Weak, so we drop the inner value.

If this Rc has adopted any other Rcs, drop will trace the reachable object graph and detect if this Rc is part of an orphaned cycle. An orphaned cycle is a cycle in which all members have no owned references held by Rcs outside of the cycle.

Rcs do not pay the cost of the reachability check unless they use Adopt::adopt_unchecked.

§Examples

use cactusref::Rc;

struct Foo;

impl Drop for Foo {
    fn drop(&mut self) {
        println!("dropped!");
    }
}

let foo  = Rc::new(Foo);
let foo2 = Rc::clone(&foo);

drop(foo);    // Doesn't print anything
drop(foo2);   // Prints "dropped!"

use cactusref::{Adopt, Rc};

struct Foo(u8);

impl Drop for Foo {
    fn drop(&mut self) {
        println!("dropped {}!", self.0);
    }
}

let foo  = Rc::new(Foo(10));
let foo2 = Rc::new(Foo(20));

unsafe {
    Rc::adopt_unchecked(&foo, &foo2);
    Rc::adopt_unchecked(&foo2, &foo);
}

drop(foo);    // Doesn't print anything
drop(foo2);   // Prints "dropped 10!" and "dropped 20!"

§Cycle Detection and Deallocation Algorithm

Rc::adopt_unchecked does explicit bookkeeping to store links to adoptee Rcs. These links form a graph of reachable objects which are used to detect cycles.

On drop, if an Rc has no links, it is dropped like a normal Rc. If the Rc has links, Drop performs a breadth first search by traversing the forward and backward links stored in each Rc. Deallocating cycles requires correct use of Adopt::adopt_unchecked and Adopt::unadopt to perform the reachability bookkeeping.

After determining all reachable objects, Rc reduces the graph to objects that form a cycle by performing pairwise reachability checks. During this step, for each object in the cycle, Rc counts the number of refs held by other objects in the cycle.

Using the cycle-held references, Rc computes whether the object graph is reachable by any non-cycle nodes by comparing strong counts.

If the cycle is orphaned, Rc busts all the link structures and deallocates each object.

§Performance

Cycle detection uses breadth first search to trace the object graph. The runtime complexity of detecting a cycle is O(links + nodes) where links is the number of adoptions that are alive and nodes is the number of objects in the cycle.

Determining whether the cycle is orphaned builds on cycle detection and iterates over all nodes in the graph to see if their strong count is greater than the number of references in the cycle. The runtime complexity of finding an orphaned cycle is O(links + nodes) where links is the number of adoptions that are alive and nodes is the number of objects in the cycle.
","Drop","cactusref::CactusRef"],["
source§

impl<T> From<Box<T>> for Rc<T>

source§

fn from(v: Box<T>) -> Rc<T>

Move a boxed object to a new, reference counted, allocation.

\n
§Example
\n
use cactusref::Rc;

let original: Box<i32> = Box::new(1);
let shared: Rc<i32> = Rc::from(original);
assert_eq!(1, *shared);
\n
","From>","cactusref::CactusRef"],["
source§

impl<T> From<T> for Rc<T>

source§

fn from(t: T) -> Self

Converts a generic type T into a Rc<T>

\n

The conversion allocates on the heap and moves t\nfrom the stack into it.

\n
§Example
\n
use cactusref::Rc;

let x = 5;
let rc = Rc::new(5);

assert_eq!(Rc::from(x), rc);
\n
","From","cactusref::CactusRef"],["
source§

impl<T: Hash> Hash for Rc<T>

source§

fn hash<H: Hasher>(&self, state: &mut H)

Feeds this value into the given Hasher. Read more
1.3.0 · source§

fn hash_slice<H>(data: &[Self], state: &mut H)
where\n H: Hasher,\n Self: Sized,

Feeds a slice of this type into the given Hasher. Read more
","Hash","cactusref::CactusRef"],["
source§

impl<T: Ord> Ord for Rc<T>

source§

fn cmp(&self, other: &Rc<T>) -> Ordering

Comparison for two Rcs.

\n

The two are compared by calling cmp() on their inner values.

\n
§Examples
\n
use cactusref::Rc;\nuse std::cmp::Ordering;\n\nlet five = Rc::new(5);\n\nassert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
\n
1.21.0 · source§

fn max(self, other: Self) -> Self
where\n Self: Sized,

Compares and returns the maximum of two values. Read more
1.21.0 · source§

fn min(self, other: Self) -> Self
where\n Self: Sized,

Compares and returns the minimum of two values. Read more
1.50.0 · source§

fn clamp(self, min: Self, max: Self) -> Self
where\n Self: Sized + PartialOrd,

Restrict a value to a certain interval. Read more
","Ord","cactusref::CactusRef"],["
source§

impl<T: PartialEq> PartialEq for Rc<T>

source§

fn eq(&self, other: &Rc<T>) -> bool

Equality for two Rcs.

\n

Two Rcs are equal if their inner values are equal, even if they are\nstored in different allocation.

\n

If T also implements Eq (implying reflexivity of equality),\ntwo Rcs that point to the same allocation are\nalways equal.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five == Rc::new(5));
\n
source§

fn ne(&self, other: &Rc<T>) -> bool

Inequality for two Rcs.

\n

Two Rcs are unequal if their inner values are unequal.

\n

If T also implements Eq (implying reflexivity of equality),\ntwo Rcs that point to the same allocation are\nnever unequal.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five != Rc::new(6));
\n
","PartialEq","cactusref::CactusRef"],["
source§

impl<T: PartialOrd> PartialOrd for Rc<T>

source§

fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering>

Partial comparison for two Rcs.

\n

The two are compared by calling partial_cmp() on their inner values.

\n
§Examples
\n
use cactusref::Rc;\nuse std::cmp::Ordering;\n\nlet five = Rc::new(5);\n\nassert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
\n
source§

fn lt(&self, other: &Rc<T>) -> bool

Less-than comparison for two Rcs.

\n

The two are compared by calling < on their inner values.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five < Rc::new(6));
\n
source§

fn le(&self, other: &Rc<T>) -> bool

‘Less than or equal to’ comparison for two Rcs.

\n

The two are compared by calling <= on their inner values.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five <= Rc::new(5));
\n
source§

fn gt(&self, other: &Rc<T>) -> bool

Greater-than comparison for two Rcs.

\n

The two are compared by calling > on their inner values.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five > Rc::new(4));
\n
source§

fn ge(&self, other: &Rc<T>) -> bool

‘Greater than or equal to’ comparison for two Rcs.

\n

The two are compared by calling >= on their inner values.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nassert!(five >= Rc::new(5));
\n
","PartialOrd","cactusref::CactusRef"],["
source§

impl<T> Pointer for Rc<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
","Pointer","cactusref::CactusRef"],["
source§

impl<T> Rc<MaybeUninit<T>>

source

pub unsafe fn assume_init(self) -> Rc<T>

Converts to Rc<T>.

\n
§Safety
\n

As with MaybeUninit::assume_init,\nit is up to the caller to guarantee that the inner value\nreally is in an initialized state.\nCalling this when the content is not yet fully initialized\ncauses immediate undefined behavior.

\n
§Examples
\n
use cactusref::Rc;\n\nlet mut five = Rc::<u32>::new_uninit();\n\nlet five = unsafe {\n    // Deferred initialization:\n    Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);\n\n    five.assume_init()\n};\n\nassert_eq!(*five, 5)
\n
",0,"cactusref::CactusRef"],["
impl<T> Rc<T>

pub fn new(value: T) -> Rc<T>

Constructs a new Rc<T>.

§Examples

use cactusref::Rc;

let five = Rc::new(5);

pub fn new_uninit() -> Rc<MaybeUninit<T>>

Constructs a new Rc with uninitialized contents.

§Examples

use cactusref::Rc;

let mut five = Rc::<u32>::new_uninit();

let five = unsafe {
    // Deferred initialization:
    Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);

    five.assume_init()
};

assert_eq!(*five, 5)

pub fn pin(value: T) -> Pin<Rc<T>>

Constructs a new Pin<Rc<T>>. If T does not implement Unpin, then value will be pinned in memory and unable to be moved. (A small usage sketch follows after this block.)

pub fn try_unwrap(this: Self) -> Result<T, Self>

Returns the inner value, if the Rc has exactly one strong reference.

Otherwise, an Err is returned with the same Rc that was passed in.

This will succeed even if there are outstanding weak references.

§Examples

use cactusref::Rc;

let x = Rc::new(3);
assert_eq!(Rc::try_unwrap(x), Ok(3));

let x = Rc::new(4);
let _y = Rc::clone(&x);
assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);

§Errors

If the given Rc does not have exactly one strong reference, it is returned in the Err variant of the returned Result.
",0,"cactusref::CactusRef"],["
source§

impl<T> Rc<T>

source

pub fn into_raw(this: Self) -> *const T

Consumes the Rc, returning the wrapped pointer.

\n

To avoid a memory leak the pointer must be converted back to an Rc using\nRc::from_raw.

\n
§Examples
\n
use cactusref::Rc;\n\nlet x = Rc::new(\"hello\".to_owned());\nlet x_ptr = Rc::into_raw(x);\nassert_eq!(unsafe { &*x_ptr }, \"hello\");\n// Reconstruct the `Rc` to avoid a leak.\nlet _ = unsafe { Rc::from_raw(x_ptr) };
\n
source

pub fn as_ptr(this: &Self) -> *const T

Provides a raw pointer to the data.

\n

The counts are not affected in any way and the Rc is not consumed. The pointer is valid\nfor as long there are strong counts in the Rc.

\n
§Examples
\n
use cactusref::Rc;\n\nlet x = Rc::new(\"hello\".to_owned());\nlet y = Rc::clone(&x);\nlet x_ptr = Rc::as_ptr(&x);\nassert_eq!(x_ptr, Rc::as_ptr(&y));\nassert_eq!(unsafe { &*x_ptr }, \"hello\");
\n
source

pub unsafe fn from_raw(ptr: *const T) -> Self

Constructs an Rc<T> from a raw pointer.

\n

The raw pointer must have been previously returned by a call to\nRc<U>::into_raw where U must have the same size\nand alignment as T. This is trivially true if U is T.\nNote that if U is not T but has the same size and alignment, this is\nbasically like transmuting references of different types. See\nmem::transmute for more information on what\nrestrictions apply in this case.

\n

The user of from_raw has to make sure a specific value of T is only\ndropped once.

\n

This function is unsafe because improper use may lead to memory unsafety,\neven if the returned Rc<T> is never accessed.

\n
§Examples
\n
use cactusref::Rc;\n\nlet x = Rc::new(\"hello\".to_owned());\nlet x_ptr = Rc::into_raw(x);\n\nunsafe {\n    // Convert back to an `Rc` to prevent leak.\n    let x = Rc::from_raw(x_ptr);\n    assert_eq!(&*x, \"hello\");\n\n    // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.\n}\n\n// The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
\n
§Safety
\n

Callers must ensure that ptr points to a live Rc and was created\nwith a call to Rc::into_raw.

\n
source

pub fn downgrade(this: &Self) -> Weak<T>

Creates a new Weak pointer to this allocation.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nlet weak_five = Rc::downgrade(&five);
\n
source

pub fn weak_count(this: &Self) -> usize

Gets the number of Weak pointers to this allocation.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\nlet _weak_five = Rc::downgrade(&five);\n\nassert_eq!(1, Rc::weak_count(&five));
\n
source

pub fn strong_count(this: &Self) -> usize

Gets the number of strong (Rc) pointers to this allocation.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\nlet _also_five = Rc::clone(&five);\n\nassert_eq!(2, Rc::strong_count(&five));
\n
source

pub unsafe fn increment_strong_count(ptr: *const T)

Increments the strong reference count on the Rc<T> associated with the\nprovided pointer by one.

\n
§Safety
\n

The pointer must have been obtained through Rc::into_raw, and the\nassociated Rc instance must be valid (i.e. the strong count must be at\nleast 1) for the duration of this method.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nunsafe {\n    let ptr = Rc::into_raw(five);\n    Rc::increment_strong_count(ptr);\n\n    let five = Rc::from_raw(ptr);\n    assert_eq!(2, Rc::strong_count(&five));\n\n    // Decrement the strong count to avoid a leak.\n    Rc::decrement_strong_count(ptr);\n}
\n
source

pub unsafe fn decrement_strong_count(ptr: *const T)

Decrements the strong reference count on the Rc<T> associated with the\nprovided pointer by one.

\n
§Safety
\n

The pointer must have been obtained through Rc::into_raw, and the\nassociated Rc instance must be valid (i.e. the strong count must be at\nleast 1) when invoking this method. This method can be used to release\nthe final Rc and backing storage, but should not be called after\nthe final Rc has been released.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nunsafe {\n    let ptr = Rc::into_raw(five);\n    Rc::increment_strong_count(ptr);\n\n    let five = Rc::from_raw(ptr);\n    assert_eq!(2, Rc::strong_count(&five));\n    Rc::decrement_strong_count(ptr);\n    assert_eq!(1, Rc::strong_count(&five));\n}
\n
source

pub fn get_mut(this: &mut Self) -> Option<&mut T>

Returns a mutable reference into the given Rc, if there are\nno other Rc or Weak pointers to the same allocation.

\n

Returns None otherwise, because it is not safe to\nmutate a shared value.

\n

See also make_mut, which will clone\nthe inner value when there are other pointers.

\n
§Examples
\n
use cactusref::Rc;\n\nlet mut x = Rc::new(3);\n*Rc::get_mut(&mut x).unwrap() = 4;\nassert_eq!(*x, 4);\n\nlet _y = Rc::clone(&x);\nassert!(Rc::get_mut(&mut x).is_none());
\n
source

pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T

Returns a mutable reference into the given Rc,\nwithout any check.

\n

See also get_mut, which is safe and does appropriate checks.

\n
§Safety
\n

Any other Rc or Weak pointers to the same allocation must not be dereferenced\nfor the duration of the returned borrow.\nThis is trivially the case if no such pointers exist,\nfor example immediately after Rc::new.

\n
§Examples
\n
use cactusref::Rc;\n\nlet mut x = Rc::new(String::new());\nunsafe {\n    Rc::get_mut_unchecked(&mut x).push_str(\"foo\")\n}\nassert_eq!(*x, \"foo\");
\n
source

pub fn ptr_eq(this: &Self, other: &Self) -> bool

Returns true if the two Rcs point to the same allocation\n(in a vein similar to ptr::eq).

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\nlet same_five = Rc::clone(&five);\nlet other_five = Rc::new(5);\n\nassert!(Rc::ptr_eq(&five, &same_five));\nassert!(!Rc::ptr_eq(&five, &other_five));
\n
",0,"cactusref::CactusRef"],["
impl<T: Clone> Rc<T>

pub fn make_mut(this: &mut Self) -> &mut T

Makes a mutable reference into the given Rc.

If there are other Rc pointers to the same allocation, then make_mut will clone the inner value to a new allocation to ensure unique ownership. This is also referred to as clone-on-write.

If there are no other Rc pointers to this allocation, then Weak pointers to this allocation will be disassociated.

See also get_mut, which will fail rather than cloning. (A short sketch contrasting the two follows after this block.)

§Examples

use cactusref::Rc;

let mut data = Rc::new(5);

*Rc::make_mut(&mut data) += 1;        // Won't clone anything
let mut other_data = Rc::clone(&data);    // Won't clone inner data
*Rc::make_mut(&mut data) += 1;        // Clones inner data
*Rc::make_mut(&mut data) += 1;        // Won't clone anything
*Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything

// Now `data` and `other_data` point to different allocations.
assert_eq!(*data, 8);
assert_eq!(*other_data, 12);

Weak pointers will be disassociated:

use cactusref::Rc;

let mut data = Rc::new(75);
let weak = Rc::downgrade(&data);

assert!(75 == *data);
assert!(75 == *weak.upgrade().unwrap());

*Rc::make_mut(&mut data) += 1;

assert!(76 == *data);
assert!(weak.upgrade().is_none());
",0,"cactusref::CactusRef"],["
source§

impl<T: Eq> Eq for Rc<T>

","Eq","cactusref::CactusRef"],["
source§

impl<T> Unpin for Rc<T>

","Unpin","cactusref::CactusRef"]] +};if (window.register_type_impls) {window.register_type_impls(type_impls);} else {window.pending_type_impls = type_impls;}})() \ No newline at end of file diff --git a/type.impl/cactusref/struct.Weak.js b/type.impl/cactusref/struct.Weak.js new file mode 100644 index 000000000..19dc49381 --- /dev/null +++ b/type.impl/cactusref/struct.Weak.js @@ -0,0 +1,3 @@ +(function() {var type_impls = { +"cactusref":[["
source§

impl<T> Clone for Weak<T>

source§

fn clone(&self) -> Weak<T>

Makes a clone of the Weak pointer that points to the same allocation.

\n
§Examples
\n
use cactusref::{Rc, Weak};\n\nlet weak_five = Rc::downgrade(&Rc::new(5));\n\nlet _ = Weak::clone(&weak_five);
\n
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
","Clone","cactusref::CactusWeakRef"],["
source§

impl<T: Debug> Debug for Weak<T>

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
","Debug","cactusref::CactusWeakRef"],["
source§

impl<T> Default for Weak<T>

source§

fn default() -> Weak<T>

Constructs a new Weak<T>, without allocating any memory.\nCalling upgrade on the return value always gives None.

\n
§Examples
\n
use cactusref::Weak;\n\nlet empty: Weak<i64> = Default::default();\nassert!(empty.upgrade().is_none());
\n
","Default","cactusref::CactusWeakRef"],["
source§

impl<T> Drop for Weak<T>

source§

fn drop(&mut self)

Drops the Weak pointer.

\n
§Examples
\n
use cactusref::{Rc, Weak};\n\nstruct Foo;\n\nimpl Drop for Foo {\n    fn drop(&mut self) {\n        println!(\"dropped!\");\n    }\n}\n\nlet foo = Rc::new(Foo);\nlet weak_foo = Rc::downgrade(&foo);\nlet other_weak_foo = Weak::clone(&weak_foo);\n\ndrop(weak_foo);   // Doesn't print anything\ndrop(foo);        // Prints \"dropped!\"\n\nassert!(other_weak_foo.upgrade().is_none());
\n
","Drop","cactusref::CactusWeakRef"],["
source§

impl<T> Weak<T>

source

pub fn new() -> Weak<T>

Constructs a new Weak<T>, without allocating any memory.\nCalling upgrade on the return value always gives None.

\n
§Examples
\n
use cactusref::Weak;\n\nlet empty: Weak<i64> = Weak::new();\nassert!(empty.upgrade().is_none());
\n
",0,"cactusref::CactusWeakRef"],["
source§

impl<T> Weak<T>

source

pub fn as_ptr(&self) -> *const T

Returns a raw pointer to the object T pointed to by this Weak<T>.

\n

The pointer is valid only if there are some strong references. The pointer may be dangling,\nunaligned or even null otherwise.

\n
§Examples
\n
use cactusref::Rc;\nuse std::ptr;\n\nlet strong = Rc::new(\"hello\".to_owned());\nlet weak = Rc::downgrade(&strong);\n// Both point to the same object\nassert!(ptr::eq(&*strong, weak.as_ptr()));\n// The strong here keeps it alive, so we can still access the object.\nassert_eq!(\"hello\", unsafe { &*weak.as_ptr() });\n\ndrop(strong);\n// But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to\n// undefined behaviour.\n// assert_eq!(\"hello\", unsafe { &*weak.as_ptr() });
\n
source

pub fn into_raw(self) -> *const T

Consumes the Weak<T> and turns it into a raw pointer.

\n

This converts the weak pointer into a raw pointer, while still preserving the ownership of\none weak reference (the weak count is not modified by this operation). It can be turned\nback into the Weak<T> with from_raw.

\n

The same restrictions of accessing the target of the pointer as with\nas_ptr apply.

\n
§Examples
\n
use cactusref::{Rc, Weak};\n\nlet strong = Rc::new(\"hello\".to_owned());\nlet weak = Rc::downgrade(&strong);\nlet raw = weak.into_raw();\n\nassert_eq!(1, Rc::weak_count(&strong));\nassert_eq!(\"hello\", unsafe { &*raw });\n\ndrop(unsafe { Weak::from_raw(raw) });\nassert_eq!(0, Rc::weak_count(&strong));
\n
source

pub unsafe fn from_raw(ptr: *const T) -> Self

Converts a raw pointer previously created by into_raw back into Weak<T>.

\n

This can be used to safely get a strong reference (by calling upgrade\nlater) or to deallocate the weak count by dropping the Weak<T>.

\n

It takes ownership of one weak reference (with the exception of pointers created by new,\nas these don’t own anything; the method still works on them).

\n
§Safety
\n

The pointer must have originated from the into_raw and must still own its potential\nweak reference.

\n

It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this\ntakes ownership of one weak reference currently represented as a raw pointer (the weak\ncount is not modified by this operation) and therefore it must be paired with a previous\ncall to into_raw.

\n
§Examples
\n
use cactusref::{Rc, Weak};\n\nlet strong = Rc::new(\"hello\".to_owned());\n\nlet raw_1 = Rc::downgrade(&strong).into_raw();\nlet raw_2 = Rc::downgrade(&strong).into_raw();\n\nassert_eq!(2, Rc::weak_count(&strong));\n\nassert_eq!(\"hello\", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());\nassert_eq!(1, Rc::weak_count(&strong));\n\ndrop(strong);\n\n// Decrement the last weak count.\nassert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
\n
source

pub fn upgrade(&self) -> Option<Rc<T>>

Attempts to upgrade the Weak pointer to an Rc, delaying\ndropping of the inner value if successful.

\n

Returns None if the inner value has since been dropped.

\n
§Examples
\n
use cactusref::Rc;\n\nlet five = Rc::new(5);\n\nlet weak_five = Rc::downgrade(&five);\n\nlet strong_five: Option<Rc<_>> = weak_five.upgrade();\nassert!(strong_five.is_some());\n\n// Destroy all strong pointers.\ndrop(strong_five);\ndrop(five);\n\nassert!(weak_five.upgrade().is_none());
\n
pub fn strong_count(&self) -> usize

Gets the number of strong (Rc) pointers pointing to this allocation.

If self was created using Weak::new, this will return 0.

pub fn weak_count(&self) -> usize

Gets the number of Weak pointers pointing to this allocation.

If no strong pointers remain, this will return zero. (A usage sketch for both counters follows at the end of this block.)
source

pub fn ptr_eq(&self, other: &Self) -> bool

Returns true if the two Weaks point to the same allocation (similar to\nptr::eq), or if both don’t point to any allocation\n(because they were created with Weak::new()).

\n
§Notes
\n

Since this compares pointers it means that Weak::new() will equal each\nother, even though they don’t point to any allocation.

\n
§Examples
\n
use cactusref::Rc;\n\nlet first_rc = Rc::new(5);\nlet first = Rc::downgrade(&first_rc);\nlet second = Rc::downgrade(&first_rc);\n\nassert!(first.ptr_eq(&second));\n\nlet third_rc = Rc::new(5);\nlet third = Rc::downgrade(&third_rc);\n\nassert!(!first.ptr_eq(&third));
\n

Comparing Weak::new.

\n\n
use cactusref::{Rc, Weak};\n\nlet first = Weak::new();\nlet second = Weak::new();\nassert!(first.ptr_eq(&second));\n\nlet third_rc = Rc::new(());\nlet third = Rc::downgrade(&third_rc);\nassert!(!first.ptr_eq(&third));
\n
",0,"cactusref::CactusWeakRef"]] +};if (window.register_type_impls) {window.register_type_impls(type_impls);} else {window.pending_type_impls = type_impls;}})() \ No newline at end of file