//! A priority queue implemented with a 4-ary heap.
//!
//! Insertion and popping the minimal element have `O(log n)` time complexity.
//! Checking the minimal element is `O(1)`. Keys of elements in the heap can
//! also be increased or decreased.
//!
//! # Examples
//!
//! ```
//! use stud_rust_base::index_heap::{Indexing, IndexdMinHeap};
//!
//! #[derive(Copy, Clone, Eq, PartialEq, Debug, Ord, PartialOrd)]
//! pub struct State {
//!     pub distance: usize,
//!     pub node: usize,
//! }
//!
//! // The `Indexing` trait needs to be implemented as well, so we can find elements to decrease their key.
//! impl Indexing for State {
//!     fn as_index(&self) -> usize {
//!         self.node as usize
//!     }
//! }
//!
//! let mut heap = IndexdMinHeap::new(3);
//! heap.push(State { node: 0, distance: 42 });
//! heap.push(State { node: 1, distance: 23 });
//! heap.push(State { node: 2, distance: 50000 });
//! assert_eq!(heap.peek().cloned(), Some(State { node: 1, distance: 23 }));
//! heap.decrease_key(State { node: 0, distance: 1 });
//! assert_eq!(heap.pop(), Some(State { node: 0, distance: 1 }));
//! ```
/// A trait to map elements in a heap to a unique index.
/// The element type of the `IndexdMinHeap` has to implement this trait.
pub trait Indexing {
    /// This method has to map a heap element to a unique `usize` index.
    fn as_index(&self) -> usize;
}
48 /// A priority queue where the elements are IDs from 0 to id_count-1 where id_count is a number that is set in the constructor.
49 /// The elements are sorted ascending by the ordering defined by the `Ord` trait.
50 /// The interface mirros the standard library BinaryHeap (except for the reversed order).
51 /// Only the methods necessary for dijkstras algorithm are implemented.
52 /// In addition, `increase_key` and `decrease_key` methods are available.
54 pub struct IndexdMinHeap<T: Ord + Indexing> {
55 positions: Vec<usize>,
// Each node of the heap has up to four children.
const TREE_ARITY: usize = 4;
// usize::MAX can never be a real position, so it doubles as the
// "element not in heap" marker in `positions`.
// (`usize::MAX` replaces the soft-deprecated `std::usize::MAX` path.)
const INVALID_POSITION: usize = usize::MAX;
impl<T: Ord + Indexing> IndexdMinHeap<T> {
    /// Creates an empty `IndexdMinHeap` as a min-heap.
    /// The indices (as defined by the `Indexing` trait) of all inserted elements
    /// will have to be in `[0, max_index)`.
    pub fn new(max_index: usize) -> IndexdMinHeap<T> {
        IndexdMinHeap {
            // One lookup slot per possible element index, all initially empty.
            positions: vec![INVALID_POSITION; max_index],
            data: Vec::new(),
        }
    }
73 /// Returns the length of the binary heap.
74 pub fn len(&self) -> usize {
78 /// Checks if the binary heap is empty.
79 pub fn is_empty(&self) -> bool {
83 /// Checks if the heap already contains an element mapped to the given index
84 pub fn contains_index(&self, id: usize) -> bool {
85 self.positions[id] != INVALID_POSITION
88 /// Drops all items from the heap.
89 pub fn clear(&mut self) {
90 for element in &self.data {
91 self.positions[element.as_index()] = INVALID_POSITION;
96 /// Returns a reference to the smallest item in the heap, or None if it is empty.
97 pub fn peek(&self) -> Option<&T> {
101 /// Removes the greatest item from the binary heap and returns it, or None if it is empty.
102 pub fn pop(&mut self) -> Option<T> {
103 self.data.pop().map(|mut item| {
104 self.positions[item.as_index()] = INVALID_POSITION;
105 if !self.is_empty() {
106 self.positions[item.as_index()] = 0;
107 self.positions[self.data[0].as_index()] = INVALID_POSITION;
108 swap(&mut item, &mut self.data[0]);
109 self.move_down_in_tree(0);
115 /// Pushes an item onto the binary heap.
116 /// Panics if an element with the same index already exists.
117 pub fn push(&mut self, element: T) {
118 assert!(!self.contains_index(element.as_index()));
119 let insert_position = self.len();
120 self.positions[element.as_index()] = insert_position;
121 self.data.push(element);
122 self.move_up_in_tree(insert_position);
125 /// Updates the key of an element if the new key is smaller than the old key.
126 /// Does nothing if the new key is larger.
127 /// Panics if the element is not part of the queue.
128 pub fn decrease_key(&mut self, element: T) {
129 let position = self.positions[element.as_index()];
130 self.data[position] = element;
131 self.move_up_in_tree(position);
134 /// Updates the key of an element if the new key is larger than the old key.
135 /// Does nothing if the new key is smaller.
136 /// Panics if the element is not part of the queue.
137 pub fn increase_key(&mut self, element: T) {
138 let position = self.positions[element.as_index()];
139 self.data[position] = element;
140 self.move_down_in_tree(position);
143 fn move_up_in_tree(&mut self, position: usize) {
145 let mut position = position;
146 let mut hole = Hole::new(&mut self.data, position);
149 let parent = (position - 1) / TREE_ARITY;
151 if hole.get(parent) < hole.element() {
155 self.positions[hole.get(parent).as_index()] = position;
156 hole.move_to(parent);
160 self.positions[hole.element().as_index()] = position;
164 fn move_down_in_tree(&mut self, position: usize) {
166 let mut position = position;
167 let heap_size = self.len();
168 let mut hole = Hole::new(&mut self.data, position);
171 if let Some(smallest_child) =
172 IndexdMinHeap::<T>::children_index_range(position, heap_size)
173 .min_by_key(|&child_index| hole.get(child_index))
175 if hole.get(smallest_child) >= hole.element() {
176 self.positions[hole.element().as_index()] = position;
177 return; // no child is smaller
180 self.positions[hole.get(smallest_child).as_index()] = position;
181 hole.move_to(smallest_child);
182 position = smallest_child;
184 self.positions[hole.element().as_index()] = position;
185 return; // no children at all
191 fn children_index_range(parent_index: usize, heap_size: usize) -> std::ops::Range<usize> {
192 let first_child = TREE_ARITY * parent_index + 1;
193 let last_child = min(TREE_ARITY * parent_index + TREE_ARITY + 1, heap_size);
194 first_child..last_child
// This is an optimization copied straight from the rust stdlib binary heap:
// it avoids swapping elements pairwise and instead moves each element
// only once.
/// Hole represents a hole in a slice i.e. an index without valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    // The slice containing the hole.
    data: &'a mut [T],
    /// `elt` is always `Some` from new until drop.
    elt: Option<T>,
    // Index of the hole within `data`.
    pos: usize,
}
impl<'a, T> Hole<'a, T> {
    /// Create a new Hole at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // Bitwise copy of data[pos]; the stale duplicate left in the slice
        // must not be read until Drop writes `elt` back over it.
        let elt = ptr::read(&data[pos]);
        Hole {
            data,
            elt: Some(elt),
            pos,
        }
    }
228 /// Returns a reference to the element removed.
230 fn element(&self) -> &T {
231 self.elt.as_ref().unwrap()
234 /// Returns a reference to the element at `index`.
236 /// Unsafe because index must be within the data slice and not equal to pos.
238 unsafe fn get(&self, index: usize) -> &T {
239 debug_assert!(index != self.pos);
240 debug_assert!(index < self.data.len());
241 self.data.get_unchecked(index)
244 /// Move hole to new location
246 /// Unsafe because index must be within the data slice and not equal to pos.
248 unsafe fn move_to(&mut self, index: usize) {
249 debug_assert!(index != self.pos);
250 debug_assert!(index < self.data.len());
251 let index_ptr: *const _ = self.data.get_unchecked(index);
252 let hole_ptr = self.data.get_unchecked_mut(self.pos);
253 ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
258 impl<'a, T> Drop for Hole<'a, T> {
261 // fill the hole again
264 ptr::write(self.data.get_unchecked_mut(pos), self.elt.take().unwrap());