// This file is part of Substrate.

// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// 	http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Rust implementation of the Phragmén reduce algorithm. This can be used by any off chain
//! application to reduce cycles from the edge assignment, which will result in smaller size.
//!
//!  ### Notions
//!  - `m`: size of the committee to elect.
//!  - `k`: maximum allowed votes (16 as of this writing).
//!  - `nv ∈ E` means that nominator `n ∈ N` supports the election of candidate `v ∈ V`.
//!  - A valid solution consists of a tuple `(S, W)` , where `S ⊆ V` is a committee of m validators,
//!    and `W : E → R ≥ 0` is an edge weight vector which describes how the budget of each nominator
//!    n is fractionally assigned to n 's elected neighbors.
//!  - `E_w := { e ∈ E : w_e > 0 }`.
//!
//!  ### Algorithm overview
//!
//! > We consider the input edge weight vector `w` as a directed flow over `E_w` , where the flow in
//! > each edge is directed from the nominator to the validator. We build `w′` from `w` by removing
//! > **circulations** to cancel out the flow over as many edges as possible, while preserving flow
//! > demands over all vertices and without reverting the flow direction over any edge. As long as
//! > there is a cycle, we can remove an additional circulation and eliminate at least one new edge
//! > from `E_w′` . This shows that the algorithm always progresses and will eventually finish with
//! > an acyclic edge support. We keep a data structure that represents a forest of rooted trees,
//! > which is initialized as a collection of singletons – one per vertex – and to which edges in
//! > `E_w` are added one by one, causing the trees to merge. Whenever a new edge creates a cycle,
//! > we detect it and destroy it by removing a circulation. We also run a pre-computation which is
//! > designed to detect and remove cycles of length four exclusively. This pre-computation is
//! > optional, and if we skip it then the running time becomes `O(|E_w| ⋅ m)`, so the
//! > pre-computation makes sense only if `m >> k` and `|E_w| >> m^2`.
//!
//! ### Resources:
//!
//! 1. <https://hackmd.io/JOn9x98iS0e0DPWQ87zGWg?view>
use crate::{
	node::{Node, NodeId, NodeRef, NodeRole},
	ExtendedBalance, IdentifierT, StakedAssignment,
};
use alloc::{
	collections::btree_map::{BTreeMap, Entry::*},
	vec,
	vec::Vec,
};
use sp_arithmetic::traits::{Bounded, Zero};

/// Map type used for reduce_4. Can be easily swapped with HashMap.
///
/// Keyed by an ordered pair of targets `(v1, v2)`; the value is the id of a voter already seen
/// voting for both targets (see `reduce_4`).
type Map<A> = BTreeMap<(A, A), A>;
63
/// Returns all combinations of size two in the collection `input` with no repetition.
///
/// The pairs preserve the input order: `(input[i], input[j])` for every `i < j`. An input with
/// fewer than two elements yields an empty vector.
fn combinations_2<T: Clone>(input: &[T]) -> Vec<(T, T)> {
	// For each element, pair it with every element that comes after it. This naturally
	// produces nothing for inputs shorter than two elements.
	input
		.iter()
		.enumerate()
		.flat_map(|(i, first)| {
			input[i + 1..].iter().map(move |second| (first.clone(), second.clone()))
		})
		.collect()
}
79
/// Returns the count of trailing common elements in two slices.
///
/// Both slices are compared from their ends; counting stops at the first position (from the
/// back) where they differ, or when the shorter slice is exhausted.
pub(crate) fn trailing_common<T: Eq>(t1: &[T], t2: &[T]) -> usize {
	let mut shared = 0;
	let mut rev1 = t1.iter().rev();
	let mut rev2 = t2.iter().rev();
	while let (Some(a), Some(b)) = (rev1.next(), rev2.next()) {
		if a != b {
			break
		}
		shared += 1;
	}
	shared
}
84
85/// Merges two parent roots as described by the reduce algorithm.
86fn merge<A: IdentifierT>(voter_root_path: Vec<NodeRef<A>>, target_root_path: Vec<NodeRef<A>>) {
87	let (shorter_path, longer_path) = if voter_root_path.len() <= target_root_path.len() {
88		(voter_root_path, target_root_path)
89	} else {
90		(target_root_path, voter_root_path)
91	};
92
93	// iterate from last to beginning, skipping the first one. This asserts that
94	// indexing is always correct.
95	shorter_path
96		.iter()
97		.zip(shorter_path.iter().skip(1))
98		.for_each(|(voter, next)| Node::set_parent_of(next, voter));
99	Node::set_parent_of(&shorter_path[0], &longer_path[0]);
100}
101
/// Reduce only redundant edges with cycle length of 4.
///
/// Returns the number of edges removed.
///
/// It is strictly assumed that the `who` attribute of all provided assignments are unique. The
/// result will most likely be corrupt otherwise.
///
/// O(|E_w| ⋅ k).
fn reduce_4<A: IdentifierT>(assignments: &mut Vec<StakedAssignment<A>>) -> u32 {
	// Maps each ordered pair of targets `(v1, v2)` to one voter previously seen voting for both.
	// A second voter hitting the same pair forms a 4-cycle: who -> v1/v2 <- other_who.
	let mut combination_map: Map<A> = Map::new();
	let mut num_changed: u32 = Zero::zero();

	// we have to use the old fashioned loops here with manual indexing. Borrowing assignments will
	// not work since then there is NO way to mutate it inside.
	for assignment_index in 0..assignments.len() {
		let who = assignments[assignment_index].who.clone();

		// all combinations for this particular voter
		let distribution_ids = &assignments[assignment_index]
			.distribution
			.iter()
			.map(|(t, _p)| t.clone())
			.collect::<Vec<A>>();
		let candidate_combinations = combinations_2(distribution_ids);

		for (v1, v2) in candidate_combinations {
			match combination_map.entry((v1.clone(), v2.clone())) {
				Vacant(entry) => {
					// First voter seen with this pair; remember them and move on.
					entry.insert(who.clone());
				},
				Occupied(mut entry) => {
					let other_who = entry.get_mut();

					// double check if who is still voting for this pair. If not, it means that this
					// pair is no longer valid and must have been removed in previous rounds. The
					// reason for this is subtle; candidate_combinations is created once while the
					// inner loop might remove some edges. Note that if count() > 2, the we have
					// duplicates.
					if assignments[assignment_index]
						.distribution
						.iter()
						.filter(|(t, _)| *t == v1 || *t == v2)
						.count() != 2
					{
						continue
					}

					// check if other_who voted for the same pair v1, v2.
					let maybe_other_assignments = assignments.iter().find(|a| a.who == *other_who);
					if maybe_other_assignments.is_none() {
						continue
					}
					let other_assignment =
						maybe_other_assignments.expect("value is checked to be 'Some'");

					// Collect potential cycle votes
					let mut other_cycle_votes =
						other_assignment
							.distribution
							.iter()
							.filter_map(|(t, w)| {
								if *t == v1 || *t == v2 {
									Some((t.clone(), *w))
								} else {
									None
								}
							})
							.collect::<Vec<(A, ExtendedBalance)>>();

					let other_votes_count = other_cycle_votes.len();

					// If the length is more than 2, then we have identified duplicates. For now, we
					// just skip. Later on we can early exit and stop processing this data since it
					// is corrupt anyhow.
					debug_assert!(other_votes_count <= 2);

					if other_votes_count < 2 {
						// This is not a cycle. Replace and continue.
						*other_who = who.clone();
						continue
					} else if other_votes_count == 2 {
						// This is a cycle.
						let mut who_cycle_votes: Vec<(A, ExtendedBalance)> = Vec::with_capacity(2);
						assignments[assignment_index].distribution.iter().for_each(|(t, w)| {
							if *t == v1 || *t == v2 {
								who_cycle_votes.push((t.clone(), *w));
							}
						});

						if who_cycle_votes.len() != 2 {
							continue
						}

						// Align the targets similarly. This helps with the circulation below.
						if other_cycle_votes[0].0 != who_cycle_votes[0].0 {
							other_cycle_votes.swap(0, 1);
						}

						// Find min
						// `cycle` layout after the chain below: indices [0, 1] are `who`'s two
						// edges and [2, 3] are `other_who`'s, aligned so that [0, 2] (and [1, 3])
						// point at the same target.
						let mut min_value: ExtendedBalance = Bounded::max_value();
						let mut min_index: usize = 0;
						let cycle = who_cycle_votes
							.iter()
							.chain(other_cycle_votes.iter())
							.enumerate()
							.map(|(index, (t, w))| {
								if *w <= min_value {
									min_value = *w;
									min_index = index;
								}
								(t.clone(), *w)
							})
							.collect::<Vec<(A, ExtendedBalance)>>();

						// min was in the first part of the chained iters
						let mut increase_indices: Vec<usize> = Vec::new();
						let mut decrease_indices: Vec<usize> = Vec::new();
						decrease_indices.push(min_index);
						if min_index < 2 {
							// min_index == 0 => sibling_index <- 1
							// min_index == 1 => sibling_index <- 0
							let sibling_index = 1 - min_index;
							increase_indices.push(sibling_index);
							// valid because the two chained sections of `cycle` are aligned;
							// index [0, 2] are both voting for v1 or both v2. Same goes for [1, 3].
							decrease_indices.push(sibling_index + 2);
							increase_indices.push(min_index + 2);
						} else {
							// min_index == 2 => sibling_index <- 3
							// min_index == 3 => sibling_index <- 2
							let sibling_index = 3 - min_index % 2;
							increase_indices.push(sibling_index);
							// valid because the two chained sections of `cycle` are aligned;
							// index [0, 2] are both voting for v1 or both v2. Same goes for [1, 3].
							decrease_indices.push(sibling_index - 2);
							increase_indices.push(min_index - 2);
						}

						// apply changes: shift `min_value` of weight around the 4-cycle so the min
						// edge drops to zero while every vertex keeps its total flow.
						let mut remove_indices: Vec<usize> = Vec::with_capacity(1);
						increase_indices.into_iter().for_each(|i| {
							let voter = if i < 2 { who.clone() } else { other_who.clone() };
							// Note: so this is pretty ambiguous. We should only look for one
							// assignment that meets this criteria and if we find multiple then that
							// is a corrupt input. Same goes for the next block.
							assignments.iter_mut().filter(|a| a.who == voter).for_each(|ass| {
								ass.distribution
									.iter_mut()
									.position(|(t, _)| *t == cycle[i].0)
									.map(|idx| {
										let next_value =
											ass.distribution[idx].1.saturating_add(min_value);
										ass.distribution[idx].1 = next_value;
									});
							});
						});
						decrease_indices.into_iter().for_each(|i| {
							let voter = if i < 2 { who.clone() } else { other_who.clone() };
							assignments.iter_mut().filter(|a| a.who == voter).for_each(|ass| {
								ass.distribution
									.iter_mut()
									.position(|(t, _)| *t == cycle[i].0)
									.map(|idx| {
										let next_value =
											ass.distribution[idx].1.saturating_sub(min_value);
										if next_value.is_zero() {
											// Edge collapsed to zero: drop it and record which
											// half of the cycle it belonged to.
											ass.distribution.remove(idx);
											remove_indices.push(i);
											num_changed += 1;
										} else {
											ass.distribution[idx].1 = next_value;
										}
									});
							});
						});

						// remove either one of them.
						let who_removed = remove_indices.iter().any(|i| *i < 2usize);
						let other_removed = remove_indices.into_iter().any(|i| i >= 2usize);

						match (who_removed, other_removed) {
							(false, true) => {
								*other_who = who.clone();
							},
							(true, false) => {
								// nothing, other_who can stay there.
							},
							(true, true) => {
								// remove and don't replace
								entry.remove();
							},
							(false, false) => {
								// Neither of the edges was removed? impossible.
								panic!("Duplicate voter (or other corrupt input).");
							},
						}
					}
				},
			}
		}
	}

	num_changed
}
306
/// Reduce redundant edges from the edge weight graph, with all possible length.
///
/// To get the best performance, this should be called after `reduce_4()`.
///
/// Returns the number of edges removed.
///
/// It is strictly assumed that the `who` attribute of all provided assignments are unique. The
/// result will most likely be corrupt otherwise.
///
/// O(|Ew| ⋅ m)
fn reduce_all<A: IdentifierT>(assignments: &mut Vec<StakedAssignment<A>>) -> u32 {
	let mut num_changed: u32 = Zero::zero();
	// Forest of rooted trees: one node per (voter|target) id seen so far. An edge whose two
	// endpoints already share a root closes a cycle.
	let mut tree: BTreeMap<NodeId<A>, NodeRef<A>> = BTreeMap::new();

	// NOTE: This code can heavily use an index cache. Looking up a pair of (voter, target) in the
	// assignments happens numerous times and we can save time. For now it is written as such
	// because abstracting some of this code into a function/closure is super hard due to borrow
	// checks (and most likely needs unsafe code at the end). For now I will keep it as it and
	// refactor later.

	// a flat iterator of (voter, target) over all pairs of votes. Similar to reduce_4, we loop
	// without borrowing.
	for assignment_index in 0..assignments.len() {
		let voter = assignments[assignment_index].who.clone();

		let mut dist_index = 0;
		loop {
			// A distribution could have been removed. We don't know for sure. Hence, we check.
			let maybe_dist = assignments[assignment_index].distribution.get(dist_index);
			if maybe_dist.is_none() {
				// The rest of this loop is moot.
				break
			}
			let (target, _) = maybe_dist.expect("Value checked to be some").clone();

			// store if they existed already.
			let voter_id = NodeId::from(voter.clone(), NodeRole::Voter);
			let target_id = NodeId::from(target.clone(), NodeRole::Target);
			let voter_exists = tree.contains_key(&voter_id);
			let target_exists = tree.contains_key(&target_id);

			// create both.
			let voter_node = tree
				.entry(voter_id.clone())
				.or_insert_with(|| Node::new(voter_id).into_ref())
				.clone();
			let target_node = tree
				.entry(target_id.clone())
				.or_insert_with(|| Node::new(target_id).into_ref())
				.clone();

			// If one exists but the other one doesn't, or if both do not, then set the existing
			// one as the parent of the non-existing one and move on. Else, continue with the rest
			// of the code.
			match (voter_exists, target_exists) {
				(false, false) => {
					Node::set_parent_of(&target_node, &voter_node);
					dist_index += 1;
					continue
				},
				(false, true) => {
					Node::set_parent_of(&voter_node, &target_node);
					dist_index += 1;
					continue
				},
				(true, false) => {
					Node::set_parent_of(&target_node, &voter_node);
					dist_index += 1;
					continue
				},
				(true, true) => { /* don't continue and execute the rest */ },
			};

			let (voter_root, voter_root_path) = Node::root(&voter_node);
			let (target_root, target_root_path) = Node::root(&target_node);

			if voter_root != target_root {
				// Different trees: no cycle. Merge the two trees and move to the next edge.
				merge(voter_root_path, target_root_path);
				dist_index += 1;
			} else {
				// Same root: this edge closes a cycle. Find the common suffix of the two
				// root paths; everything before it is the cycle.
				let common_count = trailing_common(&voter_root_path, &target_root_path);

				// because roots are the same.
				//debug_assert_eq!(target_root_path.last().unwrap(),
				// voter_root_path.last().unwrap()); TODO: @kian
				// the common path must be non-void..
				debug_assert!(common_count > 0);
				// and smaller than both
				debug_assert!(common_count <= voter_root_path.len());
				debug_assert!(common_count <= target_root_path.len());

				// cycle part of each path will be `path[path.len() - common_count - 1 : 0]`
				// NOTE: the order of chaining is important! it is always build from [target, ...,
				// voter]
				let cycle = target_root_path
					.iter()
					.take(target_root_path.len().saturating_sub(common_count).saturating_add(1))
					.cloned()
					.chain(
						voter_root_path
							.iter()
							.take(voter_root_path.len().saturating_sub(common_count))
							.rev()
							.cloned(),
					)
					.collect::<Vec<NodeRef<A>>>();

				// a cycle's length shall always be multiple of two.
				debug_assert_eq!(cycle.len() % 2, 0);

				// find minimum of cycle.
				let mut min_value: ExtendedBalance = Bounded::max_value();
				// The voter and the target pair that create the min edge. These MUST be set by the
				// end of this code block, otherwise we skip.
				let mut maybe_min_target: Option<A> = None;
				let mut maybe_min_voter: Option<A> = None;
				// The index of the min in opaque cycle list.
				let mut maybe_min_index: Option<usize> = None;
				// 1 -> next // 0 -> prev
				let mut maybe_min_direction: Option<u32> = None;

				// helpers: wrap-around neighbors in the cyclic list.
				let next_index = |i| {
					if i < (cycle.len() - 1) {
						i + 1
					} else {
						0
					}
				};
				let prev_index = |i| {
					if i > 0 {
						i - 1
					} else {
						cycle.len() - 1
					}
				};

				// Examine both edges (towards next and towards prev) of every voter node in
				// the cycle to locate the minimum-weight edge.
				for i in 0..cycle.len() {
					if cycle[i].borrow().id.role == NodeRole::Voter {
						// NOTE: sadly way too many clones since I don't want to make A: Copy
						let current = cycle[i].borrow().id.who.clone();
						let next = cycle[next_index(i)].borrow().id.who.clone();
						let prev = cycle[prev_index(i)].borrow().id.who.clone();
						assignments.iter().find(|a| a.who == current).map(|ass| {
							ass.distribution.iter().find(|d| d.0 == next).map(|(_, w)| {
								if *w < min_value {
									min_value = *w;
									maybe_min_target = Some(next.clone());
									maybe_min_voter = Some(current.clone());
									maybe_min_index = Some(i);
									maybe_min_direction = Some(1);
								}
							})
						});
						assignments.iter().find(|a| a.who == current).map(|ass| {
							ass.distribution.iter().find(|d| d.0 == prev).map(|(_, w)| {
								if *w < min_value {
									min_value = *w;
									maybe_min_target = Some(prev.clone());
									maybe_min_voter = Some(current.clone());
									maybe_min_index = Some(i);
									maybe_min_direction = Some(0);
								}
							})
						});
					}
				}

				// all of these values must be set by now, we assign them to un-mut values, no
				// longer being optional either.
				let (min_value, min_target, min_voter, min_index, min_direction) =
					match (
						min_value,
						maybe_min_target,
						maybe_min_voter,
						maybe_min_index,
						maybe_min_direction,
					) {
						(
							min_value,
							Some(min_target),
							Some(min_voter),
							Some(min_index),
							Some(min_direction),
						) => (min_value, min_target, min_voter, min_index, min_direction),
						_ => {
							sp_runtime::print("UNREACHABLE code reached in `reduce` algorithm. This must be a bug.");
							break
						},
					};

				// if the min edge is in the voter's sub-chain.
				// [target, ..., X, Y, ... voter]
				let target_chunk = target_root_path.len() - common_count;
				let min_chain_in_voter = (min_index + min_direction as usize) > target_chunk;

				// walk over the cycle and update the weights
				let mut should_inc_counter = true;
				// Whether the edge at cycle position 0 is increased — derived from the
				// parity/direction of the min edge, presumably so that the min edge itself is the
				// one decreased to zero (TODO confirm against the reference write-up).
				let start_operation_add = ((min_index % 2) + min_direction as usize) % 2 == 1;
				let mut additional_removed = Vec::new();
				for i in 0..cycle.len() {
					let current = cycle[i].borrow();
					if current.id.role == NodeRole::Voter {
						// Alternate add/sub around the cycle: first the edge towards `prev`...
						let prev = cycle[prev_index(i)].borrow();
						assignments
							.iter_mut()
							.enumerate()
							.filter(|(_, a)| a.who == current.id.who)
							.for_each(|(target_ass_index, ass)| {
								ass.distribution
									.iter_mut()
									.position(|(t, _)| *t == prev.id.who)
									.map(|idx| {
										let next_value = if i % 2 == 0 {
											if start_operation_add {
												ass.distribution[idx].1.saturating_add(min_value)
											} else {
												ass.distribution[idx].1.saturating_sub(min_value)
											}
										} else if start_operation_add {
											ass.distribution[idx].1.saturating_sub(min_value)
										} else {
											ass.distribution[idx].1.saturating_add(min_value)
										};

										if next_value.is_zero() {
											// if the removed edge is from the current assignment,
											// index should NOT be increased.
											if target_ass_index == assignment_index {
												should_inc_counter = false
											}
											ass.distribution.remove(idx);
											num_changed += 1;
											// only add if this is not the min itself.
											if !(i == min_index && min_direction == 0) {
												additional_removed.push((
													cycle[i].clone(),
													cycle[prev_index(i)].clone(),
												));
											}
										} else {
											ass.distribution[idx].1 = next_value;
										}
									});
							});

						// ... then the edge towards `next`, with the opposite operation.
						let next = cycle[next_index(i)].borrow();
						assignments
							.iter_mut()
							.enumerate()
							.filter(|(_, a)| a.who == current.id.who)
							.for_each(|(target_ass_index, ass)| {
								ass.distribution
									.iter_mut()
									.position(|(t, _)| *t == next.id.who)
									.map(|idx| {
										let next_value = if i % 2 == 0 {
											if start_operation_add {
												ass.distribution[idx].1.saturating_sub(min_value)
											} else {
												ass.distribution[idx].1.saturating_add(min_value)
											}
										} else if start_operation_add {
											ass.distribution[idx].1.saturating_add(min_value)
										} else {
											ass.distribution[idx].1.saturating_sub(min_value)
										};

										if next_value.is_zero() {
											// if the removed edge is from the current assignment,
											// index should NOT be increased.
											if target_ass_index == assignment_index {
												should_inc_counter = false
											}
											ass.distribution.remove(idx);
											num_changed += 1;
											if !(i == min_index && min_direction == 1) {
												additional_removed.push((
													cycle[i].clone(),
													cycle[next_index(i)].clone(),
												));
											}
										} else {
											ass.distribution[idx].1 = next_value;
										}
									});
							});
					}
				}

				// don't do anything if the edge removed itself. This is always the first and last
				// element
				let should_reorg = !(min_index == (cycle.len() - 1) && min_direction == 1);

				// re-org: reverse the parent pointers on the side of the cycle that contained the
				// min edge, up to (but not including) the removed min edge itself.
				if should_reorg {
					let min_edge = vec![min_voter, min_target];
					if min_chain_in_voter {
						// NOTE: safe; voter_root_path is always bigger than 1 element.
						for i in 0..voter_root_path.len() - 1 {
							let current = voter_root_path[i].clone().borrow().id.who.clone();
							let next = voter_root_path[i + 1].clone().borrow().id.who.clone();
							if min_edge.contains(&current) && min_edge.contains(&next) {
								break
							}
							Node::set_parent_of(&voter_root_path[i + 1], &voter_root_path[i]);
						}
						Node::set_parent_of(&voter_node, &target_node);
					} else {
						// NOTE: safe; target_root_path is always bigger than 1 element.
						for i in 0..target_root_path.len() - 1 {
							let current = target_root_path[i].clone().borrow().id.who.clone();
							let next = target_root_path[i + 1].clone().borrow().id.who.clone();
							if min_edge.contains(&current) && min_edge.contains(&next) {
								break
							}
							Node::set_parent_of(&target_root_path[i + 1], &target_root_path[i]);
						}
						Node::set_parent_of(&target_node, &voter_node);
					}
				}

				// remove every other node which has collapsed to zero
				for (r1, r2) in additional_removed {
					if Node::is_parent_of(&r1, &r2) {
						Node::remove_parent(&r1);
					} else if Node::is_parent_of(&r2, &r1) {
						Node::remove_parent(&r2);
					}
				}

				// increment the counter if needed.
				if should_inc_counter {
					dist_index += 1;
				}
			}
		}
	}

	num_changed
}
650
651/// Reduce the given [`Vec<StakedAssignment<IdentifierT>>`]. This removes redundant edges without
652/// changing the overall backing of any of the elected candidates.
653///
654/// Returns the number of edges removed.
655///
656/// IMPORTANT: It is strictly assumed that the `who` attribute of all provided assignments are
657/// unique. The result will most likely be corrupt otherwise. Furthermore, if the _distribution
658/// vector_ contains duplicate ids, only the first instance is ever updates.
659///
660/// O(min{ |Ew| ⋅ k + m3 , |Ew| ⋅ m })
661pub fn reduce<A: IdentifierT>(assignments: &mut Vec<StakedAssignment<A>>) -> u32 where {
662	let mut num_changed = reduce_4(assignments);
663	num_changed += reduce_all(assignments);
664	num_changed
665}
666
#[cfg(test)]
mod tests {
	// Unit tests for `reduce`, `reduce_4`, `reduce_all` and the tree-merging helper.
	use super::*;

	#[test]
	fn merging_works() {
		// 	D <-- A <-- B <-- C
		//
		// 		F <-- E
		let d = Node::new(NodeId::from(1, NodeRole::Target)).into_ref();
		let a = Node::new(NodeId::from(2, NodeRole::Target)).into_ref();
		let b = Node::new(NodeId::from(3, NodeRole::Target)).into_ref();
		let c = Node::new(NodeId::from(4, NodeRole::Target)).into_ref();
		let e = Node::new(NodeId::from(5, NodeRole::Target)).into_ref();
		let f = Node::new(NodeId::from(6, NodeRole::Target)).into_ref();

		Node::set_parent_of(&c, &b);
		Node::set_parent_of(&b, &a);
		Node::set_parent_of(&a, &d);
		Node::set_parent_of(&e, &f);

		let path1 = vec![c.clone(), b.clone(), a.clone(), d.clone()];
		let path2 = vec![e.clone(), f.clone()];

		merge(path1, path2);
		// 	D <-- A <-- B <-- C
		// 					  |
		// 		F --> E --> -->
		assert_eq!(e.borrow().clone().parent.unwrap().borrow().id.who, 4u32); // c
	}

	#[test]
	fn merge_with_len_one() {
		// 	D <-- A <-- B <-- C
		//
		// 		F <-- E
		let d = Node::new(NodeId::from(1, NodeRole::Target)).into_ref();
		let a = Node::new(NodeId::from(2, NodeRole::Target)).into_ref();
		let b = Node::new(NodeId::from(3, NodeRole::Target)).into_ref();
		let c = Node::new(NodeId::from(4, NodeRole::Target)).into_ref();
		let f = Node::new(NodeId::from(6, NodeRole::Target)).into_ref();

		Node::set_parent_of(&c, &b);
		Node::set_parent_of(&b, &a);
		Node::set_parent_of(&a, &d);

		let path1 = vec![c.clone(), b.clone(), a.clone(), d.clone()];
		let path2 = vec![f.clone()];

		merge(path1, path2);
		// 	D <-- A <-- B <-- C
		// 					  |
		// 			F -->  -->
		assert_eq!(f.borrow().clone().parent.unwrap().borrow().id.who, 4u32); // c
	}

	#[test]
	fn basic_reduce_4_cycle_works() {
		use super::*;

		// Two voters sharing the pair (10, 20) form a 4-cycle; one edge must go.
		let assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 25), (20, 75)] },
			StakedAssignment { who: 2, distribution: vec![(10, 50), (20, 50)] },
		];

		let mut new_assignments = assignments.clone();
		let num_reduced = reduce_4(&mut new_assignments);

		assert_eq!(num_reduced, 1);
		assert_eq!(
			new_assignments,
			vec![
				StakedAssignment { who: 1, distribution: vec![(20, 100),] },
				StakedAssignment { who: 2, distribution: vec![(10, 75), (20, 25),] },
			],
		);
	}

	#[test]
	fn basic_reduce_all_cycles_works() {
		let mut assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 10)] },
			StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] },
			StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] },
			StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] },
			StakedAssignment { who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] },
		];

		assert_eq!(3, reduce_all(&mut assignments));

		assert_eq!(
			assignments,
			vec![
				StakedAssignment { who: 1, distribution: vec![(10, 10),] },
				StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] },
				StakedAssignment { who: 3, distribution: vec![(20, 30),] },
				StakedAssignment { who: 4, distribution: vec![(40, 40),] },
				StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] },
			],
		)
	}

	#[test]
	fn basic_reduce_works() {
		let mut assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 10)] },
			StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] },
			StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] },
			StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] },
			StakedAssignment { who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] },
		];

		assert_eq!(3, reduce(&mut assignments));

		assert_eq!(
			assignments,
			vec![
				StakedAssignment { who: 1, distribution: vec![(10, 10),] },
				StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] },
				StakedAssignment { who: 3, distribution: vec![(20, 30),] },
				StakedAssignment { who: 4, distribution: vec![(40, 40),] },
				StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] },
			],
		)
	}

	#[test]
	fn should_deal_with_self_vote() {
		let mut assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 10)] },
			StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5)] },
			StakedAssignment { who: 3, distribution: vec![(20, 15), (40, 15)] },
			StakedAssignment { who: 4, distribution: vec![(20, 10), (30, 10), (40, 20)] },
			StakedAssignment { who: 5, distribution: vec![(20, 20), (30, 10), (40, 20)] },
			// self vote from 10 and 20 to itself.
			StakedAssignment { who: 10, distribution: vec![(10, 100)] },
			StakedAssignment { who: 20, distribution: vec![(20, 200)] },
		];

		assert_eq!(3, reduce(&mut assignments));

		assert_eq!(
			assignments,
			vec![
				StakedAssignment { who: 1, distribution: vec![(10, 10),] },
				StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 5),] },
				StakedAssignment { who: 3, distribution: vec![(20, 30),] },
				StakedAssignment { who: 4, distribution: vec![(40, 40),] },
				StakedAssignment { who: 5, distribution: vec![(20, 15), (30, 20), (40, 15),] },
				// should stay untouched.
				StakedAssignment { who: 10, distribution: vec![(10, 100)] },
				StakedAssignment { who: 20, distribution: vec![(20, 200)] },
			],
		)
	}

	#[test]
	fn reduce_3_common_votes_same_weight() {
		let mut assignments = vec![
			StakedAssignment {
				who: 4,
				distribution: vec![(1000000, 100), (1000002, 100), (1000004, 100)],
			},
			StakedAssignment {
				who: 5,
				distribution: vec![(1000000, 100), (1000002, 100), (1000004, 100)],
			},
		];

		reduce_4(&mut assignments);

		assert_eq!(
			assignments,
			vec![
				StakedAssignment { who: 4, distribution: vec![(1000000, 200,), (1000004, 100,),] },
				StakedAssignment { who: 5, distribution: vec![(1000002, 200,), (1000004, 100,),] },
			],
		)
	}

	#[test]
	#[should_panic]
	fn reduce_panics_on_duplicate_voter() {
		// Duplicate `who` violates the documented pre-condition and must panic.
		let mut assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 10), (20, 10)] },
			StakedAssignment { who: 1, distribution: vec![(10, 15), (20, 5)] },
			StakedAssignment { who: 2, distribution: vec![(10, 15), (20, 15)] },
		];

		reduce(&mut assignments);
	}

	#[test]
	fn should_deal_with_duplicates_target() {
		let mut assignments = vec![
			StakedAssignment { who: 1, distribution: vec![(10, 15), (20, 5)] },
			StakedAssignment {
				who: 2,
				distribution: vec![
					(10, 15),
					(20, 15),
					// duplicate
					(10, 1),
					// duplicate
					(20, 1),
				],
			},
		];

		reduce(&mut assignments);

		assert_eq!(
			assignments,
			vec![
				StakedAssignment { who: 1, distribution: vec![(10, 20),] },
				StakedAssignment {
					who: 2,
					distribution: vec![
						(10, 10),
						(20, 20),
						// duplicate votes are silently ignored.
						(10, 1),
						(20, 1),
					],
				},
			],
		)
	}

	#[test]
	fn bound_should_be_kept() {
		let mut assignments = vec![
			StakedAssignment {
				who: 1,
				distribution: vec![(103, 72), (101, 53), (100, 83), (102, 38)],
			},
			StakedAssignment {
				who: 2,
				distribution: vec![(103, 18), (101, 36), (102, 54), (100, 94)],
			},
			StakedAssignment {
				who: 3,
				distribution: vec![(100, 96), (101, 35), (102, 52), (103, 69)],
			},
			StakedAssignment {
				who: 4,
				distribution: vec![(102, 34), (100, 47), (103, 91), (101, 73)],
			},
		];

		let winners = vec![103, 101, 100, 102];

		let n = 4;
		let m = winners.len() as u32;
		let num_reduced = reduce_all(&mut assignments);
		// 4 voters × 4 votes = 16 edges initially; a reduced solution has at most n + m edges.
		assert!(16 - num_reduced <= n + m);
	}
}