// This file is part of Substrate.

// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.

//! Canonicalization window. Maintains trees of block overlays and allows
//! discarding trees/roots. The overlays are added in `insert` and removed in
//! `canonicalize`.
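//!
//! A minimal usage sketch (illustrative only, not taken from this file; `db` stands for any
//! backend that implements `MetaDb` and can apply a `CommitSet`, such as the in-memory test
//! database, and error handling is omitted):
//!
//! ```ignore
//! let mut overlay = NonCanonicalOverlay::<BlockHash, Key>::new(&db)?;
//! // Journal a new non-canonical block.
//! let commit = overlay.insert(&hash, number, &parent_hash, changeset)?;
//! db.commit(&commit);
//! // Later, canonicalize it; all sibling subtrees are discarded.
//! let mut commit = CommitSet::default();
//! overlay.canonicalize(&hash, &mut commit)?;
//! db.commit(&commit);
//! overlay.sync(); // release the temporary pin once the commit is on disk
//! ```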

use crate::{LOG_TARGET, LOG_TARGET_PIN};

use super::{to_meta_key, ChangeSet, CommitSet, DBValue, Error, Hash, MetaDb, StateDbError};
use codec::{Decode, Encode};
use log::trace;
use std::collections::{hash_map::Entry, HashMap, VecDeque};

const NON_CANONICAL_JOURNAL: &[u8] = b"noncanonical_journal";
pub(crate) const LAST_CANONICAL: &[u8] = b"last_canonical";
const MAX_BLOCKS_PER_LEVEL: u64 = 32;

/// See module documentation.
pub struct NonCanonicalOverlay<BlockHash: Hash, Key: Hash> {
	last_canonicalized: Option<(BlockHash, u64)>,
	levels: VecDeque<OverlayLevel<BlockHash, Key>>,
	parents: HashMap<BlockHash, BlockHash>,
	values: HashMap<Key, (u32, DBValue)>, // ref counted
	// Would be deleted but kept around because the block is pinned, ref counted.
	pinned: HashMap<BlockHash, u32>,
	pinned_insertions: HashMap<BlockHash, (Vec<Key>, u32)>,
	pinned_canonicalized: Vec<BlockHash>,
}

#[cfg_attr(test, derive(PartialEq, Debug))]
struct OverlayLevel<BlockHash: Hash, Key: Hash> {
	blocks: Vec<BlockOverlay<BlockHash, Key>>,
	used_indices: u64, // Bitmask of used journal indices.
}

impl<BlockHash: Hash, Key: Hash> OverlayLevel<BlockHash, Key> {
	fn push(&mut self, overlay: BlockOverlay<BlockHash, Key>) {
		self.used_indices |= 1 << overlay.journal_index;
		self.blocks.push(overlay)
	}

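	/// Returns the lowest journal index that is not yet occupied on this level.
	/// `used_indices` is a bitmask, so e.g. `0b0111` (indices 0..=2 taken) yields
	/// `trailing_ones() == 3`, i.e. index 3 is the first free slot.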
	fn available_index(&self) -> u64 {
		self.used_indices.trailing_ones() as u64
	}

	fn remove(&mut self, index: usize) -> BlockOverlay<BlockHash, Key> {
		self.used_indices &= !(1 << self.blocks[index].journal_index);
		self.blocks.remove(index)
	}

	fn new() -> OverlayLevel<BlockHash, Key> {
		OverlayLevel { blocks: Vec::new(), used_indices: 0 }
	}
}

#[derive(Encode, Decode)]
struct JournalRecord<BlockHash: Hash, Key: Hash> {
	hash: BlockHash,
	parent_hash: BlockHash,
	inserted: Vec<(Key, DBValue)>,
	deleted: Vec<Key>,
}

fn to_journal_key(block: u64, index: u64) -> Vec<u8> {
	to_meta_key(NON_CANONICAL_JOURNAL, &(block, index))
}

#[cfg_attr(test, derive(PartialEq, Debug))]
struct BlockOverlay<BlockHash: Hash, Key: Hash> {
	hash: BlockHash,
	journal_index: u64,
	journal_key: Vec<u8>,
	inserted: Vec<Key>,
	deleted: Vec<Key>,
}

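/// Add `inserted` key/value pairs to the shared `values` map, bumping the reference
/// count for keys that are already present (several sibling blocks may insert the
/// same value).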
fn insert_values<Key: Hash>(
	values: &mut HashMap<Key, (u32, DBValue)>,
	inserted: Vec<(Key, DBValue)>,
) {
	for (k, v) in inserted {
		debug_assert!(values.get(&k).map_or(true, |(_, value)| *value == v));
		let (ref mut counter, _) = values.entry(k).or_insert_with(|| (0, v));
		*counter += 1;
	}
}

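/// Decrement the reference count of each key in `inserted` and drop the value once
/// the count reaches zero.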
fn discard_values<Key: Hash>(values: &mut HashMap<Key, (u32, DBValue)>, inserted: Vec<Key>) {
	for k in inserted {
		match values.entry(k) {
			Entry::Occupied(mut e) => {
				let (ref mut counter, _) = e.get_mut();
				*counter -= 1;
				if *counter == 0 {
					e.remove_entry();
				}
			},
			Entry::Vacant(_) => {
				debug_assert!(false, "Trying to discard missing value");
			},
		}
	}
}

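/// Recursively discard all blocks in `levels` that are descendants of `hash`.
/// `levels` is passed as the pair of slices returned by `VecDeque::as_mut_slices` so that the
/// current level can be split off while the remaining levels stay borrowed for the recursive
/// call. Insertions of pinned blocks (or of blocks with pinned descendants) are parked in
/// `pinned_insertions` instead of being dropped; the number of pinned blocks found in the
/// subtree is returned.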
fn discard_descendants<BlockHash: Hash, Key: Hash>(
	levels: &mut (&mut [OverlayLevel<BlockHash, Key>], &mut [OverlayLevel<BlockHash, Key>]),
	values: &mut HashMap<Key, (u32, DBValue)>,
	parents: &mut HashMap<BlockHash, BlockHash>,
	pinned: &HashMap<BlockHash, u32>,
	pinned_insertions: &mut HashMap<BlockHash, (Vec<Key>, u32)>,
	hash: &BlockHash,
) -> u32 {
	let (first, mut remainder) = if let Some((first, rest)) = levels.0.split_first_mut() {
		(Some(first), (rest, &mut *levels.1))
	} else if let Some((first, rest)) = levels.1.split_first_mut() {
		(Some(first), (&mut *levels.0, rest))
	} else {
		(None, (&mut *levels.0, &mut *levels.1))
	};
	let mut pinned_children = 0;
	if let Some(level) = first {
		while let Some(i) = level.blocks.iter().position(|overlay| {
			parents
				.get(&overlay.hash)
				.expect("there is a parent entry for each entry in levels; qed") ==
				hash
		}) {
			let overlay = level.remove(i);
			let mut num_pinned = discard_descendants(
				&mut remainder,
				values,
				parents,
				pinned,
				pinned_insertions,
				&overlay.hash,
			);
			if pinned.contains_key(&overlay.hash) {
				num_pinned += 1;
			}
			if num_pinned != 0 {
				// save to be discarded later.
				pinned_insertions.insert(overlay.hash.clone(), (overlay.inserted, num_pinned));
				pinned_children += num_pinned;
			} else {
				// discard immediately.
				parents.remove(&overlay.hash);
				discard_values(values, overlay.inserted);
			}
		}
	}
	pinned_children
}

impl<BlockHash: Hash, Key: Hash> NonCanonicalOverlay<BlockHash, Key> {
	/// Creates a new instance. Does not expect any metadata to be present in the DB.
	pub fn new<D: MetaDb>(db: &D) -> Result<NonCanonicalOverlay<BlockHash, Key>, Error<D::Error>> {
		let last_canonicalized =
			db.get_meta(&to_meta_key(LAST_CANONICAL, &())).map_err(Error::Db)?;
		let last_canonicalized = last_canonicalized
			.map(|buffer| <(BlockHash, u64)>::decode(&mut buffer.as_slice()))
			.transpose()?;
		let mut levels = VecDeque::new();
		let mut parents = HashMap::new();
		let mut values = HashMap::new();
		if let Some((ref hash, mut block)) = last_canonicalized {
			// read the journal
			trace!(
				target: LOG_TARGET,
				"Reading uncanonicalized journal. Last canonicalized #{} ({:?})",
				block,
				hash
			);
			let mut total: u64 = 0;
			block += 1;
			loop {
				let mut level = OverlayLevel::new();
				for index in 0..MAX_BLOCKS_PER_LEVEL {
					let journal_key = to_journal_key(block, index);
					if let Some(record) = db.get_meta(&journal_key).map_err(Error::Db)? {
						let record: JournalRecord<BlockHash, Key> =
							Decode::decode(&mut record.as_slice())?;
						let inserted = record.inserted.iter().map(|(k, _)| k.clone()).collect();
						let overlay = BlockOverlay {
							hash: record.hash.clone(),
							journal_index: index,
							journal_key,
							inserted,
							deleted: record.deleted,
						};
						insert_values(&mut values, record.inserted);
						trace!(
							target: LOG_TARGET,
							"Uncanonicalized journal entry {}.{} ({:?}) ({} inserted, {} deleted)",
							block,
							index,
							record.hash,
							overlay.inserted.len(),
							overlay.deleted.len()
						);
						level.push(overlay);
						parents.insert(record.hash, record.parent_hash);
						total += 1;
					}
				}
				if level.blocks.is_empty() {
					break
				}
				levels.push_back(level);
				block += 1;
			}
			trace!(
				target: LOG_TARGET,
				"Finished reading uncanonicalized journal, {} entries",
				total
			);
		}
		Ok(NonCanonicalOverlay {
			last_canonicalized,
			levels,
			parents,
			pinned: Default::default(),
			pinned_insertions: Default::default(),
			values,
			pinned_canonicalized: Default::default(),
		})
	}

	/// Insert a new block into the overlay. If inserted on the second level or lower, expects
	/// the parent to be present in the window.
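	///
	/// Illustrative call sequence (sketch; `db` is any backend that can apply a `CommitSet`):
	///
	/// ```ignore
	/// let commit = overlay.insert(&hash, number, &parent_hash, changeset)?;
	/// db.commit(&commit); // persists the journal record for crash recovery
	/// ```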
	pub fn insert(
		&mut self,
		hash: &BlockHash,
		number: u64,
		parent_hash: &BlockHash,
		changeset: ChangeSet<Key>,
	) -> Result<CommitSet<Key>, StateDbError> {
		let mut commit = CommitSet::default();
		let front_block_number = self.front_block_number();
		if self.levels.is_empty() && self.last_canonicalized.is_none() && number > 0 {
			// assume that parent was canonicalized
			let last_canonicalized = (parent_hash.clone(), number - 1);
			commit
				.meta
				.inserted
				.push((to_meta_key(LAST_CANONICAL, &()), last_canonicalized.encode()));
			self.last_canonicalized = Some(last_canonicalized);
		} else if self.last_canonicalized.is_some() {
			if number < front_block_number || number > front_block_number + self.levels.len() as u64
			{
				trace!(
					target: LOG_TARGET,
					"Failed to insert block {}, current range is [{} .. {})",
					number,
					front_block_number,
					front_block_number + self.levels.len() as u64,
				);
				return Err(StateDbError::InvalidBlockNumber)
			}
			// check for valid parent if inserting on second level or higher
			if number == front_block_number {
				if !self
					.last_canonicalized
					.as_ref()
					.map_or(false, |&(ref h, n)| h == parent_hash && n == number - 1)
				{
					return Err(StateDbError::InvalidParent)
				}
			} else if !self.parents.contains_key(parent_hash) {
				return Err(StateDbError::InvalidParent)
			}
		}
		let level = if self.levels.is_empty() ||
			number == front_block_number + self.levels.len() as u64
		{
			self.levels.push_back(OverlayLevel::new());
			self.levels.back_mut().expect("can't be empty after insertion; qed")
		} else {
			self.levels.get_mut((number - front_block_number) as usize)
				.expect("number is in [front_block_number .. front_block_number + levels.len()) as checked above; qed")
		};

		if level.blocks.len() >= MAX_BLOCKS_PER_LEVEL as usize {
			trace!(
				target: LOG_TARGET,
				"Too many sibling blocks at #{number}: {:?}",
				level.blocks.iter().map(|b| &b.hash).collect::<Vec<_>>()
			);
			return Err(StateDbError::TooManySiblingBlocks { number })
		}
		if level.blocks.iter().any(|b| b.hash == *hash) {
			return Err(StateDbError::BlockAlreadyExists)
		}

		let index = level.available_index();
		let journal_key = to_journal_key(number, index);

		let inserted = changeset.inserted.iter().map(|(k, _)| k.clone()).collect();
		let overlay = BlockOverlay {
			hash: hash.clone(),
			journal_index: index,
			journal_key: journal_key.clone(),
			inserted,
			deleted: changeset.deleted.clone(),
		};
		level.push(overlay);
		self.parents.insert(hash.clone(), parent_hash.clone());
		let journal_record = JournalRecord {
			hash: hash.clone(),
			parent_hash: parent_hash.clone(),
			inserted: changeset.inserted,
			deleted: changeset.deleted,
		};
		commit.meta.inserted.push((journal_key, journal_record.encode()));
		trace!(
			target: LOG_TARGET,
			"Inserted uncanonicalized changeset {}.{} {:?} ({} inserted, {} deleted)",
			number,
			index,
			hash,
			journal_record.inserted.len(),
			journal_record.deleted.len()
		);
		insert_values(&mut self.values, journal_record.inserted);
		Ok(commit)
	}

	fn discard_journals(
		&self,
		level_index: usize,
		discarded_journals: &mut Vec<Vec<u8>>,
		hash: &BlockHash,
	) {
		if let Some(level) = self.levels.get(level_index) {
			level.blocks.iter().for_each(|overlay| {
				let parent = self
					.parents
					.get(&overlay.hash)
					.expect("there is a parent entry for each entry in levels; qed")
					.clone();
				if parent == *hash {
					discarded_journals.push(overlay.journal_key.clone());
					self.discard_journals(level_index + 1, discarded_journals, &overlay.hash);
				}
			});
		}
	}

	fn front_block_number(&self) -> u64 {
		self.last_canonicalized.as_ref().map(|&(_, n)| n + 1).unwrap_or(0)
	}

	pub fn last_canonicalized_block_number(&self) -> Option<u64> {
		self.last_canonicalized.as_ref().map(|&(_, n)| n)
	}

	/// Confirm that all changes made to commit sets are on disk. Allows for temporarily pinned
	/// blocks to be released.
	pub fn sync(&mut self) {
		let mut pinned = std::mem::take(&mut self.pinned_canonicalized);
		for hash in pinned.iter() {
			self.unpin(hash)
		}
		pinned.clear();
		// Reuse the same memory buffer
		self.pinned_canonicalized = pinned;
	}

	/// Select a top-level root and canonicalize it. Discards all sibling subtrees and the root.
	/// Adds the changes of the canonicalized block to `commit`.
	/// Returns the block number of the canonicalized block.
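	///
	/// Illustrative call sequence (sketch; mirrors what the tests below do):
	///
	/// ```ignore
	/// let mut commit = CommitSet::default();
	/// let number = overlay.canonicalize(&hash, &mut commit)?;
	/// db.commit(&commit);
	/// overlay.sync(); // release the temporary pin once the commit is on disk
	/// ```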
	pub fn canonicalize(
		&mut self,
		hash: &BlockHash,
		commit: &mut CommitSet<Key>,
	) -> Result<u64, StateDbError> {
		trace!(target: LOG_TARGET, "Canonicalizing {:?}", hash);
		let level = match self.levels.pop_front() {
			Some(level) => level,
			None => return Err(StateDbError::InvalidBlock),
		};
		let index = level
			.blocks
			.iter()
			.position(|overlay| overlay.hash == *hash)
			.ok_or(StateDbError::InvalidBlock)?;

		// No failures are possible beyond this point.

		// Force-pin the canonicalized block so that it is not discarded immediately.
		self.pin(hash);
		self.pinned_canonicalized.push(hash.clone());

		let mut discarded_journals = Vec::new();
		for (i, overlay) in level.blocks.into_iter().enumerate() {
			let mut pinned_children = 0;
			// That's the one we need to canonicalize
			if i == index {
				commit.data.inserted.extend(overlay.inserted.iter().map(|k| {
					(
						k.clone(),
						self.values
							.get(k)
							.expect("For each key in overlays there's a value in values")
							.1
							.clone(),
					)
				}));
				commit.data.deleted.extend(overlay.deleted.clone());
			} else {
				// Discard this overlay
				self.discard_journals(0, &mut discarded_journals, &overlay.hash);
				pinned_children = discard_descendants(
					&mut self.levels.as_mut_slices(),
					&mut self.values,
					&mut self.parents,
					&self.pinned,
					&mut self.pinned_insertions,
					&overlay.hash,
				);
			}
			if self.pinned.contains_key(&overlay.hash) {
				pinned_children += 1;
			}
			if pinned_children != 0 {
				self.pinned_insertions
					.insert(overlay.hash.clone(), (overlay.inserted, pinned_children));
			} else {
				self.parents.remove(&overlay.hash);
				discard_values(&mut self.values, overlay.inserted);
			}
			discarded_journals.push(overlay.journal_key.clone());
		}
		commit.meta.deleted.append(&mut discarded_journals);

		let canonicalized = (hash.clone(), self.front_block_number());
		commit
			.meta
			.inserted
			.push((to_meta_key(LAST_CANONICAL, &()), canonicalized.encode()));
		trace!(target: LOG_TARGET, "Discarding {} records", commit.meta.deleted.len());

		let num = canonicalized.1;
		self.last_canonicalized = Some(canonicalized);
		Ok(num)
	}

	/// Get a value from the node overlay. This searches in every existing changeset.
	pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<DBValue>
	where
		Key: std::borrow::Borrow<Q>,
		Q: std::hash::Hash + Eq,
	{
		self.values.get(key).map(|v| v.1.clone())
	}

	/// Check if the block is in the canonicalization queue.
	pub fn have_block(&self, hash: &BlockHash) -> bool {
		self.parents.contains_key(hash)
	}

	/// Revert a single level. Returns a commit set that deletes the journal records, or `None`
	/// if there is nothing to revert.
	pub fn revert_one(&mut self) -> Option<CommitSet<Key>> {
		self.levels.pop_back().map(|level| {
			let mut commit = CommitSet::default();
			for overlay in level.blocks.into_iter() {
				commit.meta.deleted.push(overlay.journal_key);
				self.parents.remove(&overlay.hash);
				discard_values(&mut self.values, overlay.inserted);
			}
			commit
		})
	}

	/// Revert a single block. Returns a commit set that deletes the journal record, or `None`
	/// if the block is not found or still has children.
	pub fn remove(&mut self, hash: &BlockHash) -> Option<CommitSet<Key>> {
		let mut commit = CommitSet::default();
		let level_count = self.levels.len();
		for (level_index, level) in self.levels.iter_mut().enumerate().rev() {
			let index = match level.blocks.iter().position(|overlay| &overlay.hash == hash) {
				Some(index) => index,
				None => continue,
			};
			// Check that it does not have any children
			if (level_index != level_count - 1) && self.parents.values().any(|h| h == hash) {
				log::debug!(target: LOG_TARGET, "Trying to remove block {:?} with children", hash);
				return None
			}
			let overlay = level.remove(index);
			commit.meta.deleted.push(overlay.journal_key);
			self.parents.remove(&overlay.hash);
			discard_values(&mut self.values, overlay.inserted);
			break
		}
		if self.levels.back().map_or(false, |l| l.blocks.is_empty()) {
			self.levels.pop_back();
		}
		if !commit.meta.deleted.is_empty() {
			Some(commit)
		} else {
			None
		}
	}

	/// Pin state values in memory
	pub fn pin(&mut self, hash: &BlockHash) {
		let refs = self.pinned.entry(hash.clone()).or_default();
		if *refs == 0 {
			trace!(target: LOG_TARGET_PIN, "Pinned non-canon block: {:?}", hash);
		}
		*refs += 1;
	}

	/// Discard pinned state
	pub fn unpin(&mut self, hash: &BlockHash) {
		let removed = match self.pinned.entry(hash.clone()) {
			Entry::Occupied(mut entry) => {
				*entry.get_mut() -= 1;
				if *entry.get() == 0 {
					entry.remove();
					true
				} else {
					false
				}
			},
			Entry::Vacant(_) => false,
		};

		if removed {
			let mut parent = Some(hash.clone());
			while let Some(hash) = parent {
				parent = self.parents.get(&hash).cloned();
				match self.pinned_insertions.entry(hash.clone()) {
					Entry::Occupied(mut entry) => {
						entry.get_mut().1 -= 1;
						if entry.get().1 == 0 {
							let (inserted, _) = entry.remove();
							trace!(
								target: LOG_TARGET_PIN,
								"Discarding unpinned non-canon block: {:?}",
								hash
							);
							discard_values(&mut self.values, inserted);
							self.parents.remove(&hash);
						}
					},
					Entry::Vacant(_) => break,
				};
			}
		}
	}
}

#[cfg(test)]
mod tests {
	use super::{to_journal_key, NonCanonicalOverlay};
	use crate::{
		test::{make_changeset, make_db},
		ChangeSet, CommitSet, MetaDb, StateDbError,
	};
	use sp_core::H256;

	fn contains(overlay: &NonCanonicalOverlay<H256, H256>, key: u64) -> bool {
		overlay.get(&H256::from_low_u64_be(key)) ==
			Some(H256::from_low_u64_be(key).as_bytes().to_vec())
	}

	#[test]
	fn created_from_empty_db() {
		let db = make_db(&[]);
		let overlay: NonCanonicalOverlay<H256, H256> = NonCanonicalOverlay::new(&db).unwrap();
		assert_eq!(overlay.last_canonicalized, None);
		assert!(overlay.levels.is_empty());
		assert!(overlay.parents.is_empty());
	}

	#[test]
	#[should_panic]
	fn canonicalize_empty_panics() {
		let db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		let mut commit = CommitSet::default();
		overlay.canonicalize(&H256::default(), &mut commit).unwrap();
	}

	#[test]
	#[should_panic]
	fn insert_ahead_panics() {
		let db = make_db(&[]);
		let h1 = H256::random();
		let h2 = H256::random();
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		overlay.insert(&h1, 2, &H256::default(), ChangeSet::default()).unwrap();
		overlay.insert(&h2, 1, &h1, ChangeSet::default()).unwrap();
	}

	#[test]
	#[should_panic]
	fn insert_behind_panics() {
		let h1 = H256::random();
		let h2 = H256::random();
		let db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		overlay.insert(&h1, 1, &H256::default(), ChangeSet::default()).unwrap();
		overlay.insert(&h2, 3, &h1, ChangeSet::default()).unwrap();
	}

	#[test]
	#[should_panic]
	fn insert_unknown_parent_panics() {
		let db = make_db(&[]);
		let h1 = H256::random();
		let h2 = H256::random();
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		overlay.insert(&h1, 1, &H256::default(), ChangeSet::default()).unwrap();
		overlay.insert(&h2, 2, &H256::default(), ChangeSet::default()).unwrap();
	}

	#[test]
	fn insert_existing_fails() {
		let db = make_db(&[]);
		let h1 = H256::random();
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		overlay.insert(&h1, 2, &H256::default(), ChangeSet::default()).unwrap();
		assert!(matches!(
			overlay.insert(&h1, 2, &H256::default(), ChangeSet::default()),
			Err(StateDbError::BlockAlreadyExists)
		));
	}

	#[test]
	#[should_panic]
	fn canonicalize_unknown_panics() {
		let h1 = H256::random();
		let h2 = H256::random();
		let db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		overlay.insert(&h1, 1, &H256::default(), ChangeSet::default()).unwrap();
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h2, &mut commit).unwrap();
	}

	#[test]
	fn insert_canonicalize_one() {
		let h1 = H256::random();
		let mut db = make_db(&[1, 2]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		let changeset = make_changeset(&[3, 4], &[2]);
		let insertion = overlay.insert(&h1, 1, &H256::default(), changeset.clone()).unwrap();
		assert_eq!(insertion.data.inserted.len(), 0);
		assert_eq!(insertion.data.deleted.len(), 0);
		assert_eq!(insertion.meta.inserted.len(), 2);
		assert_eq!(insertion.meta.deleted.len(), 0);
		db.commit(&insertion);
		let mut finalization = CommitSet::default();
		overlay.canonicalize(&h1, &mut finalization).unwrap();
		assert_eq!(finalization.data.inserted.len(), changeset.inserted.len());
		assert_eq!(finalization.data.deleted.len(), changeset.deleted.len());
		assert_eq!(finalization.meta.inserted.len(), 1);
		assert_eq!(finalization.meta.deleted.len(), 1);
		db.commit(&finalization);
		assert!(db.data_eq(&make_db(&[1, 3, 4])));
	}

	#[test]
	fn restore_from_journal() {
		let h1 = H256::random();
		let h2 = H256::random();
		let mut db = make_db(&[1, 2]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(
			&overlay
				.insert(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2]))
				.unwrap(),
		);
		db.commit(&overlay.insert(&h2, 11, &h1, make_changeset(&[5], &[3])).unwrap());
		assert_eq!(db.meta_len(), 3);

		let overlay2 = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert_eq!(overlay.levels, overlay2.levels);
		assert_eq!(overlay.parents, overlay2.parents);
		assert_eq!(overlay.last_canonicalized, overlay2.last_canonicalized);
	}

	#[test]
	fn restore_from_journal_after_canonicalize() {
		let h1 = H256::random();
		let h2 = H256::random();
		let mut db = make_db(&[1, 2]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(
			&overlay
				.insert(&h1, 10, &H256::default(), make_changeset(&[3, 4], &[2]))
				.unwrap(),
		);
		db.commit(&overlay.insert(&h2, 11, &h1, make_changeset(&[5], &[3])).unwrap());
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h1, &mut commit).unwrap();
		overlay.unpin(&h1);
		db.commit(&commit);
		assert_eq!(overlay.levels.len(), 1);

		let overlay2 = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert_eq!(overlay.levels, overlay2.levels);
		assert_eq!(overlay.parents, overlay2.parents);
		assert_eq!(overlay.last_canonicalized, overlay2.last_canonicalized);
	}

	#[test]
	fn insert_canonicalize_two() {
		let h1 = H256::random();
		let h2 = H256::random();
		let mut db = make_db(&[1, 2, 3, 4]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		let changeset1 = make_changeset(&[5, 6], &[2]);
		let changeset2 = make_changeset(&[7, 8], &[5, 3]);
		db.commit(&overlay.insert(&h1, 1, &H256::default(), changeset1).unwrap());
		assert!(contains(&overlay, 5));
		db.commit(&overlay.insert(&h2, 2, &h1, changeset2).unwrap());
		assert!(contains(&overlay, 7));
		assert!(contains(&overlay, 5));
		assert_eq!(overlay.levels.len(), 2);
		assert_eq!(overlay.parents.len(), 2);
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h1, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert!(!contains(&overlay, 5));
		assert!(contains(&overlay, 7));
		assert_eq!(overlay.levels.len(), 1);
		assert_eq!(overlay.parents.len(), 1);
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h2, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert_eq!(overlay.levels.len(), 0);
		assert_eq!(overlay.parents.len(), 0);
		assert!(db.data_eq(&make_db(&[1, 4, 6, 7, 8])));
	}

	#[test]
	fn insert_same_key() {
		let mut db = make_db(&[]);
		let (h_1, c_1) = (H256::random(), make_changeset(&[1], &[]));
		let (h_2, c_2) = (H256::random(), make_changeset(&[1], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_1, 1, &H256::default(), c_1).unwrap());
		db.commit(&overlay.insert(&h_2, 1, &H256::default(), c_2).unwrap());
		assert!(contains(&overlay, 1));
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_1, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert!(!contains(&overlay, 1));
	}

	#[test]
	fn insert_and_canonicalize() {
		let h1 = H256::random();
		let h2 = H256::random();
		let h3 = H256::random();
		let mut db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		let changeset = make_changeset(&[], &[]);
		db.commit(&overlay.insert(&h1, 1, &H256::default(), changeset.clone()).unwrap());
		db.commit(&overlay.insert(&h2, 2, &h1, changeset.clone()).unwrap());
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h1, &mut commit).unwrap();
		overlay.canonicalize(&h2, &mut commit).unwrap();
		db.commit(&commit);
		db.commit(&overlay.insert(&h3, 3, &h2, changeset.clone()).unwrap());
		assert_eq!(overlay.levels.len(), 1);
	}

	#[test]
	fn complex_tree() {
		let mut db = make_db(&[]);

		#[rustfmt::skip]
		// - 1 - 1_1 - 1_1_1
		//     \ 1_2 - 1_2_1
		//           \ 1_2_2
		//           \ 1_2_3
		//
		// - 2 - 2_1 - 2_1_1
		//     \ 2_2
		//
		// 1_2_2 is the winner

		let (h_1, c_1) = (H256::random(), make_changeset(&[1], &[]));
		let (h_2, c_2) = (H256::random(), make_changeset(&[2], &[]));

		let (h_1_1, c_1_1) = (H256::random(), make_changeset(&[11], &[]));
		let (h_1_2, c_1_2) = (H256::random(), make_changeset(&[12], &[]));
		let (h_2_1, c_2_1) = (H256::random(), make_changeset(&[21], &[]));
		let (h_2_2, c_2_2) = (H256::random(), make_changeset(&[22], &[]));

		let (h_1_1_1, c_1_1_1) = (H256::random(), make_changeset(&[111], &[]));
		let (h_1_2_1, c_1_2_1) = (H256::random(), make_changeset(&[121], &[]));
		let (h_1_2_2, c_1_2_2) = (H256::random(), make_changeset(&[122], &[]));
		let (h_1_2_3, c_1_2_3) = (H256::random(), make_changeset(&[123], &[]));
		let (h_2_1_1, c_2_1_1) = (H256::random(), make_changeset(&[211], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_1, 1, &H256::default(), c_1).unwrap());

		db.commit(&overlay.insert(&h_1_1, 2, &h_1, c_1_1).unwrap());
		db.commit(&overlay.insert(&h_1_2, 2, &h_1, c_1_2).unwrap());

		db.commit(&overlay.insert(&h_2, 1, &H256::default(), c_2).unwrap());

		db.commit(&overlay.insert(&h_2_1, 2, &h_2, c_2_1).unwrap());
		db.commit(&overlay.insert(&h_2_2, 2, &h_2, c_2_2).unwrap());

		db.commit(&overlay.insert(&h_1_1_1, 3, &h_1_1, c_1_1_1).unwrap());
		db.commit(&overlay.insert(&h_1_2_1, 3, &h_1_2, c_1_2_1).unwrap());
		db.commit(&overlay.insert(&h_1_2_2, 3, &h_1_2, c_1_2_2).unwrap());
		db.commit(&overlay.insert(&h_1_2_3, 3, &h_1_2, c_1_2_3).unwrap());
		db.commit(&overlay.insert(&h_2_1_1, 3, &h_2_1, c_2_1_1).unwrap());

		assert!(contains(&overlay, 2));
		assert!(contains(&overlay, 11));
		assert!(contains(&overlay, 21));
		assert!(contains(&overlay, 111));
		assert!(contains(&overlay, 122));
		assert!(contains(&overlay, 211));
		assert_eq!(overlay.levels.len(), 3);
		assert_eq!(overlay.parents.len(), 11);
		assert_eq!(overlay.last_canonicalized, Some((H256::default(), 0)));

		// check if restoration from journal results in the same tree
		let overlay2 = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert_eq!(overlay.levels, overlay2.levels);
		assert_eq!(overlay.parents, overlay2.parents);
		assert_eq!(overlay.last_canonicalized, overlay2.last_canonicalized);

		// canonicalize 1. 2 and all its children should be discarded
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_1, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert_eq!(overlay.levels.len(), 2);
		assert_eq!(overlay.parents.len(), 6);
		assert!(!contains(&overlay, 1));
		assert!(!contains(&overlay, 2));
		assert!(!contains(&overlay, 21));
		assert!(!contains(&overlay, 22));
		assert!(!contains(&overlay, 211));
		assert!(contains(&overlay, 111));
		assert!(!contains(&overlay, 211));
		// check that journals are deleted
		assert!(db.get_meta(&to_journal_key(1, 0)).unwrap().is_none());
		assert!(db.get_meta(&to_journal_key(1, 1)).unwrap().is_none());
		assert!(db.get_meta(&to_journal_key(2, 1)).unwrap().is_some());
		assert!(db.get_meta(&to_journal_key(2, 2)).unwrap().is_none());
		assert!(db.get_meta(&to_journal_key(2, 3)).unwrap().is_none());

		// canonicalize 1_2. 1_1 and all its children should be discarded
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_1_2, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert_eq!(overlay.levels.len(), 1);
		assert_eq!(overlay.parents.len(), 3);
		assert!(!contains(&overlay, 11));
		assert!(!contains(&overlay, 111));
		assert!(contains(&overlay, 121));
		assert!(contains(&overlay, 122));
		assert!(contains(&overlay, 123));
		assert!(overlay.have_block(&h_1_2_1));
		assert!(!overlay.have_block(&h_1_2));
		assert!(!overlay.have_block(&h_1_1));
		assert!(!overlay.have_block(&h_1_1_1));

		// canonicalize 1_2_2
		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_1_2_2, &mut commit).unwrap();
		db.commit(&commit);
		overlay.sync();
		assert_eq!(overlay.levels.len(), 0);
		assert_eq!(overlay.parents.len(), 0);
		assert!(db.data_eq(&make_db(&[1, 12, 122])));
		assert_eq!(overlay.last_canonicalized, Some((h_1_2_2, 3)));
	}

	#[test]
	fn insert_revert() {
		let h1 = H256::random();
		let h2 = H256::random();
		let mut db = make_db(&[1, 2, 3, 4]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert!(overlay.revert_one().is_none());
		let changeset1 = make_changeset(&[5, 6], &[2]);
		let changeset2 = make_changeset(&[7, 8], &[5, 3]);
		db.commit(&overlay.insert(&h1, 1, &H256::default(), changeset1).unwrap());
		db.commit(&overlay.insert(&h2, 2, &h1, changeset2).unwrap());
		assert!(contains(&overlay, 7));
		db.commit(&overlay.revert_one().unwrap());
		assert_eq!(overlay.parents.len(), 1);
		assert!(contains(&overlay, 5));
		assert!(!contains(&overlay, 7));
		db.commit(&overlay.revert_one().unwrap());
		assert_eq!(overlay.levels.len(), 0);
		assert_eq!(overlay.parents.len(), 0);
		assert!(overlay.revert_one().is_none());
	}

	#[test]
	fn keeps_pinned() {
		let mut db = make_db(&[]);

		#[rustfmt::skip]
		// - 0 - 1_1
		//     \ 1_2

		let (h_1, c_1) = (H256::random(), make_changeset(&[1], &[]));
		let (h_2, c_2) = (H256::random(), make_changeset(&[2], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_1, 1, &H256::default(), c_1).unwrap());
		db.commit(&overlay.insert(&h_2, 1, &H256::default(), c_2).unwrap());

		overlay.pin(&h_1);

		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_2, &mut commit).unwrap();
		db.commit(&commit);
		assert!(contains(&overlay, 1));
		overlay.unpin(&h_1);
		assert!(!contains(&overlay, 1));
	}

	#[test]
	fn keeps_pinned_ref_count() {
		let mut db = make_db(&[]);

		#[rustfmt::skip]
		// - 0 - 1_1
		//     \ 1_2
		//     \ 1_3

		// 1_1 and 1_2 both make the same change
		let (h_1, c_1) = (H256::random(), make_changeset(&[1], &[]));
		let (h_2, c_2) = (H256::random(), make_changeset(&[1], &[]));
		let (h_3, c_3) = (H256::random(), make_changeset(&[], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_1, 1, &H256::default(), c_1).unwrap());
		db.commit(&overlay.insert(&h_2, 1, &H256::default(), c_2).unwrap());
		db.commit(&overlay.insert(&h_3, 1, &H256::default(), c_3).unwrap());

		overlay.pin(&h_1);

		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_3, &mut commit).unwrap();
		db.commit(&commit);

		assert!(contains(&overlay, 1));
		overlay.unpin(&h_1);
		assert!(!contains(&overlay, 1));
	}

	#[test]
	fn pins_canonicalized() {
		let mut db = make_db(&[]);

		let (h_1, c_1) = (H256::random(), make_changeset(&[1], &[]));
		let (h_2, c_2) = (H256::random(), make_changeset(&[2], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_1, 1, &H256::default(), c_1).unwrap());
		db.commit(&overlay.insert(&h_2, 2, &h_1, c_2).unwrap());

		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_1, &mut commit).unwrap();
		overlay.canonicalize(&h_2, &mut commit).unwrap();
		assert!(contains(&overlay, 1));
		assert!(contains(&overlay, 2));
		db.commit(&commit);
		overlay.sync();
		assert!(!contains(&overlay, 1));
		assert!(!contains(&overlay, 2));
	}

	#[test]
	fn pin_keeps_parent() {
		let mut db = make_db(&[]);

		#[rustfmt::skip]
		// - 0 - 1_1 - 2_1
		//     \ 1_2

		let (h_11, c_11) = (H256::random(), make_changeset(&[1], &[]));
		let (h_12, c_12) = (H256::random(), make_changeset(&[], &[]));
		let (h_21, c_21) = (H256::random(), make_changeset(&[], &[]));

		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&h_11, 1, &H256::default(), c_11).unwrap());
		db.commit(&overlay.insert(&h_12, 1, &H256::default(), c_12).unwrap());
		db.commit(&overlay.insert(&h_21, 2, &h_11, c_21).unwrap());

		overlay.pin(&h_21);

		let mut commit = CommitSet::default();
		overlay.canonicalize(&h_12, &mut commit).unwrap();
		db.commit(&commit);

		assert!(contains(&overlay, 1));
		overlay.unpin(&h_21);
		assert!(!contains(&overlay, 1));
		overlay.unpin(&h_12);
		assert!(overlay.pinned.is_empty());
	}

	#[test]
	fn restore_from_journal_after_canonicalize_no_first() {
		// This test discards a branch that is journaled under a non-zero index on level 1,
		// making sure all journals are loaded for each level even if some of them are missing.
		let root = H256::random();
		let h1 = H256::random();
		let h2 = H256::random();
		let h11 = H256::random();
		let h21 = H256::random();
		let mut db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&root, 10, &H256::default(), make_changeset(&[], &[])).unwrap());
		db.commit(&overlay.insert(&h1, 11, &root, make_changeset(&[1], &[])).unwrap());
		db.commit(&overlay.insert(&h2, 11, &root, make_changeset(&[2], &[])).unwrap());
		db.commit(&overlay.insert(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap());
		db.commit(&overlay.insert(&h21, 12, &h2, make_changeset(&[21], &[])).unwrap());
		let mut commit = CommitSet::default();
		overlay.canonicalize(&root, &mut commit).unwrap();
		overlay.canonicalize(&h2, &mut commit).unwrap(); // h11 should stay in the DB
		db.commit(&commit);
		assert_eq!(overlay.levels.len(), 1);
		assert!(contains(&overlay, 21));
		assert!(!contains(&overlay, 11));
		assert!(db.get_meta(&to_journal_key(12, 1)).unwrap().is_some());

		// Restore into a new overlay and check that journaled value exists.
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert!(contains(&overlay, 21));

		let mut commit = CommitSet::default();
		overlay.canonicalize(&h21, &mut commit).unwrap(); // h11 should stay in the DB
		db.commit(&commit);
		overlay.sync();
		assert!(!contains(&overlay, 21));
	}

	#[test]
	fn index_reuse() {
		// This test checks that the journal index freed by a discarded sibling block is reused
		// when a new block is inserted at the same level.
		let root = H256::random();
		let h1 = H256::random();
		let h2 = H256::random();
		let h11 = H256::random();
		let h21 = H256::random();
		let mut db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&root, 10, &H256::default(), make_changeset(&[], &[])).unwrap());
		db.commit(&overlay.insert(&h1, 11, &root, make_changeset(&[1], &[])).unwrap());
		db.commit(&overlay.insert(&h2, 11, &root, make_changeset(&[2], &[])).unwrap());
		db.commit(&overlay.insert(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap());
		db.commit(&overlay.insert(&h21, 12, &h2, make_changeset(&[21], &[])).unwrap());
		let mut commit = CommitSet::default();
		overlay.canonicalize(&root, &mut commit).unwrap();
		overlay.canonicalize(&h2, &mut commit).unwrap(); // h11 should stay in the DB
		db.commit(&commit);

		// add another block at top level. It should reuse journal index 0 of previously discarded
		// block
		let h22 = H256::random();
		db.commit(&overlay.insert(&h22, 12, &h2, make_changeset(&[22], &[])).unwrap());
		assert_eq!(overlay.levels[0].blocks[0].journal_index, 1);
		assert_eq!(overlay.levels[0].blocks[1].journal_index, 0);

		// Restore into a new overlay and check that journaled value exists.
		let overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		assert_eq!(overlay.parents.len(), 2);
		assert!(contains(&overlay, 21));
		assert!(contains(&overlay, 22));
	}

	#[test]
	fn remove_works() {
		let root = H256::random();
		let h1 = H256::random();
		let h2 = H256::random();
		let h11 = H256::random();
		let h21 = H256::random();
		let mut db = make_db(&[]);
		let mut overlay = NonCanonicalOverlay::<H256, H256>::new(&db).unwrap();
		db.commit(&overlay.insert(&root, 10, &H256::default(), make_changeset(&[], &[])).unwrap());
		db.commit(&overlay.insert(&h1, 11, &root, make_changeset(&[1], &[])).unwrap());
		db.commit(&overlay.insert(&h2, 11, &root, make_changeset(&[2], &[])).unwrap());
		db.commit(&overlay.insert(&h11, 12, &h1, make_changeset(&[11], &[])).unwrap());
		db.commit(&overlay.insert(&h21, 12, &h2, make_changeset(&[21], &[])).unwrap());
		assert!(overlay.remove(&h1).is_none());
		assert!(overlay.remove(&h2).is_none());
		assert_eq!(overlay.levels.len(), 3);

		db.commit(&overlay.remove(&h11).unwrap());
		assert!(!contains(&overlay, 11));

		db.commit(&overlay.remove(&h21).unwrap());
		assert_eq!(overlay.levels.len(), 2);

		db.commit(&overlay.remove(&h2).unwrap());
		assert!(!contains(&overlay, 2));
	}
1130	}
1131}