Skip to content

Commit 0782de1

Browse files
committed
Implementing blind zipper changes for viz, counters, and arena_compact
Agh! We did it again. Both implemented the same changes at the same time
1 parent 302690e commit 0782de1

3 files changed

Lines changed: 39 additions & 32 deletions

File tree

src/arena_compact.rs

Lines changed: 34 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ use crate::{
8686
utils::{BitMask, ByteMask, find_prefix_overlap},
8787
zipper::{
8888
Zipper, ZipperValues, ZipperForking, ZipperAbsolutePath, ZipperIteration,
89-
ZipperMoving, ZipperPathBuffer, ZipperPath, ZipperReadOnlyValues,
89+
ZipperMoving, ZipperPath, ZipperPathBuffer, ZipperReadOnlyValues,
9090
ZipperConcrete, ZipperReadOnlyConditionalValues,
9191
},
9292
};
@@ -828,13 +828,12 @@ impl ArenaCompactTree<Vec<u8>> {
828828
/// Construct [ArenaCompactTree] from a read zipper.
829829
/// # Examples
830830
/// ```
831-
/// use pathmap::{PathMap, arena_compact::ArenaCompactTree};
831+
/// use pathmap::{PathMap, zipper::*, arena_compact::ArenaCompactTree};
832832
/// let items = ["ace", "acf", "adg", "adh", "bjk"];
833833
/// let btm = PathMap::from_iter(items.iter().map(|i| (i, ())));
834834
/// let tree1 = ArenaCompactTree::from_zipper(btm.read_zipper(), |_v| 0);
835835
/// let mut zipper = tree1.read_zipper();
836836
/// for path in items {
837-
/// use pathmap::zipper::{ZipperMoving, ZipperPath};
838837
/// zipper.reset();
839838
/// assert!(zipper.descend_to_existing(path) == path.len());
840839
/// assert_eq!(zipper.path(), path.as_bytes());
@@ -1487,38 +1486,44 @@ where Storage: AsRef<[u8]>
14871486
Some(value)
14881487
}
14891488

1490-
fn ascend_invalid(&mut self, limit: Option<&mut usize>) -> bool {
1489+
/// Ascends any non-existent portion of the path. Returns the number of steps ascended
1490+
///
1491+
/// `limit`, when provided, sets an upper-bound on the number of steps that will be ascended.
1492+
/// The returned count may be fewer than `limit` — in particular it is `0` if the zipper is
1493+
/// already at an existent, aka valid, path
1494+
fn ascend_invalid(&mut self, limit: Option<usize>) -> usize {
14911495
if self.invalid == 0 {
1492-
return true;
1496+
return 0;
14931497
}
14941498
let len = self.path.len();
14951499
let mut invalid_cut = self.invalid.min(len - self.origin_depth);
14961500
if let Some(limit) = limit {
1497-
invalid_cut = invalid_cut.min(*limit);
1498-
*limit -= invalid_cut;
1501+
invalid_cut = invalid_cut.min(limit);
14991502
}
15001503
self.path.truncate(len - invalid_cut);
15011504
self.invalid = self.invalid - invalid_cut;
1502-
self.invalid == 0
1505+
invalid_cut
15031506
}
15041507

1508+
/// Returns the number of steps ascended
15051509
fn ascend_to_branch(&mut self, need_value: bool) -> usize {
15061510
self.trace_pos();
1507-
let orig_len = self.path.len();
1511+
let mut ascended = 0;
15081512
if self.invalid > 0 {
1509-
if !self.ascend_invalid(None) {
1510-
return orig_len - self.path.len();
1513+
ascended += self.ascend_invalid(None);
1514+
if self.invalid > 0 {
1515+
return ascended;
15111516
}
15121517

15131518
match &self.cur_node {
15141519
Node::Line(line) => {
15151520
if need_value && line.value.is_some() {
1516-
return orig_len - self.path.len();
1521+
return ascended;
15171522
}
15181523
}
15191524
Node::Branch(node) => {
15201525
if need_value && node.value.is_some() {
1521-
return orig_len - self.path.len();
1526+
return ascended;
15221527
}
15231528
}
15241529
}
@@ -1536,6 +1541,7 @@ where Storage: AsRef<[u8]>
15361541
this_steps += 1;
15371542
}
15381543
self.path.truncate(self.path.len() - this_steps);
1544+
ascended += this_steps;
15391545
// eprintln!("path={:?}", self.path);
15401546
let brk = match &self.cur_node {
15411547
Node::Branch(node) => {
@@ -1547,7 +1553,7 @@ where Storage: AsRef<[u8]>
15471553
break;
15481554
}
15491555
}
1550-
return orig_len - self.path.len();
1556+
ascended
15511557
}
15521558

15531559
fn descend_cond(&mut self, path: &[u8], on_value: bool) -> usize {
@@ -1631,8 +1637,11 @@ where Storage: AsRef<[u8]>
16311637
}
16321638
top2_frame.child_index - 1
16331639
};
1634-
debug_assert_eq!(self.ascend(1), 1);
1635-
self.descend_indexed_byte(sibling_idx)
1640+
let ascended = self.ascend_byte();
1641+
debug_assert!(ascended);
1642+
let result = self.descend_indexed_byte(sibling_idx);
1643+
debug_assert!(result.is_some());
1644+
result
16361645
}
16371646
}
16381647

@@ -1720,6 +1729,7 @@ where Storage: AsRef<[u8]>
17201729
/// Returns the path from the zipper's root to the current focus
17211730
fn path(&self) -> &[u8] { &self.path[self.origin_depth..] }
17221731
}
1732+
17231733
/// An interface to enable moving a zipper around the trie and inspecting paths
17241734
impl<'tree, Storage, Value> ZipperMoving for ACTZipper<'tree, Storage, Value>
17251735
where Storage: AsRef<[u8]>
@@ -1926,10 +1936,11 @@ where Storage: AsRef<[u8]>
19261936
/// If the root is fewer than `n` steps from the zipper's position, then this method will stop at
19271937
/// the root and return `false`
19281938
fn ascend(&mut self, steps: usize) -> usize {
1929-
let mut remaining = steps;
19301939
self.trace_pos();
1931-
if !self.ascend_invalid(Some(&mut remaining)) {
1932-
return steps - remaining;
1940+
let mut remaining = steps;
1941+
remaining -= self.ascend_invalid(Some(steps));
1942+
if remaining == 0 {
1943+
return steps;
19331944
}
19341945
while let Some(top_frame) = self.stack.last_mut() {
19351946
let rest_path = &self.path[self.origin_depth..];
@@ -1951,20 +1962,14 @@ where Storage: AsRef<[u8]>
19511962
unreachable!();
19521963
}
19531964

1954-
/// Ascends the zipper up a single byte. Equivalent to passing `1` to [ascend](Self::ascend)
19551965
fn ascend_byte(&mut self) -> bool {
19561966
self.ascend(1) == 1
19571967
}
19581968

1959-
/// Ascends the zipper to the nearest upstream branch point or value. Returns `true` if the zipper
1960-
/// focus moved upwards, otherwise returns `false` if the zipper was already at the root
19611969
fn ascend_until(&mut self) -> usize {
19621970
self.ascend_to_branch(true)
19631971
}
19641972

1965-
/// Ascends the zipper to the nearest upstream branch point, skipping over values along the way. Returns
1966-
/// `true` if the zipper focus moved upwards, otherwise returns `false` if the zipper was already at the
1967-
/// root
19681973
fn ascend_until_branch(&mut self) -> usize {
19691974
self.ascend_to_branch(false)
19701975
}
@@ -2034,21 +2039,22 @@ where Storage: AsRef<[u8]>
20342039
let mut depth = k;
20352040
'outer: loop {
20362041
while depth > 0 && self.child_count() <= 1 {
2037-
if self.ascend(1) != 1 {
2042+
if !self.ascend_byte() {
20382043
break 'outer;
20392044
}
20402045
depth -= 1;
20412046
}
20422047
let stack = self.stack.last_mut().unwrap();
20432048
let idx = stack.child_index + 1;
20442049
if idx >= stack.child_count {
2045-
if depth == 0 || self.ascend(1) != 1 {
2050+
if depth == 0 || !self.ascend_byte() {
20462051
break 'outer;
20472052
}
20482053
depth -= 1;
20492054
continue 'outer;
20502055
}
2051-
assert!(self.descend_indexed_byte(idx).is_some());
2056+
let descended = self.descend_indexed_byte(idx);
2057+
debug_assert!(descended.is_some());
20522058
depth += 1;
20532059
for _ii in 0..k - depth {
20542060
if self.descend_first_byte().is_none() {

src/counters.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ use crate::trie_node::{TaggedNodeRef, TrieNode};
44

55
/// Example usage of counters
66
///
7-
/// ```
7+
/// ```ignore
88
/// pathmap::counters::print_traversal(&map.read_zipper());
99
/// let counters = pathmap::counters::Counters::count_ocupancy(&map);
1010
/// counters.print_histogram_by_depth();

src/path_tracker.rs

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,11 @@ use crate::{
77
},
88
};
99

10-
/// Wrapper for blind zippers that allows tracking path
10+
/// Wrapper to implement [`ZipperPaths`] for zipper types that implement `ZipperMoving`.
11+
/// This is useful for tracking the path of "blind" zipper types
1112
///
12-
/// This allows having nested virtual zippers that don't maintain their
13-
/// own path buffer, such that they don't repeat the work of copying paths.
13+
/// The "blind" zipper pattern enables nested virtual zippers to efficiently compose,
14+
/// without repeating the work of copying paths.
1415
///
1516
/// Example:
1617
/// ```rust

0 commit comments

Comments
 (0)