@@ -67,45 +67,66 @@ module.exports = (plan, limit) => {
 			}
 		}
 	}
-	// Reduce until limit reached
-	while (currentCount > limit) {
-		// Select the node that helps reach the limit most effectively without overmerging
-		const overLimit = currentCount - limit;
-		let bestNode;
-		let bestCost = Infinity;
+	// Reduce until limit reached. When no reduction is needed at all, skip
+	// building the candidate set entirely to avoid paying for the setup on the
+	// common fast path.
+	if (currentCount > limit) {
+		// Pre-filter candidate nodes so the inner selection loop skips structural
+		// non-candidates entirely. `children` length and parent presence are
+		// fixed after tree construction; only `entries` can change (it can only
+		// decrease), so a node that fails the `entries` check in a later round
+		// is simply skipped via `continue`. When we merge a subtree we drop the
+		// descendants from the candidate set to keep it shrinking over
+		// iterations.
+		/** @type {Set<TreeNode<T>>} */
+		const candidates = new Set();
 		for (const node of treeMap.values()) {
-			if (node.entries <= 1 || !node.children || !node.parent) continue;
+			if (!node.parent || !node.children) continue;
 			if (node.children.length === 0) continue;
 			if (node.children.length === 1 && !node.value) continue;
-			// Try to select the node that has just a bit more entries than we need to reduce.
-			// When the overshoot would be more than 30% of the limit,
-			// also consider nodes with just a bit fewer entries than we need to reduce.
-			const cost =
-				node.entries - 1 >= overLimit
-					? node.entries - 1 - overLimit
-					: overLimit - node.entries + 1 + limit * 0.3;
-			if (cost < bestCost) {
-				bestNode = node;
-				bestCost = cost;
-			}
-		}
-		if (!bestNode) break;
-		// Merge all children
-		const reduction = bestNode.entries - 1;
-		bestNode.active = true;
-		bestNode.entries = 1;
-		currentCount -= reduction;
-		let { parent } = bestNode;
-		while (parent) {
-			parent.entries -= reduction;
-			parent = parent.parent;
+			candidates.add(node);
 		}
-		const queue = new Set(bestNode.children);
-		for (const node of queue) {
-			node.active = false;
-			node.entries = 0;
-			if (node.children) {
-				for (const child of node.children) queue.add(child);
+		const costBias = limit * 0.3;
+		while (currentCount > limit) {
+			// Select the node that helps reach the limit most effectively without overmerging
+			const overLimit = currentCount - limit;
+			let bestNode;
+			let bestCost = Infinity;
+			for (const node of candidates) {
+				if (node.entries <= 1) continue;
+				// Try to select the node that has just a bit more entries than we need to reduce.
+				// When the overshoot would be more than 30% of the limit,
+				// also consider nodes with just a bit fewer entries than we need to reduce.
+				const diff = node.entries - 1 - overLimit;
+				const cost = diff >= 0 ? diff : -diff + costBias;
+				if (cost < bestCost) {
+					bestNode = node;
+					bestCost = cost;
+					// A cost of 0 means the merge reduces exactly to the limit;
+					// no further candidate can improve on that, so stop scanning.
+					if (cost === 0) break;
+				}
+			}
+			if (!bestNode) break;
+			// Merge all children
+			const reduction = bestNode.entries - 1;
+			bestNode.active = true;
+			bestNode.entries = 1;
+			candidates.delete(bestNode);
+			currentCount -= reduction;
+			let { parent } = bestNode;
+			while (parent) {
+				parent.entries -= reduction;
+				parent = parent.parent;
+			}
+			const queue = new Set(bestNode.children);
+			for (const node of queue) {
+				node.active = false;
+				node.entries = 0;
+				candidates.delete(node);
+				if (node.children) {
+					for (const child of node.children) queue.add(child);
+				}
 			}
 		}
 	}
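
The selection heuristic can be hard to read straight out of the diff, so here is a minimal standalone sketch of the same cost formula. It is not part of the commit: the `pickBest` helper and the plain array of entry counts are illustrative assumptions, whereas the real code iterates over the `candidates` set of tree nodes.

// Illustrative sketch, not from the commit: choose the candidate whose merge
// overshoots the target least, penalizing undershooting candidates by 30% of
// the limit, mirroring `cost = diff >= 0 ? diff : -diff + costBias` above.
const pickBest = (entryCounts, currentCount, limit) => {
	const overLimit = currentCount - limit;
	const costBias = limit * 0.3;
	let best;
	let bestCost = Infinity;
	for (const entries of entryCounts) {
		if (entries <= 1) continue;
		const diff = entries - 1 - overLimit;
		const cost = diff >= 0 ? diff : -diff + costBias;
		if (cost < bestCost) {
			best = entries;
			bestCost = cost;
			if (cost === 0) break; // exact fit, nothing can beat it
		}
	}
	return best;
};

// Example: 120 entries with a limit of 100 means 20 entries too many.
// Costs: 15 -> 6 + 30 = 36 (undershoots), 22 -> 1, 40 -> 19, so 22 wins
// and merging that node removes 21 entries, landing at 99.
console.log(pickBest([15, 22, 40], 120, 100)); // 22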