8352067: Remove the NMT treap and replace its uses with the utilities red-black tree

Reviewed-by: jsjolen, ayang
This commit is contained in:
Casper Norrbin 2025-08-11 12:22:52 +00:00
parent 43cfd80c1c
commit 0ad919c1e5
14 changed files with 200 additions and 892 deletions

View File

@ -64,12 +64,12 @@ void MemoryFileTracker::print_report_on(const MemoryFile* file, outputStream* st
stream->print_cr("Memory map of %s", file->_descriptive_name);
stream->cr();
VMATree::TreapNode* prev = nullptr;
const VMATree::TNode* prev = nullptr;
#ifdef ASSERT
VMATree::TreapNode* broken_start = nullptr;
VMATree::TreapNode* broken_end = nullptr;
const VMATree::TNode* broken_start = nullptr;
const VMATree::TNode* broken_end = nullptr;
#endif
file->_tree.visit_in_order([&](VMATree::TreapNode* current) {
file->_tree.visit_in_order([&](const VMATree::TNode* current) {
if (prev == nullptr) {
// Must be first node.
prev = current;

View File

@ -1,447 +0,0 @@
/*
* Copyright (c) 2024, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef SHARE_NMT_NMTTREAP_HPP
#define SHARE_NMT_NMTTREAP_HPP
#include "runtime/os.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/macros.hpp"
#include "utilities/powerOfTwo.hpp"
#include <type_traits>
// A Treap is a self-balanced binary tree where each node is equipped with a
// priority. It adds the invariant that the priority of a parent P is strictly larger
// than the priority of its children. When priorities are randomly
// assigned the tree is balanced.
// All operations are defined through merge and split, which are each other's inverse.
// merge(left_treap, right_treap) => treap where left_treap <= right_treap
// split(treap, key) => (left_treap, right_treap) where left_treap <= right_treap
// Recursion is used in these, but the depth of the call stack is the depth of
// the tree which is O(log n) so we are safe from stack overflow.
// TreapNode has LEQ nodes on the left, GT nodes on the right.
//
// COMPARATOR must have a static function `cmp(a,b)` which returns:
// - an int < 0 when a < b
// - an int == 0 when a == b
// - an int > 0 when a > b
// ALLOCATOR must check for oom and exit, as Treap currently does not handle the allocation
// failing.
template<typename K, typename V, typename COMPARATOR, typename ALLOCATOR>
class Treap {
  friend class NMTVMATreeTest;
  friend class NMTTreapTest;
  friend class VMTWithVMATreeTest;

public:
  // A node owns its key, value and pointers to its (at most two) children.
  // Priorities are assigned randomly at insertion and never change.
  class TreapNode {
    friend Treap;
    uint64_t _priority;
    const K _key;
    V _value;
    TreapNode* _left;
    TreapNode* _right;

  public:
    // Node with a default-constructed value (used by find()'s insert path).
    TreapNode(const K& k, uint64_t p) : _priority(p), _key(k), _left(nullptr), _right(nullptr) {}
    TreapNode(const K& k, const V& v, uint64_t p)
      : _priority(p),
        _key(k),
        _value(v),
        _left(nullptr),
        _right(nullptr) {
    }
    const K& key() const { return _key; }
    V& val() { return _value; }
    TreapNode* left() const { return _left; }
    TreapNode* right() const { return _right; }
  };

private:
  ALLOCATOR _allocator;
  TreapNode* _root;
  // A random number
  static constexpr const uint64_t _initial_seed = 0xC8DD2114AE0543A3;
  uint64_t _prng_seed;
  int _node_count;

  // Produce the next pseudo-random 64-bit priority by combining two
  // outputs of os::next_random (which advances a 32-bit seed).
  uint64_t prng_next() {
    uint64_t first_half = os::next_random(_prng_seed);
    uint64_t second_half = os::next_random(_prng_seed >> 32);
    _prng_seed = first_half | (second_half << 32);
    return _prng_seed;
  }

  struct node_pair {
    TreapNode* left;
    TreapNode* right;
  };

  enum SplitMode {
    LT,  // <
    LEQ  // <=
  };

  // Split tree at head into two trees, SplitMode decides where EQ values go.
  // We have SplitMode because it makes remove() trivial to implement.
  static node_pair split(TreapNode* head, const K& key, SplitMode mode = LEQ DEBUG_ONLY(COMMA int recur_count = 0)) {
    assert(recur_count < 200, "Call-stack depth should never exceed 200");
    if (head == nullptr) {
      return {nullptr, nullptr};
    }
    if ((COMPARATOR::cmp(head->_key, key) <= 0 && mode == LEQ) || (COMPARATOR::cmp(head->_key, key) < 0 && mode == LT)) {
      node_pair p = split(head->_right, key, mode DEBUG_ONLY(COMMA recur_count + 1));
      head->_right = p.left;
      return node_pair{head, p.right};
    } else {
      node_pair p = split(head->_left, key, mode DEBUG_ONLY(COMMA recur_count + 1));
      head->_left = p.right;
      return node_pair{p.left, head};
    }
  }

  // Invariant: left is a treap whose keys are LEQ to the keys in right.
  static TreapNode* merge(TreapNode* left, TreapNode* right DEBUG_ONLY(COMMA int recur_count = 0)) {
    assert(recur_count < 200, "Call-stack depth should never exceed 200");
    if (left == nullptr) return right;
    if (right == nullptr) return left;

    if (left->_priority > right->_priority) {
      // We need
      //      LEFT
      //         |
      //         RIGHT
      // for the invariant re: priorities to hold.
      left->_right = merge(left->_right, right DEBUG_ONLY(COMMA recur_count + 1));
      return left;
    } else {
      // We need
      //     RIGHT
      //     |
      //  LEFT
      // for the invariant re: priorities to hold.
      right->_left = merge(left, right->_left DEBUG_ONLY(COMMA recur_count + 1));
      return right;
    }
  }

  // Standard BST lookup; returns nullptr when k is absent.
  static TreapNode* find(TreapNode* node, const K& k DEBUG_ONLY(COMMA int recur_count = 0)) {
    // Same depth bound as split()/merge(): the treap is expected to stay balanced.
    assert(recur_count < 200, "Call-stack depth should never exceed 200");
    if (node == nullptr) {
      return nullptr;
    }
    int key_cmp_k = COMPARATOR::cmp(node->key(), k);
    if (key_cmp_k == 0) { // key EQ k
      return node;
    }
    if (key_cmp_k < 0) { // key LT k
      return find(node->right(), k DEBUG_ONLY(COMMA recur_count + 1));
    } else { // key GT k
      return find(node->left(), k DEBUG_ONLY(COMMA recur_count + 1));
    }
  }

#ifdef ASSERT
  // Check all treap invariants: bounded depth, heap ordering of priorities,
  // node count, and strictly increasing key order.
  void verify_self() {
    // A balanced binary search tree should have a depth on the order of log(N).
    // We take the ceiling of log_2(N + 1) * 3 as our maximum bound.
    // For comparison, a RB-tree has a proven max depth of log_2(N + 1) * 2.
    const int expected_maximum_depth = ceil(log2i(this->_node_count + 1) * 3);
    // Find the maximum depth through DFS and ensure that the priority invariant holds.
    int maximum_depth_found = 0;
    struct DFS {
      int depth;
      uint64_t parent_prio;
      TreapNode* n;
    };
    GrowableArrayCHeap<DFS, mtNMT> to_visit;
    constexpr const uint64_t positive_infinity = 0xFFFFFFFFFFFFFFFF;

    to_visit.push({0, positive_infinity, this->_root});
    while (!to_visit.is_empty()) {
      DFS head = to_visit.pop();
      if (head.n == nullptr) continue;
      maximum_depth_found = MAX2(maximum_depth_found, head.depth);
      assert(head.parent_prio >= head.n->_priority, "broken priority invariant");
      to_visit.push({head.depth + 1, head.n->_priority, head.n->left()});
      to_visit.push({head.depth + 1, head.n->_priority, head.n->right()});
    }
    assert(maximum_depth_found - expected_maximum_depth <= 3,
           "depth unexpectedly large for treap of node count %d, was: %d, expected between %d and %d",
           _node_count, maximum_depth_found, expected_maximum_depth - 3, expected_maximum_depth);

    // Visit everything in order, see that the key ordering is monotonically increasing.
    TreapNode* last_seen = nullptr;
    bool failed = false;
    int seen_count = 0;
    this->visit_in_order([&](TreapNode* node) {
      seen_count++;
      if (last_seen == nullptr) {
        last_seen = node;
        return true;
      }
      // Keys must be strictly increasing; >= 0 also catches duplicate keys,
      // which cannot occur since upsert() keeps keys unique.
      // (Previously this branch set failed = false, so the assert below could never fire.)
      if (COMPARATOR::cmp(last_seen->key(), node->key()) >= 0) {
        failed = true;
      }
      last_seen = node;
      return true;
    });
    assert(seen_count == _node_count, "the number of visited nodes do not match with the number of stored nodes");
    assert(!failed, "keys was not monotonically strongly increasing when visiting in order");
  }
#endif // ASSERT

public:
  NONCOPYABLE(Treap);

  Treap()
    : _allocator(),
      _root(nullptr),
      _prng_seed(_initial_seed),
      _node_count(0) {
    // Keys are never destructed explicitly (nodes are freed raw), so K must be trivial.
    static_assert(std::is_trivially_destructible<K>::value, "must be");
  }

  ~Treap() {
    this->remove_all();
  }

  int size() {
    return _node_count;
  }

  // Insert k -> v; if k already exists only the value is overwritten.
  void upsert(const K& k, const V& v) {
    TreapNode* found = find(_root, k);
    if (found != nullptr) {
      // Already exists, update value.
      found->_value = v;
      return;
    }
    _node_count++;
    // Doesn't exist, make node
    void* node_place = _allocator.allocate(sizeof(TreapNode));
    uint64_t prio = prng_next();
    TreapNode* node = new (node_place) TreapNode(k, v, prio);
    // (LEQ_k, GT_k)
    node_pair split_up = split(this->_root, k);
    // merge(merge(LEQ_k, EQ_k), GT_k)
    this->_root = merge(merge(split_up.left, node), split_up.right);
  }

  // Remove k if present, running the stored value's destructor.
  void remove(const K& k) {
    // (LEQ_k, GT_k)
    node_pair first_split = split(this->_root, k, LEQ);
    // (LT_k, GEQ_k) == (LT_k, EQ_k) since it's from LEQ_k and keys are unique.
    node_pair second_split = split(first_split.left, k, LT);

    if (second_split.right != nullptr) {
      // The key k existed, we delete it.
      _node_count--;
      second_split.right->_value.~V();
      _allocator.free(second_split.right);
    }
    // Merge together everything
    _root = merge(second_split.left, first_split.right);
  }

  // Delete all nodes.
  void remove_all() {
    _node_count = 0;
    GrowableArrayCHeap<TreapNode*, mtNMT> to_delete;
    to_delete.push(_root);
    while (!to_delete.is_empty()) {
      TreapNode* head = to_delete.pop();
      if (head == nullptr) continue;
      to_delete.push(head->_left);
      to_delete.push(head->_right);
      head->_value.~V();
      _allocator.free(head);
    }
    _root = nullptr;
  }

  // Largest node whose key is <= key, or nullptr if no such node exists.
  TreapNode* closest_leq(const K& key) {
    TreapNode* candidate = nullptr;
    TreapNode* pos = _root;
    while (pos != nullptr) {
      int cmp_r = COMPARATOR::cmp(pos->key(), key);
      if (cmp_r == 0) { // Exact match
        candidate = pos;
        break; // Can't become better than that.
      }
      if (cmp_r < 0) {
        // Found a match, try to find a better one.
        candidate = pos;
        pos = pos->_right;
      } else if (cmp_r > 0) {
        pos = pos->_left;
      }
    }
    return candidate;
  }

  struct FindResult {
    FindResult(TreapNode* node, bool new_node) : node(node), new_node(new_node) {}
    TreapNode* const node;
    bool const new_node;
  };

  // Finds the node for the given k in the tree or inserts a new node with the default constructed value.
  FindResult find(const K& k) {
    if (TreapNode* found = find(_root, k)) {
      return FindResult(found, false);
    }
    _node_count++;
    // Doesn't exist, make node
    void* node_place = _allocator.allocate(sizeof(TreapNode));
    uint64_t prio = prng_next();
    TreapNode* node = new (node_place) TreapNode(k, prio);
    // (LEQ_k, GT_k)
    node_pair split_up = split(this->_root, k);
    // merge(merge(LEQ_k, EQ_k), GT_k)
    this->_root = merge(merge(split_up.left, node), split_up.right);
    return FindResult(node, true);
  }

  // Smallest node whose key is > key, or nullptr if no such node exists.
  TreapNode* closest_gt(const K& key) {
    TreapNode* candidate = nullptr;
    TreapNode* pos = _root;
    while (pos != nullptr) {
      int cmp_r = COMPARATOR::cmp(pos->key(), key);
      if (cmp_r > 0) {
        // Found a match, try to find a better one.
        candidate = pos;
        pos = pos->_left;
      } else if (cmp_r <= 0) {
        pos = pos->_right;
      }
    }
    return candidate;
  }

  struct Range {
    TreapNode* start;
    TreapNode* end;
    Range(TreapNode* start, TreapNode* end)
      : start(start), end(end) {}
  };

  // Return the range [start, end)
  // where start->key() <= addr < end->key().
  // Failure to find the range leads to start and/or end being null.
  Range find_enclosing_range(K addr) {
    TreapNode* start = closest_leq(addr);
    TreapNode* end = closest_gt(addr);
    return Range(start, end);
  }

  // Visit all TreapNodes in ascending key order.
  // f returns false to stop the iteration early.
  template<typename F>
  void visit_in_order(F f) const {
    // Iterative in-order traversal with an explicit stack (avoids recursion).
    GrowableArrayCHeap<TreapNode*, mtNMT> to_visit;
    TreapNode* head = _root;
    while (!to_visit.is_empty() || head != nullptr) {
      while (head != nullptr) {
        to_visit.push(head);
        head = head->left();
      }
      head = to_visit.pop();
      if (!f(head)) {
        return;
      }
      head = head->right();
    }
  }

  // Visit all TreapNodes in ascending order whose keys are in range [from, to).
  // f returns false to stop the iteration early.
  template<typename F>
  void visit_range_in_order(const K& from, const K& to, F f) {
    assert(COMPARATOR::cmp(from, to) <= 0, "from must be less or equal to to");
    GrowableArrayCHeap<TreapNode*, mtNMT> to_visit;
    TreapNode* head = _root;
    while (!to_visit.is_empty() || head != nullptr) {
      while (head != nullptr) {
        int cmp_from = COMPARATOR::cmp(head->key(), from);
        to_visit.push(head);
        if (cmp_from >= 0) {
          head = head->left();
        } else {
          // We've reached a node which is strictly less than from
          // We don't need to visit any further to the left.
          break;
        }
      }
      head = to_visit.pop();
      const int cmp_from = COMPARATOR::cmp(head->key(), from);
      const int cmp_to = COMPARATOR::cmp(head->key(), to);
      if (cmp_from >= 0 && cmp_to < 0) {
        if (!f(head)) {
          return;
        }
      }
      if (cmp_to < 0) {
        head = head->right();
      } else {
        // head is >= to; everything to its right is also out of range.
        head = nullptr;
      }
    }
  }
};
// Default allocator for TreapCHeap: backs nodes with NMT-tagged C-heap
// memory and exits the VM instead of returning nullptr on OOM, as the
// Treap contract requires.
class TreapCHeapAllocator {
public:
  void* allocate(size_t sz) {
    void* mem = os::malloc(sz, mtNMT);
    if (mem == nullptr) {
      // Treap assumes allocation never fails; bail out of the VM instead.
      vm_exit_out_of_memory(sz, OOM_MALLOC_ERROR, "treap failed allocation");
    }
    return mem;
  }
  void free(void* ptr) {
    os::free(ptr);
  }
};
template<typename K, typename V, typename COMPARATOR>
using TreapCHeap = Treap<K, V, COMPARATOR, TreapCHeapAllocator>;
#endif //SHARE_NMT_NMTTREAP_HPP

View File

@ -48,8 +48,8 @@ void RegionsTree::NodeHelper::print_on(outputStream* st) {
}
void RegionsTree::print_on(outputStream* st) {
visit_in_order([&](Node* node) {
NodeHelper curr(node);
visit_in_order([&](const Node* node) {
NodeHelper curr(const_cast<Node*>(node));
curr.print_on(st);
return true;
});

View File

@ -45,7 +45,7 @@ class RegionsTree : public VMATree {
SummaryDiff commit_region(address addr, size_t size, const NativeCallStack& stack);
SummaryDiff uncommit_region(address addr, size_t size);
using Node = VMATree::TreapNode;
using Node = VMATree::TNode;
class NodeHelper {
Node* _node;

View File

@ -52,8 +52,8 @@ void RegionsTree::visit_reserved_regions(F func) {
NodeHelper begin_node, prev;
size_t rgn_size = 0;
visit_in_order([&](Node* node) {
NodeHelper curr(node);
visit_in_order([&](const Node* node) {
NodeHelper curr(const_cast<Node*>(node));
if (prev.is_valid()) {
rgn_size += curr.distance_from(prev);
} else {

View File

@ -204,7 +204,7 @@ void VMATree::compute_summary_diff(const SingleDiff::delta region_size,
// update the region state between n1 and n2. Since n1 and n2 are pointers, any update of them will be visible from tree.
// If n1 is noop, it can be removed because its left region (n1->val().in) is already decided and its right state (n1->val().out) is decided here.
// The state of right of n2 (n2->val().out) cannot be decided here yet.
void VMATree::update_region(TreapNode* n1, TreapNode* n2, const RequestInfo& req, SummaryDiff& diff) {
void VMATree::update_region(TNode* n1, TNode* n2, const RequestInfo& req, SummaryDiff& diff) {
assert(n1 != nullptr,"sanity");
assert(n2 != nullptr,"sanity");
//.........n1......n2......
@ -261,8 +261,8 @@ VMATree::SummaryDiff VMATree::register_mapping(position _A, position _B, StateTy
};
stA.out.set_commit_stack(NativeCallStackStorage::invalid);
stB.in.set_commit_stack(NativeCallStackStorage::invalid);
VMATreap::Range rA = _tree.find_enclosing_range(_A);
VMATreap::Range rB = _tree.find_enclosing_range(_B);
VMARBTree::Range rA = _tree.find_enclosing_range(_A);
VMARBTree::Range rB = _tree.find_enclosing_range(_B);
// nodes: .....X.......Y...Z......W........U
// request: A------------------B
@ -320,24 +320,24 @@ VMATree::SummaryDiff VMATree::register_mapping(position _A, position _B, StateTy
// Meaning that whenever any of one item in this sequence is changed, the rest of the consequent items to
// be checked/changed.
TreapNode* X = rA.start;
TreapNode* Y = rA.end;
TreapNode* W = rB.start;
TreapNode* U = rB.end;
TreapNode nA{_A, stA, 0}; // the node that represents A
TreapNode nB{_B, stB, 0}; // the node that represents B
TreapNode* A = &nA;
TreapNode* B = &nB;
auto upsert_if= [&](TreapNode* node) {
TNode* X = rA.start;
TNode* Y = rA.end;
TNode* W = rB.start;
TNode* U = rB.end;
TNode nA{_A, stA}; // the node that represents A
TNode nB{_B, stB}; // the node that represents B
TNode* A = &nA;
TNode* B = &nB;
auto upsert_if= [&](TNode* node) {
if (!node->val().is_noop()) {
_tree.upsert(node->key(), node->val());
}
};
// update region between n1 and n2
auto update = [&](TreapNode* n1, TreapNode* n2) {
auto update = [&](TNode* n1, TNode* n2) {
update_region(n1, n2, req, diff);
};
auto remove_if = [&](TreapNode* node) -> bool{
auto remove_if = [&](TNode* node) -> bool{
if (node->val().is_noop()) {
_tree.remove(node->key());
return true;
@ -347,8 +347,8 @@ VMATree::SummaryDiff VMATree::register_mapping(position _A, position _B, StateTy
GrowableArrayCHeap<position, mtNMT> to_be_removed;
// update regions in range A to B
auto update_loop = [&]() {
TreapNode* prev = nullptr;
_tree.visit_range_in_order(_A + 1, _B + 1, [&](TreapNode* curr) {
TNode* prev = nullptr;
_tree.visit_range_in_order(_A + 1, _B + 1, [&](TNode* curr) {
if (prev != nullptr) {
update_region(prev, curr, req, diff);
// during visit, structure of the tree should not be changed
@ -362,7 +362,7 @@ VMATree::SummaryDiff VMATree::register_mapping(position _A, position _B, StateTy
});
};
// update region of [A,T)
auto update_A = [&](TreapNode* T) {
auto update_A = [&](TNode* T) {
A->val().out = A->val().in;
update(A, T);
};
@ -650,7 +650,7 @@ VMATree::SummaryDiff VMATree::register_mapping(position _A, position _B, StateTy
#ifdef ASSERT
void VMATree::print_on(outputStream* out) {
visit_in_order([&](TreapNode* current) {
visit_in_order([&](const TNode* current) {
out->print("%zu (%s) - %s [%d, %d]-> ", current->key(), NMTUtil::tag_to_name(out_state(current).mem_tag()),
statetype_to_string(out_state(current).type()), current->val().out.reserved_stack(), current->val().out.committed_stack());
return true;
@ -660,11 +660,11 @@ void VMATree::print_on(outputStream* out) {
#endif
VMATree::SummaryDiff VMATree::set_tag(const position start, const size size, const MemTag tag) {
auto pos = [](TreapNode* n) { return n->key(); };
auto pos = [](TNode* n) { return n->key(); };
position from = start;
position end = from+size;
size_t remsize = size;
VMATreap::Range range(nullptr, nullptr);
VMARBTree::Range range(nullptr, nullptr);
// Find the next range to adjust and set range, remsize and from
// appropriately. If it returns false, there is no valid next range.

View File

@ -29,9 +29,9 @@
#include "nmt/memTag.hpp"
#include "nmt/memTag.hpp"
#include "nmt/nmtNativeCallStackStorage.hpp"
#include "nmt/nmtTreap.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/ostream.hpp"
#include "utilities/rbTree.inline.hpp"
#include <cstdint>
// A VMATree stores a sequence of points on the natural number line.
@ -193,17 +193,21 @@ private:
};
public:
using VMATreap = TreapCHeap<position, IntervalChange, PositionComparator>;
using TreapNode = VMATreap::TreapNode;
using VMARBTree = RBTreeCHeap<position, IntervalChange, PositionComparator, mtNMT>;
using TNode = RBNode<position, IntervalChange>;
private:
VMATreap _tree;
VMARBTree _tree;
static IntervalState& in_state(TreapNode* node) {
static IntervalState& in_state(TNode* node) {
return node->val().in;
}
static IntervalState& out_state(TreapNode* node) {
static IntervalState& out_state(TNode* node) {
return node->val().out;
}
static const IntervalState& out_state(const TNode* node) {
return node->val().out;
}
@ -281,7 +285,7 @@ public:
SIndex get_new_reserve_callstack(const SIndex existinting_stack, const StateType ex, const RequestInfo& req) const;
SIndex get_new_commit_callstack(const SIndex existinting_stack, const StateType ex, const RequestInfo& req) const;
void compute_summary_diff(const SingleDiff::delta region_size, const MemTag t1, const StateType& ex, const RequestInfo& req, const MemTag new_tag, SummaryDiff& diff) const;
void update_region(TreapNode* n1, TreapNode* n2, const RequestInfo& req, SummaryDiff& diff);
void update_region(TNode* n1, TNode* n2, const RequestInfo& req, SummaryDiff& diff);
int state_to_index(const StateType st) const {
return
st == StateType::Released ? 0 :
@ -325,6 +329,6 @@ public:
void visit_range_in_order(const position& from, const position& to, F f) {
_tree.visit_range_in_order(from, to, f);
}
VMATreap& tree() { return _tree; }
VMARBTree& tree() { return _tree; }
};
#endif

View File

@ -71,21 +71,25 @@ InlinePrinter::IPInlineSite* InlinePrinter::locate(JVMState* state, ciMethod* ca
}
InlinePrinter::IPInlineSite& InlinePrinter::IPInlineSite::at_bci(int bci, ciMethod* callee) {
auto find_result = _children.find(bci);
IPInlineSite& child = find_result.node->val();
RBTreeCHeap<int, IPInlineSite, Cmp, mtCompiler>::Cursor cursor = _children.cursor(bci);
if (find_result.new_node) {
assert(callee != nullptr, "an inline call is missing in the chain up to the root");
child.set_source(callee, bci);
} else { // We already saw a call at this site before
if (cursor.found()) { // We already saw a call at this site before
IPInlineSite& child = cursor.node()->val();
if (callee != nullptr && callee != child._method) {
outputStream* stream = child.add(InliningResult::SUCCESS);
stream->print("callee changed to ");
CompileTask::print_inline_inner_method_info(stream, callee);
}
return child;
}
return child;
assert(callee != nullptr, "an inline call is missing in the chain up to the root");
RBNode<int, IPInlineSite>* node = _children.allocate_node(bci);
_children.insert_at_cursor(node, cursor);
node->val().set_source(callee, bci);
return node->val();
}
outputStream* InlinePrinter::IPInlineSite::add(InliningResult result) {

View File

@ -26,9 +26,9 @@
#define PRINTINLINING_HPP
#include "memory/allocation.hpp"
#include "nmt/nmtTreap.hpp"
#include "utilities/growableArray.hpp"
#include "utilities/ostream.hpp"
#include "utilities/rbTree.inline.hpp"
class JVMState;
class ciMethod;
@ -77,7 +77,7 @@ private:
ciMethod* _method;
int _bci;
GrowableArrayCHeap<IPInlineAttempt, mtCompiler> _attempts;
TreapCHeap<int, IPInlineSite, Cmp> _children;
RBTreeCHeap<int, IPInlineSite, Cmp, mtCompiler> _children;
public:
IPInlineSite(ciMethod* method, int bci) : _method(method), _bci(bci) {}

View File

@ -167,6 +167,7 @@ public:
template <typename K, typename NodeType, typename COMPARATOR>
class AbstractRBTree {
friend class RBTreeTest;
friend class NMTVMATreeTest;
typedef AbstractRBTree<K, NodeType, COMPARATOR> TreeType;
public:
@ -404,13 +405,21 @@ public:
}
// Visit all RBNodes in ascending order, calling f on each node.
// If f returns `true` the iteration continues, otherwise it is stopped at the current node.
template <typename F>
void visit_in_order(F f) const;
template <typename F>
void visit_in_order(F f);
// Visit all RBNodes in ascending order whose keys are in range [from, to], calling f on each node.
// If f returns `true` the iteration continues, otherwise it is stopped at the current node.
template <typename F>
void visit_range_in_order(const K& from, const K& to, F f) const;
template <typename F>
void visit_range_in_order(const K &from, const K &to, F f);
// Verifies that the tree is correct and holds rb-properties
// If not using a key comparator (when using IntrusiveRBTree for example),
// A second `cmp` must exist in COMPARATOR (see top of file).
@ -457,6 +466,12 @@ public:
free_node(old_node);
}
RBNode<K, V>* allocate_node(const K& key) {
void* node_place = _allocator.allocate(sizeof(RBNode<K, V>));
assert(node_place != nullptr, "rb-tree allocator must exit on failure");
return new (node_place) RBNode<K, V>(key);
}
RBNode<K, V>* allocate_node(const K& key, const V& val) {
void* node_place = _allocator.allocate(sizeof(RBNode<K, V>));
assert(node_place != nullptr, "rb-tree allocator must exit on failure");

View File

@ -85,7 +85,7 @@ inline IntrusiveRBNode* IntrusiveRBNode::rotate_right() {
inline const IntrusiveRBNode* IntrusiveRBNode::prev() const {
const IntrusiveRBNode* node = this;
if (_left != nullptr) { // right subtree exists
if (_left != nullptr) { // left subtree exists
node = _left;
while (node->_right != nullptr) {
node = node->_right;
@ -599,7 +599,21 @@ template <typename F>
inline void AbstractRBTree<K, NodeType, COMPARATOR>::visit_in_order(F f) const {
const NodeType* node = leftmost();
while (node != nullptr) {
f(node);
if (!f(node)) {
return;
}
node = node->next();
}
}
template <typename K, typename NodeType, typename COMPARATOR>
template <typename F>
inline void AbstractRBTree<K, NodeType, COMPARATOR>::visit_in_order(F f) {
NodeType* node = leftmost();
while (node != nullptr) {
if (!f(node)) {
return;
}
node = node->next();
}
}
@ -618,7 +632,30 @@ inline void AbstractRBTree<K, NodeType, COMPARATOR>::visit_range_in_order(const
const NodeType* end = next(cursor_end).node();
while (start != end) {
f(start);
if (!f(start)) {
return;
}
start = start->next();
}
}
template <typename K, typename NodeType, typename COMPARATOR>
template <typename F>
inline void AbstractRBTree<K, NodeType, COMPARATOR>::visit_range_in_order(const K& from, const K& to, F f) {
assert_key_leq(from, to);
if (_root == nullptr) {
return;
}
Cursor cursor_start = cursor(from);
Cursor cursor_end = cursor(to);
NodeType* start = cursor_start.found() ? cursor_start.node() : next(cursor_start).node();
NodeType* end = next(cursor_end).node();
while (start != end) {
if (!f(start)) {
return;
}
start = start->next();
}
}

View File

@ -1,364 +0,0 @@
/*
* Copyright (c) 2024, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "memory/resourceArea.hpp"
#include "nmt/nmtTreap.hpp"
#include "nmt/virtualMemoryTracker.hpp"
#include "runtime/os.hpp"
#include "unittest.hpp"
// Fixture for the NMT treap unit tests. Provides integer/float comparators
// and access to the treap's private members via friendship.
class NMTTreapTest : public testing::Test {
public:
  // Ascending integer ordering.
  struct Cmp {
    static int cmp(int a, int b) {
      return a - b;
    }
  };

  // Descending integer ordering, to test treaps sorted the other way.
  struct CmpInverse {
    static int cmp(int a, int b) {
      return b - a;
    }
  };

  // Total ordering for floats (avoids subtraction to dodge rounding).
  struct FCmp {
    static int cmp(float a, float b) {
      if (a < b) return -1;
      if (a == b) return 0;
      return 1;
    }
  };

#ifdef ASSERT
  // Expose the treap's internal consistency check to the tests.
  template<typename K, typename V, typename CMP, typename ALLOC>
  void verify_it(Treap<K, V, CMP, ALLOC>& t) {
    t.verify_self();
  }
#endif // ASSERT

public:
  // Upserting the same key repeatedly must keep exactly one node per key.
  void inserting_duplicates_results_in_one_value() {
    constexpr const int up_to = 10;
    GrowableArrayCHeap<int, mtTest> nums_seen(up_to, up_to, 0);
    TreapCHeap<int, int, Cmp> treap;
    for (int i = 0; i < up_to; i++) {
      treap.upsert(i, i);
      treap.upsert(i, i);
      treap.upsert(i, i);
      treap.upsert(i, i);
      treap.upsert(i, i);
    }
    treap.visit_in_order([&](TreapCHeap<int, int, Cmp>::TreapNode* node) {
      nums_seen.at(node->key())++;
      return true;
    });
    for (int i = 0; i < up_to; i++) {
      EXPECT_EQ(1, nums_seen.at(i));
    }
  }

  // Both remove() and remove_all() must free every allocated node.
  void treap_ought_not_leak() {
    // Allocator that counts outstanding allocations so leaks are visible.
    struct LeakCheckedAllocator {
      int allocations;
      LeakCheckedAllocator()
        : allocations(0) {
      }
      void* allocate(size_t sz) {
        void* allocation = os::malloc(sz, mtTest);
        if (allocation == nullptr) {
          vm_exit_out_of_memory(sz, OOM_MALLOC_ERROR, "treap failed allocation");
        }
        ++allocations;
        return allocation;
      }
      void free(void* ptr) {
        --allocations;
        os::free(ptr);
      }
    };

    constexpr const int up_to = 10;
    { // Removing keys one by one releases all nodes.
      Treap<int, int, Cmp, LeakCheckedAllocator> treap;
      for (int i = 0; i < up_to; i++) {
        treap.upsert(i, i);
      }
      EXPECT_EQ(up_to, treap._allocator.allocations);
      for (int i = 0; i < up_to; i++) {
        treap.remove(i);
      }
      EXPECT_EQ(0, treap._allocator.allocations);
      EXPECT_EQ(nullptr, treap._root);
    }
    { // remove_all() releases all nodes at once.
      Treap<int, int, Cmp, LeakCheckedAllocator> treap;
      for (int i = 0; i < up_to; i++) {
        treap.upsert(i, i);
      }
      treap.remove_all();
      EXPECT_EQ(0, treap._allocator.allocations);
      EXPECT_EQ(nullptr, treap._root);
    }
  }

  // A key is absent before insertion and findable (with the right key) afterwards.
  void test_find() {
    struct Empty {};
    TreapCHeap<float, Empty, FCmp> treap;
    using Node = TreapCHeap<float, Empty, FCmp>::TreapNode;
    // Note: the unused outer 'Node* n' that shadowed the lambda-local one
    // has been removed.
    auto test = [&](float f) {
      EXPECT_EQ(nullptr, treap.find(treap._root, f));
      treap.upsert(f, Empty{});
      Node* n = treap.find(treap._root, f);
      EXPECT_NE(nullptr, n);
      EXPECT_EQ(f, n->key());
    };
    test(1.0f);
    test(5.0f);
    test(0.0f);
  }
};
// Duplicate upserts of the same key must collapse to a single node.
TEST_VM_F(NMTTreapTest, InsertingDuplicatesResultsInOneValue) {
  inserting_duplicates_results_in_one_value();
}
// Every allocation made by the treap must be matched by a free.
TEST_VM_F(NMTTreapTest, TreapOughtNotLeak) {
  treap_ought_not_leak();
}
// Exercises visit_in_order and visit_range_in_order: empty treap, single
// element, elements just outside the range, an empty half-open range, a
// dense key set, and finally a treap ordered by an inverted comparator.
TEST_VM_F(NMTTreapTest, TestVisitors) {
  { // Tests with 'default' ordering (ascending)
    TreapCHeap<int, int, Cmp> treap;
    using Node = TreapCHeap<int, int, Cmp>::TreapNode;
    // An empty treap must visit no nodes at all.
    treap.visit_range_in_order(0, 100, [&](Node* x) {
      EXPECT_TRUE(false) << "Empty treap has no nodes to visit";
      return true;
    });
    // Single-element set
    treap.upsert(1, 0);
    int count = 0;
    treap.visit_range_in_order(0, 100, [&](Node* x) {
      count++;
      return true;
    });
    EXPECT_EQ(1, count);
    count = 0;
    treap.visit_in_order([&](Node* x) {
      count++;
      return true;
    });
    EXPECT_EQ(1, count);
    // Add an element outside of the range that should not be visited on the right side and
    // one on the left side.
    treap.upsert(101, 0);
    treap.upsert(-1, 0);
    count = 0;
    // Range visit still sees only the key 1; full visit sees all three.
    treap.visit_range_in_order(0, 100, [&](Node* x) {
      count++;
      return true;
    });
    EXPECT_EQ(1, count);
    count = 0;
    treap.visit_in_order([&](Node* x) {
      count++;
      return true;
    });
    EXPECT_EQ(3, count);
    // Visiting empty range [0, 0) == {}
    treap.upsert(0, 0); // This node should not be visited.
    treap.visit_range_in_order(0, 0, [&](Node* x) {
      EXPECT_TRUE(false) << "Empty visiting range should not visit any node";
      return true;
    });
    treap.remove_all();
    // Keys 0..10; the half-open range [0, 10) excludes key 10.
    for (int i = 0; i < 11; i++) {
      treap.upsert(i, 0);
    }
    ResourceMark rm;
    GrowableArray<int> seen;
    treap.visit_range_in_order(0, 10, [&](Node* x) {
      seen.push(x->key());
      return true;
    });
    EXPECT_EQ(10, seen.length());
    for (int i = 0; i < 10; i++) {
      EXPECT_EQ(i, seen.at(i));
    }
    seen.clear();
    treap.visit_in_order([&](Node* x) {
      seen.push(x->key());
      return true;
    });
    EXPECT_EQ(11, seen.length());
    for (int i = 0; i < 10; i++) {
      EXPECT_EQ(i, seen.at(i));
    }
    seen.clear();
    // Range [10, 12) contains only the key 10.
    treap.visit_range_in_order(10, 12, [&](Node* x) {
      seen.push(x->key());
      return true;
    });
    EXPECT_EQ(1, seen.length());
    EXPECT_EQ(10, seen.at(0));
  }
  { // Test with descending ordering
    TreapCHeap<int, int, CmpInverse> treap;
    using Node = TreapCHeap<int, int, CmpInverse>::TreapNode;
    for (int i = 0; i < 10; i++) {
      treap.upsert(i, 0);
    }
    ResourceMark rm;
    GrowableArray<int> seen;
    // With an inverted comparator, "ascending" visits yield 9 down to 0,
    // and the range bounds are given in comparator order (from=9, to=-1).
    treap.visit_range_in_order(9, -1, [&](Node* x) {
      seen.push(x->key());
      return true;
    });
    EXPECT_EQ(10, seen.length());
    for (int i = 0; i < 10; i++) {
      EXPECT_EQ(10-i-1, seen.at(i));
    }
    seen.clear();
    treap.visit_in_order([&](Node* x) {
      seen.push(x->key());
      return true;
    });
    EXPECT_EQ(10, seen.length());
    for (int i = 0; i < 10; i++) {
      EXPECT_EQ(10 - i - 1, seen.at(i));
    }
  }
}
// find() must return nullptr for absent keys and the matching node otherwise.
TEST_VM_F(NMTTreapTest, TestFind) {
  test_find();
}
// closest_leq() must return the largest node whose key is <= the query,
// or nullptr when no such node exists.
TEST_VM_F(NMTTreapTest, TestClosestLeq) {
  using Node = TreapCHeap<int, int, Cmp>::TreapNode;
  {
    TreapCHeap<int, int, Cmp> treap;
    // Empty treap: no predecessor exists.
    Node* node = treap.closest_leq(0);
    EXPECT_EQ(nullptr, node);
    // An exact match is the best possible answer.
    treap.upsert(0, 0);
    node = treap.closest_leq(0);
    EXPECT_EQ(0, node->key());
    // A smaller key must not displace the exact match.
    treap.upsert(-1, -1);
    node = treap.closest_leq(0);
    EXPECT_EQ(0, node->key());
    treap.upsert(6, 0);
    node = treap.closest_leq(6);
    EXPECT_EQ(6, node->key());
    // A query below the smallest key finds nothing.
    node = treap.closest_leq(-2);
    EXPECT_EQ(nullptr, node);
  }
}
#ifdef ASSERT
// Stress-tests the treap invariants (via verify_it) under randomized churn
// and under bulk insertion.
TEST_VM_F(NMTTreapTest, VerifyItThroughStressTest) {
  { // Repeatedly verify a treap of moderate size
    TreapCHeap<int, int, Cmp> treap;
    constexpr const int ten_thousand = 10000;
    // Two identical rounds of randomized upserts/removes: the first round
    // verifies while the treap grows from empty, the second while it mutates
    // an already-populated treap. (The rounds were previously duplicated
    // verbatim; folding them into a loop consumes os::random() in the same
    // order and is behaviorally identical.)
    for (int round = 0; round < 2; round++) {
      for (int i = 0; i < ten_thousand; i++) {
        int r = os::random();
        if (r % 2 == 0) {
          treap.upsert(i, i);
        } else {
          treap.remove(i);
        }
        // Verifying every iteration would be quadratic; sample periodically.
        if (i % 100 == 0) {
          verify_it(treap);
        }
      }
    }
  }
  { // Make a very large treap and verify at the end
    struct Nothing {};
    TreapCHeap<int, Nothing, Cmp> treap;
    constexpr const int one_hundred_thousand = 100000;
    for (int i = 0; i < one_hundred_thousand; i++) {
      treap.upsert(i, Nothing());
    }
    verify_it(treap);
  }
}
// Helper value type whose destructor records that it has run, used to check
// that the treap destroys stored values. The flag is a single static shared
// by all NTD instances (including copies), so tests must reset it carefully.
struct NTD {
  static bool has_run_destructor;
  ~NTD() {
    has_run_destructor = true;
  }
};
bool NTD::has_run_destructor = false;
// The treap must run the destructor of a stored value both when its node is
// removed and when the whole treap is destroyed.
//
// NOTE: the original version kept the source NTD local alive across the
// EXPECTs, so ~NTD() of that local (or of any by-value parameter copy made
// by upsert) could set has_run_destructor and make the assertions pass even
// if the treap never destroyed its stored copy. Here the flag is reset only
// after every non-treap-owned NTD is dead, so the assertions can only be
// satisfied by the treap itself.
TEST_VM_F(NMTTreapTest, ValueDestructorsAreRun) {
  { // Removing a node destroys its value.
    TreapCHeap<int, NTD, Cmp> treap;
    {
      NTD ntd;
      treap.upsert(0, ntd);
    } // ntd and any temporaries are gone here; only the stored copy remains.
    NTD::has_run_destructor = false;
    treap.remove(0);
    EXPECT_TRUE(NTD::has_run_destructor);
  }
  { // Destroying the treap destroys any remaining values.
    {
      TreapCHeap<int, NTD, Cmp> treap;
      {
        NTD ntd;
        treap.upsert(0, ntd);
      }
      NTD::has_run_destructor = false; // Only the stored copy remains.
    } // Treap destroyed here.
    EXPECT_TRUE(NTD::has_run_destructor);
  }
}
#endif // ASSERT

View File

@ -31,7 +31,7 @@
#include "unittest.hpp"
using Tree = VMATree;
using TNode = Tree::TreapNode;
using TNode = Tree::TNode;
using NCS = NativeCallStackStorage;
class NMTVMATreeTest : public testing::Test {
@ -54,16 +54,16 @@ public:
// Utilities
VMATree::TreapNode* treap_root(VMATree& tree) {
return tree._tree._root;
VMATree::TNode* rbtree_root(VMATree& tree) {
return static_cast<VMATree::TNode*>(tree._tree._root);
}
VMATree::VMATreap& treap(VMATree& tree) {
VMATree::VMARBTree& rbtree(VMATree& tree) {
return tree._tree;
}
VMATree::TreapNode* find(VMATree::VMATreap& treap, const VMATree::position key) {
return treap.find(treap._root, key);
VMATree::TNode* find(VMATree::VMARBTree& rbtree, const VMATree::position key) {
return rbtree.find_node(key);
}
NativeCallStack make_stack(size_t a) {
@ -71,17 +71,17 @@ public:
return stack;
}
VMATree::StateType in_type_of(VMATree::TreapNode* x) {
VMATree::StateType in_type_of(VMATree::TNode* x) {
return x->val().in.type();
}
VMATree::StateType out_type_of(VMATree::TreapNode* x) {
VMATree::StateType out_type_of(VMATree::TNode* x) {
return x->val().out.type();
}
int count_nodes(Tree& tree) {
int count = 0;
treap(tree).visit_in_order([&](TNode* x) {
rbtree(tree).visit_in_order([&](TNode* x) {
++count;
return true;
});
@ -123,14 +123,14 @@ public:
for (int i = 0; i < 10; i++) {
tree.release_mapping(i * 100, 100);
}
EXPECT_EQ(nullptr, treap_root(tree));
EXPECT_EQ(nullptr, rbtree_root(tree));
// Other way around
tree.reserve_mapping(0, 100 * 10, rd);
for (int i = 9; i >= 0; i--) {
tree.release_mapping(i * 100, 100);
}
EXPECT_EQ(nullptr, treap_root(tree));
EXPECT_EQ(nullptr, rbtree_root(tree));
}
// Committing in a whole reserved range results in 2 nodes
@ -140,7 +140,7 @@ public:
for (int i = 0; i < 10; i++) {
tree.commit_mapping(i * 100, 100, rd);
}
treap(tree).visit_in_order([&](TNode* x) {
rbtree(tree).visit_in_order([&](TNode* x) {
VMATree::StateType in = in_type_of(x);
VMATree::StateType out = out_type_of(x);
EXPECT_TRUE((in == VMATree::StateType::Released && out == VMATree::StateType::Committed) ||
@ -166,7 +166,7 @@ public:
};
int i = 0;
treap(tree).visit_in_order([&](TNode* x) {
rbtree(tree).visit_in_order([&](TNode* x) {
if (i < 16) {
found[i] = x->key();
}
@ -199,7 +199,7 @@ public:
};
void call_update_region(const UpdateCallInfo upd) {
VMATree::TreapNode n1{upd.req.A, {}, 0}, n2{upd.req.B, {}, 0};
VMATree::TNode n1{upd.req.A, {}}, n2{upd.req.B, {}};
n1.val().out= upd.ex_st;
n2.val().in = n1.val().out;
Tree tree;
@ -264,7 +264,7 @@ public:
template <int N>
void check_tree(Tree& tree, const ExpectedTree<N>& et, int line_no) {
using Node = VMATree::TreapNode;
using Node = VMATree::TNode;
auto left_released = [&](Node n) -> bool {
return n.val().in.type() == VMATree::StateType::Released and
n.val().in.mem_tag() == mtNone;
@ -274,7 +274,7 @@ public:
n.val().out.mem_tag() == mtNone;
};
for (int i = 0; i < N; i++) {
VMATree::VMATreap::Range r = tree.tree().find_enclosing_range(et.nodes[i]);
VMATree::VMARBTree::Range r = tree.tree().find_enclosing_range(et.nodes[i]);
ASSERT_TRUE(r.start != nullptr);
Node node = *r.start;
ASSERT_EQ(node.key(), (VMATree::position)et.nodes[i]) << "at line " << line_no;
@ -354,7 +354,7 @@ TEST_VM_F(NMTVMATreeTest, DuplicateReserve) {
tree.reserve_mapping(100, 100, rd);
tree.reserve_mapping(100, 100, rd);
EXPECT_EQ(2, count_nodes(tree));
VMATree::VMATreap::Range r = tree.tree().find_enclosing_range(110);
VMATree::VMARBTree::Range r = tree.tree().find_enclosing_range(110);
EXPECT_EQ(100, (int)(r.end->key() - r.start->key()));
}
@ -369,7 +369,7 @@ TEST_VM_F(NMTVMATreeTest, UseTagInplace) {
// post-cond: 0---20**30--40**70----100
tree.commit_mapping(20, 50, rd_None_cs1, true);
tree.uncommit_mapping(30, 10, rd_None_cs1);
tree.visit_in_order([&](TNode* node) {
tree.visit_in_order([&](const TNode* node) {
if (node->key() != 100) {
EXPECT_EQ(mtTest, node->val().out.mem_tag()) << "failed at: " << node->key();
if (node->key() != 20 && node->key() != 40) {
@ -410,9 +410,9 @@ TEST_VM_F(NMTVMATreeTest, LowLevel) {
VMATree::RegionData rd_NMT_cs1{si[1], mtNMT};
tree.commit_mapping(50, 50, rd_NMT_cs1);
tree.reserve_mapping(0, 100, rd_Test_cs0);
treap(tree).visit_in_order([&](TNode* x) {
rbtree(tree).visit_in_order([&](const TNode* x) {
EXPECT_TRUE(x->key() == 0 || x->key() == 100);
if (x->key() == 0) {
if (x->key() == 0UL) {
EXPECT_EQ(x->val().out.reserved_regiondata().mem_tag, mtTest);
}
return true;
@ -438,7 +438,7 @@ TEST_VM_F(NMTVMATreeTest, LowLevel) {
tree.reserve_mapping(0, 500000, rd_NMT_cs0);
tree.release_mapping(0, 500000);
EXPECT_EQ(nullptr, treap_root(tree));
EXPECT_EQ(nullptr, rbtree_root(tree));
}
{ // A committed region inside of/replacing a reserved region
@ -448,7 +448,7 @@ TEST_VM_F(NMTVMATreeTest, LowLevel) {
Tree tree;
tree.reserve_mapping(0, 100, rd_NMT_cs0);
tree.commit_mapping(0, 100, rd_Test_cs1);
treap(tree).visit_range_in_order(0, 99999, [&](TNode* x) {
rbtree(tree).visit_range_in_order(0, 99999, [&](TNode* x) {
if (x->key() == 0) {
EXPECT_EQ(mtTest, x->val().out.reserved_regiondata().mem_tag);
}
@ -463,9 +463,9 @@ TEST_VM_F(NMTVMATreeTest, LowLevel) {
Tree tree;
VMATree::RegionData rd_NMT_cs0{si[0], mtNMT};
tree.reserve_mapping(0, 0, rd_NMT_cs0);
EXPECT_EQ(nullptr, treap_root(tree));
EXPECT_EQ(nullptr, rbtree_root(tree));
tree.commit_mapping(0, 0, rd_NMT_cs0);
EXPECT_EQ(nullptr, treap_root(tree));
EXPECT_EQ(nullptr, rbtree_root(tree));
}
}
@ -483,14 +483,14 @@ TEST_VM_F(NMTVMATreeTest, SetTag) {
auto expect_equivalent_form = [&](auto& expected, VMATree& tree, int line_no) {
// With auto& our arrays do not deteriorate to pointers but are kept as testrange[N]
// so this actually works!
int len = sizeof(expected) / sizeof(testrange);
size_t len = sizeof(expected) / sizeof(testrange);
VMATree::position previous_to = 0;
for (int i = 0; i < len; i++) {
for (size_t i = 0; i < len; i++) {
testrange expect = expected[i];
assert(previous_to == 0 || previous_to <= expect.from, "the expected list must be sorted");
previous_to = expect.to;
VMATree::VMATreap::Range found = tree.tree().find_enclosing_range(expect.from);
VMATree::VMARBTree::Range found = tree.tree().find_enclosing_range(expect.from);
ASSERT_NE(nullptr, found.start);
ASSERT_NE(nullptr, found.end);
// Same region
@ -993,10 +993,10 @@ TEST_VM_F(NMTVMATreeTest, TestConsistencyWithSimpleTracker) {
ASSERT_LE(end, SimpleVMATracker::num_pages);
SimpleVMATracker::Info endi = tr->pages[end];
VMATree::VMATreap& treap = this->treap(tree);
VMATree::TreapNode* startn = find(treap, start * page_size);
VMATree::VMARBTree& rbtree = this->rbtree(tree);
VMATree::TNode* startn = find(rbtree, start * page_size);
ASSERT_NE(nullptr, startn);
VMATree::TreapNode* endn = find(treap, (end * page_size) + page_size);
VMATree::TNode* endn = find(rbtree, (end * page_size) + page_size);
ASSERT_NE(nullptr, endn);
const NativeCallStack& start_stack = ncss.get(startn->val().out.reserved_stack());

View File

@ -129,6 +129,7 @@ public:
rbtree_const.visit_in_order([&](const RBTreeIntNode* node) {
nums_seen.at(node->key())++;
return true;
});
for (int i = 0; i < up_to; i++) {
EXPECT_EQ(1, nums_seen.at(i));
@ -210,6 +211,7 @@ public:
rbtree_const.visit_range_in_order(0, 100, [&](const Node* x) {
EXPECT_TRUE(false) << "Empty rbtree has no nodes to visit";
return true;
});
// Single-element set
@ -217,14 +219,21 @@ public:
int count = 0;
rbtree_const.visit_range_in_order(0, 100, [&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(1, count);
count = 0;
rbtree_const.visit_in_order([&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(1, count);
rbtree.visit_in_order([&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(2, count);
// Add an element outside of the range that should not be visited on the right side and
// one on the left side.
@ -233,21 +242,62 @@ public:
count = 0;
rbtree_const.visit_range_in_order(0, 100, [&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(1, count);
rbtree.visit_range_in_order(0, 100, [&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(2, count);
count = 0;
rbtree_const.visit_in_order([&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(3, count);
rbtree.visit_in_order([&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(6, count);
count = 0;
rbtree.upsert(0, 0);
rbtree_const.visit_range_in_order(0, 0, [&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(1, count);
rbtree.visit_range_in_order(0, 0, [&](const Node* x) {
count++;
return true;
});
EXPECT_EQ(2, count);
// Test exiting visit early
rbtree.remove_all();
for (int i = 0; i < 11; i++) {
rbtree.upsert(i, 0);
}
count = 0;
rbtree_const.visit_in_order([&](const Node* x) {
if (x->key() >= 6) return false;
count++;
return true;
});
EXPECT_EQ(6, count);
count = 0;
rbtree_const.visit_range_in_order(6, 10, [&](const Node* x) {
if (x->key() >= 6) return false;
count++;
return true;
});
EXPECT_EQ(0, count);
rbtree.remove_all();
for (int i = 0; i < 11; i++) {
@ -258,6 +308,7 @@ public:
GrowableArray<int> seen;
rbtree_const.visit_range_in_order(0, 9, [&](const Node* x) {
seen.push(x->key());
return true;
});
EXPECT_EQ(10, seen.length());
for (int i = 0; i < 10; i++) {
@ -267,6 +318,7 @@ public:
seen.clear();
rbtree_const.visit_in_order([&](const Node* x) {
seen.push(x->key());
return true;
});
EXPECT_EQ(11, seen.length());
for (int i = 0; i < 10; i++) {
@ -276,6 +328,7 @@ public:
seen.clear();
rbtree_const.visit_range_in_order(10, 12, [&](const Node* x) {
seen.push(x->key());
return true;
});
EXPECT_EQ(1, seen.length());
EXPECT_EQ(10, seen.at(0));
@ -292,6 +345,7 @@ public:
GrowableArray<int> seen;
rbtree_const.visit_range_in_order(9, -1, [&](const Node* x) {
seen.push(x->key());
return true;
});
EXPECT_EQ(10, seen.length());
for (int i = 0; i < 10; i++) {
@ -301,6 +355,7 @@ public:
rbtree_const.visit_in_order([&](const Node* x) {
seen.push(x->key());
return true;
});
EXPECT_EQ(10, seen.length());
for (int i = 0; i < 10; i++) {
@ -320,9 +375,12 @@ public:
{4, 4}, {6, 6}, {6, 7}, {7, 7}};
for (const int (&test_case)[2] : test_cases) {
rbtree.visit_range_in_order(test_case[0], test_case[1], [&](const Node* x) {
FAIL() << "Range should not visit nodes";
bool visited = false;
rbtree.visit_range_in_order(test_case[0], test_case[1], [&](const Node* x) -> bool {
visited = true;
return true;
});
EXPECT_FALSE(visited);
}
}
@ -515,6 +573,7 @@ public:
// After deleting, values should have remained consistant
rbtree.visit_in_order([&](const Node* node) {
EXPECT_EQ(node, node->val());
return true;
});
}