From 3cc459b6c2f571987dc36fd548a2b830f0b33a0a Mon Sep 17 00:00:00 2001 From: Justin King Date: Thu, 16 Feb 2023 14:40:38 +0000 Subject: [PATCH] 8302595: use-after-free related to GraphKit::clone_map Reviewed-by: kvn, thartmann --- src/hotspot/share/opto/compile.hpp | 3 ++- src/hotspot/share/opto/graphKit.cpp | 23 +++++++++++++++++++++ src/hotspot/share/opto/graphKit.hpp | 6 ++++++ src/hotspot/share/opto/library_call.cpp | 6 +++--- src/hotspot/share/opto/node.hpp | 5 +++++ src/hotspot/share/opto/phaseX.hpp | 5 +++++ src/hotspot/share/opto/vectorIntrinsics.cpp | 6 +++--- 7 files changed, 47 insertions(+), 7 deletions(-) diff --git a/src/hotspot/share/opto/compile.hpp b/src/hotspot/share/opto/compile.hpp index c486a36828a..665d5861ee0 100644 --- a/src/hotspot/share/opto/compile.hpp +++ b/src/hotspot/share/opto/compile.hpp @@ -936,7 +936,8 @@ class Compile : public Phase { // Parsing, optimization PhaseGVN* initial_gvn() { return _initial_gvn; } Unique_Node_List* for_igvn() { return _for_igvn; } - inline void record_for_igvn(Node* n); // Body is after class Unique_Node_List. + inline void record_for_igvn(Node* n); // Body is after class Unique_Node_List in node.hpp. + inline void remove_for_igvn(Node* n); // Body is after class Unique_Node_List in node.hpp. void set_initial_gvn(PhaseGVN *gvn) { _initial_gvn = gvn; } void set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; } diff --git a/src/hotspot/share/opto/graphKit.cpp b/src/hotspot/share/opto/graphKit.cpp index c7df38045c8..b9bf66ba8ab 100644 --- a/src/hotspot/share/opto/graphKit.cpp +++ b/src/hotspot/share/opto/graphKit.cpp @@ -735,6 +735,29 @@ SafePointNode* GraphKit::clone_map() { return clonemap; } +//-----------------------------destruct_map_clone------------------------------ +// +// Order of destruct is important to increase the likelihood that memory can be re-used. We need +// to destruct/free/delete in the exact opposite order as clone_map(). 
+void GraphKit::destruct_map_clone(SafePointNode* sfp) { + if (sfp == nullptr) return; + + Node* mem = sfp->memory(); + JVMState* jvms = sfp->jvms(); + + if (jvms != nullptr) { + delete jvms; + } + + remove_for_igvn(sfp); + gvn().clear_type(sfp); + sfp->destruct(&_gvn); + + if (mem != nullptr) { + gvn().clear_type(mem); + mem->destruct(&_gvn); + } +} //-----------------------------set_map_clone----------------------------------- void GraphKit::set_map_clone(SafePointNode* m) { diff --git a/src/hotspot/share/opto/graphKit.hpp b/src/hotspot/share/opto/graphKit.hpp index 1cdeac78134..731223f491e 100644 --- a/src/hotspot/share/opto/graphKit.hpp +++ b/src/hotspot/share/opto/graphKit.hpp @@ -94,6 +94,7 @@ class GraphKit : public Phase { void* barrier_set_state() const { return C->barrier_set_state(); } void record_for_igvn(Node* n) const { C->record_for_igvn(n); } // delegate to Compile + void remove_for_igvn(Node* n) const { C->remove_for_igvn(n); } // Handy well-known nodes: Node* null() const { return zerocon(T_OBJECT); } @@ -170,6 +171,11 @@ class GraphKit : public Phase { // Clone the existing map state. (Implements PreserveJVMState.) SafePointNode* clone_map(); + // Reverses the work done by clone_map(). Should only be used when the node returned by + // clone_map() is ultimately not used. Calling Node::destruct directly in the previously + // mentioned circumstance instead of this method may result in use-after-free. + void destruct_map_clone(SafePointNode* sfp); + // Set the map to a clone of the given one. 
void set_map_clone(SafePointNode* m); diff --git a/src/hotspot/share/opto/library_call.cpp b/src/hotspot/share/opto/library_call.cpp index a481045dc1f..06653608f14 100644 --- a/src/hotspot/share/opto/library_call.cpp +++ b/src/hotspot/share/opto/library_call.cpp @@ -1646,7 +1646,7 @@ bool LibraryCallKit::inline_string_char_access(bool is_store) { set_sp(old_sp); return false; } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); if (is_store) { access_store_at(value, adr, TypeAryPtr::BYTES, ch, TypeInt::CHAR, T_CHAR, IN_HEAP | MO_UNORDERED | C2_MISMATCHED); } else { @@ -2361,7 +2361,7 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched"); if (mismatched) { @@ -2612,7 +2612,7 @@ bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadSt return false; } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); // For CAS, unlike inline_unsafe_access, there seems no point in // trying to refine types. Just use the coarse types here. diff --git a/src/hotspot/share/opto/node.hpp b/src/hotspot/share/opto/node.hpp index 16ab1161a02..12fde6db48b 100644 --- a/src/hotspot/share/opto/node.hpp +++ b/src/hotspot/share/opto/node.hpp @@ -1672,6 +1672,11 @@ inline void Compile::record_for_igvn(Node* n) { _for_igvn->push(n); } +// Inline definition of Compile::remove_for_igvn must be deferred to this point. 
+inline void Compile::remove_for_igvn(Node* n) { + _for_igvn->remove(n); +} + //------------------------------Node_Stack------------------------------------- class Node_Stack { friend class VMStructs; diff --git a/src/hotspot/share/opto/phaseX.hpp b/src/hotspot/share/opto/phaseX.hpp index 4b8b438f598..5e3efcf5982 100644 --- a/src/hotspot/share/opto/phaseX.hpp +++ b/src/hotspot/share/opto/phaseX.hpp @@ -239,6 +239,11 @@ public: assert(t != NULL, "type must not be null"); _types.map(n->_idx, t); } + void clear_type(const Node* n) { + if (n->_idx < _types.Size()) { + _types.map(n->_idx, NULL); + } + } // Record an initial type for a node, the node's bottom type. void set_type_bottom(const Node* n) { // Use this for initialization when bottom_type() (or better) is not handy. diff --git a/src/hotspot/share/opto/vectorIntrinsics.cpp b/src/hotspot/share/opto/vectorIntrinsics.cpp index d088cfb1d0f..717b4264020 100644 --- a/src/hotspot/share/opto/vectorIntrinsics.cpp +++ b/src/hotspot/share/opto/vectorIntrinsics.cpp @@ -1113,7 +1113,7 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) { set_result(box); } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); if (needs_cpu_membar) { insert_mem_bar(Op_MemBarCPUOrder); @@ -1372,7 +1372,7 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) { set_result(box); } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); if (can_access_non_heap) { insert_mem_bar(Op_MemBarCPUOrder); @@ -1585,7 +1585,7 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) { set_result(box); } - old_map->destruct(&_gvn); + destruct_map_clone(old_map); C->set_max_vector_size(MAX2(C->max_vector_size(), (uint)(num_elem * type2aelembytes(elem_bt)))); return true;