mirror of
https://github.com/openjdk/jdk.git
synced 2026-03-06 14:10:36 +00:00
8302595: use-after-free related to GraphKit::clone_map
Reviewed-by: kvn, thartmann
This commit is contained in:
parent
2e3cea01da
commit
3cc459b6c2
@ -936,7 +936,8 @@ class Compile : public Phase {
|
||||
// Parsing, optimization
// GVN phase used during initial parsing; installed via set_initial_gvn().
PhaseGVN* initial_gvn() { return _initial_gvn; }
// Worklist of nodes queued for incremental GVN; installed via set_for_igvn()
// and maintained through record_for_igvn()/remove_for_igvn().
Unique_Node_List* for_igvn() { return _for_igvn; }
|
||||
inline void record_for_igvn(Node* n); // Body is after class Unique_Node_List.
|
||||
inline void record_for_igvn(Node* n); // Body is after class Unique_Node_List in node.hpp.
|
||||
inline void remove_for_igvn(Node* n); // Body is after class Unique_Node_List in node.hpp.
|
||||
// Install the GVN phase used during parsing (see initial_gvn()).
void set_initial_gvn(PhaseGVN *gvn) { _initial_gvn = gvn; }
// Install the IGVN worklist consumed by record_for_igvn()/remove_for_igvn().
void set_for_igvn(Unique_Node_List *for_igvn) { _for_igvn = for_igvn; }
|
||||
|
||||
|
||||
@ -735,6 +735,29 @@ SafePointNode* GraphKit::clone_map() {
|
||||
return clonemap;
|
||||
}
|
||||
|
||||
//-----------------------------destruct_map_clone------------------------------
//
// Tears down a map clone produced by clone_map() that is ultimately not used.
// Order of destruct is important to increase the likelihood that memory can be
// re-used. We need to destruct/free/delete in the exact opposite order as
// clone_map().
void GraphKit::destruct_map_clone(SafePointNode* sfp) {
  if (sfp == nullptr) return;  // nothing to tear down

  // Capture the pieces that must be freed *after* the SafePointNode itself.
  Node* mem = sfp->memory();
  JVMState* jvms = sfp->jvms();

  // Free the JVMState first (cloned last by clone_map, presumably -- reverse
  // order per the header comment above).
  if (jvms != nullptr) {
    delete jvms;
  }

  // Drop bookkeeping references before destructing the node: remove it from
  // the IGVN worklist and clear its cached type, so no stale pointers to the
  // destructed node remain. (Calling Node::destruct directly would leave these
  // behind -- the use-after-free this method exists to prevent.)
  remove_for_igvn(sfp);
  gvn().clear_type(sfp);
  sfp->destruct(&_gvn);

  // Finally destruct the memory node captured above, clearing its cached type
  // for the same reason.
  if (mem != nullptr) {
    gvn().clear_type(mem);
    mem->destruct(&_gvn);
  }
}
|
||||
|
||||
//-----------------------------set_map_clone-----------------------------------
|
||||
void GraphKit::set_map_clone(SafePointNode* m) {
|
||||
|
||||
@ -94,6 +94,7 @@ class GraphKit : public Phase {
|
||||
// Opaque per-barrier-set state, forwarded from the Compile object.
void* barrier_set_state() const { return C->barrier_set_state(); }

// IGVN worklist maintenance; both delegate to the Compile object.
void record_for_igvn(Node* n) const { C->record_for_igvn(n); } // delegate to Compile
void remove_for_igvn(Node* n) const { C->remove_for_igvn(n); }
|
||||
|
||||
// Handy well-known nodes:
// The canonical null-pointer constant (a zero constant of type T_OBJECT).
Node* null() const { return zerocon(T_OBJECT); }
|
||||
@ -170,6 +171,11 @@ class GraphKit : public Phase {
|
||||
// Clone the existing map state. (Implements PreserveJVMState.)
|
||||
SafePointNode* clone_map();
|
||||
|
||||
// Reverses the work done by clone_map(). Should only be used when the node returned by
|
||||
// clone_map() is ultimately not used. Calling Node::destruct directly in the previously
|
||||
// mentioned circumstance instead of this method may result in use-after-free.
|
||||
void destruct_map_clone(SafePointNode* sfp);
|
||||
|
||||
// Set the map to a clone of the given one.
|
||||
void set_map_clone(SafePointNode* m);
|
||||
|
||||
|
||||
@ -1646,7 +1646,7 @@ bool LibraryCallKit::inline_string_char_access(bool is_store) {
|
||||
set_sp(old_sp);
|
||||
return false;
|
||||
}
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
if (is_store) {
|
||||
access_store_at(value, adr, TypeAryPtr::BYTES, ch, TypeInt::CHAR, T_CHAR, IN_HEAP | MO_UNORDERED | C2_MISMATCHED);
|
||||
} else {
|
||||
@ -2361,7 +2361,7 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
|
||||
mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
|
||||
}
|
||||
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
|
||||
|
||||
if (mismatched) {
|
||||
@ -2612,7 +2612,7 @@ bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadSt
|
||||
return false;
|
||||
}
|
||||
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
|
||||
// For CAS, unlike inline_unsafe_access, there seems no point in
|
||||
// trying to refine types. Just use the coarse types here.
|
||||
|
||||
@ -1672,6 +1672,11 @@ inline void Compile::record_for_igvn(Node* n) {
|
||||
_for_igvn->push(n);
|
||||
}
|
||||
|
||||
// Inline definition of Compile::remove_for_igvn must be deferred to this point,
// since it needs the complete Unique_Node_List type (declared after class
// Compile -- see the declaration comment in compile.hpp).
// Removes n from the IGVN worklist; counterpart of record_for_igvn().
inline void Compile::remove_for_igvn(Node* n) {
  _for_igvn->remove(n);
}
|
||||
|
||||
//------------------------------Node_Stack-------------------------------------
|
||||
class Node_Stack {
|
||||
friend class VMStructs;
|
||||
|
||||
@ -239,6 +239,11 @@ public:
|
||||
assert(t != NULL, "type must not be null");
|
||||
_types.map(n->_idx, t);
|
||||
}
|
||||
void clear_type(const Node* n) {
|
||||
if (n->_idx < _types.Size()) {
|
||||
_types.map(n->_idx, NULL);
|
||||
}
|
||||
}
|
||||
// Record an initial type for a node, the node's bottom type.
|
||||
void set_type_bottom(const Node* n) {
|
||||
// Use this for initialization when bottom_type() (or better) is not handy.
|
||||
|
||||
@ -1113,7 +1113,7 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
|
||||
set_result(box);
|
||||
}
|
||||
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
|
||||
if (needs_cpu_membar) {
|
||||
insert_mem_bar(Op_MemBarCPUOrder);
|
||||
@ -1372,7 +1372,7 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
|
||||
set_result(box);
|
||||
}
|
||||
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
|
||||
if (can_access_non_heap) {
|
||||
insert_mem_bar(Op_MemBarCPUOrder);
|
||||
@ -1585,7 +1585,7 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
|
||||
set_result(box);
|
||||
}
|
||||
|
||||
old_map->destruct(&_gvn);
|
||||
destruct_map_clone(old_map);
|
||||
|
||||
C->set_max_vector_size(MAX2(C->max_vector_size(), (uint)(num_elem * type2aelembytes(elem_bt))));
|
||||
return true;
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user