8359344: C2: Malformed control flow after intrinsic bailout

Reviewed-by: thartmann, kvn
This commit is contained in:
Marc Chevalier 2025-07-11 07:07:27 +00:00
parent 529049be6b
commit 3ffc5b9ef7
7 changed files with 282 additions and 117 deletions

View File

@ -330,6 +330,7 @@ public:
class SafePointNode : public MultiNode {
friend JVMState;
friend class GraphKit;
friend class LibraryCallKit;
virtual bool cmp( const Node &n ) const;
virtual uint size_of() const; // Size is bigger

View File

@ -135,6 +135,7 @@ JVMState* LibraryIntrinsic::generate(JVMState* jvms) {
// The intrinsic bailed out
assert(ctrl == kit.control(), "Control flow was added although the intrinsic bailed out");
assert(jvms->map() == kit.map(), "Out of sync JVM state");
if (jvms->has_method()) {
// Not a root compile.
const char* msg;
@ -1724,18 +1725,15 @@ bool LibraryCallKit::inline_string_char_access(bool is_store) {
}
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
value = must_be_not_null(value, true);
Node* adr = array_element_address(value, index, T_CHAR);
if (adr->is_top()) {
set_map(old_map);
set_sp(old_sp);
return false;
}
destruct_map_clone(old_map);
old_state.discard();
if (is_store) {
access_store_at(value, adr, TypeAryPtr::BYTES, ch, TypeInt::CHAR, T_CHAR, IN_HEAP | MO_UNORDERED | C2_MISMATCHED);
} else {
@ -2373,6 +2371,47 @@ DecoratorSet LibraryCallKit::mo_decorator_for_access_kind(AccessKind kind) {
}
}
// Snapshot the kit's current state: sp, JVMState, a clone of the map, and the
// set of CFG nodes already hanging off the current control node. The destructor
// uses this snapshot to roll back changes made before an intrinsic bailout.
LibraryCallKit::SavedState::SavedState(LibraryCallKit* kit) :
  _kit(kit),
  _sp(kit->sp()),
  _jvms(kit->jvms()),
  _map(kit->clone_map()),
  _discarded(false)
{
  // Record the pre-existing CFG successors of control so the destructor can
  // tell freshly added ones apart from the ones that were already there.
  Node* ctrl = kit->control();
  for (DUIterator_Fast kmax, k = ctrl->fast_outs(kmax); k < kmax; k++) {
    Node* use = ctrl->fast_out(k);
    if (use->is_CFG()) {
      _ctrl_succ.push(use);
    }
  }
}
LibraryCallKit::SavedState::~SavedState() {
// If the intrinsic committed (discard() was called), keep the current state
// and just free the saved map clone.
if (_discarded) {
_kit->destruct_map_clone(_map);
return;
}
// Bailout path: reinstall the saved map and sp into both the JVMState and the
// kit, and re-link the map back to the JVMState.
_kit->jvms()->set_map(_map);
_kit->jvms()->set_sp(_sp);
_map->set_jvms(_kit->jvms());
_kit->set_map(_map);
_kit->set_sp(_sp);
// Disconnect CFG nodes that were attached to the (now restored) control node
// after the snapshot: any CFG use of control that takes control as in(0), is
// not the map itself, and was not recorded in _ctrl_succ. Pointing in(0) at
// top detaches it; IGVN cleans it up later.
for (DUIterator_Fast imax, i = _kit->control()->fast_outs(imax); i < imax; i++) {
Node* out = _kit->control()->fast_out(i);
if (out->is_CFG() && out->in(0) == _kit->control() && out != _kit->map() && !_ctrl_succ.member(out)) {
// Remove from the GVN hash table before mutating the node's inputs.
_kit->_gvn.hash_delete(out);
out->set_req(0, _kit->C->top());
_kit->C->record_for_igvn(out);
// set_req removed an out-edge of control: step the iterator bounds back.
--i; --imax;
_kit->_gvn.hash_find_insert(out);
}
}
}
// Mark the intrinsic as committed: the destructor will then keep the current
// state and only free the saved map clone instead of restoring the snapshot.
void LibraryCallKit::SavedState::discard() {
_discarded = true;
}
bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, const AccessKind kind, const bool unaligned) {
if (callee()->is_static()) return false; // caller must have the capability!
DecoratorSet decorators = C2_UNSAFE_ACCESS;
@ -2434,8 +2473,7 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
offset = ConvL2X(offset);
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
Node* adr = make_unsafe_address(base, offset, type, kind == Relaxed);
assert(!stopped(), "Inlining of unsafe access failed: address construction stopped unexpectedly");
@ -2444,8 +2482,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
if (type != T_OBJECT) {
decorators |= IN_NATIVE; // off-heap primitive access
} else {
set_map(old_map);
set_sp(old_sp);
return false; // off-heap oop accesses are not supported
}
} else {
@ -2463,8 +2499,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
const TypePtr* adr_type = _gvn.type(adr)->isa_ptr();
if (adr_type == TypePtr::NULL_PTR) {
set_map(old_map);
set_sp(old_sp);
return false; // off-heap access with zero address
}
@ -2474,8 +2508,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
if (alias_type->adr_type() == TypeInstPtr::KLASS ||
alias_type->adr_type() == TypeAryPtr::RANGE) {
set_map(old_map);
set_sp(old_sp);
return false; // not supported
}
@ -2494,8 +2526,6 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
}
if ((bt == T_OBJECT) != (type == T_OBJECT)) {
// Don't intrinsify mismatched object accesses
set_map(old_map);
set_sp(old_sp);
return false;
}
mismatched = (bt != type);
@ -2503,7 +2533,7 @@ bool LibraryCallKit::inline_unsafe_access(bool is_store, const BasicType type, c
mismatched = true; // conservatively mark all "wide" on-heap accesses as mismatched
}
destruct_map_clone(old_map);
old_state.discard();
assert(!mismatched || alias_type->adr_type()->is_oopptr(), "off-heap access can't be mismatched");
if (mismatched) {
@ -2739,8 +2769,7 @@ bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadSt
// 32-bit machines ignore the high half of long offsets
offset = ConvL2X(offset);
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
Node* adr = make_unsafe_address(base, offset,type, false);
const TypePtr *adr_type = _gvn.type(adr)->isa_ptr();
@ -2749,12 +2778,10 @@ bool LibraryCallKit::inline_unsafe_load_store(const BasicType type, const LoadSt
if (bt != T_ILLEGAL &&
(is_reference_type(bt) != (type == T_OBJECT))) {
// Don't intrinsify mismatched object accesses.
set_map(old_map);
set_sp(old_sp);
return false;
}
destruct_map_clone(old_map);
old_state.discard();
// For CAS, unlike inline_unsafe_access, there seems no point in
// trying to refine types. Just use the coarse types here.

View File

@ -129,6 +129,29 @@ class LibraryCallKit : public GraphKit {
virtual int reexecute_sp() { return _reexecute_sp; }
/* When an intrinsic makes changes before bailing out, it's necessary to restore the graph
 * as it was. See JDK-8359344 for what can go wrong. It's also not always possible to
 * bail out before making changes because the bailing-out decision might depend on new nodes
 * (their types, for instance).
 *
 * So, if an intrinsic might cause this situation, one must start by saving the state in a
 * SavedState by constructing it, and the state will be restored on destruction. If the
 * intrinsic is not bailing out, one needs to call discard() to prevent restoring the old state.
 */
// RAII guard for intrinsic bailouts: snapshots the kit's JVM state on
// construction and restores it on destruction unless discard() was called.
class SavedState {
LibraryCallKit* _kit;        // kit whose state is saved and possibly restored
uint _sp;                    // stack pointer at construction time
JVMState* _jvms;             // JVM state at construction time
SafePointNode* _map;         // clone of the map at construction time
Unique_Node_List _ctrl_succ; // CFG successors of control at construction time
bool _discarded;             // set by discard(); skips restoration in the dtor
public:
SavedState(LibraryCallKit*); // snapshots sp, jvms, a map clone, and control's CFG successors
~SavedState();               // restores the snapshot unless discarded; frees the clone
void discard();              // commit: keep the new state, drop the snapshot
};
// Helper functions to inline natives
Node* generate_guard(Node* test, RegionNode* region, float true_prob);
Node* generate_slow_guard(Node* test, RegionNode* region);

View File

@ -822,8 +822,7 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
Node* offset = ConvL2X(argument(4));
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
Node* addr = make_unsafe_address(base, offset, (is_mask ? T_BOOLEAN : elem_bt), true);
@ -860,8 +859,6 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d etype=%s atype=%s ismask=no",
is_store, is_store ? "store" : "load",
num_elem, type2name(elem_bt), type2name(arr_type->elem()->array_element_basic_type()));
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -873,8 +870,6 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d*8 etype=%s/8 ismask=no",
is_store, "store",
num_elem, type2name(elem_bt));
set_map(old_map);
set_sp(old_sp);
return false; // not supported
}
} else {
@ -883,8 +878,6 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d*8 etype=%s/8 ismask=no",
is_store, "load",
mem_num_elem, type2name(mem_elem_bt));
set_map(old_map);
set_sp(old_sp);
return false; // not supported
}
}
@ -892,14 +885,10 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
if (is_mask) {
if (!is_store) {
if (!arch_supports_vector(Op_LoadVector, num_elem, elem_bt, VecMaskUseLoad)) {
set_map(old_map);
set_sp(old_sp);
return false; // not supported
}
} else {
if (!arch_supports_vector(Op_StoreVector, num_elem, elem_bt, VecMaskUseStore)) {
set_map(old_map);
set_sp(old_sp);
return false; // not supported
}
}
@ -914,8 +903,6 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
if (is_store) {
Node* val = unbox_vector(argument(7), vbox_type, elem_bt, num_elem);
if (val == nullptr) {
set_map(old_map);
set_sp(old_sp);
return false; // operand unboxing failed
}
set_all_memory(reset_memory());
@ -952,7 +939,7 @@ bool LibraryCallKit::inline_vector_mem_operation(bool is_store) {
set_result(box);
}
destruct_map_clone(old_map);
old_state.discard();
if (needs_cpu_membar) {
insert_mem_bar(Op_MemBarCPUOrder);
@ -1029,8 +1016,7 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
Node* offset = ConvL2X(argument(5));
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
Node* addr = make_unsafe_address(base, offset, elem_bt, true);
const TypePtr *addr_type = gvn().type(addr)->isa_ptr();
@ -1043,8 +1029,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d etype=%s atype=%s",
is_store, is_store ? "storeMasked" : "loadMasked",
num_elem, type2name(elem_bt), type2name(arr_type->elem()->array_element_basic_type()));
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1066,8 +1050,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
if (!offset_in_range->is_con()) {
log_if_needed(" ** missing constant: offsetInRange=%s",
NodeClassNames[argument(8)->Opcode()]);
set_map(old_map);
set_sp(old_sp);
return false;
}
needs_predicate = (offset_in_range->get_con() == 0);
@ -1077,8 +1059,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
log_if_needed(" ** not supported: op=%s vlen=%d etype=%s mismatched_ms=%d",
is_store ? "storeMasked" : "loadMasked",
num_elem, type2name(elem_bt), mismatched_ms ? 1 : 0);
set_map(old_map);
set_sp(old_sp);
return false;
}
}
@ -1089,8 +1069,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
!arch_supports_vector(Op_VectorBlend, mem_num_elem, mem_elem_bt, VecMaskUseLoad))) {
log_if_needed(" ** not supported: op=loadMasked vlen=%d etype=%s mismatched_ms=%d",
num_elem, type2name(elem_bt), mismatched_ms ? 1 : 0);
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1101,8 +1079,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d etype=%s mismatched_ms=1",
is_store, is_store ? "storeMasked" : "loadMasked",
num_elem, type2name(elem_bt));
set_map(old_map);
set_sp(old_sp);
return false;
}
}
@ -1113,8 +1089,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d etype=%s",
is_store, is_store ? "storeMasked" : "loadMasked",
num_elem, type2name(elem_bt));
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1135,8 +1109,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
log_if_needed(" ** unbox failed mask=%s",
is_store ? NodeClassNames[argument(9)->Opcode()]
: NodeClassNames[argument(8)->Opcode()]);
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1145,8 +1117,6 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
if (val == nullptr) {
log_if_needed(" ** unbox failed vector=%s",
NodeClassNames[argument(8)->Opcode()]);
set_map(old_map);
set_sp(old_sp);
return false; // operand unboxing failed
}
set_all_memory(reset_memory());
@ -1193,7 +1163,7 @@ bool LibraryCallKit::inline_vector_mem_masked_operation(bool is_store) {
set_result(box);
}
destruct_map_clone(old_map);
old_state.discard();
if (can_access_non_heap) {
insert_mem_bar(Op_MemBarCPUOrder);
@ -1316,8 +1286,7 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
Node* offset = ConvL2X(argument(7));
// Save state and restore on bailout
uint old_sp = sp();
SafePointNode* old_map = clone_map();
SavedState old_state(this);
Node* addr = nullptr;
if (!is_subword_type(elem_bt)) {
@ -1339,8 +1308,6 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
log_if_needed(" ** not supported: arity=%d op=%s vlen=%d etype=%s atype=%s ismask=no",
is_scatter, is_scatter ? "scatter" : "gather",
num_elem, type2name(elem_bt), type2name(arr_type->elem()->array_element_basic_type()));
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1348,8 +1315,6 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
const TypeInstPtr* vbox_type = TypeInstPtr::make_exact(TypePtr::NotNull, vbox_klass);
ciKlass* vbox_idx_klass = vector_idx_klass->const_oop()->as_instance()->java_lang_Class_klass();
if (vbox_idx_klass == nullptr) {
set_map(old_map);
set_sp(old_sp);
return false;
}
@ -1364,8 +1329,6 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
// Get the first index vector.
indexes = unbox_vector(argument(9), vbox_idx_type, T_INT, idx_num_elem);
if (indexes == nullptr) {
set_map(old_map);
set_sp(old_sp);
return false;
}
}
@ -1378,8 +1341,6 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
mask = unbox_vector(m, mbox_type, elem_bt, num_elem);
if (mask == nullptr) {
log_if_needed(" ** unbox failed mask=%s", NodeClassNames[m->Opcode()]);
set_map(old_map);
set_sp(old_sp);
return false;
}
}
@ -1388,8 +1349,6 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
if (is_scatter) {
Node* val = unbox_vector(argument(10), vbox_type, elem_bt, num_elem);
if (val == nullptr) {
set_map(old_map);
set_sp(old_sp);
return false; // operand unboxing failed
}
set_all_memory(reset_memory());
@ -1412,7 +1371,7 @@ bool LibraryCallKit::inline_vector_gather_scatter(bool is_scatter) {
set_result(box);
}
destruct_map_clone(old_map);
old_state.discard();
C->set_max_vector_size(MAX2(C->max_vector_size(), (uint)(num_elem * type2aelembytes(elem_bt))));
return true;
}

View File

@ -0,0 +1,69 @@
/*
* Copyright (c) 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package compiler.intrinsics;
/*
* @test
* @bug 8359344
* @summary Intrinsic storeMasked can add some control flow before bailing out, leaving a malformed CFG.
* @modules jdk.incubator.vector
* @run main/othervm -XX:+UnlockDiagnosticVMOptions
* -XX:TypeProfileLevel=222 -Xbatch
* -XX:CompileCommand=compileonly,jdk.incubator.vector.Long*::intoArray0
* -XX:+AbortVMOnCompilationFailure
* compiler.intrinsics.VectorIntoArrayInvalidControlFlow
*
* @run main compiler.intrinsics.VectorIntoArrayInvalidControlFlow
*/
import jdk.incubator.vector.*;
// Regression test for JDK-8359344: the storeMasked intrinsic could add control
// flow before bailing out, leaving C2 with a malformed CFG. The exact shape
// (a masked intoArray in a hot loop) is what triggers the bailout; keep as-is.
public class VectorIntoArrayInvalidControlFlow {
private static final VectorSpecies<Long> L_SPECIES = LongVector.SPECIES_128;
private static final LongVector longVector;
private static final long[] longArray = new long[L_SPECIES.length()];
private static final boolean[] longMask = new boolean[L_SPECIES.length()];
private static final VectorMask<Long> longVectorMask;
static {
for (int i = 0; i < L_SPECIES.length(); i++) {
longArray[i] = i + 1;
// Mask on even lanes only (when the species has more than one lane).
longMask[i] = L_SPECIES.length() > 1 && i % 2 == 0;
}
longVector = LongVector.fromArray(L_SPECIES, longArray, 0);
longVectorMask = VectorMask.fromArray(L_SPECIES, longMask, 0);
}
// Hot loop so intoArray0 gets compiled (see -Xbatch / compileonly above).
static long[] test() {
long[] res = new long[L_SPECIES.length()];
for(int j = 0; j < 10_000; j++) {
longVector.intoArray(res, 0, longVectorMask);
}
return res;
}
static public void main(String[] args) {
test();
}
}

View File

@ -99,9 +99,9 @@ public class IRNode {
private static final String POSTFIX = "#_";
private static final String START = "(\\d+(\\s){2}(";
private static final String MID = ".*)+(\\s){2}===.*";
private static final String END = ")";
public static final String START = "(\\d+(\\s){2}(";
public static final String MID = ".*)+(\\s){2}===.*";
public static final String END = ")";
private static final String STORE_OF_CLASS_POSTFIX = "(:|\\+)\\S* \\*" + END;
private static final String LOAD_OF_CLASS_POSTFIX = "(:|\\+)\\S* \\*" + END;

View File

@ -23,32 +23,15 @@
/*
* @test
* @bug 8155781
* @bug 8155781 8359344
* @modules java.base/jdk.internal.misc
*
* @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
* -XX:-TieredCompilation -Xbatch
* -XX:+UseCompressedOops -XX:+UseCompressedClassPointers
* -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
* compiler.unsafe.OpaqueAccesses
* @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
* -XX:-TieredCompilation -Xbatch
* -XX:+UseCompressedOops -XX:-UseCompressedClassPointers
* -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
* compiler.unsafe.OpaqueAccesses
* @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
* -XX:-TieredCompilation -Xbatch
* -XX:-UseCompressedOops -XX:+UseCompressedClassPointers
* -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
* compiler.unsafe.OpaqueAccesses
* @run main/bootclasspath/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+UnlockDiagnosticVMOptions
* -XX:-TieredCompilation -Xbatch
* -XX:-UseCompressedOops -XX:-UseCompressedClassPointers
* -XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*
* compiler.unsafe.OpaqueAccesses
* @library /test/lib /
* @run driver compiler.unsafe.OpaqueAccesses
*/
package compiler.unsafe;
import compiler.lib.ir_framework.*;
import jdk.internal.misc.Unsafe;
import java.lang.reflect.Field;
@ -77,74 +60,126 @@ public class OpaqueAccesses {
private Object f = new Object();
private long l1, l2;
// To the end of a line, then a newline character, repeated.
private static final String FULL_LINES = "(.*\\R)*";
// Finishes the line after the node type, skips any number of full lines, and eats up to just before the next node type.
private static final String SKIP = IRNode.MID + IRNode.END + "\\R" + FULL_LINES + "\\s*" + IRNode.START;
private static final String CALL_STATIC_JAVA_AND_THEN_OPAQUE_NOT_NULL = IRNode.START + "CallStaticJava" + SKIP + "OpaqueNotNull" + IRNode.MID + IRNode.END;
private static final String OPAQUE_NOT_NULL_AND_THEN_CALL_STATIC_JAVA = IRNode.START + "OpaqueNotNull" + SKIP + "CallStaticJava" + IRNode.MID + IRNode.END;
/* Having both CallStaticJava and OpaqueNotNull, in any order. We use that in a failOn to make sure we have one
 * or the other (or none), but not both.
 * The CallStaticJava happens when the call is not intrinsified, and the OpaqueNotNull comes from the intrinsic.
 * We don't want an unfinished intrinsic with the call nevertheless present.
 */
private static final String BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL =
"(" + CALL_STATIC_JAVA_AND_THEN_OPAQUE_NOT_NULL + ") | (" + OPAQUE_NOT_NULL_AND_THEN_CALL_STATIC_JAVA + ")";
// Each test performs a single raw Unsafe access. The IR check (run before
// macro expansion) fails only if BOTH a CallStaticJava and an OpaqueNotNull
// node are present, i.e. the intrinsic bailed out after already emitting nodes.
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testFixedOffsetField(Object o) {
return UNSAFE.getReference(o, F_OFFSET);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader0(Object o) {
return UNSAFE.getInt(o, 0);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader4(Object o) {
return UNSAFE.getInt(o, 4);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader8(Object o) {
return UNSAFE.getInt(o, 8);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader12(Object o) {
return UNSAFE.getInt(o, 12);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader16(Object o) {
return UNSAFE.getInt(o, 16);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeader17(Object o) {
return UNSAFE.getIntUnaligned(o, 17);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testFixedBase(long off) {
return UNSAFE.getReference(INSTANCE, off);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testOpaque(Object o, long off) {
return UNSAFE.getReference(o, off);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray0(Object[] arr) {
return UNSAFE.getInt(arr, 0);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray4(Object[] arr) {
return UNSAFE.getInt(arr, 4);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray8(Object[] arr) {
return UNSAFE.getInt(arr, 8);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray12(Object[] arr) {
return UNSAFE.getInt(arr, 12);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray16(Object[] arr) {
return UNSAFE.getInt(arr, 16);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static int testFixedOffsetHeaderArray17(Object[] arr) {
return UNSAFE.getIntUnaligned(arr, 17);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testFixedOffsetArray(Object[] arr) {
return UNSAFE.getReference(arr, E_OFFSET);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testFixedBaseArray(long off) {
return UNSAFE.getReference(ARRAY, off);
}
@Test
@IR(failOn = {BOTH_CALL_STATIC_JAVA_AND_OPAQUE_NOT_NULL}, phase = CompilePhase.BEFORE_MACRO_EXPANSION)
static Object testOpaqueArray(Object[] o, long off) {
return UNSAFE.getReference(o, off);
}
@ -152,6 +187,7 @@ public class OpaqueAccesses {
static final long ADDR = UNSAFE.allocateMemory(10);
static boolean flag;
@Test
static int testMixedAccess() {
flag = !flag;
Object o = (flag ? INSTANCE : null);
@ -159,31 +195,81 @@ public class OpaqueAccesses {
return UNSAFE.getInt(o, off);
}
public static void main(String[] args) {
for (int i = 0; i < 20_000; i++) {
// Instance
testFixedOffsetField(INSTANCE);
testFixedOffsetHeader0(INSTANCE);
testFixedOffsetHeader4(INSTANCE);
testFixedOffsetHeader8(INSTANCE);
testFixedOffsetHeader12(INSTANCE);
testFixedOffsetHeader16(INSTANCE);
testFixedOffsetHeader17(INSTANCE);
testFixedBase(F_OFFSET);
testOpaque(INSTANCE, F_OFFSET);
testMixedAccess();
// Drives every @Test method once per framework invocation so they all get
// compiled and their IR rules checked.
@Run(test = {
"testFixedOffsetField",
"testFixedOffsetHeader0",
"testFixedOffsetHeader4",
"testFixedOffsetHeader8",
"testFixedOffsetHeader12",
"testFixedOffsetHeader16",
"testFixedOffsetHeader17",
"testFixedBase",
"testOpaque",
"testMixedAccess",
"testFixedOffsetHeaderArray0",
"testFixedOffsetHeaderArray4",
"testFixedOffsetHeaderArray8",
"testFixedOffsetHeaderArray12",
"testFixedOffsetHeaderArray16",
"testFixedOffsetHeaderArray17",
"testFixedOffsetArray",
"testFixedBaseArray",
"testOpaqueArray",
})
public static void runMethod() {
// Instance
testFixedOffsetField(INSTANCE);
testFixedOffsetHeader0(INSTANCE);
testFixedOffsetHeader4(INSTANCE);
testFixedOffsetHeader8(INSTANCE);
testFixedOffsetHeader12(INSTANCE);
testFixedOffsetHeader16(INSTANCE);
testFixedOffsetHeader17(INSTANCE);
testFixedBase(F_OFFSET);
testOpaque(INSTANCE, F_OFFSET);
testMixedAccess();
// Array
testFixedOffsetHeaderArray0(ARRAY);
testFixedOffsetHeaderArray4(ARRAY);
testFixedOffsetHeaderArray8(ARRAY);
testFixedOffsetHeaderArray12(ARRAY);
testFixedOffsetHeaderArray16(ARRAY);
testFixedOffsetHeaderArray17(ARRAY);
testFixedOffsetArray(ARRAY);
testFixedBaseArray(E_OFFSET);
testOpaqueArray(ARRAY, E_OFFSET);
}
// Array
testFixedOffsetHeaderArray0(ARRAY);
testFixedOffsetHeaderArray4(ARRAY);
testFixedOffsetHeaderArray8(ARRAY);
testFixedOffsetHeaderArray12(ARRAY);
testFixedOffsetHeaderArray16(ARRAY);
testFixedOffsetHeaderArray17(ARRAY);
testFixedOffsetArray(ARRAY);
testFixedBaseArray(E_OFFSET);
testOpaqueArray(ARRAY, E_OFFSET);
System.out.println("TEST PASSED");
}
// Runs the IR framework once per combination of compressed-oops and
// compressed-class-pointers settings, matching the four original runs:
// (+oops,+klass), (+oops,-klass), (-oops,+klass), (-oops,-klass).
public static void main(String[] args) {
    String[] oopsModes = {"-XX:+UseCompressedOops", "-XX:-UseCompressedOops"};
    String[] klassModes = {"-XX:+UseCompressedClassPointers", "-XX:-UseCompressedClassPointers"};
    for (String oops : oopsModes) {
        for (String klass : klassModes) {
            TestFramework.runWithFlags(
                "--add-exports=java.base/jdk.internal.misc=ALL-UNNAMED",
                "-XX:+IgnoreUnrecognizedVMOptions", "-XX:+UnlockDiagnosticVMOptions",
                "-XX:-TieredCompilation", "-Xbatch",
                oops, klass,
                "-XX:CompileCommand=dontinline,compiler.unsafe.OpaqueAccesses::test*"
            );
        }
    }
}
}