8363996: Obsolete UseCompressedClassPointers

Reviewed-by: rkennke, kvn, adinn, dholmes, mdoerr, iklam, fyang
This commit is contained in:
Thomas Stuefe 2026-03-26 11:08:48 +00:00
parent ab659d4ee4
commit da296cbea1
149 changed files with 952 additions and 2093 deletions

View File

@ -2233,15 +2233,9 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const {
void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
{
st->print_cr("# MachUEPNode");
if (UseCompressedClassPointers) {
st->print_cr("\tldrw rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tldrw r10, [rscratch2 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
st->print_cr("\tcmpw rscratch1, r10");
} else {
st->print_cr("\tldr rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tldr r10, [rscratch2 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
st->print_cr("\tcmp rscratch1, r10");
}
st->print_cr("\tldrw rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tldrw r10, [rscratch2 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
st->print_cr("\tcmpw rscratch1, r10");
st->print_cr("\tbne, SharedRuntime::_ic_miss_stub");
}
#endif

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -59,22 +59,6 @@ const Register SHIFT_count = r0; // where count for shift operations must be
#define __ _masm->
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
Register &tmp2) {
if (tmp1 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp1 = extra;
} else if (tmp2 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp2 = extra;
}
assert_different_registers(preserve, tmp1, tmp2);
}
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
@ -1269,12 +1253,9 @@ void LIR_Assembler::emit_typecheck_helper(LIR_OpTypeCheck *op, Label* success, L
} else if (obj == klass_RInfo) {
klass_RInfo = dst;
}
if (k->is_loaded() && !UseCompressedClassPointers) {
select_different_registers(obj, dst, k_RInfo, klass_RInfo);
} else {
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
}
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
assert_different_registers(obj, k_RInfo, klass_RInfo);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -1287,9 +1287,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
}
LIR_Opr reg = rlock_result(x);
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ checkcast(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), info_for_exception, patching_info, stub,
@ -1308,9 +1306,7 @@ void LIRGenerator::do_InstanceOf(InstanceOf* x) {
}
obj.load_item();
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ instanceof(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), patching_info, x->profiled_method(), x->profiled_bci());

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2021, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -105,12 +105,8 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
} else {
mov(t1, checked_cast<int32_t>(markWord::prototype().value()));
str(t1, Address(obj, oopDesc::mark_offset_in_bytes()));
if (UseCompressedClassPointers) { // Take care not to kill klass
encode_klass_not_null(t1, klass);
strw(t1, Address(obj, oopDesc::klass_offset_in_bytes()));
} else {
str(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
}
encode_klass_not_null(t1, klass); // Take care not to kill klass
strw(t1, Address(obj, oopDesc::klass_offset_in_bytes()));
}
if (len->is_valid()) {
@ -121,7 +117,7 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
// Clear gap/first 4 bytes following the length field.
strw(zr, Address(obj, base_offset));
}
} else if (UseCompressedClassPointers && !UseCompactObjectHeaders) {
} else if (!UseCompactObjectHeaders) {
store_klass_gap(obj, zr);
}
}

View File

@ -762,7 +762,7 @@ void MacroAssembler::call_VM_base(Register oop_result,
assert(java_thread == rthread, "unexpected register");
#ifdef ASSERT
// TraceBytecodes does not use r12 but saves it over the call, so don't verify
// if ((UseCompressedOops || UseCompressedClassPointers) && !TraceBytecodes) verify_heapbase("call_VM_base: heap base corrupted?");
// if (!TraceBytecodes) verify_heapbase("call_VM_base: heap base corrupted?");
#endif // ASSERT
assert(java_thread != oop_result , "cannot use the same register for java_thread & oop_result");
@ -1002,14 +1002,10 @@ int MacroAssembler::ic_check(int end_alignment) {
load_narrow_klass_compact(tmp1, receiver);
ldrw(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
cmpw(tmp1, tmp2);
} else if (UseCompressedClassPointers) {
} else {
ldrw(tmp1, Address(receiver, oopDesc::klass_offset_in_bytes()));
ldrw(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
cmpw(tmp1, tmp2);
} else {
ldr(tmp1, Address(receiver, oopDesc::klass_offset_in_bytes()));
ldr(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
cmp(tmp1, tmp2);
}
Label dont;
@ -3278,7 +3274,6 @@ int MacroAssembler::pop_p(unsigned int bitset, Register stack) {
#ifdef ASSERT
void MacroAssembler::verify_heapbase(const char* msg) {
#if 0
assert (UseCompressedOops || UseCompressedClassPointers, "should be compressed");
assert (Universe::heap() != nullptr, "java heap should be initialized");
if (!UseCompressedOops || Universe::ptr_base() == nullptr) {
// rheapbase is allocated as general register
@ -5067,13 +5062,10 @@ void MacroAssembler::load_narrow_klass_compact(Register dst, Register src) {
void MacroAssembler::load_klass(Register dst, Register src) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(dst, src);
decode_klass_not_null(dst);
} else if (UseCompressedClassPointers) {
ldrw(dst, Address(src, oopDesc::klass_offset_in_bytes()));
decode_klass_not_null(dst);
} else {
ldr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
ldrw(dst, Address(src, oopDesc::klass_offset_in_bytes()));
}
decode_klass_not_null(dst);
}
void MacroAssembler::restore_cpu_control_state_after_jni(Register tmp1, Register tmp2) {
@ -5125,25 +5117,21 @@ void MacroAssembler::load_mirror(Register dst, Register method, Register tmp1, R
void MacroAssembler::cmp_klass(Register obj, Register klass, Register tmp) {
assert_different_registers(obj, klass, tmp);
if (UseCompressedClassPointers) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp, obj);
} else {
ldrw(tmp, Address(obj, oopDesc::klass_offset_in_bytes()));
}
if (CompressedKlassPointers::base() == nullptr) {
cmp(klass, tmp, LSL, CompressedKlassPointers::shift());
return;
} else if (((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
&& CompressedKlassPointers::shift() == 0) {
// Only the bottom 32 bits matter
cmpw(klass, tmp);
return;
}
decode_klass_not_null(tmp);
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp, obj);
} else {
ldr(tmp, Address(obj, oopDesc::klass_offset_in_bytes()));
ldrw(tmp, Address(obj, oopDesc::klass_offset_in_bytes()));
}
if (CompressedKlassPointers::base() == nullptr) {
cmp(klass, tmp, LSL, CompressedKlassPointers::shift());
return;
} else if (((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
&& CompressedKlassPointers::shift() == 0) {
// Only the bottom 32 bits matter
cmpw(klass, tmp);
return;
}
decode_klass_not_null(tmp);
cmp(klass, tmp);
}
@ -5151,36 +5139,25 @@ void MacroAssembler::cmp_klasses_from_objects(Register obj1, Register obj2, Regi
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp1, obj1);
load_narrow_klass_compact(tmp2, obj2);
cmpw(tmp1, tmp2);
} else if (UseCompressedClassPointers) {
} else {
ldrw(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
ldrw(tmp2, Address(obj2, oopDesc::klass_offset_in_bytes()));
cmpw(tmp1, tmp2);
} else {
ldr(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
ldr(tmp2, Address(obj2, oopDesc::klass_offset_in_bytes()));
cmp(tmp1, tmp2);
}
cmpw(tmp1, tmp2);
}
void MacroAssembler::store_klass(Register dst, Register src) {
// FIXME: Should this be a store release? concurrent gcs assumes
// klass length is valid if klass field is not null.
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
encode_klass_not_null(src);
strw(src, Address(dst, oopDesc::klass_offset_in_bytes()));
} else {
str(src, Address(dst, oopDesc::klass_offset_in_bytes()));
}
encode_klass_not_null(src);
strw(src, Address(dst, oopDesc::klass_offset_in_bytes()));
}
void MacroAssembler::store_klass_gap(Register dst, Register src) {
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
// Store to klass gap in destination
strw(src, Address(dst, oopDesc::klass_gap_offset_in_bytes()));
}
// Store to klass gap in destination
strw(src, Address(dst, oopDesc::klass_gap_offset_in_bytes()));
}
// Algorithm must match CompressedOops::encode.
@ -5326,8 +5303,6 @@ MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode() {
}
MacroAssembler::KlassDecodeMode MacroAssembler::klass_decode_mode(address base, int shift, const size_t range) {
assert(UseCompressedClassPointers, "not using compressed class pointers");
// KlassDecodeMode shouldn't be set already.
assert(_klass_decode_mode == KlassDecodeNone, "set once");
@ -5457,8 +5432,6 @@ void MacroAssembler::decode_klass_not_null_for_aot(Register dst, Register src) {
}
void MacroAssembler::decode_klass_not_null(Register dst, Register src) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
if (AOTCodeCache::is_on_for_dump()) {
decode_klass_not_null_for_aot(dst, src);
return;
@ -5525,7 +5498,6 @@ void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
}
void MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int index = oop_recorder()->find_index(k);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2021, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -75,7 +75,6 @@
static bool narrow_klass_use_complex_address() {
NOT_LP64(ShouldNotCallThis());
assert(UseCompressedClassPointers, "only for compressed klass code");
return false;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2025 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -144,7 +144,7 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
if (len->is_valid()) {
stw(len, arrayOopDesc::length_offset_in_bytes(), obj);
} else if (UseCompressedClassPointers && !UseCompactObjectHeaders) {
} else if (!UseCompactObjectHeaders) {
// Otherwise length is in the class gap.
store_klass_gap(obj);
}

View File

@ -3201,23 +3201,17 @@ Register MacroAssembler::encode_klass_not_null(Register dst, Register src) {
void MacroAssembler::store_klass(Register dst_oop, Register klass, Register ck) {
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
Register compressedKlass = encode_klass_not_null(ck, klass);
stw(compressedKlass, oopDesc::klass_offset_in_bytes(), dst_oop);
} else {
std(klass, oopDesc::klass_offset_in_bytes(), dst_oop);
}
Register compressedKlass = encode_klass_not_null(ck, klass);
stw(compressedKlass, oopDesc::klass_offset_in_bytes(), dst_oop);
}
void MacroAssembler::store_klass_gap(Register dst_oop, Register val) {
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
if (val == noreg) {
val = R0;
li(val, 0);
}
stw(val, oopDesc::klass_gap_offset_in_bytes(), dst_oop);
if (val == noreg) {
val = R0;
li(val, 0);
}
stw(val, oopDesc::klass_gap_offset_in_bytes(), dst_oop);
}
int MacroAssembler::instr_size_for_decode_klass_not_null() {
@ -3226,17 +3220,13 @@ int MacroAssembler::instr_size_for_decode_klass_not_null() {
// Not yet computed?
if (computed_size == -1) {
if (!UseCompressedClassPointers) {
computed_size = 0;
} else {
// Determine by scratch emit.
ResourceMark rm;
int code_size = 8 * BytesPerInstWord;
CodeBuffer cb("decode_klass_not_null scratch buffer", code_size, 0);
MacroAssembler* a = new MacroAssembler(&cb);
a->decode_klass_not_null(R11_scratch1);
computed_size = a->offset();
}
// Determine by scratch emit.
ResourceMark rm;
int code_size = 8 * BytesPerInstWord;
CodeBuffer cb("decode_klass_not_null scratch buffer", code_size, 0);
MacroAssembler* a = new MacroAssembler(&cb);
a->decode_klass_not_null(R11_scratch1);
computed_size = a->offset();
}
return computed_size;
@ -3259,18 +3249,14 @@ void MacroAssembler::decode_klass_not_null(Register dst, Register src) {
void MacroAssembler::load_klass_no_decode(Register dst, Register src) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(dst, src);
} else if (UseCompressedClassPointers) {
lwz(dst, oopDesc::klass_offset_in_bytes(), src);
} else {
ld(dst, oopDesc::klass_offset_in_bytes(), src);
lwz(dst, oopDesc::klass_offset_in_bytes(), src);
}
}
void MacroAssembler::load_klass(Register dst, Register src) {
load_klass_no_decode(dst, src);
if (UseCompressedClassPointers) { // also true for UseCompactObjectHeaders
decode_klass_not_null(dst);
}
decode_klass_not_null(dst);
}
// Loads the obj's Klass* into dst.
@ -3286,18 +3272,13 @@ void MacroAssembler::load_narrow_klass_compact(Register dst, Register src) {
void MacroAssembler::cmp_klass(ConditionRegister dst, Register obj, Register klass, Register tmp, Register tmp2) {
assert_different_registers(obj, klass, tmp);
if (UseCompressedClassPointers) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp, obj);
} else {
lwz(tmp, oopDesc::klass_offset_in_bytes(), obj);
}
Register encoded_klass = encode_klass_not_null(tmp2, klass);
cmpw(dst, tmp, encoded_klass);
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp, obj);
} else {
ld(tmp, oopDesc::klass_offset_in_bytes(), obj);
cmpd(dst, tmp, klass);
lwz(tmp, oopDesc::klass_offset_in_bytes(), obj);
}
Register encoded_klass = encode_klass_not_null(tmp2, klass);
cmpw(dst, tmp, encoded_klass);
}
void MacroAssembler::cmp_klasses_from_objects(ConditionRegister dst, Register obj1, Register obj2, Register tmp1, Register tmp2) {
@ -3305,14 +3286,10 @@ void MacroAssembler::cmp_klasses_from_objects(ConditionRegister dst, Register ob
load_narrow_klass_compact(tmp1, obj1);
load_narrow_klass_compact(tmp2, obj2);
cmpw(dst, tmp1, tmp2);
} else if (UseCompressedClassPointers) {
} else {
lwz(tmp1, oopDesc::klass_offset_in_bytes(), obj1);
lwz(tmp2, oopDesc::klass_offset_in_bytes(), obj2);
cmpw(dst, tmp1, tmp2);
} else {
ld(tmp1, oopDesc::klass_offset_in_bytes(), obj1);
ld(tmp2, oopDesc::klass_offset_in_bytes(), obj2);
cmpd(dst, tmp1, tmp2);
}
}

View File

@ -87,7 +87,6 @@
static bool narrow_klass_use_complex_address() {
NOT_LP64(ShouldNotCallThis());
assert(UseCompressedClassPointers, "only for compressed klass code");
// TODO: PPC port if (MatchDecodeNodes) return true;
return false;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -196,12 +196,9 @@ void LIR_Assembler::arraycopy_type_check(Register src, Register src_pos, Registe
if (UseCompactObjectHeaders) {
__ load_narrow_klass_compact(tmp, src);
__ load_narrow_klass_compact(t0, dst);
} else if (UseCompressedClassPointers) {
} else {
__ lwu(tmp, Address(src, oopDesc::klass_offset_in_bytes()));
__ lwu(t0, Address(dst, oopDesc::klass_offset_in_bytes()));
} else {
__ ld(tmp, Address(src, oopDesc::klass_offset_in_bytes()));
__ ld(t0, Address(dst, oopDesc::klass_offset_in_bytes()));
}
__ bne(tmp, t0, *stub->entry(), /* is_far */ true);
} else {
@ -257,9 +254,7 @@ void LIR_Assembler::arraycopy_assert(Register src, Register dst, Register tmp, c
// but not necessarily exactly of type default_type.
Label known_ok, halt;
__ mov_metadata(tmp, default_type->constant_encoding());
if (UseCompressedClassPointers) {
__ encode_klass_not_null(tmp);
}
__ encode_klass_not_null(tmp);
if (basic_type != T_OBJECT) {
__ cmp_klass_compressed(dst, tmp, t0, halt, false);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2023, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -55,20 +55,6 @@ const Register SHIFT_count = x10; // where count for shift operations must be
#define __ _masm->
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
Register &tmp2) {
if (tmp1 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp1 = extra;
} else if (tmp2 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp2 = extra;
}
assert_different_registers(preserve, tmp1, tmp2);
}
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
@ -1155,12 +1141,8 @@ void LIR_Assembler::emit_typecheck_helper(LIR_OpTypeCheck *op, Label* success, L
} else if (obj == klass_RInfo) {
klass_RInfo = dst;
}
if (k->is_loaded() && !UseCompressedClassPointers) {
select_different_registers(obj, dst, k_RInfo, klass_RInfo);
} else {
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
}
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
assert_different_registers(obj, k_RInfo, klass_RInfo);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -1073,9 +1073,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
}
LIR_Opr reg = rlock_result(x);
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ checkcast(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), info_for_exception, patching_info, stub,
@ -1094,9 +1092,7 @@ void LIRGenerator::do_InstanceOf(InstanceOf* x) {
}
obj.load_item();
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ instanceof(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), patching_info, x->profiled_method(), x->profiled_bci());

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -92,12 +92,8 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
// This assumes that all prototype bits fit in an int32_t
mv(tmp1, checked_cast<int32_t>(markWord::prototype().value()));
sd(tmp1, Address(obj, oopDesc::mark_offset_in_bytes()));
if (UseCompressedClassPointers) { // Take care not to kill klass
encode_klass_not_null(tmp1, klass, tmp2);
sw(tmp1, Address(obj, oopDesc::klass_offset_in_bytes()));
} else {
sd(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
}
encode_klass_not_null(tmp1, klass, tmp2);
sw(tmp1, Address(obj, oopDesc::klass_offset_in_bytes()));
}
if (len->is_valid()) {
@ -108,7 +104,7 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
// Clear gap/first 4 bytes following the length field.
sw(zr, Address(obj, base_offset));
}
} else if (UseCompressedClassPointers && !UseCompactObjectHeaders) {
} else if (!UseCompactObjectHeaders) {
store_klass_gap(obj, zr);
}
}

View File

@ -1175,8 +1175,7 @@ void C2_MacroAssembler::string_compare_long_same_encoding(Register result, Regis
Label TAIL_CHECK, TAIL, NEXT_WORD, DIFFERENCE;
const int base_offset = arrayOopDesc::base_offset_in_bytes(T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
const int minCharsInWord = isLL ? wordSize : wordSize / 2;
@ -1269,8 +1268,7 @@ void C2_MacroAssembler::string_compare_long_different_encoding(Register result,
Label TAIL, NEXT_WORD, DIFFERENCE;
const int base_offset = arrayOopDesc::base_offset_in_bytes(T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
Register strL = isLU ? str1 : str2;
Register strU = isLU ? str2 : str1;
@ -1485,8 +1483,7 @@ void C2_MacroAssembler::arrays_equals(Register a1, Register a2,
int length_offset = arrayOopDesc::length_offset_in_bytes();
int base_offset = arrayOopDesc::base_offset_in_bytes(elem_size == 2 ? T_CHAR : T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
Register cnt1 = tmp3;
Register cnt2 = tmp1; // cnt2 only used in array length compare
@ -1611,8 +1608,7 @@ void C2_MacroAssembler::string_equals(Register a1, Register a2,
int base_offset = arrayOopDesc::base_offset_in_bytes(T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
BLOCK_COMMENT("string_equals {");
@ -2699,8 +2695,7 @@ void C2_MacroAssembler::arrays_equals_v(Register a1, Register a2, Register resul
int length_offset = arrayOopDesc::length_offset_in_bytes();
int base_offset = arrayOopDesc::base_offset_in_bytes(elem_size == 2 ? T_CHAR : T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
BLOCK_COMMENT("arrays_equals_v {");

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2020, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2024, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -3514,10 +3514,8 @@ void MacroAssembler::orptr(Address adr, RegisterOrConstant src, Register tmp1, R
void MacroAssembler::cmp_klass_compressed(Register oop, Register trial_klass, Register tmp, Label &L, bool equal) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp, oop);
} else if (UseCompressedClassPointers) {
lwu(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
} else {
ld(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
lwu(tmp, Address(oop, oopDesc::klass_offset_in_bytes()));
}
if (equal) {
beq(trial_klass, tmp, L);
@ -3741,11 +3739,9 @@ void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(dst, src);
decode_klass_not_null(dst, tmp);
} else if (UseCompressedClassPointers) {
} else {
lwu(dst, Address(src, oopDesc::klass_offset_in_bytes()));
decode_klass_not_null(dst, tmp);
} else {
ld(dst, Address(src, oopDesc::klass_offset_in_bytes()));
}
}
@ -3753,20 +3749,15 @@ void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
// FIXME: Should this be a store release? concurrent gcs assumes
// klass length is valid if klass field is not null.
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
encode_klass_not_null(src, tmp);
sw(src, Address(dst, oopDesc::klass_offset_in_bytes()));
} else {
sd(src, Address(dst, oopDesc::klass_offset_in_bytes()));
}
encode_klass_not_null(src, tmp);
sw(src, Address(dst, oopDesc::klass_offset_in_bytes()));
}
void MacroAssembler::store_klass_gap(Register dst, Register src) {
assert(!UseCompactObjectHeaders, "not with compact headers");
if (UseCompressedClassPointers) {
// Store to klass gap in destination
sw(src, Address(dst, oopDesc::klass_gap_offset_in_bytes()));
}
// Store to klass gap in destination
sw(src, Address(dst, oopDesc::klass_gap_offset_in_bytes()));
}
void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
@ -3775,7 +3766,6 @@ void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
}
void MacroAssembler::decode_klass_not_null(Register dst, Register src, Register tmp) {
assert(UseCompressedClassPointers, "should only be used for compressed headers");
assert_different_registers(dst, tmp);
assert_different_registers(src, tmp);
@ -3806,8 +3796,6 @@ void MacroAssembler::encode_klass_not_null(Register r, Register tmp) {
}
void MacroAssembler::encode_klass_not_null(Register dst, Register src, Register tmp) {
assert(UseCompressedClassPointers, "should only be used for compressed headers");
if (CompressedKlassPointers::base() == nullptr) {
if (CompressedKlassPointers::shift() != 0) {
srli(dst, src, CompressedKlassPointers::shift());
@ -5337,7 +5325,6 @@ void MacroAssembler::set_narrow_oop(Register dst, jobject obj) {
}
void MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int index = oop_recorder()->find_index(k);
@ -5417,12 +5404,9 @@ int MacroAssembler::ic_check(int end_alignment) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(tmp1, receiver);
lwu(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
} else if (UseCompressedClassPointers) {
} else {
lwu(tmp1, Address(receiver, oopDesc::klass_offset_in_bytes()));
lwu(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
} else {
ld(tmp1, Address(receiver, oopDesc::klass_offset_in_bytes()));
ld(tmp2, Address(data, CompiledICData::speculated_klass_offset()));
}
Label ic_hit;

View File

@ -1801,13 +1801,8 @@ void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
{
assert_cond(st != nullptr);
st->print_cr("# MachUEPNode");
if (UseCompressedClassPointers) {
st->print_cr("\tlwu t1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tlwu t2, [t0 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
} else {
st->print_cr("\tld t1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tld t2, [t0 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
}
st->print_cr("\tlwu t1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tlwu t2, [t0 + CompiledICData::speculated_klass_offset()]\t# compressed klass");
st->print_cr("\tbeq t1, t2, ic_hit");
st->print_cr("\tj, SharedRuntime::_ic_miss_stub\t # Inline cache check");
st->print_cr("\tic_hit:");

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2025, Red Hat Inc. All rights reserved.
* Copyright (c) 2020, 2025, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -3070,8 +3070,7 @@ class StubGenerator: public StubCodeGenerator {
const Register tmp = x30, tmpLval = x12;
int base_offset = arrayOopDesc::base_offset_in_bytes(T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
#ifdef ASSERT
if (AvoidUnalignedAccesses) {
@ -3128,8 +3127,7 @@ class StubGenerator: public StubCodeGenerator {
tmp1 = x28, tmp2 = x29, tmp3 = x30, tmp4 = x12;
int base_offset = arrayOopDesc::base_offset_in_bytes(T_BYTE);
assert((base_offset % (UseCompactObjectHeaders ? 4 :
(UseCompressedClassPointers ? 8 : 4))) == 0, "Must be");
assert((base_offset % (UseCompactObjectHeaders ? 4 : 8)) == 0, "Must be");
Register strU = isLU ? str2 : str1,
strL = isLU ? str1 : str2,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2024 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -2251,9 +2251,7 @@ void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
// but not necessarily exactly of type default_type.
NearLabel known_ok, halt;
metadata2reg(default_type->constant_encoding(), tmp);
if (UseCompressedClassPointers) {
__ encode_klass_not_null(tmp);
}
__ encode_klass_not_null(tmp);
if (basic_type != T_OBJECT) {
__ cmp_klass(tmp, dst, Z_R1_scratch);
@ -2540,13 +2538,8 @@ void LIR_Assembler::emit_typecheck_helper(LIR_OpTypeCheck *op, Label* success, L
// Get object class.
// Not a safepoint as obj null check happens earlier.
if (op->fast_check()) {
if (UseCompressedClassPointers) {
__ load_klass(klass_RInfo, obj);
__ compareU64_and_branch(k_RInfo, klass_RInfo, Assembler::bcondNotEqual, *failure_target);
} else {
__ z_cg(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
__ branch_optimized(Assembler::bcondNotEqual, *failure_target);
}
__ load_klass(klass_RInfo, obj);
__ compareU64_and_branch(k_RInfo, klass_RInfo, Assembler::bcondNotEqual, *failure_target);
// Successful cast, fall through to profile or jump.
} else {
bool need_slow_path = !k->is_loaded() ||

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2024 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -107,10 +107,10 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
}
if (len->is_valid()) {
// Length will be in the klass gap, if one exists.
// Length will be in the klass gap.
z_st(len, Address(obj, arrayOopDesc::length_offset_in_bytes()));
} else if (UseCompressedClassPointers && !UseCompactObjectHeaders) {
store_klass_gap(Rzero, obj); // Zero klass gap for compressed oops.
} else if (!UseCompactObjectHeaders) {
store_klass_gap(Rzero, obj); // Zero klass gap.
}
}

View File

@ -1237,7 +1237,6 @@ void MacroAssembler::load_narrow_oop(Register t, narrowOop a) {
// Load narrow klass constant, compression required.
void MacroAssembler::load_narrow_klass(Register t, Klass* k) {
assert(UseCompressedClassPointers, "must be on to call this method");
narrowKlass encoded_k = CompressedKlassPointers::encode(k);
load_const_32to64(t, encoded_k, false /*sign_extend*/);
}
@ -1255,7 +1254,6 @@ void MacroAssembler::compare_immediate_narrow_oop(Register oop1, narrowOop oop2)
// Compare narrow oop in reg with narrow oop constant, no decompression.
void MacroAssembler::compare_immediate_narrow_klass(Register klass1, Klass* klass2) {
assert(UseCompressedClassPointers, "must be on to call this method");
narrowKlass encoded_k = CompressedKlassPointers::encode(klass2);
Assembler::z_clfi(klass1, encoded_k);
@ -1348,8 +1346,6 @@ int MacroAssembler::patch_load_narrow_oop(address pos, oop o) {
// Patching the immediate value of CPU version dependent load_narrow_klass sequence.
// The passed ptr must NOT be in compressed format!
int MacroAssembler::patch_load_narrow_klass(address pos, Klass* k) {
assert(UseCompressedClassPointers, "Can only patch compressed klass pointers");
narrowKlass nk = CompressedKlassPointers::encode(k);
return patch_load_const_32to64(pos, nk);
}
@ -1364,8 +1360,6 @@ int MacroAssembler::patch_compare_immediate_narrow_oop(address pos, oop o) {
// Patching the immediate value of CPU version dependent compare_immediate_narrow_klass sequence.
// The passed ptr must NOT be in compressed format!
int MacroAssembler::patch_compare_immediate_narrow_klass(address pos, Klass* k) {
assert(UseCompressedClassPointers, "Can only patch compressed klass pointers");
narrowKlass nk = CompressedKlassPointers::encode(k);
return patch_compare_immediate_32(pos, nk);
}
@ -2235,10 +2229,8 @@ int MacroAssembler::ic_check(int end_alignment) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(R1_scratch, R2_receiver);
} else if (UseCompressedClassPointers) {
z_llgf(R1_scratch, Address(R2_receiver, oopDesc::klass_offset_in_bytes()));
} else {
z_lg(R1_scratch, Address(R2_receiver, oopDesc::klass_offset_in_bytes()));
z_llgf(R1_scratch, Address(R2_receiver, oopDesc::klass_offset_in_bytes()));
}
z_cg(R1_scratch, Address(R9_data, in_bytes(CompiledICData::speculated_klass_offset())));
z_bre(success);
@ -3916,7 +3908,6 @@ void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
address base = CompressedKlassPointers::base();
int shift = CompressedKlassPointers::shift();
bool need_zero_extend = base != nullptr;
assert(UseCompressedClassPointers, "only for compressed klass ptrs");
BLOCK_COMMENT("cKlass encoder {");
@ -4013,7 +4004,6 @@ int MacroAssembler::instr_size_for_decode_klass_not_null() {
address base = CompressedKlassPointers::base();
int shift_size = CompressedKlassPointers::shift() == 0 ? 0 : 6; /* sllg */
int addbase_size = 0;
assert(UseCompressedClassPointers, "only for compressed klass ptrs");
if (base != nullptr) {
unsigned int base_h = ((unsigned long)base)>>32;
@ -4043,7 +4033,6 @@ void MacroAssembler::decode_klass_not_null(Register dst) {
address base = CompressedKlassPointers::base();
int shift = CompressedKlassPointers::shift();
int beg_off = offset();
assert(UseCompressedClassPointers, "only for compressed klass ptrs");
BLOCK_COMMENT("cKlass decoder (const size) {");
@ -4085,7 +4074,6 @@ void MacroAssembler::decode_klass_not_null(Register dst) {
void MacroAssembler::decode_klass_not_null(Register dst, Register src) {
address base = CompressedKlassPointers::base();
int shift = CompressedKlassPointers::shift();
assert(UseCompressedClassPointers, "only for compressed klass ptrs");
BLOCK_COMMENT("cKlass decoder {");
@ -4125,13 +4113,9 @@ void MacroAssembler::decode_klass_not_null(Register dst, Register src) {
}
void MacroAssembler::load_klass(Register klass, Address mem) {
if (UseCompressedClassPointers) {
z_llgf(klass, mem);
// Attention: no null check here!
decode_klass_not_null(klass);
} else {
z_lg(klass, mem);
}
z_llgf(klass, mem);
// Attention: no null check here!
decode_klass_not_null(klass);
}
// Loads the obj's Klass* into dst.
@ -4154,10 +4138,8 @@ void MacroAssembler::cmp_klass(Register klass, Register obj, Register tmp) {
assert_different_registers(klass, obj, tmp);
load_narrow_klass_compact(tmp, obj);
z_cr(klass, tmp);
} else if (UseCompressedClassPointers) {
z_c(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
} else {
z_cg(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
z_c(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
}
BLOCK_COMMENT("} cmp_klass");
}
@ -4170,12 +4152,9 @@ void MacroAssembler::cmp_klasses_from_objects(Register obj1, Register obj2, Regi
load_narrow_klass_compact(tmp1, obj1);
load_narrow_klass_compact(tmp2, obj2);
z_cr(tmp1, tmp2);
} else if (UseCompressedClassPointers) {
} else {
z_l(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
z_c(tmp1, Address(obj2, oopDesc::klass_offset_in_bytes()));
} else {
z_lg(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
z_cg(tmp1, Address(obj2, oopDesc::klass_offset_in_bytes()));
}
BLOCK_COMMENT("} cmp_klasses_from_objects");
}
@ -4184,36 +4163,28 @@ void MacroAssembler::load_klass(Register klass, Register src_oop) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(klass, src_oop);
decode_klass_not_null(klass);
} else if (UseCompressedClassPointers) {
} else {
z_llgf(klass, oopDesc::klass_offset_in_bytes(), src_oop);
decode_klass_not_null(klass);
} else {
z_lg(klass, oopDesc::klass_offset_in_bytes(), src_oop);
}
}
void MacroAssembler::store_klass(Register klass, Register dst_oop, Register ck) {
assert(!UseCompactObjectHeaders, "Don't use with compact headers");
if (UseCompressedClassPointers) {
assert_different_registers(dst_oop, klass, Z_R0);
if (ck == noreg) ck = klass;
encode_klass_not_null(ck, klass);
z_st(ck, Address(dst_oop, oopDesc::klass_offset_in_bytes()));
} else {
z_stg(klass, Address(dst_oop, oopDesc::klass_offset_in_bytes()));
}
assert_different_registers(dst_oop, klass, Z_R0);
if (ck == noreg) ck = klass;
encode_klass_not_null(ck, klass);
z_st(ck, Address(dst_oop, oopDesc::klass_offset_in_bytes()));
}
void MacroAssembler::store_klass_gap(Register s, Register d) {
assert(!UseCompactObjectHeaders, "Don't use with compact headers");
if (UseCompressedClassPointers) {
assert(s != d, "not enough registers");
// Support s = noreg.
if (s != noreg) {
z_st(s, Address(d, oopDesc::klass_gap_offset_in_bytes()));
} else {
z_mvhi(Address(d, oopDesc::klass_gap_offset_in_bytes()), 0);
}
assert(s != d, "not enough registers");
// Support s = noreg.
if (s != noreg) {
z_st(s, Address(d, oopDesc::klass_gap_offset_in_bytes()));
} else {
z_mvhi(Address(d, oopDesc::klass_gap_offset_in_bytes()), 0);
}
}
@ -4227,67 +4198,64 @@ void MacroAssembler::compare_klass_ptr(Register Rop1, int64_t disp, Register Rba
BLOCK_COMMENT("compare klass ptr {");
if (UseCompressedClassPointers) {
const int shift = CompressedKlassPointers::shift();
address base = CompressedKlassPointers::base();
const int shift = CompressedKlassPointers::shift();
address base = CompressedKlassPointers::base();
if (UseCompactObjectHeaders) {
assert(shift >= 3, "cKlass encoder detected bad shift");
} else {
assert((shift == 0) || (shift == 3), "cKlass encoder detected bad shift");
}
assert_different_registers(Rop1, Z_R0);
assert_different_registers(Rop1, Rbase, Z_R1);
// First encode register oop and then compare with cOop in memory.
// This sequence saves an unnecessary cOop load and decode.
if (base == nullptr) {
if (shift == 0) {
z_cl(Rop1, disp, Rbase); // Unscaled
} else {
z_srlg(Z_R0, Rop1, shift); // ZeroBased
z_cl(Z_R0, disp, Rbase);
}
} else { // HeapBased
#ifdef ASSERT
bool used_R0 = true;
bool used_R1 = true;
#endif
Register current = Rop1;
Label done;
if (maybenull) { // null pointer must be preserved!
z_ltgr(Z_R0, current);
z_bre(done);
current = Z_R0;
}
unsigned int base_h = ((unsigned long)base)>>32;
unsigned int base_l = (unsigned int)((unsigned long)base);
if ((base_h != 0) && (base_l == 0) && VM_Version::has_HighWordInstr()) {
lgr_if_needed(Z_R0, current);
z_aih(Z_R0, -((int)base_h)); // Base has no set bits in lower half.
} else if ((base_h == 0) && (base_l != 0)) {
lgr_if_needed(Z_R0, current);
z_agfi(Z_R0, -(int)base_l);
} else {
int pow2_offset = get_oop_base_complement(Z_R1, ((uint64_t)(intptr_t)base));
add2reg_with_index(Z_R0, pow2_offset, Z_R1, Rop1); // Subtract base by adding complement.
}
if (shift != 0) {
z_srlg(Z_R0, Z_R0, shift);
}
bind(done);
z_cl(Z_R0, disp, Rbase);
#ifdef ASSERT
if (used_R0) preset_reg(Z_R0, 0xb05bUL, 2);
if (used_R1) preset_reg(Z_R1, 0xb06bUL, 2);
#endif
}
if (UseCompactObjectHeaders) {
assert(shift >= 3, "cKlass encoder detected bad shift");
} else {
z_clg(Rop1, disp, Z_R0, Rbase);
assert((shift == 0) || (shift == 3), "cKlass encoder detected bad shift");
}
assert_different_registers(Rop1, Z_R0);
assert_different_registers(Rop1, Rbase, Z_R1);
// First encode register oop and then compare with cOop in memory.
// This sequence saves an unnecessary cOop load and decode.
if (base == nullptr) {
if (shift == 0) {
z_cl(Rop1, disp, Rbase); // Unscaled
} else {
z_srlg(Z_R0, Rop1, shift); // ZeroBased
z_cl(Z_R0, disp, Rbase);
}
} else { // HeapBased
#ifdef ASSERT
bool used_R0 = true;
bool used_R1 = true;
#endif
Register current = Rop1;
Label done;
if (maybenull) { // null pointer must be preserved!
z_ltgr(Z_R0, current);
z_bre(done);
current = Z_R0;
}
unsigned int base_h = ((unsigned long)base)>>32;
unsigned int base_l = (unsigned int)((unsigned long)base);
if ((base_h != 0) && (base_l == 0) && VM_Version::has_HighWordInstr()) {
lgr_if_needed(Z_R0, current);
z_aih(Z_R0, -((int)base_h)); // Base has no set bits in lower half.
} else if ((base_h == 0) && (base_l != 0)) {
lgr_if_needed(Z_R0, current);
z_agfi(Z_R0, -(int)base_l);
} else {
int pow2_offset = get_oop_base_complement(Z_R1, ((uint64_t)(intptr_t)base));
add2reg_with_index(Z_R0, pow2_offset, Z_R1, Rop1); // Subtract base by adding complement.
}
if (shift != 0) {
z_srlg(Z_R0, Z_R0, shift);
}
bind(done);
z_cl(Z_R0, disp, Rbase);
#ifdef ASSERT
if (used_R0) preset_reg(Z_R0, 0xb05bUL, 2);
if (used_R1) preset_reg(Z_R1, 0xb06bUL, 2);
#endif
}
BLOCK_COMMENT("} compare klass ptr");
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2024 SAP SE. All rights reserved.
* Copyright (c) 2024 IBM Corporation. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -842,8 +842,7 @@ class MacroAssembler: public Assembler {
void store_klass(Register klass, Register dst_oop, Register ck = noreg); // Klass will get compressed if ck not provided.
void store_klass_gap(Register s, Register dst_oop);
void load_narrow_klass_compact(Register dst, Register src);
// Compares the Klass pointer of an object to a given Klass (which might be narrow,
// depending on UseCompressedClassPointers).
// Compares the narrow Klass pointer of an object to a given narrow Klass
void cmp_klass(Register klass, Register obj, Register tmp);
// Compares the Klass pointer of two objects obj1 and obj2. Result is in the condition flags.
// Uses tmp1 and tmp2 as temporary registers.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2021, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2024 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -82,7 +82,6 @@
static bool narrow_klass_use_complex_address() {
NOT_LP64(ShouldNotCallThis());
assert(UseCompressedClassPointers, "only for compressed klass code");
// TODO HS25: z port if (MatchDecodeNodes) return true;
return false;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -77,23 +77,6 @@ const Register SHIFT_count = rcx; // where count for shift operations must be
#define __ _masm->
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
Register &tmp2) {
if (tmp1 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp1 = extra;
} else if (tmp2 == preserve) {
assert_different_registers(tmp1, tmp2, extra);
tmp2 = extra;
}
assert_different_registers(preserve, tmp1, tmp2);
}
static void select_different_registers(Register preserve,
Register extra,
Register &tmp1,
@ -1309,12 +1292,8 @@ void LIR_Assembler::emit_typecheck_helper(LIR_OpTypeCheck *op, Label* success, L
} else if (obj == klass_RInfo) {
klass_RInfo = dst;
}
if (k->is_loaded() && !UseCompressedClassPointers) {
select_different_registers(obj, dst, k_RInfo, klass_RInfo);
} else {
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
}
Rtmp1 = op->tmp3()->as_register();
select_different_registers(obj, dst, k_RInfo, klass_RInfo, Rtmp1);
assert_different_registers(obj, k_RInfo, klass_RInfo);
@ -1348,12 +1327,8 @@ void LIR_Assembler::emit_typecheck_helper(LIR_OpTypeCheck *op, Label* success, L
if (op->fast_check()) {
// get object class
// not a safepoint as obj null check happens earlier
if (UseCompressedClassPointers) {
__ load_klass(Rtmp1, obj, tmp_load_klass);
__ cmpptr(k_RInfo, Rtmp1);
} else {
__ cmpptr(k_RInfo, Address(obj, oopDesc::klass_offset_in_bytes()));
}
__ load_klass(Rtmp1, obj, tmp_load_klass);
__ cmpptr(k_RInfo, Rtmp1);
__ jcc(Assembler::notEqual, *failure_target);
// successful cast, fall through to profile or jump
} else {
@ -2651,9 +2626,7 @@ void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
// but not necessarily exactly of type default_type.
Label known_ok, halt;
__ mov_metadata(tmp, default_type->constant_encoding());
if (UseCompressedClassPointers) {
__ encode_klass_not_null(tmp, rscratch1);
}
__ encode_klass_not_null(tmp, rscratch1);
if (basic_type != T_OBJECT) {
__ cmp_klass(tmp, dst, tmp2);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -1291,9 +1291,7 @@ void LIRGenerator::do_CheckCast(CheckCast* x) {
}
LIR_Opr reg = rlock_result(x);
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ checkcast(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), info_for_exception, patching_info, stub,
@ -1313,9 +1311,7 @@ void LIRGenerator::do_InstanceOf(InstanceOf* x) {
}
obj.load_item();
LIR_Opr tmp3 = LIR_OprFact::illegalOpr;
if (!x->klass()->is_loaded() || UseCompressedClassPointers) {
tmp3 = new_register(objectType);
}
tmp3 = new_register(objectType);
__ instanceof(reg, obj.result(), x->klass(),
new_register(objectType), new_register(objectType), tmp3,
x->direct_compare(), patching_info, x->profiled_method(), x->profiled_bci());

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -85,14 +85,11 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
if (UseCompactObjectHeaders) {
movptr(t1, Address(klass, Klass::prototype_header_offset()));
movptr(Address(obj, oopDesc::mark_offset_in_bytes()), t1);
} else if (UseCompressedClassPointers) { // Take care not to kill klass
} else { // Take care not to kill klass
movptr(Address(obj, oopDesc::mark_offset_in_bytes()), checked_cast<int32_t>(markWord::prototype().value()));
movptr(t1, klass);
encode_klass_not_null(t1, rscratch1);
movl(Address(obj, oopDesc::klass_offset_in_bytes()), t1);
} else {
movptr(Address(obj, oopDesc::mark_offset_in_bytes()), checked_cast<int32_t>(markWord::prototype().value()));
movptr(Address(obj, oopDesc::klass_offset_in_bytes()), klass);
}
if (len->is_valid()) {
@ -104,7 +101,7 @@ void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register
xorl(t1, t1);
movl(Address(obj, base_offset), t1);
}
} else if (UseCompressedClassPointers && !UseCompactObjectHeaders) {
} else if (!UseCompactObjectHeaders) {
xorptr(t1, t1);
store_klass_gap(obj, t1);
}

View File

@ -985,12 +985,9 @@ int MacroAssembler::ic_check(int end_alignment) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(temp, receiver);
cmpl(temp, Address(data, CompiledICData::speculated_klass_offset()));
} else if (UseCompressedClassPointers) {
} else {
movl(temp, Address(receiver, oopDesc::klass_offset_in_bytes()));
cmpl(temp, Address(data, CompiledICData::speculated_klass_offset()));
} else {
movptr(temp, Address(receiver, oopDesc::klass_offset_in_bytes()));
cmpptr(temp, Address(data, CompiledICData::speculated_klass_offset()));
}
// if inline cache check fails, then jump to runtime routine
@ -5384,11 +5381,9 @@ void MacroAssembler::load_klass(Register dst, Register src, Register tmp) {
if (UseCompactObjectHeaders) {
load_narrow_klass_compact(dst, src);
decode_klass_not_null(dst, tmp);
} else if (UseCompressedClassPointers) {
} else {
movl(dst, Address(src, oopDesc::klass_offset_in_bytes()));
decode_klass_not_null(dst, tmp);
} else {
movptr(dst, Address(src, oopDesc::klass_offset_in_bytes()));
}
}
@ -5396,12 +5391,8 @@ void MacroAssembler::store_klass(Register dst, Register src, Register tmp) {
assert(!UseCompactObjectHeaders, "not with compact headers");
assert_different_registers(src, tmp);
assert_different_registers(dst, tmp);
if (UseCompressedClassPointers) {
encode_klass_not_null(src, tmp);
movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
} else {
movptr(Address(dst, oopDesc::klass_offset_in_bytes()), src);
}
encode_klass_not_null(src, tmp);
movl(Address(dst, oopDesc::klass_offset_in_bytes()), src);
}
void MacroAssembler::cmp_klass(Register klass, Register obj, Register tmp) {
@ -5410,10 +5401,8 @@ void MacroAssembler::cmp_klass(Register klass, Register obj, Register tmp) {
assert_different_registers(klass, obj, tmp);
load_narrow_klass_compact(tmp, obj);
cmpl(klass, tmp);
} else if (UseCompressedClassPointers) {
cmpl(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
} else {
cmpptr(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
cmpl(klass, Address(obj, oopDesc::klass_offset_in_bytes()));
}
}
@ -5424,12 +5413,9 @@ void MacroAssembler::cmp_klasses_from_objects(Register obj1, Register obj2, Regi
load_narrow_klass_compact(tmp1, obj1);
load_narrow_klass_compact(tmp2, obj2);
cmpl(tmp1, tmp2);
} else if (UseCompressedClassPointers) {
} else {
movl(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
cmpl(tmp1, Address(obj2, oopDesc::klass_offset_in_bytes()));
} else {
movptr(tmp1, Address(obj1, oopDesc::klass_offset_in_bytes()));
cmpptr(tmp1, Address(obj2, oopDesc::klass_offset_in_bytes()));
}
}
@ -5478,10 +5464,8 @@ void MacroAssembler::store_heap_oop_null(Address dst) {
void MacroAssembler::store_klass_gap(Register dst, Register src) {
assert(!UseCompactObjectHeaders, "Don't use with compact headers");
if (UseCompressedClassPointers) {
// Store to klass gap in destination
movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
}
// Store to klass gap in destination
movl(Address(dst, oopDesc::klass_gap_offset_in_bytes()), src);
}
#ifdef ASSERT
@ -5671,7 +5655,6 @@ void MacroAssembler::decode_klass_not_null(Register r, Register tmp) {
BLOCK_COMMENT("decode_klass_not_null {");
assert_different_registers(r, tmp);
// Note: it will change flags
assert(UseCompressedClassPointers, "should only be used for compressed headers");
// Cannot assert, unverified entry point counts instructions (see .ad file)
// vtableStubs also counts instructions in pd_code_size_limit.
// Also do not verify_oop as this is called by verify_oop.
@ -5693,7 +5676,6 @@ void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src)
BLOCK_COMMENT("decode_and_move_klass_not_null {");
assert_different_registers(src, dst);
// Note: it will change flags
assert (UseCompressedClassPointers, "should only be used for compressed headers");
// Cannot assert, unverified entry point counts instructions (see .ad file)
// vtableStubs also counts instructions in pd_code_size_limit.
// Also do not verify_oop as this is called by verify_oop.
@ -5750,7 +5732,6 @@ void MacroAssembler::set_narrow_oop(Address dst, jobject obj) {
}
void MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int klass_index = oop_recorder()->find_index(k);
RelocationHolder rspec = metadata_Relocation::spec(klass_index);
@ -5758,7 +5739,6 @@ void MacroAssembler::set_narrow_klass(Register dst, Klass* k) {
}
void MacroAssembler::set_narrow_klass(Address dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int klass_index = oop_recorder()->find_index(k);
RelocationHolder rspec = metadata_Relocation::spec(klass_index);
@ -5784,7 +5764,6 @@ void MacroAssembler::cmp_narrow_oop(Address dst, jobject obj) {
}
void MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int klass_index = oop_recorder()->find_index(k);
RelocationHolder rspec = metadata_Relocation::spec(klass_index);
@ -5792,7 +5771,6 @@ void MacroAssembler::cmp_narrow_klass(Register dst, Klass* k) {
}
void MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
assert (UseCompressedClassPointers, "should only be used for compressed headers");
assert (oop_recorder() != nullptr, "this assembler needs an OopRecorder");
int klass_index = oop_recorder()->find_index(k);
RelocationHolder rspec = metadata_Relocation::spec(klass_index);

View File

@ -351,8 +351,7 @@ class MacroAssembler: public Assembler {
void load_klass(Register dst, Register src, Register tmp);
void store_klass(Register dst, Register src, Register tmp);
// Compares the Klass pointer of an object to a given Klass (which might be narrow,
// depending on UseCompressedClassPointers).
// Compares the narrow Klass pointer of an object to a given narrow Klass.
void cmp_klass(Register klass, Register obj, Register tmp);
// Compares the Klass pointer of two objects obj1 and obj2. Result is in the condition flags.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2021, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -75,7 +75,6 @@
}
static bool narrow_klass_use_complex_address() {
assert(UseCompressedClassPointers, "only for compressed klass code");
return (CompressedKlassPointers::shift() <= 3);
}

View File

@ -2605,13 +2605,8 @@ uint BoxLockNode::size(PhaseRegAlloc *ra_) const
#ifndef PRODUCT
void MachUEPNode::format(PhaseRegAlloc* ra_, outputStream* st) const
{
if (UseCompressedClassPointers) {
st->print_cr("movl rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tcmpl rscratch1, [rax + CompiledICData::speculated_klass_offset()]\t # Inline cache check");
} else {
st->print_cr("movq rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tcmpq rscratch1, [rax + CompiledICData::speculated_klass_offset()]\t # Inline cache check");
}
st->print_cr("movl rscratch1, [j_rarg0 + oopDesc::klass_offset_in_bytes()]\t# compressed klass");
st->print_cr("\tcmpl rscratch1, [rax + CompiledICData::speculated_klass_offset()]\t # Inline cache check");
st->print_cr("\tjne SharedRuntime::_ic_miss_stub");
}
#endif

View File

@ -589,7 +589,6 @@ public:
}
Klass* real_klass() {
assert(UseCompressedClassPointers, "heap archiving requires UseCompressedClassPointers");
return _data._klass;
}

View File

@ -54,7 +54,7 @@ public:
// Can this VM map archived heap region? Currently only G1+compressed{oops,cp}
static bool can_map() {
CDS_JAVA_HEAP_ONLY(return (UseG1GC && UseCompressedClassPointers);)
CDS_JAVA_HEAP_ONLY(return UseG1GC;)
NOT_CDS_JAVA_HEAP(return false;)
}

View File

@ -450,7 +450,6 @@ int AOTMappedHeapWriter::filler_array_length(size_t fill_bytes) {
}
HeapWord* AOTMappedHeapWriter::init_filler_array_at_buffer_top(int array_length, size_t fill_bytes) {
assert(UseCompressedClassPointers, "Archived heap only supported for compressed klasses");
Klass* oak = Universe::objectArrayKlass(); // already relocated to point to archived klass
HeapWord* mem = offset_to_buffered_address<HeapWord*>(_buffer_used);
memset(mem, 0, fill_bytes);
@ -724,7 +723,6 @@ template <typename T> void AOTMappedHeapWriter::mark_oop_pointer(T* buffered_add
}
void AOTMappedHeapWriter::update_header_for_requested_obj(oop requested_obj, oop src_obj, Klass* src_klass) {
assert(UseCompressedClassPointers, "Archived heap only supported for compressed klasses");
narrowKlass nk = ArchiveBuilder::current()->get_requested_narrow_klass(src_klass);
address buffered_addr = requested_addr_to_buffered_addr(cast_from_oop<address>(requested_obj));

View File

@ -250,9 +250,9 @@ static bool shared_base_too_high(char* specified_base, char* aligned_base, size_
static char* compute_shared_base(size_t cds_max) {
char* specified_base = (char*)SharedBaseAddress;
size_t alignment = AOTMetaspace::core_region_alignment();
if (UseCompressedClassPointers && CompressedKlassPointers::needs_class_space()) {
alignment = MAX2(alignment, Metaspace::reserve_alignment());
}
#if INCLUDE_CLASS_SPACE
alignment = MAX2(alignment, Metaspace::reserve_alignment());
#endif
if (SharedBaseAddress == 0) {
// Special meaning of -XX:SharedBaseAddress=0 -> Always map archive at os-selected address.
@ -1637,32 +1637,29 @@ MapArchiveResult AOTMetaspace::map_archives(FileMapInfo* static_mapinfo, FileMap
aot_log_debug(aot)("Failed to reserve spaces (use_requested_addr=%u)", (unsigned)use_requested_addr);
} else {
if (Metaspace::using_class_space()) {
prot_zone_size = protection_zone_size();
}
CLASS_SPACE_ONLY(prot_zone_size = protection_zone_size();)
#ifdef ASSERT
// Some sanity checks after reserving address spaces for archives
// and class space.
assert(archive_space_rs.is_reserved(), "Sanity");
if (Metaspace::using_class_space()) {
assert(archive_space_rs.base() == mapped_base_address &&
archive_space_rs.size() > protection_zone_size(),
"Archive space must lead and include the protection zone");
// Class space must closely follow the archive space. Both spaces
// must be aligned correctly.
assert(class_space_rs.is_reserved() && class_space_rs.size() > 0,
"A class space should have been reserved");
assert(class_space_rs.base() >= archive_space_rs.end(),
"class space should follow the cds archive space");
assert(is_aligned(archive_space_rs.base(),
core_region_alignment()),
"Archive space misaligned");
assert(is_aligned(class_space_rs.base(),
Metaspace::reserve_alignment()),
"class space misaligned");
}
#endif // ASSERT
#if INCLUDE_CLASS_SPACE
assert(archive_space_rs.base() == mapped_base_address &&
archive_space_rs.size() > protection_zone_size(),
"Archive space must lead and include the protection zone");
// Class space must closely follow the archive space. Both spaces
// must be aligned correctly.
assert(class_space_rs.is_reserved() && class_space_rs.size() > 0,
"A class space should have been reserved");
assert(class_space_rs.base() >= archive_space_rs.end(),
"class space should follow the cds archive space");
assert(is_aligned(archive_space_rs.base(),
core_region_alignment()),
"Archive space misaligned");
assert(is_aligned(class_space_rs.base(),
Metaspace::reserve_alignment()),
"class space misaligned");
#endif // INCLUDE_CLASS_SPACE
aot_log_info(aot)("Reserved archive_space_rs [" INTPTR_FORMAT " - " INTPTR_FORMAT "] (%zu) bytes%s",
p2i(archive_space_rs.base()), p2i(archive_space_rs.end()), archive_space_rs.size(),
@ -1764,62 +1761,60 @@ MapArchiveResult AOTMetaspace::map_archives(FileMapInfo* static_mapinfo, FileMap
if (result == MAP_ARCHIVE_SUCCESS) {
SharedBaseAddress = (size_t)mapped_base_address;
#ifdef _LP64
if (Metaspace::using_class_space()) {
assert(prot_zone_size > 0 &&
*(mapped_base_address) == 'P' &&
*(mapped_base_address + prot_zone_size - 1) == 'P',
"Protection zone was overwritten?");
// Set up ccs in metaspace.
Metaspace::initialize_class_space(class_space_rs);
#if INCLUDE_CLASS_SPACE
assert(prot_zone_size > 0 &&
*(mapped_base_address) == 'P' &&
*(mapped_base_address + prot_zone_size - 1) == 'P',
"Protection zone was overwritten?");
// Set up ccs in metaspace.
Metaspace::initialize_class_space(class_space_rs);
// Set up compressed Klass pointer encoding: the encoding range must
// cover both archive and class space.
const address klass_range_start = (address)mapped_base_address;
const size_t klass_range_size = (address)class_space_rs.end() - klass_range_start;
if (INCLUDE_CDS_JAVA_HEAP || UseCompactObjectHeaders) {
// The CDS archive may contain narrow Klass IDs that were precomputed at archive generation time:
// - every archived java object header (only if INCLUDE_CDS_JAVA_HEAP)
// - every archived Klass' prototype (only if +UseCompactObjectHeaders)
//
// In order for those IDs to still be valid, we need to dictate base and shift: base should be the
// mapping start (including protection zone), shift should be the shift used at archive generation time.
CompressedKlassPointers::initialize_for_given_encoding(
klass_range_start, klass_range_size,
klass_range_start, ArchiveBuilder::precomputed_narrow_klass_shift() // precomputed encoding, see ArchiveBuilder
);
assert(CompressedKlassPointers::base() == klass_range_start, "must be");
} else {
// Let JVM freely choose encoding base and shift
CompressedKlassPointers::initialize(klass_range_start, klass_range_size);
assert(CompressedKlassPointers::base() == nullptr ||
CompressedKlassPointers::base() == klass_range_start, "must be");
}
// Establish protection zone, but only if we need one
if (CompressedKlassPointers::base() == klass_range_start) {
CompressedKlassPointers::establish_protection_zone(klass_range_start, prot_zone_size);
}
// Set up compressed Klass pointer encoding: the encoding range must
// cover both archive and class space.
const address klass_range_start = (address)mapped_base_address;
const size_t klass_range_size = (address)class_space_rs.end() - klass_range_start;
if (INCLUDE_CDS_JAVA_HEAP || UseCompactObjectHeaders) {
// The CDS archive may contain narrow Klass IDs that were precomputed at archive generation time:
// - every archived java object header (only if INCLUDE_CDS_JAVA_HEAP)
// - every archived Klass' prototype (only if +UseCompactObjectHeaders)
//
// In order for those IDs to still be valid, we need to dictate base and shift: base should be the
// mapping start (including protection zone), shift should be the shift used at archive generation time.
CompressedKlassPointers::initialize_for_given_encoding(
klass_range_start, klass_range_size,
klass_range_start, ArchiveBuilder::precomputed_narrow_klass_shift() // precomputed encoding, see ArchiveBuilder
);
assert(CompressedKlassPointers::base() == klass_range_start, "must be");
} else {
// Let JVM freely choose encoding base and shift
CompressedKlassPointers::initialize(klass_range_start, klass_range_size);
assert(CompressedKlassPointers::base() == nullptr ||
CompressedKlassPointers::base() == klass_range_start, "must be");
}
// Establish protection zone, but only if we need one
if (CompressedKlassPointers::base() == klass_range_start) {
CompressedKlassPointers::establish_protection_zone(klass_range_start, prot_zone_size);
}
if (static_mapinfo->can_use_heap_region()) {
if (static_mapinfo->object_streaming_mode()) {
HeapShared::initialize_loading_mode(HeapArchiveMode::_streaming);
} else {
// map_or_load_heap_region() compares the current narrow oop and klass encodings
// with the archived ones, so it must be done after all encodings are determined.
static_mapinfo->map_or_load_heap_region();
HeapShared::initialize_loading_mode(HeapArchiveMode::_mapping);
}
if (static_mapinfo->can_use_heap_region()) {
if (static_mapinfo->object_streaming_mode()) {
HeapShared::initialize_loading_mode(HeapArchiveMode::_streaming);
} else {
FileMapRegion* r = static_mapinfo->region_at(AOTMetaspace::hp);
if (r->used() > 0) {
AOTMetaspace::report_loading_error("Cannot use CDS heap data.");
}
if (!CDSConfig::is_dumping_static_archive()) {
CDSConfig::stop_using_full_module_graph("No CDS heap data");
}
// map_or_load_heap_region() compares the current narrow oop and klass encodings
// with the archived ones, so it must be done after all encodings are determined.
static_mapinfo->map_or_load_heap_region();
HeapShared::initialize_loading_mode(HeapArchiveMode::_mapping);
}
} else {
FileMapRegion* r = static_mapinfo->region_at(AOTMetaspace::hp);
if (r->used() > 0) {
AOTMetaspace::report_loading_error("Cannot use CDS heap data.");
}
if (!CDSConfig::is_dumping_static_archive()) {
CDSConfig::stop_using_full_module_graph("No CDS heap data");
}
}
#endif // _LP64
#endif // INCLUDE_CLASS_SPACE
log_info(aot)("initial optimized module handling: %s", CDSConfig::is_using_optimized_module_handling() ? "enabled" : "disabled");
log_info(aot)("initial full module graph: %s", CDSConfig::is_using_full_module_graph() ? "enabled" : "disabled");
} else {
@ -1852,8 +1847,13 @@ MapArchiveResult AOTMetaspace::map_archives(FileMapInfo* static_mapinfo, FileMap
// (The gap may result from different alignment requirements between metaspace
// and CDS)
//
// If UseCompressedClassPointers is disabled, only one address space will be
// reserved:
// The range encompassing both spaces will be suitable to en/decode narrow Klass
// pointers: the base will be valid for encoding the range [Base, End) and not
// surpass the max. range for that encoding.
//
// On 32-bit, a "narrow" Klass is just the pointer itself, and the Klass encoding
// range encompasses the whole address range. Consequently, we can "decode" and
// "encode" any pointer anywhere, and so are free to place the CDS archive anywhere:
//
// +-- Base address End
// | |
@ -1867,27 +1867,21 @@ MapArchiveResult AOTMetaspace::map_archives(FileMapInfo* static_mapinfo, FileMap
// use_archive_base_addr address is false, this base address is determined
// by the platform.
//
// If UseCompressedClassPointers=1, the range encompassing both spaces will be
// suitable to en/decode narrow Klass pointers: the base will be valid for
// encoding, the range [Base, End) and not surpass the max. range for that encoding.
//
// Return:
//
// - On success:
// - total_space_rs will be reserved as whole for archive_space_rs and
// class_space_rs if UseCompressedClassPointers is true.
// class_space_rs on 64-bit.
// On Windows, try reserve archive_space_rs and class_space_rs
// separately first if use_archive_base_addr is true.
// - archive_space_rs will be reserved and large enough to host static and
// if needed dynamic archive: [Base, A).
// archive_space_rs.base and size will be aligned to CDS reserve
// granularity.
// - class_space_rs: If UseCompressedClassPointers=1, class_space_rs will
// be reserved. Its start address will be aligned to metaspace reserve
// alignment, which may differ from CDS alignment. It will follow the cds
// archive space, close enough such that narrow class pointer encoding
// covers both spaces.
// If UseCompressedClassPointers=0, class_space_rs remains unreserved.
// - class_space_rs: On 64-bit, class_space_rs will be reserved. Its start
// address will be aligned to metaspace reserve alignment, which may differ
// from CDS alignment. It will follow the cds archive space, close enough
// such that narrow class pointer encoding covers both spaces.
// - On error: null is returned and the spaces remain unreserved.
char* AOTMetaspace::reserve_address_space_for_archives(FileMapInfo* static_mapinfo,
FileMapInfo* dynamic_mapinfo,
@ -1903,32 +1897,34 @@ char* AOTMetaspace::reserve_address_space_for_archives(FileMapInfo* static_mapin
size_t archive_end_offset = (dynamic_mapinfo == nullptr) ? static_mapinfo->mapping_end_offset() : dynamic_mapinfo->mapping_end_offset();
size_t archive_space_size = align_up(archive_end_offset, archive_space_alignment);
if (!Metaspace::using_class_space()) {
// Get the simple case out of the way first:
// no compressed class space, simple allocation.
#if !INCLUDE_CLASS_SPACE
// When running without class space, requested archive base should be aligned to cds core alignment.
assert(is_aligned(base_address, archive_space_alignment),
"Archive base address unaligned: " PTR_FORMAT ", needs alignment: %zu.",
p2i(base_address), archive_space_alignment);
// Get the simple case out of the way first:
// no compressed class space, simple allocation.
archive_space_rs = MemoryReserver::reserve((char*)base_address,
archive_space_size,
archive_space_alignment,
os::vm_page_size(),
mtNone);
if (archive_space_rs.is_reserved()) {
assert(base_address == nullptr ||
(address)archive_space_rs.base() == base_address, "Sanity");
// Register archive space with NMT.
MemTracker::record_virtual_memory_tag(archive_space_rs, mtClassShared);
return archive_space_rs.base();
}
return nullptr;
// When running without class space, requested archive base should be aligned to cds core alignment.
assert(is_aligned(base_address, archive_space_alignment),
"Archive base address unaligned: " PTR_FORMAT ", needs alignment: %zu.",
p2i(base_address), archive_space_alignment);
archive_space_rs = MemoryReserver::reserve((char*)base_address,
archive_space_size,
archive_space_alignment,
os::vm_page_size(),
mtNone);
if (archive_space_rs.is_reserved()) {
assert(base_address == nullptr ||
(address)archive_space_rs.base() == base_address, "Sanity");
// Register archive space with NMT.
MemTracker::record_virtual_memory_tag(archive_space_rs, mtClassShared);
return archive_space_rs.base();
}
#ifdef _LP64
return nullptr;
#else
// INCLUDE_CLASS_SPACE=1
// Complex case: two spaces adjacent to each other, both to be addressable
// with narrow class pointers.
// We reserve the whole range spanning both spaces, then split that range up.
@ -2040,11 +2036,7 @@ char* AOTMetaspace::reserve_address_space_for_archives(FileMapInfo* static_mapin
return archive_space_rs.base();
#else
ShouldNotReachHere();
return nullptr;
#endif
#endif // INCLUDE_CLASS_SPACE
}
void AOTMetaspace::release_reserved_spaces(ReservedSpace& total_space_rs,

View File

@ -369,7 +369,6 @@ template <typename T> void AOTStreamedHeapWriter::map_oop_field_in_buffer(oop ob
}
void AOTStreamedHeapWriter::update_header_for_buffered_addr(address buffered_addr, oop src_obj, Klass* src_klass) {
assert(UseCompressedClassPointers, "Archived heap only supported for compressed klasses");
narrowKlass nk = ArchiveBuilder::current()->get_requested_narrow_klass(src_klass);
markWord mw = markWord::prototype();

View File

@ -1092,20 +1092,17 @@ class RelocateBufferToRequested : public BitMapClosure {
}
};
#ifdef _LP64
int ArchiveBuilder::precomputed_narrow_klass_shift() {
// Legacy Mode:
// We use 32 bits for narrowKlass, which should cover the full 4G Klass range. Shift can be 0.
// Standard Mode:
// We use 32 bits for narrowKlass, which should cover a full 4G Klass range. Shift can be 0.
// CompactObjectHeader Mode:
// narrowKlass is much smaller, and we use the highest possible shift value to later get the maximum
// Klass encoding range.
//
// Note that all of this may change in the future, if we decide to correct the pre-calculated
// narrow Klass IDs at archive load time.
assert(UseCompressedClassPointers, "Only needed for compressed class pointers");
return UseCompactObjectHeaders ? CompressedKlassPointers::max_shift() : 0;
}
#endif // _LP64
void ArchiveBuilder::relocate_to_requested() {
if (!ro_region()->is_packed()) {

View File

@ -484,7 +484,6 @@ public:
void print_stats();
void report_out_of_space(const char* name, size_t needed_bytes);
#ifdef _LP64
// The CDS archive contains pre-computed narrow Klass IDs. It carries them in the headers of
// archived heap objects. With +UseCompactObjectHeaders, it also carries them in prototypes
// in Klass.
@ -504,7 +503,6 @@ public:
// TinyClassPointer Mode:
// We use the highest possible shift value to maximize the encoding range size.
static int precomputed_narrow_klass_shift();
#endif // _LP64
};

View File

@ -360,8 +360,8 @@ char* DumpRegion::allocate_metaspace_obj(size_t num_bytes, address src, Metaspac
bool is_instance_class = is_class && ((Klass*)src)->is_instance_klass();
#ifdef _LP64
// More strict alignments needed for UseCompressedClassPointers
if (is_class && UseCompressedClassPointers) {
// More strict alignments needed for Klass objects
if (is_class) {
size_t klass_alignment = checked_cast<size_t>(nth_bit(ArchiveBuilder::precomputed_narrow_klass_shift()));
alignment = MAX2(alignment, klass_alignment);
precond(is_aligned(alignment, SharedSpaceObjectAlignment));

View File

@ -891,10 +891,6 @@ const char* CDSConfig::type_of_archive_being_written() {
// If an incompatible VM options is found, return a text message that explains why
static const char* check_options_incompatible_with_dumping_heap() {
#if INCLUDE_CDS_JAVA_HEAP
if (!UseCompressedClassPointers) {
return "UseCompressedClassPointers must be true";
}
return nullptr;
#else
return "JVM not configured for writing Java heap objects";

View File

@ -225,15 +225,9 @@ void FileMapHeader::populate(FileMapInfo *info, size_t core_region_alignment,
}
#endif
_compressed_oops = UseCompressedOops;
_compressed_class_ptrs = UseCompressedClassPointers;
if (UseCompressedClassPointers) {
#ifdef _LP64
_narrow_klass_pointer_bits = CompressedKlassPointers::narrow_klass_pointer_bits();
_narrow_klass_shift = ArchiveBuilder::precomputed_narrow_klass_shift();
#endif
} else {
_narrow_klass_pointer_bits = _narrow_klass_shift = -1;
}
_narrow_klass_pointer_bits = CompressedKlassPointers::narrow_klass_pointer_bits();
_narrow_klass_shift = ArchiveBuilder::precomputed_narrow_klass_shift();
// Which JIT compier is used
_compiler_type = (u1)CompilerConfig::compiler_type();
_type_profile_level = TypeProfileLevel;
@ -295,7 +289,6 @@ void FileMapHeader::print(outputStream* st) {
st->print_cr("- max_heap_size: %zu", _max_heap_size);
st->print_cr("- narrow_oop_mode: %d", _narrow_oop_mode);
st->print_cr("- compressed_oops: %d", _compressed_oops);
st->print_cr("- compressed_class_ptrs: %d", _compressed_class_ptrs);
st->print_cr("- narrow_klass_pointer_bits: %d", _narrow_klass_pointer_bits);
st->print_cr("- narrow_klass_shift: %d", _narrow_klass_shift);
st->print_cr("- cloned_vtables: %u", cast_to_u4(_cloned_vtables));
@ -1926,11 +1919,12 @@ bool FileMapHeader::validate() {
_has_platform_or_app_classes = false;
}
aot_log_info(aot)("The %s was created with UseCompressedOops = %d, UseCompressedClassPointers = %d, UseCompactObjectHeaders = %d",
file_type, compressed_oops(), compressed_class_pointers(), compact_headers());
if (compressed_oops() != UseCompressedOops || compressed_class_pointers() != UseCompressedClassPointers) {
aot_log_warning(aot)("Unable to use %s.\nThe saved state of UseCompressedOops and UseCompressedClassPointers is "
"different from runtime, CDS will be disabled.", file_type);
aot_log_info(aot)("The %s was created with UseCompressedOops = %d, UseCompactObjectHeaders = %d",
file_type, compressed_oops(), compact_headers());
if (compressed_oops() != UseCompressedOops) {
aot_log_warning(aot)("Unable to use %s.\nThe saved state of UseCompressedOops (%d) is "
"different from runtime (%d), CDS will be disabled.", file_type,
compressed_oops(), UseCompressedOops);
return false;
}

View File

@ -120,7 +120,6 @@ private:
CompressedOops::Mode _narrow_oop_mode; // compressed oop encoding mode
bool _object_streaming_mode; // dump was created for object streaming
bool _compressed_oops; // save the flag UseCompressedOops
bool _compressed_class_ptrs; // save the flag UseCompressedClassPointers
int _narrow_klass_pointer_bits; // save number of bits in narrowKlass
int _narrow_klass_shift; // save shift width used to pre-compute narrowKlass IDs in archived heap objects
narrowPtr _cloned_vtables; // The address of the first cloned vtable
@ -200,7 +199,6 @@ public:
bool has_platform_or_app_classes() const { return _has_platform_or_app_classes; }
bool has_aot_linked_classes() const { return _has_aot_linked_classes; }
bool compressed_oops() const { return _compressed_oops; }
bool compressed_class_pointers() const { return _compressed_class_ptrs; }
int narrow_klass_pointer_bits() const { return _narrow_klass_pointer_bits; }
int narrow_klass_shift() const { return _narrow_klass_shift; }
bool has_full_module_graph() const { return _has_full_module_graph; }

View File

@ -89,11 +89,9 @@ DEBUG_ONLY(bool SystemDictionaryShared::_class_loading_may_happen = true;)
#ifdef ASSERT
static void check_klass_after_loading(const Klass* k) {
#ifdef _LP64
if (k != nullptr && UseCompressedClassPointers) {
if (k != nullptr) {
CompressedKlassPointers::check_encodable(k);
}
#endif
}
#endif

View File

@ -388,9 +388,6 @@ void AOTCodeCache::Config::record(uint cpu_features_offset) {
if (UseCompressedOops) {
_flags |= compressedOops;
}
if (UseCompressedClassPointers) {
_flags |= compressedClassPointers;
}
if (UseTLAB) {
_flags |= useTLAB;
}
@ -474,10 +471,6 @@ bool AOTCodeCache::Config::verify(AOTCodeCache* cache) const {
return false;
}
if (((_flags & compressedClassPointers) != 0) != UseCompressedClassPointers) {
log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with UseCompressedClassPointers = %s", UseCompressedClassPointers ? "false" : "true");
return false;
}
if (_compressedKlassShift != (uint)CompressedKlassPointers::shift()) {
log_debug(aot, codecache, init)("AOT Code Cache disabled: it was created with CompressedKlassPointers::shift() = %d vs current %d", _compressedKlassShift, CompressedKlassPointers::shift());
return false;

View File

@ -177,12 +177,11 @@ protected:
none = 0,
debugVM = 1,
compressedOops = 2,
compressedClassPointers = 4,
useTLAB = 8,
systemClassAssertions = 16,
userClassAssertions = 32,
enableContendedPadding = 64,
restrictContendedPadding = 128
useTLAB = 4,
systemClassAssertions = 8,
userClassAssertions = 16,
enableContendedPadding = 32,
restrictContendedPadding = 64
};
uint _flags;
uint _cpu_features_offset; // offset in the cache where cpu features are stored

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -76,11 +76,7 @@ CompiledICData::CompiledICData()
// Inline cache callsite info is initialized once the first time it is resolved
void CompiledICData::initialize(CallInfo* call_info, Klass* receiver_klass) {
_speculated_method = call_info->selected_method();
if (UseCompressedClassPointers) {
_speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
} else {
_speculated_klass = (uintptr_t)receiver_klass;
}
_speculated_klass = (uintptr_t)CompressedKlassPointers::encode_not_null(receiver_klass);
if (call_info->call_kind() == CallInfo::itable_call) {
assert(call_info->resolved_method() != nullptr, "virtual or interface method must be found");
_itable_defc_klass = call_info->resolved_method()->method_holder();
@ -133,12 +129,7 @@ Klass* CompiledICData::speculated_klass() const {
if (is_speculated_klass_unloaded()) {
return nullptr;
}
if (UseCompressedClassPointers) {
return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
} else {
return (Klass*)_speculated_klass;
}
return CompressedKlassPointers::decode_not_null((narrowKlass)_speculated_klass);
}
//-----------------------------------------------------------------------------

View File

@ -708,7 +708,6 @@ int BarrierSetC2::arraycopy_payload_base_offset(bool is_array) {
// 12 - 64-bit VM, compressed klass
// 16 - 64-bit VM, normal klass
if (base_off % BytesPerLong != 0) {
assert(UseCompressedClassPointers, "");
assert(!UseCompactObjectHeaders, "");
if (is_array) {
// Exclude length to copy by 8 bytes words.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2012, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -122,11 +122,10 @@ void GCTracer::report_gc_heap_summary(GCWhen::Type when, const GCHeapSummary& he
void GCTracer::report_metaspace_summary(GCWhen::Type when, const MetaspaceSummary& summary) const {
send_meta_space_summary_event(when, summary);
send_metaspace_chunk_free_list_summary(when, Metaspace::NonClassType, summary.metaspace_chunk_free_list_summary());
if (UseCompressedClassPointers) {
send_metaspace_chunk_free_list_summary(when, Metaspace::ClassType, summary.class_chunk_free_list_summary());
}
#if INCLUDE_CLASS_SPACE
send_metaspace_chunk_free_list_summary(when, Metaspace::ClassType, summary.class_chunk_free_list_summary());
#endif
}
void YoungGCTracer::report_gc_end_impl(const Ticks& timestamp, TimePartitions* time_partitions) {

View File

@ -559,26 +559,24 @@ bool ShenandoahAsserts::extract_klass_safely(oop obj, narrowKlass& nk, const Kla
if (!os::is_readable_pointer(obj)) {
return false;
}
if (UseCompressedClassPointers) {
if (UseCompactObjectHeaders) { // look in forwardee
markWord mark = obj->mark();
if (mark.is_marked()) {
oop fwd = cast_to_oop(mark.clear_lock_bits().to_pointer());
if (!os::is_readable_pointer(fwd)) {
return false;
}
mark = fwd->mark();
if (UseCompactObjectHeaders) { // look in forwardee
markWord mark = obj->mark();
if (mark.is_marked()) {
oop fwd = cast_to_oop(mark.clear_lock_bits().to_pointer());
if (!os::is_readable_pointer(fwd)) {
return false;
}
nk = mark.narrow_klass();
} else {
nk = obj->narrow_klass();
mark = fwd->mark();
}
if (!CompressedKlassPointers::is_valid_narrow_klass_id(nk)) {
return false;
}
k = CompressedKlassPointers::decode_not_null_without_asserts(nk);
nk = mark.narrow_klass();
} else {
k = obj->klass();
nk = obj->narrow_klass();
}
if (!CompressedKlassPointers::is_valid_narrow_klass_id(nk)) {
return false;
}
k = CompressedKlassPointers::decode_not_null_without_asserts(nk);
return k != nullptr;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -442,7 +442,7 @@ void ZBarrierSetC2::clone_at_expansion(PhaseMacroExpand* phase, ArrayCopyNode* a
assert(src_offset == dest_offset, "should be equal");
const jlong offset = src_offset->get_long();
if (offset != arrayOopDesc::base_offset_in_bytes(T_OBJECT)) {
assert(!UseCompressedClassPointers || UseCompactObjectHeaders, "should only happen without compressed class pointers");
assert(UseCompactObjectHeaders, "should only happen with COH");
assert((arrayOopDesc::base_offset_in_bytes(T_OBJECT) - offset) == BytesPerLong, "unexpected offset");
length = phase->transform_later(new SubLNode(length, phase->longcon(1))); // Size is in longs
src_offset = phase->longcon(arrayOopDesc::base_offset_in_bytes(T_OBJECT));

View File

@ -50,11 +50,7 @@ define zpo
end
printf "\t Page: %llu\n", ((uintptr_t)$obj & ZAddressOffsetMask) >> ZGranuleSizeShift
x/16gx $obj
if (UseCompressedClassPointers)
set $klass = (Klass*)(void*)((uintptr_t)CompressedKlassPointers::_base +((uintptr_t)$obj->_metadata->_compressed_klass << CompressedKlassPointers::_shift))
else
set $klass = $obj->_metadata->_klass
end
set $klass = (Klass*)(void*)((uintptr_t)CompressedKlassPointers::_base +((uintptr_t)$obj->_compressed_klass << CompressedKlassPointers::_shift))
printf "Mark: 0x%016llx\tKlass: %s\n", (uintptr_t)$obj->_mark, (char*)$klass->_name->_body
end

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -75,7 +75,7 @@ static size_t element_size(bool compressed) {
}
static bool can_compress_element(traceid id) {
return Metaspace::using_class_space() && id < uncompressed_threshold;
return INCLUDE_CLASS_SPACE == 1 && id < uncompressed_threshold;
}
static size_t element_size(const Klass* klass) {

View File

@ -206,13 +206,8 @@ void CompilerToVM::Data::initialize(JVMCI_TRAPS) {
Universe_narrow_oop_base = nullptr;
Universe_narrow_oop_shift = 0;
}
if (UseCompressedClassPointers) {
Universe_narrow_klass_base = CompressedKlassPointers::base();
Universe_narrow_klass_shift = CompressedKlassPointers::shift();
} else {
Universe_narrow_klass_base = nullptr;
Universe_narrow_klass_shift = 0;
}
Universe_narrow_klass_base = CompressedKlassPointers::base();
Universe_narrow_klass_shift = CompressedKlassPointers::shift();
Universe_non_oop_bits = Universe::non_oop_word();
Universe_verify_oop_mask = Universe::verify_oop_mask();
Universe_verify_oop_bits = Universe::verify_oop_bits();
@ -390,7 +385,6 @@ JVMCIObjectArray CompilerToVM::initialize_intrinsics(JVMCI_TRAPS) {
X86_ONLY(do_int_flag(UseAVX)) \
do_bool_flag(UseCRC32Intrinsics) \
do_bool_flag(UseAdler32Intrinsics) \
do_bool_flag(UseCompressedClassPointers) \
do_bool_flag(UseCompressedOops) \
X86_ONLY(do_bool_flag(UseCountLeadingZerosInstruction)) \
X86_ONLY(do_bool_flag(UseCountTrailingZerosInstruction)) \

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -342,7 +342,7 @@
volatile_nonstatic_field(ObjectMonitor, _succ, int64_t) \
\
volatile_nonstatic_field(oopDesc, _mark, markWord) \
volatile_nonstatic_field(oopDesc, _metadata._klass, Klass*) \
volatile_nonstatic_field(oopDesc, _compressed_klass, narrowKlass) \
\
static_field(StubRoutines, _verify_oop_count, jint) \
\

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -162,10 +162,12 @@ void ClassLoaderMetaspace::deallocate(MetaWord* ptr, size_t word_size) {
MetaBlock bl(ptr, word_size);
// Add to class arena only if block is usable for encodable Klass storage.
MetaspaceArena* receiving_arena = non_class_space_arena();
if (Metaspace::using_class_space() && Metaspace::is_in_class_space(ptr) &&
#if INCLUDE_CLASS_SPACE
if (Metaspace::is_in_class_space(ptr) &&
is_aligned(ptr, class_space_arena()->allocation_alignment_bytes())) {
receiving_arena = class_space_arena();
}
#endif
receiving_arena->deallocate(bl);
DEBUG_ONLY(InternalStats::inc_num_deallocs();)
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -40,9 +40,10 @@ namespace metaspace {
// A ClassLoaderMetaspace manages MetaspaceArena(s) for a CLD.
//
// A CLD owns one MetaspaceArena if UseCompressedClassPointers is false. Otherwise
// it owns two - one for the Klass* objects from the class space, one for the other
// types of MetaspaceObjs from the non-class space.
// 64-bit:
//
// A CLD owns two MetaspaceArenas - one for the Klass* objects from the class space,
// one for the other types of MetaspaceObjs from the non-class space.
//
// +------+ +----------------------+ +-------------------+
// | CLD | ---> | ClassLoaderMetaspace | ----> | (non class) Arena |
@ -58,6 +59,11 @@ namespace metaspace {
// ^
// alloc top
//
// 32-bit:
//
// A CLD owns just one MetaspaceArena. In that arena all metadata - Klass and other -
// are placed.
class ClassLoaderMetaspace : public CHeapObj<mtClass> {
friend class metaspace::ClmsTester; // for gtests
@ -67,11 +73,10 @@ class ClassLoaderMetaspace : public CHeapObj<mtClass> {
const Metaspace::MetaspaceType _space_type;
// Arena for allocations from non-class metaspace
// (resp. for all allocations if -XX:-UseCompressedClassPointers).
metaspace::MetaspaceArena* _non_class_space_arena;
// Arena for allocations from class space
// (null if -XX:-UseCompressedClassPointers).
// (null for 32-bit).
metaspace::MetaspaceArena* _class_space_arena;
Mutex* lock() const { return _lock; }

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2021 SAP SE. All rights reserved.
* Copyright (c) 2023, 2025, Red Hat, Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
@ -166,33 +166,33 @@ void MetaspaceUtils::print_metaspace_change(const MetaspaceCombinedStats& pre_me
// it is a constant (to uninformed users, often confusingly large). For non-class space, it would
// be interesting since free chunks can be uncommitted, but for now it is left out.
if (Metaspace::using_class_space()) {
log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
HEAP_CHANGE_FORMAT" "
HEAP_CHANGE_FORMAT,
HEAP_CHANGE_FORMAT_ARGS("Metaspace",
pre_meta_values.used(),
pre_meta_values.committed(),
meta_values.used(),
meta_values.committed()),
HEAP_CHANGE_FORMAT_ARGS("NonClass",
pre_meta_values.non_class_used(),
pre_meta_values.non_class_committed(),
meta_values.non_class_used(),
meta_values.non_class_committed()),
HEAP_CHANGE_FORMAT_ARGS("Class",
pre_meta_values.class_used(),
pre_meta_values.class_committed(),
meta_values.class_used(),
meta_values.class_committed()));
} else {
log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
HEAP_CHANGE_FORMAT_ARGS("Metaspace",
pre_meta_values.used(),
pre_meta_values.committed(),
meta_values.used(),
meta_values.committed()));
}
#if INCLUDE_CLASS_SPACE
log_info(gc, metaspace)(HEAP_CHANGE_FORMAT" "
HEAP_CHANGE_FORMAT" "
HEAP_CHANGE_FORMAT,
HEAP_CHANGE_FORMAT_ARGS("Metaspace",
pre_meta_values.used(),
pre_meta_values.committed(),
meta_values.used(),
meta_values.committed()),
HEAP_CHANGE_FORMAT_ARGS("NonClass",
pre_meta_values.non_class_used(),
pre_meta_values.non_class_committed(),
meta_values.non_class_used(),
meta_values.non_class_committed()),
HEAP_CHANGE_FORMAT_ARGS("Class",
pre_meta_values.class_used(),
pre_meta_values.class_committed(),
meta_values.class_used(),
meta_values.class_committed()));
#else
log_info(gc, metaspace)(HEAP_CHANGE_FORMAT,
HEAP_CHANGE_FORMAT_ARGS("Metaspace",
pre_meta_values.used(),
pre_meta_values.committed(),
meta_values.used(),
meta_values.committed()));
#endif // INCLUDE_CLASS_SPACE
}
// This will print out a basic metaspace usage report but
@ -226,41 +226,36 @@ void MetaspaceUtils::print_on(outputStream* out) {
stats.committed()/K,
stats.reserved()/K);
if (Metaspace::using_class_space()) {
StreamIndentor si(out, 1);
out->print("class space ");
out->fill_to(17);
out->print_cr("used %zuK, "
"committed %zuK, "
"reserved %zuK",
stats.class_space_stats().used()/K,
stats.class_space_stats().committed()/K,
stats.class_space_stats().reserved()/K);
}
#if INCLUDE_CLASS_SPACE
StreamIndentor si(out, 1);
out->print("class space ");
out->fill_to(17);
out->print_cr("used %zuK, "
"committed %zuK, "
"reserved %zuK",
stats.class_space_stats().used()/K,
stats.class_space_stats().committed()/K,
stats.class_space_stats().reserved()/K);
#endif // INCLUDE_CLASS_SPACE
}
#ifdef ASSERT
void MetaspaceUtils::verify() {
if (Metaspace::initialized()) {
// Verify non-class chunkmanager...
ChunkManager* cm = ChunkManager::chunkmanager_nonclass();
cm->verify();
// ... and space list.
VirtualSpaceList* vsl = VirtualSpaceList::vslist_nonclass();
vsl->verify();
if (Metaspace::using_class_space()) {
// If we use compressed class pointers, verify class chunkmanager...
cm = ChunkManager::chunkmanager_class();
cm->verify();
// ... and class spacelist.
vsl = VirtualSpaceList::vslist_class();
vsl->verify();
}
#if INCLUDE_CLASS_SPACE
cm = ChunkManager::chunkmanager_class();
cm->verify();
vsl = VirtualSpaceList::vslist_class();
vsl->verify();
#endif // INCLUDE_CLASS_SPACE
}
}
#endif
@ -387,7 +382,8 @@ void MetaspaceGC::post_initialize() {
bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
// Check if the compressed class space is full.
if (is_class && Metaspace::using_class_space()) {
#if INCLUDE_CLASS_SPACE
if (is_class) {
size_t class_committed = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
if (class_committed + word_size * BytesPerWord > CompressedClassSpaceSize) {
log_trace(gc, metaspace, freelist)("Cannot expand %s metaspace by %zu words (CompressedClassSpaceSize = %zu words)",
@ -395,6 +391,7 @@ bool MetaspaceGC::can_expand(size_t word_size, bool is_class) {
return false;
}
}
#endif // INCLUDE_CLASS_SPACE
// Check if the user has imposed a limit on the metaspace memory.
size_t committed_bytes = MetaspaceUtils::committed_bytes();
@ -548,7 +545,7 @@ const void* Metaspace::_class_space_end = nullptr;
bool Metaspace::initialized() {
return metaspace::MetaspaceContext::context_nonclass() != nullptr
LP64_ONLY(&& (using_class_space() ? Metaspace::class_space_is_initialized() : true));
CLASS_SPACE_ONLY(&& Metaspace::class_space_is_initialized());
}
#ifdef _LP64
@ -566,9 +563,9 @@ void Metaspace::print_compressed_class_space(outputStream* st) {
// Given a prereserved space, use that to set up the compressed class space list.
void Metaspace::initialize_class_space(ReservedSpace rs) {
STATIC_ASSERT(INCLUDE_CLASS_SPACE == 1);
assert(rs.size() >= CompressedClassSpaceSize,
"%zu != %zu", rs.size(), CompressedClassSpaceSize);
assert(using_class_space(), "Must be using class space");
assert(rs.size() == CompressedClassSpaceSize, "%zu != %zu",
rs.size(), CompressedClassSpaceSize);
@ -658,49 +655,51 @@ void Metaspace::ergo_initialize() {
MaxMetaspaceSize = MAX2(MaxMetaspaceSize, commit_alignment());
if (UseCompressedClassPointers) {
// Let Class Space not be larger than 80% of MaxMetaspaceSize. Note that is
// grossly over-dimensioned for most usage scenarios; typical ratio of
// class space : non class space usage is about 1:6. With many small classes,
// it can get as low as 1:2. It is not a big deal though since ccs is only
// reserved and will be committed on demand only.
const size_t max_ccs_size = 8 * (MaxMetaspaceSize / 10);
#if INCLUDE_CLASS_SPACE
// Sanity check.
const size_t max_klass_range = CompressedKlassPointers::max_klass_range_size();
assert(max_klass_range >= reserve_alignment(),
"Klass range (%zu) must cover at least a full root chunk (%zu)",
max_klass_range, reserve_alignment());
// Let Class Space not be larger than 80% of MaxMetaspaceSize. Note that is
// grossly over-dimensioned for most usage scenarios; typical ratio of
// class space : non class space usage is about 1:6. With many small classes,
// it can get as low as 1:2. It is not a big deal though since ccs is only
// reserved and will be committed on demand only.
const size_t max_ccs_size = 8 * (MaxMetaspaceSize / 10);
size_t adjusted_ccs_size = MIN3(CompressedClassSpaceSize, max_ccs_size, max_klass_range);
// Sanity check.
const size_t max_klass_range = CompressedKlassPointers::max_klass_range_size();
assert(max_klass_range >= reserve_alignment(),
"Klass range (%zu) must cover at least a full root chunk (%zu)",
max_klass_range, reserve_alignment());
// CCS must be aligned to root chunk size, and be at least the size of one
// root chunk.
adjusted_ccs_size = align_up(adjusted_ccs_size, reserve_alignment());
adjusted_ccs_size = MAX2(adjusted_ccs_size, reserve_alignment());
size_t adjusted_ccs_size = MIN3(CompressedClassSpaceSize, max_ccs_size, max_klass_range);
// Print a warning if the adjusted size differs from the users input
if (CompressedClassSpaceSize != adjusted_ccs_size) {
#define X "CompressedClassSpaceSize adjusted from user input " \
"%zu bytes to %zu bytes", CompressedClassSpaceSize, adjusted_ccs_size
if (FLAG_IS_CMDLINE(CompressedClassSpaceSize)) {
log_warning(metaspace)(X);
} else {
log_info(metaspace)(X);
}
#undef X
}
// Note: re-adjusting may have us left with a CompressedClassSpaceSize
// larger than MaxMetaspaceSize for very small values of MaxMetaspaceSize.
// Let's just live with that, it's not a big deal.
if (adjusted_ccs_size != CompressedClassSpaceSize) {
FLAG_SET_ERGO(CompressedClassSpaceSize, adjusted_ccs_size);
log_info(metaspace)("Setting CompressedClassSpaceSize to %zu.",
CompressedClassSpaceSize);
// CCS must be aligned to root chunk size, and be at least the size of one
// root chunk.
adjusted_ccs_size = align_up(adjusted_ccs_size, reserve_alignment());
adjusted_ccs_size = MAX2(adjusted_ccs_size, reserve_alignment());
// Print a warning if the adjusted size differs from the users input
if (CompressedClassSpaceSize != adjusted_ccs_size) {
#define X "CompressedClassSpaceSize adjusted from user input " \
"%zu bytes to %zu bytes", CompressedClassSpaceSize, adjusted_ccs_size
if (FLAG_IS_CMDLINE(CompressedClassSpaceSize)) {
log_warning(metaspace)(X);
} else {
log_info(metaspace)(X);
}
#undef X
}
// Note: re-adjusting may have us left with a CompressedClassSpaceSize
// larger than MaxMetaspaceSize for very small values of MaxMetaspaceSize.
// Let's just live with that, it's not a big deal.
if (adjusted_ccs_size != CompressedClassSpaceSize) {
FLAG_SET_ERGO(CompressedClassSpaceSize, adjusted_ccs_size);
log_info(metaspace)("Setting CompressedClassSpaceSize to %zu.",
CompressedClassSpaceSize);
}
#endif // INCLUDE_CLASS_SPACE
// Set MetaspaceSize, MinMetaspaceExpansion and MaxMetaspaceExpansion
if (MetaspaceSize > MaxMetaspaceSize) {
MetaspaceSize = MaxMetaspaceSize;
@ -724,15 +723,12 @@ void Metaspace::global_initialize() {
AOTMetaspace::initialize_for_static_dump();
}
// If UseCompressedClassPointers=1, we have two cases:
// We have two cases:
// a) if CDS is active (runtime, Xshare=on), it will create the class space
// for us, initialize it and set up CompressedKlassPointers encoding.
// Class space will be reserved above the mapped archives.
// for us. It then will set up encoding to cover both CDS archive space and class space.
// b) if CDS either deactivated (Xshare=off) or a static dump is to be done (Xshare:dump),
// we will create the class space on our own. It will be placed above the java heap,
// since we assume it has been placed in low
// address regions. We may rethink this (see JDK-8244943). Failing that,
// it will be placed anywhere.
// we will create the class space on our own and set up encoding to only cover the
// class space.
#if INCLUDE_CDS
// case (a)
@ -746,9 +742,9 @@ void Metaspace::global_initialize() {
}
#endif // INCLUDE_CDS
#ifdef _LP64
#if INCLUDE_CLASS_SPACE
if (using_class_space() && !class_space_is_initialized()) {
if (!class_space_is_initialized()) {
assert(!CDSConfig::is_using_archive(), "CDS archive is not mapped at this point");
// case (b) (No CDS)
@ -835,28 +831,23 @@ void Metaspace::global_initialize() {
}
#else
// +UseCompressedClassPointers on 32-bit: does not need class space. Klass can live wherever.
if (UseCompressedClassPointers) {
const address start = (address)os::vm_min_address(); // but not in the zero page
const address end = (address)CompressedKlassPointers::max_klass_range_size();
CompressedKlassPointers::initialize(start, end - start);
}
#endif // __LP64
// 32-bit:
const address start = (address)os::vm_min_address(); // but not in the zero page
const address end = (address)CompressedKlassPointers::max_klass_range_size();
CompressedKlassPointers::initialize(start, end - start);
#endif // INCLUDE_CLASS_SPACE
// Initialize non-class virtual space list, and its chunk manager:
MetaspaceContext::initialize_nonclass_space_context();
_tracer = new MetaspaceTracer();
if (UseCompressedClassPointers) {
// Note: "cds" would be a better fit but keep this for backward compatibility.
LogTarget(Info, gc, metaspace) lt;
if (lt.is_enabled()) {
LogStream ls(lt);
CDS_ONLY(AOTMetaspace::print_on(&ls);)
Metaspace::print_compressed_class_space(&ls);
CompressedKlassPointers::print_mode(&ls);
}
LogTarget(Info, gc, metaspace) lt;
if (lt.is_enabled()) {
LogStream ls(lt);
CDS_ONLY(AOTMetaspace::print_on(&ls);)
Metaspace::print_compressed_class_space(&ls);
CompressedKlassPointers::print_mode(&ls);
}
}
@ -888,15 +879,13 @@ MetaWord* Metaspace::allocate(ClassLoaderData* loader_data, size_t word_size,
MetaWord* result = loader_data->metaspace_non_null()->allocate(word_size, mdtype);
if (result != nullptr) {
#ifdef ASSERT
if (using_class_space() && mdtype == ClassType) {
if (INCLUDE_CLASS_SPACE == 1 && mdtype == ClassType) {
assert(is_in_class_space(result) &&
is_aligned(result, CompressedKlassPointers::klass_alignment_in_bytes()), "Sanity");
} else {
assert((is_in_class_space(result) || is_in_nonclass_metaspace(result)) &&
is_aligned(result, Metaspace::min_allocation_alignment_bytes), "Sanity");
}
#endif
// Zero initialize.
Copy::fill_to_words((HeapWord*)result, word_size, 0);
log_trace(metaspace)("Metaspace::allocate: type %d return " PTR_FORMAT ".", (int)type, p2i(result));
@ -1017,12 +1006,12 @@ void Metaspace::purge(bool classes_unloaded) {
if (cm != nullptr) {
cm->purge();
}
if (using_class_space()) {
cm = ChunkManager::chunkmanager_class();
if (cm != nullptr) {
cm->purge();
}
#if INCLUDE_CLASS_SPACE
cm = ChunkManager::chunkmanager_class();
if (cm != nullptr) {
cm->purge();
}
#endif // INCLUDE_CLASS_SPACE
}
// Try to satisfy queued metaspace allocation requests.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2021 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -164,18 +164,12 @@ public:
static void print_compressed_class_space(outputStream* st) NOT_LP64({});
// Return TRUE only if UseCompressedClassPointers is True.
static bool using_class_space() {
return NOT_LP64(false) LP64_ONLY(UseCompressedClassPointers);
}
static bool is_class_space_allocation(MetadataType mdType) {
return mdType == ClassType && using_class_space();
return CLASS_SPACE_ONLY(mdType == ClassType) NOT_CLASS_SPACE(false);
}
static bool initialized();
};
#endif // SHARE_MEMORY_METASPACE_HPP

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -59,39 +59,39 @@ static void print_vs(outputStream* out, size_t scale) {
const size_t committed_nc = RunningCounters::committed_words_nonclass();
const int num_nodes_nc = VirtualSpaceList::vslist_nonclass()->num_nodes();
if (Metaspace::using_class_space()) {
const size_t reserved_c = RunningCounters::reserved_words_class();
const size_t committed_c = RunningCounters::committed_words_class();
const int num_nodes_c = VirtualSpaceList::vslist_class()->num_nodes();
#if INCLUDE_CLASS_SPACE
const size_t reserved_c = RunningCounters::reserved_words_class();
const size_t committed_c = RunningCounters::committed_words_class();
const int num_nodes_c = VirtualSpaceList::vslist_class()->num_nodes();
out->print(" Non-class space: ");
print_scaled_words(out, reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_nc, reserved_nc, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_nc);
out->cr();
out->print(" Class space: ");
print_scaled_words(out, reserved_c, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_c, reserved_c, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_c);
out->cr();
out->print(" Both: ");
print_scaled_words(out, reserved_c + reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_c + committed_nc, reserved_c + reserved_nc, scale, 7);
out->print(" committed. ");
out->cr();
} else {
print_scaled_words(out, reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_nc, reserved_nc, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_nc);
out->cr();
}
out->print(" Non-class space: ");
print_scaled_words(out, reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_nc, reserved_nc, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_nc);
out->cr();
out->print(" Class space: ");
print_scaled_words(out, reserved_c, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_c, reserved_c, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_c);
out->cr();
out->print(" Both: ");
print_scaled_words(out, reserved_c + reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_c + committed_nc, reserved_c + reserved_nc, scale, 7);
out->print(" committed. ");
out->cr();
#else
print_scaled_words(out, reserved_nc, scale, 7);
out->print(" reserved, ");
print_scaled_words_and_percentage(out, committed_nc, reserved_nc, scale, 7);
out->print(" committed, ");
out->print(" %d nodes.", num_nodes_nc);
out->cr();
#endif // INCLUDE_CLASS_SPACE
}
static void print_settings(outputStream* out, size_t scale) {
@ -102,12 +102,12 @@ static void print_settings(outputStream* out, size_t scale) {
print_human_readable_size(out, MaxMetaspaceSize, scale);
}
out->cr();
if (Metaspace::using_class_space()) {
out->print("CompressedClassSpaceSize: ");
print_human_readable_size(out, CompressedClassSpaceSize, scale);
} else {
out->print("No class space");
}
#if INCLUDE_CLASS_SPACE
out->print("CompressedClassSpaceSize: ");
print_human_readable_size(out, CompressedClassSpaceSize, scale);
#else
out->print("No class space");
#endif // INCLUDE_CLASS_SPACE
out->cr();
out->print("Initial GC threshold: ");
print_human_readable_size(out, MetaspaceSize, scale);
@ -117,9 +117,7 @@ static void print_settings(outputStream* out, size_t scale) {
out->cr();
out->print_cr("CDS: %s", (CDSConfig::is_using_archive() ? "on" : (CDSConfig::is_dumping_static_archive() ? "dump" : "off")));
Settings::print_on(out);
#ifdef _LP64
CompressedKlassPointers::print_mode(out);
#endif
}
// This will print out a basic metaspace usage report but
@ -131,9 +129,7 @@ void MetaspaceReporter::print_basic_report(outputStream* out, size_t scale) {
}
out->cr();
out->print_cr("Usage:");
if (Metaspace::using_class_space()) {
out->print(" Non-class: ");
}
CLASS_SPACE_ONLY(out->print(" Non-class: ");)
// Note: since we want to purely rely on counters, without any locking or walking the CLDG,
// for Usage stats (statistics over in-use chunks) all we can print is the
@ -144,37 +140,35 @@ void MetaspaceReporter::print_basic_report(outputStream* out, size_t scale) {
print_scaled_words(out, used_nc, scale, 5);
out->print(" used.");
out->cr();
if (Metaspace::using_class_space()) {
const size_t used_c = MetaspaceUtils::used_words(Metaspace::ClassType);
out->print(" Class: ");
print_scaled_words(out, used_c, scale, 5);
out->print(" used.");
out->cr();
out->print(" Both: ");
const size_t used = used_nc + used_c;
print_scaled_words(out, used, scale, 5);
out->print(" used.");
out->cr();
}
#if INCLUDE_CLASS_SPACE
const size_t used_c = MetaspaceUtils::used_words(Metaspace::ClassType);
out->print(" Class: ");
print_scaled_words(out, used_c, scale, 5);
out->print(" used.");
out->cr();
out->print(" Both: ");
const size_t used = used_nc + used_c;
print_scaled_words(out, used, scale, 5);
out->print(" used.");
out->cr();
#endif // INCLUDE_CLASS_SPACE
out->cr();
out->print_cr("Virtual space:");
print_vs(out, scale);
out->cr();
out->print_cr("Chunk freelists:");
if (Metaspace::using_class_space()) {
out->print(" Non-Class: ");
}
CLASS_SPACE_ONLY(out->print(" Non-Class: ");)
print_scaled_words(out, ChunkManager::chunkmanager_nonclass()->total_word_size(), scale);
out->cr();
if (Metaspace::using_class_space()) {
out->print(" Class: ");
print_scaled_words(out, ChunkManager::chunkmanager_class()->total_word_size(), scale);
out->cr();
out->print(" Both: ");
print_scaled_words(out, ChunkManager::chunkmanager_nonclass()->total_word_size() +
ChunkManager::chunkmanager_class()->total_word_size(), scale);
out->cr();
}
#if INCLUDE_CLASS_SPACE
out->print(" Class: ");
print_scaled_words(out, ChunkManager::chunkmanager_class()->total_word_size(), scale);
out->cr();
out->print(" Both: ");
print_scaled_words(out, ChunkManager::chunkmanager_nonclass()->total_word_size() +
ChunkManager::chunkmanager_class()->total_word_size(), scale);
out->cr();
#endif // INCLUDE_CLASS_SPACE
out->cr();
// Print basic settings
@ -256,70 +250,70 @@ void MetaspaceReporter::print_report(outputStream* out, size_t scale, int flags)
// -- Print VirtualSpaceList details.
if ((flags & (int)Option::ShowVSList) > 0) {
out->cr();
out->print_cr("Virtual space list%s:", Metaspace::using_class_space() ? "s" : "");
if (Metaspace::using_class_space()) {
out->print_cr(" Non-Class:");
}
#if INCLUDE_CLASS_SPACE
out->print_cr("Virtual space lists:");
out->print_cr(" Non-Class:");
VirtualSpaceList::vslist_nonclass()->print_on(out);
out->cr();
if (Metaspace::using_class_space()) {
out->print_cr(" Class:");
VirtualSpaceList::vslist_class()->print_on(out);
out->cr();
}
out->print_cr(" Class:");
VirtualSpaceList::vslist_class()->print_on(out);
out->cr();
#else
out->print_cr("Virtual space list:");
VirtualSpaceList::vslist_nonclass()->print_on(out);
out->cr();
#endif // INCLUDE_CLASS_SPACE
}
out->cr();
//////////// Freelists (ChunkManager) section ///////////////////////////
out->cr();
out->print_cr("Chunk freelist%s:", Metaspace::using_class_space() ? "s" : "");
ChunkManagerStats non_class_cm_stat;
ChunkManagerStats class_cm_stat;
ChunkManagerStats total_cm_stat;
ChunkManager::chunkmanager_nonclass()->add_to_statistics(&non_class_cm_stat);
if (Metaspace::using_class_space()) {
ChunkManager::chunkmanager_nonclass()->add_to_statistics(&non_class_cm_stat);
ChunkManager::chunkmanager_class()->add_to_statistics(&class_cm_stat);
total_cm_stat.add(non_class_cm_stat);
total_cm_stat.add(class_cm_stat);
#if INCLUDE_CLASS_SPACE
ChunkManager::chunkmanager_class()->add_to_statistics(&class_cm_stat);
total_cm_stat.add(non_class_cm_stat);
total_cm_stat.add(class_cm_stat);
out->print_cr(" Non-Class:");
non_class_cm_stat.print_on(out, scale);
out->cr();
out->print_cr(" Class:");
class_cm_stat.print_on(out, scale);
out->cr();
out->print_cr(" Both:");
total_cm_stat.print_on(out, scale);
out->cr();
} else {
ChunkManager::chunkmanager_nonclass()->add_to_statistics(&non_class_cm_stat);
non_class_cm_stat.print_on(out, scale);
out->cr();
}
out->print_cr("Chunk freelists:");
out->cr();
out->print_cr(" Non-Class:");
non_class_cm_stat.print_on(out, scale);
out->cr();
out->print_cr(" Class:");
class_cm_stat.print_on(out, scale);
out->cr();
out->print_cr(" Both:");
total_cm_stat.print_on(out, scale);
out->cr();
#else
out->print_cr("Chunk freelist:");
ChunkManager::chunkmanager_nonclass()->add_to_statistics(&non_class_cm_stat);
non_class_cm_stat.print_on(out, scale);
out->cr();
#endif // INCLUDE_CLASS_SPACE
// -- Print Chunkmanager details.
if ((flags & (int)Option::ShowChunkFreeList) > 0) {
out->cr();
out->print_cr("Chunk freelist details:");
if (Metaspace::using_class_space()) {
out->print_cr(" Non-Class:");
}
#if INCLUDE_CLASS_SPACE
out->print_cr(" Non-Class:");
ChunkManager::chunkmanager_nonclass()->print_on(out);
out->cr();
if (Metaspace::using_class_space()) {
out->print_cr(" Class:");
ChunkManager::chunkmanager_class()->print_on(out);
out->cr();
}
out->print_cr(" Class:");
ChunkManager::chunkmanager_class()->print_on(out);
out->cr();
#else
ChunkManager::chunkmanager_nonclass()->print_on(out);
out->cr();
#endif // INCLUDE_CLASS_SPACE
}
out->cr();
//////////// Waste section ///////////////////////////
// As a convenience, print a summary of common waste.
out->cr();

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -205,28 +205,26 @@ ArenaStats ClmsStats::totals() const {
void ClmsStats::print_on(outputStream* st, size_t scale, bool detailed) const {
StreamIndentor si(st, 2);
st->cr();
if (Metaspace::using_class_space()) {
st->print("Non-Class: ");
}
CLASS_SPACE_ONLY(st->print("Non-Class: ");)
_arena_stats_nonclass.print_on(st, scale, detailed);
if (detailed) {
st->cr();
}
if (Metaspace::using_class_space()) {
#if INCLUDE_CLASS_SPACE
st->cr();
st->print(" Class: ");
_arena_stats_class.print_on(st, scale, detailed);
if (detailed) {
st->cr();
st->print(" Class: ");
_arena_stats_class.print_on(st, scale, detailed);
if (detailed) {
st->cr();
}
st->cr();
st->print(" Both: ");
totals().print_on(st, scale, detailed);
if (detailed) {
st->cr();
}
}
st->cr();
st->print(" Both: ");
totals().print_on(st, scale, detailed);
if (detailed) {
st->cr();
}
#endif // INCLUDE_CLASS_SPACE
st->cr();
}
#ifdef ASSERT

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2021 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -259,12 +259,11 @@ VirtualSpaceNode* VirtualSpaceNode::create_node(size_t word_size,
}
#ifndef _LP64
// On 32-bit, with +UseCompressedClassPointers, the whole address space is the encoding range. We therefore
// don't need a class space. However, as a pragmatic workaround for pesky overflow problems on 32-bit, we leave
// a small area at the end of the address space out of the encoding range. We just assume no Klass will ever live
// On 32-bit, the whole address space is the encoding range. We therefore don't need a class space.
// However, as a pragmatic workaround for pesky overflow problems on 32-bit, we leave a small area
// at the end of the address space out of the encoding range. We just assume no Klass will ever live
// there (it won't, for no OS we support on 32-bit has user-addressable memory up there).
assert(!UseCompressedClassPointers ||
rs.end() <= (char*)CompressedKlassPointers::max_klass_range_size(), "Weirdly high address");
assert(rs.end() <= (char*)CompressedKlassPointers::max_klass_range_size(), "Weirdly high address");
#endif // _LP64
MemTracker::record_virtual_memory_tag(rs, mtMetaspace);

View File

@ -272,9 +272,7 @@ void MemSummaryReporter::report_summary_of_tag(MemTag mem_tag,
} else if (mem_tag == mtClass) {
// Metadata information
report_metadata(Metaspace::NonClassType);
if (Metaspace::using_class_space()) {
report_metadata(Metaspace::ClassType);
}
CLASS_SPACE_ONLY(report_metadata(Metaspace::ClassType);)
}
out->cr();
}
@ -754,9 +752,9 @@ void MemSummaryDiffReporter::diff_summary_of_tag(MemTag mem_tag,
void MemSummaryDiffReporter::print_metaspace_diff(const MetaspaceCombinedStats& current_ms,
const MetaspaceCombinedStats& early_ms) const {
print_metaspace_diff("Metadata", current_ms.non_class_space_stats(), early_ms.non_class_space_stats());
if (Metaspace::using_class_space()) {
print_metaspace_diff("Class space", current_ms.class_space_stats(), early_ms.class_space_stats());
}
#if INCLUDE_CLASS_SPACE
print_metaspace_diff("Class space", current_ms.class_space_stats(), early_ms.class_space_stats());
#endif
}
void MemSummaryDiffReporter::print_metaspace_diff(const char* header,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -80,8 +80,7 @@ private:
// The _length field is not declared in C++. It is allocated after the
// mark-word when using compact headers (+UseCompactObjectHeaders), otherwise
// after the compressed Klass* when running with compressed class-pointers
// (+UseCompressedClassPointers), or else after the full Klass*.
// after the compressed Klass*.
static int length_offset_in_bytes() {
return oopDesc::base_offset_in_bytes();
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2019, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2019, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -95,7 +95,7 @@ void CompressedKlassPointers::sanity_check_after_initialization() {
// We should need a class space if address space is larger than what narrowKlass can address
const bool should_need_class_space = (BytesPerWord * BitsPerByte) > narrow_klass_pointer_bits();
ASSERT_HERE(should_need_class_space == needs_class_space());
ASSERT_HERE(should_need_class_space == (INCLUDE_CLASS_SPACE ? true : false));
const size_t klass_align = klass_alignment_in_bytes();
@ -318,24 +318,19 @@ void CompressedKlassPointers::initialize(address addr, size_t len) {
}
void CompressedKlassPointers::print_mode(outputStream* st) {
st->print_cr("UseCompressedClassPointers %d, UseCompactObjectHeaders %d",
UseCompressedClassPointers, UseCompactObjectHeaders);
if (UseCompressedClassPointers) {
st->print_cr("Narrow klass pointer bits %d, Max shift %d",
_narrow_klass_pointer_bits, _max_shift);
st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
p2i(base()), shift());
st->print_cr("Encoding Range: " RANGE2FMT, RANGE2FMTARGS(_base, encoding_range_end()));
st->print_cr("Klass Range: " RANGE2FMT, RANGE2FMTARGS(_klass_range_start, _klass_range_end));
st->print_cr("Klass ID Range: [%u - %u) (%u)", _lowest_valid_narrow_klass_id, _highest_valid_narrow_klass_id + 1,
_highest_valid_narrow_klass_id + 1 - _lowest_valid_narrow_klass_id);
if (_protection_zone_size > 0) {
st->print_cr("Protection zone: " RANGEFMT, RANGEFMTARGS(_base, _protection_zone_size));
} else {
st->print_cr("No protection zone.");
}
st->print_cr("UseCompactObjectHeaders %d", UseCompactObjectHeaders);
st->print_cr("Narrow klass pointer bits %d, Max shift %d",
_narrow_klass_pointer_bits, _max_shift);
st->print_cr("Narrow klass base: " PTR_FORMAT ", Narrow klass shift: %d",
p2i(base()), shift());
st->print_cr("Encoding Range: " RANGE2FMT, RANGE2FMTARGS(_base, encoding_range_end()));
st->print_cr("Klass Range: " RANGE2FMT, RANGE2FMTARGS(_klass_range_start, _klass_range_end));
st->print_cr("Klass ID Range: [%u - %u) (%u)", _lowest_valid_narrow_klass_id, _highest_valid_narrow_klass_id + 1,
_highest_valid_narrow_klass_id + 1 - _lowest_valid_narrow_klass_id);
if (_protection_zone_size > 0) {
st->print_cr("Protection zone: " RANGEFMT, RANGEFMTARGS(_base, _protection_zone_size));
} else {
st->print_cr("UseCompressedClassPointers off");
st->print_cr("No protection zone.");
}
}

View File

@ -98,7 +98,6 @@ class Klass;
// If compressed klass pointers then use narrowKlass.
typedef juint narrowKlass;
// For UseCompressedClassPointers.
class CompressedKlassPointers : public AllStatic {
friend class VMStructs;
friend class ArchiveBuilder;
@ -161,7 +160,6 @@ public:
// Initialization sequence:
// 1) Parse arguments. The following arguments take a role:
// - UseCompressedClassPointers
// - UseCompactObjectHeaders
// - Xshare on off dump
// - CompressedClassSpaceSize
@ -192,12 +190,6 @@ public:
// resulting from the current encoding settings (base, shift), capped to a certain max. value.
static size_t max_klass_range_size();
// On 64-bit, we need the class space to confine Klass structures to the encoding range, which is determined
// by bit size of narrowKlass IDs and the shift. On 32-bit, we support compressed class pointer only
// "pro-forma": narrowKlass have the same size as addresses (32 bits), and therefore the encoding range is
// equal to the address space size. Here, we don't need a class space.
static constexpr bool needs_class_space() { return LP64_ONLY(true) NOT_LP64(false); }
// Reserve a range of memory that is to contain Klass structures which are referenced by narrow Klass IDs.
// If optimize_for_zero_base is true, the implementation will attempt to reserve optimized for zero-based encoding.
static char* reserve_address_space_for_compressed_classes(size_t size, bool aslr, bool optimize_for_zero_base);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -75,7 +75,6 @@ inline narrowKlass CompressedKlassPointers::encode(const Klass* v) {
#ifdef ASSERT
inline void CompressedKlassPointers::check_encodable(const void* addr) {
assert(UseCompressedClassPointers, "Only call for +UseCCP");
assert(addr != nullptr, "Null Klass?");
assert(is_encodable(addr),
"Address " PTR_FORMAT " is not encodable (Klass range: " RANGEFMT ", klass alignment: %d)",
@ -84,7 +83,6 @@ inline void CompressedKlassPointers::check_encodable(const void* addr) {
inline void CompressedKlassPointers::check_valid_narrow_klass_id(narrowKlass nk) {
check_init(_base);
assert(UseCompressedClassPointers, "Only call for +UseCCP");
assert(nk > 0, "narrow Klass ID is 0");
const uint64_t nk_mask = ~right_n_bits(narrow_klass_pointer_bits());
assert(((uint64_t)nk & nk_mask) == 0, "narrow klass id bit spillover (%u)", nk);

View File

@ -486,10 +486,8 @@ InstanceKlass* InstanceKlass::allocate_instance_klass(const ClassFileParser& par
ik = new (loader_data, size, THREAD) InstanceKlass(parser);
}
if (ik != nullptr && UseCompressedClassPointers) {
assert(CompressedKlassPointers::is_encodable(ik),
"Klass " PTR_FORMAT "needs a narrow Klass ID, but is not encodable", p2i(ik));
}
assert(ik == nullptr || CompressedKlassPointers::is_encodable(ik),
"Klass " PTR_FORMAT "needs a narrow Klass ID, but is not encodable", p2i(ik));
// Check for pending exception before adding to the loader data and incrementing
// class count. Can get OOM here.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -1055,14 +1055,8 @@ void Klass::verify_on(outputStream* st) {
// This can be expensive, but it is worth checking that this klass is actually
// in the CLD graph but not in production.
#ifdef ASSERT
if (UseCompressedClassPointers) {
// Stricter checks for both correct alignment and placement
CompressedKlassPointers::check_encodable(this);
} else {
assert(Metaspace::contains((address)this), "Should be");
}
#endif // ASSERT
// Stricter checks for both correct alignment and placement
DEBUG_ONLY(CompressedKlassPointers::check_encodable(this));
guarantee(this->is_klass(),"should be klass");

View File

@ -38,21 +38,17 @@ void ObjLayout::initialize() {
_klass_mode = Compact;
_oop_base_offset_in_bytes = sizeof(markWord);
_oop_has_klass_gap = false;
} else if (UseCompressedClassPointers) {
} else {
_klass_mode = Compressed;
_oop_base_offset_in_bytes = sizeof(markWord) + sizeof(narrowKlass);
_oop_has_klass_gap = true;
} else {
_klass_mode = Uncompressed;
_oop_base_offset_in_bytes = sizeof(markWord) + sizeof(Klass*);
_oop_has_klass_gap = false;
}
#else
assert(_klass_mode == Undefined, "ObjLayout initialized twice");
assert(!UseCompactObjectHeaders, "COH unsupported on 32-bit");
// We support +-UseCompressedClassPointers on 32-bit, but the layout
// We support narrow Klass pointers on 32-bit, but the layout
// is exactly the same as it was with uncompressed klass pointers
_klass_mode = UseCompressedClassPointers ? Compressed : Uncompressed;
_klass_mode = Compressed;
_oop_base_offset_in_bytes = sizeof(markWord) + sizeof(Klass*);
_oop_has_klass_gap = false;
#endif

View File

@ -27,8 +27,8 @@
/*
* This class helps to avoid loading more than one flag in some
* operations that require checking UseCompressedClassPointers,
* UseCompactObjectHeaders and possibly more.
* operations that require checking UseCompactObjectHeaders and - in the future -
* possibly more.
*
* This is important on some performance critical paths, e.g. where
* the Klass* is accessed frequently, especially by GC oop iterators
@ -37,12 +37,10 @@
class ObjLayout {
public:
enum Mode {
// +UseCompactObjectHeaders (implies +UseCompressedClassPointers)
// +UseCompactObjectHeaders
Compact,
// +UseCompressedClassPointers (-UseCompactObjectHeaders)
// -UseCompactObjectHeaders (compressed Klass pointers)
Compressed,
// -UseCompressedClassPointers (-UseCompactObjectHeaders)
Uncompressed,
// Not yet initialized
Undefined
};

View File

@ -32,10 +32,8 @@ inline ObjLayout::Mode ObjLayout::klass_mode() {
assert(_klass_mode != Undefined, "KlassMode not yet initialized");
if (UseCompactObjectHeaders) {
assert(_klass_mode == Compact, "Klass mode does not match flags");
} else if (UseCompressedClassPointers) {
assert(_klass_mode == Compressed, "Klass mode does not match flags");
} else {
assert(_klass_mode == Uncompressed, "Klass mode does not match flags");
assert(_klass_mode == Compressed, "Klass mode does not match flags");
}
#endif
return _klass_mode;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -152,8 +152,7 @@ bool oopDesc::is_typeArray_noinline() const { return is_typeArray(); }
#if INCLUDE_CDS_JAVA_HEAP
void oopDesc::set_narrow_klass(narrowKlass nk) {
assert(CDSConfig::is_dumping_heap(), "Used by CDS only. Do not abuse!");
assert(UseCompressedClassPointers, "must be");
_metadata._compressed_klass = nk;
_compressed_klass = nk;
}
#endif

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -49,10 +49,7 @@ class oopDesc {
friend class JVMCIVMStructs;
private:
volatile markWord _mark;
union _metadata {
Klass* _klass;
narrowKlass _compressed_klass;
} _metadata;
narrowKlass _compressed_klass;
// There may be ordering constraints on the initialization of fields that
// make use of the C++ copy/assign incorrect.
@ -338,7 +335,7 @@ class oopDesc {
} else
#endif
{
return (int)offset_of(oopDesc, _metadata._klass);
return (int)offset_of(oopDesc, _compressed_klass);
}
}
static int klass_gap_offset_in_bytes() {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -99,9 +99,9 @@ Klass* oopDesc::klass() const {
case ObjLayout::Compact:
return mark().klass();
case ObjLayout::Compressed:
return CompressedKlassPointers::decode_not_null(_metadata._compressed_klass);
return CompressedKlassPointers::decode_not_null(_compressed_klass);
default:
return _metadata._klass;
ShouldNotReachHere();
}
}
@ -110,9 +110,9 @@ Klass* oopDesc::klass_or_null() const {
case ObjLayout::Compact:
return mark().klass_or_null();
case ObjLayout::Compressed:
return CompressedKlassPointers::decode(_metadata._compressed_klass);
return CompressedKlassPointers::decode(_compressed_klass);
default:
return _metadata._klass;
ShouldNotReachHere();
}
}
@ -121,11 +121,11 @@ Klass* oopDesc::klass_or_null_acquire() const {
case ObjLayout::Compact:
return mark_acquire().klass();
case ObjLayout::Compressed: {
narrowKlass narrow_klass = AtomicAccess::load_acquire(&_metadata._compressed_klass);
narrowKlass narrow_klass = AtomicAccess::load_acquire(&_compressed_klass);
return CompressedKlassPointers::decode(narrow_klass);
}
default:
return AtomicAccess::load_acquire(&_metadata._klass);
ShouldNotReachHere();
}
}
@ -134,9 +134,9 @@ Klass* oopDesc::klass_without_asserts() const {
case ObjLayout::Compact:
return mark().klass_without_asserts();
case ObjLayout::Compressed:
return CompressedKlassPointers::decode_without_asserts(_metadata._compressed_klass);
return CompressedKlassPointers::decode_without_asserts(_compressed_klass);
default:
return _metadata._klass;
ShouldNotReachHere();
}
}
@ -145,7 +145,7 @@ narrowKlass oopDesc::narrow_klass() const {
case ObjLayout::Compact:
return mark().narrow_klass();
case ObjLayout::Compressed:
return _metadata._compressed_klass;
return _compressed_klass;
default:
ShouldNotReachHere();
}
@ -154,23 +154,14 @@ narrowKlass oopDesc::narrow_klass() const {
void oopDesc::set_klass(Klass* k) {
assert(Universe::is_bootstrapping() || (k != nullptr && k->is_klass()), "incorrect Klass");
assert(!UseCompactObjectHeaders, "don't set Klass* with compact headers");
if (UseCompressedClassPointers) {
_metadata._compressed_klass = CompressedKlassPointers::encode_not_null(k);
} else {
_metadata._klass = k;
}
_compressed_klass = CompressedKlassPointers::encode_not_null(k);
}
void oopDesc::release_set_klass(HeapWord* mem, Klass* k) {
assert(Universe::is_bootstrapping() || (k != nullptr && k->is_klass()), "incorrect Klass");
assert(!UseCompactObjectHeaders, "don't set Klass* with compact headers");
char* raw_mem = ((char*)mem + klass_offset_in_bytes());
if (UseCompressedClassPointers) {
AtomicAccess::release_store((narrowKlass*)raw_mem,
CompressedKlassPointers::encode_not_null(k));
} else {
AtomicAccess::release_store((Klass**)raw_mem, k);
}
AtomicAccess::release_store((narrowKlass*)raw_mem, CompressedKlassPointers::encode_not_null(k));
}
void oopDesc::set_klass_gap(HeapWord* mem, int v) {

View File

@ -2689,7 +2689,7 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
#ifdef _LP64
// Push DecodeN/DecodeNKlass down through phi.
// The rest of phi graph will transform by split EncodeP node though phis up.
if ((UseCompressedOops || UseCompressedClassPointers) && can_reshape && progress == nullptr) {
if (can_reshape && progress == nullptr) {
bool may_push = true;
bool has_decodeN = false;
bool is_decodeN = false;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -2633,7 +2633,7 @@ void PhaseChaitin::verify_base_ptrs(ResourceArea* a) const {
#ifdef _LP64
(UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_CastPP) ||
(UseCompressedOops && check->as_Mach()->ideal_Opcode() == Op_DecodeN) ||
(UseCompressedClassPointers && check->as_Mach()->ideal_Opcode() == Op_DecodeNKlass) ||
(check->as_Mach()->ideal_Opcode() == Op_DecodeNKlass) ||
#endif // _LP64
check->as_Mach()->ideal_Opcode() == Op_LoadP ||
check->as_Mach()->ideal_Opcode() == Op_LoadKlass))) {

View File

@ -3414,8 +3414,7 @@ void Compile::final_graph_reshaping_main_switch(Node* n, Final_Reshape_Counts& f
Node *addp = n->in(AddPNode::Address);
assert(n->as_AddP()->address_input_has_same_base(), "Base pointers must match (addp %u)", addp->_idx );
#ifdef _LP64
if ((UseCompressedOops || UseCompressedClassPointers) &&
addp->Opcode() == Op_ConP &&
if (addp->Opcode() == Op_ConP &&
addp == n->in(AddPNode::Base) &&
n->in(AddPNode::Offset)->is_Con()) {
// If the transformation of ConP to ConN+DecodeN is beneficial depends
@ -3428,7 +3427,7 @@ void Compile::final_graph_reshaping_main_switch(Node* n, Final_Reshape_Counts& f
bool is_klass = t->isa_klassptr() != nullptr;
if ((is_oop && UseCompressedOops && Matcher::const_oop_prefer_decode() ) ||
(is_klass && UseCompressedClassPointers && Matcher::const_klass_prefer_decode() &&
(is_klass && Matcher::const_klass_prefer_decode() &&
t->isa_klassptr()->exact_klass()->is_in_encoding_range())) {
Node* nn = nullptr;
@ -3975,8 +3974,7 @@ void Compile::final_graph_reshaping_walk(Node_Stack& nstack, Node* root, Final_R
}
// Skip next transformation if compressed oops are not used.
if ((UseCompressedOops && !Matcher::gen_narrow_oop_implicit_null_checks()) ||
(!UseCompressedOops && !UseCompressedClassPointers))
if (UseCompressedOops && !Matcher::gen_narrow_oop_implicit_null_checks())
return;
// Go over safepoints nodes to skip DecodeN/DecodeNKlass nodes for debug edges.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1998, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -306,8 +306,7 @@ void PhaseCFG::implicit_null_check(Block* block, Node *proj, Node *val, int allo
// cannot reason about it; is probably not implicit null exception
} else {
const TypePtr* tptr;
if ((UseCompressedOops && CompressedOops::shift() == 0) ||
(UseCompressedClassPointers && CompressedKlassPointers::shift() == 0)) {
if ((UseCompressedOops && CompressedOops::shift() == 0) || CompressedKlassPointers::shift() == 0) {
// 32-bits narrow oop can be the base of address expressions
tptr = base->get_ptr_type();
} else {

View File

@ -2486,7 +2486,6 @@ Node* LoadKlassNode::make(PhaseGVN& gvn, Node* mem, Node* adr, const TypePtr* at
assert(adr_type != nullptr, "expecting TypeKlassPtr");
#ifdef _LP64
if (adr_type->is_ptr_to_narrowklass()) {
assert(UseCompressedClassPointers, "no compressed klasses");
Node* load_klass = gvn.transform(new LoadNKlassNode(mem, adr, at, tk->make_narrowklass(), MemNode::unordered));
return new DecodeNKlassNode(load_klass, load_klass->bottom_type()->make_ptr());
}
@ -2816,8 +2815,7 @@ StoreNode* StoreNode::make(PhaseGVN& gvn, Node* ctl, Node* mem, Node* adr, const
val = gvn.transform(new EncodePNode(val, val->bottom_type()->make_narrowoop()));
return new StoreNNode(ctl, mem, adr, adr_type, val, mo);
} else if (adr->bottom_type()->is_ptr_to_narrowklass() ||
(UseCompressedClassPointers && val->bottom_type()->isa_klassptr() &&
adr->bottom_type()->isa_rawptr())) {
(val->bottom_type()->isa_klassptr() && adr->bottom_type()->isa_rawptr())) {
val = gvn.transform(new EncodePKlassNode(val, val->bottom_type()->make_narrowklass()));
return new StoreNKlassNode(ctl, mem, adr, adr_type, val, mo);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -102,7 +102,7 @@ const Type* EncodePKlassNode::Value(PhaseGVN* phase) const {
if (t == Type::TOP) return Type::TOP;
assert (t != TypePtr::NULL_PTR, "null klass?");
assert(UseCompressedClassPointers && t->isa_klassptr(), "only klass ptr here");
assert(t->isa_klassptr(), "only klass ptr here");
return t->make_narrowklass();
}

View File

@ -3489,7 +3489,7 @@ TypeOopPtr::TypeOopPtr(TYPES t, PTR ptr, ciKlass* k, const TypeInterfaces* inter
#ifdef _LP64
if (_offset > 0 || _offset == Type::OffsetTop || _offset == Type::OffsetBot) {
if (_offset == oopDesc::klass_offset_in_bytes()) {
_is_ptr_to_narrowklass = UseCompressedClassPointers;
_is_ptr_to_narrowklass = true;
} else if (klass() == nullptr) {
// Array with unknown body type
assert(this->isa_aryptr(), "only arrays without klass");

View File

@ -333,7 +333,6 @@ WB_ENTRY(void, WB_ReadFromNoaccessArea(JNIEnv* env, jobject o))
WB_END
WB_ENTRY(void, WB_DecodeNKlassAndAccessKlass(JNIEnv* env, jobject o, jint nKlass))
assert(UseCompressedClassPointers, "Should only call for UseCompressedClassPointers");
const narrowKlass nk = (narrowKlass)nKlass;
const Klass* const k = CompressedKlassPointers::decode_not_null_without_asserts(nKlass);
printf("WB_DecodeNKlassAndAccessKlass: nk %u k " PTR_FORMAT "\n", nk, p2i(k));

View File

@ -1554,7 +1554,7 @@ void Arguments::set_heap_size() {
}
#ifdef _LP64
if (UseCompressedOops || UseCompressedClassPointers) {
if (UseCompressedOops) {
// HeapBaseMinAddress can be greater than default but not less than.
if (!FLAG_IS_DEFAULT(HeapBaseMinAddress)) {
if (HeapBaseMinAddress < DefaultHeapBaseMinAddress) {
@ -1567,9 +1567,7 @@ void Arguments::set_heap_size() {
FLAG_SET_ERGO(HeapBaseMinAddress, DefaultHeapBaseMinAddress);
}
}
}
if (UseCompressedOops) {
uintptr_t heap_end = HeapBaseMinAddress + MaxHeapSize;
uintptr_t max_coop_heap = max_heap_for_compressed_oops();
@ -3782,10 +3780,6 @@ jint Arguments::parse(const JavaVMInitArgs* initial_cmd_args) {
void Arguments::set_compact_headers_flags() {
#ifdef _LP64
if (UseCompactObjectHeaders && FLAG_IS_CMDLINE(UseCompressedClassPointers) && !UseCompressedClassPointers) {
warning("Compact object headers require compressed class pointers. Disabling compact object headers.");
FLAG_SET_DEFAULT(UseCompactObjectHeaders, false);
}
if (UseCompactObjectHeaders && !UseObjectMonitorTable) {
// If UseCompactObjectHeaders is on the command line, turn on UseObjectMonitorTable.
if (FLAG_IS_CMDLINE(UseCompactObjectHeaders)) {
@ -3799,9 +3793,6 @@ void Arguments::set_compact_headers_flags() {
FLAG_SET_DEFAULT(UseObjectMonitorTable, true);
}
}
if (UseCompactObjectHeaders && !UseCompressedClassPointers) {
FLAG_SET_DEFAULT(UseCompressedClassPointers, true);
}
#endif
}
@ -3817,9 +3808,7 @@ jint Arguments::apply_ergo() {
set_compact_headers_flags();
if (UseCompressedClassPointers) {
CompressedKlassPointers::pre_initialize();
}
CompressedKlassPointers::pre_initialize();
CDSConfig::ergo_initialize();
@ -3864,10 +3853,6 @@ jint Arguments::apply_ergo() {
DebugNonSafepoints = true;
}
if (FLAG_IS_CMDLINE(CompressedClassSpaceSize) && !UseCompressedClassPointers) {
warning("Setting CompressedClassSpaceSize has no effect when compressed class pointers are not used");
}
// Treat the odd case where local verification is enabled but remote
// verification is not as if both were enabled.
if (BytecodeVerificationLocal && !BytecodeVerificationRemote) {

View File

@ -1368,9 +1368,6 @@ const int ObjectAlignmentInBytes = 8;
"Maximum size of Metaspaces (in bytes)") \
constraint(MaxMetaspaceSizeConstraintFunc,AfterErgo) \
\
product(bool, UseCompressedClassPointers, true, \
"(Deprecated) Use 32-bit class pointers.") \
\
product(size_t, CompressedClassSpaceSize, 1*G, \
"Maximum size of class area in Metaspace when compressed " \
"class pointers are used") \

View File

@ -1285,7 +1285,7 @@ void os::print_location(outputStream* st, intptr_t x, bool verbose) {
bool accessible = is_readable_pointer(addr);
// Check if addr points into the narrow Klass protection zone
if (UseCompressedClassPointers && CompressedKlassPointers::is_in_protection_zone(addr)) {
if (CompressedKlassPointers::is_in_protection_zone(addr)) {
st->print_cr(PTR_FORMAT " points into nKlass protection zone", p2i(addr));
return;
}
@ -1339,8 +1339,9 @@ void os::print_location(outputStream* st, intptr_t x, bool verbose) {
}
// Compressed klass needs to be decoded first.
// Todo: questionable for COH - can we do this better?
#ifdef _LP64
if (UseCompressedClassPointers && ((uintptr_t)addr &~ (uintptr_t)max_juint) == 0) {
if (((uintptr_t)addr &~ (uintptr_t)max_juint) == 0) {
narrowKlass narrow_klass = (narrowKlass)(uintptr_t)addr;
Klass* k = CompressedKlassPointers::decode_without_asserts(narrow_klass);

View File

@ -163,8 +163,7 @@
/******************************************************************/ \
\
volatile_nonstatic_field(oopDesc, _mark, markWord) \
volatile_nonstatic_field(oopDesc, _metadata._klass, Klass*) \
volatile_nonstatic_field(oopDesc, _metadata._compressed_klass, narrowKlass) \
volatile_nonstatic_field(oopDesc, _compressed_klass, narrowKlass) \
static_field(BarrierSet, _barrier_set, BarrierSet*) \
nonstatic_field(ArrayKlass, _dimension, const int) \
volatile_nonstatic_field(ArrayKlass, _higher_dimension, ObjArrayKlass*) \

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -119,11 +119,11 @@ void MemoryService::add_metaspace_memory_pools() {
mgr->add_pool(_metaspace_pool);
_pools_list->append(_metaspace_pool);
if (UseCompressedClassPointers) {
_compressed_class_pool = new CompressedKlassSpacePool();
mgr->add_pool(_compressed_class_pool);
_pools_list->append(_compressed_class_pool);
}
#if INCLUDE_CLASS_SPACE
_compressed_class_pool = new CompressedKlassSpacePool();
mgr->add_pool(_compressed_class_pool);
_pools_list->append(_compressed_class_pool);
#endif
_managers_list->append(mgr);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -586,6 +586,18 @@
#define BIG_ENDIAN_ONLY(code) code
#endif
#ifdef _LP64
#define INCLUDE_CLASS_SPACE 1
#define CLASS_SPACE_ONLY(x) x
#define NOT_CLASS_SPACE(x)
#else
// On 32-bit we use fake "narrow class pointers" which are really just 32-bit pointers,
// but we don't use a class space (would cause too much address space fragmentation)
#define INCLUDE_CLASS_SPACE 0
#define CLASS_SPACE_ONLY(x)
#define NOT_CLASS_SPACE(x) x
#endif
#define define_pd_global(type, name, value) const type pd_##name = value;
// Helper macros for constructing file names for includes.

View File

@ -533,8 +533,7 @@ static void report_vm_version(outputStream* st, char* buf, int buflen) {
"", "",
#endif
UseCompressedOops ? ", compressed oops" : "",
UseCompactObjectHeaders ? ", compact obj headers"
: (UseCompressedClassPointers ? ", compressed class ptrs" : ""),
UseCompactObjectHeaders ? ", compact obj headers" : "",
GCConfig::hs_err_name(),
VM_Version::vm_platform_string()
);
@ -1215,7 +1214,7 @@ void VMError::report(outputStream* st, bool _verbose) {
CompressedOops::print_mode(st);
st->cr();
STEP_IF("printing compressed klass pointers mode", _verbose && UseCompressedClassPointers)
STEP_IF("printing compressed klass pointers mode", _verbose)
CDS_ONLY(AOTMetaspace::print_on(st);)
Metaspace::print_compressed_class_space(st);
CompressedKlassPointers::print_mode(st);
@ -1437,12 +1436,10 @@ void VMError::print_vm_info(outputStream* st) {
#endif
// STEP("printing compressed class ptrs mode")
if (UseCompressedClassPointers) {
CDS_ONLY(AOTMetaspace::print_on(st);)
Metaspace::print_compressed_class_space(st);
CompressedKlassPointers::print_mode(st);
st->cr();
}
CDS_ONLY(AOTMetaspace::print_on(st);)
Metaspace::print_compressed_class_space(st);
CompressedKlassPointers::print_mode(st);
st->cr();
// Take heap lock over heap, GC and metaspace printing so that information
// is consistent.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -83,10 +83,8 @@ public class Array extends Oop {
}
if (VM.getVM().isCompactObjectHeadersEnabled()) {
lengthOffsetInBytes = Oop.getHeaderSize();
} else if (VM.getVM().isCompressedKlassPointersEnabled()) {
lengthOffsetInBytes = typeSize - VM.getVM().getIntSize();
} else {
lengthOffsetInBytes = typeSize;
lengthOffsetInBytes = typeSize - VM.getVM().getIntSize();
}
return lengthOffsetInBytes;
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -57,10 +57,8 @@ public class Instance extends Oop {
public static long getHeaderSize() {
if (VM.getVM().isCompactObjectHeadersEnabled()) {
return Oop.getHeaderSize();
} else if (VM.getVM().isCompressedKlassPointersEnabled()) {
return typeSize - VM.getVM().getIntSize();
} else {
return typeSize;
return typeSize - VM.getVM().getIntSize();
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -51,8 +51,7 @@ public class Oop {
headerSize = markType.getSize();
} else {
headerSize = type.getSize();
klass = new MetadataField(type.getAddressField("_metadata._klass"), 0);
compressedKlass = new NarrowKlassField(type.getAddressField("_metadata._compressed_klass"), 0);
compressedKlass = new NarrowKlassField(type.getAddressField("_compressed_klass"), 0);
}
}
@ -75,7 +74,6 @@ public class Oop {
public static long getHeaderSize() { return headerSize; } // Header size in bytes.
private static CIntField mark;
private static MetadataField klass;
private static NarrowKlassField compressedKlass;
// Accessors for declared fields
@ -83,12 +81,9 @@ public class Oop {
public Klass getKlass() {
if (VM.getVM().isCompactObjectHeadersEnabled()) {
assert(VM.getVM().isCompressedKlassPointersEnabled());
return getMark().getKlass();
} else if (VM.getVM().isCompressedKlassPointersEnabled()) {
return (Klass)compressedKlass.getValue(getHandle());
} else {
return (Klass)klass.getValue(getHandle());
return (Klass)compressedKlass.getValue(getHandle());
}
}
@ -157,11 +152,7 @@ public class Oop {
if (doVMFields) {
visitor.doCInt(mark, true);
if (!VM.getVM().isCompactObjectHeadersEnabled()) {
if (VM.getVM().isCompressedKlassPointersEnabled()) {
visitor.doMetadata(compressedKlass, true);
} else {
visitor.doMetadata(klass, true);
}
visitor.doMetadata(compressedKlass, true);
}
}
}
@ -220,10 +211,8 @@ public class Oop {
if (VM.getVM().isCompactObjectHeadersEnabled()) {
Mark mark = new Mark(handle);
return mark.getKlass();
} else if (VM.getVM().isCompressedKlassPointersEnabled()) {
return (Klass)Metadata.instantiateWrapperFor(handle.getCompKlassAddressAt(compressedKlass.getOffset()));
} else {
return (Klass)Metadata.instantiateWrapperFor(handle.getAddressAt(klass.getOffset()));
return (Klass)Metadata.instantiateWrapperFor(handle.getCompKlassAddressAt(compressedKlass.getOffset()));
}
}
};

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -144,7 +144,6 @@ public class VM {
private static CIntegerType boolType;
private Boolean sharingEnabled;
private Boolean compressedOopsEnabled;
private Boolean compressedKlassPointersEnabled;
private Boolean compactObjectHeadersEnabled;
// command line flags supplied to VM - see struct JVMFlag in jvmFlag.hpp
@ -515,11 +514,7 @@ public class VM {
heapOopSize = (int)getOopSize();
}
if (isCompressedKlassPointersEnabled()) {
klassPtrSize = (int)getIntSize();
} else {
klassPtrSize = (int)getOopSize(); // same as an oop
}
klassPtrSize = (int)getIntSize();
}
/** This could be used by a reflective runtime system */
@ -938,15 +933,6 @@ public class VM {
return compressedOopsEnabled.booleanValue();
}
public boolean isCompressedKlassPointersEnabled() {
if (compressedKlassPointersEnabled == null) {
Flag flag = getCommandLineFlag("UseCompressedClassPointers");
compressedKlassPointersEnabled = (flag == null) ? Boolean.FALSE:
(flag.getBool()? Boolean.TRUE: Boolean.FALSE);
}
return compressedKlassPointersEnabled.booleanValue();
}
public boolean isCompactObjectHeadersEnabled() {
if (compactObjectHeadersEnabled == null) {
Flag flag = getCommandLineFlag("UseCompactObjectHeaders");

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -65,8 +65,8 @@ public:
// Test VirtualSpaceList::contains
const VirtualSpaceList* const vslist =
(mdType == Metaspace::ClassType && Metaspace::using_class_space()) ?
VirtualSpaceList::vslist_class() : VirtualSpaceList::vslist_nonclass();
(mdType == Metaspace::ClassType && INCLUDE_CLASS_SPACE) ?
VirtualSpaceList::vslist_class() : VirtualSpaceList::vslist_nonclass();
ASSERT_TRUE(vslist->contains((MetaWord*)p));

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -37,15 +37,15 @@ TEST_VM(metaspace, MetaspaceUtils_reserved) {
EXPECT_LE(reserved_metadata, reserved);
}
#if INCLUDE_CLASS_SPACE
TEST_VM(metaspace, MetaspaceUtils_reserved_compressed_class_pointers) {
if (UseCompressedClassPointers && CompressedKlassPointers::needs_class_space()) {
size_t reserved = MetaspaceUtils::reserved_bytes();
EXPECT_GT(reserved, 0UL);
size_t reserved_class = MetaspaceUtils::reserved_bytes(Metaspace::ClassType);
EXPECT_GT(reserved_class, 0UL);
EXPECT_LE(reserved_class, reserved);
}
size_t reserved = MetaspaceUtils::reserved_bytes();
EXPECT_GT(reserved, 0UL);
size_t reserved_class = MetaspaceUtils::reserved_bytes(Metaspace::ClassType);
EXPECT_GT(reserved_class, 0UL);
EXPECT_LE(reserved_class, reserved);
}
#endif // INCLUDE_CLASS_SPACE
TEST_VM(metaspace, MetaspaceUtils_committed) {
size_t committed = MetaspaceUtils::committed_bytes();
@ -59,30 +59,15 @@ TEST_VM(metaspace, MetaspaceUtils_committed) {
EXPECT_LE(committed_metadata, committed);
}
#if INCLUDE_CLASS_SPACE
TEST_VM(metaspace, MetaspaceUtils_committed_compressed_class_pointers) {
if (UseCompressedClassPointers && CompressedKlassPointers::needs_class_space()) {
size_t committed = MetaspaceUtils::committed_bytes();
EXPECT_GT(committed, 0UL);
size_t committed_class = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
EXPECT_GT(committed_class, 0UL);
EXPECT_LE(committed_class, committed);
}
}
TEST_VM(metaspace, MetaspaceUtils_non_compressed_class_pointers) {
if (UseCompressedClassPointers) {
return;
}
size_t committed = MetaspaceUtils::committed_bytes();
EXPECT_GT(committed, 0UL);
size_t committed_class = MetaspaceUtils::committed_bytes(Metaspace::ClassType);
EXPECT_EQ(committed_class, 0UL);
size_t used_class = MetaspaceUtils::used_bytes(Metaspace::ClassType);
EXPECT_EQ(used_class, 0UL);
size_t reserved_class = MetaspaceUtils::reserved_bytes(Metaspace::ClassType);
EXPECT_EQ(reserved_class, 0UL);
EXPECT_GT(committed_class, 0UL);
EXPECT_LE(committed_class, committed);
}
#endif // INCLUDE_CLASS_SPACE
static void check_metaspace_stats_are_consistent(const MetaspaceStats& stats) {
EXPECT_LE(stats.committed(), stats.reserved());
@ -102,13 +87,13 @@ TEST_VM(MetaspaceUtils, MetaspaceUtils_get_statistics) {
check_metaspace_stats_are_not_null(combined_stats.non_class_space_stats());
check_metaspace_stats_are_consistent(combined_stats.non_class_space_stats());
if (CompressedKlassPointers::needs_class_space() && UseCompressedClassPointers) {
check_metaspace_stats_are_not_null(combined_stats.class_space_stats());
check_metaspace_stats_are_consistent(combined_stats.class_space_stats());
} else {
// if we don't have a class space, combined stats should equal non-class stats
EXPECT_EQ(combined_stats.non_class_space_stats().reserved(), combined_stats.reserved());
EXPECT_EQ(combined_stats.non_class_space_stats().committed(), combined_stats.committed());
EXPECT_EQ(combined_stats.non_class_space_stats().used(), combined_stats.used());
}
#if INCLUDE_CLASS_SPACE
check_metaspace_stats_are_not_null(combined_stats.class_space_stats());
check_metaspace_stats_are_consistent(combined_stats.class_space_stats());
#else
// if we don't have a class space, combined stats should equal non-class stats
EXPECT_EQ(combined_stats.non_class_space_stats().reserved(), combined_stats.reserved());
EXPECT_EQ(combined_stats.non_class_space_stats().committed(), combined_stats.committed());
EXPECT_EQ(combined_stats.non_class_space_stats().used(), combined_stats.used());
#endif // INCLUDE_CLASS_SPACE
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -97,7 +97,7 @@ TEST_VM(arrayOopDesc, base_offset) {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_OBJECT), 16);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_ARRAY), 16);
}
} else if (UseCompressedClassPointers) {
} else {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_BOOLEAN), 16);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_BYTE), 16);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_SHORT), 16);
@ -108,22 +108,6 @@ TEST_VM(arrayOopDesc, base_offset) {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_DOUBLE), 16);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_OBJECT), 16);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_ARRAY), 16);
} else {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_BOOLEAN), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_BYTE), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_SHORT), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_CHAR), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_INT), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_FLOAT), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_LONG), 24);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_DOUBLE), 24);
if (UseCompressedOops) {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_OBJECT), 20);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_ARRAY), 20);
} else {
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_OBJECT), 24);
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_ARRAY), 24);
}
}
#else
EXPECT_EQ(arrayOopDesc::base_offset_in_bytes(T_BOOLEAN), 12);

View File

@ -29,9 +29,6 @@
#include "unittest.hpp"
TEST_VM(CompressedKlass, basics) {
if (!UseCompressedClassPointers) {
return;
}
ASSERT_LE(CompressedKlassPointers::base(), CompressedKlassPointers::klass_range_start());
ASSERT_LT(CompressedKlassPointers::klass_range_start(), CompressedKlassPointers::klass_range_end());
ASSERT_LE(CompressedKlassPointers::klass_range_end(), CompressedKlassPointers::encoding_range_end());
@ -54,22 +51,7 @@ TEST_VM(CompressedKlass, basics) {
#endif // _LP64
}
TEST_VM(CompressedKlass, ccp_off) {
if (UseCompressedClassPointers) {
return;
}
ASSERT_EQ(CompressedKlassPointers::klass_range_start(), (address)nullptr);
ASSERT_EQ(CompressedKlassPointers::klass_range_end(), (address)nullptr);
// We should be able to call CompressedKlassPointers::is_encodable, and it should
// always return false
ASSERT_FALSE(CompressedKlassPointers::is_encodable((address)0x12345));
}
TEST_VM(CompressedKlass, test_too_low_address) {
if (!UseCompressedClassPointers) {
return;
}
address really_low = (address) 32;
ASSERT_FALSE(CompressedKlassPointers::is_encodable(really_low));
address low = CompressedKlassPointers::klass_range_start() - 1;
@ -77,9 +59,6 @@ TEST_VM(CompressedKlass, test_too_low_address) {
}
TEST_VM(CompressedKlass, test_too_high_address) {
if (!UseCompressedClassPointers) {
return;
}
address really_high = (address) UINTPTR_MAX;
ASSERT_FALSE(CompressedKlassPointers::is_encodable(really_high));
address high = CompressedKlassPointers::klass_range_end();
@ -87,9 +66,6 @@ TEST_VM(CompressedKlass, test_too_high_address) {
}
TEST_VM(CompressedKlass, test_unaligned_address) {
if (!UseCompressedClassPointers) {
return;
}
const size_t alignment = CompressedKlassPointers::klass_alignment_in_bytes();
address addr = CompressedKlassPointers::klass_range_start() + alignment - 1;
ASSERT_FALSE(CompressedKlassPointers::is_encodable(addr));
@ -103,9 +79,6 @@ TEST_VM(CompressedKlass, test_unaligned_address) {
}
TEST_VM(CompressedKlass, test_good_address) {
if (!UseCompressedClassPointers) {
return;
}
const size_t alignment = CompressedKlassPointers::klass_alignment_in_bytes();
address addr = CompressedKlassPointers::klass_range_start();
ASSERT_TRUE(CompressedKlassPointers::is_encodable(addr));
@ -114,9 +87,6 @@ TEST_VM(CompressedKlass, test_good_address) {
}
TEST_VM(CompressedKlass, test_is_valid_narrow_klass) {
if (!UseCompressedClassPointers) {
return;
}
ASSERT_FALSE(CompressedKlassPointers::is_valid_narrow_klass_id(0));
narrowKlass nk_jlC = CompressedKlassPointers::encode((Klass*)vmClasses::Class_klass());
ASSERT_TRUE(CompressedKlassPointers::is_valid_narrow_klass_id(nk_jlC));

View File

@ -27,35 +27,29 @@
TEST_VM(objArrayOop, osize) {
static const struct {
int objal; bool ccp; bool coops; bool coh; int result;
int objal; bool coops; bool coh; int result;
} x[] = {
// ObjAligInB, UseCCP, UseCoops, UseCOH, object size in heap words
// ObjAligInB, UseCoops, UseCOH, object size in heap words
#ifdef _LP64
{ 8, false, false, false, 4 }, // 20 byte header, 8 byte oops
{ 8, false, true, false, 3 }, // 20 byte header, 4 byte oops
{ 8, true, false, false, 3 }, // 16 byte header, 8 byte oops
{ 8, true, true, false, 3 }, // 16 byte header, 4 byte oops
{ 8, true, false, true, 3 }, // 12 byte header, 8 byte oops
{ 8, true, true, true, 2 }, // 12 byte header, 4 byte oops
{ 16, false, false, false, 4 }, // 20 byte header, 8 byte oops, 16-byte align
{ 16, false, true, false, 4 }, // 20 byte header, 4 byte oops, 16-byte align
{ 16, true, false, false, 4 }, // 16 byte header, 8 byte oops, 16-byte align
{ 16, true, true, false, 4 }, // 16 byte header, 4 byte oops, 16-byte align
{ 16, true, false, true, 4 }, // 12 byte header, 8 byte oops, 16-byte align
{ 16, true, true, true, 2 }, // 12 byte header, 4 byte oops, 16-byte align
{ 256, false, false, false, 32 }, // 20 byte header, 8 byte oops, 256-byte align
{ 256, false, true, false, 32 }, // 20 byte header, 4 byte oops, 256-byte align
{ 256, true, false, false, 32 }, // 16 byte header, 8 byte oops, 256-byte align
{ 256, true, true, false, 32 }, // 16 byte header, 4 byte oops, 256-byte align
{ 256, true, false, true, 32 }, // 12 byte header, 8 byte oops, 256-byte align
{ 256, true, true, true, 32 }, // 12 byte header, 4 byte oops, 256-byte align
{ 8, false, false, 3 }, // 16 byte header, 8 byte oops
{ 8, true, false, 3 }, // 16 byte header, 4 byte oops
{ 8, false, true, 3 }, // 12 byte header, 8 byte oops
{ 8, true, true, 2 }, // 12 byte header, 4 byte oops
{ 16, false, false, 4 }, // 16 byte header, 8 byte oops, 16-byte align
{ 16, true, false, 4 }, // 16 byte header, 4 byte oops, 16-byte align
{ 16, false, true, 4 }, // 12 byte header, 8 byte oops, 16-byte align
{ 16, true, true, 2 }, // 12 byte header, 4 byte oops, 16-byte align
{ 256, false, false, 32 }, // 16 byte header, 8 byte oops, 256-byte align
{ 256, true, false, 32 }, // 16 byte header, 4 byte oops, 256-byte align
{ 256, false, true, 32 }, // 12 byte header, 8 byte oops, 256-byte align
{ 256, true, true, 32 }, // 12 byte header, 4 byte oops, 256-byte align
#else
{ 8, false, false, false, 4 }, // 12 byte header, 4 byte oops, wordsize 4
{ 8, false, false, 4 }, // 12 byte header, 4 byte oops, wordsize 4
#endif
{ -1, false, false, false, -1 }
{ -1, false, false, -1 }
};
for (int i = 0; x[i].result != -1; i++) {
if (x[i].objal == (int)ObjectAlignmentInBytes && x[i].ccp == UseCompressedClassPointers && x[i].coops == UseCompressedOops &&
if (x[i].objal == (int)ObjectAlignmentInBytes && x[i].coops == UseCompressedOops &&
x[i].coh == UseCompactObjectHeaders) {
EXPECT_EQ(objArrayOopDesc::object_size(1), (size_t)x[i].result);
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,10 +34,6 @@
* @run main/othervm -XX:CompileCommand=compileonly,compiler.arraycopy.TestObjectArrayClone::testClone*
* -XX:CompileCommand=compileonly,jdk.internal.reflect.GeneratedMethodAccessor*::invoke
* compiler.arraycopy.TestObjectArrayClone
* @run main/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:-UseCompressedClassPointers -Xmx128m
* -XX:CompileCommand=compileonly,compiler.arraycopy.TestObjectArrayClone::testClone*
* -XX:CompileCommand=compileonly,jdk.internal.reflect.GeneratedMethodAccessor*::invoke
* compiler.arraycopy.TestObjectArrayClone
* @run main/othervm -Xbatch -XX:-UseTypeProfile
* -XX:CompileCommand=compileonly,compiler.arraycopy.TestObjectArrayClone::testClone*
* -XX:CompileCommand=compileonly,jdk.internal.reflect.GeneratedMethodAccessor*::invoke

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2016, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2016, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -25,8 +25,7 @@
* @test
* @bug 8160591
* @summary C1-generated code for System.arraycopy() does not throw an ArrayStoreException if 'dst' is no a "proper" array (i.e., it is java.lang.Object)
* @run main/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xcomp -XX:-UseCompressedClassPointers -XX:CompileOnly=TestArrayCopyToFromObject::test TestArrayCopyToFromObject
* @run main/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xcomp -XX:+UseCompressedClassPointers -XX:CompileOnly=TestArrayCopyToFromObject::test TestArrayCopyToFromObject
* @run main/othervm -XX:+IgnoreUnrecognizedVMOptions -XX:+TieredCompilation -XX:TieredStopAtLevel=1 -Xcomp -XX:CompileOnly=TestArrayCopyToFromObject::test TestArrayCopyToFromObject
*/
public class TestArrayCopyToFromObject {

View File

@ -1,63 +0,0 @@
/*
* Copyright (c) 2024, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 8330795
* @summary Check that Reduce Allocation Merges doesn't crash when CompressedClassPointers
* is disabled and there is an access to Klass "field" through the phi.
* @requires vm.bits == 64 & vm.flagless & vm.compiler2.enabled & vm.opt.final.EliminateAllocations
* @run main/othervm -XX:CompileCommand=dontinline,*TestReduceAllocationAndLoadKlass*::test
* -XX:CompileCommand=compileonly,*TestReduceAllocationAndLoadKlass*::test
* -XX:CompileCommand=compileonly,*Shape*::*init*
* -XX:CompileCommand=compileonly,*Point*::*init*
* -XX:CompileCommand=exclude,*TestReduceAllocationAndLoadKlass*::dummy
* -XX:-TieredCompilation
* -XX:-UseCompressedClassPointers
* -Xbatch
* -Xcomp
* compiler.c2.TestReduceAllocationAndLoadKlass
*/
package compiler.c2;
public class TestReduceAllocationAndLoadKlass {
public static void main(String[] args) {
Point p = new Point();
Line q = new Line();
test(true);
test(false);
}
static Class test(boolean cond) {
Object p = cond ? dummy() : new Line();
return p.getClass();
}
static Point dummy() { return new Point(); }
static class Shape { }
static class Point extends Shape { }
static class Line extends Shape { }
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2023, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -46,26 +46,13 @@ public class AllocationMergesTests {
"-XX:+TraceReduceAllocationMerges",
"-XX:+DeoptimizeALot",
"-XX:+UseCompressedOops",
"-XX:+UseCompressedClassPointers",
"-XX:CompileCommand=inline,*::charAt*",
"-XX:CompileCommand=inline,*PicturePositions::*",
"-XX:CompileCommand=inline,*Point::*",
"-XX:CompileCommand=inline,*Nested::*",
"-XX:CompileCommand=exclude,*::dummy*");
Scenario scenario1 = new Scenario(1, "-XX:+UnlockDiagnosticVMOptions",
"-XX:+ReduceAllocationMerges",
"-XX:+TraceReduceAllocationMerges",
"-XX:+DeoptimizeALot",
"-XX:+UseCompressedOops",
"-XX:-UseCompressedClassPointers",
"-XX:CompileCommand=inline,*::charAt*",
"-XX:CompileCommand=inline,*PicturePositions::*",
"-XX:CompileCommand=inline,*Point::*",
"-XX:CompileCommand=inline,*Nested::*",
"-XX:CompileCommand=exclude,*::dummy*");
Scenario scenario2 = new Scenario(2, "-XX:+UnlockDiagnosticVMOptions",
Scenario scenario1 = new Scenario(2, "-XX:+UnlockDiagnosticVMOptions",
"-XX:+ReduceAllocationMerges",
"-XX:+TraceReduceAllocationMerges",
"-XX:+DeoptimizeALot",
@ -76,12 +63,11 @@ public class AllocationMergesTests {
"-XX:CompileCommand=inline,*Nested::*",
"-XX:CompileCommand=exclude,*::dummy*");
Scenario scenario3 = new Scenario(3, "-XX:+UnlockDiagnosticVMOptions",
Scenario scenario2 = new Scenario(3, "-XX:+UnlockDiagnosticVMOptions",
"-XX:+ReduceAllocationMerges",
"-XX:+TraceReduceAllocationMerges",
"-XX:+DeoptimizeALot",
"-XX:+UseCompressedOops",
"-XX:+UseCompressedClassPointers",
"-XX:-OptimizePtrCompare",
"-XX:+VerifyReduceAllocationMerges",
"-XX:CompileCommand=inline,*::charAt*",
@ -90,7 +76,7 @@ public class AllocationMergesTests {
"-XX:CompileCommand=inline,*Nested::*",
"-XX:CompileCommand=exclude,*::dummy*");
framework.addScenarios(scenario0, scenario1, scenario2, scenario3).start();
framework.addScenarios(scenario0, scenario1, scenario2).start();
}
// ------------------ No Scalar Replacement Should Happen in The Tests Below ------------------- //

Some files were not shown because too many files have changed in this diff Show More