8366474: Rename MetaspaceObj::is_shared() to MetaspaceObj::in_aot_cache()

Reviewed-by: liach, kvn
Ioi Lam 2025-09-04 04:47:48 +00:00
parent 4d1dfabcb4
commit 90a2db1ecb
55 changed files with 204 additions and 201 deletions

View File

@ -117,7 +117,7 @@ void VMError::check_failing_cds_access(outputStream* st, const void* siginfo) {
if (si->si_signo == SIGBUS || si->si_signo == SIGSEGV) {
const void* const fault_addr = si->si_addr;
if (fault_addr != nullptr) {
if (MetaspaceShared::is_in_shared_metaspace(fault_addr)) {
if (MetaspaceShared::in_aot_cache(fault_addr)) {
st->print("Error accessing class data sharing archive. "
"Mapped file inaccessible during execution, possible disk/network problem.");
}

View File

@ -51,7 +51,7 @@ void VMError::check_failing_cds_access(outputStream* st, const void* siginfo) {
er->NumberParameters >= 2) {
const void* const fault_addr = (const void*) er->ExceptionInformation[1];
if (fault_addr != nullptr) {
if (MetaspaceShared::is_in_shared_metaspace(fault_addr)) {
if (MetaspaceShared::in_aot_cache(fault_addr)) {
st->print("Error accessing class data sharing archive. "
"Mapped file inaccessible during execution, possible disk/network problem.");
}

View File

@ -224,7 +224,7 @@ void AOTArtifactFinder::append_to_all_cached_classes(Klass* k) {
}
void AOTArtifactFinder::add_cached_instance_class(InstanceKlass* ik) {
if (CDSConfig::is_dumping_dynamic_archive() && ik->is_shared()) {
if (CDSConfig::is_dumping_dynamic_archive() && ik->in_aot_cache()) {
// This class is already included in the base archive. No need to cache
// it again in the dynamic archive.
return;

View File

@ -212,7 +212,7 @@ Array<InstanceKlass*>* AOTClassLinker::write_classes(oop class_loader, bool is_j
continue;
}
if (ik->is_shared() && CDSConfig::is_dumping_dynamic_archive()) {
if (ik->in_aot_cache() && CDSConfig::is_dumping_dynamic_archive()) {
if (CDSConfig::is_using_aot_linked_classes()) {
// This class was recorded as AOT-linked for the base archive,
// so there's no need to do so again for the dynamic archive.

View File

@ -85,7 +85,7 @@ bool AOTConstantPoolResolver::is_class_resolution_deterministic(InstanceKlass* c
if (resolved_class->is_instance_klass()) {
InstanceKlass* ik = InstanceKlass::cast(resolved_class);
if (!ik->is_shared() && SystemDictionaryShared::is_excluded_class(ik)) {
if (!ik->in_aot_cache() && SystemDictionaryShared::is_excluded_class(ik)) {
return false;
}

View File

@ -365,8 +365,8 @@ address ArchiveBuilder::reserve_buffer() {
if (CDSConfig::is_dumping_static_archive()) {
my_archive_requested_bottom = _requested_static_archive_bottom;
} else {
_mapped_static_archive_bottom = (address)MetaspaceObj::shared_metaspace_base();
_mapped_static_archive_top = (address)MetaspaceObj::shared_metaspace_top();
_mapped_static_archive_bottom = (address)MetaspaceObj::aot_metaspace_base();
_mapped_static_archive_top = (address)MetaspaceObj::aot_metaspace_top();
assert(_mapped_static_archive_top >= _mapped_static_archive_bottom, "must be");
size_t static_archive_size = _mapped_static_archive_top - _mapped_static_archive_bottom;
@ -540,7 +540,7 @@ bool ArchiveBuilder::is_excluded(Klass* klass) {
return SystemDictionaryShared::is_excluded_class(ik);
} else if (klass->is_objArray_klass()) {
Klass* bottom = ObjArrayKlass::cast(klass)->bottom_klass();
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_shared_static(bottom)) {
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::in_aot_cache_static_region(bottom)) {
// The bottom class is in the static archive so it's clearly not excluded.
return false;
} else if (bottom->is_instance_klass()) {
@ -553,7 +553,7 @@ bool ArchiveBuilder::is_excluded(Klass* klass) {
ArchiveBuilder::FollowMode ArchiveBuilder::get_follow_mode(MetaspaceClosure::Ref *ref) {
address obj = ref->obj();
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(obj)) {
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::in_aot_cache(obj)) {
// Don't dump existing shared metadata again.
return point_to_it;
} else if (ref->msotype() == MetaspaceObj::MethodDataType ||

View File

@ -275,7 +275,7 @@ public:
}
// The following functions translate between a u4 offset and an address in the
// the range of the mapped CDS archive (e.g., Metaspace::is_in_shared_metaspace()).
// range of the mapped CDS archive (e.g., Metaspace::in_aot_cache()).
// Since the first 16 bytes in this range are dummy data (see ArchiveBuilder::reserve_buffer()),
// we know that offset 0 never represents a valid object. As a result, an offset of 0
// is used to encode a nullptr.
@ -287,7 +287,7 @@ public:
template <typename T> T static offset_to_archived_address(u4 offset) {
assert(offset != 0, "sanity");
T p = (T)(SharedBaseAddress + offset);
assert(Metaspace::is_in_shared_metaspace(p), "must be");
assert(Metaspace::in_aot_cache(p), "must be");
return p;
}
@ -303,7 +303,7 @@ public:
template <typename T> static u4 archived_address_to_offset(T p) {
uintx pn = (uintx)p;
uintx base = (uintx)SharedBaseAddress;
assert(Metaspace::is_in_shared_metaspace(p), "must be");
assert(Metaspace::in_aot_cache(p), "must be");
assert(pn > base, "sanity"); // No valid object is stored at 0 offset from SharedBaseAddress
uintx offset = pn - base;
assert(offset <= MAX_SHARED_DELTA, "range check");
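
The functions above encode a metaspace pointer as a u4 offset from SharedBaseAddress, with offset 0 reserved for nullptr. A minimal standalone model of the round trip (hypothetical names and types, not the HotSpot sources):

#include <cassert>
#include <cstdint>

static char* SharedBaseAddress;                      // base of the mapped archive
static const uint32_t MAX_SHARED_DELTA = 0x7fffffff;

// Pointer -> offset. A valid object never encodes to 0 because the first
// 16 bytes of the mapped range hold dummy data.
static uint32_t to_offset(const void* p) {
  uintptr_t pn = (uintptr_t)p, base = (uintptr_t)SharedBaseAddress;
  assert(pn > base && "no valid object at offset 0");
  uintptr_t offset = pn - base;
  assert(offset <= MAX_SHARED_DELTA && "range check");
  return (uint32_t)offset;
}

// Offset -> pointer; 0 decodes back to nullptr.
static void* to_pointer(uint32_t offset) {
  return offset == 0 ? nullptr : SharedBaseAddress + offset;
}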

View File

@ -80,7 +80,7 @@ Array<T>* ArchiveUtils::archive_ptr_array(GrowableArray<T>* tmp_array) {
for (int i = 0; i < tmp_array->length(); i++) {
T ptr = tmp_array->at(i);
if (ptr != nullptr && !builder->is_in_buffer_space(ptr)) {
if (is_dynamic_dump && MetaspaceShared::is_in_shared_metaspace(ptr)) {
if (is_dynamic_dump && MetaspaceShared::in_aot_cache(ptr)) {
// We have a pointer that lives in the dynamic archive but points into
// the static archive.
} else {

View File

@ -117,8 +117,8 @@ Handle CDSProtectionDomain::get_package_name(Symbol* class_name, TRAPS) {
PackageEntry* CDSProtectionDomain::get_package_entry_from_class(InstanceKlass* ik, Handle class_loader) {
PackageEntry* pkg_entry = ik->package();
if (CDSConfig::is_using_full_module_graph() && ik->is_shared() && pkg_entry != nullptr) {
assert(MetaspaceShared::is_in_shared_metaspace(pkg_entry), "must be");
if (CDSConfig::is_using_full_module_graph() && ik->in_aot_cache() && pkg_entry != nullptr) {
assert(MetaspaceShared::in_aot_cache(pkg_entry), "must be");
assert(!ik->defined_by_other_loaders(), "unexpected archived package entry for an unregistered class");
return pkg_entry;
}

View File

@ -110,7 +110,7 @@ void ClassListWriter::write_to_stream(const InstanceKlass* k, outputStream* stre
bool is_builtin_loader = SystemDictionaryShared::is_builtin_loader(loader_data);
if (!is_builtin_loader) {
// class may be loaded from shared archive
if (!k->is_shared()) {
if (!k->in_aot_cache()) {
if (cfs == nullptr || cfs->source() == nullptr) {
// CDS static dump only handles unregistered classes with a known source.
return;

View File

@ -321,7 +321,7 @@ void CppVtables::zero_archived_vtables() {
}
bool CppVtables::is_valid_shared_method(const Method* m) {
assert(MetaspaceShared::is_in_shared_metaspace(m), "must be");
assert(MetaspaceShared::in_aot_cache(m), "must be");
return vtable_of(m) == _index[Method_Kind]->cloned_vtable() ||
vtable_of(m) == _archived_cpp_vtptrs[Method_Kind];
}

View File

@ -142,7 +142,7 @@ bool DumpTimeClassInfo::is_builtin() {
}
DumpTimeClassInfo* DumpTimeSharedClassTable::allocate_info(InstanceKlass* k) {
assert(CDSConfig::is_dumping_final_static_archive() || !k->is_shared(), "Do not call with shared classes");
assert(CDSConfig::is_dumping_final_static_archive() || !k->in_aot_cache(), "Do not call with shared classes");
bool created;
DumpTimeClassInfo* p = put_if_absent(k, &created);
assert(created, "must not exist in table");
@ -151,7 +151,7 @@ DumpTimeClassInfo* DumpTimeSharedClassTable::allocate_info(InstanceKlass* k) {
}
DumpTimeClassInfo* DumpTimeSharedClassTable::get_info(InstanceKlass* k) {
assert(CDSConfig::is_dumping_final_static_archive() || !k->is_shared(), "Do not call with shared classes");
assert(CDSConfig::is_dumping_final_static_archive() || !k->in_aot_cache(), "Do not call with shared classes");
DumpTimeClassInfo* p = get(k);
assert(p != nullptr, "we must not see any non-shared InstanceKlass* that's "
"not stored with SystemDictionaryShared::init_dumptime_info");

View File

@ -187,10 +187,10 @@ public:
for (int i = T_BOOLEAN; i <= T_LONG; i++) {
assert(is_java_primitive((BasicType)i), "sanity");
Klass* k = Universe::typeArrayKlass((BasicType)i); // this gives you "[I", etc.
assert(MetaspaceShared::is_shared_static((void*)k),
assert(MetaspaceShared::in_aot_cache_static_region((void*)k),
"one-dimensional primitive array should be in static archive");
ArrayKlass* ak = ArrayKlass::cast(k);
while (ak != nullptr && ak->is_shared()) {
while (ak != nullptr && ak->in_aot_cache()) {
Klass* next_k = ak->array_klass_or_null();
if (next_k != nullptr) {
ak = ArrayKlass::cast(next_k);
@ -253,7 +253,7 @@ void DynamicArchiveBuilder::sort_methods() {
// klasses were created. Re-sort all the tables. See Method::sort_methods().
void DynamicArchiveBuilder::sort_methods(InstanceKlass* ik) const {
assert(ik != nullptr, "DynamicArchiveBuilder currently doesn't support dumping the base archive");
if (MetaspaceShared::is_in_shared_metaspace(ik)) {
if (MetaspaceShared::in_aot_cache(ik)) {
// We have reached a supertype that's already in the base archive
return;
}
@ -287,13 +287,13 @@ void DynamicArchiveBuilder::sort_methods(InstanceKlass* ik) const {
if (ik->methods() != nullptr) {
for (int m = 0; m < ik->methods()->length(); m++) {
Symbol* name = ik->methods()->at(m)->name();
assert(MetaspaceShared::is_in_shared_metaspace(name) || is_in_buffer_space(name), "must be");
assert(MetaspaceShared::in_aot_cache(name) || is_in_buffer_space(name), "must be");
}
}
if (ik->default_methods() != nullptr) {
for (int m = 0; m < ik->default_methods()->length(); m++) {
Symbol* name = ik->default_methods()->at(m)->name();
assert(MetaspaceShared::is_in_shared_metaspace(name) || is_in_buffer_space(name), "must be");
assert(MetaspaceShared::in_aot_cache(name) || is_in_buffer_space(name), "must be");
}
}
#endif
@ -367,14 +367,14 @@ void DynamicArchiveBuilder::gather_array_klasses() {
if (klasses()->at(i)->is_objArray_klass()) {
ObjArrayKlass* oak = ObjArrayKlass::cast(klasses()->at(i));
Klass* elem = oak->element_klass();
if (MetaspaceShared::is_shared_static(elem)) {
if (MetaspaceShared::in_aot_cache_static_region(elem)) {
// Only capture the array klass whose element_klass is in the static archive.
// During run time, setup (see DynamicArchive::setup_array_klasses()) is needed
// so that the element_klass can find its array klasses from the dynamic archive.
DynamicArchive::append_array_klass(oak);
} else {
// The element_klass and its array klasses are in the same archive.
assert(!MetaspaceShared::is_shared_static(oak),
assert(!MetaspaceShared::in_aot_cache_static_region(oak),
"we should not gather klasses that are already in the static archive");
}
}
@ -435,7 +435,7 @@ void DynamicArchive::setup_array_klasses() {
assert(!oak->is_typeArray_klass(), "all type array classes must be in static archive");
Klass* elm = oak->element_klass();
assert(MetaspaceShared::is_shared_static((void*)elm), "must be");
assert(MetaspaceShared::in_aot_cache_static_region((void*)elm), "must be");
if (elm->is_instance_klass()) {
assert(InstanceKlass::cast(elm)->array_klasses() == nullptr, "must be");

View File

@ -1144,7 +1144,7 @@ MapArchiveResult FileMapInfo::map_regions(int regions[], int num_regions, char*
FileMapRegion* r = region_at(idx);
DEBUG_ONLY(if (last_region != nullptr) {
// Ensure that the OS won't be able to allocate new memory spaces between any mapped
// regions, or else it would mess up the simple comparison in MetaspaceObj::is_shared().
// regions, or else it would mess up the simple comparison in MetaspaceObj::in_aot_cache().
assert(r->mapped_base() == last_region->mapped_end(), "must have no gaps");
}
last_region = r;)

View File

@ -1220,7 +1220,7 @@ const ArchivedKlassSubGraphInfoRecord*
HeapShared::resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAPS) {
assert(!CDSConfig::is_dumping_heap(), "Should not be called when dumping heap");
if (!k->is_shared()) {
if (!k->in_aot_cache()) {
return nullptr;
}
unsigned int hash = SystemDictionaryShared::hash_for_shared_dictionary_quick(k);
@ -1274,7 +1274,7 @@ HeapShared::resolve_or_init_classes_for_subgraph_of(Klass* k, bool do_init, TRAP
if (klasses != nullptr) {
for (int i = 0; i < klasses->length(); i++) {
Klass* klass = klasses->at(i);
if (!klass->is_shared()) {
if (!klass->in_aot_cache()) {
return nullptr;
}
resolve_or_init(klass, do_init, CHECK_NULL);

View File

@ -179,7 +179,7 @@ void LambdaFormInvokers::regenerate_holder_classes(TRAPS) {
TempNewSymbol class_name_sym = SymbolTable::new_symbol(class_name);
Klass* klass = SystemDictionary::resolve_or_null(class_name_sym, THREAD);
assert(klass != nullptr, "must already be loaded");
if (!klass->is_shared() && klass->shared_classpath_index() < 0) {
if (!klass->in_aot_cache() && klass->shared_classpath_index() < 0) {
// Fake it, so that it will be included into the archive.
klass->set_shared_classpath_index(0);
// Set the "generated" bit, so it won't interfere with JVMTI.
@ -223,7 +223,7 @@ void LambdaFormInvokers::regenerate_class(char* class_name, ClassFileStream& st,
assert(!HAS_PENDING_EXCEPTION, "Invariant");
result->set_is_generated_shared_class();
if (!klass->is_shared()) {
if (!klass->in_aot_cache()) {
log_info(aot, lambda)("regenerate_class excluding klass %s %s", class_name, klass->name()->as_C_string());
SystemDictionaryShared::set_excluded(InstanceKlass::cast(klass)); // exclude the existing class from dump
}

View File

@ -247,12 +247,12 @@ InstanceKlass* LambdaProxyClassDictionary::find_lambda_proxy_class(InstanceKlass
assert(method_type != nullptr, "sanity");
assert(instantiated_method_type != nullptr, "sanity");
if (!caller_ik->is_shared() ||
!invoked_name->is_shared() ||
!invoked_type->is_shared() ||
!method_type->is_shared() ||
(member_method != nullptr && !member_method->is_shared()) ||
!instantiated_method_type->is_shared()) {
if (!caller_ik->in_aot_cache() ||
!invoked_name->in_aot_cache() ||
!invoked_type->in_aot_cache() ||
!method_type->in_aot_cache() ||
(member_method != nullptr && !member_method->in_aot_cache()) ||
!instantiated_method_type->in_aot_cache()) {
// These can't be represented as u4 offsets, but we wouldn't have archived a lambda proxy in this case anyway.
return nullptr;
}
@ -325,7 +325,7 @@ InstanceKlass* LambdaProxyClassDictionary::load_and_init_lambda_proxy_class(Inst
InstanceKlass* shared_nest_host = get_shared_nest_host(lambda_ik);
assert(shared_nest_host != nullptr, "unexpected nullptr _nest_host");
assert(shared_nest_host->is_shared(), "nest host must be in CDS archive");
assert(shared_nest_host->in_aot_cache(), "nest host must be in aot metaspace");
Klass* resolved_nest_host = SystemDictionary::resolve_or_fail(shared_nest_host->name(), class_loader, true, CHECK_NULL);
if (resolved_nest_host != shared_nest_host) {

View File

@ -108,7 +108,7 @@ ReservedSpace MetaspaceShared::_symbol_rs;
VirtualSpace MetaspaceShared::_symbol_vs;
bool MetaspaceShared::_archive_loading_failed = false;
bool MetaspaceShared::_remapped_readwrite = false;
void* MetaspaceShared::_shared_metaspace_static_top = nullptr;
void* MetaspaceShared::_aot_metaspace_static_top = nullptr;
intx MetaspaceShared::_relocation_delta;
char* MetaspaceShared::_requested_base_address;
Array<Method*>* MetaspaceShared::_archived_method_handle_intrinsics = nullptr;
@ -1208,7 +1208,7 @@ bool MetaspaceShared::try_link_class(JavaThread* current, InstanceKlass* ik) {
JavaThread* THREAD = current; // For exception macros.
assert(CDSConfig::is_dumping_archive(), "sanity");
if (ik->is_shared() && !CDSConfig::is_dumping_final_static_archive()) {
if (ik->in_aot_cache() && !CDSConfig::is_dumping_final_static_archive()) {
assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
return false;
}
@ -1252,23 +1252,23 @@ void VM_PopulateDumpSharedSpace::dump_java_heap_objects() {
}
}
void MetaspaceShared::set_shared_metaspace_range(void* base, void *static_top, void* top) {
void MetaspaceShared::set_aot_metaspace_range(void* base, void *static_top, void* top) {
assert(base <= static_top && static_top <= top, "must be");
_shared_metaspace_static_top = static_top;
MetaspaceObj::set_shared_metaspace_range(base, top);
_aot_metaspace_static_top = static_top;
MetaspaceObj::set_aot_metaspace_range(base, top);
}
bool MetaspaceShared::is_shared_dynamic(void* p) {
if ((p < MetaspaceObj::shared_metaspace_top()) &&
(p >= _shared_metaspace_static_top)) {
bool MetaspaceShared::in_aot_cache_dynamic_region(void* p) {
if ((p < MetaspaceObj::aot_metaspace_top()) &&
(p >= _aot_metaspace_static_top)) {
return true;
} else {
return false;
}
}
bool MetaspaceShared::is_shared_static(void* p) {
if (is_in_shared_metaspace(p) && !is_shared_dynamic(p)) {
bool MetaspaceShared::in_aot_cache_static_region(void* p) {
if (in_aot_cache(p) && !in_aot_cache_dynamic_region(p)) {
return true;
} else {
return false;
@ -1368,7 +1368,7 @@ void MetaspaceShared::initialize_runtime_shared_and_meta_spaces() {
char* cds_end = dynamic_mapped ? dynamic_mapinfo->mapped_end() : static_mapinfo->mapped_end();
// Register CDS memory region with LSan.
LSAN_REGISTER_ROOT_REGION(cds_base, cds_end - cds_base);
set_shared_metaspace_range(cds_base, static_mapinfo->mapped_end(), cds_end);
set_aot_metaspace_range(cds_base, static_mapinfo->mapped_end(), cds_end);
_relocation_delta = static_mapinfo->relocation_delta();
_requested_base_address = static_mapinfo->requested_base_address();
if (dynamic_mapped) {
@ -1376,7 +1376,7 @@ void MetaspaceShared::initialize_runtime_shared_and_meta_spaces() {
AutoCreateSharedArchive = false;
}
} else {
set_shared_metaspace_range(nullptr, nullptr, nullptr);
set_aot_metaspace_range(nullptr, nullptr, nullptr);
if (CDSConfig::is_dumping_dynamic_archive()) {
aot_log_warning(aot)("-XX:ArchiveClassesAtExit is unsupported when base CDS archive is not loaded. Run with -Xlog:cds for more info.");
}
@ -1466,7 +1466,7 @@ MapArchiveResult MetaspaceShared::map_archives(FileMapInfo* static_mapinfo, File
if (dynamic_mapinfo != nullptr) {
// Ensure that the OS won't be able to allocate new memory spaces between the two
// archives, or else it would mess up the simple comparison in MetaspaceObj::is_shared().
// archives, or else it would mess up the simple comparison in MetaspaceObj::in_aot_cache().
assert(static_mapinfo->mapping_end_offset() == dynamic_mapinfo->mapping_base_offset(), "no gap");
}
@ -2075,9 +2075,9 @@ bool MetaspaceShared::remap_shared_readonly_as_readwrite() {
void MetaspaceShared::print_on(outputStream* st) {
if (CDSConfig::is_using_archive()) {
st->print("CDS archive(s) mapped at: ");
address base = (address)MetaspaceObj::shared_metaspace_base();
address static_top = (address)_shared_metaspace_static_top;
address top = (address)MetaspaceObj::shared_metaspace_top();
address base = (address)MetaspaceObj::aot_metaspace_base();
address static_top = (address)_aot_metaspace_static_top;
address top = (address)MetaspaceObj::aot_metaspace_top();
st->print("[" PTR_FORMAT "-" PTR_FORMAT "-" PTR_FORMAT "), ", p2i(base), p2i(static_top), p2i(top));
st->print("size %zu, ", top - base);
st->print("SharedBaseAddress: " PTR_FORMAT ", ArchiveRelocationMode: %d.", SharedBaseAddress, ArchiveRelocationMode);

View File

@ -54,7 +54,7 @@ class MetaspaceShared : AllStatic {
static VirtualSpace _symbol_vs; // used only during -Xshare:dump
static bool _archive_loading_failed;
static bool _remapped_readwrite;
static void* _shared_metaspace_static_top;
static void* _aot_metaspace_static_top;
static intx _relocation_delta;
static char* _requested_base_address;
static bool _use_optimized_module_handling;
@ -101,14 +101,17 @@ public:
// Return true if given address is in the shared metaspace regions (i.e., excluding the
// mapped heap region.)
static bool is_in_shared_metaspace(const void* p) {
return MetaspaceObj::is_shared((const MetaspaceObj*)p);
static bool in_aot_cache(const void* p) {
return MetaspaceObj::in_aot_cache((const MetaspaceObj*)p);
}
static void set_shared_metaspace_range(void* base, void *static_top, void* top) NOT_CDS_RETURN;
static void set_aot_metaspace_range(void* base, void *static_top, void* top) NOT_CDS_RETURN;
static bool is_shared_dynamic(void* p) NOT_CDS_RETURN_(false);
static bool is_shared_static(void* p) NOT_CDS_RETURN_(false);
// inside the metaspace of the AOT cache, or the static CDS archive
static bool in_aot_cache_static_region(void* p) NOT_CDS_RETURN_(false);
// inside the metaspace of the dynamic CDS archive
static bool in_aot_cache_dynamic_region(void* p) NOT_CDS_RETURN_(false);
static void unrecoverable_loading_error(const char* message = "unrecoverable error");
static void report_loading_error(const char* format, ...) ATTRIBUTE_PRINTF(1, 0);

View File

@ -75,7 +75,7 @@ void RunTimeClassInfo::init(DumpTimeClassInfo& info) {
}
InstanceKlass* RunTimeClassInfo::klass() const {
if (MetaspaceShared::is_in_shared_metaspace(this)) {
if (MetaspaceShared::in_aot_cache(this)) {
// <this> is inside an mmapped CDS archive.
return ArchiveUtils::offset_to_archived_address<InstanceKlass*>(_klass_offset);
} else {

View File

@ -251,7 +251,7 @@ private:
public:
static RunTimeClassInfo* get_for(InstanceKlass* klass) {
assert(klass->is_shared(), "don't call for non-shared class");
assert(klass->in_aot_cache(), "don't call for non-shared class");
return *info_pointer_addr(klass);
}
static void set_for(InstanceKlass* klass, RunTimeClassInfo* record) {

View File

@ -415,7 +415,7 @@ void ClassLoaderData::loaded_classes_do(KlassClosure* klass_closure) {
if (!InstanceKlass::cast(k)->is_loaded()) {
continue;
}
} else if (k->is_shared() && k->is_objArray_klass()) {
} else if (k->in_aot_cache() && k->is_objArray_klass()) {
Klass* bottom = ObjArrayKlass::cast(k)->bottom_klass();
if (bottom->is_instance_klass() && !InstanceKlass::cast(bottom)->is_loaded()) {
// This could happen if <bottom> is a shared class that has been restored
@ -868,7 +868,7 @@ void ClassLoaderData::init_handle_locked(OopHandle& dest, Handle h) {
// a safepoint which checks if handles point to this metadata field.
void ClassLoaderData::add_to_deallocate_list(Metadata* m) {
// Metadata in shared region isn't deleted.
if (!m->is_shared()) {
if (!m->in_aot_cache()) {
MutexLocker ml(metaspace_lock(), Mutex::_no_safepoint_check_flag);
if (_deallocate_list == nullptr) {
_deallocate_list = new (mtClass) GrowableArray<Metadata*>(100, mtClass);

View File

@ -939,7 +939,7 @@ void java_lang_Class::fixup_mirror(Klass* k, TRAPS) {
assert(InstanceMirrorKlass::offset_of_static_fields() != 0, "must have been computed already");
// If the offset was read from the shared archive, it was fixed up already
if (!k->is_shared()) {
if (!k->in_aot_cache()) {
if (k->is_instance_klass()) {
// During bootstrap, java.lang.Class wasn't loaded so static field
// offsets were computed without the size added it. Go back and
@ -977,7 +977,7 @@ void java_lang_Class::fixup_mirror(Klass* k, TRAPS) {
}
}
if (k->is_shared() && k->has_archived_mirror_index()) {
if (k->in_aot_cache() && k->has_archived_mirror_index()) {
if (ArchiveHeapLoader::is_in_use()) {
bool present = restore_archived_mirror(k, Handle(), Handle(), Handle(), CHECK);
assert(present, "Missing archived mirror for %s", k->external_name());

View File

@ -51,7 +51,7 @@ InstanceKlass* KlassFactory::check_shared_class_file_load_hook(
TRAPS) {
#if INCLUDE_CDS && INCLUDE_JVMTI
assert(ik != nullptr, "sanity");
assert(ik->is_shared(), "expecting a shared class");
assert(ik->in_aot_cache(), "expecting a shared class");
if (JvmtiExport::should_post_class_file_load_hook()) {
ResourceMark rm(THREAD);
// Post the CFLH

View File

@ -1014,7 +1014,7 @@ bool SystemDictionary::is_shared_class_visible_impl(Symbol* class_name,
bool SystemDictionary::check_shared_class_super_type(InstanceKlass* klass, InstanceKlass* super_type,
Handle class_loader, bool is_superclass, TRAPS) {
assert(super_type->is_shared(), "must be");
assert(super_type->in_aot_cache(), "must be");
// Quick check if the super type has already been loaded.
// + Don't do it for unregistered classes -- they can be unloaded so
@ -1077,7 +1077,7 @@ InstanceKlass* SystemDictionary::load_shared_class(InstanceKlass* ik,
PackageEntry* pkg_entry,
TRAPS) {
assert(ik != nullptr, "sanity");
assert(ik->is_shared(), "sanity");
assert(ik->in_aot_cache(), "sanity");
assert(!ik->is_unshareable_info_restored(), "shared class can be restored only once");
assert(Atomic::add(&ik->_shared_class_load_count, 1) == 1, "shared class loaded more than once");
Symbol* class_name = ik->name();
@ -1745,7 +1745,7 @@ bool SystemDictionary::add_loader_constraint(Symbol* class_name,
klass2, loader_data2);
#if INCLUDE_CDS
if (CDSConfig::is_dumping_archive() && klass_being_linked != nullptr &&
!klass_being_linked->is_shared()) {
!klass_being_linked->in_aot_cache()) {
SystemDictionaryShared::record_linking_constraint(constraint_name,
InstanceKlass::cast(klass_being_linked),
class_loader1, class_loader2);

View File

@ -205,7 +205,7 @@ DumpTimeClassInfo* SystemDictionaryShared::get_info_locked(InstanceKlass* k) {
}
bool SystemDictionaryShared::check_for_exclusion(InstanceKlass* k, DumpTimeClassInfo* info) {
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::is_in_shared_metaspace(k)) {
if (CDSConfig::is_dumping_dynamic_archive() && MetaspaceShared::in_aot_cache(k)) {
// We have reached a super type that's already in the base archive. Treat it
// as "not excluded".
return false;
@ -250,7 +250,7 @@ bool SystemDictionaryShared::is_early_klass(InstanceKlass* ik) {
bool SystemDictionaryShared::check_for_exclusion_impl(InstanceKlass* k) {
if (CDSConfig::is_dumping_final_static_archive() && k->defined_by_other_loaders()
&& k->is_shared()) {
&& k->in_aot_cache()) {
return false; // Do not exclude: unregistered classes are passed from preimage to final image.
}
@ -483,7 +483,7 @@ InstanceKlass* SystemDictionaryShared::get_unregistered_class(Symbol* name) {
void SystemDictionaryShared::copy_unregistered_class_size_and_crc32(InstanceKlass* klass) {
precond(CDSConfig::is_dumping_final_static_archive());
precond(klass->is_shared());
precond(klass->in_aot_cache());
// A shared class must have a RunTimeClassInfo record
const RunTimeClassInfo* record = find_record(&_static_archive._unregistered_dictionary,
@ -665,7 +665,7 @@ bool SystemDictionaryShared::should_be_excluded(Klass* k) {
} else {
InstanceKlass* ik = InstanceKlass::cast(k);
if (CDSConfig::is_dumping_dynamic_archive() && ik->is_shared()) {
if (CDSConfig::is_dumping_dynamic_archive() && ik->in_aot_cache()) {
// ik is already part of the static archive, so it will never be considered as excluded.
return false;
}
@ -1018,7 +1018,7 @@ unsigned int SystemDictionaryShared::hash_for_shared_dictionary(address ptr) {
uintx offset = ArchiveBuilder::current()->any_to_offset(ptr);
unsigned int hash = primitive_hash<uintx>(offset);
DEBUG_ONLY({
if (MetaspaceObj::is_shared((const MetaspaceObj*)ptr)) {
if (MetaspaceObj::in_aot_cache((const MetaspaceObj*)ptr)) {
assert(hash == SystemDictionaryShared::hash_for_shared_dictionary_quick(ptr), "must be");
}
});
@ -1106,7 +1106,7 @@ void SystemDictionaryShared::serialize_vm_classes(SerializeClosure* soc) {
const RunTimeClassInfo*
SystemDictionaryShared::find_record(RunTimeSharedDictionary* static_dict, RunTimeSharedDictionary* dynamic_dict, Symbol* name) {
if (!CDSConfig::is_using_archive() || !name->is_shared()) {
if (!CDSConfig::is_using_archive() || !name->in_aot_cache()) {
// The name of every shared class must also be a shared Symbol.
return nullptr;
}
@ -1124,7 +1124,7 @@ SystemDictionaryShared::find_record(RunTimeSharedDictionary* static_dict, RunTim
}
}
if (!MetaspaceShared::is_shared_dynamic(name)) {
if (!MetaspaceShared::in_aot_cache_dynamic_region(name)) {
// The names of all shared classes in the static dict must also be in the
// static archive
record = static_dict->lookup(name, hash, 0);
@ -1163,7 +1163,7 @@ void SystemDictionaryShared::update_shared_entry(InstanceKlass* k, int id) {
const char* SystemDictionaryShared::loader_type_for_shared_class(Klass* k) {
assert(k != nullptr, "Sanity");
assert(k->is_shared(), "Must be");
assert(k->in_aot_cache(), "Must be");
assert(k->is_instance_klass(), "Must be");
InstanceKlass* ik = InstanceKlass::cast(k);
if (ik->defined_by_boot_loader()) {

View File

@ -127,7 +127,7 @@ class SharedClassLoadingMark {
assert(THREAD != nullptr, "Current thread is nullptr");
assert(_klass != nullptr, "InstanceKlass is nullptr");
if (HAS_PENDING_EXCEPTION) {
if (_klass->is_shared()) {
if (_klass->in_aot_cache()) {
_klass->set_shared_loading_failed();
}
}
@ -297,7 +297,7 @@ public:
template <typename T>
static unsigned int hash_for_shared_dictionary_quick(T* ptr) {
assert(MetaspaceObj::is_shared((const MetaspaceObj*)ptr), "must be");
assert(MetaspaceObj::in_aot_cache((const MetaspaceObj*)ptr), "must be");
assert(ptr > (T*)SharedBaseAddress, "must be");
uintx offset = uintx(ptr) - uintx(SharedBaseAddress);
return primitive_hash<uintx>(offset);
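
Hashing the offset from SharedBaseAddress rather than the raw pointer keeps the hash stable no matter where the archive is mapped, which is why the dump-time path (hashing an offset from any_to_offset) and this runtime fast path agree, as the DEBUG_ONLY block above verifies. A sketch under those assumptions (the mixing function is a stand-in, not HotSpot's primitive_hash<uintx>):

#include <cstdint>

static char* SharedBaseAddress;

static unsigned int hash_quick(const void* ptr) {
  // Position-independent: the same archived object hashes identically in every run.
  uintptr_t offset = (uintptr_t)ptr - (uintptr_t)SharedBaseAddress;
  return (unsigned int)(offset * 2654435761u);   // stand-in mixer
}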

View File

@ -140,7 +140,7 @@ static bool is_eligible_for_verification(InstanceKlass* klass, bool should_verif
// Shared classes shouldn't have stackmaps either.
// However, bytecodes for shared old classes can be verified because
// they have not been rewritten.
!(klass->is_shared() && klass->is_rewritten()));
!(klass->in_aot_cache() && klass->is_rewritten()));
}
void Verifier::trace_class_resolution(Klass* resolve_class, InstanceKlass* verify_class) {

View File

@ -138,7 +138,7 @@ void vmClasses::resolve_all(TRAPS) {
ArchiveHeapLoader::fixup_region();
// Initialize the constant pool for the Object_class
assert(Object_klass()->is_shared(), "must be");
assert(Object_klass()->in_aot_cache(), "must be");
Object_klass()->constants()->restore_unshareable_info(CHECK);
resolve_through(VM_CLASS_ID(Class_klass), scan, CHECK);
} else
@ -204,7 +204,7 @@ void vmClasses::resolve_all(TRAPS) {
"All well known classes must be resolved in JVMTI early phase"));
for (auto id : EnumRange<vmClassID>{}) {
InstanceKlass* k = _klasses[as_int(id)];
assert(k->is_shared(), "must not be replaced by JVMTI class file load hook");
assert(k->in_aot_cache(), "must not be replaced by JVMTI class file load hook");
}
}
#endif
@ -219,7 +219,7 @@ void vmClasses::resolve_all(TRAPS) {
void vmClasses::resolve_shared_class(InstanceKlass* klass, ClassLoaderData* loader_data, Handle domain, TRAPS) {
assert(!Universe::is_fully_initialized(), "We can make short cuts only during VM initialization");
assert(klass->is_shared(), "Must be shared class");
assert(klass->in_aot_cache(), "Must be shared class");
if (klass->class_loader_data() != nullptr) {
return;
}

View File

@ -165,7 +165,7 @@ void CompilationPolicy::replay_training_at_init_impl(InstanceKlass* klass, JavaT
void CompilationPolicy::replay_training_at_init(InstanceKlass* klass, JavaThread* current) {
assert(klass->is_initialized(), "");
if (TrainingData::have_data() && klass->is_shared()) {
if (TrainingData::have_data() && klass->in_aot_cache()) {
_training_replay_queue.push(klass, TrainingReplayQueue_lock, current);
}
}

View File

@ -925,7 +925,7 @@ void InterpreterRuntime::cds_resolve_invoke(Bytecodes::Code bytecode, int method
ResourceMark rm;
InstanceKlass* resolved_iklass = InstanceKlass::cast(link_info.resolved_klass());
log_info(aot, resolve)("Not resolved: class not linked: %s %s %s",
resolved_iklass->is_shared() ? "is_shared" : "",
resolved_iklass->in_aot_cache() ? "in_aot_cache" : "",
resolved_iklass->init_state_name(),
resolved_iklass->external_name());
}

View File

@ -124,7 +124,7 @@ void Rewriter::make_constant_pool_cache(TRAPS) {
THREAD);
#if INCLUDE_CDS
if (!HAS_PENDING_EXCEPTION && CDSConfig::is_dumping_archive()) {
if (_pool->pool_holder()->is_shared()) {
if (_pool->pool_holder()->in_aot_cache()) {
assert(CDSConfig::is_dumping_dynamic_archive(), "must be");
// We are linking a shared class from the base archive. This
// class won't be written into the dynamic archive, so there's no
@ -567,8 +567,8 @@ void Rewriter::rewrite_bytecodes(TRAPS) {
void Rewriter::rewrite(InstanceKlass* klass, TRAPS) {
#if INCLUDE_CDS
if (klass->is_shared()) {
assert(!klass->is_rewritten(), "rewritten shared classes cannot be rewritten again");
if (klass->in_aot_cache()) {
assert(!klass->is_rewritten(), "rewritten classes in the AOT cache cannot be rewritten again");
}
#endif // INCLUDE_CDS
ResourceMark rm(THREAD);

View File

@ -65,8 +65,8 @@ void FreeHeap(void* p) {
os::free(p);
}
void* MetaspaceObj::_shared_metaspace_base = nullptr;
void* MetaspaceObj::_shared_metaspace_top = nullptr;
void* MetaspaceObj::_aot_metaspace_base = nullptr;
void* MetaspaceObj::_aot_metaspace_top = nullptr;
void* MetaspaceObj::operator new(size_t size, ClassLoaderData* loader_data,
size_t word_size,

View File

@ -261,43 +261,43 @@ class MetaspaceObj {
// void deallocate_contents(ClassLoaderData* loader_data);
friend class VMStructs;
// When CDS is enabled, all shared metaspace objects are mapped
// All metaspace objects in the AOT cache (CDS archive) are mapped
// into a single contiguous memory block, so we can use these
// two pointers to quickly determine if something is in the
// shared metaspace.
// When CDS is not enabled, both pointers are set to null.
static void* _shared_metaspace_base; // (inclusive) low address
static void* _shared_metaspace_top; // (exclusive) high address
// two pointers to quickly determine if a MetaspaceObj is in the
// AOT cache.
// When AOT/CDS is not enabled, both pointers are set to null.
static void* _aot_metaspace_base; // (inclusive) low address
static void* _aot_metaspace_top; // (exclusive) high address
public:
// Returns true if the pointer points to a valid MetaspaceObj. A valid
// MetaspaceObj is MetaWord-aligned and contained within either
// non-shared or shared metaspace.
// regular or AOT metaspace.
static bool is_valid(const MetaspaceObj* p);
#if INCLUDE_CDS
static bool is_shared(const MetaspaceObj* p) {
// If no shared metaspace regions are mapped, _shared_metaspace_{base,top} will
static bool in_aot_cache(const MetaspaceObj* p) {
// If no AOT metaspace regions are mapped, _aot_metaspace_{base,top} will
// both be null and all values of p will be rejected quickly.
return (((void*)p) < _shared_metaspace_top &&
((void*)p) >= _shared_metaspace_base);
return (((void*)p) < _aot_metaspace_top &&
((void*)p) >= _aot_metaspace_base);
}
bool is_shared() const { return MetaspaceObj::is_shared(this); }
bool in_aot_cache() const { return MetaspaceObj::in_aot_cache(this); }
#else
static bool is_shared(const MetaspaceObj* p) { return false; }
bool is_shared() const { return false; }
static bool in_aot_cache(const MetaspaceObj* p) { return false; }
bool in_aot_cache() const { return false; }
#endif
void print_address_on(outputStream* st) const; // nonvirtual address printing
static void set_shared_metaspace_range(void* base, void* top) {
_shared_metaspace_base = base;
_shared_metaspace_top = top;
static void set_aot_metaspace_range(void* base, void* top) {
_aot_metaspace_base = base;
_aot_metaspace_top = top;
}
static void* shared_metaspace_base() { return _shared_metaspace_base; }
static void* shared_metaspace_top() { return _shared_metaspace_top; }
static void* aot_metaspace_base() { return _aot_metaspace_base; }
static void* aot_metaspace_top() { return _aot_metaspace_top; }
#define METASPACE_OBJ_TYPES_DO(f) \
f(Class) \
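
One property worth noting in the check above: when no archive is mapped, _aot_metaspace_base and _aot_metaspace_top are both null, so the upper-bound comparison fails for every non-null pointer and in_aot_cache() rejects in constant time. A small self-contained illustration (hypothetical stand-ins, not the HotSpot class):

#include <cassert>

static const void* _aot_metaspace_base = nullptr;  // null until an archive is mapped
static const void* _aot_metaspace_top  = nullptr;

static bool in_aot_cache(const void* p) {
  // Half-open interval [base, top); rejects everything while unmapped.
  return p < _aot_metaspace_top && p >= _aot_metaspace_base;
}

int main() {
  static char block[64];                 // pretend this is the mapped archive
  assert(!in_aot_cache(block));          // nothing mapped yet
  _aot_metaspace_base = block;
  _aot_metaspace_top  = block + sizeof(block);
  assert(in_aot_cache(block + 16));      // inside [base, top)
  assert(!in_aot_cache(block + sizeof(block)));  // top is exclusive
  return 0;
}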

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2010, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2010, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -59,7 +59,7 @@ class MetadataFactory : AllStatic {
static void free_array(ClassLoaderData* loader_data, Array<T>* data) {
if (data != nullptr) {
assert(loader_data != nullptr, "shouldn't pass null");
assert(!data->is_shared(), "cannot deallocate array in shared spaces");
assert(!data->in_aot_cache(), "cannot deallocate array in the AOT metaspace");
int size = data->size();
loader_data->metaspace_non_null()->deallocate((MetaWord*)data, size);
}
@ -73,7 +73,7 @@ class MetadataFactory : AllStatic {
int size = md->size();
// Call metadata's deallocate function which will deallocate fields and release_C_heap_structures
assert(!md->on_stack(), "can't deallocate things on stack");
assert(!md->is_shared(), "cannot deallocate if in shared spaces");
assert(!md->in_aot_cache(), "cannot deallocate if in the AOT metaspace");
md->deallocate_contents(loader_data);
// Call the destructor. This is currently used for MethodData which has a member
// that needs to be destructed to release resources. Most Metadata derived classes have noop

View File

@ -1036,8 +1036,8 @@ void Metaspace::purge(bool classes_unloaded) {
// Returns true if pointer points into one of the metaspace regions, or
// into the class space.
bool Metaspace::is_in_shared_metaspace(const void* ptr) {
return MetaspaceShared::is_in_shared_metaspace(ptr);
bool Metaspace::in_aot_cache(const void* ptr) {
return MetaspaceShared::in_aot_cache(ptr);
}
// Returns true if pointer points into one of the non-class-space metaspace regions.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011, 2024, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2011, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2021 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -130,7 +130,7 @@ public:
// Returns true if the pointer points into class space, non-class metaspace, or the
// metadata portion of the CDS archive.
static bool contains(const void* ptr) {
return is_in_shared_metaspace(ptr) || // in cds
return in_aot_cache(ptr) || // in cds
is_in_class_space(ptr) || // in class space
is_in_nonclass_metaspace(ptr); // in one of the non-class regions?
}
@ -142,7 +142,7 @@ public:
}
// Returns true if pointer points into the CDS klass region.
static bool is_in_shared_metaspace(const void* ptr);
static bool in_aot_cache(const void* ptr);
// Returns true if pointer points into one of the non-class-space metaspace regions.
static bool is_in_nonclass_metaspace(const void* ptr);

View File

@ -63,7 +63,7 @@ public:
CountKlassClosure() : _num_classes(0), _num_classes_shared(0) {}
void do_klass(Klass* k) {
_num_classes++;
if (k->is_shared()) {
if (k->in_aot_cache()) {
_num_classes_shared++;
}
}

View File

@ -39,8 +39,8 @@ void PrintMetaspaceInfoKlassClosure::do_klass(Klass* k) {
_out->cr();
_out->print("%4zu: ", _cnt);
// Print a 's' for shared classes
_out->put(k->is_shared() ? 's': ' ');
// Print an 's' for classes in the AOT metaspace (used to be called shared classes)
_out->put(k->in_aot_cache() ? 's': ' ');
ResourceMark rm;
_out->print(" %s", k->external_name());

View File

@ -259,9 +259,9 @@ void ArrayKlass::log_array_class_load(Klass* k) {
LogStream ls(lt);
ResourceMark rm;
ls.print("%s", k->name()->as_klass_external_name());
if (MetaspaceShared::is_shared_dynamic((void*)k)) {
if (MetaspaceShared::in_aot_cache_dynamic_region((void*)k)) {
ls.print(" source: shared objects file (top)");
} else if (MetaspaceShared::is_shared_static((void*)k)) {
} else if (MetaspaceShared::in_aot_cache_static_region((void*)k)) {
ls.print(" source: shared objects file");
}
ls.cr();

View File

@ -383,8 +383,8 @@ void ConstantPool::restore_unshareable_info(TRAPS) {
return;
}
assert(is_constantPool(), "ensure C++ vtable is restored");
assert(on_stack(), "should always be set for shared constant pools");
assert(is_shared(), "should always be set for shared constant pools");
assert(on_stack(), "should always be set for constant pools in the AOT cache");
assert(in_aot_cache(), "should always be set for constant pools in the AOT cache");
if (is_for_method_handle_intrinsic()) {
// See the same check in remove_unshareable_info() below.
assert(cache() == nullptr, "must not have cpCache");
@ -428,11 +428,11 @@ void ConstantPool::restore_unshareable_info(TRAPS) {
}
void ConstantPool::remove_unshareable_info() {
// Shared ConstantPools are in the RO region, so the _flags cannot be modified.
// ConstantPools in the AOT cache are in the RO region, so the _flags cannot be modified.
// The _on_stack flag is used to prevent ConstantPools from deallocation during
// class redefinition. Since shared ConstantPools cannot be deallocated anyway,
// class redefinition. Since such ConstantPools cannot be deallocated anyway,
// we always set _on_stack to true to avoid having to change _flags during runtime.
_flags |= (_on_stack | _is_shared);
_flags |= (_on_stack | _in_aot_cache);
if (is_for_method_handle_intrinsic()) {
// This CP was created by Method::make_method_handle_intrinsic() and has nothing
@ -2258,13 +2258,13 @@ void ConstantPool::set_on_stack(const bool value) {
if (value) {
// Only record if it's not already set.
if (!on_stack()) {
assert(!is_shared(), "should always be set for shared constant pools");
assert(!in_aot_cache(), "on_stack is always preset for constant pools in the AOT cache");
_flags |= _on_stack;
MetadataOnStackMark::record(this);
}
} else {
// Clearing is done single-threadedly.
if (!is_shared()) {
if (!in_aot_cache()) {
_flags &= (u2)(~_on_stack);
}
}

View File

@ -145,7 +145,7 @@ class ConstantPool : public Metadata {
enum {
_has_preresolution = 1, // Flags
_on_stack = 2,
_is_shared = 4,
_in_aot_cache = 4,
_has_dynamic_constant = 8,
_is_for_method_handle_intrinsic = 16
};
@ -212,7 +212,7 @@ class ConstantPool : public Metadata {
bool has_preresolution() const { return (_flags & _has_preresolution) != 0; }
void set_has_preresolution() {
assert(!is_shared(), "should never be called on shared ConstantPools");
assert(!in_aot_cache(), "should never be called on ConstantPools in the AOT cache");
_flags |= _has_preresolution;
}
@ -248,8 +248,8 @@ class ConstantPool : public Metadata {
bool is_maybe_on_stack() const;
void set_on_stack(const bool value);
// Faster than MetaspaceObj::is_shared() - used by set_on_stack()
bool is_shared() const { return (_flags & _is_shared) != 0; }
// Shadows MetaspaceObj::in_aot_cache(). It's faster and is used by set_on_stack()
bool in_aot_cache() const { return (_flags & _in_aot_cache) != 0; }
bool has_dynamic_constant() const { return (_flags & _has_dynamic_constant) != 0; }
void set_has_dynamic_constant() { _flags |= _has_dynamic_constant; }
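
The _in_aot_cache bit duplicates the answer of the address-range check inside the pool's own _flags word, so hot paths such as set_on_stack() pay a single mask test instead of the two pointer comparisons behind MetaspaceObj::in_aot_cache(). A trimmed model of the flag handling (assumed field width, mirroring the enum above):

#include <cstdint>

class PoolFlags {
  uint16_t _flags = 0;
  enum : uint16_t { _on_stack = 2, _in_aot_cache = 4 };
 public:
  bool on_stack() const     { return (_flags & _on_stack) != 0; }
  bool in_aot_cache() const { return (_flags & _in_aot_cache) != 0; }
  // Done once when the pool is written into the read-only region of the
  // cache; _on_stack is preset because such pools are never deallocated.
  void remove_unshareable_info() { _flags |= (_on_stack | _in_aot_cache); }
};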

View File

@ -584,7 +584,7 @@ bool ConstantPoolCache::can_archive_resolved_method(ConstantPool* src_cp, Resolv
#endif // INCLUDE_CDS
void ConstantPoolCache::deallocate_contents(ClassLoaderData* data) {
assert(!is_shared(), "shared caches are not deallocated");
assert(!in_aot_cache(), "objects in the AOT metaspace are not deallocated");
data->remove_handle(_resolved_references);
set_resolved_references(OopHandle());
MetadataFactory::free_array<u2>(data, _reference_map);

View File

@ -561,7 +561,7 @@ InstanceKlass::InstanceKlass(const ClassFileParser& parser, KlassKind kind, Refe
void InstanceKlass::deallocate_methods(ClassLoaderData* loader_data,
Array<Method*>* methods) {
if (methods != nullptr && methods != Universe::the_empty_method_array() &&
!methods->is_shared()) {
!methods->in_aot_cache()) {
for (int i = 0; i < methods->length(); i++) {
Method* method = methods->at(i);
if (method == nullptr) continue; // maybe null if error processing
@ -585,21 +585,21 @@ void InstanceKlass::deallocate_interfaces(ClassLoaderData* loader_data,
// check that the interfaces don't come from super class
Array<InstanceKlass*>* sti = (super_klass == nullptr) ? nullptr :
super_klass->transitive_interfaces();
if (ti != sti && ti != nullptr && !ti->is_shared()) {
if (ti != sti && ti != nullptr && !ti->in_aot_cache()) {
MetadataFactory::free_array<InstanceKlass*>(loader_data, ti);
}
}
// local interfaces can be empty
if (local_interfaces != Universe::the_empty_instance_klass_array() &&
local_interfaces != nullptr && !local_interfaces->is_shared()) {
local_interfaces != nullptr && !local_interfaces->in_aot_cache()) {
MetadataFactory::free_array<InstanceKlass*>(loader_data, local_interfaces);
}
}
void InstanceKlass::deallocate_record_components(ClassLoaderData* loader_data,
Array<RecordComponent*>* record_components) {
if (record_components != nullptr && !record_components->is_shared()) {
if (record_components != nullptr && !record_components->in_aot_cache()) {
for (int i = 0; i < record_components->length(); i++) {
RecordComponent* record_component = record_components->at(i);
MetadataFactory::free_metadata(loader_data, record_component);
@ -643,7 +643,7 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
if (method_ordering() != nullptr &&
method_ordering() != Universe::the_empty_int_array() &&
!method_ordering()->is_shared()) {
!method_ordering()->in_aot_cache()) {
MetadataFactory::free_array<int>(loader_data, method_ordering());
}
set_method_ordering(nullptr);
@ -651,7 +651,7 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
// default methods can be empty
if (default_methods() != nullptr &&
default_methods() != Universe::the_empty_method_array() &&
!default_methods()->is_shared()) {
!default_methods()->in_aot_cache()) {
MetadataFactory::free_array<Method*>(loader_data, default_methods());
}
// Do NOT deallocate the default methods, they are owned by superinterfaces.
@ -659,7 +659,7 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
// default methods vtable indices can be empty
if (default_vtable_indices() != nullptr &&
!default_vtable_indices()->is_shared()) {
!default_vtable_indices()->in_aot_cache()) {
MetadataFactory::free_array<int>(loader_data, default_vtable_indices());
}
set_default_vtable_indices(nullptr);
@ -672,7 +672,7 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
secondary_supers() != Universe::the_empty_klass_array() &&
// see comments in compute_secondary_supers about the following cast
(address)(secondary_supers()) != (address)(transitive_interfaces()) &&
!secondary_supers()->is_shared()) {
!secondary_supers()->in_aot_cache()) {
MetadataFactory::free_array<Klass*>(loader_data, secondary_supers());
}
set_secondary_supers(nullptr, SECONDARY_SUPERS_BITMAP_EMPTY);
@ -681,17 +681,17 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
set_transitive_interfaces(nullptr);
set_local_interfaces(nullptr);
if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->is_shared()) {
if (fieldinfo_stream() != nullptr && !fieldinfo_stream()->in_aot_cache()) {
MetadataFactory::free_array<u1>(loader_data, fieldinfo_stream());
}
set_fieldinfo_stream(nullptr);
if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->is_shared()) {
if (fieldinfo_search_table() != nullptr && !fieldinfo_search_table()->in_aot_cache()) {
MetadataFactory::free_array<u1>(loader_data, fieldinfo_search_table());
}
set_fieldinfo_search_table(nullptr);
if (fields_status() != nullptr && !fields_status()->is_shared()) {
if (fields_status() != nullptr && !fields_status()->in_aot_cache()) {
MetadataFactory::free_array<FieldStatus>(loader_data, fields_status());
}
set_fields_status(nullptr);
@ -700,7 +700,7 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
// delete it, yet. The new class's previous version will point to this.
if (constants() != nullptr) {
assert (!constants()->on_stack(), "shouldn't be called if anything is onstack");
if (!constants()->is_shared()) {
if (!constants()->in_aot_cache()) {
MetadataFactory::free_metadata(loader_data, constants());
}
// Delete any cached resolution errors for the constant pool
@ -711,27 +711,27 @@ void InstanceKlass::deallocate_contents(ClassLoaderData* loader_data) {
if (inner_classes() != nullptr &&
inner_classes() != Universe::the_empty_short_array() &&
!inner_classes()->is_shared()) {
!inner_classes()->in_aot_cache()) {
MetadataFactory::free_array<jushort>(loader_data, inner_classes());
}
set_inner_classes(nullptr);
if (nest_members() != nullptr &&
nest_members() != Universe::the_empty_short_array() &&
!nest_members()->is_shared()) {
!nest_members()->in_aot_cache()) {
MetadataFactory::free_array<jushort>(loader_data, nest_members());
}
set_nest_members(nullptr);
if (permitted_subclasses() != nullptr &&
permitted_subclasses() != Universe::the_empty_short_array() &&
!permitted_subclasses()->is_shared()) {
!permitted_subclasses()->in_aot_cache()) {
MetadataFactory::free_array<jushort>(loader_data, permitted_subclasses());
}
set_permitted_subclasses(nullptr);
// We should deallocate the Annotations instance if it's not in shared spaces.
if (annotations() != nullptr && !annotations()->is_shared()) {
if (annotations() != nullptr && !annotations()->in_aot_cache()) {
MetadataFactory::free_metadata(loader_data, annotations());
}
set_annotations(nullptr);
@ -994,7 +994,7 @@ bool InstanceKlass::link_class_impl(TRAPS) {
if (!is_linked()) {
if (!is_rewritten()) {
if (is_shared()) {
if (in_aot_cache()) {
assert(!verified_at_dump_time(), "must be");
}
{
@ -1013,7 +1013,7 @@ bool InstanceKlass::link_class_impl(TRAPS) {
// also sets rewritten
rewrite_class(CHECK_false);
} else if (is_shared()) {
} else if (in_aot_cache()) {
SystemDictionaryShared::check_verification_constraints(this, CHECK_false);
}
@ -1031,7 +1031,7 @@ bool InstanceKlass::link_class_impl(TRAPS) {
// 2) the class is loaded by built-in class loader but failed to add archived loader constraints or
// 3) the class was not verified during dump time
bool need_init_table = true;
if (is_shared() && verified_at_dump_time() &&
if (in_aot_cache() && verified_at_dump_time() &&
SystemDictionaryShared::check_linking_constraints(THREAD, this)) {
need_init_table = false;
}
@ -1073,7 +1073,7 @@ bool InstanceKlass::link_class_impl(TRAPS) {
void InstanceKlass::rewrite_class(TRAPS) {
assert(is_loaded(), "must be loaded");
if (is_rewritten()) {
assert(is_shared(), "rewriting an unshared class?");
assert(in_aot_cache(), "rewriting an unshared class?");
return;
}
Rewriter::rewrite(this, CHECK);
@ -1685,7 +1685,7 @@ void InstanceKlass::call_class_initializer(TRAPS) {
AOTClassInitializer::call_runtime_setup(THREAD, this);
return;
} else if (has_archived_enum_objs()) {
assert(is_shared(), "must be");
assert(in_aot_cache(), "must be");
bool initialized = CDSEnumKlass::initialize_enum_klass(this, CHECK);
if (initialized) {
return;
@ -2330,7 +2330,7 @@ void PrintClassClosure::do_klass(Klass* k) {
if (ik->is_rewritten()) buf[i++] = 'W';
if (ik->is_contended()) buf[i++] = 'C';
if (ik->has_been_redefined()) buf[i++] = 'R';
if (ik->is_shared()) buf[i++] = 'S';
if (ik->in_aot_cache()) buf[i++] = 'S';
}
buf[i++] = '\0';
_st->print("%-7s ", buf);
@ -2763,7 +2763,7 @@ void InstanceKlass::init_shared_package_entry() {
}
} else if (CDSConfig::is_dumping_dynamic_archive() &&
CDSConfig::is_using_full_module_graph() &&
MetaspaceShared::is_in_shared_metaspace(_package_entry)) {
MetaspaceShared::in_aot_cache(_package_entry)) {
// _package_entry is an archived package in the base archive. Leave it as is.
} else {
_package_entry = nullptr;
@ -2845,7 +2845,7 @@ void InstanceKlass::restore_unshareable_info(ClassLoaderData* loader_data, Handl
// retrieved during dump time.
// Verification of archived old classes will be performed during run time.
bool InstanceKlass::can_be_verified_at_dumptime() const {
if (MetaspaceShared::is_in_shared_metaspace(this)) {
if (MetaspaceShared::in_aot_cache(this)) {
// This is a class that was dumped into the base archive, so we know
// it was verified at dump time.
return true;
@ -3081,14 +3081,14 @@ void InstanceKlass::set_package(ClassLoaderData* loader_data, PackageEntry* pkg_
// ensure java/ packages only loaded by boot or platform builtin loaders
// not needed for shared classes since CDS does not archive prohibited classes.
if (!is_shared()) {
if (!in_aot_cache()) {
check_prohibited_package(name(), loader_data, CHECK);
}
if (is_shared() && _package_entry != nullptr) {
if (in_aot_cache() && _package_entry != nullptr) {
if (CDSConfig::is_using_full_module_graph() && _package_entry == pkg_entry) {
// we can use the saved package
assert(MetaspaceShared::is_in_shared_metaspace(_package_entry), "must be");
assert(MetaspaceShared::in_aot_cache(_package_entry), "must be");
return;
} else {
_package_entry = nullptr;
@ -3970,8 +3970,8 @@ void InstanceKlass::print_class_load_helper(ClassLoaderData* loader_data,
info_stream.print(" source: %s", class_loader->klass()->external_name());
}
} else {
assert(this->is_shared(), "must be");
if (MetaspaceShared::is_shared_dynamic((void*)this)) {
assert(this->in_aot_cache(), "must be");
if (MetaspaceShared::in_aot_cache_dynamic_region((void*)this)) {
info_stream.print(" source: shared objects file (top)");
} else {
info_stream.print(" source: shared objects file");
@ -4254,7 +4254,7 @@ void JNIid::verify(InstanceKlass* holder) {
void InstanceKlass::set_init_state(ClassState state) {
#ifdef ASSERT
bool good_state = is_shared() ? (_init_state <= state)
bool good_state = in_aot_cache() ? (_init_state <= state)
: (_init_state < state);
assert(good_state || state == allocated, "illegal state transition");
#endif
@ -4355,7 +4355,7 @@ void InstanceKlass::purge_previous_version_list() {
assert(pvcp->pool_holder() != nullptr, "Constant pool with no holder");
guarantee (!loader_data->is_unloading(), "unloaded classes can't be on the stack");
live_count++;
if (pvcp->is_shared()) {
if (pvcp->in_aot_cache()) {
// Shared previous versions can never be removed so no cleaning is needed.
log_trace(redefine, class, iklass, purge)("previous version " PTR_FORMAT " is shared", p2i(pv_node));
} else {
@ -4467,7 +4467,7 @@ void InstanceKlass::add_previous_version(InstanceKlass* scratch_class,
assert(scratch_class->previous_versions() == nullptr, "shouldn't have a previous version");
scratch_class->link_previous_versions(previous_versions());
link_previous_versions(scratch_class);
if (cp_ref->is_shared()) {
if (cp_ref->in_aot_cache()) {
log_trace(redefine, class, iklass, add) ("scratch class added; class is shared");
} else {
// We only set clean_previous_versions flag for processing during class

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -52,7 +52,7 @@ void InstanceMirrorKlass::do_metadata(oop obj, OopClosureType* closure) {
if (klass != nullptr) {
if (klass->class_loader_data() == nullptr) {
// This is a mirror that belongs to a shared class that has not been loaded yet.
assert(klass->is_shared(), "Must be");
assert(klass->in_aot_cache(), "Must be");
} else if (klass->is_instance_klass() && klass->class_loader_data()->has_class_mirror_holder()) {
// A non-strong hidden class doesn't have its own class loader,
// so when handling the java mirror for the class we need to make sure its class

View File

@ -805,7 +805,7 @@ void Klass::remove_unshareable_info() {
// Null out class_loader_data because we don't share that yet.
set_class_loader_data(nullptr);
set_is_shared();
set_in_aot_cache();
if (CDSConfig::is_dumping_classic_static_archive()) {
// "Classic" static archives are required to have deterministic contents.
@ -858,7 +858,7 @@ void Klass::remove_java_mirror() {
void Klass::restore_unshareable_info(ClassLoaderData* loader_data, Handle protection_domain, TRAPS) {
assert(is_klass(), "ensure C++ vtable is restored");
assert(is_shared(), "must be set");
assert(in_aot_cache(), "must be set");
assert(secondary_supers()->length() >= (int)population_count(_secondary_supers_bitmap), "must be");
JFR_ONLY(RESTORE_ID(this);)
if (log_is_enabled(Trace, aot, unshareable)) {

View File

@ -174,9 +174,9 @@ private:
#if INCLUDE_CDS
// Various attributes for shared classes. Should be zero for a non-shared class.
u2 _shared_class_flags;
enum CDSSharedClassFlags {
_is_shared_class = 1 << 0, // shadows MetaspaceObj::is_shared
_in_aot_cache = 1 << 0,
_archived_lambda_proxy_is_available = 1 << 1,
_has_value_based_class_annotation = 1 << 2,
_verified_at_dump_time = 1 << 3,
@ -378,13 +378,13 @@ protected:
NOT_CDS(return false;)
}
bool is_shared() const { // shadows MetaspaceObj::is_shared)()
CDS_ONLY(return (_shared_class_flags & _is_shared_class) != 0;)
bool in_aot_cache() const { // shadows MetaspaceObj::in_aot_cache()
CDS_ONLY(return (_shared_class_flags & _in_aot_cache) != 0;)
NOT_CDS(return false;)
}
void set_is_shared() {
CDS_ONLY(_shared_class_flags |= _is_shared_class;)
void set_in_aot_cache() {
CDS_ONLY(_shared_class_flags |= _in_aot_cache;)
}
// Obtain the module or package for this class
@ -610,7 +610,7 @@ public:
virtual void remove_java_mirror();
bool is_unshareable_info_restored() const {
assert(is_shared(), "use this for shared classes only");
assert(in_aot_cache(), "use this for shared classes only");
if (has_archived_mirror_index()) {
// _java_mirror is not a valid OopHandle but rather an encoded reference in the shared heap
return false;

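For readers following the Klass changes: the hunks above use the flag-word idiom, a single u2 field holding one bit per attribute, with test/set accessors compiled away when CDS is disabled. A standalone approximation (not the JDK's code), with the CDS_ONLY/NOT_CDS macros replaced by a plain `#if` and the class name invented:

```cpp
#include <cstdint>

#define INCLUDE_CDS 1  // stand-in for the real build-time switch

// Models Klass::_shared_class_flags: one 16-bit word, one bit per attribute.
class FlagsSketch {
  uint16_t _flags = 0;
  enum : uint16_t {
    _in_aot_cache          = 1 << 0,
    _verified_at_dump_time = 1 << 3,
  };
public:
  bool in_aot_cache() const {
#if INCLUDE_CDS
    return (_flags & _in_aot_cache) != 0;
#else
    return false;  // NOT_CDS branch: always false without CDS support
#endif
  }
  void set_in_aot_cache() {
#if INCLUDE_CDS
    _flags |= _in_aot_cache;
#endif
  }
};
```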
View File

@ -50,7 +50,7 @@ inline InstanceKlass* klassVtable::ik() const {
}
bool klassVtable::is_preinitialized_vtable() {
return _klass->is_shared() && !MetaspaceShared::remapped_readwrite() && _klass->verified_at_dump_time();
return _klass->in_aot_cache() && !MetaspaceShared::remapped_readwrite() && _klass->verified_at_dump_time();
}
@ -163,7 +163,7 @@ void klassVtable::initialize_vtable(GrowableArray<InstanceKlass*>* supers) {
// Note: Arrays can have intermediate array supers. Use java_super to skip them.
InstanceKlass* super = _klass->java_super();
bool is_shared = _klass->is_shared();
bool in_aot_cache = _klass->in_aot_cache();
Thread* current = Thread::current();
if (!_klass->is_array_klass()) {
@ -178,7 +178,7 @@ void klassVtable::initialize_vtable(GrowableArray<InstanceKlass*>* supers) {
#endif
if (Universe::is_bootstrapping()) {
assert(!is_shared, "sanity");
assert(!in_aot_cache, "sanity");
// just clear everything
for (int i = 0; i < _length; i++) table()[i].clear();
return;
@ -1089,7 +1089,7 @@ void itableMethodEntry::initialize(InstanceKlass* klass, Method* m) {
if (m == nullptr) return;
#ifdef ASSERT
if (MetaspaceShared::is_in_shared_metaspace((void*)&_method) &&
if (MetaspaceShared::in_aot_cache((void*)&_method) &&
!MetaspaceShared::remapped_readwrite() &&
m->method_holder()->verified_at_dump_time() &&
klass->verified_at_dump_time()) {
@ -1275,7 +1275,7 @@ int klassItable::assign_itable_indices_for_interface(InstanceKlass* klass) {
// A shared method could have an initialized itable_index that
// is < 0.
assert(m->vtable_index() == Method::pending_itable_index ||
m->is_shared(),
m->in_aot_cache(),
"set by initialize_vtable");
m->set_itable_index(ime_num);
// Progress to next itable entry

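The recurring guard in these klassVtable hunks (and again in Method::set_vtable_index and set_itable_index in the next file) is: if the table was materialized and verified at dump time and the mapped pages were not remapped read-write, the entry is already correct and must not be written. A hedged sketch of that write-guard shape, with all names invented:

```cpp
#include <cassert>

struct VTableSketch {
  bool in_aot_cache;          // table was materialized at dump time
  bool remapped_readwrite;    // pages were remapped writable at runtime
  bool verified_at_dump_time; // dump-time verification succeeded
  int* slots;

  // Mirrors the shape of klassVtable::is_preinitialized_vtable().
  bool is_preinitialized() const {
    return in_aot_cache && !remapped_readwrite && verified_at_dump_time;
  }

  void set_slot(int i, int value) {
    if (is_preinitialized()) {
      // The mapped page may be read-only; the dump-time value must
      // already equal what we would write, so only check it.
      assert(slots[i] == value && "preinitialized entry mismatch");
      return;
    }
    slots[i] = value;
  }
};
```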
View File

@ -446,7 +446,7 @@ void Method::restore_unshareable_info(TRAPS) {
#endif
void Method::set_vtable_index(int index) {
if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
if (in_aot_cache() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
// At runtime initialize_vtable is rerun as part of link_class_impl()
// for a shared class loaded by the non-boot loader to obtain the loader
// constraints based on the runtime classloaders' context.
@ -457,7 +457,7 @@ void Method::set_vtable_index(int index) {
}
void Method::set_itable_index(int index) {
if (is_shared() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
if (in_aot_cache() && !MetaspaceShared::remapped_readwrite() && method_holder()->verified_at_dump_time()) {
// At runtime initialize_itable is rerun as part of link_class_impl()
// for a shared class loaded by the non-boot loader to obtain the loader
// constraints based on the runtime classloaders' context. The dumptime
@ -1251,7 +1251,7 @@ void Method::link_method(const methodHandle& h_method, TRAPS) {
// If the code cache is full, we may reenter this function for the
// leftover methods that weren't linked.
if (adapter() != nullptr) {
if (adapter()->is_shared()) {
if (adapter()->in_aot_cache()) {
assert(adapter()->is_linked(), "Adapter is shared but not linked");
} else {
return;
@ -2175,7 +2175,7 @@ bool Method::is_valid_method(const Method* m) {
return false;
} else if (!os::is_readable_range(m, m + 1)) {
return false;
} else if (m->is_shared()) {
} else if (m->in_aot_cache()) {
return CppVtables::is_valid_shared_method(m);
} else if (Metaspace::contains_non_shared(m)) {
return has_method_vptr((const void*)m);

View File

@ -705,7 +705,7 @@ void TrainingData::metaspace_pointers_do(MetaspaceClosure* iter) {
}
bool TrainingData::Key::can_compute_cds_hash(const Key* const& k) {
return k->meta() == nullptr || MetaspaceObj::is_shared(k->meta());
return k->meta() == nullptr || MetaspaceObj::in_aot_cache(k->meta());
}
uint TrainingData::Key::cds_hash(const Key* const& k) {

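A guess at the rationale for the guard above, as a sketch: a raw metadata pointer is not a stable hash input across runs (address-space layout varies), but the offset of a cache-resident object from the mapped region's base is, so a CDS hash is only computable for null or AOT-cache metadata. Purely illustrative; this is not TrainingData's actual hash function.

```cpp
#include <cstddef>
#include <cstdint>

static const char* g_aot_base;  // base of the mapped AOT region (assumed set)

// Hash the *offset* from the region base, which is identical in every
// process that maps the same archive, unlike the absolute address.
// Precondition: meta is null or inside [g_aot_base, top).
static uint32_t cds_hash_sketch(const void* meta) {
  if (meta == nullptr) return 0;
  size_t offset = static_cast<size_t>(
      static_cast<const char*>(meta) - g_aot_base);
  return static_cast<uint32_t>(offset) * 2654435761u;  // Knuth multiplicative
}
```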
View File

@ -3422,7 +3422,7 @@ JVM_ENTRY(jclass, JVM_LookupLambdaProxyClassFromArchive(JNIEnv* env,
Klass* caller_k = java_lang_Class::as_Klass(JNIHandles::resolve(caller));
InstanceKlass* caller_ik = InstanceKlass::cast(caller_k);
if (!caller_ik->is_shared()) {
if (!caller_ik->in_aot_cache()) {
// there won't be a shared lambda class if the caller_ik is not in the shared archive.
return nullptr;
}

View File

@ -2154,7 +2154,7 @@ WB_ENTRY(jboolean, WB_IsSharedInternedString(JNIEnv* env, jobject wb, jobject st
WB_END
WB_ENTRY(jboolean, WB_IsSharedClass(JNIEnv* env, jobject wb, jclass clazz))
return (jboolean)MetaspaceShared::is_in_shared_metaspace(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));
return (jboolean)MetaspaceShared::in_aot_cache(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz)));
WB_END
WB_ENTRY(jboolean, WB_AreSharedStringsMapped(JNIEnv* env))

View File

@ -2737,7 +2737,7 @@ AdapterHandlerEntry* AdapterHandlerLibrary::get_adapter(const methodHandle& meth
if (entry != nullptr) {
assert(entry->is_linked(), "AdapterHandlerEntry must have been linked");
#ifdef ASSERT
if (!entry->is_shared() && VerifyAdapterSharing) {
if (!entry->in_aot_cache() && VerifyAdapterSharing) {
verify_adapter_sharing(total_args_passed, sig_bt, entry);
}
#endif

View File

@ -347,8 +347,8 @@
/* Memory */ \
/**********/ \
\
static_field(MetaspaceObj, _shared_metaspace_base, void*) \
static_field(MetaspaceObj, _shared_metaspace_top, void*) \
static_field(MetaspaceObj, _aot_metaspace_base, void*) \
static_field(MetaspaceObj, _aot_metaspace_top, void*) \
nonstatic_field(ThreadLocalAllocBuffer, _start, HeapWord*) \
nonstatic_field(ThreadLocalAllocBuffer, _top, HeapWord*) \
nonstatic_field(ThreadLocalAllocBuffer, _end, HeapWord*) \

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -32,8 +32,8 @@ import sun.jvm.hotspot.utilities.Observable;
import sun.jvm.hotspot.utilities.Observer;
public class MetaspaceObj {
private static Address sharedMetaspaceBaseAddr;
private static Address sharedMetaspaceTopAddr;
private static Address aotMetaspaceBaseAddr;
private static Address aotMetaspaceTopAddr;
static {
VM.registerVMInitializedObserver(new Observer() {
@ -45,13 +45,13 @@ public class MetaspaceObj {
private static synchronized void initialize(TypeDataBase db) {
Type type = db.lookupType("MetaspaceObj");
sharedMetaspaceBaseAddr = type.getAddressField("_shared_metaspace_base").getStaticFieldAddress();
sharedMetaspaceTopAddr = type.getAddressField("_shared_metaspace_top").getStaticFieldAddress();
aotMetaspaceBaseAddr = type.getAddressField("_aot_metaspace_base").getStaticFieldAddress();
aotMetaspaceTopAddr = type.getAddressField("_aot_metaspace_top").getStaticFieldAddress();
}
public static boolean isShared(Address addr) {
Address base = sharedMetaspaceBaseAddr.getAddressAt(0);
Address top = sharedMetaspaceTopAddr. getAddressAt(0);
Address base = aotMetaspaceBaseAddr.getAddressAt(0);
Address top = aotMetaspaceTopAddr.getAddressAt(0);
return base.lessThanOrEqual(addr) && addr.lessThan(top);
}
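
Tying the last two files together: vmStructs exports the two renamed statics so the serviceability agent can read them out-of-process, and the SA then decides cache membership with a half-open interval test, base <= addr < top. A standalone C++ sketch of the same check; the HotSpot and SA names are only mirrored here, not called:

```cpp
#include <cassert>
#include <cstdint>

// Stand-ins for MetaspaceObj::_aot_metaspace_base/_aot_metaspace_top,
// the two statics exported through vmStructs above.
static const void* g_aot_base = nullptr;
static const void* g_aot_top  = nullptr;

// Half-open range test over [base, top). An address exactly at 'top' is
// outside the cache, matching the SA's addr.lessThan(top) on the Java side.
static bool in_aot_cache(const void* addr) {
  auto p    = reinterpret_cast<uintptr_t>(addr);
  auto base = reinterpret_cast<uintptr_t>(g_aot_base);
  auto top  = reinterpret_cast<uintptr_t>(g_aot_top);
  return base <= p && p < top;
}

int main() {
  static char mapped[64];  // pretend this is the mapped archive
  g_aot_base = mapped;
  g_aot_top  = mapped + sizeof(mapped);
  assert(in_aot_cache(mapped + 1));
  assert(!in_aot_cache(mapped + sizeof(mapped)));  // top is exclusive
  return 0;
}
```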