complete stub save and restore for aarch64

This commit is contained in:
Andrew Dinn 2025-11-10 17:44:47 +00:00
parent 400a83da89
commit c2945509df
19 changed files with 2811 additions and 533 deletions

View File

@ -3306,7 +3306,7 @@ void MacroAssembler::subw(Register Rd, Register Rn, RegisterOrConstant decrement
void MacroAssembler::reinit_heapbase()
{
if (UseCompressedOops) {
if (Universe::is_fully_initialized()) {
if (Universe::is_fully_initialized() && !AOTCodeCache::is_on_for_dump()) {
mov(rheapbase, CompressedOops::base());
} else {
lea(rheapbase, ExternalAddress(CompressedOops::base_addr()));

View File

@ -292,7 +292,7 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() {
assert(SimpleRuntimeFrame::framesize % 4 == 0, "sp not 16-byte aligned");
const char* name = OptoRuntime::stub_name(StubId::c2_exception_id);
CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)BlobId::c2_exception_id, name);
CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, BlobId::c2_exception_id);
if (blob != nullptr) {
return blob->as_exception_blob();
}

View File

@ -84,8 +84,7 @@
do_stub(compiler, count_positives) \
do_arch_entry(aarch64, compiler, count_positives, count_positives, \
count_positives) \
do_stub(compiler, count_positives_long) \
do_arch_entry(aarch64, compiler, count_positives_long, \
do_arch_entry(aarch64, compiler, count_positives, \
count_positives_long, count_positives_long) \
do_stub(compiler, compare_long_string_LL) \
do_arch_entry(aarch64, compiler, compare_long_string_LL, \
@ -108,8 +107,9 @@
do_stub(compiler, string_indexof_linear_ul) \
do_arch_entry(aarch64, compiler, string_indexof_linear_ul, \
string_indexof_linear_ul, string_indexof_linear_ul) \
/* this uses the entry for ghash_processBlocks */ \
do_stub(compiler, ghash_processBlocks_wide) \
do_stub(compiler, ghash_processBlocks_small) \
do_arch_entry(aarch64, compiler, ghash_processBlocks_small, \
ghash_processBlocks_small, ghash_processBlocks_small) \
#define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub, \
@ -142,6 +142,47 @@
/* stub only -- entries are not stored in StubRoutines::aarch64 */ \
/* n.b. these are not the same as the generic atomic stubs */ \
do_stub(final, atomic_entry_points) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_4_impl, atomic_fetch_add_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_8_impl, atomic_fetch_add_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_4_relaxed_impl, \
atomic_fetch_add_4_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_8_relaxed_impl, \
atomic_fetch_add_8_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_xchg_4_impl, atomic_xchg_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_xchg_8_impl, atomic_xchg_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_1_impl, atomic_cmpxchg_1_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_impl, atomic_cmpxchg_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_impl, atomic_cmpxchg_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_1_relaxed_impl, \
atomic_cmpxchg_1_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_relaxed_impl, \
atomic_cmpxchg_4_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_relaxed_impl, \
atomic_cmpxchg_8_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_release_impl, \
atomic_cmpxchg_4_release_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_release_impl, \
atomic_cmpxchg_8_release_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_seq_cst_impl, \
atomic_cmpxchg_4_seq_cst_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_seq_cst_impl, \
atomic_cmpxchg_8_seq_cst_impl) \
#endif // CPU_AARCH64_STUBDECLARATIONS_HPP

File diff suppressed because it is too large Load Diff

View File

@ -413,3 +413,19 @@ ATTRIBUTE_ALIGNED(64) jdouble StubRoutines::aarch64::_pio2[] = {
2.73370053816464559624e-44, // 0x36E3822280000000
2.16741683877804819444e-51, // 0x3569F31D00000000
};
// Publish the aarch64-specific constant tables that generated stub
// code refers to, so AOT-cached stubs can have references to these
// addresses relocated when the cache is loaded.
void StubRoutines::aarch64::init_AOTAddressTable(GrowableArray<address>& external_addresses) {
  external_addresses.append((address)_kyberConsts);
  external_addresses.append((address)_dilithiumConsts);
  external_addresses.append((address)_crc_table);
  external_addresses.append((address)_adler_table);
  external_addresses.append((address)_npio2_hw);
  external_addresses.append((address)_dsin_coef);
  external_addresses.append((address)_dcos_coef);
  external_addresses.append((address)_two_over_pi);
  external_addresses.append((address)_pio2);
}

View File

@ -110,6 +110,8 @@ private:
_completed = true;
}
static void init_AOTAddressTable(GrowableArray<address>& external_addresses);
private:
static uint16_t _kyberConsts[];
static uint32_t _dilithiumConsts[];

View File

@ -278,10 +278,6 @@ bool Runtime1::initialize(BufferBlob* blob) {
if (!generate_blob_for(blob, id)) {
return false;
}
if (id == StubId::c1_forward_exception_id) {
// publish early c1 stubs at this point so later stubs can refer to them
AOTCodeCache::init_early_c1_table();
}
}
// printing
#ifndef PRODUCT

File diff suppressed because it is too large Load Diff

View File

@ -37,6 +37,7 @@
class CodeBuffer;
class RelocIterator;
class AOTCodeCache;
class AOTCodeReader;
class AdapterBlob;
class ExceptionBlob;
class ImmutableOopMapSet;
@ -52,6 +53,7 @@ enum CompLevel : signed char;
Fn(SharedBlob) \
Fn(C1Blob) \
Fn(C2Blob) \
Fn(StubGenBlob) \
// Descriptor of AOT Code Cache's entry
class AOTCodeEntry {
@ -113,7 +115,9 @@ public:
address dumptime_content_start_addr() const { return _dumptime_content_start_addr; }
static bool is_valid_entry_kind(Kind kind) { return kind > None && kind < Kind_count; }
static bool is_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob; }
static bool is_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob || kind == StubGenBlob; }
static bool is_single_stub_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob || kind == StubGenBlob; }
static bool is_multi_stub_blob(Kind kind) { return kind == StubGenBlob; }
static bool is_adapter(Kind kind) { return kind == Adapter; }
};
@ -122,40 +126,36 @@ class AOTCodeAddressTable : public CHeapObj<mtCode> {
private:
address* _extrs_addr;
address* _stubs_addr;
address* _shared_blobs_addr;
address* _C1_blobs_addr;
uint _extrs_length;
uint _stubs_length;
uint _shared_blobs_length;
uint _C1_blobs_length;
bool _extrs_complete;
bool _early_stubs_complete;
bool _shared_blobs_complete;
bool _early_c1_complete;
bool _shared_stubs_complete;
bool _c1_stubs_complete;
bool _c2_stubs_complete;
bool _stubgen_stubs_complete;
bool _complete;
public:
AOTCodeAddressTable() :
_extrs_addr(nullptr),
_stubs_addr(nullptr),
_shared_blobs_addr(nullptr),
_C1_blobs_addr(nullptr),
_extrs_length(0),
_stubs_length(0),
_shared_blobs_length(0),
_C1_blobs_length(0),
_extrs_complete(false),
_early_stubs_complete(false),
_shared_blobs_complete(false),
_early_c1_complete(false),
_shared_stubs_complete(false),
_c1_stubs_complete(false),
_c2_stubs_complete(false),
_stubgen_stubs_complete(false),
_complete(false)
{ }
~AOTCodeAddressTable();
void init_extrs();
void init_early_stubs();
void init_shared_blobs();
void init_early_c1();
void init_extrs2();
void add_stub_entry(EntryId entry_id, address entry);
void add_external_addresses(GrowableArray<address>& addresses);
void set_shared_stubs_complete();
void set_c1_stubs_complete();
void set_c2_stubs_complete();
void set_stubgen_stubs_complete();
const char* add_C_string(const char* str);
int id_for_C_string(address str);
address address_for_C_string(int idx);
@ -163,6 +163,138 @@ public:
address address_for_id(int id);
};
// Auxiliary class used by AOTStubData to locate addresses owned by a
// stub in the _address_array.
// Auxiliary class used by AOTStubData to locate addresses owned by a
// stub in the _address_array.
class StubAddrRange {
private:
  // Index of the first address owned by a stub or -1 if none present
  int _start_index;
  // Total number of addresses owned by a stub, including in order:
  // start address for stub code and first entry, (exclusive) end
  // address for stub code, all secondary entry addresses, any
  // auxiliary addresses
  uint _naddr;
public:
  StubAddrRange() : _start_index(-1), _naddr(0) {}
  // accessors are const so ranges can be queried through const data
  int start_index() const { return _start_index; }
  // explicit narrowing: _naddr is unsigned but clients index with int
  int count() const { return (int)_naddr; }
  // reset to the empty state established by the default constructor
  void default_init() {
    _start_index = -1;
    _naddr = 0;
  }
  // record a block of naddr addresses starting at start_index
  void init_entry(int start_index, int naddr) {
    _start_index = start_index;
    _naddr = naddr;
  }
};
// class used to save and restore details of stubs embedded in a
// multi-stub (StubGen) blob
class AOTStubData : public StackObj {
  friend class AOTCodeCache;
  friend class AOTCodeReader;
private:
  BlobId _blob_id; // must be a stubgen blob id
  // whatever buffer blob was successfully loaded from the AOT cache
  // following a call to load_code_blob or nullptr
  CodeBlob *_cached_blob;
  // Array of addresses owned by stubs. Each stub appends addresses to
  // this array as a block, whether at the end of generation or at the
  // end of restoration from the cache. The first two addresses in
  // each block are the "start" and "end" addresses of the stub. Any
  // other visible addresses located within the range [start,end)
  // follow, either extra entries, data addresses or SEGV-protected
  // subrange start, end and handler addresses. In the special case
  // that the SEGV handler address is the (external) common address
  // handler the array will hold value nullptr.
  GrowableArray<address> _address_array;
  // count of how many stubs exist in the current blob (not all of
  // which may actually be generated)
  int _stub_cnt;
  // array identifying range of entries in _address_array for each stub
  // indexed by offset of stub in blob
  StubAddrRange* _ranges;
  // id of last looked up stub or NO_STUBID if lookup not attempted or failed
  StubId _current;
  // offset of _current in blob or -1 if lookup not attempted or failed
  int _current_idx;
  // flags indicating whether the AOT code cache is open and, if so,
  // whether we are loading or storing stubs. In the loading case they
  // also record whether we have encountered any invalid stubs or
  // failed to find a stub that was being generated.
  enum Flags {
    OPEN = 1 << 0, // cache is open for use
    USING = 1 << 1, // open and loading stubs
    DUMPING = 1 << 2, // open and storing stubs
    INVALID = 1 << 3, // found invalid stub when loading
  };
  uint32_t _flags;
  void set_invalid() { _flags |= INVALID; }
  StubAddrRange& get_range(int idx) const { return _ranges[idx]; }
  GrowableArray<address>& address_array() { return _address_array; }
  // accessor for entry/auxiliary addresses defaults to start entry
public:
  AOTStubData(BlobId blob_id);
  // NOTE(review): copy/move are left implicit although _ranges is an
  // owning pointer; safe only while instances stay stack-scoped.
  ~AOTStubData() {
    FREE_C_HEAP_ARRAY(StubAddrRange, _ranges);
  }
  bool is_open() { return (_flags & OPEN) != 0; }
  bool is_using() { return (_flags & USING) != 0; }
  bool is_dumping() { return (_flags & DUMPING) != 0; }
  bool is_aot() { return is_using() || is_dumping(); }
  bool is_invalid() { return (_flags & INVALID) != 0; }
  BlobId blob_id() { return _blob_id; }
  StubId current_stub_id() { return _current; }
  // save and restore of the enclosing blob
  bool load_code_blob();
  bool store_code_blob(CodeBlob& new_blob, CodeBuffer *code_buffer);
  // determine whether a stub is available in the AOT cache
  bool find_archive_data(StubId stub_id);
  // retrieve stub entry data if we are using archived stubs and
  // the stub has been found in an AOT-restored blob or store stub
  // entry data if we are saving archived stubs and the stub has just
  // been successfully generated into the current blob.
  //
  // start and end identify the inclusive start and exclusive end
  // address for stub code and must lie in the current blob's code
  // range. Stubs presented via this interface must declare at least
  // one entry and start is always taken to be the first entry.
  //
  // Optional arrays entries and extras present other addresses of
  // interest all of which must either lie in the interval (start,
  // end) or be nullptr (verified by load and store methods).
  //
  // entries lists secondary entries for the stub each of which must
  // match a corresponding entry declaration for the stub (entry count
  // verified by load and store methods). Null entry addresses are
  // allowed when an architecture does not require a specific entry
  // but may not vary from one run to the next. If the cache is in use
  // at a store (for loading or saving code) then non-null entry
  // addresses are entered into the AOT cache stub address table
  // allowing references to them from other stubs or nmethods to be
  // relocated.
  //
  // extras lists other non-entry stub addresses of interest such as
  // memory protection ranges and associated handler addresses. These
  // do not need to be declared as entries and their number and
  // meaning may vary according to the architecture.
  void load_archive_data(StubId stub_id, address& start, address& end, GrowableArray<address>* entries = nullptr, GrowableArray<address>* extras = nullptr);
  void store_archive_data(StubId stub_id, address start, address end, GrowableArray<address>* entries = nullptr, GrowableArray<address>* extras = nullptr);
  // implicit qualification conversion; no cast needed
  const AOTStubData* as_const() { return this; }
};
class AOTCodeCache : public CHeapObj<mtCode> {
// Classes used to describe AOT code cache.
@ -284,6 +416,7 @@ private:
void clear_lookup_failed() { _lookup_failed = false; }
bool lookup_failed() const { return _lookup_failed; }
void add_stub_entry(EntryId entry_id, address entry) NOT_CDS_RETURN;
public:
AOTCodeCache(bool is_dumping, bool is_using);
~AOTCodeCache();
@ -300,9 +433,12 @@ public:
void load_strings();
int store_strings();
static void init_early_stubs_table() NOT_CDS_RETURN;
static void init_shared_blobs_table() NOT_CDS_RETURN;
static void init_early_c1_table() NOT_CDS_RETURN;
static void set_shared_stubs_complete();
static void set_c1_stubs_complete();
static void set_c2_stubs_complete();
static void set_stubgen_stubs_complete();
void add_stub_entries(StubId stub_id, address start, GrowableArray<address> *entries = nullptr, int offset = -1) NOT_CDS_RETURN;
address address_for_C_string(int idx) const { return _table->address_for_C_string(idx); }
address address_for_id(int id) const { return _table->address_for_id(id); }
@ -322,22 +458,41 @@ public:
bool finish_write();
bool write_relocations(CodeBlob& code_blob);
bool write_relocations(CodeBlob& code_blob, RelocIterator& iter);
bool write_oop_map_set(CodeBlob& cb);
bool write_stub_data(CodeBlob& blob, AOTStubData *stub_data);
#ifndef PRODUCT
bool write_asm_remarks(CodeBlob& cb);
bool write_dbg_strings(CodeBlob& cb);
#endif // PRODUCT
private:
// internal private API to save and restore blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
uint id,
const char* name,
AOTStubData* stub_data,
CodeBuffer* code_buffer) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
uint id,
const char* name,
AOTStubData* stub_data) NOT_CDS_RETURN_(nullptr);
public:
// save and restore API for non-enumerable code blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
uint id, const char* name) NOT_CDS_RETURN_(false);
uint id,
const char* name) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
uint id, const char* name) NOT_CDS_RETURN_(nullptr);
// save and restore API for enumerable code blobs
// API for single-stub blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
BlobId id) NOT_CDS_RETURN_(false);
@ -345,6 +500,22 @@ public:
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
BlobId id) NOT_CDS_RETURN_(nullptr);
// API for multi-stub blobs -- for use by class StubGenerator.
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind kind,
BlobId id,
AOTStubData* stub_data,
CodeBuffer *code_buffer) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
BlobId id,
AOTStubData* stub_data) NOT_CDS_RETURN_(nullptr);
static void publish_external_addresses(GrowableArray<address>& addresses);
// publish all entries for a code blob in code cache address table
static void publish_stub_addresses(CodeBlob &code_blob, BlobId id, AOTStubData *stub_data);
static uint store_entries_cnt() {
if (is_on_for_dump()) {
return cache()->_store_entries_cnt;
@ -369,6 +540,7 @@ public:
static AOTCodeCache* cache() { assert(_passed_init2, "Too early to ask"); return _cache; }
static void initialize() NOT_CDS_RETURN;
static void init2() NOT_CDS_RETURN;
static void init3() NOT_CDS_RETURN;
static void close() NOT_CDS_RETURN;
static bool is_on() CDS_ONLY({ return cache() != nullptr && !_cache->closing(); }) NOT_CDS_RETURN_(false);
static bool is_on_for_use() CDS_ONLY({ return is_on() && _cache->for_use(); }) NOT_CDS_RETURN_(false);
@ -389,7 +561,7 @@ public:
// Concurent AOT code reader
class AOTCodeReader {
private:
const AOTCodeCache* _cache;
AOTCodeCache* _cache;
const AOTCodeEntry* _entry;
const char* _load_buffer; // Loaded cached code buffer
uint _read_position; // Position in _load_buffer
@ -406,11 +578,14 @@ private:
public:
AOTCodeReader(AOTCodeCache* cache, AOTCodeEntry* entry);
CodeBlob* compile_code_blob(const char* name);
CodeBlob* compile_code_blob(const char* name, AOTCodeEntry::Kind entry_kind, int id, AOTStubData* stub_data = nullptr);
ImmutableOopMapSet* read_oop_map_set();
void read_stub_data(CodeBlob* code_blob, AOTStubData *stub_data);
void publish_stub_addresses(CodeBlob &code_blob, BlobId id, AOTStubData *stub_data);
void fix_relocations(CodeBlob* code_blob);
void fix_relocations(CodeBlob* code_blob, RelocIterator& iter);
#ifndef PRODUCT
void read_asm_remarks(AsmRemarks& asm_remarks);
void read_dbg_strings(DbgStrings& dbg_strings);

View File

@ -71,7 +71,7 @@ public:
bool needs_return_buffer,
int captured_state_mask,
bool needs_transition)
: StubCodeGenerator(buffer, PrintMethodHandleStubs),
: StubCodeGenerator(buffer, PrintMethodHandleStubs),
_signature(signature),
_num_args(num_args),
_ret_bt(ret_bt),

View File

@ -70,6 +70,8 @@ void VM_Version_init();
void icache_init2();
void initialize_stub_info(); // must precede all blob/stub generation
void preuniverse_stubs_init();
void stubs_AOTAddressTable_init();
void initial_stubs_init();
jint universe_init(); // depends on codeCache_init and preuniverse_stubs_init
@ -149,13 +151,17 @@ jint init_globals() {
AOTCodeCache::init2(); // depends on universe_init, must be before initial_stubs_init
AsyncLogWriter::initialize();
stubs_AOTAddressTable_init(); // publish external addresses used by stubs
// depends on AOTCodeCache::init2
initial_stubs_init(); // stubgen initial stub routines
// stack overflow exception blob is referenced by the interpreter
AOTCodeCache::init_early_stubs_table(); // need this after stubgen initial stubs and before shared runtime initial stubs
SharedRuntime::generate_initial_stubs();
gc_barrier_stubs_init(); // depends on universe_init, must be before interpreter_init
continuations_init(); // must precede continuation stub generation
continuation_stubs_init(); // depends on continuations_init
AOTCodeCache::init3(); // depends on stubs_AOTAddressTable_init
// and continuations_init and must
// precede continuation stub generation
continuation_stubs_init(); // depends on continuations_init and AOTCodeCache::init3
#if INCLUDE_JFR
SharedRuntime::generate_jfr_stubs();
#endif
@ -164,7 +170,6 @@ jint init_globals() {
InterfaceSupport_init();
VMRegImpl::set_regName(); // need this before generate_stubs (for printing oop maps).
SharedRuntime::generate_stubs();
AOTCodeCache::init_shared_blobs_table(); // need this after generate_stubs
SharedRuntime::init_adapter_library(); // do this after AOTCodeCache::init_shared_blobs_table
return JNI_OK;
}

View File

@ -23,6 +23,7 @@
*/
#include "asm/macroAssembler.inline.hpp"
#include "code/aotCodeCache.hpp"
#include "code/codeCache.hpp"
#include "compiler/disassembler.hpp"
#include "oops/oop.inline.hpp"
@ -69,14 +70,16 @@ void StubCodeDesc::print() const { print_on(tty); }
// Construct a generator with no associated stubgen blob and no AOT
// stub data; code is emitted through a fresh MacroAssembler on `code`.
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, bool print_code) {
  _blob_id = BlobId::NO_BLOBID;
  _stub_data = nullptr;
  _masm = new MacroAssembler(code);
  // printing is forced on globally by -XX:+PrintStubCode
  _print_code = print_code || PrintStubCode;
}
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, BlobId blob_id, bool print_code) {
// Construct a generator for a specific stubgen blob, optionally
// attaching AOT stub data used to load/store stubs from/to the cache.
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data, bool print_code) {
  assert(StubInfo::is_stubgen(blob_id), "not a stubgen blob %s", StubInfo::name(blob_id));
  _blob_id = blob_id;
  _stub_data = stub_data;
  _masm = new MacroAssembler(code);
  // printing is forced on globally by -XX:+PrintStubCode
  _print_code = print_code || PrintStubCode;
}
@ -91,11 +94,29 @@ StubCodeGenerator::~StubCodeGenerator() {
#endif
}
void StubCodeGenerator::setup_code_desc(const char* name, address start, address end, bool loaded_from_cache) {
StubCodeDesc* cdesc = new StubCodeDesc("StubRoutines", name, start, end);
cdesc->set_disp(uint(start - _masm->code_section()->outer()->insts_begin()));
if (loaded_from_cache) {
cdesc->set_loaded_from_cache();
}
print_stub_code_desc(cdesc);
// copied from ~StubCodeMark()
Forte::register_stub(cdesc->name(), cdesc->begin(), cdesc->end());
if (JvmtiExport::should_post_dynamic_code_generated()) {
JvmtiExport::post_dynamic_code_generated(cdesc->name(), cdesc->begin(), cdesc->end());
}
}
// Hook called by the StubCodeMark constructor before a stub's code is
// generated. Subclasses may override to emit a prologue; the base
// class intentionally does nothing.
void StubCodeGenerator::stub_prolog(StubCodeDesc* cdesc) {
  // default implementation - do nothing
}
// Hook called by the StubCodeMark destructor once a stub's code has
// been generated; logs/prints the completed stub's descriptor.
void StubCodeGenerator::stub_epilog(StubCodeDesc* cdesc) {
  print_stub_code_desc(cdesc);
}
void StubCodeGenerator::print_stub_code_desc(StubCodeDesc* cdesc) {
LogTarget(Debug, stubs) lt;
if (lt.is_enabled()) {
LogStream ls(lt);
@ -119,6 +140,53 @@ void StubCodeGenerator::stub_epilog(StubCodeDesc* cdesc) {
}
}
// Check whether archived data for stub_id is available. A lookup is
// only attempted when stub data is attached and the cache is being
// read rather than written.
bool StubCodeGenerator::find_archive_data(StubId stub_id) {
  AOTStubData* data = _stub_data;
  if (data == nullptr || data->is_dumping()) {
    // no archive to read from
    return false;
  }
  return data->find_archive_data(stub_id);
}
// Retrieve the archived start/end (and optional entry/extra) addresses
// for stub_id from the attached stub data and register a code
// descriptor for the restored range, marked as loaded from the cache.
void StubCodeGenerator::load_archive_data(StubId stub_id, address& start, address& end, GrowableArray<address> *entries, GrowableArray<address>* extras) {
  const char* stub_name = StubInfo::name(stub_id);
  assert(_stub_data != nullptr && _stub_data->current_stub_id() == stub_id, "no current archive data for %s", stub_name);
  // delegate the actual load to the attached stub data
  _stub_data->load_archive_data(stub_id, start, end, entries, extras);
  setup_code_desc(stub_name, start, end, true);
}
// Record the generated stub's start/end (and optional entry/extra)
// addresses in the attached stub data; a no-op when no stub data is
// attached (i.e. the AOT cache is not in use for this blob).
void StubCodeGenerator::store_archive_data(StubId stub_id, address start, address end, GrowableArray<address>* entries, GrowableArray<address>* extras) {
  if (_stub_data == nullptr) {
    return;
  }
  _stub_data->store_archive_data(stub_id, start, end, entries, extras);
}
// Print per-blob stub counts followed by totals of how many
// StubRoutines stubs were emitted and how many of those were restored
// from the AOT cache rather than generated.
void StubCodeGenerator::print_statistics_on(outputStream* st) {
  st->print_cr("StubRoutines Stubs:");
  st->print_cr(" Initial stubs: %d", StubInfo::stub_count(BlobId::stubgen_initial_id));
  st->print_cr(" Continuation stubs: %d", StubInfo::stub_count(BlobId::stubgen_continuation_id));
  st->print_cr(" Compiler stubs: %d", StubInfo::stub_count(BlobId::stubgen_compiler_id));
  st->print_cr(" Final stubs: %d", StubInfo::stub_count(BlobId::stubgen_final_id));
  int total = 0;
  int cached = 0;
  for (StubCodeDesc* desc = StubCodeDesc::first(); desc != nullptr; desc = StubCodeDesc::next(desc)) {
    if (strcmp(desc->group(), "StubRoutines") == 0) {
      total += 1;
      if (desc->loaded_from_cache()) {
        cached += 1;
      }
    }
  }
  st->print_cr("Total stubroutines stubs emitted: %d (generated=%d, loaded from cache=%d)", total, total - cached, cached);
}
#ifdef ASSERT
void StubCodeGenerator::verify_stub(StubId stub_id) {
assert(StubRoutines::stub_to_blob(stub_id) == blob_id(), "wrong blob %s for generation of stub %s", StubRoutines::get_blob_name(blob_id()), StubRoutines::get_stub_name(stub_id));

View File

@ -26,6 +26,7 @@
#define SHARE_RUNTIME_STUBCODEGENERATOR_HPP
#include "asm/assembler.hpp"
#include "code/aotCodeCache.hpp"
#include "memory/allocation.hpp"
#include "runtime/stubInfo.hpp"
@ -48,6 +49,7 @@ class StubCodeDesc: public CHeapObj<mtCode> {
address _begin; // points to the first byte of the stub code (included)
address _end; // points to the first byte after the stub code (excluded)
uint _disp; // Displacement relative base address in buffer.
bool _loaded_from_cache;
friend class StubCodeMark;
friend class StubCodeGenerator;
@ -65,6 +67,8 @@ class StubCodeDesc: public CHeapObj<mtCode> {
void set_disp(uint disp) { _disp = disp; }
void set_loaded_from_cache() { _loaded_from_cache = true; }
public:
static StubCodeDesc* first() { return _list; }
static StubCodeDesc* next(StubCodeDesc* desc) { return desc->_next; }
@ -81,6 +85,7 @@ class StubCodeDesc: public CHeapObj<mtCode> {
_end = end;
_disp = 0;
_list = this;
_loaded_from_cache = false;
};
static void freeze();
@ -93,12 +98,11 @@ class StubCodeDesc: public CHeapObj<mtCode> {
uint disp() const { return _disp; }
int size_in_bytes() const { return pointer_delta_as_int(_end, _begin); }
bool contains(address pc) const { return _begin <= pc && pc < _end; }
bool loaded_from_cache() const { return _loaded_from_cache; }
void print_on(outputStream* st) const;
void print() const;
};
// forward declare blob and stub id enums
// The base class for all stub-generating code generators.
// Provides utility functions.
@ -108,10 +112,13 @@ class StubCodeGenerator: public StackObj {
BlobId _blob_id;
protected:
MacroAssembler* _masm;
AOTStubData* _stub_data;
void setup_code_desc(const char* name, address start, address end, bool loaded_from_cache);
public:
StubCodeGenerator(CodeBuffer* code, bool print_code = false);
StubCodeGenerator(CodeBuffer* code, BlobId blob_id, bool print_code = false);
StubCodeGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data = nullptr, bool print_code = false);
~StubCodeGenerator();
MacroAssembler* assembler() const { return _masm; }
@ -120,9 +127,16 @@ class StubCodeGenerator: public StackObj {
virtual void stub_prolog(StubCodeDesc* cdesc); // called by StubCodeMark constructor
virtual void stub_epilog(StubCodeDesc* cdesc); // called by StubCodeMark destructor
void print_stub_code_desc(StubCodeDesc* cdesc);
static void print_statistics_on(outputStream* st);
bool find_archive_data(StubId stub_id);
void load_archive_data(StubId stub_id, address& start, address& end, GrowableArray<address> *entries = nullptr, GrowableArray<address>* extras = nullptr);
void store_archive_data(StubId stub_id, address start, address end, GrowableArray<address> *entries = nullptr, GrowableArray<address>* extras = nullptr);
#ifdef ASSERT
void verify_stub(StubId stub_id);
#endif
};
// Stack-allocated helper class used to associate a stub code with a name.

View File

@ -975,9 +975,15 @@
do_entry_init(final, arrayof_jlong_arraycopy, \
arrayof_jlong_arraycopy, arrayof_jlong_arraycopy, \
StubRoutines::arrayof_jlong_copy) \
do_entry(final, arrayof_jlong_arraycopy, \
arrayof_jlong_arraycopy_nopush, \
arrayof_jlong_arraycopy_nopush) \
do_stub(final, arrayof_oop_arraycopy) \
do_entry_init(final, arrayof_oop_arraycopy, arrayof_oop_arraycopy, \
arrayof_oop_arraycopy, StubRoutines::arrayof_oop_copy) \
do_entry(final, arrayof_oop_arraycopy, \
arrayof_oop_arraycopy_nopush, \
arrayof_oop_arraycopy_nopush) \
do_stub(final, arrayof_oop_arraycopy_uninit) \
do_entry_init(final, arrayof_oop_arraycopy_uninit, \
arrayof_oop_arraycopy_uninit, \

View File

@ -1096,6 +1096,15 @@ int StubInfo::stubgen_offset(StubId id) {
return local_offset(StubGroup::STUBGEN, id);
}
// Map a stubgen stub id to its zero-based position within the blob
// that contains it: the difference between the stub's group-local
// offset and the offset of the blob's first stub.
int StubInfo::stubgen_offset_in_blob(BlobId blob_id, StubId id) {
  assert(blob(id) == blob_id, "sanity!");
  const StubGroup group = StubGroup::STUBGEN;
  assert(stubgroup(blob_id) == group, "sanity");
  StubId base_id = stub_base(blob_id);
  assert(base_id != StubId::NO_STUBID, "sanity");
  int blob_base_offset = local_offset(group, base_id);
  return local_offset(group, id) - blob_base_offset;
}
// initialization function called to populate blob. stub and entry
// tables. this must be called before any stubs are generated
void initialize_stub_info() {

View File

@ -678,6 +678,11 @@ public:
static int c1_offset(StubId id);
static int c2_offset(StubId id);
static int stubgen_offset(StubId id);
// Convert a stub id to a unique, zero-based offset in the range of
// stub ids for a given blob in the stubgen stub group.
static int stubgen_offset_in_blob(BlobId blob_id, StubId id);
};

View File

@ -102,7 +102,10 @@ BlobId StubRoutines::stub_to_blob(StubId id) {
// Initialization
extern void StubGenerator_generate(CodeBuffer* code, BlobId blob_id); // only interface to generators
extern void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data); // only interface to generators
#if (defined(X86) && defined(_LP64)) || defined(AARCH64)
extern void StubGenerator_AOTAddressTable_init();
#endif
void UnsafeMemoryAccess::create_table(int max_size) {
UnsafeMemoryAccess::_table = new UnsafeMemoryAccess[max_size];
@ -169,6 +172,33 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
assert(StubInfo::is_stubgen(blob_id), "not a stubgen blob %s", StubInfo::name(blob_id));
ResourceMark rm;
TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
// If we are loading stubs we need to check if we can retrieve a
// blob and/or an associated archived stub descriptor from the
// AOTCodeCache. If we are storing stubs we need to create a blob
// but we still need a stub data descriptor to fill in during
// generation.
AOTStubData stub_data(blob_id);
AOTStubData* stub_data_p = nullptr;
LogTarget(Info, stubs) lt;
if (code_size > 0 && stub_data.is_using()) {
// AOTCodeEntry tracks and logs status of any cached blob
bool loaded = stub_data.load_code_blob();
if (loaded) {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Found blob %s in AOT cache", StubInfo::name(blob_id));
}
stub_data_p = &stub_data;
}
} else if (stub_data.is_dumping()) {
stub_data_p = &stub_data;
}
// Even if we managed to load a blob from the AOT cache we still
// need to allocate a code blob and associated buffer. The AOT blob
// may not include all the stubs we need for this runtime.
// Add extra space for large CodeEntryAlignment
int size = code_size + CodeEntryAlignment * max_aligned_stubs;
BufferBlob* stubs_code = BufferBlob::create(buffer_name, size);
@ -178,6 +208,10 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
// In that case we can tolerate an allocation failure because the
// compiler will have been shut down and we have no need of the
// blob.
// TODO: Ideally we would still like to try to use any AOT cached
// blob here but we don't have a fallback if we find that it is
// missing stubs we need so for now we exit. This should only
// happen in cases where we have a very small code cache.
if (Thread::current()->is_Compiler_thread()) {
assert(blob_id == BlobId::stubgen_compiler_id, "sanity");
assert(DelayCompilerStubsGeneration, "sanity");
@ -187,7 +221,10 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
vm_exit_out_of_memory(code_size, OOM_MALLOC_ERROR, "CodeCache: no room for %s", buffer_name);
}
CodeBuffer buffer(stubs_code);
StubGenerator_generate(&buffer, blob_id);
short buffer_locs[20];
buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
sizeof(buffer_locs)/sizeof(relocInfo));
StubGenerator_generate(&buffer, blob_id, stub_data_p);
if (code_size == 0) {
assert(buffer.insts_size() == 0, "should not write into buffer when bob size declared as 0");
LogTarget(Info, stubs) lt;
@ -203,7 +240,25 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
"increase %s, code_size: %d, used: %d, free: %d",
assert_msg, code_size, buffer.total_content_size(), buffer.insts_remaining());
LogTarget(Info, stubs) lt;
if (AOTCodeCache::is_dumping_stub()) {
if (stub_data.store_code_blob(*stubs_code, &buffer)) {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Stored blob '%s' to Startup Code Cache", buffer_name);
}
} else {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Failed to store blob '%s' to Startup Code Cache", buffer_name);
}
}
}
// close off recording of any further stubgen generation
if (blob_id == BlobId::stubgen_final_id) {
AOTCodeCache::set_stubgen_stubs_complete();
}
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("%s\t [" INTPTR_FORMAT ", " INTPTR_FORMAT "] used: %d, free: %d",
@ -214,6 +269,8 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
return stubs_code;
}
// per blob initializer methods StubRoutines::initialize_xxx_stubs()
#define DEFINE_BLOB_INIT_METHOD(blob_name) \
void StubRoutines::initialize_ ## blob_name ## _stubs() { \
if (STUBGEN_BLOB_FIELD_NAME(blob_name) == nullptr) { \
@ -234,6 +291,7 @@ STUBGEN_BLOBS_DO(DEFINE_BLOB_INIT_METHOD)
#undef DEFINE_BLOB_INIT_METHOD
// external driver API functions for per blob init: xxx_stubs_init()
#define DEFINE_BLOB_INIT_FUNCTION(blob_name) \
void blob_name ## _stubs_init() { \
@ -244,11 +302,24 @@ STUBGEN_BLOBS_DO(DEFINE_BLOB_INIT_FUNCTION)
#undef DEFINE_BLOB_INIT_FUNCTION
// Non-generated init method
void StubRoutines::init_AOTAddressTable() {
#if (defined(X86) && defined(_LP64)) || defined(AARCH64)
StubGenerator_AOTAddressTable_init();
#endif
}
// non-generated external API init driver function
void stubs_AOTAddressTable_init() { StubRoutines::init_AOTAddressTable(); }
/*
* we generate the underlying driver method but this wrapper is needed
* to perform special handling depending on where the compiler init
* gets called from. it ought to be possible to remove this at some
* point and have a determinate ordered init.
* we generate the underlying driver function compiler_stubs_init()
* but this wrapper is needed to perform special handling depending on
* where the compiler init gets called from. it ought to be possible
* to remove this at some point and have a determinate ordered init.
*/
void compiler_stubs_init(bool in_compiler_thread) {

View File

@ -130,6 +130,7 @@ class UnsafeMemoryAccess : public CHeapObj<mtCode> {
static UnsafeMemoryAccess* add_to_table(address start_pc, address end_pc, address error_exit_pc) {
guarantee(_table_length < _table_max_length, "Incorrect UnsafeMemoryAccess::_table_max_length");
UnsafeMemoryAccess* entry = &_table[_table_length];
assert(start_pc != nullptr, "invalid start address");
entry->set_start_pc(start_pc);
entry->set_end_pc(end_pc);
entry->set_error_exit_pc(error_exit_pc);
@ -283,6 +284,9 @@ public:
static BlobId stub_to_blob(StubId id);
#endif
// Initialization
static void init_AOTAddressTable();
// Debugging
static jint verify_oop_count() { return _verify_oop_count; }
static jint* verify_oop_count_addr() { return &_verify_oop_count; }

View File

@ -51,7 +51,7 @@ public class AOTCodeFlags {
public static void main(String... args) throws Exception {
Tester t = new Tester();
// Run all 4 test modes now that JDK-8357398 is fixed (mode 0 - no AOT code, 1 - AOT adapters; see Tester.setTestMode for the remaining modes)
for (int mode = 0; mode < 2; mode++) {
for (int mode = 0; mode < 4; mode++) {
t.setTestMode(mode);
t.run(new String[] {"AOT", "--two-step-training"});
}