Merge branch 'master' into opt-simploop-8346177

This commit is contained in:
katkerem 2026-04-08 16:53:21 +01:00
commit 6f0215fbcf
149 changed files with 6494 additions and 1584 deletions

View File

@ -30,6 +30,7 @@ import java.io.StringWriter;
import java.lang.reflect.Field;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
@ -76,7 +77,7 @@ public class SetupPreviewFeature {
var target = Path.of(args[1]);
Files.createDirectories(target.getParent());
if (constantsToAdd.isEmpty()) {
Files.copy(source, target);
Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);
} else {
String sourceCode = Files.readString(source);
try (var out = Files.newBufferedWriter(target)) {

View File

@ -3454,7 +3454,7 @@ void MacroAssembler::subw(Register Rd, Register Rn, RegisterOrConstant decrement
void MacroAssembler::reinit_heapbase()
{
if (UseCompressedOops) {
if (Universe::is_fully_initialized()) {
if (Universe::is_fully_initialized() && !AOTCodeCache::is_on_for_dump()) {
mov(rheapbase, CompressedOops::base());
} else {
lea(rheapbase, ExternalAddress(CompressedOops::base_addr()));
@ -5128,7 +5128,8 @@ void MacroAssembler::cmp_klass(Register obj, Register klass, Register tmp) {
if (CompressedKlassPointers::base() == nullptr) {
cmp(klass, tmp, LSL, CompressedKlassPointers::shift());
return;
} else if (((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
} else if (!AOTCodeCache::is_on_for_dump() &&
((uint64_t)CompressedKlassPointers::base() & 0xffffffff) == 0
&& CompressedKlassPointers::shift() == 0) {
// Only the bottom 32 bits matter
cmpw(klass, tmp);
@ -5371,7 +5372,7 @@ void MacroAssembler::encode_klass_not_null_for_aot(Register dst, Register src) {
}
void MacroAssembler::encode_klass_not_null(Register dst, Register src) {
if (AOTCodeCache::is_on_for_dump()) {
if (CompressedKlassPointers::base() != nullptr && AOTCodeCache::is_on_for_dump()) {
encode_klass_not_null_for_aot(dst, src);
return;
}

View File

@ -290,7 +290,7 @@ ExceptionBlob* OptoRuntime::generate_exception_blob() {
assert(SimpleRuntimeFrame::framesize % 4 == 0, "sp not 16-byte aligned");
const char* name = OptoRuntime::stub_name(StubId::c2_exception_id);
CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, (uint)BlobId::c2_exception_id, name);
CodeBlob* blob = AOTCodeCache::load_code_blob(AOTCodeEntry::C2Blob, BlobId::c2_exception_id);
if (blob != nullptr) {
return blob->as_exception_blob();
}

View File

@ -84,8 +84,7 @@
do_stub(compiler, count_positives) \
do_arch_entry(aarch64, compiler, count_positives, count_positives, \
count_positives) \
do_stub(compiler, count_positives_long) \
do_arch_entry(aarch64, compiler, count_positives_long, \
do_arch_entry(aarch64, compiler, count_positives, \
count_positives_long, count_positives_long) \
do_stub(compiler, compare_long_string_LL) \
do_arch_entry(aarch64, compiler, compare_long_string_LL, \
@ -108,8 +107,9 @@
do_stub(compiler, string_indexof_linear_ul) \
do_arch_entry(aarch64, compiler, string_indexof_linear_ul, \
string_indexof_linear_ul, string_indexof_linear_ul) \
/* this uses the entry for ghash_processBlocks */ \
do_stub(compiler, ghash_processBlocks_wide) \
do_stub(compiler, ghash_processBlocks_small) \
do_arch_entry(aarch64, compiler, ghash_processBlocks_small, \
ghash_processBlocks_small, ghash_processBlocks_small) \
#define STUBGEN_FINAL_BLOBS_ARCH_DO(do_stub, \
@ -139,9 +139,49 @@
do_stub(final, spin_wait) \
do_arch_entry_init(aarch64, final, spin_wait, spin_wait, \
spin_wait, empty_spin_wait) \
/* stub only -- entries are not stored in StubRoutines::aarch64 */ \
/* n.b. these are not the same as the generic atomic stubs */ \
do_stub(final, atomic_entry_points) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_4_impl, atomic_fetch_add_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_8_impl, atomic_fetch_add_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_4_relaxed_impl, \
atomic_fetch_add_4_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_fetch_add_8_relaxed_impl, \
atomic_fetch_add_8_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_xchg_4_impl, atomic_xchg_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_xchg_8_impl, atomic_xchg_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_1_impl, atomic_cmpxchg_1_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_impl, atomic_cmpxchg_4_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_impl, atomic_cmpxchg_8_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_1_relaxed_impl, \
atomic_cmpxchg_1_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_relaxed_impl, \
atomic_cmpxchg_4_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_relaxed_impl, \
atomic_cmpxchg_8_relaxed_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_release_impl, \
atomic_cmpxchg_4_release_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_release_impl, \
atomic_cmpxchg_8_release_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_4_seq_cst_impl, \
atomic_cmpxchg_4_seq_cst_impl) \
do_arch_entry(aarch64, final, atomic_entry_points, \
atomic_cmpxchg_8_seq_cst_impl, \
atomic_cmpxchg_8_seq_cst_impl) \
#endif // CPU_AARCH64_STUBDECLARATIONS_HPP

File diff suppressed because it is too large Load Diff

View File

@ -413,3 +413,39 @@ ATTRIBUTE_ALIGNED(64) jdouble StubRoutines::aarch64::_pio2[] = {
2.73370053816464559624e-44, // 0x36E3822280000000
2.16741683877804819444e-51, // 0x3569F31D00000000
};
#if INCLUDE_CDS
extern void StubGenerator_init_AOTAddressTable(GrowableArray<address>& addresses);
// Collect the addresses of static data referenced by aarch64 stub code and
// publish them to the AOT code cache so archived code can refer to them
// symbolically rather than by absolute address.
void StubRoutines::init_AOTAddressTable() {
  ResourceMark rm;
  GrowableArray<address> external_addresses;
  // publish static addresses referred to by aarch64 generator
  // n.b. we have to use an extern call here because class
  // StubGenerator, which provides the static method that knows how to
  // add the relevant addresses, is declared in a source file rather
  // than in a separately includeable header.
  StubGenerator_init_AOTAddressTable(external_addresses);
  // publish external data addresses defined in nested aarch64 class
  StubRoutines::aarch64::init_AOTAddressTable(external_addresses);
  AOTCodeCache::publish_external_addresses(external_addresses);
}
// Helper: append one static table address to the published list.
#define ADD(addr) external_addresses.append((address)addr);
// Publish the aarch64-specific constant tables that generated stub code
// references via external addresses.
void StubRoutines::aarch64::init_AOTAddressTable(GrowableArray<address>& external_addresses) {
  ADD(_kyberConsts);
  ADD(_dilithiumConsts);
  // _crc_table is published by shared (generic) code, not here
  // ADD(_crc_table);
  ADD(_adler_table);
  ADD(_npio2_hw);
  ADD(_dsin_coef);
  ADD(_dcos_coef);
  ADD(_two_over_pi);
  ADD(_pio2);
}
#undef ADD
#endif // INCLUDE_CDS

View File

@ -110,6 +110,11 @@ private:
_completed = true;
}
#if INCLUDE_CDS
static void init_AOTAddressTable(GrowableArray<address>& external_addresses);
#endif // INCLUDE_CDS
private:
static uint16_t _kyberConsts[];
static uint32_t _dilithiumConsts[];

View File

@ -3211,7 +3211,7 @@ class StubGenerator: public StubCodeGenerator {
}
public:
StubGenerator(CodeBuffer* code, BlobId blob_id) : StubCodeGenerator(code, blob_id) {
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) : StubCodeGenerator(code, blob_id, stub_data) {
switch(blob_id) {
case BlobId::stubgen_preuniverse_id:
generate_preuniverse_stubs();
@ -3235,8 +3235,8 @@ class StubGenerator: public StubCodeGenerator {
}
}; // end class declaration
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id) {
StubGenerator g(code, blob_id);
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) {
StubGenerator g(code, blob_id, stub_data);
}
// implementation of internal development flag

View File

@ -39,3 +39,9 @@ STUBGEN_ARCH_ENTRIES_DO(DEFINE_ARCH_ENTRY, DEFINE_ARCH_ENTRY_INIT)
address StubRoutines::crc_table_addr() { ShouldNotCallThis(); return nullptr; }
address StubRoutines::crc32c_table_addr() { ShouldNotCallThis(); return nullptr; }
#if INCLUDE_CDS
// nothing to do for arm
void StubRoutines::init_AOTAddressTable() {
}
#endif // INCLUDE_CDS

View File

@ -5095,7 +5095,7 @@ void generate_lookup_secondary_supers_table_stub() {
}
public:
StubGenerator(CodeBuffer* code, BlobId blob_id) : StubCodeGenerator(code, blob_id) {
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData *stub_data) : StubCodeGenerator(code, blob_id, stub_data) {
switch(blob_id) {
case BlobId::stubgen_preuniverse_id:
generate_preuniverse_stubs();
@ -5119,7 +5119,7 @@ void generate_lookup_secondary_supers_table_stub() {
}
};
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id) {
StubGenerator g(code, blob_id);
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData *stub_data) {
StubGenerator g(code, blob_id, stub_data);
}

View File

@ -183,3 +183,9 @@ address StubRoutines::ppc::generate_crc_constants(juint reverse_poly) {
return consts;
}
#if INCLUDE_CDS
// nothing to do for ppc
void StubRoutines::init_AOTAddressTable() {
}
#endif // INCLUDE_CDS

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -56,8 +56,10 @@ void CardTableBarrierSetAssembler::store_at(MacroAssembler* masm, DecoratorSet d
}
}
void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register obj, Register tmp) {
assert_different_registers(obj, tmp);
void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2) {
precond(tmp1 != noreg);
precond(tmp2 != noreg);
assert_different_registers(obj, tmp1, tmp2);
BarrierSet* bs = BarrierSet::barrier_set();
assert(bs->kind() == BarrierSet::CardTableBarrierSet, "Wrong barrier set kind");
@ -65,17 +67,17 @@ void CardTableBarrierSetAssembler::store_check(MacroAssembler* masm, Register ob
assert(CardTable::dirty_card_val() == 0, "must be");
__ load_byte_map_base(tmp);
__ add(tmp, obj, tmp);
__ load_byte_map_base(tmp1);
__ add(tmp1, obj, tmp1);
if (UseCondCardMark) {
Label L_already_dirty;
__ lbu(t1, Address(tmp));
__ beqz(t1, L_already_dirty);
__ sb(zr, Address(tmp));
__ lbu(tmp2, Address(tmp1));
__ beqz(tmp2, L_already_dirty);
__ sb(zr, Address(tmp1));
__ bind(L_already_dirty);
} else {
__ sb(zr, Address(tmp));
__ sb(zr, Address(tmp1));
}
}
@ -119,10 +121,10 @@ void CardTableBarrierSetAssembler::oop_store_at(MacroAssembler* masm, DecoratorS
if (needs_post_barrier) {
// flatten object address if needed
if (!precise || dst.offset() == 0) {
store_check(masm, dst.base(), tmp3);
store_check(masm, dst.base(), tmp1, tmp2);
} else {
__ la(tmp3, dst);
store_check(masm, tmp3, t0);
store_check(masm, tmp3, tmp1, tmp2);
}
}
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, 2026, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2022, Huawei Technologies Co., Ltd. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -31,7 +31,7 @@
class CardTableBarrierSetAssembler: public BarrierSetAssembler {
protected:
void store_check(MacroAssembler* masm, Register obj, Register tmp);
void store_check(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2);
virtual void gen_write_ref_array_pre_barrier(MacroAssembler* masm, DecoratorSet decorators,
Register addr, Register count, RegSet saved_regs) {}

View File

@ -7348,7 +7348,7 @@ static const int64_t right_3_bits = right_n_bits(3);
}
public:
StubGenerator(CodeBuffer* code, BlobId blob_id) : StubCodeGenerator(code, blob_id) {
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) : StubCodeGenerator(code, blob_id, stub_data) {
switch(blob_id) {
case BlobId::stubgen_preuniverse_id:
generate_preuniverse_stubs();
@ -7372,6 +7372,6 @@ static const int64_t right_3_bits = right_n_bits(3);
}
}; // end class declaration
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id) {
StubGenerator g(code, blob_id);
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) {
StubGenerator g(code, blob_id, stub_data);
}

View File

@ -501,3 +501,9 @@ ATTRIBUTE_ALIGNED(4096) juint StubRoutines::riscv::_crc_table[] =
0x751997d0UL, 0x00000001UL,
0xccaa009eUL, 0x00000000UL,
};
#if INCLUDE_CDS
// nothing to do for riscv
void StubRoutines::init_AOTAddressTable() {
}
#endif // INCLUDE_CDS

View File

@ -3422,7 +3422,7 @@ class StubGenerator: public StubCodeGenerator {
}
public:
StubGenerator(CodeBuffer* code, BlobId blob_id) : StubCodeGenerator(code, blob_id) {
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) : StubCodeGenerator(code, blob_id, stub_data) {
switch(blob_id) {
case BlobId::stubgen_preuniverse_id:
generate_preuniverse_stubs();
@ -3479,6 +3479,6 @@ class StubGenerator: public StubCodeGenerator {
};
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id) {
StubGenerator g(code, blob_id);
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data) {
StubGenerator g(code, blob_id, stub_data);
}

View File

@ -736,3 +736,9 @@ juint StubRoutines::zarch::_crc32c_table[CRC32_TABLES][CRC32_COLUMN_SIZE] = {
}
#endif
};
#if INCLUDE_CDS
// nothing to do for s390
void StubRoutines::init_AOTAddressTable() {
}
#endif // INCLUDE_CDS

View File

@ -71,6 +71,17 @@ static jlong *double_signmask_pool = double_quadword(&fp_signmask_pool[2*2],
static jlong *float_signflip_pool = double_quadword(&fp_signmask_pool[3*2], (jlong)UCONST64(0x8000000080000000), (jlong)UCONST64(0x8000000080000000));
static jlong *double_signflip_pool = double_quadword(&fp_signmask_pool[4*2], (jlong)UCONST64(0x8000000000000000), (jlong)UCONST64(0x8000000000000000));
#if INCLUDE_CDS
// Publish the external data addresses defined in this file (the x86
// floating-point sign-mask / sign-flip constant pools) so AOT-compiled
// C1 code can reference them symbolically.
void LIR_Assembler::init_AOTAddressTable(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
  ADD(float_signmask_pool);
  ADD(double_signmask_pool);
  ADD(float_signflip_pool);
  ADD(double_signflip_pool);
#undef ADD
}
#endif // INCLUDE_CDS
NEEDS_CLEANUP // remove this definitions ?
const Register SYNC_header = rax; // synchronization header
@ -519,6 +530,15 @@ void LIR_Assembler::const2reg(LIR_Opr src, LIR_Opr dest, LIR_PatchCode patch_cod
}
case T_LONG: {
#if INCLUDE_CDS
if (AOTCodeCache::is_on_for_dump()) {
address b = c->as_pointer();
if (b == (address)ThreadIdentifier::unsafe_offset()) {
__ lea(dest->as_register_lo(), ExternalAddress(b));
break;
}
}
#endif
assert(patch_code == lir_patch_none, "no patching handled here");
#if INCLUDE_CDS
if (AOTCodeCache::is_on_for_dump()) {

View File

@ -58,4 +58,7 @@ public:
void store_parameter(jobject c, int offset_from_esp_in_words);
void store_parameter(Metadata* c, int offset_from_esp_in_words);
#if INCLUDE_CDS
void static init_AOTAddressTable(GrowableArray<address>& external_addresses);
#endif // INCLUDE_CDS
#endif // CPU_X86_C1_LIRASSEMBLER_X86_HPP

View File

@ -31,6 +31,7 @@
#include "gc/z/zBarrierSetAssembler.hpp"
#include "gc/z/zBarrierSetRuntime.hpp"
#include "gc/z/zThreadLocalData.hpp"
#include "logging/log.hpp"
#include "memory/resourceArea.hpp"
#include "runtime/jniHandles.hpp"
#include "runtime/sharedRuntime.hpp"
@ -1391,10 +1392,13 @@ static uint16_t patch_barrier_relocation_value(int format) {
}
}
void ZBarrierSetAssembler::patch_barrier_relocation(address addr, int format) {
void ZBarrierSetAssembler::patch_barrier_relocation(address addr, int format, bool log) {
const int offset = patch_barrier_relocation_offset(format);
const uint16_t value = patch_barrier_relocation_value(format);
uint8_t* const patch_addr = (uint8_t*)addr + offset;
if (log) {
log_trace(aot, codecache, stubs)("patching address " INTPTR_FORMAT " offset %d value 0x%x", p2i(addr), offset, value);
}
if (format == ZBarrierRelocationFormatLoadGoodBeforeShl) {
if (VM_Version::supports_apx_f()) {
NativeInstruction* instruction = nativeInstruction_at(addr);
@ -1426,6 +1430,74 @@ void ZBarrierSetAssembler::patch_barriers() {
#undef __
#define __ masm->
// Re-register ZGC barrier patch sites recorded in an AOT code cache entry.
// 'entries' holds, starting at index 'begin', 'count' slots containing
// patch-site addresses in three groups -- load-bad, store-bad and
// store-good -- each group terminated by a nullptr sentinel (the layout
// produced by retrieve_reloc_addresses).  Each address is appended to the
// matching relocation list and the site is patched immediately.
void ZBarrierSetAssembler::register_reloc_addresses(GrowableArray<address> &entries, int begin, int count) {
  // Relocation formats in the order the groups appear in 'entries';
  // the trailing -1 marks "past the last group".
  int formats[] = {
    ZBarrierRelocationFormatLoadBadAfterTest,
    ZBarrierRelocationFormatStoreBadAfterTest,
    ZBarrierRelocationFormatStoreGoodAfterOr,
    -1
  };
  int format_idx = 0;
  int format = formats[format_idx++];
  for (int i = begin; i < begin + count; i++) {
    address addr = entries.at(i);
    // reloc addresses occur in 3 groups terminated with a nullptr
    if (addr == nullptr) {
      // group sentinel reached: switch to the next group's format
      assert(format_idx < (int)(sizeof(formats) / sizeof(formats[0])),
             "too many reloc groups");
      format = formats[format_idx++];
    } else {
      switch(format) {
      case ZBarrierRelocationFormatLoadBadAfterTest:
        _load_bad_relocations.append(addr);
        break;
      case ZBarrierRelocationFormatStoreBadAfterTest:
        _store_bad_relocations.append(addr);
        break;
      case ZBarrierRelocationFormatStoreGoodAfterOr:
        _store_good_relocations.append(addr);
        break;
      default:
        ShouldNotReachHere();
        break;
      }
      // patch the relocation now, with logging enabled (log == true)
      patch_barrier_relocation(addr, format, true);
    }
  }
  // all three nullptr sentinels must have been consumed
  assert(format == -1, "unterminated format list");
}
// Collect the ZGC barrier patch sites that lie inside the stub code range
// [start, end) so they can be stored with an AOT code cache entry.
// Addresses are appended to 'entries' in three groups -- load-bad,
// store-bad and store-good -- each group terminated by a nullptr sentinel,
// matching the layout register_reloc_addresses() expects at load time.
void ZBarrierSetAssembler::retrieve_reloc_addresses(address start, address end, GrowableArray<address> &entries) {
  assert(start != nullptr, "start address must not be null");
  // was: "start address must not be null" -- message now matches the check
  assert(end != nullptr, "end address must not be null");
  assert(start < end, "stub range must not be empty");
  for (int i = 0; i < _load_bad_relocations.length(); i++) {
    address addr = _load_bad_relocations.at(i);
    assert(addr != nullptr, "load bad reloc address should not be null!");
    if (start <= addr && addr < end) {
      entries.append(addr);
    }
  }
  entries.append(nullptr); // end of load-bad group
  for (int i = 0; i < _store_bad_relocations.length(); i++) {
    address addr = _store_bad_relocations.at(i);
    assert(addr != nullptr, "store bad reloc address should not be null!");
    if (start <= addr && addr < end) {
      entries.append(addr);
    }
  }
  entries.append(nullptr); // end of store-bad group
  for (int i = 0; i < _store_good_relocations.length(); i++) {
    address addr = _store_good_relocations.at(i);
    assert(addr != nullptr, "store good reloc address should not be null!");
    if (start <= addr && addr < end) {
      entries.append(addr);
    }
  }
  entries.append(nullptr); // end of store-good group
}
void ZBarrierSetAssembler::check_oop(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2, Label& error) {
// C1 calls verify_oop in the middle of barriers, before they have been uncolored
// and after being colored. Therefore, we must deal with colored oops as well.

View File

@ -189,10 +189,14 @@ public:
Label& slow_path,
Label& slow_path_continuation) const;
void patch_barrier_relocation(address addr, int format);
void patch_barrier_relocation(address addr, int format, bool log = false);
void patch_barriers();
void register_reloc_addresses(GrowableArray<address> &entries, int begin, int count);
void retrieve_reloc_addresses(address start, address end, GrowableArray<address> &entries);
void check_oop(MacroAssembler* masm, Register obj, Register tmp1, Register tmp2, Label& error);
};

View File

@ -385,7 +385,8 @@ void MacroAssembler::warn(const char* msg) {
// Windows always allocates space for its register args
subq(rsp, frame::arg_reg_save_area_bytes);
#endif
lea(c_rarg0, ExternalAddress((address) msg));
const char* str = (code_section()->scratch_emit()) ? msg : AOTCodeCache::add_C_string(msg);
lea(c_rarg0, ExternalAddress((address) str));
call(RuntimeAddress(CAST_FROM_FN_PTR(address, warning)));
#ifdef _WIN64
@ -5672,7 +5673,12 @@ void MacroAssembler::encode_and_move_klass_not_null(Register dst, Register src)
BLOCK_COMMENT("encode_and_move_klass_not_null {");
assert_different_registers(src, dst);
if (CompressedKlassPointers::base() != nullptr) {
movptr(dst, -(intptr_t)CompressedKlassPointers::base());
if (AOTCodeCache::is_on_for_dump()) {
movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
negq(dst);
} else {
movptr(dst, -(intptr_t)CompressedKlassPointers::base());
}
addq(dst, src);
} else {
movptr(dst, src);
@ -5720,7 +5726,11 @@ void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src)
} else {
if (CompressedKlassPointers::shift() <= Address::times_8) {
if (CompressedKlassPointers::base() != nullptr) {
movptr(dst, (intptr_t)CompressedKlassPointers::base());
if (AOTCodeCache::is_on_for_dump()) {
movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
} else {
movptr(dst, (intptr_t)CompressedKlassPointers::base());
}
} else {
xorq(dst, dst);
}
@ -5732,9 +5742,14 @@ void MacroAssembler::decode_and_move_klass_not_null(Register dst, Register src)
}
} else {
if (CompressedKlassPointers::base() != nullptr) {
const intptr_t base_right_shifted =
(intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
movptr(dst, base_right_shifted);
if (AOTCodeCache::is_on_for_dump()) {
movptr(dst, ExternalAddress(CompressedKlassPointers::base_addr()));
shrq(dst, CompressedKlassPointers::shift());
} else {
const intptr_t base_right_shifted =
(intptr_t)CompressedKlassPointers::base() >> CompressedKlassPointers::shift();
movptr(dst, base_right_shifted);
}
} else {
xorq(dst, dst);
}
@ -5811,7 +5826,7 @@ void MacroAssembler::cmp_narrow_klass(Address dst, Klass* k) {
void MacroAssembler::reinit_heapbase() {
if (UseCompressedOops) {
if (Universe::heap() != nullptr) {
if (Universe::heap() != nullptr && !AOTCodeCache::is_on_for_dump()) {
if (CompressedOops::base() == nullptr) {
MacroAssembler::xorptr(r12_heapbase, r12_heapbase);
} else {

View File

@ -242,7 +242,6 @@ void MacroAssembler::fast_sha256(XMMRegister msg, XMMRegister state0, XMMRegiste
Label done_hash, loop0;
address K256 = StubRoutines::x86::k256_addr();
address pshuffle_byte_flip_mask = StubRoutines::x86::pshuffle_byte_flip_mask_addr();
movdqu(state0, Address(state, 0));
movdqu(state1, Address(state, 16));
@ -253,7 +252,7 @@ void MacroAssembler::fast_sha256(XMMRegister msg, XMMRegister state0, XMMRegiste
palignr(state0, state1, 8);
pblendw(state1, msgtmp4, 0xF0);
movdqu(shuf_mask, ExternalAddress(pshuffle_byte_flip_mask));
movdqu(shuf_mask, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_addr()));
lea(rax, ExternalAddress(K256));
bind(loop0);
@ -661,8 +660,6 @@ void MacroAssembler::sha256_AVX2(XMMRegister msg, XMMRegister state0, XMMRegiste
compute_size1, compute_size_end1;
address K256_W = StubRoutines::x86::k256_W_addr();
address pshuffle_byte_flip_mask = StubRoutines::x86::pshuffle_byte_flip_mask_addr();
address pshuffle_byte_flip_mask_addr = nullptr;
const XMMRegister& SHUF_00BA = xmm10; // ymm10: shuffle xBxA -> 00BA
const XMMRegister& SHUF_DC00 = xmm12; // ymm12: shuffle xDxC -> DC00
@ -791,10 +788,14 @@ enum {
// load g - r10 after it is used as scratch
movl(h, Address(CTX, 4*7));
pshuffle_byte_flip_mask_addr = pshuffle_byte_flip_mask;
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(pshuffle_byte_flip_mask_addr + 0)); // [PSHUFFLE_BYTE_FLIP_MASK wrt rip]
vmovdqu(SHUF_00BA, ExternalAddress(pshuffle_byte_flip_mask_addr + 32)); // [_SHUF_00BA wrt rip]
vmovdqu(SHUF_DC00, ExternalAddress(pshuffle_byte_flip_mask_addr + 64)); // [_SHUF_DC00 wrt rip]
// the three successive pshuffle_byte_flip_mask stub entries should
// be offset by 32 bytes
assert(StubRoutines::x86::pshuffle_byte_flip_mask_addr() + 32 == StubRoutines::x86::pshuffle_byte_flip_mask_00ba_addr(), "sanity");
assert(StubRoutines::x86::pshuffle_byte_flip_mask_addr() + 64 == StubRoutines::x86::pshuffle_byte_flip_mask_dc00_addr(), "sanity");
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_addr())); // [PSHUFFLE_BYTE_FLIP_MASK wrt rip]
vmovdqu(SHUF_00BA, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_00ba_addr())); // [_SHUF_00BA wrt rip]
vmovdqu(SHUF_DC00, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_dc00_addr())); // [_SHUF_DC00 wrt rip]
movl(g, Address(CTX, 4*6));
@ -953,11 +954,9 @@ bind(only_one_block);
// load g - r10 after use as scratch
movl(h, Address(CTX, 4*7)); // 0x5be0cd19
pshuffle_byte_flip_mask_addr = pshuffle_byte_flip_mask;
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(pshuffle_byte_flip_mask_addr + 0)); // [PSHUFFLE_BYTE_FLIP_MASK wrt rip]
vmovdqu(SHUF_00BA, ExternalAddress(pshuffle_byte_flip_mask_addr + 32)); // [_SHUF_00BA wrt rip]
vmovdqu(SHUF_DC00, ExternalAddress(pshuffle_byte_flip_mask_addr + 64)); // [_SHUF_DC00 wrt rip]
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_addr())); // [PSHUFFLE_BYTE_FLIP_MASK wrt rip]
vmovdqu(SHUF_00BA, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_00ba_addr())); // [_SHUF_00BA wrt rip]
vmovdqu(SHUF_DC00, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_dc00_addr())); // [_SHUF_DC00 wrt rip]
movl(g, Address(CTX, 4*6)); // 0x1f83d9ab
@ -1346,9 +1345,12 @@ void MacroAssembler::sha512_AVX2(XMMRegister msg, XMMRegister state0, XMMRegiste
// load g - r10 after it is used as scratch
movq(h, Address(CTX, 8 * 7));
pshuffle_byte_flip_mask_addr = pshuffle_byte_flip_mask_sha512;
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(pshuffle_byte_flip_mask_addr + 0)); // PSHUFFLE_BYTE_FLIP_MASK wrt rip
vmovdqu(YMM_MASK_LO, ExternalAddress(pshuffle_byte_flip_mask_addr + 32));
// the two successive pshuffle_byte_flip_mask_sha512 stub entries should
// be offset by 32 bytes
assert(StubRoutines::x86::pshuffle_byte_flip_mask_addr_sha512() + 32 == StubRoutines::x86::pshuffle_byte_flip_mask_ymm_lo_addr_sha512(), "sanity");
vmovdqu(BYTE_FLIP_MASK, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_addr_sha512())); // PSHUFFLE_BYTE_FLIP_MASK wrt rip
vmovdqu(YMM_MASK_LO, ExternalAddress(StubRoutines::x86::pshuffle_byte_flip_mask_ymm_lo_addr_sha512())); // MASK_YMM_LO wrt rip
movq(g, Address(CTX, 8 * 6));

View File

@ -161,6 +161,12 @@
do_arch_entry(x86, compiler, pshuffle_byte_flip_mask, \
pshuffle_byte_flip_mask_addr, \
pshuffle_byte_flip_mask_addr) \
do_arch_entry(x86, compiler, pshuffle_byte_flip_mask, \
pshuffle_byte_flip_mask_00ba_addr, \
pshuffle_byte_flip_mask_00ba_addr) \
do_arch_entry(x86, compiler, pshuffle_byte_flip_mask, \
pshuffle_byte_flip_mask_dc00_addr, \
pshuffle_byte_flip_mask_dc00_addr) \
/* x86_64 exposes these 3 stubs via a generic entry array */ \
/* other arches use arch-specific entries */ \
/* this really needs rationalising */ \
@ -171,6 +177,9 @@
do_arch_entry(x86, compiler, pshuffle_byte_flip_mask_sha512, \
pshuffle_byte_flip_mask_addr_sha512, \
pshuffle_byte_flip_mask_addr_sha512) \
do_arch_entry(x86, compiler, pshuffle_byte_flip_mask_sha512, \
pshuffle_byte_flip_mask_ymm_lo_addr_sha512, \
pshuffle_byte_flip_mask_ymm_lo_addr_sha512) \
do_stub(compiler, compress_perm_table32) \
do_arch_entry(x86, compiler, compress_perm_table32, \
compress_perm_table32, compress_perm_table32) \

File diff suppressed because it is too large Load Diff

View File

@ -303,11 +303,11 @@ class StubGenerator: public StubCodeGenerator {
address generate_sha512_implCompress(StubId stub_id);
// Mask for byte-swapping a couple of qwords in an XMM register using (v)pshufb.
address generate_pshuffle_byte_flip_mask_sha512();
address generate_pshuffle_byte_flip_mask_sha512(address& entry_ymm_lo);
address generate_upper_word_mask();
address generate_shuffle_byte_flip_mask();
address generate_pshuffle_byte_flip_mask();
address generate_pshuffle_byte_flip_mask(address& entry_00ba, address& entry_dc0);
// AES intrinsic stubs
@ -650,8 +650,33 @@ class StubGenerator: public StubCodeGenerator {
void generate_compiler_stubs();
void generate_final_stubs();
#if INCLUDE_CDS
static void init_AOTAddressTable_adler(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_aes(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_cbrt(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_chacha(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_constants(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_dilithium(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_exp(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_fmod(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_ghash(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_kyber(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_log(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_poly1305(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_poly_mont(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_pow(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_sha3(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_sin(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_sinh(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_tan(GrowableArray<address>& external_addresses);
static void init_AOTAddressTable_tanh(GrowableArray<address>& external_addresses);
#endif // INCLUDE_CDS
public:
StubGenerator(CodeBuffer* code, BlobId blob_id);
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data);
#if INCLUDE_CDS
static void init_AOTAddressTable(GrowableArray<address>& external_addresses);
#endif // INCLUDE_CDS
};
#endif // CPU_X86_STUBGENERATOR_X86_64_HPP

View File

@ -67,8 +67,14 @@ address StubGenerator::generate_updateBytesAdler32() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_updateBytesAdler32_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
// Choose an appropriate LIMIT for inner loop based on the granularity
// of intermediate results. For int, LIMIT of 5552 will ensure intermediate
@ -334,7 +340,19 @@ address StubGenerator::generate_updateBytesAdler32() {
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
// Publish the static lookup tables referenced by the updateBytesAdler32
// stub so AOT-archived copies of the stub can address them symbolically.
void StubGenerator::init_AOTAddressTable_adler(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)(addr))
  ADD(ADLER32_ASCALE_TABLE);
  ADD(ADLER32_SHUF0_TABLE);
  ADD(ADLER32_SHUF1_TABLE);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -250,10 +250,16 @@ void StubGenerator::generate_aes_stubs() {
// Output:
// rax - number of processed bytes
address StubGenerator::generate_galoisCounterMode_AESCrypt() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_galoisCounterMode_AESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register in = c_rarg0;
const Register len = c_rarg1;
@ -319,6 +325,9 @@ address StubGenerator::generate_galoisCounterMode_AESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -337,10 +346,16 @@ address StubGenerator::generate_galoisCounterMode_AESCrypt() {
// Output:
// rax - number of processed bytes
address StubGenerator::generate_avx2_galoisCounterMode_AESCrypt() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_galoisCounterMode_AESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register in = c_rarg0;
const Register len = c_rarg1;
@ -404,15 +419,24 @@ address StubGenerator::generate_avx2_galoisCounterMode_AESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
// Vector AES Counter implementation
address StubGenerator::generate_counterMode_VectorAESCrypt() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_counterMode_AESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -471,6 +495,9 @@ address StubGenerator::generate_counterMode_VectorAESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -498,10 +525,16 @@ address StubGenerator::generate_counterMode_VectorAESCrypt() {
//
address StubGenerator::generate_counterMode_AESCrypt_Parallel() {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_counterMode_AESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -781,15 +814,24 @@ address StubGenerator::generate_counterMode_AESCrypt_Parallel() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
address StubGenerator::generate_cipherBlockChaining_decryptVectorAESCrypt() {
assert(VM_Version::supports_avx512_vaes(), "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_cipherBlockChaining_decryptAESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1057,6 +1099,9 @@ address StubGenerator::generate_cipherBlockChaining_decryptVectorAESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1069,11 +1114,17 @@ address StubGenerator::generate_cipherBlockChaining_decryptVectorAESCrypt() {
//
address StubGenerator::generate_aescrypt_encryptBlock() {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_aescrypt_encryptBlock_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
Label L_doLast;
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1152,6 +1203,9 @@ address StubGenerator::generate_aescrypt_encryptBlock() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1164,11 +1218,17 @@ address StubGenerator::generate_aescrypt_encryptBlock() {
//
address StubGenerator::generate_aescrypt_decryptBlock() {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_aescrypt_decryptBlock_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
Label L_doLast;
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1248,6 +1308,9 @@ address StubGenerator::generate_aescrypt_decryptBlock() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1266,10 +1329,16 @@ address StubGenerator::generate_aescrypt_decryptBlock() {
//
address StubGenerator::generate_cipherBlockChaining_encryptAESCrypt() {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_cipherBlockChaining_encryptAESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_exit, L_key_192_256, L_key_256, L_loopTop_128, L_loopTop_192, L_loopTop_256;
const Register from = c_rarg0; // source array address
@ -1398,6 +1467,9 @@ address StubGenerator::generate_cipherBlockChaining_encryptAESCrypt() {
__ jcc(Assembler::notEqual, L_loopTop_256);
__ jmp(L_exit);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1422,11 +1494,15 @@ address StubGenerator::generate_cipherBlockChaining_encryptAESCrypt() {
//
address StubGenerator::generate_electronicCodeBook_AESCrypt_Parallel(bool is_encrypt) {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = is_encrypt ? StubId::stubgen_electronicCodeBook_encryptAESCrypt_id
: StubId::stubgen_electronicCodeBook_decryptAESCrypt_id;
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1581,6 +1657,9 @@ __ opc(xmm_result0, reg);
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
#undef DoFour
@ -1612,10 +1691,16 @@ address StubGenerator::generate_electronicCodeBook_decryptAESCrypt_Parallel() {
//
address StubGenerator::generate_cipherBlockChaining_decryptAESCrypt_Parallel() {
assert(UseAES, "need AES instructions and misaligned SSE support");
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_cipherBlockChaining_decryptAESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1851,14 +1936,23 @@ __ opc(xmm_result3, src_reg);
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
address StubGenerator::generate_electronicCodeBook_encryptAESCrypt() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_electronicCodeBook_encryptAESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1872,14 +1966,23 @@ address StubGenerator::generate_electronicCodeBook_encryptAESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
address StubGenerator::generate_electronicCodeBook_decryptAESCrypt() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_electronicCodeBook_decryptAESCrypt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register from = c_rarg0; // source array address
const Register to = c_rarg1; // destination array address
@ -1893,6 +1996,9 @@ address StubGenerator::generate_electronicCodeBook_decryptAESCrypt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -4292,3 +4398,27 @@ void StubGenerator::aesgcm_avx2(Register in, Register len, Register ct, Register
}
#undef __
#if INCLUDE_CDS
// Register the AES/GCM constant-table addresses (obtained via their
// accessors) with the AOT address table so archived stub code can relocate
// references to them. Order of registration is significant and must not
// change once archives exist.
void StubGenerator::init_AOTAddressTable_aes(GrowableArray<address>& external_addresses) {
  const address aes_constants[] = {
    (address)key_shuffle_mask_addr(),
    (address)counter_shuffle_mask_addr(),
    (address)counter_mask_linc0_addr(),
    (address)counter_mask_linc1_addr(),
    (address)counter_mask_linc1f_addr(),
    (address)counter_mask_linc2_addr(),
    (address)counter_mask_linc2f_addr(),
    (address)counter_mask_linc4_addr(),
    (address)counter_mask_linc8_addr(),
    (address)counter_mask_linc16_addr(),
    (address)counter_mask_linc32_addr(),
    (address)counter_mask_ones_addr(),
    (address)ghash_polynomial_reduction_addr(),
    (address)ghash_polynomial_two_one_addr(),
    (address)counter_mask_addbe_4444_addr(),
    (address)counter_mask_addbe_1234_addr(),
    (address)counter_mask_add_1234_addr()
  };
  for (size_t i = 0; i < sizeof(aes_constants) / sizeof(aes_constants[0]); i++) {
    external_addresses.append(aes_constants[i]);
  }
}
#endif // INCLUDE_CDS

File diff suppressed because it is too large Load Diff

View File

@ -191,8 +191,14 @@ ATTRIBUTE_ALIGNED(4) static const juint _D_table[] =
address StubGenerator::generate_libmCbrt() {
StubId stub_id = StubId::stubgen_dcbrt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_1, L_2TAG_PACKET_1_0_1, L_2TAG_PACKET_2_0_1;
Label B1_1, B1_2, B1_4;
@ -335,7 +341,34 @@ address StubGenerator::generate_libmCbrt() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
// Register the dcbrt constant-table addresses with the AOT address table so
// archived stub code can relocate references to them. The coefficient table
// is referenced at four 16-byte offsets, so each slot is registered.
void StubGenerator::init_AOTAddressTable_cbrt(GrowableArray<address>& external_addresses) {
  external_addresses.append((address)_ABS_MASK);
  external_addresses.append((address)_SIG_MASK);
  external_addresses.append((address)_EXP_MASK);
  external_addresses.append((address)_EXP_MSK2);
  external_addresses.append((address)_EXP_MSK3);
  external_addresses.append((address)_SCALE63);
  external_addresses.append((address)_ZERON);
  external_addresses.append((address)_INF);
  external_addresses.append((address)_NEG_INF);
  // The four 16-byte-spaced slots of the coefficient table, in order.
  address coeff_table = (address)_coeff_table;
  for (int offset = 0; offset <= 48; offset += 16) {
    external_addresses.append(coeff_table + offset);
  }
  external_addresses.append((address)_rcp_table);
  external_addresses.append((address)_cbrt_table);
  external_addresses.append((address)_D_table);
}
#endif // INCLUDE_CDS

View File

@ -111,10 +111,16 @@ void StubGenerator::generate_chacha_stubs() {
/* The 2-block AVX/AVX2-enabled ChaCha20 block function implementation */
address StubGenerator::generate_chacha20Block_avx() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_chacha20Block_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_twoRounds;
const Register state = c_rarg0;
@ -295,15 +301,25 @@ address StubGenerator::generate_chacha20Block_avx() {
}
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
/* The 4-block AVX512-enabled ChaCha20 block function implementation */
address StubGenerator::generate_chacha20Block_avx512() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_chacha20Block_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_twoRounds;
const Register state = c_rarg0;
@ -466,6 +482,10 @@ address StubGenerator::generate_chacha20Block_avx512() {
__ vzeroupper();
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -584,3 +604,13 @@ bVec,
}
#undef __
#if INCLUDE_CDS
// Register the ChaCha20 constant-table addresses with the AOT address table
// so archived stub code can relocate references to them.
void StubGenerator::init_AOTAddressTable_chacha(GrowableArray<address>& external_addresses) {
  external_addresses.append((address)CC20_COUNTER_ADD_AVX);
  external_addresses.append((address)CC20_COUNTER_ADD_AVX512);
  external_addresses.append((address)CC20_LROT_CONSTS);
}
#endif // INCLUDE_CDS

View File

@ -233,3 +233,29 @@ ATTRIBUTE_ALIGNED(16) static const juint _Ctable[] = {
};
address StubGenerator::Ctable = (address)_Ctable;
#if INCLUDE_CDS
// Register the shared libm constant-table addresses with the AOT address
// table so archived stub code can relocate references to them. _PI_4 is
// referenced both at its base and at an 8-byte offset, so both slots are
// registered.
void StubGenerator::init_AOTAddressTable_constants(GrowableArray<address>& external_addresses) {
  external_addresses.append((address)_ONE);
  external_addresses.append((address)_ONEHALF);
  external_addresses.append((address)_SIGN_MASK);
  external_addresses.append((address)_TWO_POW_55);
  external_addresses.append((address)_TWO_POW_M55);
  external_addresses.append((address)_SHIFTER);
  external_addresses.append((address)_ZERO);
  external_addresses.append((address)_SC_1);
  external_addresses.append((address)_SC_2);
  external_addresses.append((address)_SC_3);
  external_addresses.append((address)_SC_4);
  external_addresses.append((address)_PI_4);
  external_addresses.append((address)_PI_4 + 8);
  external_addresses.append((address)_PI32INV);
  external_addresses.append((address)_NEG_ZERO);
  external_addresses.append((address)_P_1);
  external_addresses.append((address)_P_2);
  external_addresses.append((address)_P_3);
  external_addresses.append((address)_PI_INV_TABLE);
  external_addresses.append((address)_Ctable);
}
#endif // INCLUDE_CDS

View File

@ -174,8 +174,14 @@
address StubGenerator::generate_libmCos() {
StubId stub_id = StubId::stubgen_dcos_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_1, L_2TAG_PACKET_1_0_1, L_2TAG_PACKET_2_0_1, L_2TAG_PACKET_3_0_1;
Label L_2TAG_PACKET_4_0_1, L_2TAG_PACKET_5_0_1, L_2TAG_PACKET_6_0_1, L_2TAG_PACKET_7_0_1;
@ -619,6 +625,9 @@ address StubGenerator::generate_libmCos() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}

View File

@ -401,10 +401,16 @@ static void storeXmms(Register destination, int offset, const XMMRegister xmmReg
//
static address generate_dilithiumAlmostNtt_avx(StubGenerator *stubgen,
int vector_len, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_dilithiumAlmostNtt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register coeffs = c_rarg0;
@ -646,6 +652,9 @@ static address generate_dilithiumAlmostNtt_avx(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -657,10 +666,16 @@ static address generate_dilithiumAlmostNtt_avx(StubGenerator *stubgen,
// zetas (int[128*8]) = c_rarg1
static address generate_dilithiumAlmostInverseNtt_avx(StubGenerator *stubgen,
int vector_len, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_dilithiumAlmostInverseNtt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register coeffs = c_rarg0;
@ -886,6 +901,9 @@ static address generate_dilithiumAlmostInverseNtt_avx(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -900,10 +918,16 @@ static address generate_dilithiumAlmostInverseNtt_avx(StubGenerator *stubgen,
static address generate_dilithiumNttMult_avx(StubGenerator *stubgen,
int vector_len, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_dilithiumNttMult_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
Label L_loop;
@ -972,6 +996,9 @@ static address generate_dilithiumNttMult_avx(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -984,10 +1011,16 @@ static address generate_dilithiumNttMult_avx(StubGenerator *stubgen,
static address generate_dilithiumMontMulByConstant_avx(StubGenerator *stubgen,
int vector_len, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_dilithiumMontMulByConstant_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
Label L_loop;
@ -1059,6 +1092,9 @@ static address generate_dilithiumMontMulByConstant_avx(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1073,10 +1109,16 @@ static address generate_dilithiumMontMulByConstant_avx(StubGenerator *stubgen,
// multiplier (int) = c_rarg4
static address generate_dilithiumDecomposePoly_avx(StubGenerator *stubgen,
int vector_len, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_dilithiumDecomposePoly_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
Label L_loop;
@ -1318,6 +1360,9 @@ static address generate_dilithiumDecomposePoly_avx(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1340,3 +1385,21 @@ void StubGenerator::generate_dilithium_stubs() {
generate_dilithiumDecomposePoly_avx(this, vector_len, _masm);
}
}
#if INCLUDE_CDS
// Register the Dilithium constant addresses with the AOT address table so
// archived stub code can relocate references to them. Accessors are used to
// correctly identify the relevant addresses.
void StubGenerator::init_AOTAddressTable_dilithium(GrowableArray<address>& external_addresses) {
  // All six unshuffle-permutation tables, in index order.
  for (int idx = 0; idx < 6; idx++) {
    external_addresses.append((address)unshufflePermsAddr(idx));
  }
  external_addresses.append((address)dilithiumAvx512ConstsAddr(montQInvModRIdx));
  external_addresses.append((address)dilithiumAvx512ConstsAddr(dilithium_qIdx));
  external_addresses.append((address)dilithiumAvx512ConstsAddr(montRSquareModQIdx));
  external_addresses.append((address)dilithiumAvx512ConstsAddr(barrettAddendIdx));
}
#endif // INCLUDE_CDS

View File

@ -166,8 +166,14 @@ ATTRIBUTE_ALIGNED(4) static const juint _INF[] =
address StubGenerator::generate_libmExp() {
StubId stub_id = StubId::stubgen_dexp_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_2, L_2TAG_PACKET_1_0_2, L_2TAG_PACKET_2_0_2, L_2TAG_PACKET_3_0_2;
Label L_2TAG_PACKET_4_0_2, L_2TAG_PACKET_5_0_2, L_2TAG_PACKET_6_0_2, L_2TAG_PACKET_7_0_2;
@ -381,7 +387,31 @@ address StubGenerator::generate_libmExp() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
// Register the dexp constant-table addresses with the AOT address table so
// archived stub code can relocate references to them. The _cv table is
// referenced at six 16-byte offsets, so each slot is registered.
void StubGenerator::init_AOTAddressTable_exp(GrowableArray<address>& external_addresses) {
// Parenthesize the macro argument and drop the trailing semicolon so ADD
// behaves like an ordinary statement (the previous form expanded each
// "ADD(x);" into a statement plus a stray empty statement and cast an
// unparenthesized argument). Matches the other init_AOTAddressTable_* helpers.
#define ADD(addr) external_addresses.append((address)(addr))
  ADD(_cv);
  ADD(((address)_cv+16));
  ADD(((address)_cv+32));
  ADD(((address)_cv+48));
  ADD(((address)_cv+64));
  ADD(((address)_cv+80));
  ADD(_mmask);
  ADD(_bias);
  ADD(_Tbl_addr);
  ADD(_ALLONES);
  ADD(_ebias);
  ADD(_XMAX);
  ADD(_XMIN);
  ADD(_INF);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -72,10 +72,16 @@ ATTRIBUTE_ALIGNED(32) static const uint64_t CONST_e307[] = {
};
address StubGenerator::generate_libmFmod() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_fmod_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
__ enter(); // required for proper stackwalking of RuntimeStub frame
if (VM_Version::supports_avx512vlbwdq() && VM_Version::supports_fma()) { // AVX512 version
@ -521,7 +527,22 @@ address StubGenerator::generate_libmFmod() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
// Register the fmod constant-table addresses with the AOT address table so
// archived stub code can relocate references to them.
void StubGenerator::init_AOTAddressTable_fmod(GrowableArray<address>& external_addresses) {
// Parenthesize the macro argument and drop the trailing semicolon so ADD
// behaves like an ordinary statement (the previous form expanded each
// "ADD(x);" into a statement plus a stray empty statement and cast an
// unparenthesized argument). Matches the other init_AOTAddressTable_* helpers.
#define ADD(addr) external_addresses.append((address)(addr))
  ADD(CONST_NaN);
  ADD(CONST_1p260);
  ADD(CONST_MAX);
  ADD(CONST_INF);
  ADD(CONST_e307);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -80,11 +80,17 @@ void StubGenerator::generate_ghash_stubs() {
// Single and multi-block ghash operations.
address StubGenerator::generate_ghash_processBlocks() {
__ align(CodeEntryAlignment);
Label L_ghash_loop, L_exit;
StubId stub_id = StubId::stubgen_ghash_processBlocks_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
Label L_ghash_loop, L_exit;
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
const Register state = c_rarg0;
const Register subkeyH = c_rarg1;
@ -211,17 +217,25 @@ address StubGenerator::generate_ghash_processBlocks() {
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
// Ghash single and multi block operations using AVX instructions
address StubGenerator::generate_avx_ghash_processBlocks() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_ghash_processBlocks_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
// arguments
const Register state = c_rarg0;
@ -237,6 +251,9 @@ address StubGenerator::generate_avx_ghash_processBlocks() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -538,3 +555,14 @@ void StubGenerator::generateHtbl_eight_blocks(Register htbl) {
}
#undef __
#if INCLUDE_CDS
// Register the GHASH constant-table addresses with the AOT address table so
// archived stub code can relocate references to them.
void StubGenerator::init_AOTAddressTable_ghash(GrowableArray<address>& external_addresses) {
// Parenthesize the macro argument and drop the trailing semicolon so ADD
// behaves like an ordinary statement (the previous form expanded each
// "ADD(x);" into a statement plus a stray empty statement and cast an
// unparenthesized argument). Matches the other init_AOTAddressTable_* helpers.
#define ADD(addr) external_addresses.append((address)(addr))
  ADD(GHASH_SHUFFLE_MASK);
  ADD(GHASH_LONG_SWAP_MASK);
  ADD(GHASH_BYTE_SWAP_MASK);
  ADD(GHASH_POLYNOMIAL);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -400,10 +400,16 @@ static int xmm29_29[] = {29, 29, 29, 29};
// ntt_zetas (short[256]) = c_rarg1
address generate_kyberNtt_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberNtt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register coeffs = c_rarg0;
@ -487,6 +493,9 @@ address generate_kyberNtt_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -496,11 +505,16 @@ address generate_kyberNtt_avx512(StubGenerator *stubgen,
// ntt_zetas (short[256]) = c_rarg1
address generate_kyberInverseNtt_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberInverseNtt_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register coeffs = c_rarg0;
@ -610,6 +624,9 @@ address generate_kyberInverseNtt_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -621,11 +638,16 @@ address generate_kyberInverseNtt_avx512(StubGenerator *stubgen,
// zetas (short[128]) = c_rarg3
address generate_kyberNttMult_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberNttMult_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register result = c_rarg0;
@ -731,6 +753,9 @@ address generate_kyberNttMult_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -741,11 +766,16 @@ address generate_kyberNttMult_avx512(StubGenerator *stubgen,
// b (short[256]) = c_rarg2
address generate_kyberAddPoly_2_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberAddPoly_2_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register result = c_rarg0;
@ -776,6 +806,9 @@ address generate_kyberAddPoly_2_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -787,11 +820,16 @@ address generate_kyberAddPoly_2_avx512(StubGenerator *stubgen,
// c (short[256]) = c_rarg3
address generate_kyberAddPoly_3_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberAddPoly_3_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register result = c_rarg0;
@ -830,6 +868,9 @@ address generate_kyberAddPoly_3_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -841,11 +882,16 @@ address generate_kyberAddPoly_3_avx512(StubGenerator *stubgen,
// parsedLength (int) = c_rarg3
address generate_kyber12To16_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyber12To16_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register condensed = c_rarg0;
@ -984,6 +1030,9 @@ address generate_kyber12To16_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -993,11 +1042,16 @@ address generate_kyber12To16_avx512(StubGenerator *stubgen,
// coeffs (short[256]) = c_rarg0
address generate_kyberBarrettReduce_avx512(StubGenerator *stubgen,
MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_kyberBarrettReduce_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
const Register coeffs = c_rarg0;
@ -1021,6 +1075,9 @@ address generate_kyberBarrettReduce_avx512(StubGenerator *stubgen,
__ mov64(rax, 0); // return 0
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1038,3 +1095,24 @@ void StubGenerator::generate_kyber_stubs() {
}
}
}
#if INCLUDE_CDS
// Register the Kyber constant addresses with the AOT address table so
// archived stub code can relocate references to them. Accessors are used to
// correctly identify the relevant addresses.
void StubGenerator::init_AOTAddressTable_kyber(GrowableArray<address>& external_addresses) {
  external_addresses.append((address)kyberAvx512NttPermsAddr());
  external_addresses.append((address)kyberAvx512InverseNttPermsAddr());
  external_addresses.append((address)kyberAvx512_nttMultPermsAddr());
  external_addresses.append((address)kyberAvx512_12To16PermsAddr());
  external_addresses.append((address)kyberAvx512_12To16DupAddr());
  external_addresses.append((address)kyberAvx512_12To16ShiftAddr());
  external_addresses.append((address)kyberAvx512_12To16AndAddr());
  external_addresses.append((address)kyberAvx512ConstsAddr(qOffset));
  external_addresses.append((address)kyberAvx512ConstsAddr(qInvModROffset));
  external_addresses.append((address)kyberAvx512ConstsAddr(dimHalfInverseOffset));
  external_addresses.append((address)kyberAvx512ConstsAddr(barretMultiplierOffset));
  external_addresses.append((address)kyberAvx512ConstsAddr(montRSquareModqOffset));
  external_addresses.append((address)kyberAvx512ConstsAddr(f00Offset));
}
#endif // INCLUDE_CDS

View File

@ -177,8 +177,14 @@ ATTRIBUTE_ALIGNED(16) static const juint _coeff[] =
address StubGenerator::generate_libmLog() {
StubId stub_id = StubId::stubgen_dlog_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_2, L_2TAG_PACKET_1_0_2, L_2TAG_PACKET_2_0_2, L_2TAG_PACKET_3_0_2;
Label L_2TAG_PACKET_4_0_2, L_2TAG_PACKET_5_0_2, L_2TAG_PACKET_6_0_2, L_2TAG_PACKET_7_0_2;
@ -359,6 +365,9 @@ address StubGenerator::generate_libmLog() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -516,8 +525,14 @@ ATTRIBUTE_ALIGNED(16) static const juint _coeff_log10[] =
address StubGenerator::generate_libmLog10() {
StubId stub_id = StubId::stubgen_dlog10_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_2, L_2TAG_PACKET_1_0_2, L_2TAG_PACKET_2_0_2, L_2TAG_PACKET_3_0_2;
Label L_2TAG_PACKET_4_0_2, L_2TAG_PACKET_5_0_2, L_2TAG_PACKET_6_0_2, L_2TAG_PACKET_7_0_2;
@ -704,7 +719,32 @@ address StubGenerator::generate_libmLog10() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_log(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_L_tbl);
ADD(_log2);
ADD(((address)_log2+8));
ADD(_coeff);
ADD(((address)_coeff+16));
ADD(((address)_coeff+32));
ADD(_HIGHSIGMASK_log10);
ADD(_LOG10_E);
ADD(((address)_LOG10_E+8));
ADD(_L_tbl_log10);
ADD(_log2_log10);
ADD(((address)_log2_log10+8));
ADD(_coeff_log10);
ADD(((address)_coeff_log10+16));
ADD(((address)_coeff_log10+32));
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -909,10 +909,16 @@ void StubGenerator::poly1305_process_blocks_avx512(
// After execution, input and length will point at remaining (unprocessed) data
// and accumulator will point to the current accumulator value
address StubGenerator::generate_poly1305_processBlocks() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_poly1305_processBlocks_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
// Save all 'SOE' registers
@ -1028,6 +1034,10 @@ address StubGenerator::generate_poly1305_processBlocks() {
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -1695,3 +1705,14 @@ void StubGenerator::poly1305_msg_mul_reduce_vec4_avx2(
__ vpaddq(A1, A1, YTMP2, Assembler::AVX_256bit); //Add medium 42-bit bits from new blocks to accumulator
__ vpaddq(A1, A1, YTMP5, Assembler::AVX_256bit);
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_poly1305(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(POLY1305_PAD_MSG);
ADD(POLY1305_MASK42);
ADD(POLY1305_MASK44);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -558,10 +558,16 @@ void montgomeryMultiplyAVX2(const Register aLimbs, const Register bLimbs, const
}
address StubGenerator::generate_intpoly_montgomeryMult_P256() {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_intpoly_montgomeryMult_P256_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
if (VM_Version::supports_avx512ifma() && VM_Version::supports_avx512vlbw()) {
@ -620,6 +626,10 @@ address StubGenerator::generate_intpoly_montgomeryMult_P256() {
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
@ -680,10 +690,16 @@ address StubGenerator::generate_intpoly_assign() {
// P521OrderField: 19 = 8 + 8 + 2 + 1
// Special Cases 5, 10, 14, 16, 19
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_intpoly_assign_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
__ enter();
// Inputs
@ -762,5 +778,24 @@ address StubGenerator::generate_intpoly_assign() {
__ bind(L_Done);
__ leave();
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_poly_mont(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
// use accessors to retrieve all correct addresses
ADD(shift_1L());
ADD(shift_1R());
ADD(p256_mask52());
ADD(mask_limb5());
ADD(modulus_p256());
ADD(modulus_p256(1));
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -760,8 +760,14 @@ ATTRIBUTE_ALIGNED(8) static const juint _DOUBLE0DOT5[] = {
address StubGenerator::generate_libmPow() {
StubId stub_id = StubId::stubgen_dpow_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_2, L_2TAG_PACKET_1_0_2, L_2TAG_PACKET_2_0_2, L_2TAG_PACKET_3_0_2;
Label L_2TAG_PACKET_4_0_2, L_2TAG_PACKET_5_0_2, L_2TAG_PACKET_6_0_2, L_2TAG_PACKET_7_0_2;
@ -1859,7 +1865,40 @@ address StubGenerator::generate_libmPow() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_pow(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_HIGHSIGMASK);
ADD(_LOG2_E);
ADD(_HIGHMASK_Y);
ADD((address)_HIGHMASK_Y+8);
ADD(_T_exp);
ADD(_e_coeff);
ADD((address)_e_coeff+16);
ADD((address)_e_coeff+32);
ADD(_coeff_h);
ADD((address)_coeff_h+8);
ADD(_HIGHMASK_LOG_X);
ADD(_HALFMASK);
ADD(_coeff_pow);
ADD((address)_coeff_pow+16);
ADD((address)_coeff_pow+32);
ADD((address)_coeff_pow+48);
ADD((address)_coeff_pow+64);
ADD((address)_coeff_pow+80);
ADD(_L_tbl_pow);
ADD(_log2_pow);
ADD(_DOUBLE2);
ADD(_DOUBLE0);
ADD(_DOUBLE0DOT5);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -104,10 +104,15 @@ static address generate_sha3_implCompress(StubId stub_id,
default:
ShouldNotReachHere();
}
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
const Register buf = c_rarg0;
const Register state = c_rarg1;
@ -316,6 +321,9 @@ static address generate_sha3_implCompress(StubId stub_id,
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -326,10 +334,16 @@ static address generate_sha3_implCompress(StubId stub_id,
// Performs two keccak() computations in parallel. The steps of the
// two computations are executed interleaved.
static address generate_double_keccak(StubGenerator *stubgen, MacroAssembler *_masm) {
__ align(CodeEntryAlignment);
StubId stub_id = StubId::stubgen_double_keccak_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = stubgen->load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
__ align(CodeEntryAlignment);
StubCodeMark mark(stubgen, stub_id);
address start = __ pc();
start = __ pc();
const Register state0 = c_rarg0;
const Register state1 = c_rarg1;
@ -495,6 +509,9 @@ static address generate_double_keccak(StubGenerator *stubgen, MacroAssembler *_m
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
stubgen->store_archive_data(stub_id, start, __ pc());
return start;
}
@ -508,3 +525,14 @@ void StubGenerator::generate_sha3_stubs() {
generate_sha3_implCompress(StubId::stubgen_sha3_implCompressMB_id, this, _masm);
}
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_sha3(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(round_constsAddr());
ADD(permsAndRotsAddr());
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -181,8 +181,14 @@ ATTRIBUTE_ALIGNED(8) static const juint _ALL_ONES[] =
address StubGenerator::generate_libmSin() {
StubId stub_id = StubId::stubgen_dsin_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_1, L_2TAG_PACKET_1_0_1, L_2TAG_PACKET_2_0_1, L_2TAG_PACKET_3_0_1;
Label L_2TAG_PACKET_4_0_1, L_2TAG_PACKET_5_0_1, L_2TAG_PACKET_6_0_1, L_2TAG_PACKET_7_0_1;
@ -645,7 +651,18 @@ address StubGenerator::generate_libmSin() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_sin(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_ALL_ONES);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -290,8 +290,14 @@ ATTRIBUTE_ALIGNED(16) static const juint _T2_neg_f[] =
address StubGenerator::generate_libmSinh() {
StubId stub_id = StubId::stubgen_dsinh_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_2, L_2TAG_PACKET_1_0_2, L_2TAG_PACKET_3_0_2, L_2TAG_PACKET_4_0_2;
Label L_2TAG_PACKET_5_0_2, L_2TAG_PACKET_6_0_2;
@ -519,7 +525,32 @@ address StubGenerator::generate_libmSinh() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_sinh(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_L2E);
ADD(_L2E + 8);
ADD(_HALFMASK);
ADD(_Shifter);
ADD(_cv);
ADD(_cv + 16);
ADD(_cv + 32);
ADD(_cv + 48);
ADD(_cv + 64);
ADD(_T2f);
ADD(_T2_neg_f);
ADD(_pv);
ADD(_pv + 16);
ADD(_pv + 32);
ADD(_MASK3);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -456,8 +456,14 @@ ATTRIBUTE_ALIGNED(8) static const juint _QQ_2_tan[] =
address StubGenerator::generate_libmTan() {
StubId stub_id = StubId::stubgen_dtan_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_1, L_2TAG_PACKET_1_0_1, L_2TAG_PACKET_2_0_1, L_2TAG_PACKET_3_0_1;
Label L_2TAG_PACKET_4_0_1, L_2TAG_PACKET_5_0_1, L_2TAG_PACKET_6_0_1, L_2TAG_PACKET_7_0_1;
@ -1025,7 +1031,33 @@ address StubGenerator::generate_libmTan() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_tan(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_MUL16);
ADD(_sign_mask_tan);
ADD(_PI32INV_tan);
ADD(_P_1_tan);
ADD(_P_2_tan);
ADD(_P_3_tan);
ADD(_Ctable_tan);
ADD(_MASK_35_tan);
ADD(_Q_11_tan);
ADD(_Q_9_tan);
ADD(_Q_7_tan);
ADD(_Q_5_tan);
ADD(_Q_3_tan);
ADD(_PI_4_tan);
ADD(((address)_PI_4_tan+8));
ADD(_QQ_2_tan);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -303,8 +303,14 @@ ATTRIBUTE_ALIGNED(16) static const juint _T2_neg_f[] =
address StubGenerator::generate_libmTanh() {
StubId stub_id = StubId::stubgen_dtanh_id;
int entry_count = StubInfo::entry_count(stub_id);
assert(entry_count == 1, "sanity check");
address start = load_archive_data(stub_id);
if (start != nullptr) {
return start;
}
StubCodeMark mark(this, stub_id);
address start = __ pc();
start = __ pc();
Label L_2TAG_PACKET_0_0_1, L_2TAG_PACKET_1_0_1, L_2TAG_PACKET_2_0_1, L_2TAG_PACKET_3_0_1;
Label L_2TAG_PACKET_4_0_1, L_2TAG_PACKET_5_0_1;
@ -495,7 +501,32 @@ address StubGenerator::generate_libmTanh() {
__ leave(); // required for proper stackwalking of RuntimeStub frame
__ ret(0);
// record the stub entry and end
store_archive_data(stub_id, start, __ pc());
return start;
}
#undef __
#if INCLUDE_CDS
void StubGenerator::init_AOTAddressTable_tanh(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(_L2E);
ADD(_L2E + 8);
ADD(_HALFMASK);
ADD(_ONEMASK);
ADD(_TWOMASK);
ADD(_Shifter);
ADD(_cv);
ADD(_cv + 16);
ADD(_cv + 32);
ADD(_T2_neg_f);
ADD(_pv);
ADD(_pv + 16);
ADD(_pv + 32);
ADD(_MASK3);
ADD(_RMASK);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -28,6 +28,10 @@
#include "runtime/stubRoutines.hpp"
#include "utilities/globalDefinitions.hpp"
#include "crc32c.h"
#include "stubGenerator_x86_64.hpp"
#ifdef COMPILER1
#include "c1/c1_LIRAssembler.hpp"
#endif
// Implementation of the platform-specific part of StubRoutines - for
// a description of how to extend it, see the stubRoutines.hpp file.
@ -411,3 +415,46 @@ ATTRIBUTE_ALIGNED(64) const julong StubRoutines::x86::_k512_W[] =
0x4cc5d4becb3e42b6ULL, 0x597f299cfc657e2aULL,
0x5fcb6fab3ad6faecULL, 0x6c44198c4a475817ULL,
};
#if INCLUDE_CDS
void StubRoutines::init_AOTAddressTable() {
ResourceMark rm;
GrowableArray<address> external_addresses;
// publish static addresses referred to by main x86 generator and
// auxiliary x86 generators
StubGenerator::init_AOTAddressTable(external_addresses);
// publish external data addresses defined in nested x86 class
StubRoutines::x86::init_AOTAddressTable(external_addresses);
#ifdef COMPILER1
LIR_Assembler::init_AOTAddressTable(external_addresses);
#endif
AOTCodeCache::publish_external_addresses(external_addresses);
}
// publish addresses of external data defined in this file which may
// be referenced from stub or code
void StubRoutines::x86::init_AOTAddressTable(GrowableArray<address>& external_addresses) {
#define ADD(addr) external_addresses.append((address)addr);
ADD(&_mxcsr_std);
ADD(&_mxcsr_rz);
ADD(crc_by128_masks_addr());
ADD(crc_by128_masks_addr() + 16);
ADD(crc_by128_masks_addr() + 32);
// this is added in generic code
// ADD(_crc_table);
ADD(crc_by128_masks_avx512_addr());
ADD(crc_by128_masks_avx512_addr() + 16);
ADD(crc_by128_masks_avx512_addr() + 32);
ADD(_crc_table_avx512);
ADD(_crc32c_table_avx512);
ADD(_shuf_table_crc32_avx512);
// n.b. call accessor for this one to ensure the table is generated
ADD(crc32c_table_addr());
ADD(_arrays_hashcode_powers_of_31);
ADD(_k256);
ADD(_k256_W);
ADD(_k512_W);
#undef ADD
}
#endif // INCLUDE_CDS

View File

@ -112,6 +112,8 @@ public:
static address arrays_hashcode_powers_of_31() { return (address)_arrays_hashcode_powers_of_31; }
static void generate_CRC32C_table(bool is_pclmulqdq_supported);
static void init_AOTAddressTable(GrowableArray<address>& external_addresses);
};
#endif // CPU_X86_STUBROUTINES_X86_HPP

View File

@ -213,7 +213,7 @@ class StubGenerator: public StubCodeGenerator {
}
public:
StubGenerator(CodeBuffer* code, BlobId blob_id) : StubCodeGenerator(code, blob_id) {
StubGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData *stub_data) : StubCodeGenerator(code, blob_id, stub_data) {
switch(blob_id) {
case BlobId::stubgen_preuniverse_id:
generate_preuniverse_stubs();
@ -237,8 +237,8 @@ class StubGenerator: public StubCodeGenerator {
}
};
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id) {
StubGenerator g(code, blob_id);
void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData *stub_data) {
StubGenerator g(code, blob_id, stub_data);
}
EntryFrame *EntryFrame::build(const intptr_t* parameters,

View File

@ -30,3 +30,9 @@
address StubRoutines::crc_table_addr() { ShouldNotCallThis(); return nullptr; }
address StubRoutines::crc32c_table_addr() { ShouldNotCallThis(); return nullptr; }
#if INCLUDE_CDS
// nothing to do for zero
void StubRoutines::init_AOTAddressTable() {
}
#endif // INCLUDE_CDS

View File

@ -25,4 +25,4 @@
#include "runtime/icache.hpp"
#include "utilities/globalDefinitions.hpp"
NOT_PRODUCT(THREAD_LOCAL AArch64ICacheInvalidationContext* AArch64ICacheInvalidationContext::_current_context = nullptr;)
DEBUG_ONLY(THREAD_LOCAL AArch64ICacheInvalidationContext* AArch64ICacheInvalidationContext::_current_context = nullptr;)

View File

@ -82,7 +82,7 @@ class AArch64ICacheInvalidationContext : StackObj {
}
~AArch64ICacheInvalidationContext() {
NOT_PRODUCT(_current_context = nullptr);
DEBUG_ONLY(_current_context = nullptr);
if (!_has_modified_code || !UseSingleICacheInvalidation) {
return;

View File

@ -1136,7 +1136,7 @@ void AsmRemarks::clear() {
uint AsmRemarks::print(uint offset, outputStream* strm) const {
uint count = 0;
const char* prefix = " ;; ";
const char* remstr = _remarks->lookup(offset);
const char* remstr = (_remarks ? _remarks->lookup(offset) : nullptr);
while (remstr != nullptr) {
strm->bol();
strm->print("%s", prefix);

View File

@ -278,11 +278,9 @@ bool Runtime1::initialize(BufferBlob* blob) {
if (!generate_blob_for(blob, id)) {
return false;
}
if (id == StubId::c1_forward_exception_id) {
// publish early c1 stubs at this point so later stubs can refer to them
AOTCodeCache::init_early_c1_table();
}
}
// disallow any further c1 stub generation
AOTCodeCache::set_c1_stubs_complete();
// printing
#ifndef PRODUCT
if (PrintSimpleStubs) {

File diff suppressed because it is too large Load Diff

View File

@ -28,6 +28,7 @@
#include "gc/shared/collectedHeap.hpp"
#include "gc/shared/gc_globals.hpp"
#include "runtime/stubInfo.hpp"
#include "utilities/hashTable.hpp"
/*
* AOT Code Cache collects code from Code Cache and corresponding metadata
@ -39,6 +40,7 @@
class CodeBuffer;
class RelocIterator;
class AOTCodeCache;
class AOTCodeReader;
class AdapterBlob;
class ExceptionBlob;
class ImmutableOopMapSet;
@ -54,6 +56,7 @@ enum CompLevel : signed char;
Fn(SharedBlob) \
Fn(C1Blob) \
Fn(C2Blob) \
Fn(StubGenBlob) \
// Descriptor of AOT Code Cache's entry
class AOTCodeEntry {
@ -115,48 +118,57 @@ public:
address dumptime_content_start_addr() const { return _dumptime_content_start_addr; }
static bool is_valid_entry_kind(Kind kind) { return kind > None && kind < Kind_count; }
static bool is_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob; }
static bool is_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob || kind == StubGenBlob; }
static bool is_single_stub_blob(Kind kind) { return kind == SharedBlob || kind == C1Blob || kind == C2Blob; }
static bool is_multi_stub_blob(Kind kind) { return kind == StubGenBlob; }
static bool is_adapter(Kind kind) { return kind == Adapter; }
};
// we use a hash table to speed up translation of external addresses
// or stub addresses to their corresponding indexes when dumping stubs
// or nmethods to the AOT code cache.
class AOTCodeAddressHashTable : public HashTable<
address,
int,
36137, // prime number
AnyObj::C_HEAP,
mtCode> {};
// Addresses of stubs, blobs and runtime finctions called from compiled code.
class AOTCodeAddressTable : public CHeapObj<mtCode> {
private:
address* _extrs_addr;
address* _stubs_addr;
address* _shared_blobs_addr;
address* _C1_blobs_addr;
uint _extrs_length;
uint _stubs_length;
uint _shared_blobs_length;
uint _C1_blobs_length;
bool _extrs_complete;
bool _early_stubs_complete;
bool _shared_blobs_complete;
bool _early_c1_complete;
bool _complete;
bool _shared_stubs_complete;
bool _c1_stubs_complete;
bool _c2_stubs_complete;
bool _stubgen_stubs_complete;
AOTCodeAddressHashTable* _hash_table;
void hash_address(address addr, int idx);
public:
AOTCodeAddressTable() :
_extrs_addr(nullptr),
_stubs_addr(nullptr),
_shared_blobs_addr(nullptr),
_C1_blobs_addr(nullptr),
_extrs_length(0),
_stubs_length(0),
_shared_blobs_length(0),
_C1_blobs_length(0),
_extrs_complete(false),
_early_stubs_complete(false),
_shared_blobs_complete(false),
_early_c1_complete(false),
_complete(false)
_shared_stubs_complete(false),
_c1_stubs_complete(false),
_c2_stubs_complete(false),
_stubgen_stubs_complete(false),
_hash_table(nullptr)
{ }
void init_extrs();
void init_early_stubs();
void init_shared_blobs();
void init_early_c1();
void init_extrs2();
void add_stub_entry(EntryId entry_id, address entry);
void add_external_addresses(GrowableArray<address>& addresses) NOT_CDS_RETURN;
void set_shared_stubs_complete();
void set_c1_stubs_complete();
void set_c2_stubs_complete();
void set_stubgen_stubs_complete();
const char* add_C_string(const char* str);
int id_for_C_string(address str);
address address_for_C_string(int idx);
@ -164,7 +176,98 @@ public:
address address_for_id(int id);
};
#define AOTCODECACHE_CONFIGS_GENERIC_DO(do_var, do_fun) \
// Auxiliary class used by AOTStubData to locate addresses owned by a
// stub in the _address_array.
class StubAddrRange {
private:
// Index of the first address owned by a stub or -1 if none present
int _start_index;
// Total number of addresses owned by a stub, including in order:
// start address for stub code and first entry, (exclusive) end
// address for stub code, all secondary entry addresses, any
// auxiliary addresses
uint _naddr;
public:
StubAddrRange() : _start_index(-1), _naddr(0) {}
int start_index() { return _start_index; }
int count() { return _naddr; }
void default_init() {
_start_index = -1;
_naddr = 0;
}
void init_entry(int start_index, int naddr) {
_start_index = start_index;
_naddr = naddr;
}
};
// class used to save and restore details of stubs embedded in a
// multi-stub (StubGen) blob
class AOTStubData : public StackObj {
friend class AOTCodeCache;
friend class AOTCodeReader;
private:
BlobId _blob_id; // must be a stubgen blob id
// whatever buffer blob was successfully loaded from the AOT cache
// following a call to load_code_blob or nullptr
CodeBlob *_cached_blob;
// Array of addresses owned by stubs. Each stub appends addresses to
// this array as a block, whether at the end of generation or at the
// end of restoration from the cache. The first two addresses in
// each block are the "start" and "end2 address of the stub. Any
// other visible addresses located within the range [start,end)
// follow, either extra entries, data addresses or SEGV-protected
// subrange start, end and handler addresses. In the special case
// that the SEGV handler address is the (external) common address
// handler the array will hold value nullptr.
GrowableArray<address> _address_array;
// count of how many stubs exist in the current blob (not all of
// which may actually be generated)
int _stub_cnt;
// array identifying range of entries in _address_array for each stub
// indexed by offset of stub in blob
StubAddrRange* _ranges;
// flags indicating whether the AOT code cache is open and, if so,
// whether we are loading or storing stubs or have encountered any
// invalid stubs.
enum Flags {
USING = 1 << 0, // open and loading stubs
DUMPING = 1 << 1, // open and storing stubs
INVALID = 1 << 2, // found invalid stub when loading
};
uint32_t _flags;
void set_invalid() { _flags |= INVALID; }
StubAddrRange& get_range(int idx) const { return _ranges[idx]; }
GrowableArray<address>& address_array() { return _address_array; }
// accessor for entry/auxiliary addresses defaults to start entry
public:
AOTStubData(BlobId blob_id) NOT_CDS({});
~AOTStubData() CDS_ONLY({FREE_C_HEAP_ARRAY(StubAddrRange, _ranges);}) NOT_CDS({})
bool is_using() CDS_ONLY({ return (_flags & USING) != 0; }) NOT_CDS_RETURN_(false);
bool is_dumping() CDS_ONLY({ return (_flags & DUMPING) != 0; }) NOT_CDS_RETURN_(false);
bool is_invalid() CDS_ONLY({ return (_flags & INVALID) != 0; }) NOT_CDS_RETURN_(false);
BlobId blob_id() { return _blob_id; }
bool load_code_blob() NOT_CDS_RETURN_(true);
bool store_code_blob(CodeBlob& new_blob, CodeBuffer *code_buffer) NOT_CDS_RETURN_(true);
address load_archive_data(StubId stub_id, address &end, GrowableArray<address>* entries = nullptr, GrowableArray<address>* extras = nullptr) NOT_CDS_RETURN_(nullptr);
void store_archive_data(StubId stub_id, address start, address end, GrowableArray<address>* entries = nullptr, GrowableArray<address>* extras = nullptr) NOT_CDS_RETURN;
const AOTStubData* as_const() { return (const AOTStubData*)this; }
};
#define AOTCODECACHE_CONFIGS_GENERIC_DO(do_var, do_fun) \
do_var(int, AllocateInstancePrefetchLines) /* stubs and nmethods */ \
do_var(int, AllocatePrefetchDistance) /* stubs and nmethods */ \
do_var(int, AllocatePrefetchLines) /* stubs and nmethods */ \
@ -301,17 +404,18 @@ protected:
uint _entries_offset; // offset of AOTCodeEntry array describing entries
uint _adapters_count;
uint _shared_blobs_count;
uint _stubgen_blobs_count;
uint _C1_blobs_count;
uint _C2_blobs_count;
Config _config; // must be the last element as there is trailing data stored immediately after Config
public:
void init(uint cache_size,
uint strings_count, uint strings_offset,
uint entries_count, uint entries_offset,
uint adapters_count, uint shared_blobs_count,
uint C1_blobs_count, uint C2_blobs_count,
uint cpu_features_offset) {
uint strings_count, uint strings_offset,
uint entries_count, uint entries_offset,
uint adapters_count, uint shared_blobs_count,
uint stubgen_blobs_count, uint C1_blobs_count,
uint C2_blobs_count, uint cpu_features_offset) {
_version = AOT_CODE_VERSION;
_cache_size = cache_size;
_strings_count = strings_count;
@ -320,6 +424,7 @@ protected:
_entries_offset = entries_offset;
_adapters_count = adapters_count;
_shared_blobs_count = shared_blobs_count;
_stubgen_blobs_count = stubgen_blobs_count;
_C1_blobs_count = C1_blobs_count;
_C2_blobs_count = C2_blobs_count;
_config.record(cpu_features_offset);
@ -332,6 +437,7 @@ protected:
uint entries_count() const { return _entries_count; }
uint entries_offset() const { return _entries_offset; }
uint adapters_count() const { return _adapters_count; }
uint stubgen_blobs_count() const { return _stubgen_blobs_count; }
uint shared_blobs_count() const { return _shared_blobs_count; }
uint C1_blobs_count() const { return _C1_blobs_count; }
uint C2_blobs_count() const { return _C2_blobs_count; }
@ -381,6 +487,7 @@ private:
void clear_lookup_failed() { _lookup_failed = false; }
bool lookup_failed() const { return _lookup_failed; }
void add_stub_entry(EntryId entry_id, address entry) NOT_CDS_RETURN;
public:
AOTCodeCache(bool is_dumping, bool is_using);
@ -396,9 +503,12 @@ public:
void load_strings();
int store_strings();
static void init_early_stubs_table() NOT_CDS_RETURN;
static void init_shared_blobs_table() NOT_CDS_RETURN;
static void init_early_c1_table() NOT_CDS_RETURN;
static void set_shared_stubs_complete() NOT_CDS_RETURN;
static void set_c1_stubs_complete() NOT_CDS_RETURN ;
static void set_c2_stubs_complete() NOT_CDS_RETURN;
static void set_stubgen_stubs_complete() NOT_CDS_RETURN;
void add_stub_entries(StubId stub_id, address start, GrowableArray<address> *entries = nullptr, int offset = -1) NOT_CDS_RETURN;
address address_for_C_string(int idx) const { return _table->address_for_C_string(idx); }
address address_for_id(int id) const { return _table->address_for_id(id); }
@ -418,22 +528,41 @@ public:
bool finish_write();
bool write_relocations(CodeBlob& code_blob);
bool write_relocations(CodeBlob& code_blob, RelocIterator& iter);
bool write_oop_map_set(CodeBlob& cb);
bool write_stub_data(CodeBlob& blob, AOTStubData *stub_data);
#ifndef PRODUCT
bool write_asm_remarks(CodeBlob& cb);
bool write_dbg_strings(CodeBlob& cb);
#endif // PRODUCT
private:
// internal private API to save and restore blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
uint id,
const char* name,
AOTStubData* stub_data,
CodeBuffer* code_buffer) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
uint id,
const char* name,
AOTStubData* stub_data) NOT_CDS_RETURN_(nullptr);
public:
// save and restore API for non-enumerable code blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
uint id, const char* name) NOT_CDS_RETURN_(false);
uint id,
const char* name) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
uint id, const char* name) NOT_CDS_RETURN_(nullptr);
// save and restore API for enumerable code blobs
// API for single-stub blobs
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind entry_kind,
BlobId id) NOT_CDS_RETURN_(false);
@ -441,6 +570,22 @@ public:
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
BlobId id) NOT_CDS_RETURN_(nullptr);
// API for multi-stub blobs -- for use by class StubGenerator.
static bool store_code_blob(CodeBlob& blob,
AOTCodeEntry::Kind kind,
BlobId id,
AOTStubData* stub_data,
CodeBuffer *code_buffer) NOT_CDS_RETURN_(false);
static CodeBlob* load_code_blob(AOTCodeEntry::Kind kind,
BlobId id,
AOTStubData* stub_data) NOT_CDS_RETURN_(nullptr);
static void publish_external_addresses(GrowableArray<address>& addresses) NOT_CDS_RETURN;
// publish all entries for a code blob in code cache address table
static void publish_stub_addresses(CodeBlob &code_blob, BlobId id, AOTStubData *stub_data) NOT_CDS_RETURN;
static uint store_entries_cnt() {
if (is_on_for_dump()) {
return cache()->_store_entries_cnt;
@ -462,9 +607,14 @@ private:
return true;
}
public:
// marker used where an address offset needs to be stored for later
// retrieval and the address turns out to be null
static const uint NULL_ADDRESS_MARKER = UINT_MAX;
static AOTCodeCache* cache() { assert(_passed_init2, "Too early to ask"); return _cache; }
static void initialize() NOT_CDS_RETURN;
static void init2() NOT_CDS_RETURN;
static void init3() NOT_CDS_RETURN;
static void dump() NOT_CDS_RETURN;
static bool is_on() CDS_ONLY({ return cache() != nullptr; }) NOT_CDS_RETURN_(false);
static bool is_on_for_use() CDS_ONLY({ return is_on() && _cache->for_use(); }) NOT_CDS_RETURN_(false);
@ -485,7 +635,7 @@ public:
// Concurent AOT code reader
class AOTCodeReader {
private:
const AOTCodeCache* _cache;
AOTCodeCache* _cache;
const AOTCodeEntry* _entry;
const char* _load_buffer; // Loaded cached code buffer
uint _read_position; // Position in _load_buffer
@ -502,13 +652,18 @@ private:
// They should be set before calling it.
const char* _name;
address _reloc_data;
int _reloc_count;
ImmutableOopMapSet* _oop_maps;
AOTCodeEntry::Kind _entry_kind;
int _id;
AOTStubData* _stub_data;
AOTCodeEntry* aot_code_entry() { return (AOTCodeEntry*)_entry; }
ImmutableOopMapSet* read_oop_map_set();
void read_stub_data(CodeBlob* code_blob, AOTStubData *stub_data);
void fix_relocations(CodeBlob* code_blob);
void fix_relocations(CodeBlob* code_blob, RelocIterator& iter);
#ifndef PRODUCT
void read_asm_remarks(AsmRemarks& asm_remarks);
void read_dbg_strings(DbgStrings& dbg_strings);
@ -517,7 +672,7 @@ private:
public:
AOTCodeReader(AOTCodeCache* cache, AOTCodeEntry* entry);
CodeBlob* compile_code_blob(const char* name);
CodeBlob* compile_code_blob(const char* name, AOTCodeEntry::Kind entry_kind, int id, AOTStubData* stub_data = nullptr);
void restore(CodeBlob* code_blob);
};

View File

@ -1332,7 +1332,6 @@ nmethod::nmethod(
code_buffer->copy_values_to(this);
post_init();
ICache::invalidate_range(code_begin(), code_size());
}
if (PrintNativeNMethods || PrintDebugInfo || PrintRelocations || PrintDependencies) {
@ -1812,7 +1811,6 @@ nmethod::nmethod(
init_immutable_data_ref_count();
post_init();
ICache::invalidate_range(code_begin(), code_size());
// we use the information of entry points to find out if a method is
// static or non static

View File

@ -590,6 +590,15 @@ oop oop_Relocation::oop_value() {
return *oop_addr();
}
// Re-insert the relocated oop constant into the instruction stream.
// TODO: we need to add some assert here that ICache::invalidate_range is called in the code
// which uses this function.
void oop_Relocation::fix_oop_relocation() {
  if (oop_is_immediate()) {
    // an immediate oop is embedded directly in the instruction; nothing to fix
    return;
  }
  // get the oop from the pool, and re-insert it into the instruction
  set_value(value());
}
void oop_Relocation::verify_oop_relocation() {
if (!oop_is_immediate()) {
// get the oop from the pool, and re-insert it into the instruction:

View File

@ -988,6 +988,8 @@ class oop_Relocation : public DataRelocation {
void pack_data_to(CodeSection* dest) override;
void unpack_data() override;
void fix_oop_relocation(); // reasserts oop value
void verify_oop_relocation();
address value() override { return *reinterpret_cast<address*>(oop_addr()); }

View File

@ -1131,23 +1131,34 @@ bool G1ConcurrentMark::scan_root_regions(WorkerThreads* workers, bool concurrent
//
// Concurrent gc threads enter an STS when starting the task, so they stop, then
// continue after that safepoint.
bool do_scan = !root_regions()->work_completed() && !has_root_region_scan_aborted();
//
// Must not use G1CMRootMemRegions::work_completed() here because we need to get a
// consistent view of the value containing the number of remaining regions across the
// usages below. The safepoint/gc may already be running and modifying it
// while this code is still executing.
uint num_remaining = root_regions()->num_remaining_regions();
bool do_scan = num_remaining > 0 && !has_root_region_scan_aborted();
if (do_scan) {
// Assign one worker to each root-region but subject to the max constraint.
// The constraint is also important to avoid accesses beyond the allocated per-worker
// marking helper data structures. We might get passed different WorkerThreads with
// different number of threads (potential worker ids) than helper data structures when
// completing this work during GC.
const uint num_workers = MIN2(root_regions()->num_remaining_regions(),
const uint num_workers = MIN2(num_remaining,
_max_concurrent_workers);
assert(num_workers > 0, "no more remaining root regions to process");
G1CMRootRegionScanTask task(this, concurrent);
log_debug(gc, ergo)("Running %s using %u workers for %u work units.",
task.name(), num_workers, root_regions()->num_remaining_regions());
task.name(), num_workers, num_remaining);
workers->run_task(&task, num_workers);
}
// At the end of this method, we can re-read num_remaining() in the assert: either
// we got non-zero above and we processed all root regions (and it must be zero
// after the worker task synchronization) or it had already been zero. We also
// can't have started another concurrent cycle that could have set it to something else
// while still in the concurrent cycle (if called concurrently).
assert_root_region_scan_completed_or_aborted();
return do_scan;

View File

@ -34,6 +34,9 @@ public:
static Address load_bad_mask_from_jni_env(Register env);
static Address mark_bad_mask_from_jni_env(Register env);
virtual void register_reloc_addresses(GrowableArray<address> &entries, int begin, int count) { }
virtual void retrieve_reloc_addresses(address start, address end, GrowableArray<address> &entries) { }
};
// Needs to be included after definition of ZBarrierSetAssemblerBase

View File

@ -24,6 +24,7 @@
#include "classfile/vmClasses.hpp"
#include "classfile/vmSymbols.hpp"
#include "code/aotCodeCache.hpp"
#include "code/codeCache.hpp"
#include "code/compiledIC.hpp"
#include "code/nmethod.hpp"
@ -154,7 +155,8 @@ static bool check_compiled_frame(JavaThread* thread) {
bool OptoRuntime::generate(ciEnv* env) {
C2_STUBS_DO(GEN_C2_BLOB, GEN_C2_STUB)
// disallow any further c2 stub generation
AOTCodeCache::set_c2_stubs_complete();
return true;
}

View File

@ -72,7 +72,7 @@ public:
bool needs_return_buffer,
int captured_state_mask,
bool needs_transition)
: StubCodeGenerator(buffer, PrintMethodHandleStubs),
: StubCodeGenerator(buffer, PrintMethodHandleStubs),
_signature(signature),
_num_args(num_args),
_ret_bt(ret_bt),

View File

@ -70,6 +70,10 @@ void VM_Version_init();
void icache_init2();
void initialize_stub_info(); // must precede all blob/stub generation
void preuniverse_stubs_init();
#if INCLUDE_CDS
void stubs_AOTAddressTable_init();
#endif // INCLUDE_CDS
void initial_stubs_init();
jint universe_init(); // depends on codeCache_init and preuniverse_stubs_init
@ -149,13 +153,19 @@ jint init_globals() {
AOTCodeCache::init2(); // depends on universe_init, must be before initial_stubs_init
AsyncLogWriter::initialize();
#if INCLUDE_CDS
stubs_AOTAddressTable_init(); // publish external addresses used by stubs
// depends on AOTCodeCache::init2
#endif // INCLUDE_CDS
initial_stubs_init(); // stubgen initial stub routines
// stack overflow exception blob is referenced by the interpreter
AOTCodeCache::init_early_stubs_table(); // need this after stubgen initial stubs and before shared runtime initial stubs
SharedRuntime::generate_initial_stubs();
gc_barrier_stubs_init(); // depends on universe_init, must be before interpreter_init
continuations_init(); // must precede continuation stub generation
continuation_stubs_init(); // depends on continuations_init
AOTCodeCache::init3(); // depends on stubs_AOTAddressTable_init
// and continuations_init and must
// precede continuation stub generation
continuation_stubs_init(); // depends on continuations_init and AOTCodeCache::init3
#if INCLUDE_JFR
SharedRuntime::generate_jfr_stubs();
#endif
@ -164,7 +174,6 @@ jint init_globals() {
InterfaceSupport_init();
VMRegImpl::set_regName(); // need this before generate_stubs (for printing oop maps).
SharedRuntime::generate_stubs();
AOTCodeCache::init_shared_blobs_table(); // need this after generate_stubs
SharedRuntime::init_adapter_library(); // do this after AOTCodeCache::init_shared_blobs_table
return JNI_OK;
}

View File

@ -177,6 +177,11 @@ void SharedRuntime::generate_stubs() {
CAST_FROM_FN_PTR(address, SafepointSynchronize::handle_polling_page_exception));
generate_deopt_blob();
#if INCLUDE_CDS
// disallow any further generation of runtime stubs
AOTCodeCache::set_shared_stubs_complete();
#endif // INCLUDE_CDS
}
void SharedRuntime::init_adapter_library() {

View File

@ -23,6 +23,7 @@
*/
#include "asm/macroAssembler.inline.hpp"
#include "code/aotCodeCache.hpp"
#include "code/codeCache.hpp"
#include "compiler/disassembler.hpp"
#include "oops/oop.inline.hpp"
@ -30,7 +31,9 @@
#include "prims/jvmtiExport.hpp"
#include "runtime/stubCodeGenerator.hpp"
#include "runtime/stubRoutines.hpp"
#if INCLUDE_ZGC
#include "gc/z/zBarrierSetAssembler.hpp"
#endif // INCLUDE_ZGC
// Implementation of StubCodeDesc
@ -69,14 +72,16 @@ void StubCodeDesc::print() const { print_on(tty); }
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, bool print_code) {
_masm = new MacroAssembler(code);
_blob_id = BlobId::NO_BLOBID;
_stub_data = nullptr;
_print_code = PrintStubCode || print_code;
}
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, BlobId blob_id, bool print_code) {
StubCodeGenerator::StubCodeGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data, bool print_code) {
assert(StubInfo::is_stubgen(blob_id),
"not a stubgen blob %s", StubInfo::name(blob_id));
_masm = new MacroAssembler(code);
_blob_id = blob_id;
_stub_data = stub_data;
_print_code = PrintStubCode || print_code;
}
@ -91,11 +96,92 @@ StubCodeGenerator::~StubCodeGenerator() {
#endif
}
void StubCodeGenerator::setup_code_desc(const char* name, address start, address end, bool loaded_from_cache) {
StubCodeDesc* cdesc = new StubCodeDesc("StubRoutines", name, start, end);
cdesc->set_disp(uint(start - _masm->code_section()->outer()->insts_begin()));
if (loaded_from_cache) {
cdesc->set_loaded_from_cache();
}
print_stub_code_desc(cdesc);
// copied from ~StubCodeMark()
Forte::register_stub(cdesc->name(), cdesc->begin(), cdesc->end());
if (JvmtiExport::should_post_dynamic_code_generated()) {
JvmtiExport::post_dynamic_code_generated(cdesc->name(), cdesc->begin(), cdesc->end());
}
}
// Helper used to restore ranges and handler addresses retrieved from the
// AOT cache. Expects entries to contain 3 * count addresses beginning at
// offset begin; each triple identifies the start of a range, the end of
// the range and the handler pc. Start and end of a range may not be
// null. The handler pc may be null, in which case it defaults to the
// common unsafe-access exit stub.
void StubCodeGenerator::register_unsafe_access_handlers(GrowableArray<address> &entries, int begin, int count) {
  for (int i = 0; i < count; i++) {
    int offset = begin + 3 * i;
    address start = entries.at(offset);
    address end = entries.at(offset + 1);
    address handler = entries.at(offset + 2);
    assert(start != nullptr, "sanity");
    assert(end != nullptr, "sanity");
    if (handler == nullptr) {
      // a null handler marks a range that uses the common exit stub
      assert(UnsafeMemoryAccess::common_exit_stub_pc() != nullptr,
             "default unsafe handler must be set before registering unsafe region with no handler!");
      handler = UnsafeMemoryAccess::common_exit_stub_pc();
    }
    UnsafeMemoryAccess::add_to_table(start, end, handler);
  }
}
// Helper used to retrieve ranges and handler addresses registered
// during generation of the stub which spans [start, end) in order to
// allow them to be saved to an AOT cache.
void StubCodeGenerator::retrieve_unsafe_access_handlers(address start, address end, GrowableArray<address> &entries) {
  // delegates to the global UnsafeMemoryAccess table, which appends the
  // triples for every registered entry that falls inside [start, end)
  UnsafeMemoryAccess::collect_entries(start, end, entries);
}
#if INCLUDE_ZGC
// Helper used to restore ZGC pointer colouring relocation addresses
// retrieved from the AOT cache. The relevant addresses are the count
// entries starting at index begin.
void StubCodeGenerator::register_reloc_addresses(GrowableArray<address> &entries, int begin, int count) {
  LogTarget(Trace, aot, codecache, stubs) lt;
  if (lt.is_enabled()) {
    LogStream ls(lt);
    // log exactly the count entries starting at begin; the previous
    // bound (i < count) skipped entries whenever begin > 0
    for (int i = begin; i < begin + count; i++) {
      ls.print_cr("Registered reloc address " INTPTR_FORMAT, p2i(entries.at(i)));
    }
  }
  ZBarrierSetAssembler *zbs = (ZBarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();
  zbs->register_reloc_addresses(entries, begin, count);
}
// Helper used to retrieve ZGC pointer colouring relocation addresses
// registered during generation of the stub which spans [start, end) so
// that they can be saved to an AOT cache.
void StubCodeGenerator::retrieve_reloc_addresses(address start, address end, GrowableArray<address> &entries) {
  int old_length = entries.length();
  ZBarrierSetAssembler* zbs = (ZBarrierSetAssembler*)BarrierSet::barrier_set()->barrier_set_assembler();
  zbs->retrieve_reloc_addresses(start, end, entries);
  // trace only the addresses appended by the call above
  LogTarget(Trace, aot, codecache, stubs) lt;
  if (lt.is_enabled()) {
    LogStream ls(lt);
    for (int i = old_length; i < entries.length(); i++) {
      ls.print_cr("retrieved reloc address " INTPTR_FORMAT, p2i(entries.at(i)));
    }
  }
}
#endif // INCLUDE_ZGC
// Hook invoked by the StubCodeMark constructor before a stub body is
// generated; subclasses may override to emit per-stub prologue actions.
void StubCodeGenerator::stub_prolog(StubCodeDesc* cdesc) {
  // default implementation - do nothing
}
// Hook invoked by the StubCodeMark destructor after a stub body has
// been generated; the default implementation just logs/prints the
// completed stub's descriptor.
void StubCodeGenerator::stub_epilog(StubCodeDesc* cdesc) {
  print_stub_code_desc(cdesc);
}
void StubCodeGenerator::print_stub_code_desc(StubCodeDesc* cdesc) {
LogTarget(Debug, stubs) lt;
if (lt.is_enabled()) {
LogStream ls(lt);
@ -119,6 +205,52 @@ void StubCodeGenerator::stub_epilog(StubCodeDesc* cdesc) {
}
}
// Attempt to fetch the stub identified by stub_id from the AOT cache.
// Returns the stub's first entry address (after registering a code
// descriptor marked as cache-loaded) when a cached copy is available,
// or nullptr when the caller must generate the stub itself.
address StubCodeGenerator::load_archive_data(StubId stub_id, GrowableArray<address> *entries, GrowableArray<address>* extras) {
  if (_stub_data == nullptr || _stub_data->is_dumping()) {
    // nothing to consume: either AOT caching is off for this blob or we
    // are producing an archive rather than using one
    return nullptr;
  }
  // punt to stub data
  address end;
  address entry = _stub_data->load_archive_data(stub_id, end, entries, extras);
  if (entry != nullptr) {
    setup_code_desc(StubInfo::name(stub_id), entry, end, true);
  }
  return entry;
}
// Record the generated stub's [start, end) range (plus optional entry
// and extra addresses) with the attached AOT stub data, if any. A no-op
// when this generator is not participating in AOT caching.
void StubCodeGenerator::store_archive_data(StubId stub_id, address start, address end, GrowableArray<address>* entries, GrowableArray<address>* extras) {
  // punt to stub data if we have any
  if (_stub_data != nullptr) {
    _stub_data->store_archive_data(stub_id, start, end, entries, extras);
  }
}
// Print the per-blob stubgen stub counts followed by a summary of how
// many StubRoutines stubs were emitted and how many of those were
// restored from the AOT cache rather than freshly generated.
void StubCodeGenerator::print_statistics_on(outputStream* st) {
  st->print_cr("StubRoutines Stubs:");
  st->print_cr(" Initial stubs: %d", StubInfo::stub_count(BlobId::stubgen_initial_id));
  st->print_cr(" Continuation stubs: %d", StubInfo::stub_count(BlobId::stubgen_continuation_id));
  st->print_cr(" Compiler stubs: %d", StubInfo::stub_count(BlobId::stubgen_compiler_id));
  st->print_cr(" Final stubs: %d", StubInfo::stub_count(BlobId::stubgen_final_id));
  int emitted = 0;
  int loaded_from_cache = 0;
  // walk the global descriptor list, counting only StubRoutines stubs
  for (StubCodeDesc* desc = StubCodeDesc::first(); desc != nullptr; desc = StubCodeDesc::next(desc)) {
    if (strcmp(desc->group(), "StubRoutines") == 0) {
      emitted += 1;
      if (desc->loaded_from_cache()) {
        loaded_from_cache += 1;
      }
    }
  }
  st->print_cr("Total stubroutines stubs emitted: %d (generated=%d, loaded from cache=%d)", emitted, emitted - loaded_from_cache, loaded_from_cache);
}
#ifdef ASSERT
void StubCodeGenerator::verify_stub(StubId stub_id) {
assert(StubRoutines::stub_to_blob(stub_id) == blob_id(), "wrong blob %s for generation of stub %s", StubRoutines::get_blob_name(blob_id()), StubRoutines::get_stub_name(stub_id));

View File

@ -26,6 +26,7 @@
#define SHARE_RUNTIME_STUBCODEGENERATOR_HPP
#include "asm/assembler.hpp"
#include "code/aotCodeCache.hpp"
#include "memory/allocation.hpp"
#include "runtime/stubInfo.hpp"
@ -48,6 +49,7 @@ class StubCodeDesc: public CHeapObj<mtCode> {
address _begin; // points to the first byte of the stub code (included)
address _end; // points to the first byte after the stub code (excluded)
uint _disp; // Displacement relative base address in buffer.
bool _loaded_from_cache;
friend class StubCodeMark;
friend class StubCodeGenerator;
@ -65,6 +67,8 @@ class StubCodeDesc: public CHeapObj<mtCode> {
void set_disp(uint disp) { _disp = disp; }
void set_loaded_from_cache() { _loaded_from_cache = true; }
public:
static StubCodeDesc* first() { return _list; }
static StubCodeDesc* next(StubCodeDesc* desc) { return desc->_next; }
@ -81,6 +85,7 @@ class StubCodeDesc: public CHeapObj<mtCode> {
_end = end;
_disp = 0;
_list = this;
_loaded_from_cache = false;
};
static void freeze();
@ -93,12 +98,11 @@ class StubCodeDesc: public CHeapObj<mtCode> {
uint disp() const { return _disp; }
int size_in_bytes() const { return pointer_delta_as_int(_end, _begin); }
bool contains(address pc) const { return _begin <= pc && pc < _end; }
bool loaded_from_cache() const { return _loaded_from_cache; }
void print_on(outputStream* st) const;
void print() const;
};
// forward declare blob and stub id enums
// The base class for all stub-generating code generators.
// Provides utility functions.
@ -108,10 +112,20 @@ class StubCodeGenerator: public StackObj {
BlobId _blob_id;
protected:
MacroAssembler* _masm;
AOTStubData* _stub_data;
public:
void setup_code_desc(const char* name, address start, address end, bool loaded_from_cache);
// unsafe handler management
void register_unsafe_access_handlers(GrowableArray<address> &entries, int begin, int count);
void retrieve_unsafe_access_handlers(address start, address end, GrowableArray<address> &entries);
#if INCLUDE_ZGC
void register_reloc_addresses(GrowableArray<address> &entries, int begin, int count);
void retrieve_reloc_addresses(address start, address end, GrowableArray<address> &entries);
#endif // INCLUDE_ZGC
public:
StubCodeGenerator(CodeBuffer* code, bool print_code = false);
StubCodeGenerator(CodeBuffer* code, BlobId blob_id, bool print_code = false);
StubCodeGenerator(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data = nullptr, bool print_code = false);
~StubCodeGenerator();
MacroAssembler* assembler() const { return _masm; }
@ -120,9 +134,59 @@ class StubCodeGenerator: public StackObj {
virtual void stub_prolog(StubCodeDesc* cdesc); // called by StubCodeMark constructor
virtual void stub_epilog(StubCodeDesc* cdesc); // called by StubCodeMark destructor
void print_stub_code_desc(StubCodeDesc* cdesc);
static void print_statistics_on(outputStream* st);
// load_archive_data should be called before generating the stub
// identified by stub_id. If AOT caching of stubs is enabled and the
// stub is found then the address of the stub's first and, possibly,
// only entry is returned and the caller should use it instead of
// generating the stub. Otherwise a null address is returned and the
// caller should proceed to generate the stub.
//
// store_archive_data should be called when a stub has been
// successfully generated into the current blob irrespective of
// whether the current JVM is generating or consuming an AOT archive
// (the caller should not check for either case). When generating an
// archive the stub entry and end addresses are recorded for storage
// along with the current blob and also to allow references to the stub
// from other stubs or from compiled Java methods to be detected
// and marked as requiring relocation. When consuming an archive the
// stub entry address is still recorded in order to identify it as a relocation
// target. When no archive is in use the call has no side effects.
//
// start and end identify the inclusive start and exclusive end
// address for stub code and must lie in the current blob's code
// range. Stubs presented via this interface must declare at least
// one entry and start is always taken to be the first entry.
//
// Optional arrays entries and extras store other addresses of
// interest all of which must either lie in the interval (start,
// end) or be nullptr (verified by load and store methods).
//
// entries lists secondary entries for the stub each of which must
// match a corresponding entry declaration for the stub (entry count
// verified by load and store methods). Null entry addresses are
// allowed when an architecture does not require a specific entry
// but may not vary from one run to the next. If the cache is in use
// at a store (for loading or saving code) then non-null entry
// addresses are entered into the AOT cache stub address table
// allowing references to them from other stubs or nmethods to be
// relocated.
//
// extras lists other non-entry stub addresses of interest such as
// memory protection ranges and associated handler addresses
// (potentially including a null address). These do not need to
// be declared as entries and their number and meaning may vary
// according to the architecture.
address load_archive_data(StubId stub_id, GrowableArray<address> *entries = nullptr, GrowableArray<address>* extras = nullptr);
void store_archive_data(StubId stub_id, address start, address end, GrowableArray<address> *entries = nullptr, GrowableArray<address>* extras = nullptr);
#ifdef ASSERT
void verify_stub(StubId stub_id);
#endif
};
// Stack-allocated helper class used to associate a stub code with a name.

View File

@ -952,9 +952,15 @@
do_entry_init(final, arrayof_jlong_arraycopy, \
arrayof_jlong_arraycopy, arrayof_jlong_arraycopy, \
StubRoutines::arrayof_jlong_copy) \
do_entry(final, arrayof_jlong_arraycopy, \
arrayof_jlong_arraycopy_nopush, \
arrayof_jlong_arraycopy_nopush) \
do_stub(final, arrayof_oop_arraycopy) \
do_entry_init(final, arrayof_oop_arraycopy, arrayof_oop_arraycopy, \
arrayof_oop_arraycopy, StubRoutines::arrayof_oop_copy) \
do_entry(final, arrayof_oop_arraycopy, \
arrayof_oop_arraycopy_nopush, \
arrayof_oop_arraycopy_nopush) \
do_stub(final, arrayof_oop_arraycopy_uninit) \
do_entry_init(final, arrayof_oop_arraycopy_uninit, \
arrayof_oop_arraycopy_uninit, \

View File

@ -1087,6 +1087,15 @@ int StubInfo::stubgen_offset(StubId id) {
return local_offset(StubGroup::STUBGEN, id);
}
// Convert a stubgen stub id to a zero-based offset within its enclosing
// blob by subtracting the blob's base-stub offset from the stub's
// group-local offset.
int StubInfo::stubgen_offset_in_blob(BlobId blob_id, StubId id) {
  assert(blob(id) == blob_id, "sanity!");
  StubGroup group = StubGroup::STUBGEN;
  assert(stubgroup(blob_id) == group, "sanity");
  StubId base_id = stub_base(blob_id);
  assert(base_id != StubId::NO_STUBID, "sanity");
  int stub_offset = local_offset(group, id);
  int base_offset = local_offset(group, base_id);
  return stub_offset - base_offset;
}
// initialization function called to populate blob. stub and entry
// tables. this must be called before any stubs are generated
void initialize_stub_info() {

View File

@ -669,6 +669,11 @@ public:
static int c1_offset(StubId id);
static int c2_offset(StubId id);
static int stubgen_offset(StubId id);
// Convert a stub id to a unique, zero-based offset in the range of
// stub ids for a given blob in the stubgen stub group.
static int stubgen_offset_in_blob(BlobId blob_id, StubId id);
};

View File

@ -102,8 +102,7 @@ BlobId StubRoutines::stub_to_blob(StubId id) {
// Initialization
extern void StubGenerator_generate(CodeBuffer* code, BlobId blob_id); // only interface to generators
extern void StubGenerator_generate(CodeBuffer* code, BlobId blob_id, AOTStubData* stub_data); // only interface to generators
void UnsafeMemoryAccess::create_table(int max_size) {
UnsafeMemoryAccess::_table = new UnsafeMemoryAccess[max_size];
UnsafeMemoryAccess::_table_max_length = max_size;
@ -154,7 +153,8 @@ void UnsafeMemoryAccess::collect_entries(address range_start, address range_end,
if (e._error_exit_pc != _common_exit_stub_pc) {
entries.append(e._error_exit_pc);
} else {
// an address outside the stub must be the common exit stub address
// an address outside the stub must be the common exit stub
// address which is marked with a null address
entries.append(nullptr);
}
}
@ -169,6 +169,33 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
assert(StubInfo::is_stubgen(blob_id), "not a stubgen blob %s", StubInfo::name(blob_id));
ResourceMark rm;
TraceTime timer(timer_msg, TRACETIME_LOG(Info, startuptime));
// If we are loading stubs we need to check if we can retrieve a
// blob and/or an associated archived stub descriptor from the
// AOTCodeCache. If we are storing stubs we need to create a blob
// but we still need a stub data descriptor to fill in during
// generation.
AOTStubData stub_data(blob_id);
AOTStubData* stub_data_p = nullptr;
LogTarget(Info, stubs) lt;
if (code_size > 0 && stub_data.is_using()) {
// AOTCodeEntry tracks and logs status of any cached blob
bool loaded = stub_data.load_code_blob();
if (loaded) {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Found blob %s in AOT cache", StubInfo::name(blob_id));
}
stub_data_p = &stub_data;
}
} else if (stub_data.is_dumping()) {
stub_data_p = &stub_data;
}
// Even if we managed to load a blob from the AOT cache we still
// need to allocate a code blob and associated buffer. The AOT blob
// may not include all the stubs we need for this runtime.
// Add extra space for large CodeEntryAlignment
int size = code_size + CodeEntryAlignment * max_aligned_stubs;
BufferBlob* stubs_code = BufferBlob::create(buffer_name, size);
@ -178,6 +205,10 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
// In that case we can tolerate an allocation failure because the
// compiler will have been shut down and we have no need of the
// blob.
// TODO: Ideally we would still like to try to use any AOT cached
// blob here but we don't have a fallback if we find that it is
// missing stubs we need so for now we exit. This should only
// happen in cases where we have a very small code cache.
if (Thread::current()->is_Compiler_thread()) {
assert(blob_id == BlobId::stubgen_compiler_id, "sanity");
assert(DelayCompilerStubsGeneration, "sanity");
@ -187,10 +218,12 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
vm_exit_out_of_memory(code_size, OOM_MALLOC_ERROR, "CodeCache: no room for %s", buffer_name);
}
CodeBuffer buffer(stubs_code);
StubGenerator_generate(&buffer, blob_id);
short buffer_locs[20];
buffer.insts()->initialize_shared_locs((relocInfo*)buffer_locs,
sizeof(buffer_locs)/sizeof(relocInfo));
StubGenerator_generate(&buffer, blob_id, stub_data_p);
if (code_size == 0) {
assert(buffer.insts_size() == 0, "should not write into buffer when bob size declared as 0");
LogTarget(Info, stubs) lt;
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("%s\t not generated", buffer_name);
@ -203,7 +236,35 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
"increase %s, code_size: %d, used: %d, free: %d",
assert_msg, code_size, buffer.total_content_size(), buffer.insts_remaining());
LogTarget(Info, stubs) lt;
if (stub_data.is_using()) {
// we generated some new entries so republish all entries. TODO:
// ensure we collect and publish the preuniverse stubs but
// don't try to save them
AOTCodeCache::publish_stub_addresses(*stubs_code, blob_id, &stub_data);
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Republished entries for blob '%s'", buffer_name);
}
} else if (stub_data.is_dumping()) {
// save the blob and publish the entry addresses
if (stub_data.store_code_blob(*stubs_code, &buffer)) {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Stored blob '%s' to Startup Code Cache", buffer_name);
}
} else {
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("Failed to store blob '%s' to Startup Code Cache", buffer_name);
}
}
}
// close off recording of any further stubgen generation
if (blob_id == BlobId::stubgen_final_id) {
AOTCodeCache::set_stubgen_stubs_complete();
}
if (lt.is_enabled()) {
LogStream ls(lt);
ls.print_cr("%s\t [" INTPTR_FORMAT ", " INTPTR_FORMAT "] used: %d, free: %d",
@ -214,6 +275,8 @@ static BufferBlob* initialize_stubs(BlobId blob_id,
return stubs_code;
}
// per blob initializer methods StubRoutines::initialize_xxx_stubs()
#define DEFINE_BLOB_INIT_METHOD(blob_name) \
void StubRoutines::initialize_ ## blob_name ## _stubs() { \
if (STUBGEN_BLOB_FIELD_NAME(blob_name) == nullptr) { \
@ -234,6 +297,7 @@ STUBGEN_BLOBS_DO(DEFINE_BLOB_INIT_METHOD)
#undef DEFINE_BLOB_INIT_METHOD
// external driver API functions for per blob init: xxx_stubs_init()
#define DEFINE_BLOB_INIT_FUNCTION(blob_name) \
void blob_name ## _stubs_init() { \
@ -244,11 +308,18 @@ STUBGEN_BLOBS_DO(DEFINE_BLOB_INIT_FUNCTION)
#undef DEFINE_BLOB_INIT_FUNCTION
#if INCLUDE_CDS
// non-generated external API init driver function: publishes the
// external addresses referenced by stubs into the AOT address table
void stubs_AOTAddressTable_init() { StubRoutines::init_AOTAddressTable(); }
#endif // INCLUDE_CDS
/*
* we generate the underlying driver method but this wrapper is needed
* to perform special handling depending on where the compiler init
* gets called from. it ought to be possible to remove this at some
* point and have a determinate ordered init.
* we generate the underlying driver function compiler_stubs_init()
* but this wrapper is needed to perform special handling depending on
* where the compiler init gets called from. it ought to be possible
* to remove this at some point and have a determinate ordered init.
*/
void compiler_stubs_init(bool in_compiler_thread) {

View File

@ -112,6 +112,8 @@ class UnsafeMemoryAccess : public CHeapObj<mtCode> {
address _end_pc;
address _error_exit_pc;
public:
// each table entry requires 3 addresses
static const int COLUMN_COUNT = 3;
static address _common_exit_stub_pc;
static UnsafeMemoryAccess* _table;
static int _table_length;
@ -130,6 +132,7 @@ class UnsafeMemoryAccess : public CHeapObj<mtCode> {
static UnsafeMemoryAccess* add_to_table(address start_pc, address end_pc, address error_exit_pc) {
guarantee(_table_length < _table_max_length, "Incorrect UnsafeMemoryAccess::_table_max_length");
UnsafeMemoryAccess* entry = &_table[_table_length];
assert(start_pc != nullptr, "invalid start address");
entry->set_start_pc(start_pc);
entry->set_end_pc(end_pc);
entry->set_error_exit_pc(error_exit_pc);
@ -283,6 +286,11 @@ public:
static BlobId stub_to_blob(StubId id);
#endif
#if INCLUDE_CDS
// AOT Initalization -- implementation is arch-specific
static void init_AOTAddressTable();
#endif // INCLUDE_CDS
// Debugging
static jint verify_oop_count() { return _verify_oop_count; }
static jint* verify_oop_count_addr() { return &_verify_oop_count; }

View File

@ -154,6 +154,14 @@ public class TypeAnnotations {
} else {
pos.push(env.enclMethod);
}
Env<AttrContext> env1 = env;
while (env1 != null && !env1.tree.hasTag(Tag.CLASSDEF)) {
if (env1.tree instanceof JCLambda l) {
pos.currentLambda = l;
break;
}
env1 = env1.next;
}
pos.scan(tree);
} finally {
log.useSource(oldSource);

View File

@ -23,7 +23,8 @@
*/
/**
* @test
* @test id=default_gc
* @requires vm.gc != "Z"
* @summary Sanity test of combinations of the AOT Code Caching diagnostic flags
* @requires vm.cds.supports.aot.code.caching
* @requires vm.compiler1.enabled & vm.compiler2.enabled
@ -38,7 +39,64 @@
* JavacBenchApp$ClassFile
* JavacBenchApp$FileManager
* JavacBenchApp$SourceFile
* @run driver AOTCodeFlags
* @run driver/timeout=1500 AOTCodeFlags
*/
/**
* @test id=Z
* @requires vm.gc.Z
* @summary Sanity test of combinations of the AOT Code Caching diagnostic flags
* @requires vm.cds.supports.aot.code.caching
* @requires vm.compiler1.enabled & vm.compiler2.enabled
* @comment Both C1 and C2 JIT compilers are required because the test verifies
* compiler's runtime blobs generation.
* @requires vm.opt.VerifyOops == null | vm.opt.VerifyOops == false
* @comment VerifyOops flag switch off AOT code generation. Skip it.
* @library /test/lib /test/setup_aot
* @build AOTCodeFlags JavacBenchApp
* @run driver jdk.test.lib.helpers.ClassFileInstaller -jar app.jar
* JavacBenchApp
* JavacBenchApp$ClassFile
* JavacBenchApp$FileManager
* JavacBenchApp$SourceFile
* @run driver/timeout=1500 AOTCodeFlags Z
*/
/**
* @test id=shenandoah
* @requires vm.gc.Shenandoah
* @summary Sanity test of combinations of the AOT Code Caching diagnostic flags
* @requires vm.cds.supports.aot.code.caching
* @requires vm.compiler1.enabled & vm.compiler2.enabled
* @comment Both C1 and C2 JIT compilers are required because the test verifies
* compiler's runtime blobs generation.
* @requires vm.opt.VerifyOops == null | vm.opt.VerifyOops == false
* @comment VerifyOops flag switch off AOT code generation. Skip it.
* @library /test/lib /test/setup_aot
* @build AOTCodeFlags JavacBenchApp
* @run driver jdk.test.lib.helpers.ClassFileInstaller -jar app.jar
* JavacBenchApp
* JavacBenchApp$ClassFile
* JavacBenchApp$FileManager
* JavacBenchApp$SourceFile
* @run driver/timeout=1500 AOTCodeFlags Shenandoah
*/
/**
* @test id=parallel
* @requires vm.gc.Parallel
* @summary Sanity test of combinations of the AOT Code Caching diagnostic flags
* @requires vm.cds.supports.aot.code.caching
* @requires vm.compiler1.enabled & vm.compiler2.enabled
* @comment Both C1 and C2 JIT compilers are required because the test verifies
* compiler's runtime blobs generation.
* @requires vm.opt.VerifyOops == null | vm.opt.VerifyOops == false
* @comment VerifyOops flag switch off AOT code generation. Skip it.
* @library /test/lib /test/setup_aot
* @build AOTCodeFlags JavacBenchApp
* @run driver jdk.test.lib.helpers.ClassFileInstaller -jar app.jar
* JavacBenchApp
* JavacBenchApp$ClassFile
* JavacBenchApp$FileManager
* JavacBenchApp$SourceFile
* @run driver/timeout=1500 AOTCodeFlags Parallel
*/
import java.util.ArrayList;
@ -48,20 +106,23 @@ import jdk.test.lib.cds.CDSAppTester;
import jdk.test.lib.process.OutputAnalyzer;
public class AOTCodeFlags {
private static String gcName = null;
public static void main(String... args) throws Exception {
Tester t = new Tester();
Tester t = new Tester(args.length == 0 ? null : args[0]);
// Run only 2 modes (0 - no AOT code, 1 - AOT adapters) until JDK-8357398 is fixed
for (int mode = 0; mode < 2; mode++) {
for (int mode = 0; mode < 4; mode++) {
t.setTestMode(mode);
t.run(new String[] {"AOT", "--two-step-training"});
}
}
static class Tester extends CDSAppTester {
private int testMode;
private String gcName;
public Tester() {
public Tester(String name) {
super("AOTCodeFlags");
testMode = 0;
gcName = name;
}
boolean isAdapterCachingOn() {
@ -84,6 +145,24 @@ public class AOTCodeFlags {
return list;
}
public List<String> getGCArgs() {
List<String> args = new ArrayList<String>();
args.add("-Xmx100M");
if (gcName == null) {
return args;
}
switch (gcName) {
case "G1":
case "Z":
case "Shenandoah":
case "Parallel":
args.add("-XX:+Use" + gcName + "GC");
return args;
default:
throw new RuntimeException("Unexpected GC name " + gcName);
}
}
@Override
public String classpath(RunMode runMode) {
return "app.jar";
@ -97,10 +176,12 @@ public class AOTCodeFlags {
List<String> args = getVMArgsForTestMode();
args.addAll(List.of("-Xlog:aot+codecache+init=debug",
"-Xlog:aot+codecache+exit=debug"));
args.addAll(getGCArgs());
return args.toArray(new String[0]);
}
}
return new String[] {};
List<String> args = getGCArgs();
return args.toArray(new String[args.size()]);
}
@Override
@ -147,7 +228,10 @@ public class AOTCodeFlags {
// AOTStubCaching is on, non-zero stubs should be stored/loaded
out.shouldMatch("Shared Blobs:\\s+total=[1-9][0-9]+");
out.shouldMatch("C1 Blobs:\\s+total=[1-9][0-9]+");
out.shouldMatch("C2 Blobs:\\s+total=[1-9][0-9]+");
// we do not currently load or store C2 stubs
// because we are seeing weird memory errors
// when loading them -- see JDK-8357593
out.shouldMatch("C2 Blobs:\\s+total=0");
break;
}
} else {

View File

@ -1,41 +0,0 @@
#
# Copyright (c) 2026, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
#############################################################################
#
# List of quarantined tests for testing with -XX:+UseCompactObjectHeaders
#
#############################################################################
#############################################################################
# Preview project specific failures go here at the end of the file.
#
# These are NOT failures that occur with the '--enable-preview' option
# specified; those go in the appropriate ProblemList-enable-preview.txt file.
# These are failures that occur WITHOUT the '--enable-preview' option
# specified AND occur because of some issue with preview project code,
# in either implementation or test code.
#############################################################################

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -22,7 +22,11 @@
*/
import java.awt.*;
import java.awt.Dialog;
import java.awt.EventQueue;
import java.awt.FileDialog;
import java.awt.Frame;
import java.awt.Toolkit;
// DWD: Dialog, Window, Dialog

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -35,6 +35,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -32,6 +32,7 @@ import java.awt.Dialog;
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -21,8 +21,11 @@
* questions.
*/
import java.awt.*;
import java.awt.Dialog;
import java.awt.EventQueue;
import java.awt.FileDialog;
import java.awt.Frame;
import java.awt.Toolkit;
// FWD: Frame, Window, Dialog

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -21,7 +21,11 @@
* questions.
*/
import java.awt.*;
import java.awt.Dialog;
import java.awt.EventQueue;
import java.awt.FileDialog;
import java.awt.Frame;
import java.awt.Toolkit;
public class FileDialogModalityTest {

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2007, 2026, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,6 +34,7 @@
* @library ../helpers /lib/client/
* @library /test/lib
* @build ExtendedRobot
* @build jdk.test.lib.Asserts
* @build Flag
* @build TestDialog
* @build TestFrame

Some files were not shown because too many files have changed in this diff Show More