diff --git a/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp b/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp
index e0d9d6c0fb2..6ffb9aa7243 100644
--- a/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/jvmciCodeInstaller_aarch64.cpp
@@ -130,21 +130,21 @@ void CodeInstaller::pd_relocate_JavaMethod(CodeBuffer &cbuf, methodHandle& metho
       assert(!method->is_static(), "cannot call static method with invokeinterface");
       NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
       _instructions->relocate(call->instruction_address(), virtual_call_Relocation::spec(_invoke_mark_pc));
-      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_virtual_call_stub());
+      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_virtual_call_stub(), JVMCI_CHECK);
       break;
     }
     case INVOKESTATIC: {
       assert(method->is_static(), "cannot call non-static method with invokestatic");
       NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
       _instructions->relocate(call->instruction_address(), relocInfo::static_call_type);
-      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_static_call_stub());
+      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_static_call_stub(), JVMCI_CHECK);
       break;
     }
     case INVOKESPECIAL: {
       assert(!method->is_static(), "cannot call static method with invokespecial");
       NativeCall* call = nativeCall_at(_instructions->start() + pc_offset);
       _instructions->relocate(call->instruction_address(), relocInfo::opt_virtual_call_type);
-      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_opt_virtual_call_stub());
+      call->trampoline_jump(cbuf, SharedRuntime::get_resolve_opt_virtual_call_stub(), JVMCI_CHECK);
       break;
     }
     default:
diff --git a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp
index b154c434069..0844fdf6021 100644
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.cpp
@@ -39,6 +39,9 @@
 #ifdef COMPILER1
 #include "c1/c1_Runtime1.hpp"
 #endif
+#if INCLUDE_JVMCI
+#include "jvmci/jvmciEnv.hpp"
+#endif
 
 void NativeCall::verify() {
   assert(NativeCall::is_call_at((address)this), "unexpected code at call site");
@@ -523,26 +526,29 @@ void NativeCallTrampolineStub::set_destination(address new_destination) {
   OrderAccess::release();
 }
 
+#if INCLUDE_JVMCI
 // Generate a trampoline for a branch to dest. If there's no need for a
 // trampoline, simply patch the call directly to dest.
-address NativeCall::trampoline_jump(CodeBuffer &cbuf, address dest) {
+void NativeCall::trampoline_jump(CodeBuffer &cbuf, address dest, JVMCI_TRAPS) {
   MacroAssembler a(&cbuf);
-  address stub = NULL;
 
-  if (a.far_branches()
-      && ! is_NativeCallTrampolineStub_at(instruction_address() + displacement())) {
-    stub = a.emit_trampoline_stub(instruction_address() - cbuf.insts()->start(), dest);
-  }
-
-  if (stub == NULL) {
-    // If we generated no stub, patch this call directly to dest.
-    // This will happen if we don't need far branches or if there
-    // already was a trampoline.
+  if (!a.far_branches()) {
+    // If not using far branches, patch this call directly to dest.
     set_destination(dest);
+  } else if (!is_NativeCallTrampolineStub_at(instruction_address() + displacement())) {
+    // If we want far branches and there isn't a trampoline stub, emit one.
+    address stub = a.emit_trampoline_stub(instruction_address() - cbuf.insts()->start(), dest);
+    if (stub == nullptr) {
+      JVMCI_ERROR("could not emit trampoline stub - code cache is full");
+    }
+    // The relocation created while emitting the stub will ensure this
+    // call instruction is subsequently patched to call the stub.
+  } else {
+    // Not sure how this can happen, but be defensive.
+    JVMCI_ERROR("single-use stub should not exist");
   }
-
-  return stub;
 }
+#endif
 
 void NativePostCallNop::make_deopt() {
   NativeDeoptInstruction::insert(addr_at(0));
diff --git a/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp b/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp
index 8c220ada584..bb9eb9c4dbf 100644
--- a/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp
+++ b/src/hotspot/cpu/aarch64/nativeInst_aarch64.hpp
@@ -29,6 +29,9 @@
 #include "asm/assembler.hpp"
 #include "runtime/icache.hpp"
 #include "runtime/os.hpp"
+#if INCLUDE_JVMCI
+#include "jvmci/jvmciExceptions.hpp"
+#endif
 
 // We have interfaces for the following instructions:
 // - NativeInstruction
@@ -251,7 +254,9 @@ public:
   void set_destination_mt_safe(address dest, bool assert_lock = true);
 
   address get_trampoline();
-  address trampoline_jump(CodeBuffer &cbuf, address dest);
+#if INCLUDE_JVMCI
+  void trampoline_jump(CodeBuffer &cbuf, address dest, JVMCI_TRAPS);
+#endif
 };
 
 inline NativeCall* nativeCall_at(address address) {
diff --git a/src/hotspot/share/code/compiledIC.hpp b/src/hotspot/share/code/compiledIC.hpp
index 9933310c239..22400da6469 100644
--- a/src/hotspot/share/code/compiledIC.hpp
+++ b/src/hotspot/share/code/compiledIC.hpp
@@ -338,6 +338,8 @@ class StaticCallInfo {
 class CompiledStaticCall : public ResourceObj {
  public:
   // Code
+
+  // Returns nullptr if CodeBuffer::expand fails.
   static address emit_to_interp_stub(CodeBuffer &cbuf, address mark = nullptr);
   static int to_interp_stub_size();
   static int to_trampoline_stub_size();
diff --git a/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp b/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp
index ab7af4d8816..9575b59e0a7 100644
--- a/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp
+++ b/src/hotspot/share/jvmci/jvmciCodeInstaller.cpp
@@ -1178,7 +1178,9 @@ void CodeInstaller::site_Call(CodeBuffer& buffer, u1 tag, jint pc_offset, HotSpo
     CodeInstaller::pd_relocate_JavaMethod(buffer, method, pc_offset, JVMCI_CHECK);
     if (_next_call_type == INVOKESTATIC || _next_call_type == INVOKESPECIAL) {
       // Need a static call stub for transitions from compiled to interpreted.
-      CompiledStaticCall::emit_to_interp_stub(buffer, _instructions->start() + pc_offset);
+      if (CompiledStaticCall::emit_to_interp_stub(buffer, _instructions->start() + pc_offset) == nullptr) {
+        JVMCI_ERROR("could not emit to_interp stub - code cache is full");
+      }
     }
   }
 
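
Aside on the error-handling idiom the patch relies on: JVMCI_TRAPS / JVMCI_CHECK / JVMCI_ERROR follow HotSpot's exception-check macro convention. The callee records the failure in the JVMCIEnv and returns immediately, and JVMCI_CHECK at each call site expands to an early return while the error is pending, so a full code cache now unwinds cleanly instead of hitting an assert or crash. The standalone sketch below imitates only that control flow; Env, RETURN_ERROR, CHECK_PENDING, and emit_stub are hypothetical stand-ins for JVMCIEnv, JVMCI_ERROR, JVMCI_CHECK, and emit_trampoline_stub, not the real HotSpot definitions.

// Minimal, self-contained sketch of the JVMCI "traps" idiom; all names
// here are illustrative stand-ins, not the actual HotSpot macros.
#include <cstdio>

struct Env {
  const char* pending_error = nullptr;   // models a pending JVMCI exception
};

// Callee side: record the error and return, like JVMCI_ERROR.
#define RETURN_ERROR(env, msg) do { (env)->pending_error = (msg); return; } while (0)
// Caller side: bail out if an error is already pending, like JVMCI_CHECK.
#define CHECK_PENDING(env)     do { if ((env)->pending_error) return; } while (0)

// Models MacroAssembler::emit_trampoline_stub, which yields nullptr
// when the code cache cannot fit the stub.
static const char* emit_stub(bool code_cache_full) {
  return code_cache_full ? nullptr : "stub";
}

// Models the patched NativeCall::trampoline_jump: report, don't assert.
static void trampoline_jump(Env* env, bool code_cache_full) {
  if (emit_stub(code_cache_full) == nullptr) {
    RETURN_ERROR(env, "could not emit trampoline stub - code cache is full");
  }
}

// Models pd_relocate_JavaMethod: every call is followed by a check,
// so code installation stops at the first failure.
static void relocate_java_method(Env* env, bool code_cache_full) {
  trampoline_jump(env, code_cache_full);
  CHECK_PENDING(env);
  std::puts("call site relocated");
}

int main() {
  Env env;
  relocate_java_method(&env, /*code_cache_full=*/true);
  if (env.pending_error) {
    std::printf("bailed out: %s\n", env.pending_error);  // graceful failure
  }
  return 0;
}

This is why the patch changes trampoline_jump from returning the stub address to returning void with a JVMCI_TRAPS parameter: the only callers are JVMCI call-site relocations, and an error reported through the environment propagates out of pd_relocate_JavaMethod via the JVMCI_CHECK arguments added in the first hunk.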