diff --git a/src/hotspot/cpu/riscv/assembler_riscv.hpp b/src/hotspot/cpu/riscv/assembler_riscv.hpp
index c267cb669ac..0828342ee65 100644
--- a/src/hotspot/cpu/riscv/assembler_riscv.hpp
+++ b/src/hotspot/cpu/riscv/assembler_riscv.hpp
@@ -912,6 +912,32 @@ protected:
     emit(insn);
   }
 
+ public:
+
+  static uint32_t encode_jal(Register Rd, const int32_t offset) {
+    guarantee(is_simm21(offset) && ((offset % 2) == 0), "offset is invalid.");
+    uint32_t insn = 0;
+    patch((address)&insn, 6, 0, 0b1101111);
+    patch_reg((address)&insn, 7, Rd);
+    patch((address)&insn, 19, 12, (uint32_t)((offset >> 12) & 0xff));
+    patch((address)&insn, 20, (uint32_t)((offset >> 11) & 0x1));
+    patch((address)&insn, 30, 21, (uint32_t)((offset >> 1) & 0x3ff));
+    patch((address)&insn, 31, (uint32_t)((offset >> 20) & 0x1));
+    return insn;
+  }
+
+  static uint32_t encode_jalr(Register Rd, Register Rs, const int32_t offset) {
+    guarantee(is_simm12(offset), "offset is invalid.");
+    uint32_t insn = 0;
+    patch((address)&insn, 6, 0, 0b1100111);
+    patch_reg((address)&insn, 7, Rd);
+    patch((address)&insn, 14, 12, 0b000);
+    patch_reg((address)&insn, 15, Rs);
+    int32_t val = offset & 0xfff;
+    patch((address)&insn, 31, 20, val);
+    return insn;
+  }
+
  protected:
 
   enum barrier {
diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
index eb8ef6349b0..9722fd49d4d 100644
--- a/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
+++ b/src/hotspot/cpu/riscv/nativeInst_riscv.cpp
@@ -33,6 +33,7 @@
 #include "runtime/safepoint.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
+#include "utilities/align.hpp"
 #include "utilities/ostream.hpp"
 #ifdef COMPILER1
 #include "c1/c1_Runtime1.hpp"
@@ -52,15 +53,15 @@ address NativeCall::destination() const {
   address addr = instruction_address();
   assert(NativeCall::is_at(addr), "unexpected code at call site");
 
-  address destination = MacroAssembler::target_addr_for_insn(addr);
+  address stub_addr = MacroAssembler::target_addr_for_insn(addr);
 
   CodeBlob* cb = CodeCache::find_blob(addr);
   assert(cb != nullptr && cb->is_nmethod(), "nmethod expected");
   nmethod *nm = (nmethod *)cb;
-  assert(nm != nullptr, "Sanity");
-  assert(nm->stub_contains(destination), "Sanity");
-  assert(destination != nullptr, "Sanity");
-  return stub_address_destination_at(destination);
+  assert(nm->stub_contains(stub_addr), "Sanity");
+  assert(stub_addr != nullptr, "Sanity");
+
+  return stub_address_destination_at(stub_addr);
 }
 
 address NativeCall::reloc_destination() {
@@ -89,6 +90,30 @@ void NativeCall::print() {
   tty->print_cr(PTR_FORMAT ": auipc,ld,jalr x1, offset/reg, ", p2i(instruction_address()));
 }
 
+void NativeCall::optimize_call(address dest, bool mt_safe) {
+  // Skip over auipc + ld
+  address jmp_ins_pc = instruction_address() + 2 * NativeInstruction::instruction_size;
+  // Runtime calls may be unaligned, but they are never changed after relocation.
+  assert(!mt_safe || is_aligned(jmp_ins_pc, NativeInstruction::instruction_size), "Must be naturally aligned: " PTR_FORMAT, p2i(jmp_ins_pc));
+  // If the destination is reachable, use JAL
+  if (Assembler::reachable_from_branch_at(jmp_ins_pc, dest)) {
+    int64_t distance = dest - jmp_ins_pc;
+    uint32_t new_jal = Assembler::encode_jal(ra, distance);
+    Atomic::store((uint32_t *)jmp_ins_pc, new_jal);
+  } else if (!MacroAssembler::is_jalr_at(jmp_ins_pc)) { // The jalr is always identical: jalr ra, 0(t1)
+    uint32_t new_jalr = Assembler::encode_jalr(ra, t1, 0);
+    Atomic::store((uint32_t *)jmp_ins_pc, new_jalr);
+  } else {
+    // No change to the instruction stream
+    return;
+  }
+  // We changed the instruction stream
+  if (mt_safe) {
+    // ICache invalidation provides a leading full fence, so it happens after we have changed the instruction stream.
+    ICache::invalidate_range(jmp_ins_pc, NativeInstruction::instruction_size);
+  }
+}
+
 bool NativeCall::set_destination_mt_safe(address dest) {
   assert(NativeCall::is_at(instruction_address()), "unexpected code at call site");
   assert((CodeCache_lock->is_locked() || SafepointSynchronize::is_at_safepoint()) ||
@@ -96,15 +121,17 @@
          "concurrent code patching");
 
   address stub_addr = stub_address();
-  if (stub_addr != nullptr) {
-    set_stub_address_destination_at(stub_addr, dest);
-    return true;
-  }
+  assert(stub_addr != nullptr, "No stub?");
+  set_stub_address_destination_at(stub_addr, dest); // release
+  // optimize_call happens after we have stored the new address in the address stub.
+  // It patches jalr -> jal or jal -> jalr, depending on dest.
+  optimize_call(dest, true);
 
-  return false;
+  return true;
 }
 
-bool NativeCall::reloc_set_destination(address dest) {
+// The argument passed in is the address of the stub containing the destination
+bool NativeCall::reloc_set_destination(address stub_addr) {
   address call_addr = instruction_address();
   assert(NativeCall::is_at(call_addr), "unexpected code at call site");
 
@@ -113,10 +140,12 @@
 
   if (code->is_nmethod()) {
     // TODO: Need to revisit this when porting the AOT features.
-    assert(dest != nullptr, "Sanity");
-    assert(dest == trampoline_stub_Relocation::get_trampoline_for(call_addr,
-           code->as_nmethod()), "Sanity");
-    MacroAssembler::pd_patch_instruction_size(call_addr, dest);
+    assert(stub_addr != nullptr, "Sanity");
+    assert(stub_addr == trampoline_stub_Relocation::get_trampoline_for(call_addr, code->as_nmethod()), "Sanity");
+    MacroAssembler::pd_patch_instruction_size(call_addr, stub_addr); // patches auipc + ld to stub_addr
+
+    address dest = stub_address_destination_at(stub_addr);
+    optimize_call(dest, false); // patches jalr -> jal or jal -> jalr, depending on dest
   }
 
   return true;
@@ -142,9 +171,9 @@ address NativeCall::stub_address() {
   CodeBlob *code = CodeCache::find_blob(call_addr);
   assert(code != nullptr, "Could not find the containing code blob");
 
-  address dest = MacroAssembler::target_addr_for_insn(call_addr);
-  assert(code->contains(dest), "Sanity");
-  return dest;
+  address stub_addr = MacroAssembler::target_addr_for_insn(call_addr);
+  assert(code->contains(stub_addr), "Sanity");
+  return stub_addr;
 }
 
 bool NativeCall::is_at(address addr) {
@@ -160,6 +189,15 @@ bool NativeCall::is_at(address addr) {
       (MacroAssembler::extract_rd(addr + 2 * instr_size) == x1)) {
     return true;
   }
+  if (MacroAssembler::is_auipc_at(addr) &&
+      MacroAssembler::is_ld_at(addr + instr_size) &&
+      MacroAssembler::is_jal_at(addr + 2 * instr_size) &&
+      (MacroAssembler::extract_rd(addr) == x6) &&
+      (MacroAssembler::extract_rd(addr + instr_size) == x6) &&
+      (MacroAssembler::extract_rs1(addr + instr_size) == x6) &&
+      (MacroAssembler::extract_rd(addr + 2 * instr_size) == x1)) {
+    return true;
+  }
   return false;
 }
 
diff --git a/src/hotspot/cpu/riscv/nativeInst_riscv.hpp b/src/hotspot/cpu/riscv/nativeInst_riscv.hpp
index 8e6632419e2..4235e23d421 100644
--- a/src/hotspot/cpu/riscv/nativeInst_riscv.hpp
+++ b/src/hotspot/cpu/riscv/nativeInst_riscv.hpp
@@ -156,6 +156,10 @@ class NativeCall: private NativeInstruction {
   static void set_stub_address_destination_at(address dest, address value);
   // return target address at stub
   static address stub_address_destination_at(address src);
+  // We have either a jalr or a jal, depending on the distance to the old destination.
+  // This method emits a new jal if the new destination is within jal reach.
+  // Otherwise it restores the jalr, which can reach any destination.
+  void optimize_call(address dest, bool mt_safe = true);
 };
 
 // An interface for accessing/manipulating native mov reg, imm instructions.
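
Reviewer note: as a quick way to sanity-check the J-type and I-type bit layouts that the new Assembler::encode_jal / Assembler::encode_jalr helpers produce, here is a minimal standalone sketch. It is not part of the patch; sketch_encode_jal, sketch_encode_jalr and the expected constants below are illustrative assumptions, not HotSpot code. It rebuilds the same fields with plain shifts and compares them against the well-known encodings of jal ra, 0, jal ra, 4 and jalr ra, 0(t1):

// Standalone sketch: mirrors the bit layout of encode_jal/encode_jalr
// using plain shifts instead of HotSpot's patch helpers.
#include <cassert>
#include <cstdint>
#include <cstdio>

// J-type: imm[20|10:1|11|19:12] rd opcode(0x6f)
static uint32_t sketch_encode_jal(uint32_t rd, int32_t offset) {
  assert(rd < 32 && (offset & 1) == 0);
  uint32_t insn = 0x6f;                               // JAL opcode
  insn |= rd << 7;                                    // rd
  insn |= (uint32_t)((offset >> 12) & 0xff)  << 12;   // imm[19:12]
  insn |= (uint32_t)((offset >> 11) & 0x1)   << 20;   // imm[11]
  insn |= (uint32_t)((offset >> 1)  & 0x3ff) << 21;   // imm[10:1]
  insn |= (uint32_t)((offset >> 20) & 0x1)   << 31;   // imm[20]
  return insn;
}

// I-type: imm[11:0] rs1 funct3(0b000) rd opcode(0x67)
static uint32_t sketch_encode_jalr(uint32_t rd, uint32_t rs1, int32_t offset) {
  assert(rd < 32 && rs1 < 32);
  uint32_t insn = 0x67;                               // JALR opcode
  insn |= rd << 7;                                    // rd
  insn |= rs1 << 15;                                  // rs1
  insn |= (uint32_t)(offset & 0xfff) << 20;           // imm[11:0]
  return insn;
}

int main() {
  // Well-known encodings, with ra = x1 and t1 = x6.
  assert(sketch_encode_jal(1, 0) == 0x000000efu);     // jal ra, 0
  assert(sketch_encode_jal(1, 4) == 0x004000efu);     // jal ra, 4 (imm[10:1] = 2 in bits 30:21)
  assert(sketch_encode_jalr(1, 6, 0) == 0x000300e7u); // jalr ra, 0(t1)
  printf("encodings match\n");
  return 0;
}

The scattered imm[20|10:1|11|19:12] layout is why encode_jal has to patch four separate bit ranges, while the jalr immediate is a single contiguous imm[11:0] field in bits 31:20.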