8266746: C1: Replace UnsafeGetRaw with UnsafeGet when setting up OSR entry block

Replace UnsafeGetRaw with UnsafeGetObject when setting up OSR entry block, and rename Unsafe{Get,Put}Object to Unsafe{Get,Put}

Reviewed-by: thartmann, dlong, mdoerr
This commit is contained in:
Yi Yang 2021-07-01 01:39:50 +00:00
parent 4660f72c38
commit d89e630cdf
15 changed files with 159 additions and 670 deletions

View File

@ -76,6 +76,5 @@ enum {
#define PATCHED_ADDR (204)
#define CARDTABLEBARRIERSET_POST_BARRIER_HELPER
#define GENERATE_ADDRESS_IS_PREFERRED
#endif // CPU_ARM_C1_DEFS_ARM_HPP

View File

@ -1172,7 +1172,6 @@ void LIR_Assembler::mem2reg(LIR_Opr src_opr, LIR_Opr dest, BasicType type,
assert(Assembler::is_simm16(disp_value), "should have set this up");
offset = load(src, disp_value, to_reg, type, wide, unaligned);
} else {
assert(!unaligned, "unexpected");
offset = load(src, disp_reg, to_reg, type, wide);
}
@ -1301,7 +1300,6 @@ void LIR_Assembler::reg2mem(LIR_Opr from_reg, LIR_Opr dest, BasicType type,
assert(Assembler::is_simm16(disp_value), "should have set this up");
offset = store(from_reg, src, disp_value, type, wide, unaligned);
} else {
assert(!unaligned, "unexpected");
offset = store(from_reg, src, disp_reg, type, wide);
}

View File

@ -857,171 +857,16 @@ void Canonicalizer::do_Throw (Throw* x) {}
void Canonicalizer::do_Base (Base* x) {}
void Canonicalizer::do_OsrEntry (OsrEntry* x) {}
void Canonicalizer::do_ExceptionObject(ExceptionObject* x) {}
// Try to recognize an index expression of the form (index << log2_scale) or
// (index * 2^k) inside a raw-unsafe address computation.  On success, *index
// and *log2_scale are filled in and true is returned; any other instruction
// shape returns false and leaves the outputs untouched.
static bool match_index_and_scale(Instruction* instr,
Instruction** index,
int* log2_scale) {
// Skip conversion ops. This works only on 32bit because of the implicit l2i that the
// unsafe performs.
#ifndef _LP64
Convert* convert = instr->as_Convert();
if (convert != NULL && convert->op() == Bytecodes::_i2l) {
assert(convert->value()->type() == intType, "invalid input type");
instr = convert->value();
}
#endif
// Case 1: a left shift by a small integer constant -- the shift amount is the scale.
ShiftOp* shift = instr->as_ShiftOp();
if (shift != NULL) {
if (shift->op() == Bytecodes::_lshl) {
assert(shift->x()->type() == longType, "invalid input type");
} else {
#ifndef _LP64
if (shift->op() == Bytecodes::_ishl) {
assert(shift->x()->type() == intType, "invalid input type");
} else {
return false;
}
#else
return false;
#endif
}
// Constant shift value?
Constant* con = shift->y()->as_Constant();
if (con == NULL) return false;
// Well-known type and value?
IntConstant* val = con->type()->as_IntConstant();
assert(val != NULL, "Should be an int constant");
*index = shift->x();
int tmp_scale = val->value();
// Only scales 0..3 (1-, 2-, 4-, 8-byte elements) are accepted.
if (tmp_scale >= 0 && tmp_scale < 4) {
*log2_scale = tmp_scale;
return true;
} else {
return false;
}
}
// Case 2: a multiply where one operand is a constant power of two (1, 2, 4 or 8).
ArithmeticOp* arith = instr->as_ArithmeticOp();
if (arith != NULL) {
// See if either arg is a known constant
Constant* con = arith->x()->as_Constant();
if (con != NULL) {
*index = arith->y();
} else {
con = arith->y()->as_Constant();
if (con == NULL) return false;
*index = arith->x();
}
long const_value;
// Check for integer multiply
if (arith->op() == Bytecodes::_lmul) {
assert((*index)->type() == longType, "invalid input type");
LongConstant* val = con->type()->as_LongConstant();
assert(val != NULL, "expecting a long constant");
const_value = val->value();
} else {
#ifndef _LP64
if (arith->op() == Bytecodes::_imul) {
assert((*index)->type() == intType, "invalid input type");
IntConstant* val = con->type()->as_IntConstant();
assert(val != NULL, "expecting an int constant");
const_value = val->value();
} else {
return false;
}
#else
return false;
#endif
}
// Map the multiplier to a log2 scale; any other multiplier fails the match.
switch (const_value) {
case 1: *log2_scale = 0; return true;
case 2: *log2_scale = 1; return true;
case 4: *log2_scale = 2; return true;
case 8: *log2_scale = 3; return true;
default: return false;
}
}
// Unknown instruction sequence; don't touch it
return false;
}
// Decompose the address expression of a raw unsafe op into a
// (base + index << log2_scale) form.  Returns true if reshaping the
// expression is profitable (i.e. the root add is not pinned); when it
// returns true, *base, *index and *log2_scale always hold a valid
// decomposition (falling back to scale 0 when no shift/multiply is found).
static bool match(UnsafeRawOp* x,
Instruction** base,
Instruction** index,
int* log2_scale) {
ArithmeticOp* root = x->base()->as_ArithmeticOp();
if (root == NULL) return false;
// Limit ourselves to addition for now
if (root->op() != Bytecodes::_ladd) return false;
bool match_found = false;
// Try to find shift or scale op
if (match_index_and_scale(root->y(), index, log2_scale)) {
*base = root->x();
match_found = true;
} else if (match_index_and_scale(root->x(), index, log2_scale)) {
*base = root->y();
match_found = true;
} else if (NOT_LP64(root->y()->as_Convert() != NULL) LP64_ONLY(false)) {
// Skipping i2l works only on 32bit because of the implicit l2i that the unsafe performs.
// 64bit needs a real sign-extending conversion.
Convert* convert = root->y()->as_Convert();
if (convert->op() == Bytecodes::_i2l) {
assert(convert->value()->type() == intType, "should be an int");
// pick base and index, setting scale at 1
*base = root->x();
*index = convert->value();
*log2_scale = 0;
match_found = true;
}
}
// The default solution
if (!match_found) {
*base = root->x();
*index = root->y();
*log2_scale = 0;
}
// If the value is pinned then it will be always be computed so
// there's no profit to reshaping the expression.
return !root->is_pinned();
}
// Canonicalize a raw unsafe access: try to fold its address expression into
// the (base, index, log2_scale) form via match(); on failure the op is left
// exactly as it was.
void Canonicalizer::do_UnsafeRawOp(UnsafeRawOp* x) {
  Instruction* new_base  = NULL;
  Instruction* new_index = NULL;
  int new_scale = 0;
  if (!match(x, &new_base, &new_index, &new_scale)) {
    return;  // unrecognized or pinned address expression — leave untouched
  }
  x->set_base(new_base);
  x->set_index(new_index);
  x->set_log2_scale(new_scale);
  if (PrintUnsafeOptimization) {
    tty->print_cr("Canonicalizer: UnsafeRawOp id %d: base = id %d, index = id %d, log2_scale = %d",
                  x->id(), x->base()->id(), x->index()->id(), x->log2_scale());
  }
}
void Canonicalizer::do_RoundFP(RoundFP* x) {}
void Canonicalizer::do_UnsafeGetRaw(UnsafeGetRaw* x) { if (OptimizeUnsafes) do_UnsafeRawOp(x); }
void Canonicalizer::do_UnsafePutRaw(UnsafePutRaw* x) { if (OptimizeUnsafes) do_UnsafeRawOp(x); }
void Canonicalizer::do_UnsafeGetObject(UnsafeGetObject* x) {}
void Canonicalizer::do_UnsafePutObject(UnsafePutObject* x) {}
void Canonicalizer::do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) {}
void Canonicalizer::do_ProfileCall(ProfileCall* x) {}
void Canonicalizer::do_RoundFP (RoundFP* x) {}
void Canonicalizer::do_UnsafeGet (UnsafeGet* x) {}
void Canonicalizer::do_UnsafePut (UnsafePut* x) {}
void Canonicalizer::do_UnsafeGetAndSet(UnsafeGetAndSet* x) {}
void Canonicalizer::do_ProfileCall (ProfileCall* x) {}
void Canonicalizer::do_ProfileReturnType(ProfileReturnType* x) {}
void Canonicalizer::do_ProfileInvoke(ProfileInvoke* x) {}
void Canonicalizer::do_RuntimeCall(RuntimeCall* x) {}
void Canonicalizer::do_ProfileInvoke (ProfileInvoke* x) {}
void Canonicalizer::do_RuntimeCall (RuntimeCall* x) {}
void Canonicalizer::do_RangeCheckPredicate(RangeCheckPredicate* x) {}
#ifdef ASSERT
void Canonicalizer::do_Assert(Assert* x) {}
void Canonicalizer::do_Assert (Assert* x) {}
#endif
void Canonicalizer::do_MemBar(MemBar* x) {}
void Canonicalizer::do_MemBar (MemBar* x) {}

View File

@ -46,12 +46,6 @@ class Canonicalizer: InstructionVisitor {
#endif
void move_const_to_right(Op2* x);
void do_Op2(Op2* x);
void do_UnsafeRawOp(UnsafeRawOp* x);
void unsafe_raw_match(UnsafeRawOp* x,
Instruction** base,
Instruction** index,
int* scale);
public:
Canonicalizer(Compilation* c, Value x, int bci) : _compilation(c), _canonical(x), _bci(bci) {
@ -99,11 +93,9 @@ class Canonicalizer: InstructionVisitor {
virtual void do_OsrEntry (OsrEntry* x);
virtual void do_ExceptionObject(ExceptionObject* x);
virtual void do_RoundFP (RoundFP* x);
virtual void do_UnsafeGetRaw (UnsafeGetRaw* x);
virtual void do_UnsafePutRaw (UnsafePutRaw* x);
virtual void do_UnsafeGetObject(UnsafeGetObject* x);
virtual void do_UnsafePutObject(UnsafePutObject* x);
virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x);
virtual void do_UnsafeGet (UnsafeGet* x);
virtual void do_UnsafePut (UnsafePut* x);
virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x);
virtual void do_ProfileCall (ProfileCall* x);
virtual void do_ProfileReturnType (ProfileReturnType* x);
virtual void do_ProfileInvoke (ProfileInvoke* x);

View File

@ -3138,10 +3138,11 @@ void GraphBuilder::setup_osr_entry_block() {
// doesn't so pretend that the interpreter passed in null.
get = append(new Constant(objectNull));
} else {
get = append(new UnsafeGetRaw(as_BasicType(local->type()), e,
append(new Constant(new IntConstant(offset))),
0,
true /*unaligned*/, true /*wide*/));
Value off_val = append(new Constant(new IntConstant(offset)));
get = append(new UnsafeGet(as_BasicType(local->type()), e,
off_val,
false/*is_volatile*/,
true/*is_raw*/));
}
_state->store_local(index, get);
}
@ -3468,60 +3469,60 @@ void GraphBuilder::build_graph_for_intrinsic(ciMethod* callee, bool ignore_retur
// Some intrinsics need special IR nodes.
switch(id) {
case vmIntrinsics::_getReference : append_unsafe_get_obj(callee, T_OBJECT, false); return;
case vmIntrinsics::_getBoolean : append_unsafe_get_obj(callee, T_BOOLEAN, false); return;
case vmIntrinsics::_getByte : append_unsafe_get_obj(callee, T_BYTE, false); return;
case vmIntrinsics::_getShort : append_unsafe_get_obj(callee, T_SHORT, false); return;
case vmIntrinsics::_getChar : append_unsafe_get_obj(callee, T_CHAR, false); return;
case vmIntrinsics::_getInt : append_unsafe_get_obj(callee, T_INT, false); return;
case vmIntrinsics::_getLong : append_unsafe_get_obj(callee, T_LONG, false); return;
case vmIntrinsics::_getFloat : append_unsafe_get_obj(callee, T_FLOAT, false); return;
case vmIntrinsics::_getDouble : append_unsafe_get_obj(callee, T_DOUBLE, false); return;
case vmIntrinsics::_putReference : append_unsafe_put_obj(callee, T_OBJECT, false); return;
case vmIntrinsics::_putBoolean : append_unsafe_put_obj(callee, T_BOOLEAN, false); return;
case vmIntrinsics::_putByte : append_unsafe_put_obj(callee, T_BYTE, false); return;
case vmIntrinsics::_putShort : append_unsafe_put_obj(callee, T_SHORT, false); return;
case vmIntrinsics::_putChar : append_unsafe_put_obj(callee, T_CHAR, false); return;
case vmIntrinsics::_putInt : append_unsafe_put_obj(callee, T_INT, false); return;
case vmIntrinsics::_putLong : append_unsafe_put_obj(callee, T_LONG, false); return;
case vmIntrinsics::_putFloat : append_unsafe_put_obj(callee, T_FLOAT, false); return;
case vmIntrinsics::_putDouble : append_unsafe_put_obj(callee, T_DOUBLE, false); return;
case vmIntrinsics::_getShortUnaligned : append_unsafe_get_obj(callee, T_SHORT, false); return;
case vmIntrinsics::_getCharUnaligned : append_unsafe_get_obj(callee, T_CHAR, false); return;
case vmIntrinsics::_getIntUnaligned : append_unsafe_get_obj(callee, T_INT, false); return;
case vmIntrinsics::_getLongUnaligned : append_unsafe_get_obj(callee, T_LONG, false); return;
case vmIntrinsics::_putShortUnaligned : append_unsafe_put_obj(callee, T_SHORT, false); return;
case vmIntrinsics::_putCharUnaligned : append_unsafe_put_obj(callee, T_CHAR, false); return;
case vmIntrinsics::_putIntUnaligned : append_unsafe_put_obj(callee, T_INT, false); return;
case vmIntrinsics::_putLongUnaligned : append_unsafe_put_obj(callee, T_LONG, false); return;
case vmIntrinsics::_getReferenceVolatile : append_unsafe_get_obj(callee, T_OBJECT, true); return;
case vmIntrinsics::_getBooleanVolatile : append_unsafe_get_obj(callee, T_BOOLEAN, true); return;
case vmIntrinsics::_getByteVolatile : append_unsafe_get_obj(callee, T_BYTE, true); return;
case vmIntrinsics::_getShortVolatile : append_unsafe_get_obj(callee, T_SHORT, true); return;
case vmIntrinsics::_getCharVolatile : append_unsafe_get_obj(callee, T_CHAR, true); return;
case vmIntrinsics::_getIntVolatile : append_unsafe_get_obj(callee, T_INT, true); return;
case vmIntrinsics::_getLongVolatile : append_unsafe_get_obj(callee, T_LONG, true); return;
case vmIntrinsics::_getFloatVolatile : append_unsafe_get_obj(callee, T_FLOAT, true); return;
case vmIntrinsics::_getDoubleVolatile : append_unsafe_get_obj(callee, T_DOUBLE, true); return;
case vmIntrinsics::_putReferenceVolatile : append_unsafe_put_obj(callee, T_OBJECT, true); return;
case vmIntrinsics::_putBooleanVolatile : append_unsafe_put_obj(callee, T_BOOLEAN, true); return;
case vmIntrinsics::_putByteVolatile : append_unsafe_put_obj(callee, T_BYTE, true); return;
case vmIntrinsics::_putShortVolatile : append_unsafe_put_obj(callee, T_SHORT, true); return;
case vmIntrinsics::_putCharVolatile : append_unsafe_put_obj(callee, T_CHAR, true); return;
case vmIntrinsics::_putIntVolatile : append_unsafe_put_obj(callee, T_INT, true); return;
case vmIntrinsics::_putLongVolatile : append_unsafe_put_obj(callee, T_LONG, true); return;
case vmIntrinsics::_putFloatVolatile : append_unsafe_put_obj(callee, T_FLOAT, true); return;
case vmIntrinsics::_putDoubleVolatile : append_unsafe_put_obj(callee, T_DOUBLE, true); return;
case vmIntrinsics::_getReference : append_unsafe_get(callee, T_OBJECT, false); return;
case vmIntrinsics::_getBoolean : append_unsafe_get(callee, T_BOOLEAN, false); return;
case vmIntrinsics::_getByte : append_unsafe_get(callee, T_BYTE, false); return;
case vmIntrinsics::_getShort : append_unsafe_get(callee, T_SHORT, false); return;
case vmIntrinsics::_getChar : append_unsafe_get(callee, T_CHAR, false); return;
case vmIntrinsics::_getInt : append_unsafe_get(callee, T_INT, false); return;
case vmIntrinsics::_getLong : append_unsafe_get(callee, T_LONG, false); return;
case vmIntrinsics::_getFloat : append_unsafe_get(callee, T_FLOAT, false); return;
case vmIntrinsics::_getDouble : append_unsafe_get(callee, T_DOUBLE, false); return;
case vmIntrinsics::_putReference : append_unsafe_put(callee, T_OBJECT, false); return;
case vmIntrinsics::_putBoolean : append_unsafe_put(callee, T_BOOLEAN, false); return;
case vmIntrinsics::_putByte : append_unsafe_put(callee, T_BYTE, false); return;
case vmIntrinsics::_putShort : append_unsafe_put(callee, T_SHORT, false); return;
case vmIntrinsics::_putChar : append_unsafe_put(callee, T_CHAR, false); return;
case vmIntrinsics::_putInt : append_unsafe_put(callee, T_INT, false); return;
case vmIntrinsics::_putLong : append_unsafe_put(callee, T_LONG, false); return;
case vmIntrinsics::_putFloat : append_unsafe_put(callee, T_FLOAT, false); return;
case vmIntrinsics::_putDouble : append_unsafe_put(callee, T_DOUBLE, false); return;
case vmIntrinsics::_getShortUnaligned : append_unsafe_get(callee, T_SHORT, false); return;
case vmIntrinsics::_getCharUnaligned : append_unsafe_get(callee, T_CHAR, false); return;
case vmIntrinsics::_getIntUnaligned : append_unsafe_get(callee, T_INT, false); return;
case vmIntrinsics::_getLongUnaligned : append_unsafe_get(callee, T_LONG, false); return;
case vmIntrinsics::_putShortUnaligned : append_unsafe_put(callee, T_SHORT, false); return;
case vmIntrinsics::_putCharUnaligned : append_unsafe_put(callee, T_CHAR, false); return;
case vmIntrinsics::_putIntUnaligned : append_unsafe_put(callee, T_INT, false); return;
case vmIntrinsics::_putLongUnaligned : append_unsafe_put(callee, T_LONG, false); return;
case vmIntrinsics::_getReferenceVolatile : append_unsafe_get(callee, T_OBJECT, true); return;
case vmIntrinsics::_getBooleanVolatile : append_unsafe_get(callee, T_BOOLEAN, true); return;
case vmIntrinsics::_getByteVolatile : append_unsafe_get(callee, T_BYTE, true); return;
case vmIntrinsics::_getShortVolatile : append_unsafe_get(callee, T_SHORT, true); return;
case vmIntrinsics::_getCharVolatile : append_unsafe_get(callee, T_CHAR, true); return;
case vmIntrinsics::_getIntVolatile : append_unsafe_get(callee, T_INT, true); return;
case vmIntrinsics::_getLongVolatile : append_unsafe_get(callee, T_LONG, true); return;
case vmIntrinsics::_getFloatVolatile : append_unsafe_get(callee, T_FLOAT, true); return;
case vmIntrinsics::_getDoubleVolatile : append_unsafe_get(callee, T_DOUBLE, true); return;
case vmIntrinsics::_putReferenceVolatile : append_unsafe_put(callee, T_OBJECT, true); return;
case vmIntrinsics::_putBooleanVolatile : append_unsafe_put(callee, T_BOOLEAN, true); return;
case vmIntrinsics::_putByteVolatile : append_unsafe_put(callee, T_BYTE, true); return;
case vmIntrinsics::_putShortVolatile : append_unsafe_put(callee, T_SHORT, true); return;
case vmIntrinsics::_putCharVolatile : append_unsafe_put(callee, T_CHAR, true); return;
case vmIntrinsics::_putIntVolatile : append_unsafe_put(callee, T_INT, true); return;
case vmIntrinsics::_putLongVolatile : append_unsafe_put(callee, T_LONG, true); return;
case vmIntrinsics::_putFloatVolatile : append_unsafe_put(callee, T_FLOAT, true); return;
case vmIntrinsics::_putDoubleVolatile : append_unsafe_put(callee, T_DOUBLE, true); return;
case vmIntrinsics::_compareAndSetLong:
case vmIntrinsics::_compareAndSetInt:
case vmIntrinsics::_compareAndSetReference : append_unsafe_CAS(callee); return;
case vmIntrinsics::_getAndAddInt:
case vmIntrinsics::_getAndAddLong : append_unsafe_get_and_set_obj(callee, true); return;
case vmIntrinsics::_getAndSetInt :
case vmIntrinsics::_getAndSetLong :
case vmIntrinsics::_getAndSetReference : append_unsafe_get_and_set_obj(callee, false); return;
case vmIntrinsics::_getCharStringU : append_char_access(callee, false); return;
case vmIntrinsics::_putCharStringU : append_char_access(callee, true); return;
case vmIntrinsics::_getAndAddLong : append_unsafe_get_and_set(callee, true); return;
case vmIntrinsics::_getAndSetInt :
case vmIntrinsics::_getAndSetLong :
case vmIntrinsics::_getAndSetReference : append_unsafe_get_and_set(callee, false); return;
case vmIntrinsics::_getCharStringU : append_char_access(callee, false); return;
case vmIntrinsics::_putCharStringU : append_char_access(callee, true); return;
default:
break;
}
@ -4199,20 +4200,20 @@ void GraphBuilder::pop_scope_for_jsr() {
_scope_data = scope_data()->parent();
}
void GraphBuilder::append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile) {
void GraphBuilder::append_unsafe_get(ciMethod* callee, BasicType t, bool is_volatile) {
Values* args = state()->pop_arguments(callee->arg_size());
null_check(args->at(0));
Instruction* offset = args->at(2);
#ifndef _LP64
offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
#endif
Instruction* op = append(new UnsafeGetObject(t, args->at(1), offset, is_volatile));
Instruction* op = append(new UnsafeGet(t, args->at(1), offset, is_volatile));
push(op->type(), op);
compilation()->set_has_unsafe_access(true);
}
void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile) {
void GraphBuilder::append_unsafe_put(ciMethod* callee, BasicType t, bool is_volatile) {
Values* args = state()->pop_arguments(callee->arg_size());
null_check(args->at(0));
Instruction* offset = args->at(2);
@ -4224,29 +4225,11 @@ void GraphBuilder::append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_
Value mask = append(new Constant(new IntConstant(1)));
val = append(new LogicOp(Bytecodes::_iand, val, mask));
}
Instruction* op = append(new UnsafePutObject(t, args->at(1), offset, val, is_volatile));
Instruction* op = append(new UnsafePut(t, args->at(1), offset, val, is_volatile));
compilation()->set_has_unsafe_access(true);
kill_all();
}
// Build HIR for a raw (address-only) Unsafe get intrinsic: pops the call
// arguments, emits an UnsafeGetRaw of type t on the long address in
// args->at(1) and pushes the loaded value.
void GraphBuilder::append_unsafe_get_raw(ciMethod* callee, BasicType t) {
Values* args = state()->pop_arguments(callee->arg_size());
null_check(args->at(0));  // the Unsafe receiver itself must be non-null
Instruction* op = append(new UnsafeGetRaw(t, args->at(1), false));
push(op->type(), op);
compilation()->set_has_unsafe_access(true);  // mark the compile as containing unsafe accesses
}
// Build HIR for a raw (address + value) Unsafe put intrinsic: emits an
// UnsafePutRaw of type t storing args->at(2) at the long address args->at(1).
void GraphBuilder::append_unsafe_put_raw(ciMethod* callee, BasicType t) {
Values* args = state()->pop_arguments(callee->arg_size());
null_check(args->at(0));  // the Unsafe receiver itself must be non-null
Instruction* op = append(new UnsafePutRaw(t, args->at(1), args->at(2)));
compilation()->set_has_unsafe_access(true);  // mark the compile as containing unsafe accesses
}
void GraphBuilder::append_unsafe_CAS(ciMethod* callee) {
ValueStack* state_before = copy_state_for_exception();
ValueType* result_type = as_ValueType(callee->return_type());
@ -4334,7 +4317,7 @@ void GraphBuilder::print_inlining(ciMethod* callee, const char* msg, bool succes
}
}
void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add) {
void GraphBuilder::append_unsafe_get_and_set(ciMethod* callee, bool is_add) {
Values* args = state()->pop_arguments(callee->arg_size());
BasicType t = callee->return_type()->basic_type();
null_check(args->at(0));
@ -4342,7 +4325,7 @@ void GraphBuilder::append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add)
#ifndef _LP64
offset = append(new Convert(Bytecodes::_l2i, offset, as_ValueType(T_INT)));
#endif
Instruction* op = append(new UnsafeGetAndSetObject(t, args->at(1), offset, args->at(3), is_add));
Instruction* op = append(new UnsafeGetAndSet(t, args->at(1), offset, args->at(3), is_add));
compilation()->set_has_unsafe_access(true);
kill_all();
push(op->type(), op);

View File

@ -373,12 +373,10 @@ class GraphBuilder {
void pop_scope();
void pop_scope_for_jsr();
void append_unsafe_get_obj(ciMethod* callee, BasicType t, bool is_volatile);
void append_unsafe_put_obj(ciMethod* callee, BasicType t, bool is_volatile);
void append_unsafe_get_raw(ciMethod* callee, BasicType t);
void append_unsafe_put_raw(ciMethod* callee, BasicType t);
void append_unsafe_get(ciMethod* callee, BasicType t, bool is_volatile);
void append_unsafe_put(ciMethod* callee, BasicType t, bool is_volatile);
void append_unsafe_CAS(ciMethod* callee);
void append_unsafe_get_and_set_obj(ciMethod* callee, bool is_add);
void append_unsafe_get_and_set(ciMethod* callee, bool is_add);
void append_char_access(ciMethod* callee, bool is_store);
void print_inlining(ciMethod* callee, const char* msg, bool success = true);

View File

@ -95,13 +95,9 @@ class Throw;
class Base;
class RoundFP;
class UnsafeOp;
class UnsafeRawOp;
class UnsafeGetRaw;
class UnsafePutRaw;
class UnsafeObjectOp;
class UnsafeGetObject;
class UnsafePutObject;
class UnsafeGetAndSetObject;
class UnsafeGet;
class UnsafePut;
class UnsafeGetAndSet;
class ProfileCall;
class ProfileReturnType;
class ProfileInvoke;
@ -195,11 +191,9 @@ class InstructionVisitor: public StackObj {
virtual void do_OsrEntry (OsrEntry* x) = 0;
virtual void do_ExceptionObject(ExceptionObject* x) = 0;
virtual void do_RoundFP (RoundFP* x) = 0;
virtual void do_UnsafeGetRaw (UnsafeGetRaw* x) = 0;
virtual void do_UnsafePutRaw (UnsafePutRaw* x) = 0;
virtual void do_UnsafeGetObject(UnsafeGetObject* x) = 0;
virtual void do_UnsafePutObject(UnsafePutObject* x) = 0;
virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) = 0;
virtual void do_UnsafeGet (UnsafeGet* x) = 0;
virtual void do_UnsafePut (UnsafePut* x) = 0;
virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x) = 0;
virtual void do_ProfileCall (ProfileCall* x) = 0;
virtual void do_ProfileReturnType (ProfileReturnType* x) = 0;
virtual void do_ProfileInvoke (ProfileInvoke* x) = 0;
@ -2193,13 +2187,16 @@ LEAF(RoundFP, Instruction)
BASE(UnsafeOp, Instruction)
private:
BasicType _basic_type; // ValueType can not express byte-sized integers
Value _object; // Object to be fetched from or mutated
Value _offset; // Offset within object
bool _is_volatile; // true if volatile - dl/JSR166
BasicType _basic_type; // ValueType can not express byte-sized integers
protected:
// creation
UnsafeOp(BasicType basic_type, bool is_put)
: Instruction(is_put ? voidType : as_ValueType(basic_type))
, _basic_type(basic_type)
UnsafeOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
: Instruction(is_put ? voidType : as_ValueType(basic_type)),
_object(object), _offset(offset), _is_volatile(is_volatile), _basic_type(basic_type)
{
//Note: Unsafe ops are not not guaranteed to throw NPE.
// Convservatively, Unsafe operations must be pinned though we could be
@ -2210,148 +2207,42 @@ BASE(UnsafeOp, Instruction)
public:
// accessors
BasicType basic_type() { return _basic_type; }
// generic
virtual void input_values_do(ValueVisitor* f) { }
};
BASE(UnsafeRawOp, UnsafeOp)
private:
Value _base; // Base address (a Java long)
Value _index; // Index if computed by optimizer; initialized to NULL
int _log2_scale; // Scale factor: 0, 1, 2, or 3.
// Indicates log2 of number of bytes (1, 2, 4, or 8)
// to scale index by.
protected:
UnsafeRawOp(BasicType basic_type, Value addr, bool is_put)
: UnsafeOp(basic_type, is_put)
, _base(addr)
, _index(NULL)
, _log2_scale(0)
{
// Can not use ASSERT_VALUES because index may be NULL
assert(addr != NULL && addr->type()->is_long(), "just checking");
}
UnsafeRawOp(BasicType basic_type, Value base, Value index, int log2_scale, bool is_put)
: UnsafeOp(basic_type, is_put)
, _base(base)
, _index(index)
, _log2_scale(log2_scale)
{
}
public:
// accessors
Value base() { return _base; }
Value index() { return _index; }
bool has_index() { return (_index != NULL); }
int log2_scale() { return _log2_scale; }
// setters
void set_base (Value base) { _base = base; }
void set_index(Value index) { _index = index; }
void set_log2_scale(int log2_scale) { _log2_scale = log2_scale; }
// generic
virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
f->visit(&_base);
if (has_index()) f->visit(&_index); }
};
LEAF(UnsafeGetRaw, UnsafeRawOp)
private:
bool _may_be_unaligned, _is_wide; // For OSREntry
public:
UnsafeGetRaw(BasicType basic_type, Value addr, bool may_be_unaligned, bool is_wide = false)
: UnsafeRawOp(basic_type, addr, false) {
_may_be_unaligned = may_be_unaligned;
_is_wide = is_wide;
}
UnsafeGetRaw(BasicType basic_type, Value base, Value index, int log2_scale, bool may_be_unaligned, bool is_wide = false)
: UnsafeRawOp(basic_type, base, index, log2_scale, false) {
_may_be_unaligned = may_be_unaligned;
_is_wide = is_wide;
}
bool may_be_unaligned() { return _may_be_unaligned; }
bool is_wide() { return _is_wide; }
};
LEAF(UnsafePutRaw, UnsafeRawOp)
private:
Value _value; // Value to be stored
public:
UnsafePutRaw(BasicType basic_type, Value addr, Value value)
: UnsafeRawOp(basic_type, addr, true)
, _value(value)
{
assert(value != NULL, "just checking");
ASSERT_VALUES
}
UnsafePutRaw(BasicType basic_type, Value base, Value index, int log2_scale, Value value)
: UnsafeRawOp(basic_type, base, index, log2_scale, true)
, _value(value)
{
assert(value != NULL, "just checking");
ASSERT_VALUES
}
// accessors
Value value() { return _value; }
// generic
virtual void input_values_do(ValueVisitor* f) { UnsafeRawOp::input_values_do(f);
f->visit(&_value); }
};
BASE(UnsafeObjectOp, UnsafeOp)
private:
Value _object; // Object to be fetched from or mutated
Value _offset; // Offset within object
bool _is_volatile; // true if volatile - dl/JSR166
public:
UnsafeObjectOp(BasicType basic_type, Value object, Value offset, bool is_put, bool is_volatile)
: UnsafeOp(basic_type, is_put), _object(object), _offset(offset), _is_volatile(is_volatile)
{
}
// accessors
Value object() { return _object; }
Value offset() { return _offset; }
bool is_volatile() { return _is_volatile; }
// generic
virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
f->visit(&_object);
f->visit(&_offset); }
virtual void input_values_do(ValueVisitor* f) { f->visit(&_object);
f->visit(&_offset); }
};
LEAF(UnsafeGetObject, UnsafeObjectOp)
LEAF(UnsafeGet, UnsafeOp)
private:
bool _is_raw;
public:
UnsafeGetObject(BasicType basic_type, Value object, Value offset, bool is_volatile)
: UnsafeObjectOp(basic_type, object, offset, false, is_volatile)
UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile)
: UnsafeOp(basic_type, object, offset, false, is_volatile)
{
ASSERT_VALUES
_is_raw = false;
}
UnsafeGet(BasicType basic_type, Value object, Value offset, bool is_volatile, bool is_raw)
: UnsafeOp(basic_type, object, offset, false, is_volatile), _is_raw(is_raw)
{
ASSERT_VALUES
}
// accessors
bool is_raw() { return _is_raw; }
};
LEAF(UnsafePutObject, UnsafeObjectOp)
LEAF(UnsafePut, UnsafeOp)
private:
Value _value; // Value to be stored
public:
UnsafePutObject(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
: UnsafeObjectOp(basic_type, object, offset, true, is_volatile)
UnsafePut(BasicType basic_type, Value object, Value offset, Value value, bool is_volatile)
: UnsafeOp(basic_type, object, offset, true, is_volatile)
, _value(value)
{
ASSERT_VALUES
@ -2361,17 +2252,17 @@ LEAF(UnsafePutObject, UnsafeObjectOp)
Value value() { return _value; }
// generic
virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
f->visit(&_value); }
};
LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
LEAF(UnsafeGetAndSet, UnsafeOp)
private:
Value _value; // Value to be stored
bool _is_add;
public:
UnsafeGetAndSetObject(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
: UnsafeObjectOp(basic_type, object, offset, false, false)
UnsafeGetAndSet(BasicType basic_type, Value object, Value offset, Value value, bool is_add)
: UnsafeOp(basic_type, object, offset, false, false)
, _value(value)
, _is_add(is_add)
{
@ -2383,7 +2274,7 @@ LEAF(UnsafeGetAndSetObject, UnsafeObjectOp)
Value value() { return _value; }
// generic
virtual void input_values_do(ValueVisitor* f) { UnsafeObjectOp::input_values_do(f);
virtual void input_values_do(ValueVisitor* f) { UnsafeOp::input_values_do(f);
f->visit(&_value); }
};

View File

@ -263,22 +263,6 @@ void InstructionPrinter::print_inline_level(BlockBegin* block) {
void InstructionPrinter::print_unsafe_op(UnsafeOp* op, const char* name) {
output()->print("%s", name);
output()->print(".(");
}
// Print a raw unsafe op as "Name.(base <v>[, index <v>, log2_scale <n>...";
// the caller is responsible for printing the closing ')'.
void InstructionPrinter::print_unsafe_raw_op(UnsafeRawOp* op, const char* name) {
print_unsafe_op(op, name);
output()->print("base ");
print_value(op->base());
if (op->has_index()) {
output()->print(", index "); print_value(op->index());
output()->print(", log2_scale %d", op->log2_scale());
}
}
void InstructionPrinter::print_unsafe_object_op(UnsafeObjectOp* op, const char* name) {
print_unsafe_op(op, name);
print_value(op->object());
output()->print(", ");
print_value(op->offset());
@ -809,36 +793,20 @@ void InstructionPrinter::do_RoundFP(RoundFP* x) {
print_value(x->input());
}
void InstructionPrinter::do_UnsafeGetRaw(UnsafeGetRaw* x) {
print_unsafe_raw_op(x, "UnsafeGetRaw");
void InstructionPrinter::do_UnsafeGet(UnsafeGet* x) {
print_unsafe_op(x, x->is_raw() ? "UnsafeGet (raw)" : "UnsafeGet");
output()->put(')');
}
void InstructionPrinter::do_UnsafePutRaw(UnsafePutRaw* x) {
print_unsafe_raw_op(x, "UnsafePutRaw");
void InstructionPrinter::do_UnsafePut(UnsafePut* x) {
print_unsafe_op(x, "UnsafePut");
output()->print(", value ");
print_value(x->value());
output()->put(')');
}
void InstructionPrinter::do_UnsafeGetObject(UnsafeGetObject* x) {
print_unsafe_object_op(x, "UnsafeGetObject");
output()->put(')');
}
void InstructionPrinter::do_UnsafePutObject(UnsafePutObject* x) {
print_unsafe_object_op(x, "UnsafePutObject");
output()->print(", value ");
print_value(x->value());
output()->put(')');
}
void InstructionPrinter::do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) {
print_unsafe_object_op(x, x->is_add()?"UnsafeGetAndSetObject (add)":"UnsafeGetAndSetObject");
void InstructionPrinter::do_UnsafeGetAndSet(UnsafeGetAndSet* x) {
print_unsafe_op(x, x->is_add()?"UnsafeGetAndSet (add)":"UnsafeGetAndSet");
output()->print(", value ");
print_value(x->value());
output()->put(')');

View File

@ -74,8 +74,6 @@ class InstructionPrinter: public InstructionVisitor {
void print_stack(ValueStack* stack);
void print_inline_level(BlockBegin* block);
void print_unsafe_op(UnsafeOp* op, const char* name);
void print_unsafe_raw_op(UnsafeRawOp* op, const char* name);
void print_unsafe_object_op(UnsafeObjectOp* op, const char* name);
void print_phi(int i, Value v, BlockBegin* b);
void print_alias(Value v);
@ -123,11 +121,9 @@ class InstructionPrinter: public InstructionVisitor {
virtual void do_OsrEntry (OsrEntry* x);
virtual void do_ExceptionObject(ExceptionObject* x);
virtual void do_RoundFP (RoundFP* x);
virtual void do_UnsafeGetRaw (UnsafeGetRaw* x);
virtual void do_UnsafePutRaw (UnsafePutRaw* x);
virtual void do_UnsafeGetObject(UnsafeGetObject* x);
virtual void do_UnsafePutObject(UnsafePutObject* x);
virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x);
virtual void do_UnsafeGet (UnsafeGet* x);
virtual void do_UnsafePut (UnsafePut* x);
virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x);
virtual void do_ProfileCall (ProfileCall* x);
virtual void do_ProfileReturnType (ProfileReturnType* x);
virtual void do_ProfileInvoke (ProfileInvoke* x);

View File

@ -2088,189 +2088,8 @@ void LIRGenerator::do_RoundFP(RoundFP* x) {
}
}
// Here UnsafeGetRaw may have x->base() and x->index() be int or long
// on both 64 and 32 bits. Expecting x->base() to be always long on 64bit.
// Generates LIR for an UnsafeGetRaw: loads a value of x->basic_type() from
// the raw address base() plus an optional (possibly scaled) index().
// Per the comment above this function, base/index may be int or long on
// 32-bit; base is expected to always be long on 64-bit.
void LIRGenerator::do_UnsafeGetRaw(UnsafeGetRaw* x) {
LIRItem base(x->base(), this);
LIRItem idx(this);
base.load_item();
if (x->has_index()) {
idx.set_instruction(x->index());
// Keep constant indices as constants: a constant index is folded into the
// address as an immediate displacement in the is_constant() path below.
idx.load_nonconstant();
}
// Result register for the loaded value.
LIR_Opr reg = rlock_result(x, x->basic_type());
int log2_scale = 0;
if (x->has_index()) {
log2_scale = x->log2_scale();
}
assert(!x->has_index() || idx.value() == x->index(), "should match");
LIR_Opr base_op = base.result();
LIR_Opr index_op = idx.result();
#ifndef _LP64
// 32-bit: narrow any long base/index down to int (l2i) so the address
// arithmetic below operates on 32-bit operands.
if (base_op->type() == T_LONG) {
base_op = new_register(T_INT);
__ convert(Bytecodes::_l2i, base.result(), base_op);
}
if (x->has_index()) {
if (index_op->type() == T_LONG) {
LIR_Opr long_index_op = index_op;
if (index_op->is_constant()) {
// Materialize the constant in a register first so it can be converted.
long_index_op = new_register(T_LONG);
__ move(index_op, long_index_op);
}
index_op = new_register(T_INT);
__ convert(Bytecodes::_l2i, long_index_op, index_op);
} else {
assert(x->index()->type()->tag() == intTag, "must be");
}
}
// At this point base and index should be all ints.
assert(base_op->type() == T_INT && !base_op->is_constant(), "base should be an non-constant int");
assert(!x->has_index() || index_op->type() == T_INT, "index should be an int");
#else
// 64-bit: widen a non-constant int index to long (i2l); a constant int may
// stay an int immediate (see the explanatory comment below).
if (x->has_index()) {
if (index_op->type() == T_INT) {
if (!index_op->is_constant()) {
index_op = new_register(T_LONG);
__ convert(Bytecodes::_i2l, idx.result(), index_op);
}
} else {
assert(index_op->type() == T_LONG, "must be");
if (index_op->is_constant()) {
// A long constant cannot stay an immediate; move it into a register.
index_op = new_register(T_LONG);
__ move(idx.result(), index_op);
}
}
}
// At this point base is a long non-constant
// Index is a long register or a int constant.
// We allow the constant to stay an int because that would allow us a more compact encoding by
// embedding an immediate offset in the address expression. If we have a long constant, we have to
// move it into a register first.
assert(base_op->type() == T_LONG && !base_op->is_constant(), "base must be a long non-constant");
assert(!x->has_index() || (index_op->type() == T_INT && index_op->is_constant()) ||
(index_op->type() == T_LONG && !index_op->is_constant()), "unexpected index type");
#endif
BasicType dst_type = x->basic_type();
LIR_Address* addr;
if (index_op->is_constant()) {
// Constant index: fold it into the address as an immediate displacement.
assert(log2_scale == 0, "must not have a scale");
assert(index_op->type() == T_INT, "only int constants supported");
addr = new LIR_Address(base_op, index_op->as_jint(), dst_type);
} else {
#ifdef X86
// x86 addressing modes can embed the scale directly in the address.
addr = new LIR_Address(base_op, index_op, LIR_Address::Scale(log2_scale), 0, dst_type);
#elif defined(GENERATE_ADDRESS_IS_PREFERRED)
// Platforms defining this macro build the address via the
// platform-specific generate_address() helper instead.
addr = generate_address(base_op, index_op, log2_scale, 0, dst_type);
#else
if (index_op->is_illegal() || log2_scale == 0) {
addr = new LIR_Address(base_op, index_op, dst_type);
} else {
// No embedded scale available: shift the index explicitly first.
LIR_Opr tmp = new_pointer_register();
__ shift_left(index_op, log2_scale, tmp);
addr = new LIR_Address(base_op, tmp, dst_type);
}
#endif
}
// Potentially-unaligned 64-bit loads use unaligned_move; wide object loads
// use move_wide; everything else is a plain move.
if (x->may_be_unaligned() && (dst_type == T_LONG || dst_type == T_DOUBLE)) {
__ unaligned_move(addr, reg);
} else {
if (dst_type == T_OBJECT && x->is_wide()) {
__ move_wide(addr, reg);
} else {
__ move(addr, reg);
}
}
}
// Generates LIR for an UnsafePutRaw: stores value() of x->basic_type() to
// the raw address base() plus an optional scaled index(). Produces no result.
void LIRGenerator::do_UnsafePutRaw(UnsafePutRaw* x) {
int log2_scale = 0;
BasicType type = x->basic_type();
if (x->has_index()) {
log2_scale = x->log2_scale();
}
LIRItem base(x->base(), this);
LIRItem value(x->value(), this);
LIRItem idx(this);
base.load_item();
if (x->has_index()) {
idx.set_instruction(x->index());
idx.load_item();
}
if (type == T_BYTE || type == T_BOOLEAN) {
// Byte/boolean values go through load_byte_item — presumably to obtain a
// byte-addressable register; confirm in LIRItem.
value.load_byte_item();
} else {
value.load_item();
}
// A raw store has no result value.
set_no_result(x);
LIR_Opr base_op = base.result();
LIR_Opr index_op = idx.result();
#ifdef GENERATE_ADDRESS_IS_PREFERRED
// Platforms defining this macro build the address via the
// platform-specific generate_address() helper.
LIR_Address* addr = generate_address(base_op, index_op, log2_scale, 0, x->basic_type());
#else
#ifndef _LP64
// 32-bit: narrow any long base/index down to int (l2i).
if (base_op->type() == T_LONG) {
base_op = new_register(T_INT);
__ convert(Bytecodes::_l2i, base.result(), base_op);
}
if (x->has_index()) {
if (index_op->type() == T_LONG) {
index_op = new_register(T_INT);
__ convert(Bytecodes::_l2i, idx.result(), index_op);
}
}
// At this point base and index should be all ints and not constants
assert(base_op->type() == T_INT && !base_op->is_constant(), "base should be an non-constant int");
assert(!x->has_index() || (index_op->type() == T_INT && !index_op->is_constant()), "index should be an non-constant int");
#else
// 64-bit: widen an int index to long (i2l) before forming the address.
if (x->has_index()) {
if (index_op->type() == T_INT) {
index_op = new_register(T_LONG);
__ convert(Bytecodes::_i2l, idx.result(), index_op);
}
}
// At this point base and index are long and non-constant
assert(base_op->type() == T_LONG && !base_op->is_constant(), "base must be a non-constant long");
assert(!x->has_index() || (index_op->type() == T_LONG && !index_op->is_constant()), "index must be a non-constant long");
#endif
if (log2_scale != 0) {
// Apply the scale with an explicit shift; on two-operand-form targets the
// index must first be copied into the destination register of the shift.
// temporary fix (platform dependent code without shift on Intel would be better)
// TODO: ARM also allows embedded shift in the address
LIR_Opr tmp = new_pointer_register();
if (TwoOperandLIRForm) {
__ move(index_op, tmp);
index_op = tmp;
}
__ shift_left(index_op, log2_scale, tmp);
if (!TwoOperandLIRForm) {
index_op = tmp;
}
}
LIR_Address* addr = new LIR_Address(base_op, index_op, x->basic_type());
#endif // !GENERATE_ADDRESS_IS_PREFERRED
__ move(value.result(), addr);
}
void LIRGenerator::do_UnsafeGetObject(UnsafeGetObject* x) {
void LIRGenerator::do_UnsafeGet(UnsafeGet* x) {
BasicType type = x->basic_type();
LIRItem src(x->object(), this);
LIRItem off(x->offset(), this);
@ -2291,12 +2110,28 @@ void LIRGenerator::do_UnsafeGetObject(UnsafeGetObject* x) {
}
LIR_Opr result = rlock_result(x, type);
access_load_at(decorators, type,
src, off.result(), result);
if (!x->is_raw()) {
access_load_at(decorators, type, src, off.result(), result);
} else {
// Currently it is only used in GraphBuilder::setup_osr_entry_block.
// It reads the value from [src + offset] directly.
#ifdef _LP64
LIR_Opr offset = new_register(T_LONG);
__ convert(Bytecodes::_i2l, off.result(), offset);
#else
LIR_Opr offset = off.result();
#endif
LIR_Address* addr = new LIR_Address(src.result(), offset, type);
if (type == T_LONG || type == T_DOUBLE) {
__ unaligned_move(addr, result);
} else {
access_load(IN_NATIVE, type, LIR_OprFact::address(addr), result);
}
}
}
void LIRGenerator::do_UnsafePutObject(UnsafePutObject* x) {
void LIRGenerator::do_UnsafePut(UnsafePut* x) {
BasicType type = x->basic_type();
LIRItem src(x->object(), this);
LIRItem off(x->offset(), this);
@ -2322,7 +2157,7 @@ void LIRGenerator::do_UnsafePutObject(UnsafePutObject* x) {
access_store_at(decorators, type, src, off.result(), data.result());
}
void LIRGenerator::do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) {
void LIRGenerator::do_UnsafeGetAndSet(UnsafeGetAndSet* x) {
BasicType type = x->basic_type();
LIRItem src(x->object(), this);
LIRItem off(x->offset(), this);

View File

@ -581,11 +581,9 @@ class LIRGenerator: public InstructionVisitor, public BlockClosure {
virtual void do_OsrEntry (OsrEntry* x);
virtual void do_ExceptionObject(ExceptionObject* x);
virtual void do_RoundFP (RoundFP* x);
virtual void do_UnsafeGetRaw (UnsafeGetRaw* x);
virtual void do_UnsafePutRaw (UnsafePutRaw* x);
virtual void do_UnsafeGetObject(UnsafeGetObject* x);
virtual void do_UnsafePutObject(UnsafePutObject* x);
virtual void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x);
virtual void do_UnsafeGet (UnsafeGet* x);
virtual void do_UnsafePut (UnsafePut* x);
virtual void do_UnsafeGetAndSet(UnsafeGetAndSet* x);
virtual void do_ProfileCall (ProfileCall* x);
virtual void do_ProfileReturnType (ProfileReturnType* x);
virtual void do_ProfileInvoke (ProfileInvoke* x);

View File

@ -529,11 +529,9 @@ public:
void do_OsrEntry (OsrEntry* x);
void do_ExceptionObject(ExceptionObject* x);
void do_RoundFP (RoundFP* x);
void do_UnsafeGetRaw (UnsafeGetRaw* x);
void do_UnsafePutRaw (UnsafePutRaw* x);
void do_UnsafeGetObject(UnsafeGetObject* x);
void do_UnsafePutObject(UnsafePutObject* x);
void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x);
void do_UnsafeGet (UnsafeGet* x);
void do_UnsafePut (UnsafePut* x);
void do_UnsafeGetAndSet(UnsafeGetAndSet* x);
void do_ProfileCall (ProfileCall* x);
void do_ProfileReturnType (ProfileReturnType* x);
void do_ProfileInvoke (ProfileInvoke* x);
@ -714,11 +712,9 @@ void NullCheckVisitor::do_Base (Base* x) {}
void NullCheckVisitor::do_OsrEntry (OsrEntry* x) {}
void NullCheckVisitor::do_ExceptionObject(ExceptionObject* x) { nce()->handle_ExceptionObject(x); }
void NullCheckVisitor::do_RoundFP (RoundFP* x) {}
void NullCheckVisitor::do_UnsafeGetRaw (UnsafeGetRaw* x) {}
void NullCheckVisitor::do_UnsafePutRaw (UnsafePutRaw* x) {}
void NullCheckVisitor::do_UnsafeGetObject(UnsafeGetObject* x) {}
void NullCheckVisitor::do_UnsafePutObject(UnsafePutObject* x) {}
void NullCheckVisitor::do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) {}
void NullCheckVisitor::do_UnsafeGet (UnsafeGet* x) {}
void NullCheckVisitor::do_UnsafePut (UnsafePut* x) {}
void NullCheckVisitor::do_UnsafeGetAndSet(UnsafeGetAndSet* x) {}
void NullCheckVisitor::do_ProfileCall (ProfileCall* x) { nce()->clear_last_explicit_null_check();
nce()->handle_ProfileCall(x); }
void NullCheckVisitor::do_ProfileReturnType (ProfileReturnType* x) { nce()->handle_ProfileReturnType(x); }

View File

@ -130,8 +130,6 @@ public:
void do_MonitorEnter (MonitorEnter* x) { /* nothing to do */ };
void do_MonitorExit (MonitorExit* x) { /* nothing to do */ };
void do_Invoke (Invoke* x) { /* nothing to do */ };
void do_UnsafePutRaw (UnsafePutRaw* x) { /* nothing to do */ };
void do_UnsafePutObject(UnsafePutObject* x) { /* nothing to do */ };
void do_Intrinsic (Intrinsic* x) { /* nothing to do */ };
void do_Local (Local* x) { /* nothing to do */ };
void do_LoadField (LoadField* x) { /* nothing to do */ };
@ -160,9 +158,9 @@ public:
void do_OsrEntry (OsrEntry* x) { /* nothing to do */ };
void do_ExceptionObject(ExceptionObject* x) { /* nothing to do */ };
void do_RoundFP (RoundFP* x) { /* nothing to do */ };
void do_UnsafeGetRaw (UnsafeGetRaw* x) { /* nothing to do */ };
void do_UnsafeGetObject(UnsafeGetObject* x) { /* nothing to do */ };
void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) { /* nothing to do */ };
void do_UnsafePut (UnsafePut* x) { /* nothing to do */ };
void do_UnsafeGet (UnsafeGet* x) { /* nothing to do */ };
void do_UnsafeGetAndSet(UnsafeGetAndSet* x) { /* nothing to do */ };
void do_ProfileCall (ProfileCall* x) { /* nothing to do */ };
void do_ProfileReturnType (ProfileReturnType* x) { /* nothing to do */ };
void do_ProfileInvoke (ProfileInvoke* x) { /* nothing to do */ };

View File

@ -154,11 +154,9 @@ class ValueNumberingVisitor: public InstructionVisitor {
void do_MonitorEnter (MonitorEnter* x) { kill_memory(); }
void do_MonitorExit (MonitorExit* x) { kill_memory(); }
void do_Invoke (Invoke* x) { kill_memory(); }
void do_UnsafePutRaw (UnsafePutRaw* x) { kill_memory(); }
void do_UnsafePutObject(UnsafePutObject* x) { kill_memory(); }
void do_UnsafeGetAndSetObject(UnsafeGetAndSetObject* x) { kill_memory(); }
void do_UnsafeGetRaw (UnsafeGetRaw* x) { /* nothing to do */ }
void do_UnsafeGetObject(UnsafeGetObject* x) {
void do_UnsafePut (UnsafePut* x) { kill_memory(); }
void do_UnsafeGetAndSet(UnsafeGetAndSet* x) { kill_memory(); }
void do_UnsafeGet (UnsafeGet* x) {
if (x->is_volatile()) { // the JMM requires this
kill_memory();
}

View File

@ -294,12 +294,6 @@
develop(bool, TraceFPURegisterUsage, false, \
"Trace usage of FPU registers at start of blocks (intel only)") \
\
develop(bool, OptimizeUnsafes, true, \
"Optimize raw unsafe ops") \
\
develop(bool, PrintUnsafeOptimization, false, \
"Print optimization of raw unsafe ops") \
\
develop(intx, InstructionCountCutoff, 37000, \
"If GraphBuilder adds this many instructions, bails out") \
range(0, max_jint) \