diff --git a/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp b/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp
index ac640fb88d2..a31078f7e67 100644
--- a/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp
+++ b/src/hotspot/share/gc/shared/c1/barrierSetC1.cpp
@@ -38,6 +38,16 @@
 #define __ gen->lir()->
 #endif
 
+// Return true iff an access to bt is single-copy atomic.
+
+// The JMM requires atomicity for all accesses to fields of primitive
+// types other than double and long. In practice, HotSpot assumes that
+// on all processors, accesses to memory operands of wordSize and
+// smaller are atomic.
+static bool access_is_atomic(BasicType bt) {
+  return type2aelembytes(bt) <= wordSize;
+}
+
 LIR_Opr BarrierSetC1::resolve_address(LIRAccess& access, bool resolve_in_register) {
   DecoratorSet decorators = access.decorators();
   bool is_array = (decorators & IS_ARRAY) != 0;
@@ -140,7 +150,7 @@ LIR_Opr BarrierSetC1::atomic_add_at(LIRAccess& access, LIRItem& value) {
 void BarrierSetC1::store_at_resolved(LIRAccess& access, LIR_Opr value) {
   DecoratorSet decorators = access.decorators();
   bool is_volatile = (decorators & MO_SEQ_CST) != 0;
-  bool is_atomic = is_volatile || AlwaysAtomicAccesses;
+  bool needs_atomic = AlwaysAtomicAccesses && !access_is_atomic(value->type());
   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
   LIRGenerator* gen = access.gen();
@@ -154,7 +164,7 @@ void BarrierSetC1::store_at_resolved(LIRAccess& access, LIR_Opr value) {
   }
 
   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
-  if (is_atomic && !needs_patching) {
+  if ((is_volatile || needs_atomic) && !needs_patching) {
     gen->volatile_field_store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info());
   } else {
     __ store(value, access.resolved_addr()->as_address_ptr(), access.access_emit_info(), patch_code);
@@ -169,7 +179,7 @@ void BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
   LIRGenerator *gen = access.gen();
   DecoratorSet decorators = access.decorators();
   bool is_volatile = (decorators & MO_SEQ_CST) != 0;
-  bool is_atomic = is_volatile || AlwaysAtomicAccesses;
+  bool needs_atomic = AlwaysAtomicAccesses && !access_is_atomic(result->type());
   bool needs_patching = (decorators & C1_NEEDS_PATCHING) != 0;
   bool mask_boolean = (decorators & C1_MASK_BOOLEAN) != 0;
   bool in_native = (decorators & IN_NATIVE) != 0;
@@ -181,7 +191,7 @@ void BarrierSetC1::load_at_resolved(LIRAccess& access, LIR_Opr result) {
   LIR_PatchCode patch_code = needs_patching ? lir_patch_normal : lir_patch_none;
   if (in_native) {
     __ move_wide(access.resolved_addr()->as_address_ptr(), result);
-  } else if (is_atomic && !needs_patching) {
+  } else if ((is_volatile || needs_atomic) && !needs_patching) {
     gen->volatile_field_load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info());
   } else {
     __ load(access.resolved_addr()->as_address_ptr(), result, access.access_emit_info(), patch_code);
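
For reviewers, a minimal standalone sketch (not HotSpot code and not part of the patch) of what the new predicate implies. The BasicType enum, the elem_bytes() table and the explicit word_size parameter below are hypothetical stand-ins for HotSpot's BasicType, type2aelembytes() and wordSize, assuming the usual 1/2/4/8-byte primitive sizes; the needs_atomic computation mirrors the changed lines in store_at_resolved()/load_at_resolved().

// atomic_access_sketch.cpp -- standalone sketch, NOT HotSpot code.
// Shows which primitive accesses would still take the volatile_field_* path
// under AlwaysAtomicAccesses after this change, for 32-bit and 64-bit words.
#include <cstddef>
#include <cstdio>
#include <initializer_list>

enum BasicType { T_BOOLEAN, T_BYTE, T_CHAR, T_SHORT, T_INT, T_FLOAT, T_LONG, T_DOUBLE };

// Hypothetical stand-in for type2aelembytes(): element size in bytes.
static std::size_t elem_bytes(BasicType bt) {
  switch (bt) {
    case T_BOOLEAN: case T_BYTE:   return 1;
    case T_CHAR:    case T_SHORT:  return 2;
    case T_INT:     case T_FLOAT:  return 4;
    case T_LONG:    case T_DOUBLE: return 8;
  }
  return 0;
}

// Same shape as the patched access_is_atomic(), with wordSize passed explicitly.
static bool access_is_atomic(BasicType bt, std::size_t word_size) {
  return elem_bytes(bt) <= word_size;
}

int main() {
  const char* names[] = { "boolean", "byte", "char", "short", "int", "float", "long", "double" };
  for (std::size_t word_size : { std::size_t(4), std::size_t(8) }) {  // 32-bit vs 64-bit word
    std::printf("wordSize = %zu bytes:\n", word_size);
    for (int bt = T_BOOLEAN; bt <= T_DOUBLE; ++bt) {
      // Mirrors the patch: needs_atomic = AlwaysAtomicAccesses && !access_is_atomic(bt)
      bool needs_atomic = !access_is_atomic(static_cast<BasicType>(bt), word_size);
      std::printf("  %-7s -> %s\n", names[bt],
                  needs_atomic ? "volatile_field_* (atomic) path" : "plain load/store");
    }
  }
  return 0;
}

As I read the change, volatile (MO_SEQ_CST) accesses keep using gen->volatile_field_store()/volatile_field_load() as before; AlwaysAtomicAccesses now only forces that path for accesses wider than the machine word, i.e. long/double on a 32-bit VM and nothing on a 64-bit VM.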