12 changes: 12 additions & 0 deletions src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
@@ -1850,3 +1850,15 @@ void InterpreterMacroAssembler::profile_parameters_type(Register mdp, Register t
     bind(profile_continue);
   }
 }
+
+#ifdef ASSERT
+void InterpreterMacroAssembler::verify_field_offset(Register reg) {
+  // Verify that the field offset is not inside the object header; this implicitly catches an offset of 0
+  Label L;
+  subs(zr, reg, static_cast<int>(sizeof(markWord) + (UseCompressedClassPointers ? sizeof(narrowKlass) : sizeof(Klass*))));
+  br(Assembler::GE, L);
+  stop("bad field offset");
+  bind(L);
+}
+#endif
+
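The check reads as: compare the incoming offset against the minimal object header size (mark word plus narrow or full klass pointer) and trap on anything smaller, which also catches an offset of 0, since `subs zr, reg, #imm` is a compare and the `GE` branch is taken only for offsets at or past the header. A minimal C++ sketch of the same invariant, assuming a 64-bit VM; `is_plausible_field_offset` and the size constants are illustrative, not HotSpot names:

```c++
#include <cassert>
#include <cstdint>

// Sketch only: the smallest legal instance-field offset is the object
// header size. Anything below that (including 0) points into the header.
static bool is_plausible_field_offset(uint64_t offset,
                                      bool use_compressed_class_pointers) {
  const uint64_t mark_word_size = sizeof(uint64_t);  // markWord on 64-bit
  const uint64_t klass_ptr_size = use_compressed_class_pointers
                                      ? sizeof(uint32_t)  // narrowKlass
                                      : sizeof(void*);    // Klass*
  return offset >= mark_word_size + klass_ptr_size;
}

int main() {
  assert(!is_plausible_field_offset(0, true));   // offset 0, caught implicitly
  assert(!is_plausible_field_offset(11, true));  // still inside the header
  assert(is_plausible_field_offset(12, true));   // first legal offset
  return 0;
}
```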
2 changes: 2 additions & 0 deletions src/hotspot/cpu/aarch64/interp_masm_aarch64.hpp
@@ -292,6 +292,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
     set_last_Java_frame(esp, rfp, (address) pc(), rscratch1);
     MacroAssembler::_call_Unimplemented(call_site);
   }
+
+  void verify_field_offset(Register reg) NOT_DEBUG_RETURN;
 };

 #endif // CPU_AARCH64_INTERP_MASM_AARCH64_HPP
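`NOT_DEBUG_RETURN` is HotSpot's way of declaring a member that only debug builds implement: in product builds the macro expands to an empty inline body, so call sites compile to nothing. A hedged sketch of the pattern; the real definition lives in HotSpot's shared macro headers and may differ in detail:

```c++
// Sketch of the debug-only-member pattern behind NOT_DEBUG_RETURN
// (modeled on HotSpot; check the shared macro headers for the real thing).
#ifdef ASSERT
#define NOT_DEBUG_RETURN      /* just a declaration; body lives in the .cpp */
#else
#define NOT_DEBUG_RETURN {}   /* product build: empty inline body, zero cost */
#endif

struct ExampleAssembler {
  void verify_field_offset(int reg) NOT_DEBUG_RETURN;
};

#ifdef ASSERT
// Debug builds supply the real checking code out of line.
void ExampleAssembler::verify_field_offset(int reg) { /* checks go here */ }
#endif
```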
17 changes: 15 additions & 2 deletions src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
@@ -165,6 +165,7 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
                                    Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
                                    int byte_no)
 {
+  assert_different_registers(bc_reg, temp_reg);
   if (!RewriteBytecodes) return;
   Label L_patch_done;

@@ -222,8 +223,12 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
   __ bind(L_okay);
 #endif

-  // patch bytecode
-  __ strb(bc_reg, at_bcp(0));
+  // Patch the bytecode with a release store, to coordinate with the
+  // ConstantPoolCacheEntry loads in the fast bytecode codelets. Those
+  // codelets contain a memory barrier which, together with the control
+  // dependency on entering the fast codelet, provides the needed ordering.
+  __ lea(temp_reg, at_bcp(0));
+  __ stlrb(bc_reg, temp_reg);
   __ bind(L_patch_done);
 }

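`stlrb` only accepts a plain base register, unlike `strb` with its offset addressing, hence the extra `lea` into `temp_reg` and the new `assert_different_registers(bc_reg, temp_reg)` at the top of the function. The store-release pairs with the barrier on the reader side in the classic release/acquire publication pattern; a generic C++ sketch of that pattern, with made-up names and no claim to match HotSpot's actual structures:

```c++
#include <atomic>
#include <cstdint>

// Illustrative release/acquire publication (not HotSpot code): the writer
// fills in the payload first, then publishes the flag byte with a release
// store; a reader that observes the new byte through an acquire (or an
// equivalent barrier) is guaranteed to also see the payload.
struct PatchedSite {
  std::atomic<uint32_t> cache_entry{0};  // stands in for the cp cache entry
  std::atomic<uint8_t>  bytecode{0};     // stands in for the byte at bcp
};

void patch(PatchedSite& site) {
  site.cache_entry.store(42, std::memory_order_relaxed);  // resolve first
  site.bytecode.store(1, std::memory_order_release);      // ~ stlrb
}

uint32_t fast_path(PatchedSite& site) {
  if (site.bytecode.load(std::memory_order_acquire) == 1) {   // ~ barrier
    return site.cache_entry.load(std::memory_order_relaxed);  // sees 42
  }
  return 0;  // not patched yet; take the slow path instead
}
```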
@@ -2914,6 +2919,7 @@ void TemplateTable::fast_storefield(TosState state)

   // replace index with field offset from cache entry
   __ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
+  __ verify_field_offset(r1);

   {
     Label notVolatile;
@@ -3007,6 +3013,8 @@ void TemplateTable::fast_accessfield(TosState state)

   __ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
                                   ConstantPoolCacheEntry::f2_offset())));
+  __ verify_field_offset(r1);
+
   __ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
                                    ConstantPoolCacheEntry::flags_offset())));

@@ -3074,8 +3082,13 @@ void TemplateTable::fast_xaccess(TosState state)
   __ ldr(r0, aaddress(0));
   // access constant pool cache
   __ get_cache_and_index_at_bcp(r2, r3, 2);
+
+  // Must prevent reordering of the following cp cache loads with the bytecode load
+  __ membar(MacroAssembler::LoadLoad);
+
   __ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
                                   ConstantPoolCacheEntry::f2_offset())));
+  __ verify_field_offset(r1);

   // 8179954: We need to make sure that the code generated for
   // volatile accesses forms a sequentially-consistent set of
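On the reader side, `fast_xaccess` runs after the interpreter has already fetched the rewritten bytecode, so it only needs a `LoadLoad` barrier between that earlier fetch and the cp cache loads, rather than a full acquire on the bytecode load itself. A reader-side sketch of the same fence placement, again with illustrative names only:

```c++
#include <atomic>
#include <cstdint>

// Illustrative reader-side fence (not HotSpot code): when the first load
// was plain/relaxed, a LoadLoad-style fence afterwards still keeps the
// dependent loads below from being reordered ahead of it.
std::atomic<uint8_t>  bytecode{0};     // written by the patch side
std::atomic<uint32_t> cache_entry{0};  // filled in before the patch

uint32_t fast_xaccess_sketch() {
  uint8_t bc = bytecode.load(std::memory_order_relaxed);  // earlier fetch
  std::atomic_thread_fence(std::memory_order_acquire);    // ~ membar(LoadLoad)
  // The fence mirrors the membar added above: the cache-entry load cannot
  // float above the bytecode load.
  return (bc != 0) ? cache_entry.load(std::memory_order_relaxed) : 0;
}
```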