Signed-off-by: Pranith Kumar <bobby.pr...@gmail.com>
---
 target-arm/translate-a64.c | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)
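Note for reviewers, not part of the patch itself: the change below emits a full TCG_BAR_SC fence for DMB/DSB, a fence after acquiring loads (TCG_BAR_LDAQ) and a fence before releasing stores (TCG_BAR_STRL). As a rough host-side analogue of that fence placement -- purely illustrative, using C11 atomics and made-up names, not QEMU code -- the same pattern looks like:

#include <stdatomic.h>
#include <stdio.h>

static int data;
static atomic_int ready;

static void producer(void)
{
    data = 42;                                 /* plain store                  */
    atomic_thread_fence(memory_order_release); /* cf. TCG_BAR_STRL: fence goes
                                                  before the releasing store   */
    atomic_store_explicit(&ready, 1, memory_order_relaxed);
}

static void consumer(void)
{
    while (!atomic_load_explicit(&ready, memory_order_relaxed)) {
        /* spin until the flag is observed */
    }
    atomic_thread_fence(memory_order_acquire); /* cf. TCG_BAR_LDAQ: fence goes
                                                  after the acquiring load     */
    printf("data = %d\n", data);               /* guaranteed to read 42        */
}

int main(void)
{
    /* single-threaded driver, only so the sketch compiles and runs */
    producer();
    consumer();
    return 0;
}

The driver is single-threaded just to keep the sketch self-contained; the only point is where the fences sit relative to the load and the store, which mirrors where tcg_gen_mb() is inserted below.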
diff --git a/target-arm/translate-a64.c b/target-arm/translate-a64.c
index f5e29d2..09877bc 100644
--- a/target-arm/translate-a64.c
+++ b/target-arm/translate-a64.c
@@ -1305,7 +1305,7 @@ static void handle_sync(DisasContext *s, uint32_t insn,
         return;
     case 4: /* DSB */
     case 5: /* DMB */
-        /* We don't emulate caches so barriers are no-ops */
+        tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
         return;
     case 6: /* ISB */
         /* We need to break the TB after this insn to execute
@@ -1934,7 +1934,13 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
         if (!is_store) {
             s->is_ldex = true;
             gen_load_exclusive(s, rt, rt2, tcg_addr, size, is_pair);
+            if (is_lasr) {
+                tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
+            }
         } else {
+            if (is_lasr) {
+                tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
+            }
             gen_store_exclusive(s, rs, rt, rt2, tcg_addr, size, is_pair);
         }
     } else {
@@ -1943,11 +1949,17 @@ static void disas_ldst_excl(DisasContext *s, uint32_t insn)
 
         /* Generate ISS for non-exclusive accesses including LASR. */
         if (is_store) {
+            if (is_lasr) {
+                tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
+            }
             do_gpr_st(s, tcg_rt, tcg_addr, size,
                       true, rt, iss_sf, is_lasr);
         } else {
             do_gpr_ld(s, tcg_rt, tcg_addr, size, false, false,
                       true, rt, iss_sf, is_lasr);
+            if (is_lasr) {
+                tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
+            }
         }
     }
 }
-- 
2.9.0