* tree-ssa-dse.c (live_bytes_read): New function.
(dse_classify_store): Ignore reads of dead bytes.

* testsuite/gcc.dg/tree-ssa/ssa-dse-26.c: New test.
* testsuite/gcc.dg/tree-ssa/ssa-dse-27.c: Likewise.

diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-26.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-26.c
new file mode 100644
index 0000000..6605dfe
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-26.c
@@ -0,0 +1,33 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-dse1-details" } */
+
+enum constraint_expr_type
+{
+  SCALAR, DEREF, ADDRESSOF
+};
+typedef struct constraint_expr
+{
+  enum constraint_expr_type type;
+  unsigned int var;
+  long offset;
+} constraint_expr;
+typedef struct constraint
+{
+  struct constraint_expr lhs;
+  struct constraint_expr rhs;
+} constraint;
+static _Bool
+constraint_expr_equal (struct constraint_expr x, struct constraint_expr y)
+{
+  return x.type == y.type && x.var == y.var && x.offset == y.offset;
+}
+
+_Bool
+constraint_equal (struct constraint a, struct constraint b)
+{
+  return constraint_expr_equal (a.lhs, b.lhs)
+    && constraint_expr_equal (a.rhs, b.rhs);
+}
+
+/* { dg-final { scan-tree-dump-times "Deleted dead store" 2 "dse1" } } */
+
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-27.c b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-27.c
new file mode 100644
index 0000000..48dc92e
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/ssa-dse-27.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-dse1-details -fno-tree-fre -fno-tree-sra" } */
+
+struct S { struct R { int x; int y; } r; int z; } s;
+
+extern void blah (struct S);
+
+void
+foo ()
+{
+  struct S s = { {1, 2}, 3 };
+  s.r.x = 1;
+  s.r.y = 2;
+  struct R r = s.r;
+  s.z = 3;
+  blah (s);
+}
+
+
+/* { dg-final { scan-tree-dump-times "Deleted dead store" 4 "dse1" } } */
+
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index a807d6d..f5b53fc 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -475,6 +475,41 @@ maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
     }
 }
 
+/* Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
+   derived from REF, a write reference.
+
+   While this routine may modify USE_REF, it is passed by value, not
+   by reference, so callers do not see those modifications.  */
+
+static bool
+live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
+{
+  /* We have already verified that USE_REF and REF hit the same object.
+     Now verify that there's actually an overlap between USE_REF and REF.  */
+  if ((use_ref.offset < ref->offset
+       && use_ref.offset + use_ref.size > ref->offset)
+      || (use_ref.offset >= ref->offset
+          && use_ref.offset < ref->offset + ref->size))
+    {
+      normalize_ref (&use_ref, ref);
+
+      /* If USE_REF covers all of REF, then it will hit one or more
+         live bytes.  This avoids useless iteration over the bitmap
+         below.  */
+      if (use_ref.offset == ref->offset && use_ref.size == ref->size)
+        return true;
+
+      /* Now iterate over what's left in USE_REF and see if any of
+         those bits are in LIVE.  */
+      for (int i = (use_ref.offset - ref->offset) / BITS_PER_UNIT;
+           i < (use_ref.offset + use_ref.size) / BITS_PER_UNIT; i++)
+        if (bitmap_bit_p (live, i))
+          return true;
+      return false;
+    }
+  return true;
+}
+
 /* A helper of dse_optimize_stmt.
    Given a GIMPLE_ASSIGN in STMT that writes to REF, find a candidate
    statement *USE_STMT that may prove STMT to be dead.
@@ -554,6 +589,41 @@ dse_classify_store (ao_ref *ref, gimple *stmt, gimple **use_stmt,
           /* If the statement is a use the store is not dead.  */
           else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
             {
+              /* Handle common cases where we can easily build an ao_ref
+                 structure for USE_STMT and in doing so we find that the
+                 references hit non-live bytes and thus can be ignored.  */
+              if (live_bytes)
+                {
+                  if (is_gimple_assign (use_stmt))
+                    {
+                      /* Other cases were noted as non-aliasing by
+                         the call to ref_maybe_used_by_stmt_p.  */
+                      ao_ref use_ref;
+                      ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
+                      if (valid_ao_ref_for_dse (&use_ref)
+                          && use_ref.base == ref->base
+                          && use_ref.size == use_ref.max_size
+                          && !live_bytes_read (use_ref, ref, live_bytes))
+                        {
+                          if (gimple_vdef (use_stmt))
+                            {
+                              /* If we have already seen a store and
+                                 this is also a store, then we have to
+                                 fail.  */
+                              if (temp)
+                                {
+                                  fail = true;
+                                  BREAK_FROM_IMM_USE_STMT (ui);
+                                }
+
+                              /* Otherwise walk through this store.  */
+                              temp = use_stmt;
+                            }
+                          continue;
+                        }
+                    }
+                }
+
               fail = true;
               BREAK_FROM_IMM_USE_STMT (ui);
             }
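
For reviewers, here is a small standalone sketch of the overlap test that
live_bytes_read performs.  It is illustration only, not part of the patch:
the names sketch_live_bytes_read and struct ref, the plain unsigned char
array standing in for GCC's sbitmap, and all offsets/sizes below are made
up for the example, but the logic follows the same steps of clamping the
use reference to the store and then scanning the remaining bytes for a
live one.

/* Illustration only: struct ref and the values below are stand-ins;
   offsets and sizes are in bits, as in ao_ref.  */
#include <stdbool.h>
#include <stdio.h>

#define BITS_PER_UNIT 8

struct ref { long offset; long size; };

/* Return true if USE reads any byte of REF still marked in LIVE, where
   LIVE has one entry per byte of REF.  */
static bool
sketch_live_bytes_read (struct ref use, const struct ref *ref,
                        const unsigned char *live)
{
  /* No overlap between USE and REF: conservatively report a live read.  */
  if (!((use.offset < ref->offset && use.offset + use.size > ref->offset)
        || (use.offset >= ref->offset
            && use.offset < ref->offset + ref->size)))
    return true;

  /* Clamp USE to the bytes that fall inside REF, as normalize_ref does.  */
  if (use.offset < ref->offset)
    {
      use.size -= ref->offset - use.offset;
      use.offset = ref->offset;
    }
  if (use.offset + use.size > ref->offset + ref->size)
    use.size = ref->offset + ref->size - use.offset;

  /* A use covering all of REF hits whatever live bytes remain, so skip
     the scan.  */
  if (use.offset == ref->offset && use.size == ref->size)
    return true;

  /* Scan the overlapping bytes for one that is still live.  */
  for (long i = (use.offset - ref->offset) / BITS_PER_UNIT;
       i < (use.offset + use.size - ref->offset) / BITS_PER_UNIT; i++)
    if (live[i])
      return true;
  return false;
}

int
main (void)
{
  /* A 16-byte store at offset 0 whose last four bytes are still live.  */
  struct ref store = { 0, 128 };
  unsigned char live[16] = { 0 };
  for (int i = 12; i < 16; i++)
    live[i] = 1;

  struct ref dead_read = { 0, 64 };    /* bytes 0-7: all dead bytes */
  struct ref live_read = { 64, 64 };   /* bytes 8-15: hits live bytes */

  printf ("dead_read -> %d, live_read -> %d\n",
          sketch_live_bytes_read (dead_read, &store, live),
          sketch_live_bytes_read (live_read, &store, live));
  return 0;
}

The sketch mirrors the structure of live_bytes_read: reject non-overlapping
references, normalize the use against the store, short-circuit the
full-coverage case, then walk the per-byte liveness data.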