diff options
author | Richard Biener <rguenther@suse.de> | 2020-01-23 12:43:26 +0100 |
---|---|---|
committer | Richard Biener <rguenther@suse.de> | 2020-01-23 12:46:39 +0100 |
commit | f5ee5d05f367d6221b76f7a3ef7dad96605dbf04 (patch) | |
tree | f925dbaaeff662eeffb4dbf2e9223d46315269d1 /gcc/tree-ssa-sccvn.c | |
parent | 9592f639ff4655203f1cffb7c6752696e2721fb0 (diff) |
tree-optimization/93354 FRE redundant store removal validity fix
This fixes tracking of the alias-set of partial defs for use by
redundant store removal.
2020-01-23 Richard Biener <rguenther@suse.de>
PR tree-optimization/93354
* tree-ssa-sccvn.c (vn_walk_cb_data::push_partial_def): Take
alias-set of the def as argument and record the first one.
(vn_walk_cb_data::first_set): New member.
(vn_reference_lookup_3): Pass the alias-set of the current def
to push_partial_def. Fix alias-set used in the aggregate copy
case.
(vn_reference_lookup): Consistently set *last_vuse_ptr.
* real.c (clear_significand_below): Fix out-of-bound access.
* gcc.dg/torture/pr93354.c: New testcase.
Diffstat (limited to 'gcc/tree-ssa-sccvn.c')
-rw-r--r-- | gcc/tree-ssa-sccvn.c | 34 |
1 file changed, 22 insertions(+), 12 deletions(-)
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c index 0b8ee586139..6e0b2202157 100644 --- a/gcc/tree-ssa-sccvn.c +++ b/gcc/tree-ssa-sccvn.c @@ -1693,7 +1693,8 @@ struct vn_walk_cb_data ao_ref_init (&orig_ref, orig_ref_); } ~vn_walk_cb_data (); - void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT); + void *push_partial_def (const pd_data& pd, tree, + alias_set_type, HOST_WIDE_INT); vn_reference_t vr; ao_ref orig_ref; @@ -1706,6 +1707,7 @@ struct vn_walk_cb_data /* The first defs range to avoid splay tree setup in most cases. */ pd_range first_range; tree first_vuse; + alias_set_type first_set; splay_tree known_ranges; obstack ranges_obstack; }; @@ -1746,13 +1748,13 @@ pd_tree_dealloc (void *, void *) } /* Push PD to the vector of partial definitions returning a - value when we are ready to combine things with VUSE and MAXSIZEI, + value when we are ready to combine things with VUSE, SET and MAXSIZEI, NULL when we want to continue looking for partial defs or -1 on failure. */ void * vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, - HOST_WIDE_INT maxsizei) + alias_set_type set, HOST_WIDE_INT maxsizei) { const HOST_WIDE_INT bufsize = 64; /* We're using a fixed buffer for encoding so fail early if the object @@ -1773,6 +1775,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, first_range.offset = pd.offset; first_range.size = pd.size; first_vuse = vuse; + first_set = set; last_vuse_ptr = NULL; /* Continue looking for partial defs. */ return NULL; @@ -1903,10 +1906,10 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse, if (dump_file && (dump_flags & TDF_DETAILS)) fprintf (dump_file, "Successfully combined %u partial definitions\n", ndefs); - /* ??? If we track partial defs alias-set we could use that if it - is the same for all. Use zero for now. */ + /* We are using the alias-set of the first store we encounter which + should be appropriate here. 
*/ return vn_reference_lookup_or_insert_for_pieces - (first_vuse, 0, vr->type, vr->operands, val); + (first_vuse, first_set, vr->type, vr->operands, val); } else { @@ -2492,7 +2495,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, pd.rhs = build_constructor (NULL_TREE, NULL); pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.size = leni; - return data->push_partial_def (pd, vuse, maxsizei); + return data->push_partial_def (pd, vuse, 0, maxsizei); } } @@ -2553,7 +2556,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, pd.rhs = gimple_assign_rhs1 (def_stmt); pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT; - return data->push_partial_def (pd, vuse, maxsizei); + return data->push_partial_def (pd, vuse, get_alias_set (lhs), + maxsizei); } } } @@ -2665,7 +2669,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, pd.rhs = rhs; pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT; - return data->push_partial_def (pd, vuse, maxsizei); + return data->push_partial_def (pd, vuse, get_alias_set (lhs), + maxsizei); } } } @@ -2751,7 +2756,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, pd.rhs = SSA_VAL (def_rhs); pd.offset = (offset2i - offseti) / BITS_PER_UNIT; pd.size = size2i / BITS_PER_UNIT; - return data->push_partial_def (pd, vuse, maxsizei); + return data->push_partial_def (pd, vuse, get_alias_set (lhs), + maxsizei); } } } @@ -2764,6 +2770,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF || handled_component_p (gimple_assign_rhs1 (def_stmt)))) { + tree lhs = gimple_assign_lhs (def_stmt); tree base2; int i, j, k; auto_vec<vn_reference_op_s> rhs; @@ -2870,7 +2877,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, { if (data->partial_defs.is_empty ()) return vn_reference_lookup_or_insert_for_pieces - (vuse, get_alias_set (rhs1), vr->type, vr->operands, val); + (vuse, 
get_alias_set (lhs), vr->type, vr->operands, val); /* This is the only interesting case for partial-def handling coming from targets that like to gimplify init-ctors as aggregate copies from constant data like aarch64 for @@ -2882,7 +2889,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_, pd.rhs = val; pd.offset = 0; pd.size = maxsizei / BITS_PER_UNIT; - return data->push_partial_def (pd, vuse, maxsizei); + return data->push_partial_def (pd, vuse, get_alias_set (lhs), + maxsizei); } } @@ -3205,6 +3213,8 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind, return NULL_TREE; } + if (last_vuse_ptr) + *last_vuse_ptr = vr1.vuse; return vn_reference_lookup_1 (&vr1, vnresult); } |