summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRichard Sandiford <richard.sandiford@linaro.org>2017-12-21 07:02:13 +0000
committerRichard Sandiford <rsandifo@gcc.gnu.org>2017-12-21 07:02:13 +0000
commitaca52e6f8d29064f4712e5f3f4429a36f918f099 (patch)
tree45c2884952716e83f81ee8162585cbf16e06bf73
parent3fed2ce96f7ec8ee8603b33ba0426ac40acecf24 (diff)
poly_int: MEM_REF offsets
This patch allows MEM_REF offsets to be polynomial, with mem_ref_offset now returning a poly_offset_int instead of an offset_int. The non-mechanical changes to callers of mem_ref_offset were handled by previous patches. 2017-12-21 Richard Sandiford <richard.sandiford@linaro.org> Alan Hayward <alan.hayward@arm.com> David Sherwood <david.sherwood@arm.com> gcc/ * fold-const.h (mem_ref_offset): Return a poly_offset_int rather than an offset_int. * tree.c (mem_ref_offset): Likewise. (build_simple_mem_ref_loc): Treat MEM_REF offsets as poly_ints. * builtins.c (get_object_alignment_2): Likewise. * expr.c (get_inner_reference, expand_expr_real_1): Likewise. * gimple-fold.c (get_base_constructor): Likewise. * gimple-ssa-strength-reduction.c (restructure_reference): Likewise. * gimple-ssa-warn-restrict.c (builtin_memref::builtin_memref): Likewise. * ipa-polymorphic-call.c (ipa_polymorphic_call_context::ipa_polymorphic_call_context): Likewise. * ipa-prop.c (compute_complex_assign_jump_func): Likewise. (get_ancestor_addr_info): Likewise. * ipa-param-manipulation.c (ipa_get_adjustment_candidate): Likewise. * match.pd: Likewise. * tree-data-ref.c (dr_analyze_innermost): Likewise. * tree-dfa.c (get_addr_base_and_unit_offset_1): Likewise. * tree-eh.c (tree_could_trap_p): Likewise. * tree-object-size.c (addr_object_size): Likewise. * tree-ssa-address.c (copy_ref_info): Likewise. * tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise. (indirect_refs_may_alias_p): Likewise. * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise. * tree-ssa.c (maybe_rewrite_mem_ref_base): Likewise. (non_rewritable_mem_ref_base): Likewise. * tree-vect-data-refs.c (vect_check_gather_scatter): Likewise. * tree-vrp.c (vrp_prop::check_array_ref): Likewise. * varasm.c (decode_addr_const): Likewise. Co-Authored-By: Alan Hayward <alan.hayward@arm.com> Co-Authored-By: David Sherwood <david.sherwood@arm.com> From-SVN: r255930
-rw-r--r--gcc/ChangeLog34
-rw-r--r--gcc/builtins.c2
-rw-r--r--gcc/expr.c8
-rw-r--r--gcc/fold-const.h2
-rw-r--r--gcc/gimple-fold.c2
-rw-r--r--gcc/gimple-ssa-strength-reduction.c4
-rw-r--r--gcc/gimple-ssa-warn-restrict.c13
-rw-r--r--gcc/ipa-param-manipulation.c2
-rw-r--r--gcc/ipa-polymorphic-call.c6
-rw-r--r--gcc/ipa-prop.c14
-rw-r--r--gcc/match.pd4
-rw-r--r--gcc/tree-data-ref.c28
-rw-r--r--gcc/tree-dfa.c8
-rw-r--r--gcc/tree-eh.c13
-rw-r--r--gcc/tree-object-size.c16
-rw-r--r--gcc/tree-ssa-address.c4
-rw-r--r--gcc/tree-ssa-alias.c6
-rw-r--r--gcc/tree-ssa-sccvn.c7
-rw-r--r--gcc/tree-ssa.c22
-rw-r--r--gcc/tree-vect-data-refs.c5
-rw-r--r--gcc/tree-vrp.c5
-rw-r--r--gcc/tree.c7
-rw-r--r--gcc/varasm.c2
23 files changed, 135 insertions, 79 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 58da0852dde..2bdcde16a21 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -2,6 +2,40 @@
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
+ * fold-const.h (mem_ref_offset): Return a poly_offset_int rather
+ than an offset_int.
+ * tree.c (mem_ref_offset): Likewise.
+ (build_simple_mem_ref_loc): Treat MEM_REF offsets as poly_ints.
+ * builtins.c (get_object_alignment_2): Likewise.
+ * expr.c (get_inner_reference, expand_expr_real_1): Likewise.
+ * gimple-fold.c (get_base_constructor): Likewise.
+ * gimple-ssa-strength-reduction.c (restructure_reference): Likewise.
+ * gimple-ssa-warn-restrict.c (builtin_memref::builtin_memref):
+ Likewise.
+ * ipa-polymorphic-call.c
+ (ipa_polymorphic_call_context::ipa_polymorphic_call_context): Likewise.
+ * ipa-prop.c (compute_complex_assign_jump_func): Likewise.
+ (get_ancestor_addr_info): Likewise.
+ * ipa-param-manipulation.c (ipa_get_adjustment_candidate): Likewise.
+ * match.pd: Likewise.
+ * tree-data-ref.c (dr_analyze_innermost): Likewise.
+ * tree-dfa.c (get_addr_base_and_unit_offset_1): Likewise.
+ * tree-eh.c (tree_could_trap_p): Likewise.
+ * tree-object-size.c (addr_object_size): Likewise.
+ * tree-ssa-address.c (copy_ref_info): Likewise.
+ * tree-ssa-alias.c (indirect_ref_may_alias_decl_p): Likewise.
+ (indirect_refs_may_alias_p): Likewise.
+ * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
+ * tree-ssa.c (maybe_rewrite_mem_ref_base): Likewise.
+ (non_rewritable_mem_ref_base): Likewise.
+ * tree-vect-data-refs.c (vect_check_gather_scatter): Likewise.
+ * tree-vrp.c (vrp_prop::check_array_ref): Likewise.
+ * varasm.c (decode_addr_const): Likewise.
+
+2017-12-21 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
	* gimple-ssa-store-merging.c (find_bswap_or_nop_load): Track polynomial
offsets for MEM_REFs.
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 0ff0efea908..753809f2357 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -346,7 +346,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
bitpos += ptr_bitpos;
if (TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
- bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
+ bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
}
}
else if (TREE_CODE (exp) == STRING_CST)
diff --git a/gcc/expr.c b/gcc/expr.c
index 8a1227908fa..4e7349e6494 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -7197,8 +7197,8 @@ get_inner_reference (tree exp, poly_int64_pod *pbitsize,
tree off = TREE_OPERAND (exp, 1);
if (!integer_zerop (off))
{
- offset_int boff, coff = mem_ref_offset (exp);
- boff = coff << LOG2_BITS_PER_UNIT;
+ poly_offset_int boff = mem_ref_offset (exp);
+ boff <<= LOG2_BITS_PER_UNIT;
bit_offset += boff;
}
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
@@ -10222,9 +10222,9 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
might end up in a register. */
if (mem_ref_refers_to_non_mem_p (exp))
{
- HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
+ poly_int64 offset = mem_ref_offset (exp).force_shwi ();
base = TREE_OPERAND (base, 0);
- if (offset == 0
+ if (known_eq (offset, 0)
&& !reverse
&& tree_fits_uhwi_p (TYPE_SIZE (type))
&& (GET_MODE_BITSIZE (DECL_MODE (base))
diff --git a/gcc/fold-const.h b/gcc/fold-const.h
index 256dc6ffb35..b777606b6e9 100644
--- a/gcc/fold-const.h
+++ b/gcc/fold-const.h
@@ -114,7 +114,7 @@ extern tree fold_indirect_ref_loc (location_t, tree);
extern tree build_simple_mem_ref_loc (location_t, tree);
#define build_simple_mem_ref(T)\
build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
-extern offset_int mem_ref_offset (const_tree);
+extern poly_offset_int mem_ref_offset (const_tree);
extern tree build_invariant_address (tree, tree, poly_int64);
extern tree constant_boolean_node (bool, tree);
extern tree div_if_zero_remainder (const_tree, const_tree);
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 33eae3e1ed1..ae2c9a176c6 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -6346,7 +6346,7 @@ get_base_constructor (tree base, poly_int64_pod *bit_offset,
{
if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
return NULL_TREE;
- *bit_offset += (mem_ref_offset (base).to_short_addr ()
+ *bit_offset += (mem_ref_offset (base).force_shwi ()
* BITS_PER_UNIT);
}
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index 7d8543c28ca..9eab9a72a7f 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -970,17 +970,19 @@ restructure_reference (tree *pbase, tree *poffset, widest_int *pindex,
widest_int index = *pindex;
tree mult_op0, t1, t2, type;
widest_int c1, c2, c3, c4, c5;
+ offset_int mem_offset;
if (!base
|| !offset
|| TREE_CODE (base) != MEM_REF
+ || !mem_ref_offset (base).is_constant (&mem_offset)
|| TREE_CODE (offset) != MULT_EXPR
|| TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
|| wi::umod_floor (index, BITS_PER_UNIT) != 0)
return false;
t1 = TREE_OPERAND (base, 0);
- c1 = widest_int::from (mem_ref_offset (base), SIGNED);
+ c1 = widest_int::from (mem_offset, SIGNED);
type = TREE_TYPE (TREE_OPERAND (base, 1));
mult_op0 = TREE_OPERAND (offset, 0);
diff --git a/gcc/gimple-ssa-warn-restrict.c b/gcc/gimple-ssa-warn-restrict.c
index 69992b89bd6..ac545e4cf67 100644
--- a/gcc/gimple-ssa-warn-restrict.c
+++ b/gcc/gimple-ssa-warn-restrict.c
@@ -349,10 +349,15 @@ builtin_memref::builtin_memref (tree expr, tree size)
if (TREE_CODE (base) == MEM_REF)
{
- offset_int off = mem_ref_offset (base);
- refoff += off;
- offrange[0] += off;
- offrange[1] += off;
+ offset_int off;
+ if (mem_ref_offset (base).is_constant (&off))
+ {
+ refoff += off;
+ offrange[0] += off;
+ offrange[1] += off;
+ }
+ else
+ size = NULL_TREE;
base = TREE_OPERAND (base, 0);
}
diff --git a/gcc/ipa-param-manipulation.c b/gcc/ipa-param-manipulation.c
index f8610888cd5..efa1ea27c66 100644
--- a/gcc/ipa-param-manipulation.c
+++ b/gcc/ipa-param-manipulation.c
@@ -636,7 +636,7 @@ ipa_get_adjustment_candidate (tree **expr, bool *convert,
if (TREE_CODE (base) == MEM_REF)
{
- offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
+ offset += mem_ref_offset (base).force_shwi () * BITS_PER_UNIT;
base = TREE_OPERAND (base, 0);
}
diff --git a/gcc/ipa-polymorphic-call.c b/gcc/ipa-polymorphic-call.c
index 21ae70bf493..78cde864244 100644
--- a/gcc/ipa-polymorphic-call.c
+++ b/gcc/ipa-polymorphic-call.c
@@ -917,9 +917,11 @@ ipa_polymorphic_call_context::ipa_polymorphic_call_context (tree fndecl,
{
/* We found dereference of a pointer. Type of the pointer
and MEM_REF is meaningless, but we can look futher. */
- if (TREE_CODE (base) == MEM_REF)
+ offset_int mem_offset;
+ if (TREE_CODE (base) == MEM_REF
+ && mem_ref_offset (base).is_constant (&mem_offset))
{
- offset_int o = mem_ref_offset (base) * BITS_PER_UNIT;
+ offset_int o = mem_offset * BITS_PER_UNIT;
o += offset;
o += offset2;
if (!wi::fits_shwi_p (o))
diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index 071c0d69a20..7ac250a0a6a 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -1267,9 +1267,12 @@ compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
return;
base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
- if (!base || TREE_CODE (base) != MEM_REF)
+ offset_int mem_offset;
+ if (!base
+ || TREE_CODE (base) != MEM_REF
+ || !mem_ref_offset (base).is_constant (&mem_offset))
return;
- offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
+ offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
ssa = TREE_OPERAND (base, 0);
if (TREE_CODE (ssa) != SSA_NAME
|| !SSA_NAME_IS_DEFAULT_DEF (ssa)
@@ -1311,7 +1314,10 @@ get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
obj = expr;
expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
- if (!expr || TREE_CODE (expr) != MEM_REF)
+ offset_int mem_offset;
+ if (!expr
+ || TREE_CODE (expr) != MEM_REF
+ || !mem_ref_offset (expr).is_constant (&mem_offset))
return NULL_TREE;
parm = TREE_OPERAND (expr, 0);
if (TREE_CODE (parm) != SSA_NAME
@@ -1319,7 +1325,7 @@ get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
|| TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
return NULL_TREE;
- *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
+ *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
*obj_p = obj;
return expr;
}
diff --git a/gcc/match.pd b/gcc/match.pd
index ec99250f1c5..bd1673ac5e9 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -3561,12 +3561,12 @@ DEFINE_INT_AND_FLOAT_ROUND_FN (RINT)
tree base1 = get_addr_base_and_unit_offset (TREE_OPERAND (@1, 0), &off1);
if (base0 && TREE_CODE (base0) == MEM_REF)
{
- off0 += mem_ref_offset (base0).to_short_addr ();
+ off0 += mem_ref_offset (base0).force_shwi ();
base0 = TREE_OPERAND (base0, 0);
}
if (base1 && TREE_CODE (base1) == MEM_REF)
{
- off1 += mem_ref_offset (base1).to_short_addr ();
+ off1 += mem_ref_offset (base1).force_shwi ();
base1 = TREE_OPERAND (base1, 0);
}
}
diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c
index 86a587d04c0..2707cf82eba 100644
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
@@ -820,16 +820,16 @@ dr_analyze_innermost (innermost_loop_behavior *drb, tree ref,
}
/* Calculate the alignment and misalignment for the inner reference. */
- unsigned int HOST_WIDE_INT base_misalignment;
- unsigned int base_alignment;
- get_object_alignment_1 (base, &base_alignment, &base_misalignment);
+ unsigned int HOST_WIDE_INT bit_base_misalignment;
+ unsigned int bit_base_alignment;
+ get_object_alignment_1 (base, &bit_base_alignment, &bit_base_misalignment);
/* There are no bitfield references remaining in BASE, so the values
we got back must be whole bytes. */
- gcc_assert (base_alignment % BITS_PER_UNIT == 0
- && base_misalignment % BITS_PER_UNIT == 0);
- base_alignment /= BITS_PER_UNIT;
- base_misalignment /= BITS_PER_UNIT;
+ gcc_assert (bit_base_alignment % BITS_PER_UNIT == 0
+ && bit_base_misalignment % BITS_PER_UNIT == 0);
+ unsigned int base_alignment = bit_base_alignment / BITS_PER_UNIT;
+ poly_int64 base_misalignment = bit_base_misalignment / BITS_PER_UNIT;
if (TREE_CODE (base) == MEM_REF)
{
@@ -837,8 +837,8 @@ dr_analyze_innermost (innermost_loop_behavior *drb, tree ref,
{
/* Subtract MOFF from the base and add it to POFFSET instead.
Adjust the misalignment to reflect the amount we subtracted. */
- offset_int moff = mem_ref_offset (base);
- base_misalignment -= moff.to_short_addr ();
+ poly_offset_int moff = mem_ref_offset (base);
+ base_misalignment -= moff.force_shwi ();
tree mofft = wide_int_to_tree (sizetype, moff);
if (!poffset)
poffset = mofft;
@@ -925,8 +925,14 @@ dr_analyze_innermost (innermost_loop_behavior *drb, tree ref,
drb->offset = fold_convert (ssizetype, offset_iv.base);
drb->init = init;
drb->step = step;
- drb->base_alignment = base_alignment;
- drb->base_misalignment = base_misalignment & (base_alignment - 1);
+ if (known_misalignment (base_misalignment, base_alignment,
+ &drb->base_misalignment))
+ drb->base_alignment = base_alignment;
+ else
+ {
+ drb->base_alignment = known_alignment (base_misalignment);
+ drb->base_misalignment = 0;
+ }
drb->offset_alignment = highest_pow2_factor (offset_iv.base);
drb->step_alignment = highest_pow2_factor (step);
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 681afbcd056..3358763b466 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -798,8 +798,8 @@ get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
{
if (!integer_zerop (TREE_OPERAND (exp, 1)))
{
- offset_int off = mem_ref_offset (exp);
- byte_offset += off.to_short_addr ();
+ poly_offset_int off = mem_ref_offset (exp);
+ byte_offset += off.force_shwi ();
}
exp = TREE_OPERAND (base, 0);
}
@@ -820,8 +820,8 @@ get_addr_base_and_unit_offset_1 (tree exp, poly_int64_pod *poffset,
return NULL_TREE;
if (!integer_zerop (TMR_OFFSET (exp)))
{
- offset_int off = mem_ref_offset (exp);
- byte_offset += off.to_short_addr ();
+ poly_offset_int off = mem_ref_offset (exp);
+ byte_offset += off.force_shwi ();
}
exp = TREE_OPERAND (base, 0);
}
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 65e850d36ad..fc5fb02ddcd 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -2658,14 +2658,15 @@ tree_could_trap_p (tree expr)
if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
{
tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
- offset_int off = mem_ref_offset (expr);
- if (wi::neg_p (off, SIGNED))
+ poly_offset_int off = mem_ref_offset (expr);
+ if (maybe_lt (off, 0))
return true;
if (TREE_CODE (base) == STRING_CST)
- return wi::leu_p (TREE_STRING_LENGTH (base), off);
- else if (DECL_SIZE_UNIT (base) == NULL_TREE
- || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
- || wi::leu_p (wi::to_offset (DECL_SIZE_UNIT (base)), off))
+ return maybe_le (TREE_STRING_LENGTH (base), off);
+ tree size = DECL_SIZE_UNIT (base);
+ if (size == NULL_TREE
+ || !poly_int_tree_p (size)
+ || maybe_le (wi::to_poly_offset (size), off))
return true;
/* Now we are sure the first byte of the access is inside
the object. */
diff --git a/gcc/tree-object-size.c b/gcc/tree-object-size.c
index 14cb435f07f..488676e4d20 100644
--- a/gcc/tree-object-size.c
+++ b/gcc/tree-object-size.c
@@ -210,11 +210,17 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
}
if (sz != unknown[object_size_type])
{
- offset_int dsz = wi::sub (sz, mem_ref_offset (pt_var));
- if (wi::neg_p (dsz))
- sz = 0;
- else if (wi::fits_uhwi_p (dsz))
- sz = dsz.to_uhwi ();
+ offset_int mem_offset;
+ if (mem_ref_offset (pt_var).is_constant (&mem_offset))
+ {
+ offset_int dsz = wi::sub (sz, mem_offset);
+ if (wi::neg_p (dsz))
+ sz = 0;
+ else if (wi::fits_uhwi_p (dsz))
+ sz = dsz.to_uhwi ();
+ else
+ sz = unknown[object_size_type];
+ }
else
sz = unknown[object_size_type];
}
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index 440215b7818..87df1238bfa 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -1008,8 +1008,8 @@ copy_ref_info (tree new_ref, tree old_ref)
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref))
< align)))))
{
- unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
- - mem_ref_offset (new_ref).to_short_addr ());
+ poly_uint64 inc = (mem_ref_offset (old_ref)
+ - mem_ref_offset (new_ref)).force_uhwi ();
adjust_ptr_info_misalignment (new_pi, inc);
}
else
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 2f8ff40f042..7601b19408c 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -1139,7 +1139,7 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
&& DECL_P (base2));
ptr1 = TREE_OPERAND (base1, 0);
- offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
/* If only one reference is based on a variable, they cannot alias if
the pointer access is beyond the extent of the variable access.
@@ -1295,8 +1295,8 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
&& operand_equal_p (TMR_INDEX2 (base1),
TMR_INDEX2 (base2), 0))))))
{
- offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
- offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
+ poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
offset2 + moff2, max_size2);
}
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 89ef26c7a3f..7146f8f7e2d 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -766,11 +766,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
case MEM_REF:
/* The base address gets its own vn_reference_op_s structure. */
temp.op0 = TREE_OPERAND (ref, 1);
- {
- offset_int off = mem_ref_offset (ref);
- if (wi::fits_shwi_p (off))
- temp.off = off.to_shwi ();
- }
+ if (!mem_ref_offset (ref).to_shwi (&temp.off))
+ temp.off = -1;
temp.clique = MR_DEPENDENCE_CLIQUE (ref);
temp.base = MR_DEPENDENCE_BASE (ref);
temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index 151f544baba..c04e0cc6dbf 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -1379,10 +1379,10 @@ maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
}
else if (DECL_SIZE (sym)
&& TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
- && mem_ref_offset (*tp) >= 0
- && wi::leu_p (mem_ref_offset (*tp)
- + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
- wi::to_offset (DECL_SIZE_UNIT (sym)))
+ && (known_subrange_p
+ (mem_ref_offset (*tp),
+ wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
+ 0, wi::to_offset (DECL_SIZE_UNIT (sym))))
&& (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
|| (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
== TYPE_PRECISION (TREE_TYPE (*tp))))
@@ -1433,9 +1433,8 @@ non_rewritable_mem_ref_base (tree ref)
|| TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
&& useless_type_conversion_p (TREE_TYPE (base),
TREE_TYPE (TREE_TYPE (decl)))
- && wi::fits_uhwi_p (mem_ref_offset (base))
- && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
- mem_ref_offset (base))
+ && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
+ mem_ref_offset (base))
&& multiple_of_p (sizetype, TREE_OPERAND (base, 1),
TYPE_SIZE_UNIT (TREE_TYPE (base))))
return NULL_TREE;
@@ -1445,11 +1444,10 @@ non_rewritable_mem_ref_base (tree ref)
return NULL_TREE;
/* For integral typed extracts we can use a BIT_FIELD_REF. */
if (DECL_SIZE (decl)
- && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
- && mem_ref_offset (base) >= 0
- && wi::leu_p (mem_ref_offset (base)
- + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
- wi::to_offset (DECL_SIZE_UNIT (decl)))
+ && (known_subrange_p
+ (mem_ref_offset (base),
+ wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
+ 0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
/* ??? We can't handle bitfield precision extracts without
either using an alternate type for the BIT_FIELD_REF and
then doing a conversion or possibly adjusting the offset
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 9867077cd86..7d039185446 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -3276,10 +3276,7 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
if (!integer_zerop (TREE_OPERAND (base, 1)))
{
if (off == NULL_TREE)
- {
- offset_int moff = mem_ref_offset (base);
- off = wide_int_to_tree (sizetype, moff);
- }
+ off = wide_int_to_tree (sizetype, mem_ref_offset (base));
else
off = size_binop (PLUS_EXPR, off,
fold_convert (sizetype, TREE_OPERAND (base, 1)));
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 9940f5012a1..27f7c37cbd7 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -4952,7 +4952,9 @@ vrp_prop::search_for_addr_array (tree t, location_t location)
|| TREE_CODE (el_sz) != INTEGER_CST)
return;
- idx = mem_ref_offset (t);
+ if (!mem_ref_offset (t).is_constant (&idx))
+ return;
+
idx = wi::sdiv_trunc (idx, wi::to_offset (el_sz));
if (idx < 0)
{
@@ -5266,7 +5268,6 @@ remove_range_assertions (void)
}
}
-
/* Return true if STMT is interesting for VRP. */
bool
diff --git a/gcc/tree.c b/gcc/tree.c
index 400c5c7895e..97c9f815039 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -4840,7 +4840,7 @@ build_simple_mem_ref_loc (location_t loc, tree ptr)
gcc_assert (ptr);
if (TREE_CODE (ptr) == MEM_REF)
{
- offset += mem_ref_offset (ptr).to_short_addr ();
+ offset += mem_ref_offset (ptr).force_shwi ();
ptr = TREE_OPERAND (ptr, 0);
}
else
@@ -4855,10 +4855,11 @@ build_simple_mem_ref_loc (location_t loc, tree ptr)
/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
-offset_int
+poly_offset_int
mem_ref_offset (const_tree t)
{
- return offset_int::from (wi::to_wide (TREE_OPERAND (t, 1)), SIGNED);
+ return poly_offset_int::from (wi::to_poly_wide (TREE_OPERAND (t, 1)),
+ SIGNED);
}
/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
diff --git a/gcc/varasm.c b/gcc/varasm.c
index 2b2cde97270..28e35ff6c3e 100644
--- a/gcc/varasm.c
+++ b/gcc/varasm.c
@@ -2904,7 +2904,7 @@ decode_addr_const (tree exp, struct addr_const *value)
else if (TREE_CODE (target) == MEM_REF
&& TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
{
- offset += mem_ref_offset (target).to_short_addr ();
+ offset += mem_ref_offset (target).force_shwi ();
target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
}
else if (TREE_CODE (target) == INDIRECT_REF