+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * tree.h (fold_build_pointer_plus_loc): New helper function.
+ (fold_build_pointer_plus_hwi_loc): Likewise.
+ (fold_build_pointer_plus): Define.
+ (fold_build_pointer_plus_hwi): Likewise.
+ * builtins.c (std_gimplify_va_arg_expr): Use fold_build_pointer_plus.
+ (fold_builtin_memory_op): Likewise.
+ (fold_builtin_stpcpy): Likewise.
+ (fold_builtin_memchr): Likewise.
+ (fold_builtin_strstr): Likewise.
+ (fold_builtin_strchr): Likewise.
+ (fold_builtin_strrchr): Likewise.
+ (fold_builtin_strpbrk): Likewise.
+ (fold_builtin_strcat): Likewise.
+ (expand_builtin_memory_chk): Likewise.
+ (fold_builtin_memory_chk): Likewise.
+ * c-typeck.c (build_unary_op): Likewise.
+ * cgraphunit.c (thunk_adjust): Likewise.
+ * fold-const.c (build_range_check): Likewise.
+ (fold_binary_loc): Likewise.
+ * omp-low.c (extract_omp_for_data): Likewise.
+ (expand_omp_for_generic): Likewise.
+ (expand_omp_for_static_nochunk): Likewise.
+ (expand_omp_for_static_chunk): Likewise.
+ * tree-affine.c (add_elt_to_tree): Likewise.
+ * tree-data-ref.c (split_constant_offset_1): Likewise.
+ * tree-loop-distribution.c (generate_memset_zero): Likewise.
+ * tree-mudflap.c (mf_xform_derefs_1): Likewise.
+ * tree-predcom.c (ref_at_iteration): Likewise.
+ * tree-ssa-address.c (tree_mem_ref_addr): Likewise.
+ (add_to_parts): Likewise.
+ (create_mem_ref): Likewise.
+ * tree-ssa-loop-ivopts.c (force_expr_to_var_cost): Likewise.
+ * tree-ssa-loop-niter.c (number_of_iterations_lt_to_ne): Likewise.
+ (number_of_iterations_le): Likewise.
+ * tree-ssa-loop-prefetch.c (issue_prefetch_ref): Likewise.
+ * tree-vect-data-refs.c (vect_analyze_data_refs): Likewise.
+ (vect_create_addr_base_for_vector_ref): Likewise.
+ * tree-vect-loop-manip.c (vect_update_ivs_after_vectorizer): Likewise.
+ (vect_create_cond_for_alias_checks): Likewise.
+ * tree-vrp.c (extract_range_from_assert): Likewise.
+ * config/alpha/alpha.c (alpha_va_start): Likewise.
+ (alpha_gimplify_va_arg_1): Likewise.
+ * config/i386/i386.c (ix86_va_start): Likewise.
+ (ix86_gimplify_va_arg): Likewise.
+ * config/ia64/ia64.c (ia64_gimplify_va_arg): Likewise.
+ * config/mep/mep.c (mep_expand_va_start): Likewise.
+ (mep_gimplify_va_arg_expr): Likewise.
+ * config/mips/mips.c (mips_va_start): Likewise.
+ (mips_gimplify_va_arg_expr): Likewise.
+ * config/pa/pa.c (hppa_gimplify_va_arg_expr): Likewise.
+ * config/rs6000/rs6000.c (rs6000_va_start): Likewise.
+ (rs6000_gimplify_va_arg): Likewise.
+ * config/s390/s390.c (s390_va_start): Likewise.
+ (s390_gimplify_va_arg): Likewise.
+ * config/sh/sh.c (sh_va_start): Likewise.
+ (sh_gimplify_va_arg_expr): Likewise.
+ * config/sparc/sparc.c (sparc_gimplify_va_arg): Likewise.
+ * config/spu/spu.c (spu_va_start): Likewise.
+ (spu_gimplify_va_arg_expr): Likewise.
+ * config/stormy16/stormy16.c (xstormy16_expand_builtin_va_start):
+ Likewise.
+ (xstormy16_gimplify_va_arg_expr): Likewise.
+ * config/xtensa/xtensa.c (xtensa_va_start): Likewise.
+ (xtensa_gimplify_va_arg_expr): Likewise.
+
2011-07-19 Richard Guenther <rguenther@suse.de>
* expr.c (expand_expr_real_2): Remove TRUTH_*_EXPR handling.
&& !integer_zerop (TYPE_SIZE (type)))
{
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (valist),
- valist_tmp, size_int (boundary - 1)));
+ fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
gimplify_and_add (t, pre_p);
t = fold_convert (sizetype, valist_tmp);
rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- addr = fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (addr), addr, t);
+ addr = fold_build_pointer_plus (addr, t);
}
/* Compute new value for AP. */
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
+ t = fold_build_pointer_plus (valist_tmp, rounded_size);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- len = fold_convert_loc (loc, sizetype, len);
- dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build_pointer_plus_loc (loc, dest, len);
dest = fold_convert_loc (loc, type, dest);
if (expr)
dest = omit_one_operand_loc (loc, type, dest, expr);
call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
type = TREE_TYPE (TREE_TYPE (fndecl));
- len = fold_convert_loc (loc, sizetype, len);
- dest = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build_pointer_plus_loc (loc, dest, len);
dest = fold_convert_loc (loc, type, dest);
dest = omit_one_operand_loc (loc, type, dest, call);
return dest;
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
- size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (s1),
- s1, size_int (r - p1));
+ tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
return fold_convert_loc (loc, type, tem);
}
newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
/* Create (dst p+ strlen (dst)). */
- newdst = fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (dst), dst, newdst);
+ newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
newdst = builtin_save_expr (newdst);
call = build_call_expr_loc (loc, strcpy_fn, 2, newdst, src);
return expand_expr (dest, target, mode, EXPAND_NORMAL);
}
- expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ expr = fold_build_pointer_plus (dest, len);
return expand_expr (expr, target, mode, EXPAND_NORMAL);
}
dest, len);
else
{
- tree temp = fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (dest),
- dest, len);
+ tree temp = fold_build_pointer_plus_loc (loc, dest, len);
return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * c-common.c (pointer_int_sum): Use fold_build_pointer_plus.
+ * c-omp.c (c_finish_omp_for): Likewise.
+
2011-07-12 Eric Botcazou <ebotcazou@adacore.com>
* c-ada-spec.c (dump_nested_types): Put semi-colon after empty loop
if (resultcode == MINUS_EXPR)
intop = fold_build1_loc (loc, NEGATE_EXPR, sizetype, intop);
- ret = fold_build2_loc (loc, POINTER_PLUS_EXPR, result_type, ptrop, intop);
+ ret = fold_build_pointer_plus_loc (loc, ptrop, intop);
fold_undefer_and_ignore_overflow_warnings ();
if (TREE_CODE (incr) == POSTDECREMENT_EXPR
|| TREE_CODE (incr) == PREDECREMENT_EXPR)
t = fold_build1_loc (elocus, NEGATE_EXPR, sizetype, t);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (decl), decl, t);
+ t = fold_build_pointer_plus (decl, t);
incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
}
break;
if (val && TREE_CODE (val) == INDIRECT_REF
&& TREE_CONSTANT (TREE_OPERAND (val, 0)))
{
- tree op0 = fold_convert_loc (location, sizetype,
- fold_offsetof (arg, val)), op1;
+ tree op0 = fold_offsetof (arg, val), op1;
op1 = fold_convert_loc (location, argtype, TREE_OPERAND (val, 0));
- ret = fold_build2_loc (location, POINTER_PLUS_EXPR, argtype, op1, op0);
+ ret = fold_build_pointer_plus_loc (location, op1, op0);
goto return_build_unary_op;
}
if (this_adjusting
&& fixed_offset != 0)
{
- stmt = gimple_build_assign (ptr,
- fold_build2_loc (input_location,
- POINTER_PLUS_EXPR,
- TREE_TYPE (ptr), ptr,
- size_int (fixed_offset)));
+ stmt = gimple_build_assign
+ (ptr, fold_build_pointer_plus_hwi_loc (input_location,
+ ptr,
+ fixed_offset));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
}
/* Find the entry with the vcall offset. */
stmt = gimple_build_assign (vtabletmp2,
- fold_build2_loc (input_location,
- POINTER_PLUS_EXPR,
- TREE_TYPE (vtabletmp2),
- vtabletmp2,
- fold_convert (sizetype,
- virtual_offset)));
+ fold_build_pointer_plus_loc (input_location,
+ vtabletmp2,
+ virtual_offset));
gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
/* Get the offset itself. */
find_referenced_vars_in (stmt);
/* Adjust the `this' pointer. */
- ptr = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
- offsettmp);
+ ptr = fold_build_pointer_plus_loc (input_location, ptr, offsettmp);
}
if (!this_adjusting
mark_symbols_for_renaming (stmt);
find_referenced_vars_in (stmt);
}
- ptr = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
- size_int (fixed_offset));
+ ptr = fold_build_pointer_plus_hwi_loc (input_location,
+ ptrtmp, fixed_offset);
}
/* Emit the statement and gimplify the adjustment expression. */
if (TARGET_ABI_OPEN_VMS)
{
t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (offset + NUM_ARGS * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, offset + NUM_ARGS * UNITS_PER_WORD);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
valist, offset_field, NULL_TREE);
t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (offset));
+ t = fold_build_pointer_plus_hwi (t, offset);
t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}
/* Build the final address and force that value into a temporary. */
- addr = build2 (POINTER_PLUS_EXPR, ptr_type, fold_convert (ptr_type, base),
- fold_convert (sizetype, addend));
+ addr = fold_build_pointer_plus (fold_convert (ptr_type, base), addend);
internal_post = NULL;
gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue);
gimple_seq_add_seq (pre_p, internal_post);
ovf_rtx = cfun->machine->split_stack_varargs_pointer;
t = make_tree (type, ovf_rtx);
if (words != 0)
- t = build2 (POINTER_PLUS_EXPR, type, t,
- size_int (words * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, words * UNITS_PER_WORD);
t = build2 (MODIFY_EXPR, type, ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
type = TREE_TYPE (sav);
t = make_tree (type, frame_pointer_rtx);
if (!ix86_varargs_gpr_size)
- t = build2 (POINTER_PLUS_EXPR, type, t,
- size_int (-8 * X86_64_REGPARM_MAX));
+ t = fold_build_pointer_plus_hwi (t, -8 * X86_64_REGPARM_MAX);
t = build2 (MODIFY_EXPR, type, sav, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
if (needed_intregs)
{
/* int_addr = gpr + sav; */
- t = fold_convert (sizetype, gpr);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
+ t = fold_build_pointer_plus (sav, gpr);
gimplify_assign (int_addr, t, pre_p);
}
if (needed_sseregs)
{
/* sse_addr = fpr + sav; */
- t = fold_convert (sizetype, fpr);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
+ t = fold_build_pointer_plus (sav, fpr);
gimplify_assign (sse_addr, t, pre_p);
}
if (need_temp)
src_offset = REGNO (reg) * 8;
}
src_addr = fold_convert (addr_type, src_addr);
- src_addr = fold_build2 (POINTER_PLUS_EXPR, addr_type, src_addr,
- size_int (src_offset));
+ src_addr = fold_build_pointer_plus_hwi (src_addr, src_offset);
dest_addr = fold_convert (daddr_type, addr);
- dest_addr = fold_build2 (POINTER_PLUS_EXPR, daddr_type, dest_addr,
- size_int (prev_size));
+ dest_addr = fold_build_pointer_plus_hwi (dest_addr, prev_size);
if (cur_size == GET_MODE_SIZE (mode))
{
src = build_va_arg_indirect_ref (src_addr);
else
{
HOST_WIDE_INT align = arg_boundary / 8;
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), ovf,
- size_int (align - 1));
- t = fold_convert (sizetype, t);
+ t = fold_build_pointer_plus_hwi (ovf, align - 1);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- size_int (-align));
- t = fold_convert (TREE_TYPE (ovf), t);
+ build_int_cst (TREE_TYPE (t), -align));
}
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
gimplify_assign (addr, t, pre_p);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
- size_int (rsize * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, rsize * UNITS_PER_WORD);
gimplify_assign (unshare_expr (ovf), t, pre_p);
if (container)
if ((TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == INTEGER_TYPE)
? int_size_in_bytes (type) > 8 : TYPE_ALIGN (type) > 8 * BITS_PER_UNIT)
{
- tree t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist,
- size_int (2 * UNITS_PER_WORD - 1));
- t = fold_convert (sizetype, t);
+ tree t = fold_build_pointer_plus_hwi (valist, 2 * UNITS_PER_WORD - 1);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- size_int (-2 * UNITS_PER_WORD));
- t = fold_convert (TREE_TYPE (valist), t);
+ build_int_cst (TREE_TYPE (t), -2 * UNITS_PER_WORD));
gimplify_assign (unshare_expr (valist), t, pre_p);
}
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
- u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
- size_int (4 * ns));
+ u = fold_build_pointer_plus_hwi (u, 4 * ns);
t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
- u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
- size_int (8 * ((ns+1)/2)));
+ u = fold_build_pointer_plus_hwi (u, 8 * ((ns+1)/2));
/* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
TREE_SIDE_EFFECTS (t) = 1;
gimplify_and_add (tmp, pre_p);
}
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
- unshare_expr (next_gp), size_int (4));
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (next_gp), 4);
gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
- unshare_expr (next_cop), size_int (8));
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (next_cop), 8);
gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
gimplify_and_add (tmp, pre_p);
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
- unshare_expr (next_stack), size_int (rsize));
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (next_stack), rsize);
gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
/* - - */
words used by named arguments. */
t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
if (cum->stack_words > 0)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
- size_int (cum->stack_words * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, cum->stack_words * UNITS_PER_WORD);
t = build2 (MODIFY_EXPR, TREE_TYPE (ovfl), ovfl, t);
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
fpr_offset &= -UNITS_PER_FPVALUE;
if (fpr_offset)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
- size_int (-fpr_offset));
+ t = fold_build_pointer_plus_hwi (t, -fpr_offset);
t = build2 (MODIFY_EXPR, TREE_TYPE (ftop), ftop, t);
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
addr_rtx = top - off + (BYTES_BIG_ENDIAN ? RSIZE - SIZE : 0). */
t = fold_convert (sizetype, t);
t = fold_build1 (NEGATE_EXPR, sizetype, t);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (top), top, t);
+ t = fold_build_pointer_plus (top, t);
if (BYTES_BIG_ENDIAN && rsize > size)
- {
- u = size_int (rsize - size);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
- }
+ t = fold_build_pointer_plus_hwi (t, rsize - size);
COND_EXPR_THEN (addr) = t;
if (osize > UNITS_PER_WORD)
{
/* [9] Emit: ovfl = ((intptr_t) ovfl + osize - 1) & -osize. */
- u = size_int (osize - 1);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl),
- unshare_expr (ovfl), u);
- t = fold_convert (sizetype, t);
- u = size_int (-osize);
+ t = fold_build_pointer_plus_hwi (unshare_expr (ovfl), osize - 1);
+ u = build_int_cst (TREE_TYPE (t), -osize);
t = build2 (BIT_AND_EXPR, sizetype, t, u);
- t = fold_convert (TREE_TYPE (ovfl), t);
align = build2 (MODIFY_EXPR, TREE_TYPE (ovfl),
unshare_expr (ovfl), t);
}
u = fold_convert (TREE_TYPE (ovfl), build_int_cst (NULL_TREE, osize));
t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
if (BYTES_BIG_ENDIAN && osize > size)
- {
- u = size_int (osize - size);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
- }
+ t = fold_build_pointer_plus_hwi (t, osize - size);
/* String [9] and [10, 11] together. */
if (align)
u = fold_convert (sizetype, size_in_bytes (type));
u = fold_build1 (NEGATE_EXPR, sizetype, u);
- t = build2 (POINTER_PLUS_EXPR, valist_type, valist, u);
+ t = fold_build_pointer_plus (valist, u);
/* Align to 4 or 8 byte boundary depending on argument size. */
ofs = (8 - size) % 4;
if (ofs != 0)
- {
- u = size_int (ofs);
- t = build2 (POINTER_PLUS_EXPR, valist_type, t, u);
- }
+ t = fold_build_pointer_plus_hwi (t, ofs);
t = fold_convert (ptr, t);
t = build_va_arg_indirect_ref (t);
/* Find the overflow area. */
t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
if (words != 0)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
- size_int (words * UNITS_PER_WORD));
+ t = fold_build_pointer_plus_hwi (t, words * UNITS_PER_WORD);
t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Find the register save area. */
t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
if (cfun->machine->varargs_save_offset)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
- size_int (cfun->machine->varargs_save_offset));
+ t = fold_build_pointer_plus_hwi (t, cfun->machine->varargs_save_offset);
t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* This updates arg ptr by the amount that would be necessary
to align the zero-sized (but not zero-alignment) item. */
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (valist),
- valist_tmp, size_int (boundary - 1)));
+ fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
gimplify_and_add (t, pre_p);
t = fold_convert (sizetype, valist_tmp);
t = sav;
if (sav_ofs)
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
+ t = fold_build_pointer_plus_hwi (sav, sav_ofs);
u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), unshare_expr (reg),
build_int_cst (TREE_TYPE (reg), n_reg));
u = fold_convert (sizetype, u);
u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
+ t = fold_build_pointer_plus (t, u);
/* _Decimal32 varargs are located in the second word of the 64-bit
FP register for 32-bit binaries. */
if (!TARGET_POWERPC64
&& TARGET_HARD_FLOAT && TARGET_FPRS
&& TYPE_MODE (type) == SDmode)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
+ t = fold_build_pointer_plus_hwi (t, size);
gimplify_assign (addr, t, pre_p);
t = ovf;
if (align != 1)
{
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
- t = fold_convert (sizetype, t);
+ t = fold_build_pointer_plus_hwi (t, align - 1);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- size_int (-align));
- t = fold_convert (TREE_TYPE (ovf), t);
+ build_int_cst (TREE_TYPE (t), -align));
}
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
gimplify_assign (unshare_expr (addr), t, pre_p);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
+ t = fold_build_pointer_plus_hwi (t, size);
gimplify_assign (unshare_expr (ovf), t, pre_p);
if (lab_over)
fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
(int)n_gpr, (int)n_fpr, off);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
+ t = fold_build_pointer_plus_hwi (t, off);
t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
|| (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
{
t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
- size_int (-RETURN_REGNUM * UNITS_PER_LONG));
+ t = fold_build_pointer_plus_hwi (t, -RETURN_REGNUM * UNITS_PER_LONG);
t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
TREE_SIDE_EFFECTS (t) = 1;
t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
gimplify_and_add (t, pre_p);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
- size_int (sav_ofs));
+ t = fold_build_pointer_plus_hwi (sav, sav_ofs);
u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
+ t = fold_build_pointer_plus (t, u);
gimplify_assign (addr, t, pre_p);
t = ovf;
if (size < UNITS_PER_LONG)
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (UNITS_PER_LONG - size));
+ t = fold_build_pointer_plus_hwi (t, UNITS_PER_LONG - size);
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
gimplify_assign (addr, t, pre_p);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
- size_int (size));
+ t = fold_build_pointer_plus_hwi (t, size);
gimplify_assign (ovf, t, pre_p);
gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
nfp = 8 - nfp;
else
nfp = 0;
- u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
- size_int (UNITS_PER_WORD * nfp));
+ u = fold_build_pointer_plus_hwi (u, UNITS_PER_WORD * nfp);
t = build2 (MODIFY_EXPR, ptr_type_node, next_fp_limit, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
nint = 4 - nint;
else
nint = 0;
- u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
- size_int (UNITS_PER_WORD * nint));
+ u = fold_build_pointer_plus_hwi (u, UNITS_PER_WORD * nint);
t = build2 (MODIFY_EXPR, ptr_type_node, next_o_limit, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
gimplify_assign (unshare_expr (next_fp_tmp), valist, pre_p);
tmp = next_fp_limit;
if (size > 4 && !is_double)
- tmp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (tmp),
- unshare_expr (tmp), size_int (4 - size));
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (tmp), 4 - size);
tmp = build2 (GE_EXPR, boolean_type_node,
unshare_expr (next_fp_tmp), unshare_expr (tmp));
cmp = build3 (COND_EXPR, void_type_node, tmp,
tmp = fold_convert (sizetype, next_fp_tmp);
tmp = build2 (BIT_AND_EXPR, sizetype, tmp,
size_int (UNITS_PER_WORD));
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
- unshare_expr (next_fp_tmp), tmp);
+ tmp = fold_build_pointer_plus (unshare_expr (next_fp_tmp), tmp);
gimplify_assign (unshare_expr (next_fp_tmp), tmp, pre_p);
}
if (is_double)
}
else
{
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
- unshare_expr (next_o), size_int (rsize));
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (next_o), rsize);
tmp = build2 (GT_EXPR, boolean_type_node, tmp,
unshare_expr (next_o_limit));
tmp = build3 (COND_EXPR, void_type_node, tmp,
incr = valist;
if (align)
{
- incr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr,
- size_int (align - 1));
+ incr = fold_build_pointer_plus_hwi (incr, align - 1);
incr = fold_convert (sizetype, incr);
incr = fold_build2 (BIT_AND_EXPR, sizetype, incr,
size_int (-align));
addr = incr;
if (BYTES_BIG_ENDIAN && size < rsize)
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr,
- size_int (rsize - size));
+ addr = fold_build_pointer_plus_hwi (incr, rsize - size);
if (indirect)
{
else
addr = fold_convert (ptrtype, addr);
- incr
- = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr, size_int (rsize));
+ incr = fold_build_pointer_plus_hwi (incr, rsize);
gimplify_assign (valist, incr, post_p);
return build_va_arg_indirect_ref (addr);
/* Find the __args area. */
t = make_tree (TREE_TYPE (args), nextarg);
if (crtl->args.pretend_args_size > 0)
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (args), t,
- size_int (-STACK_POINTER_OFFSET));
+ t = fold_build_pointer_plus_hwi (t, -STACK_POINTER_OFFSET);
t = build2 (MODIFY_EXPR, TREE_TYPE (args), args, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Find the __skip area. */
t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (skip), t,
- size_int (crtl->args.pretend_args_size
- - STACK_POINTER_OFFSET));
+ t = fold_build_pointer_plus_hwi (t, (crtl->args.pretend_args_size
+ - STACK_POINTER_OFFSET));
t = build2 (MODIFY_EXPR, TREE_TYPE (skip), skip, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
tree f_args, f_skip;
tree args, skip;
HOST_WIDE_INT size, rsize;
- tree paddedsize, addr, tmp;
+ tree addr, tmp;
bool pass_by_reference_p;
f_args = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
/* build conditional expression to calculate addr. The expression
will be gimplified later. */
- paddedsize = size_int (rsize);
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (args), paddedsize);
+ tmp = fold_build_pointer_plus_hwi (unshare_expr (args), rsize);
tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
build2 (GT_EXPR, boolean_type_node, tmp, unshare_expr (skip)),
build2 (LE_EXPR, boolean_type_node, unshare_expr (args),
unshare_expr (skip)));
tmp = build3 (COND_EXPR, ptr_type_node, tmp,
- build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (skip),
- size_int (32)), unshare_expr (args));
+ fold_build_pointer_plus_hwi (unshare_expr (skip), 32),
+ unshare_expr (args));
gimplify_assign (addr, tmp, pre_p);
/* update VALIST.__args */
- tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, paddedsize);
+ tmp = fold_build_pointer_plus_hwi (addr, rsize);
gimplify_assign (unshare_expr (args), tmp, pre_p);
addr = fold_convert (build_pointer_type_for_mode (type, ptr_mode, true),
t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
u = fold_convert (TREE_TYPE (count), u);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
+ t = fold_build_pointer_plus (t, u);
t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
NULL_TREE);
gimplify_and_add (t, pre_p);
- t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
+ t = fold_build_pointer_plus (base, count_tmp);
gimplify_assign (addr, t, pre_p);
t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
fold_convert (TREE_TYPE (count), size_tree));
t = fold_convert (TREE_TYPE (t), fold (t));
t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
+ t = fold_build_pointer_plus (base, t);
gimplify_assign (addr, t, pre_p);
t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
/* Set the __va_stk member to ($arg_ptr - 32). */
u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
- u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32));
+ u = fold_build_pointer_plus_hwi (u, -32);
t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
t = fold_convert (sizetype, unshare_expr (ndx));
t = build2 (MINUS_EXPR, sizetype, t, size);
- addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (array), t);
+ addr = fold_build_pointer_plus (unshare_expr (array), t);
addr = fold_convert (build_pointer_type (type), addr);
if (indirect)
+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * call.c (build_special_member_call): Use fold_build_pointer_plus.
+ * class.c (build_base_path): Likewise.
+ (convert_to_base_statically): Likewise.
+ (dfs_accumulate_vtbl_inits): Likewise.
+ * cp-gimplify.c (cxx_omp_clause_apply_fn): Likewise.
+ * except.c (expand_start_catch_block): Likewise.
+ * init.c (expand_virtual_init): Likewise.
+ (build_new_1): Likewise.
+ (build_vec_delete_1): Likewise.
+ (build_vec_delete): Likewise.
+ * rtti.c (build_headof): Likewise.
+ (tinfo_base_init): Likewise.
+ * typeck.c (get_member_function_from_ptrfunc): Likewise.
+ (cp_build_addr_expr_1): Likewise.
+ * typeck2.c (build_m_component_ref): Likewise.
+
2011-07-18 Martin Jambor <mjambor@suse.cz>
* parser.c (cp_parser_parameter_declaration_list): Initialize
current_vtt_parm,
vtt);
gcc_assert (BINFO_SUBVTT_INDEX (binfo));
- sub_vtt = build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtt), vtt,
- BINFO_SUBVTT_INDEX (binfo));
+ sub_vtt = fold_build_pointer_plus (vtt, BINFO_SUBVTT_INDEX (binfo));
if (args == NULL)
{
tf_warning_or_error),
TREE_TYPE (TREE_TYPE (expr)));
- v_offset = build2 (POINTER_PLUS_EXPR, TREE_TYPE (v_offset),
- v_offset, fold_convert (sizetype, BINFO_VPTR_FIELD (v_binfo)));
+ v_offset = fold_build_pointer_plus (v_offset, BINFO_VPTR_FIELD (v_binfo));
v_offset = build1 (NOP_EXPR,
build_pointer_type (ptrdiff_type_node),
v_offset);
offset = fold_convert (sizetype, offset);
if (code == MINUS_EXPR)
offset = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, offset);
- expr = build2 (POINTER_PLUS_EXPR, ptr_target_type, expr, offset);
+ expr = fold_build_pointer_plus (expr, offset);
}
else
null_test = NULL;
expr_type = TREE_TYPE (expr);
if (!SAME_BINFO_TYPE_P (BINFO_TYPE (base), expr_type))
{
- tree pointer_type;
-
- pointer_type = build_pointer_type (expr_type);
-
/* We use fold_build2 and fold_convert below to simplify the trees
provided to the optimizers. It is not safe to call these functions
when processing a template because they do not handle C++-specific
gcc_assert (!processing_template_decl);
expr = cp_build_addr_expr (expr, tf_warning_or_error);
if (!integer_zerop (BINFO_OFFSET (base)))
- expr = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, pointer_type, expr,
- fold_convert (sizetype, BINFO_OFFSET (base)));
+ expr = fold_build_pointer_plus_loc (input_location,
+ expr, BINFO_OFFSET (base));
expr = fold_convert (build_pointer_type (BINFO_TYPE (base)), expr);
expr = build_fold_indirect_ref_loc (input_location, expr);
}
/* Figure out the position to which the VPTR should point. */
vtbl = build1 (ADDR_EXPR, vtbl_ptr_type_node, orig_vtbl);
- index = size_binop (PLUS_EXPR,
- size_int (non_fn_entries),
- size_int (n_inits));
index = size_binop (MULT_EXPR,
TYPE_SIZE_UNIT (vtable_entry_type),
- index);
- vtbl = build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl, index);
+ size_int (non_fn_entries + n_inits));
+ vtbl = fold_build_pointer_plus (vtbl, index);
}
if (ctor_vtbl_p)
start2 = build_fold_addr_expr_loc (input_location, start2);
end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
- end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
+ end1 = fold_build_pointer_plus (start1, end1);
p1 = create_tmp_var (TREE_TYPE (start1), NULL);
t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
append_to_statement_list (t, &ret);
- t = TYPE_SIZE_UNIT (inner_type);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
+ t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
append_to_statement_list (t, &ret);
if (arg2)
{
- t = TYPE_SIZE_UNIT (inner_type);
- t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
+ t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
append_to_statement_list (t, &ret);
}
generic exception header. */
exp = build_exc_ptr ();
exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
- exp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (exp), exp,
+ exp = fold_build_pointer_plus (exp,
fold_build1_loc (input_location,
- NEGATE_EXPR, sizetype,
- TYPE_SIZE_UNIT (TREE_TYPE (exp))));
+ NEGATE_EXPR, sizetype,
+ TYPE_SIZE_UNIT (TREE_TYPE (exp))));
exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
initialize_handler_parm (decl, exp);
return type;
/* Compute the value to use, when there's a VTT. */
vtt_parm = current_vtt_parm;
- vtbl2 = build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (vtt_parm),
- vtt_parm,
- vtt_index);
+ vtbl2 = fold_build_pointer_plus (vtt_parm, vtt_index);
vtbl2 = cp_build_indirect_ref (vtbl2, RO_NULL, tf_warning_or_error);
vtbl2 = convert (TREE_TYPE (vtbl), vtbl2);
tree size_ptr_type;
/* Adjust so we're pointing to the start of the object. */
- data_addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (alloc_node),
- alloc_node, cookie_size);
+ data_addr = fold_build_pointer_plus (alloc_node, cookie_size);
/* Store the number of bytes allocated so that we can know how
many elements to destroy later. We use the last sizeof
(size_t) bytes to store the number of elements. */
cookie_ptr = size_binop (MINUS_EXPR, cookie_size, size_in_bytes (sizetype));
- cookie_ptr = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, TREE_TYPE (alloc_node),
- alloc_node, cookie_ptr);
+ cookie_ptr = fold_build_pointer_plus_loc (input_location,
+ alloc_node, cookie_ptr);
size_ptr_type = build_pointer_type (sizetype);
cookie_ptr = fold_convert (size_ptr_type, cookie_ptr);
cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
if (targetm.cxx.cookie_has_size ())
{
/* Also store the element size. */
- cookie_ptr = build2 (POINTER_PLUS_EXPR, size_ptr_type, cookie_ptr,
+ cookie_ptr = fold_build_pointer_plus (cookie_ptr,
fold_build1_loc (input_location,
- NEGATE_EXPR, sizetype,
- size_in_bytes (sizetype)));
+ NEGATE_EXPR, sizetype,
+ size_in_bytes (sizetype)));
cookie = cp_build_indirect_ref (cookie_ptr, RO_NULL, complain);
cookie = build2 (MODIFY_EXPR, sizetype, cookie,
convert (sizetype, maxindex));
tbase = create_temporary_var (ptype);
- tbase_init = cp_build_modify_expr (tbase, NOP_EXPR,
- fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, ptype,
- fold_convert (ptype, base),
- virtual_size),
- complain);
+ tbase_init
+ = cp_build_modify_expr (tbase, NOP_EXPR,
+ fold_build_pointer_plus_loc (input_location,
+ fold_convert (ptype,
+ base),
+ virtual_size),
+ complain);
if (tbase_init == error_mark_node)
return error_mark_node;
controller = build3 (BIND_EXPR, void_type_node, tbase,
build2 (EQ_EXPR, boolean_type_node, tbase,
fold_convert (ptype, base)));
tmp = fold_build1_loc (input_location, NEGATE_EXPR, sizetype, size_exp);
- tmp = build2 (POINTER_PLUS_EXPR, ptype, tbase, tmp);
+ tmp = fold_build_pointer_plus (tbase, tmp);
tmp = cp_build_modify_expr (tbase, NOP_EXPR, tmp, complain);
if (tmp == error_mark_node)
return error_mark_node;
type = strip_array_types (TREE_TYPE (type));
cookie_addr = fold_build1_loc (input_location, NEGATE_EXPR,
sizetype, TYPE_SIZE_UNIT (sizetype));
- cookie_addr = build2 (POINTER_PLUS_EXPR,
- size_ptr_type,
- fold_convert (size_ptr_type, base),
- cookie_addr);
+ cookie_addr = fold_build_pointer_plus (fold_convert (size_ptr_type, base),
+ cookie_addr);
maxindex = cp_build_indirect_ref (cookie_addr, RO_NULL, complain);
}
else if (TREE_CODE (type) == ARRAY_TYPE)
type = cp_build_qualified_type (ptr_type_node,
cp_type_quals (TREE_TYPE (exp)));
- return build2 (POINTER_PLUS_EXPR, type, exp,
- convert_to_integer (sizetype, offset));
+ return fold_build_pointer_plus (exp, offset);
}
/* Get a bad_cast node for the program to throw...
vtable_ptr = cp_build_addr_expr (vtable_ptr, tf_warning_or_error);
/* We need to point into the middle of the vtable. */
- vtable_ptr = build2
- (POINTER_PLUS_EXPR, TREE_TYPE (vtable_ptr), vtable_ptr,
+ vtable_ptr = fold_build_pointer_plus
+ (vtable_ptr,
size_binop (MULT_EXPR,
size_int (2 * TARGET_VTABLE_DATA_ENTRY_DISTANCE),
TYPE_SIZE_UNIT (vtable_entry_type)));
return error_mark_node;
}
/* ...and then the delta in the PMF. */
- instance_ptr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (instance_ptr),
- instance_ptr, fold_convert (sizetype, delta));
+ instance_ptr = fold_build_pointer_plus (instance_ptr, delta);
/* Hand back the adjusted 'this' argument to our caller. */
*instance_ptrptr = instance_ptr;
TREE_NO_WARNING (vtbl) = 1;
/* Finally, extract the function pointer from the vtable. */
- e2 = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl,
- fold_convert (sizetype, idx));
+ e2 = fold_build_pointer_plus_loc (input_location, vtbl, idx);
e2 = cp_build_indirect_ref (e2, RO_NULL, tf_warning_or_error);
TREE_CONSTANT (e2) = 1;
{
tree type = build_pointer_type (argtype);
tree op0 = fold_convert (type, TREE_OPERAND (val, 0));
- tree op1 = fold_convert (sizetype, fold_offsetof (arg, val));
- return fold_build2 (POINTER_PLUS_EXPR, type, op0, op1);
+ tree op1 = fold_offsetof (arg, val);
+ return fold_build_pointer_plus (op0, op1);
}
/* Handle complex lvalues (when permitted)
/* Build an expression for "object + offset" where offset is the
value stored in the pointer-to-data-member. */
ptype = build_pointer_type (type);
- datum = build2 (POINTER_PLUS_EXPR, ptype,
- fold_convert (ptype, datum),
- build_nop (sizetype, component));
+ datum = fold_build_pointer_plus (fold_convert (ptype, datum), component);
datum = cp_build_indirect_ref (datum, RO_NULL, tf_warning_or_error);
/* If the object expression was an rvalue, return an rvalue. */
if (!is_lval)
low = fold_convert_loc (loc, sizetype, low);
low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
return build_range_check (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- etype, exp, low),
+ fold_build_pointer_plus_loc (loc, exp, low),
1, build_int_cst (etype, 0), value);
}
return 0;
inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
arg01, fold_convert_loc (loc, sizetype, arg1));
return fold_convert_loc (loc, type,
- fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (arg00),
- arg00, inner));
+ fold_build_pointer_plus_loc (loc,
+ arg00, inner));
}
/* PTR_CST +p CST -> CST1 */
+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * trans-expr.c (fill_with_spaces): Use fold_build_pointer_plus.
+ (gfc_trans_string_copy): Likewise.
+ * trans-intrinsic.c (gfc_conv_intrinsic_repeat): Likewise.
+ * trans-types.c (gfc_get_array_descr_info): Likewise.
+ * trans.c (gfc_build_array_ref): Likewise.
+
2011-07-19 Janus Weil <janus@gcc.gnu.org>
PR fortran/49708
fold_build2_loc (input_location, MINUS_EXPR, sizetype, i,
TYPE_SIZE_UNIT (type)));
gfc_add_modify (&loop, el,
- fold_build2_loc (input_location, POINTER_PLUS_EXPR,
- TREE_TYPE (el), el, TYPE_SIZE_UNIT (type)));
+ fold_build_pointer_plus_loc (input_location,
+ el, TYPE_SIZE_UNIT (type)));
/* Making the loop... actually loop! */
tmp = gfc_finish_block (&loop);
built_in_decls[BUILT_IN_MEMMOVE],
3, dest, src, slen);
- tmp4 = fold_build2_loc (input_location, POINTER_PLUS_EXPR, TREE_TYPE (dest),
- dest, fold_convert (sizetype, slen));
+ tmp4 = fold_build_pointer_plus_loc (input_location, dest, slen);
tmp4 = fill_with_spaces (tmp4, chartype,
fold_build2_loc (input_location, MINUS_EXPR,
TREE_TYPE(dlen), dlen, slen));
fold_convert (gfc_charlen_type_node, count));
tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_charlen_type_node,
tmp, fold_convert (gfc_charlen_type_node, size));
- tmp = fold_build2_loc (input_location, POINTER_PLUS_EXPR, pvoid_type_node,
- fold_convert (pvoid_type_node, dest),
- fold_convert (sizetype, tmp));
+ tmp = fold_build_pointer_plus_loc (input_location,
+ fold_convert (pvoid_type_node, dest), tmp);
tmp = build_call_expr_loc (input_location,
built_in_decls[BUILT_IN_MEMMOVE], 3, tmp, src,
fold_build2_loc (input_location, MULT_EXPR,
t = base_decl;
if (!integer_zerop (data_off))
- t = build2 (POINTER_PLUS_EXPR, ptype, t, data_off);
+ t = fold_build_pointer_plus (t, data_off);
t = build1 (NOP_EXPR, build_pointer_type (ptr_type_node), t);
info->data_location = build1 (INDIRECT_REF, ptr_type_node, t);
if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE)
for (dim = 0; dim < rank; dim++)
{
- t = build2 (POINTER_PLUS_EXPR, ptype, base_decl,
- size_binop (PLUS_EXPR, dim_off, lower_suboff));
+ t = fold_build_pointer_plus (base_decl,
+ size_binop (PLUS_EXPR,
+ dim_off, lower_suboff));
t = build1 (INDIRECT_REF, gfc_array_index_type, t);
info->dimen[dim].lower_bound = t;
- t = build2 (POINTER_PLUS_EXPR, ptype, base_decl,
- size_binop (PLUS_EXPR, dim_off, upper_suboff));
+ t = fold_build_pointer_plus (base_decl,
+ size_binop (PLUS_EXPR,
+ dim_off, upper_suboff));
t = build1 (INDIRECT_REF, gfc_array_index_type, t);
info->dimen[dim].upper_bound = t;
if (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE
info->dimen[dim].lower_bound,
info->dimen[dim].upper_bound);
}
- t = build2 (POINTER_PLUS_EXPR, ptype, base_decl,
- size_binop (PLUS_EXPR, dim_off, stride_suboff));
+ t = fold_build_pointer_plus (base_decl,
+ size_binop (PLUS_EXPR,
+ dim_off, stride_suboff));
t = build1 (INDIRECT_REF, gfc_array_index_type, t);
t = build2 (MULT_EXPR, gfc_array_index_type, t, elem_size);
info->dimen[dim].stride = t;
gfc_array_index_type,
offset, GFC_DECL_SPAN(decl));
tmp = gfc_build_addr_expr (pvoid_type_node, base);
- tmp = fold_build2_loc (input_location, POINTER_PLUS_EXPR,
- pvoid_type_node, tmp,
- fold_convert (sizetype, offset));
+ tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
tmp = fold_convert (build_pointer_type (type), tmp);
if (!TYPE_STRING_FLAG (type))
tmp = build_fold_indirect_ref_loc (input_location, tmp);
+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * builtins.c (build_addr_sum): Use fold_build_pointer_plus.
+ * class.c (make_class_data): Likewise.
+ (build_symbol_entry): Likewise.
+ * except.c (build_exception_object_ref): Likewise.
+ * expr.c (build_java_arrayaccess): Likewise.
+ (build_field_ref): Likewise.
+ (build_known_method_ref): Likewise.
+ (build_invokevirtual): Likewise.
+
2011-07-06 Richard Guenther <rguenther@suse.de>
* decl.c (java_init_decl_processing):
build_addr_sum (tree type, tree addr, tree offset)
{
tree ptr_type = build_pointer_type (type);
- return fold_build2 (POINTER_PLUS_EXPR,
- ptr_type,
- fold_convert (ptr_type, addr),
- fold_convert (sizetype, offset));
+ return fold_build_pointer_plus (fold_convert (ptr_type, addr), offset);
}
/* Make sure that this-arg is non-NULL. This is a security check. */
PUSH_FIELD_VALUE (v1, "vtable",
(flag_indirect_classes
? null_pointer_node
- : build2 (POINTER_PLUS_EXPR, dtable_ptr_type,
- build1 (ADDR_EXPR, dtable_ptr_type,
- class_dtable_decl),
- dtable_start_offset)));
+ : fold_build_pointer_plus
+ (build1 (ADDR_EXPR, dtable_ptr_type,
+ class_dtable_decl),
+ dtable_start_offset)));
if (! flag_hash_synchronization)
PUSH_FIELD_VALUE (v1, "sync_info", null_pointer_node);
FINISH_RECORD_CONSTRUCTOR (temp, v1, object_type_node);
PUSH_FIELD_VALUE (v2, "vtable",
(flag_indirect_dispatch || dtable_decl == NULL_TREE
? null_pointer_node
- : build2 (POINTER_PLUS_EXPR, dtable_ptr_type,
- build1 (ADDR_EXPR, dtable_ptr_type,
- dtable_decl),
- dtable_start_offset)));
+ : fold_build_pointer_plus
+ (build1 (ADDR_EXPR, dtable_ptr_type,
+ dtable_decl),
+ dtable_start_offset)));
add_table_and_syms (&v2, TYPE_OTABLE_METHODS (type),
"otable", TYPE_OTABLE_DECL (type), otable_ptr_type,
"otable_syms", TYPE_OTABLE_SYMS_DECL (type));
system that this is a "special" symbol, i.e. one that should
bypass access controls. */
if (special != NULL_TREE)
- signature = build2 (POINTER_PLUS_EXPR, TREE_TYPE (signature), signature,
- fold_convert (sizetype, special));
+ signature = fold_build_pointer_plus (signature, special);
return build_symbol_table_entry (clname, name, signature);
}
The java object is immediately before the generic exception header. */
obj = build_exception_object_var ();
obj = fold_convert (build_pointer_type (type), obj);
- obj = build2 (POINTER_PLUS_EXPR, TREE_TYPE (obj), obj,
+ obj = fold_build_pointer_plus (obj,
fold_build1 (NEGATE_EXPR, sizetype,
TYPE_SIZE_UNIT (TREE_TYPE (obj))));
obj = build1 (INDIRECT_REF, type, obj);
size_exp);
/* Sum the byte offset and the address of the data field. */
- node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
+ node = fold_build_pointer_plus (node, index);
/* Finally, return
1, otable_index),
field_offset);
- field_offset = fold (convert (sizetype, field_offset));
self_value = java_check_reference (self_value, check);
- address
- = fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (self_value),
- self_value, field_offset);
+ address = fold_build_pointer_plus (self_value, field_offset);
address = fold_convert (build_pointer_type (TREE_TYPE (field_decl)),
address);
return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
method_index++;
}
method_index *= int_size_in_bytes (method_type_node);
- ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
- ref, size_int (method_index));
+ ref = fold_build_pointer_plus_hwi (ref, method_index);
ref = build1 (INDIRECT_REF, method_type_node, ref);
func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
ref, lookup_field (&method_type_node, ncode_ident),
size_int (TARGET_VTABLE_USES_DESCRIPTORS));
}
- func = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dtable), dtable,
- convert (sizetype, method_index));
+ func = fold_build_pointer_plus (dtable, method_index);
if (TARGET_VTABLE_USES_DESCRIPTORS)
func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
+2011-07-19 Richard Guenther <rguenther@suse.de>
+
+ * objc-next-runtime-abi-02.c (objc_v2_build_ivar_ref):
+ Use fold_build_pointer_plus.
+ (objc2_build_ehtype_initializer): Likewise.
+
2011-07-11 Nicola Pero <nicola.pero@meta-innovation.com>
* objc-encoding.h (obstack.h): Do not include.
string_type_node, build_fold_addr_expr (datum));
/* (char*)datum + offset */
- expr = fold_build2_loc (input_location,
- POINTER_PLUS_EXPR, string_type_node, expr, offset);
+ expr = fold_build_pointer_plus_loc (input_location, expr, offset);
/* (ftype*)((char*)datum + offset) */
expr = build_c_cast (input_location, build_pointer_type (ftype), expr);
}
addr = build_fold_addr_expr_with_type (next_v2_ehvtable_decl, ptr_type_node);
offs = size_int (2 * int_cst_value (TYPE_SIZE_UNIT (ptr_type_node)));
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, offs);
+ addr = fold_build_pointer_plus (addr, offs);
CONSTRUCTOR_APPEND_ELT (initlist, NULL_TREE, addr);
break;
case LE_EXPR:
if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
- loop->n2 = fold_build2_loc (loc,
- POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
- loop->n2, size_one_node);
+ loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, 1);
else
loop->n2 = fold_build2_loc (loc,
PLUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
break;
case GE_EXPR:
if (POINTER_TYPE_P (TREE_TYPE (loop->n2)))
- loop->n2 = fold_build2_loc (loc,
- POINTER_PLUS_EXPR, TREE_TYPE (loop->n2),
- loop->n2, size_int (-1));
+ loop->n2 = fold_build_pointer_plus_hwi_loc (loc, loop->n2, -1);
else
loop->n2 = fold_build2_loc (loc,
MINUS_EXPR, TREE_TYPE (loop->n2), loop->n2,
t = fold_build2 (MULT_EXPR, itype, t,
fold_convert (itype, fd->loops[i].step));
if (POINTER_TYPE_P (vtype))
- t = fold_build2 (POINTER_PLUS_EXPR, vtype,
- fd->loops[i].n1, fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loops[i].n1, t);
else
t = fold_build2 (PLUS_EXPR, itype, fd->loops[i].n1, t);
t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
vback = gimple_omp_continue_control_def (stmt);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
- fold_convert (sizetype, fd->loop.step));
+ t = fold_build_pointer_plus (vmain, fd->loop.step);
else
t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
set_immediate_dominator (CDI_DOMINATORS, bb, last_bb);
if (POINTER_TYPE_P (vtype))
- t = fold_build2 (POINTER_PLUS_EXPR, vtype,
- fd->loops[i].v,
- fold_convert (sizetype, fd->loops[i].step));
+ t = fold_build_pointer_plus (fd->loops[i].v, fd->loops[i].step);
else
t = fold_build2 (PLUS_EXPR, vtype, fd->loops[i].v,
fd->loops[i].step);
t = fold_convert (itype, s0);
t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
- fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loop.n1, t);
else
t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
- fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loop.n1, t);
else
t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
e = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
vback = gimple_omp_continue_control_def (stmt);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, vmain,
- fold_convert (sizetype, fd->loop.step));
+ t = fold_build_pointer_plus (vmain, fd->loop.step);
else
t = fold_build2 (PLUS_EXPR, type, vmain, fd->loop.step);
t = force_gimple_operand_gsi (&gsi, t, false, NULL_TREE,
t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
- fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loop.n1, t);
else
t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
v_extra = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
t = fold_convert (itype, s0);
t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
- fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loop.n1, t);
else
t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
t = force_gimple_operand_gsi (&si, t, false, NULL_TREE,
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, fd->loop.step);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, fd->loop.n1,
- fold_convert (sizetype, t));
+ t = fold_build_pointer_plus (fd->loop.n1, t);
else
t = fold_build2 (PLUS_EXPR, type, t, fd->loop.n1);
e = force_gimple_operand_gsi (&si, t, true, NULL_TREE,
v_back = gimple_omp_continue_control_def (stmt);
if (POINTER_TYPE_P (type))
- t = fold_build2 (POINTER_PLUS_EXPR, type, v_main,
- fold_convert (sizetype, fd->loop.step));
+ t = fold_build_pointer_plus (v_main, fd->loop.step);
else
t = fold_build2 (PLUS_EXPR, type, v_main, fd->loop.step);
stmt = gimple_build_assign (v_back, t);
return fold_convert (type, elt);
if (POINTER_TYPE_P (type))
- return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
+ return fold_build_pointer_plus (expr, elt);
return fold_build2 (PLUS_EXPR, type, expr, elt);
}
if (POINTER_TYPE_P (type))
{
elt = fold_build1 (NEGATE_EXPR, type1, elt);
- return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
+ return fold_build_pointer_plus (expr, elt);
}
return fold_build2 (MINUS_EXPR, type, expr, elt);
}
{
if (code == MINUS_EXPR)
elt = fold_build1 (NEGATE_EXPR, type1, elt);
- return fold_build2 (POINTER_PLUS_EXPR, type, expr, elt);
+ return fold_build_pointer_plus (expr, elt);
}
return fold_build2 (code, type, expr, elt);
}
split_constant_offset (poffset, &poffset, &off1);
off0 = size_binop (PLUS_EXPR, off0, off1);
if (POINTER_TYPE_P (TREE_TYPE (base)))
- base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (base),
- base, fold_convert (sizetype, poffset));
+ base = fold_build_pointer_plus (base, poffset);
else
base = fold_build2 (PLUS_EXPR, TREE_TYPE (base), base,
fold_convert (TREE_TYPE (base), poffset));
TYPE_SIZE_UNIT (TREE_TYPE (op0)));
}
- addr_base = fold_build2_loc (loc, POINTER_PLUS_EXPR,
- TREE_TYPE (DR_BASE_ADDRESS (dr)),
- DR_BASE_ADDRESS (dr), addr_base);
+ addr_base = fold_build_pointer_plus_loc (loc,
+ DR_BASE_ADDRESS (dr), addr_base);
mem = force_gimple_operand (addr_base, &stmts, true, NULL);
gimple_seq_add_seq (&stmt_list, stmts);
elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
elt);
addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
- addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
- addr, fold_convert_loc (location, sizetype,
- byte_position (field)));
+ addr = fold_build_pointer_plus_loc (location,
+ addr, byte_position (field));
}
else
addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
case INDIRECT_REF:
addr = TREE_OPERAND (t, 0);
base = addr;
- limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2_loc (location,
- POINTER_PLUS_EXPR, ptr_type_node, base,
- size),
- size_int (-1));
+ limit = fold_build_pointer_plus_hwi_loc
+ (location, fold_build_pointer_plus_loc (location, base, size), -1);
break;
case MEM_REF:
- addr = fold_build2_loc (location, POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 0)),
- TREE_OPERAND (t, 0),
- fold_convert (sizetype, TREE_OPERAND (t, 1)));
+ addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
+ TREE_OPERAND (t, 1));
base = addr;
- limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2_loc (location,
- POINTER_PLUS_EXPR, ptr_type_node, base,
- size),
- size_int (-1));
+ limit = fold_build_pointer_plus_hwi_loc (location,
+ fold_build_pointer_plus_loc (location,
+ base, size), -1);
break;
case TARGET_MEM_REF:
addr = tree_mem_ref_addr (ptr_type_node, t);
base = addr;
- limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2_loc (location,
- POINTER_PLUS_EXPR, ptr_type_node, base,
- size),
- size_int (-1));
+ limit = fold_build_pointer_plus_hwi_loc (location,
+ fold_build_pointer_plus_loc (location,
+ base, size), -1);
break;
case ARRAY_RANGE_REF:
addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
addr = convert (ptr_type_node, addr);
- addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
- ptr_type_node, addr, ofs);
+ addr = fold_build_pointer_plus_loc (location, addr, ofs);
base = addr;
- limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
- fold_build2_loc (location,
- POINTER_PLUS_EXPR, ptr_type_node,
- base, size),
- size_int (-1));
+ limit = fold_build_pointer_plus_hwi_loc (location,
+ fold_build_pointer_plus_loc (location,
+ base, size), -1);
}
break;
{
val = fold_build2 (MULT_EXPR, sizetype, iv.step,
size_int (iter));
- val = fold_build2 (POINTER_PLUS_EXPR, type, iv.base, val);
+ val = fold_build_pointer_plus (iv.base, val);
}
else
{
}
if (addr_off)
- addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
+ addr = fold_build_pointer_plus (addr_base, addr_off);
else
addr = addr_base;
/* Add ELT to base. */
type = TREE_TYPE (parts->base);
if (POINTER_TYPE_P (type))
- parts->base = fold_build2 (POINTER_PLUS_EXPR, type,
- parts->base,
- fold_convert (sizetype, elt));
+ parts->base = fold_build_pointer_plus (parts->base, elt);
else
parts->base = fold_build2 (PLUS_EXPR, type,
parts->base, elt);
tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
tree mem_ref, tmp;
- tree atype;
struct mem_address parts;
addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
if (parts.index)
{
- atype = TREE_TYPE (tmp);
parts.base = force_gimple_operand_gsi_1 (gsi,
- fold_build2 (POINTER_PLUS_EXPR, atype,
- tmp,
- fold_convert (sizetype, parts.base)),
+ fold_build_pointer_plus (tmp, parts.base),
is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
}
else
/* Add index to base. */
if (parts.base)
{
- atype = TREE_TYPE (parts.base);
parts.base = force_gimple_operand_gsi_1 (gsi,
- fold_build2 (POINTER_PLUS_EXPR, atype,
- parts.base,
- parts.index),
+ fold_build_pointer_plus (parts.base, parts.index),
is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
}
else
/* Try adding offset to base. */
if (parts.base)
{
- atype = TREE_TYPE (parts.base);
parts.base = force_gimple_operand_gsi_1 (gsi,
- fold_build2 (POINTER_PLUS_EXPR, atype,
- parts.base,
- fold_convert (sizetype, parts.offset)),
+ fold_build_pointer_plus (parts.base, parts.offset),
is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
}
else
symbol_cost[i] = computation_cost (addr, i) + 1;
address_cost[i]
- = computation_cost (build2 (POINTER_PLUS_EXPR, type,
- addr,
- build_int_cst (sizetype, 2000)), i) + 1;
+ = computation_cost (fold_build_pointer_plus_hwi (addr, 2000), i) + 1;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "force_expr_to_var_cost %s costs:\n", i ? "speed" : "size");
else if (POINTER_TYPE_P (type))
noloop = fold_build2 (GT_EXPR, boolean_type_node,
iv0->base,
- fold_build2 (POINTER_PLUS_EXPR, type,
- iv1->base, tmod));
+ fold_build_pointer_plus (iv1->base, tmod));
else
noloop = fold_build2 (GT_EXPR, boolean_type_node,
iv0->base,
noloop = boolean_false_node;
else if (POINTER_TYPE_P (type))
noloop = fold_build2 (GT_EXPR, boolean_type_node,
- fold_build2 (POINTER_PLUS_EXPR, type,
- iv0->base,
- fold_build1 (NEGATE_EXPR,
- type1, tmod)),
+ fold_build_pointer_plus (iv0->base,
+ fold_build1 (NEGATE_EXPR,
+ type1, tmod)),
iv1->base);
else
noloop = fold_build2 (GT_EXPR, boolean_type_node,
if (integer_nonzerop (iv0->step))
{
if (POINTER_TYPE_P (type))
- iv1->base = fold_build2 (POINTER_PLUS_EXPR, type, iv1->base,
- build_int_cst (type1, 1));
+ iv1->base = fold_build_pointer_plus_hwi (iv1->base, 1);
else
iv1->base = fold_build2 (PLUS_EXPR, type1, iv1->base,
build_int_cst (type1, 1));
}
else if (POINTER_TYPE_P (type))
- iv0->base = fold_build2 (POINTER_PLUS_EXPR, type, iv0->base,
- fold_build1 (NEGATE_EXPR, type1,
- build_int_cst (type1, 1)));
+ iv0->base = fold_build_pointer_plus_hwi (iv0->base, -1);
else
iv0->base = fold_build2 (MINUS_EXPR, type1,
iv0->base, build_int_cst (type1, 1));
/* Determine the address to prefetch. */
delta = (ahead + ap * ref->prefetch_mod) *
int_cst_value (ref->group->step);
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
- addr_base, size_int (delta));
+ addr = fold_build_pointer_plus_hwi (addr_base, delta);
addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true, NULL,
true, GSI_SAME_STMT);
}
forward = fold_build2 (MULT_EXPR, sizetype,
fold_convert (sizetype, ref->group->step),
fold_convert (sizetype, size_int (ahead)));
- addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr_base,
- forward);
+ addr = fold_build_pointer_plus (addr_base, forward);
addr = force_gimple_operand_gsi (&bsi, unshare_expr (addr), true,
NULL, true, GSI_SAME_STMT);
}
inner-loop: *(BASE+INIT). (The first location is actually
BASE+INIT+OFFSET, but we add OFFSET separately later). */
tree inner_base = build_fold_indirect_ref
- (fold_build2 (POINTER_PLUS_EXPR,
- TREE_TYPE (base), base,
- fold_convert (sizetype, init)));
+ (fold_build_pointer_plus (base, init));
if (vect_print_dump_info (REPORT_DETAILS))
{
/* base + base_offset */
if (loop_vinfo)
- addr_base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (data_ref_base),
- data_ref_base, base_offset);
+ addr_base = fold_build_pointer_plus (data_ref_base, base_offset);
else
{
addr_base = build1 (ADDR_EXPR,
fold_convert (TREE_TYPE (step_expr), niters),
step_expr);
if (POINTER_TYPE_P (TREE_TYPE (init_expr)))
- ni = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (init_expr),
- init_expr,
- fold_convert (sizetype, off));
+ ni = fold_build_pointer_plus (init_expr, off);
else
ni = fold_build2 (PLUS_EXPR, TREE_TYPE (init_expr),
init_expr,
}
seg_a_min = addr_base_a;
- seg_a_max = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr_base_a),
- addr_base_a, segment_length_a);
+ seg_a_max = fold_build_pointer_plus (addr_base_a, segment_length_a);
if (tree_int_cst_compare (DR_STEP (dr_a), size_zero_node) < 0)
seg_a_min = seg_a_max, seg_a_max = addr_base_a;
seg_b_min = addr_base_b;
- seg_b_max = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr_base_b),
- addr_base_b, segment_length_b);
+ seg_b_max = fold_build_pointer_plus (addr_base_b, segment_length_b);
if (tree_int_cst_compare (DR_STEP (dr_b), size_zero_node) < 0)
seg_b_min = seg_b_max, seg_b_max = addr_base_b;
anti_max,
build_int_cst (TREE_TYPE (var_vr->min), 1));
else
- min = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (var_vr->min),
- anti_max, size_int (1));
+ min = fold_build_pointer_plus_hwi (anti_max, 1);
max = real_max;
set_value_range (vr_p, VR_RANGE, min, max, vr_p->equiv);
}
anti_min,
build_int_cst (TREE_TYPE (var_vr->min), 1));
else
- max = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (var_vr->min),
- anti_min,
- size_int (-1));
+ max = fold_build_pointer_plus_hwi (anti_min, -1);
min = real_min;
set_value_range (vr_p, VR_RANGE, min, max, vr_p->equiv);
}
|| code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
+/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
+static inline tree
+fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
+{
+ return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
+ ptr, fold_convert_loc (loc, sizetype, off));
+}
+#define fold_build_pointer_plus(p,o) \
+ fold_build_pointer_plus_loc (UNKNOWN_LOCATION, p, o)
+
+/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by the constant OFF.  */
+static inline tree
+fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
+{
+ return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
+ ptr, size_int (off));
+}
+#define fold_build_pointer_plus_hwi(p,o) \
+ fold_build_pointer_plus_hwi_loc (UNKNOWN_LOCATION, p, o)
/* In builtins.c */
extern tree fold_call_expr (location_t, tree, bool);