+2004-07-25 Roger Sayle <roger@eyesopen.com>
+
+ * convert.c (convert_to_real, convert_to_integer,
+ convert_to_complex): Replace calls to build with calls to buildN.
+ * coverage.c (tree_coverage_counter_ref): Likewise.
+ * dojump.c (do_jump): Likewise.
+ * dwarf2out.c (loc_descriptor_from_tree): Likewise.
+ * emit-rtl.c (component_ref_for_mem_expr,
+ set_mem_attributes_minus_bitpos): Likewise.
+ * explow.c (update_nonlocal_goto_save_area): Likewise.
+ * expmed.c (expand_shift, make_tree, const_mult_add_overflow_p,
+ expand_mult_add): Likewise.
+ * expr.c (emit_block_move_via_libcall, clear_storage_via_libcall,
+ store_constructor, get_inner_reference, expand_expr_real_1,
+ try_casesi, try_tablejump): Likewise.
+ * function.c (expand_function_start): Likewise.
+ * stmt.c (emit_case_bit_tests, expand_end_case_type,
+ node_has_low_bound, node_has_high_bound, emit_case_nodes): Likewise.
+ * stor-layout.c (place_union_field, layout_type): Likewise.
+ * tree.c (substitute_in_expr, tree_fold_gcd): Likewise.
+ * varasm.c (copy_constant): Likewise.
+
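The entry above records a mechanical conversion from GCC's varargs tree constructor build to the fixed-arity constructors build1 through build4. A rough sketch of the call-site pattern, drawn from the hunks below rather than compilable on its own (the tree codes and the buildN constructors exist only inside the compiler, and the operand names here are placeholders):

    /* Before: one varargs constructor; the operand count is only
       implied by the tree code.  */
    sum  = build (PLUS_EXPR, type, op0, op1);
    cond = build (COND_EXPR, type, pred, then_val, else_val);

    /* After: the operand count is explicit in the constructor name,
       so it can be checked against a fixed prototype.  */
    sum  = build2 (PLUS_EXPR, type, op0, op1);                    /* 2 operands */
    cond = build3 (COND_EXPR, type, pred, then_val, else_val);    /* 3 operands */
    aref = build4 (ARRAY_REF, type, array, index,
                   TYPE_MIN_VALUE (domain), element_size);        /* 4 operands */
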
2004-07-25 Bernardo Innocenti <bernie@develer.com>

 * c-common.c: Rename all identifiers named `new'.
newtype = TREE_TYPE (arg1);
if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype))
{
- expr = build (TREE_CODE (expr), newtype,
- fold (convert_to_real (newtype, arg0)),
- fold (convert_to_real (newtype, arg1)));
+ expr = build2 (TREE_CODE (expr), newtype,
+ fold (convert_to_real (newtype, arg0)),
+ fold (convert_to_real (newtype, arg1)));
if (newtype == type)
return expr;
}
/* If the original expression had side-effects, we must
preserve it. */
if (TREE_SIDE_EFFECTS (expr))
- return build (COMPOUND_EXPR, type, expr, t);
+ return build2 (COMPOUND_EXPR, type, expr, t);
else
return t;
}
else
typex = lang_hooks.types.signed_type (typex);
return convert (type,
- fold (build (ex_form, typex,
- convert (typex, arg0),
- convert (typex, arg1))));
+ fold (build2 (ex_form, typex,
+ convert (typex, arg0),
+ convert (typex, arg1))));
}
}
}
case COND_EXPR:
 /* It is sometimes worthwhile to push the narrowing down through
 the conditional, and doing so never loses. */
- return fold (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
- convert (type, TREE_OPERAND (expr, 1)),
- convert (type, TREE_OPERAND (expr, 2))));
+ return fold (build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
+ convert (type, TREE_OPERAND (expr, 1)),
+ convert (type, TREE_OPERAND (expr, 2))));
default:
break;
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
case CHAR_TYPE:
- return build (COMPLEX_EXPR, type, convert (subtype, expr),
- convert (subtype, integer_zero_node));
+ return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
+ convert (subtype, integer_zero_node));
case COMPLEX_TYPE:
{
if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
return expr;
else if (TREE_CODE (expr) == COMPLEX_EXPR)
- return fold (build (COMPLEX_EXPR,
- type,
- convert (subtype, TREE_OPERAND (expr, 0)),
- convert (subtype, TREE_OPERAND (expr, 1))));
+ return fold (build2 (COMPLEX_EXPR, type,
+ convert (subtype, TREE_OPERAND (expr, 0)),
+ convert (subtype, TREE_OPERAND (expr, 1))));
else
{
expr = save_expr (expr);
return
- fold (build (COMPLEX_EXPR,
- type, convert (subtype,
- fold (build1 (REALPART_EXPR,
- TREE_TYPE (TREE_TYPE (expr)),
- expr))),
- convert (subtype,
- fold (build1 (IMAGPART_EXPR,
- TREE_TYPE (TREE_TYPE (expr)),
- expr)))));
+ fold (build2 (COMPLEX_EXPR, type,
+ convert (subtype,
+ fold (build1 (REALPART_EXPR,
+ TREE_TYPE (TREE_TYPE (expr)),
+ expr))),
+ convert (subtype,
+ fold (build1 (IMAGPART_EXPR,
+ TREE_TYPE (TREE_TYPE (expr)),
+ expr)))));
}
}
no += prg_n_ctrs[counter] + fn_b_ctrs[counter];
/* "no" here is an array index, scaled to bytes later. */
- return build (ARRAY_REF, GCOV_TYPE_NODE, tree_ctr_tables[counter],
- fold_convert (domain_type, build_int_2 (no, 0)),
- TYPE_MIN_VALUE (domain_type),
- size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (GCOV_TYPE_NODE),
- size_int (TYPE_ALIGN (GCOV_TYPE_NODE))));
+ return build4 (ARRAY_REF, GCOV_TYPE_NODE, tree_ctr_tables[counter],
+ fold_convert (domain_type, build_int_2 (no, 0)),
+ TYPE_MIN_VALUE (domain_type),
+ size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (GCOV_TYPE_NODE),
+ size_int (TYPE_ALIGN (GCOV_TYPE_NODE))));
}
\f
/* Generate a checksum for a string. CHKSUM is the current
a test and can be longer if the test is eliminated. */
case PLUS_EXPR:
/* Reduce to minus. */
- exp = build (MINUS_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
- TREE_OPERAND (exp, 1))));
+ exp = build2 (MINUS_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ fold (build1 (NEGATE_EXPR,
+ TREE_TYPE (TREE_OPERAND (exp, 1)),
+ TREE_OPERAND (exp, 1))));
/* Process as MINUS. */
#endif
case MINUS_EXPR:
/* Nonzero iff operands of minus differ. */
- do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
- TREE_OPERAND (exp, 0),
- TREE_OPERAND (exp, 1)),
+ do_compare_and_jump (build2 (NE_EXPR, TREE_TYPE (exp),
+ TREE_OPERAND (exp, 0),
+ TREE_OPERAND (exp, 1)),
NE, NE, if_false_label, if_true_label);
break;
&& prefer_and_bit_test (TYPE_MODE (argtype),
TREE_INT_CST_LOW (shift)))
{
- do_jump (build (BIT_AND_EXPR, argtype, arg,
- fold (build (LSHIFT_EXPR, argtype, one, shift))),
+ do_jump (build2 (BIT_AND_EXPR, argtype, arg,
+ fold (build2 (LSHIFT_EXPR, argtype,
+ one, shift))),
if_false_label, if_true_label);
break;
}
tree exp1 = save_expr (TREE_OPERAND (exp, 1));
do_jump
(fold
- (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
+ (build2 (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
+ fold (build2 (EQ_EXPR, TREE_TYPE (exp),
fold (build1 (REALPART_EXPR,
TREE_TYPE (inner_type),
exp0)),
fold (build1 (REALPART_EXPR,
TREE_TYPE (inner_type),
exp1)))),
- fold (build (EQ_EXPR, TREE_TYPE (exp),
+ fold (build2 (EQ_EXPR, TREE_TYPE (exp),
fold (build1 (IMAGPART_EXPR,
TREE_TYPE (inner_type),
exp0)),
tree exp1 = save_expr (TREE_OPERAND (exp, 1));
do_jump
(fold
- (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
- fold (build (NE_EXPR, TREE_TYPE (exp),
+ (build2 (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
+ fold (build2 (NE_EXPR, TREE_TYPE (exp),
fold (build1 (REALPART_EXPR,
TREE_TYPE (inner_type),
exp0)),
fold (build1 (REALPART_EXPR,
TREE_TYPE (inner_type),
exp1)))),
- fold (build (NE_EXPR, TREE_TYPE (exp),
+ fold (build2 (NE_EXPR, TREE_TYPE (exp),
fold (build1 (IMAGPART_EXPR,
TREE_TYPE (inner_type),
exp0)),
/* If the target doesn't support combined unordered
compares, decompose into two comparisons. */
- cmp0 = fold (build (tcode1, TREE_TYPE (exp), op0, op1));
- cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
- exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
+ cmp0 = fold (build2 (tcode1, TREE_TYPE (exp), op0, op1));
+ cmp1 = fold (build2 (tcode2, TREE_TYPE (exp), op0, op1));
+ exp = build2 (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
do_jump (exp, if_false_label, if_true_label);
}
}
const enum tree_code code =
TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
- loc = build (COND_EXPR, TREE_TYPE (loc),
- build (code, integer_type_node,
- TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
+ loc = build3 (COND_EXPR, TREE_TYPE (loc),
+ build2 (code, integer_type_node,
+ TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
}
if (inner == TREE_OPERAND (ref, 0))
return ref;
else
- return build (COMPONENT_REF, TREE_TYPE (ref), inner, TREE_OPERAND (ref, 1),
- NULL_TREE);
+ return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
+ TREE_OPERAND (ref, 1), NULL_TREE);
}
 /* Returns 1 if both MEM_EXPRs can be considered equal
index, then convert to sizetype and multiply by the size of
the array element. */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound));
+ index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, low_bound));
off_tree = size_binop (PLUS_EXPR,
size_binop (MULT_EXPR, convert (sizetype,
first one is used for the frame pointer save; the rest are sized by
STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
of the stack save area slots. */
- t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
- integer_one_node, NULL_TREE, NULL_TREE);
+ t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
+ integer_one_node, NULL_TREE, NULL_TREE);
r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
tree type = TREE_TYPE (amount);
tree new_amount = make_tree (type, op1);
tree other_amount
- = fold (build (MINUS_EXPR, type,
- convert (type,
- build_int_2 (GET_MODE_BITSIZE (mode),
- 0)),
- amount));
+ = fold (build2 (MINUS_EXPR, type,
+ convert (type,
+ build_int_2 (GET_MODE_BITSIZE (mode),
+ 0)),
+ amount));
shifted = force_reg (mode, shifted);
}
case PLUS:
- return fold (build (PLUS_EXPR, type, make_tree (type, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1))));
+ return fold (build2 (PLUS_EXPR, type, make_tree (type, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1))));
case MINUS:
- return fold (build (MINUS_EXPR, type, make_tree (type, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1))));
+ return fold (build2 (MINUS_EXPR, type, make_tree (type, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1))));
case NEG:
return fold (build1 (NEGATE_EXPR, type, make_tree (type, XEXP (x, 0))));
case MULT:
- return fold (build (MULT_EXPR, type, make_tree (type, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1))));
+ return fold (build2 (MULT_EXPR, type, make_tree (type, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1))));
case ASHIFT:
- return fold (build (LSHIFT_EXPR, type, make_tree (type, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1))));
+ return fold (build2 (LSHIFT_EXPR, type, make_tree (type, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1))));
case LSHIFTRT:
t = lang_hooks.types.unsigned_type (type);
return fold (convert (type,
- build (RSHIFT_EXPR, t,
- make_tree (t, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1)))));
+ build2 (RSHIFT_EXPR, t,
+ make_tree (t, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1)))));
case ASHIFTRT:
t = lang_hooks.types.signed_type (type);
return fold (convert (type,
- build (RSHIFT_EXPR, t,
- make_tree (t, XEXP (x, 0)),
- make_tree (type, XEXP (x, 1)))));
+ build2 (RSHIFT_EXPR, t,
+ make_tree (t, XEXP (x, 0)),
+ make_tree (type, XEXP (x, 1)))));
case DIV:
if (TREE_CODE (type) != REAL_TYPE)
t = type;
return fold (convert (type,
- build (TRUNC_DIV_EXPR, t,
- make_tree (t, XEXP (x, 0)),
- make_tree (t, XEXP (x, 1)))));
+ build2 (TRUNC_DIV_EXPR, t,
+ make_tree (t, XEXP (x, 0)),
+ make_tree (t, XEXP (x, 1)))));
case UDIV:
t = lang_hooks.types.unsigned_type (type);
return fold (convert (type,
- build (TRUNC_DIV_EXPR, t,
- make_tree (t, XEXP (x, 0)),
- make_tree (t, XEXP (x, 1)))));
+ build2 (TRUNC_DIV_EXPR, t,
+ make_tree (t, XEXP (x, 0)),
+ make_tree (t, XEXP (x, 1)))));
case SIGN_EXTEND:
case ZERO_EXTEND:
add_type = (GET_MODE (add) == VOIDmode ? mult_type
: lang_hooks.types.type_for_mode (GET_MODE (add), unsignedp));
- result = fold (build (PLUS_EXPR, mult_type,
- fold (build (MULT_EXPR, mult_type,
- make_tree (mult_type, x),
- make_tree (mult_type, mult))),
- make_tree (add_type, add)));
+ result = fold (build2 (PLUS_EXPR, mult_type,
+ fold (build2 (MULT_EXPR, mult_type,
+ make_tree (mult_type, x),
+ make_tree (mult_type, mult))),
+ make_tree (add_type, add)));
return TREE_CONSTANT_OVERFLOW (result);
}
tree add_type = (GET_MODE (add) == VOIDmode
? type: lang_hooks.types.type_for_mode (GET_MODE (add),
unsignedp));
- tree result = fold (build (PLUS_EXPR, type,
- fold (build (MULT_EXPR, type,
- make_tree (type, x),
- make_tree (type, mult))),
- make_tree (add_type, add)));
+ tree result = fold (build2 (PLUS_EXPR, type,
+ fold (build2 (MULT_EXPR, type,
+ make_tree (type, x),
+ make_tree (type, mult))),
+ make_tree (add_type, add)));
return expand_expr (result, target, VOIDmode, 0);
}
/* Now we have to build up the CALL_EXPR itself. */
call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
+ call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
+ call_expr, arg_list, NULL_TREE);
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
/* Now we have to build up the CALL_EXPR itself. */
call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
- call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
- call_expr, arg_list, NULL_TREE);
+ call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
+ call_expr, arg_list, NULL_TREE);
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
if (BYTES_BIG_ENDIAN)
value
- = fold (build (LSHIFT_EXPR, type, value,
- build_int_2 (BITS_PER_WORD - bitsize, 0)));
+ = fold (build2 (LSHIFT_EXPR, type, value,
+ build_int_2 (BITS_PER_WORD - bitsize, 0)));
bitsize = BITS_PER_WORD;
mode = word_mode;
}
/* Assign value to element index. */
position
= convert (ssizetype,
- fold (build (MINUS_EXPR, TREE_TYPE (index),
- index, TYPE_MIN_VALUE (domain))));
+ fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, position,
convert (ssizetype,
TYPE_SIZE_UNIT (elttype)));
store_expr (value, xtarget, 0);
/* Generate a conditional jump to exit the loop. */
- exit_cond = build (LT_EXPR, integer_type_node,
- index, hi_index);
+ exit_cond = build2 (LT_EXPR, integer_type_node,
+ index, hi_index);
jumpif (exit_cond, loop_end);
/* Update the loop counter, and jump to the head of
if (minelt)
index = fold_convert (ssizetype,
- fold (build (MINUS_EXPR, index,
- TYPE_MIN_VALUE (domain))));
+ fold (build2 (MINUS_EXPR,
+ TREE_TYPE (index),
+ index,
+ TYPE_MIN_VALUE (domain))));
position = size_binop (MULT_EXPR, index,
convert (ssizetype,
index, then convert to sizetype and multiply by the size of the
array element. */
if (! integer_zerop (low_bound))
- index = fold (build (MINUS_EXPR, TREE_TYPE (index),
- index, low_bound));
+ index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
+ index, low_bound));
offset = size_binop (PLUS_EXPR, offset,
size_binop (MULT_EXPR,
if (flag_unsafe_math_optimizations && optimize && !optimize_size
&& TREE_CODE (type) == REAL_TYPE
&& !real_onep (TREE_OPERAND (exp, 0)))
- return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
- build (RDIV_EXPR, type,
- build_real (type, dconst1),
- TREE_OPERAND (exp, 1))),
+ return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
+ build2 (RDIV_EXPR, type,
+ build_real (type, dconst1),
+ TREE_OPERAND (exp, 1))),
target, tmode, modifier);
this_optab = sdiv_optab;
goto binop;
|| (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
&& operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
return expand_expr (build1 (NOP_EXPR, type,
- build (COND_EXPR, TREE_TYPE (iftrue),
- TREE_OPERAND (exp, 0),
- iftrue, iffalse)),
+ build3 (COND_EXPR, TREE_TYPE (iftrue),
+ TREE_OPERAND (exp, 0),
+ iftrue, iffalse)),
target, tmode, modifier);
}
expand_expr (TREE_OPERAND (binary_op, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
else if (binary_op)
- store_expr (build (TREE_CODE (binary_op), type,
- make_tree (type, temp),
- TREE_OPERAND (binary_op, 1)),
+ store_expr (build2 (TREE_CODE (binary_op), type,
+ make_tree (type, temp),
+ TREE_OPERAND (binary_op, 1)),
temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
else
store_expr (build1 (TREE_CODE (unary_op), type,
rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
/* We must handle the endpoints in the original mode. */
- index_expr = build (MINUS_EXPR, index_type,
- index_expr, minval);
+ index_expr = build2 (MINUS_EXPR, index_type,
+ index_expr, minval);
minval = integer_zero_node;
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
if (! HAVE_tablejump)
return 0;
- index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ index_expr = fold (build2 (MINUS_EXPR, index_type,
+ convert (index_type, index_expr),
+ convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
do_pending_stack_adjust ();
before the frame variable gets declared. Help out... */
expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
- t_save = build (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
- integer_zero_node, NULL_TREE, NULL_TREE);
+ t_save = build4 (ARRAY_REF, ptr_type_node,
+ cfun->nonlocal_goto_save_area,
+ integer_zero_node, NULL_TREE, NULL_TREE);
r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
r_save = convert_memory_address (Pmode, r_save);
else
test[i].bits++;
- lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->low, minval)), 1);
- hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->high, minval)), 1);
+ lo = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
+ n->low, minval)), 1);
+ hi = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
+ n->high, minval)), 1);
for (j = lo; j <= hi; j++)
if (j >= HOST_BITS_PER_WIDE_INT)
 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
qsort (test, count, sizeof(*test), case_bit_test_cmp);
- index_expr = fold (build (MINUS_EXPR, index_type,
- convert (index_type, index_expr),
- convert (index_type, minval)));
+ index_expr = fold (build2 (MINUS_EXPR, index_type,
+ convert (index_type, index_expr),
+ convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
do_pending_stack_adjust ();
/* Compute span of values. */
if (count != 0)
- range = fold (build (MINUS_EXPR, index_type, maxval, minval));
+ range = fold (build2 (MINUS_EXPR, index_type, maxval, minval));
if (count == 0)
{
value since that should fit in a HOST_WIDE_INT while the
actual values may not. */
HOST_WIDE_INT i_low
- = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->low, minval)), 1);
+ = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
+ n->low, minval)), 1);
HOST_WIDE_INT i_high
- = tree_low_cst (fold (build (MINUS_EXPR, index_type,
- n->high, minval)), 1);
+ = tree_low_cst (fold (build2 (MINUS_EXPR, index_type,
+ n->high, minval)), 1);
HOST_WIDE_INT i;
for (i = i_low; i <= i_high; i ++)
if (node->left)
return 0;
- low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
- node->low, integer_one_node));
+ low_minus_one = fold (build2 (MINUS_EXPR, TREE_TYPE (node->low),
+ node->low, integer_one_node));
/* If the subtraction above overflowed, we can't verify anything.
Otherwise, look for a parent that tests our value - 1. */
if (node->right)
return 0;
- high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
- node->high, integer_one_node));
+ high_plus_one = fold (build2 (PLUS_EXPR, TREE_TYPE (node->high),
+ node->high, integer_one_node));
/* If the addition above overflowed, we can't verify anything.
Otherwise, look for a parent that tests our value + 1. */
new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
NULL_RTX, unsignedp,
OPTAB_WIDEN);
- new_bound = expand_expr (fold (build (MINUS_EXPR, type,
- high, low)),
+ new_bound = expand_expr (fold (build2 (MINUS_EXPR, type,
+ high, low)),
NULL_RTX, mode, 0);
emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
if (TREE_CODE (rli->t) == UNION_TYPE)
rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
- rli->offset = fold (build (COND_EXPR, sizetype,
- DECL_QUALIFIER (field),
- DECL_SIZE_UNIT (field), rli->offset));
+ rli->offset = fold (build3 (COND_EXPR, sizetype,
+ DECL_QUALIFIER (field),
+ DECL_SIZE_UNIT (field), rli->offset));
}
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
that (possible) negative values are handled appropriately. */
length = size_binop (PLUS_EXPR, size_one_node,
convert (sizetype,
- fold (build (MINUS_EXPR,
- TREE_TYPE (lb),
- ub, lb))));
+ fold (build2 (MINUS_EXPR,
+ TREE_TYPE (lb),
+ ub, lb))));
/* Special handling for arrays of bits (for Chill). */
element_size = TYPE_SIZE (element);
&& TREE_OPERAND (exp, 1) == f)
return r;
- /* If this expression hasn't been completed let, leave it
- alone. */
 + /* If this expression hasn't been completed yet, leave it alone. */
if (TREE_CODE (inner) == PLACEHOLDER_EXPR && TREE_TYPE (inner) == 0)
return exp;
if (op0 == TREE_OPERAND (exp, 0))
return exp;
- new = fold (build (code, TREE_TYPE (exp), op0, TREE_OPERAND (exp, 1),
- NULL_TREE));
+ new = fold (build3 (COMPONENT_REF, TREE_TYPE (exp),
+ op0, TREE_OPERAND (exp, 1), NULL_TREE));
}
else
switch (TREE_CODE_CLASS (code))
return a;
if (tree_int_cst_sgn (a) == -1)
- a = fold (build (MULT_EXPR, type, a,
- convert (type, integer_minus_one_node)));
+ a = fold (build2 (MULT_EXPR, type, a,
+ convert (type, integer_minus_one_node)));
if (tree_int_cst_sgn (b) == -1)
- b = fold (build (MULT_EXPR, type, b,
- convert (type, integer_minus_one_node)));
+ b = fold (build2 (MULT_EXPR, type, b,
+ convert (type, integer_minus_one_node)));
while (1)
{
- a_mod_b = fold (build (CEIL_MOD_EXPR, type, a, b));
+ a_mod_b = fold (build2 (CEIL_MOD_EXPR, type, a, b));
if (!TREE_INT_CST_LOW (a_mod_b)
&& !TREE_INT_CST_HIGH (a_mod_b))
case PLUS_EXPR:
case MINUS_EXPR:
- return build (TREE_CODE (exp), TREE_TYPE (exp),
- copy_constant (TREE_OPERAND (exp, 0)),
- copy_constant (TREE_OPERAND (exp, 1)));
+ return build2 (TREE_CODE (exp), TREE_TYPE (exp),
+ copy_constant (TREE_OPERAND (exp, 0)),
+ copy_constant (TREE_OPERAND (exp, 1)));
case NOP_EXPR:
case CONVERT_EXPR: