X-Git-Url: https://git.libre-soc.org/?a=blobdiff_plain;f=gcc%2Fgimple-fold.c;h=89d1ac993f6de78e07da3fa3675d0d592f7059cf;hb=862d0b359fb98790d5fad654eecf471228d8bb77;hp=c98fd6a0d40d453de3692eb02d2bc5c0c5c6feee;hpb=0e8b84ec02f16c1560f81eaf971f4deb8e3223ad;p=gcc.git diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c index c98fd6a0d40..89d1ac993f6 100644 --- a/gcc/gimple-fold.c +++ b/gcc/gimple-fold.c @@ -1,5 +1,5 @@ /* Statement simplification on GIMPLE. - Copyright (C) 2010, 2011 Free Software Foundation, Inc. + Copyright (C) 2010-2013 Free Software Foundation, Inc. Split out from tree-ssa-ccp.c. This file is part of GCC. @@ -25,16 +25,25 @@ along with GCC; see the file COPYING3. If not see #include "tree.h" #include "flags.h" #include "function.h" -#include "tree-dump.h" -#include "tree-flow.h" -#include "tree-pass.h" +#include "dumpfile.h" +#include "bitmap.h" +#include "gimple.h" +#include "gimple-ssa.h" +#include "tree-ssanames.h" +#include "tree-into-ssa.h" +#include "tree-dfa.h" +#include "tree-ssa.h" #include "tree-ssa-propagate.h" #include "target.h" -#include "gimple-fold.h" +#include "ipa-utils.h" +#include "gimple-pretty-print.h" +#include "tree-ssa-address.h" +#include "langhooks.h" /* Return true when DECL can be referenced from current unit. - We can get declarations that are not possible to reference for - various reasons: + FROM_DECL (if non-null) specify constructor of variable DECL was taken from. + We can get declarations that are not possible to reference for various + reasons: 1) When analyzing C++ virtual tables. C++ virtual tables do have known constructors even @@ -48,31 +57,52 @@ along with GCC; see the file COPYING3. If not see that has no corresponding callgraph/varpool node declaring the body. 3) COMDAT functions referred by external vtables that - we devirtualize only during final copmilation stage. + we devirtualize only during final compilation stage. At this time we already decided that we will not output the function body and thus we can't reference the symbol directly. */ static bool -can_refer_decl_in_current_unit_p (tree decl) +can_refer_decl_in_current_unit_p (tree decl, tree from_decl) { struct varpool_node *vnode; struct cgraph_node *node; + symtab_node *snode; - if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) + if (DECL_ABSTRACT (decl)) + return false; + + /* We are concerned only about static/external vars and functions. */ + if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) + || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL)) return true; - /* External flag is set, so we deal with C++ reference - to static object from other file. */ - if (DECL_EXTERNAL (decl) && TREE_STATIC (decl) - && TREE_CODE (decl) == VAR_DECL) + + /* Static objects can be referred only if they was not optimized out yet. */ + if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl)) { - /* Just be sure it is not big in frontend setting - flags incorrectly. Those variables should never - be finalized. */ - gcc_checking_assert (!(vnode = varpool_get_node (decl)) - || !vnode->finalized); - return false; + snode = symtab_get_node (decl); + if (!snode) + return false; + node = dyn_cast (snode); + return !node || !node->global.inlined_to; } + + /* We will later output the initializer, so we can refer to it. + So we are concerned only when DECL comes from initializer of + external var. 
*/ + if (!from_decl + || TREE_CODE (from_decl) != VAR_DECL + || !DECL_EXTERNAL (from_decl) + || (flag_ltrans + && symtab_get_node (from_decl)->in_other_partition)) + return true; + /* We are folding reference from external vtable. The vtable may reffer + to a symbol keyed to other compilation unit. The other compilation + unit may be in separate DSO and the symbol may be hidden. */ + if (DECL_VISIBILITY_SPECIFIED (decl) + && DECL_EXTERNAL (decl) + && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition)) + return false; /* When function is public, we always can introduce new reference. Exception are the COMDAT functions where introducing a direct reference imply need to include function body in the curren tunit. */ @@ -81,14 +111,19 @@ can_refer_decl_in_current_unit_p (tree decl) /* We are not at ltrans stage; so don't worry about WHOPR. Also when still gimplifying all referred comdat functions will be produced. - ??? as observed in PR20991 for already optimized out comdat virtual functions - we may not neccesarily give up because the copy will be output elsewhere when - corresponding vtable is output. */ + + As observed in PR20991 for already optimized out comdat virtual functions + it may be tempting to not necessarily give up because the copy will be + output elsewhere when corresponding vtable is output. + This is however not possible - ABI specify that COMDATs are output in + units where they are used and when the other unit was compiled with LTO + it is possible that vtable was kept public while the function itself + was privatized. */ if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready)) return true; - /* If we already output the function body, we are safe. */ - if (TREE_ASM_WRITTEN (decl)) - return true; + + /* OK we are seeing either COMDAT or static variable. In this case we must + check that the definition is still around so we can refer it. */ if (TREE_CODE (decl) == FUNCTION_DECL) { node = cgraph_get_node (decl); @@ -97,50 +132,83 @@ can_refer_decl_in_current_unit_p (tree decl) The second is important when devirtualization happens during final compilation stage when making a new reference no longer makes callee to be compiled. */ - if (!node || !node->analyzed || node->global.inlined_to) - return false; + if (!node || !node->definition || node->global.inlined_to) + { + gcc_checking_assert (!TREE_ASM_WRITTEN (decl)); + return false; + } } else if (TREE_CODE (decl) == VAR_DECL) { vnode = varpool_get_node (decl); - if (!vnode || !vnode->finalized) - return false; + if (!vnode || !vnode->definition) + { + gcc_checking_assert (!TREE_ASM_WRITTEN (decl)); + return false; + } } return true; } /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into - acceptable form for is_gimple_min_invariant. */ + acceptable form for is_gimple_min_invariant. + FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. 
*/ tree -canonicalize_constructor_val (tree cval) +canonicalize_constructor_val (tree cval, tree from_decl) { + tree orig_cval = cval; STRIP_NOPS (cval); - if (TREE_CODE (cval) == POINTER_PLUS_EXPR) + if (TREE_CODE (cval) == POINTER_PLUS_EXPR + && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST) { - tree t = maybe_fold_offset_to_address (EXPR_LOCATION (cval), - TREE_OPERAND (cval, 0), - TREE_OPERAND (cval, 1), - TREE_TYPE (cval)); - if (t) - cval = t; + tree ptr = TREE_OPERAND (cval, 0); + if (is_gimple_min_invariant (ptr)) + cval = build1_loc (EXPR_LOCATION (cval), + ADDR_EXPR, TREE_TYPE (ptr), + fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)), + ptr, + fold_convert (ptr_type_node, + TREE_OPERAND (cval, 1)))); } if (TREE_CODE (cval) == ADDR_EXPR) { - tree base = get_base_address (TREE_OPERAND (cval, 0)); + tree base = NULL_TREE; + if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR) + { + base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0)); + if (base) + TREE_OPERAND (cval, 0) = base; + } + else + base = get_base_address (TREE_OPERAND (cval, 0)); + if (!base) + return NULL_TREE; - if (base - && (TREE_CODE (base) == VAR_DECL - || TREE_CODE (base) == FUNCTION_DECL) - && !can_refer_decl_in_current_unit_p (base)) + if ((TREE_CODE (base) == VAR_DECL + || TREE_CODE (base) == FUNCTION_DECL) + && !can_refer_decl_in_current_unit_p (base, from_decl)) return NULL_TREE; - if (cfun && base && TREE_CODE (base) == VAR_DECL) - add_referenced_var (base); + if (TREE_CODE (base) == VAR_DECL) + TREE_ADDRESSABLE (base) = 1; + else if (TREE_CODE (base) == FUNCTION_DECL) + { + /* Make sure we create a cgraph node for functions we'll reference. + They can be non-existent if the reference comes from an entry + of an external vtable for example. */ + cgraph_get_create_real_symbol_node (base); + } /* Fixup types in global initializers. */ if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0))) cval = build_fold_addr_expr (TREE_OPERAND (cval, 0)); + + if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval))) + cval = fold_convert (TREE_TYPE (orig_cval), cval); + return cval; } - return cval; + if (TREE_OVERFLOW_P (cval)) + return drop_tree_overflow (cval); + return orig_cval; } /* If SYM is a constant variable with known value, return the value. @@ -149,12 +217,12 @@ canonicalize_constructor_val (tree cval) tree get_symbol_constant_value (tree sym) { - if (const_value_known_p (sym)) + tree val = ctor_for_folding (sym); + if (val != error_mark_node) { - tree val = DECL_INITIAL (sym); if (val) { - val = canonicalize_constructor_val (val); + val = canonicalize_constructor_val (unshare_expr (val), sym); if (val && is_gimple_min_invariant (val)) return val; else @@ -173,384 +241,6 @@ get_symbol_constant_value (tree sym) } -/* Return true if we may propagate the address expression ADDR into the - dereference DEREF and cancel them. */ - -bool -may_propagate_address_into_dereference (tree addr, tree deref) -{ - gcc_assert (TREE_CODE (deref) == MEM_REF - && TREE_CODE (addr) == ADDR_EXPR); - - /* Don't propagate if ADDR's operand has incomplete type. */ - if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0)))) - return false; - - /* If the address is invariant then we do not need to preserve restrict - qualifications. But we do need to preserve volatile qualifiers until - we can annotate the folded dereference itself properly. 
*/ - if (is_gimple_min_invariant (addr) - && (!TREE_THIS_VOLATILE (deref) - || TYPE_VOLATILE (TREE_TYPE (addr)))) - return useless_type_conversion_p (TREE_TYPE (deref), - TREE_TYPE (TREE_OPERAND (addr, 0))); - - /* Else both the address substitution and the folding must result in - a valid useless type conversion sequence. */ - return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)), - TREE_TYPE (addr)) - && useless_type_conversion_p (TREE_TYPE (deref), - TREE_TYPE (TREE_OPERAND (addr, 0)))); -} - - -/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X]. - BASE is an array type. OFFSET is a byte displacement. - - LOC is the location of the original expression. */ - -static tree -maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset) -{ - tree min_idx, idx, idx_type, elt_offset = integer_zero_node; - tree array_type, elt_type, elt_size; - tree domain_type; - - /* If BASE is an ARRAY_REF, we can pick up another offset (this time - measured in units of the size of elements type) from that ARRAY_REF). - We can't do anything if either is variable. - - The case we handle here is *(&A[N]+O). */ - if (TREE_CODE (base) == ARRAY_REF) - { - tree low_bound = array_ref_low_bound (base); - - elt_offset = TREE_OPERAND (base, 1); - if (TREE_CODE (low_bound) != INTEGER_CST - || TREE_CODE (elt_offset) != INTEGER_CST) - return NULL_TREE; - - elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound); - base = TREE_OPERAND (base, 0); - } - - /* Ignore stupid user tricks of indexing non-array variables. */ - array_type = TREE_TYPE (base); - if (TREE_CODE (array_type) != ARRAY_TYPE) - return NULL_TREE; - elt_type = TREE_TYPE (array_type); - - /* Use signed size type for intermediate computation on the index. */ - idx_type = ssizetype; - - /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the - element type (so we can use the alignment if it's not constant). - Otherwise, compute the offset as an index by using a division. If the - division isn't exact, then don't do anything. */ - elt_size = TYPE_SIZE_UNIT (elt_type); - if (!elt_size) - return NULL; - if (integer_zerop (offset)) - { - if (TREE_CODE (elt_size) != INTEGER_CST) - elt_size = size_int (TYPE_ALIGN (elt_type)); - - idx = build_int_cst (idx_type, 0); - } - else - { - unsigned HOST_WIDE_INT lquo, lrem; - HOST_WIDE_INT hquo, hrem; - double_int soffset; - - /* The final array offset should be signed, so we need - to sign-extend the (possibly pointer) offset here - and use signed division. */ - soffset = double_int_sext (tree_to_double_int (offset), - TYPE_PRECISION (TREE_TYPE (offset))); - if (TREE_CODE (elt_size) != INTEGER_CST - || div_and_round_double (TRUNC_DIV_EXPR, 0, - soffset.low, soffset.high, - TREE_INT_CST_LOW (elt_size), - TREE_INT_CST_HIGH (elt_size), - &lquo, &hquo, &lrem, &hrem) - || lrem || hrem) - return NULL_TREE; - - idx = build_int_cst_wide (idx_type, lquo, hquo); - } - - /* Assume the low bound is zero. If there is a domain type, get the - low bound, if any, convert the index into that type, and add the - low bound. 
*/ - min_idx = build_int_cst (idx_type, 0); - domain_type = TYPE_DOMAIN (array_type); - if (domain_type) - { - idx_type = domain_type; - if (TYPE_MIN_VALUE (idx_type)) - min_idx = TYPE_MIN_VALUE (idx_type); - else - min_idx = fold_convert (idx_type, min_idx); - - if (TREE_CODE (min_idx) != INTEGER_CST) - return NULL_TREE; - - elt_offset = fold_convert (idx_type, elt_offset); - } - - if (!integer_zerop (min_idx)) - idx = int_const_binop (PLUS_EXPR, idx, min_idx); - if (!integer_zerop (elt_offset)) - idx = int_const_binop (PLUS_EXPR, idx, elt_offset); - - /* Make sure to possibly truncate late after offsetting. */ - idx = fold_convert (idx_type, idx); - - /* We don't want to construct access past array bounds. For example - char *(c[4]); - c[3][2]; - should not be simplified into (*c)[14] or tree-vrp will - give false warnings. - This is only an issue for multi-dimensional arrays. */ - if (TREE_CODE (elt_type) == ARRAY_TYPE - && domain_type) - { - if (TYPE_MAX_VALUE (domain_type) - && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST - && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx)) - return NULL_TREE; - else if (TYPE_MIN_VALUE (domain_type) - && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST - && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type))) - return NULL_TREE; - else if (compare_tree_int (idx, 0) < 0) - return NULL_TREE; - } - - { - tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); - SET_EXPR_LOCATION (t, loc); - return t; - } -} - - -/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index]. - LOC is the location of original expression. - - Before attempting the conversion strip off existing ADDR_EXPRs. */ - -tree -maybe_fold_offset_to_reference (location_t loc, tree base, tree offset, - tree orig_type) -{ - tree ret; - - STRIP_NOPS (base); - if (TREE_CODE (base) != ADDR_EXPR) - return NULL_TREE; - - base = TREE_OPERAND (base, 0); - if (types_compatible_p (orig_type, TREE_TYPE (base)) - && integer_zerop (offset)) - return base; - - ret = maybe_fold_offset_to_array_ref (loc, base, offset); - if (ret && types_compatible_p (orig_type, TREE_TYPE (ret))) - return ret; - return NULL_TREE; -} - -/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index]. - LOC is the location of the original expression. */ - -tree -maybe_fold_offset_to_address (location_t loc, tree addr, tree offset, - tree orig_type) -{ - tree base, ret; - - STRIP_NOPS (addr); - if (TREE_CODE (addr) != ADDR_EXPR) - return NULL_TREE; - base = TREE_OPERAND (addr, 0); - ret = maybe_fold_offset_to_array_ref (loc, base, offset); - if (ret) - { - ret = build_fold_addr_expr (ret); - if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret))) - return NULL_TREE; - SET_EXPR_LOCATION (ret, loc); - } - - return ret; -} - - -/* A quaint feature extant in our address arithmetic is that there - can be hidden type changes here. The type of the result need - not be the same as the type of the input pointer. - - What we're after here is an expression of the form - (T *)(&array + const) - where array is OP0, const is OP1, RES_TYPE is T and - the cast doesn't actually exist, but is implicit in the - type of the POINTER_PLUS_EXPR. We'd like to turn this into - &array[x] - which may be able to propagate further. */ - -tree -maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1) -{ - tree ptd_type; - tree t; - - /* The first operand should be an ADDR_EXPR. 
*/ - if (TREE_CODE (op0) != ADDR_EXPR) - return NULL_TREE; - op0 = TREE_OPERAND (op0, 0); - - /* It had better be a constant. */ - if (TREE_CODE (op1) != INTEGER_CST) - { - /* Or op0 should now be A[0] and the non-constant offset defined - via a multiplication by the array element size. */ - if (TREE_CODE (op0) == ARRAY_REF - /* As we will end up creating a variable index array access - in the outermost array dimension make sure there isn't - a more inner array that the index could overflow to. */ - && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF - && integer_zerop (TREE_OPERAND (op0, 1)) - && TREE_CODE (op1) == SSA_NAME) - { - gimple offset_def = SSA_NAME_DEF_STMT (op1); - tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0)); - if (!host_integerp (elsz, 1) - || !is_gimple_assign (offset_def)) - return NULL_TREE; - - /* Do not build array references of something that we can't - see the true number of array dimensions for. */ - if (!DECL_P (TREE_OPERAND (op0, 0)) - && !handled_component_p (TREE_OPERAND (op0, 0))) - return NULL_TREE; - - if (gimple_assign_rhs_code (offset_def) == MULT_EXPR - && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST - && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz)) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - gimple_assign_rhs1 (offset_def), - TREE_OPERAND (op0, 2), - TREE_OPERAND (op0, 3))); - else if (integer_onep (elsz) - && gimple_assign_rhs_code (offset_def) != MULT_EXPR) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - op1, - TREE_OPERAND (op0, 2), - TREE_OPERAND (op0, 3))); - } - else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE - /* Dto. */ - && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE - && TREE_CODE (op1) == SSA_NAME) - { - gimple offset_def = SSA_NAME_DEF_STMT (op1); - tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0))); - if (!host_integerp (elsz, 1) - || !is_gimple_assign (offset_def)) - return NULL_TREE; - - /* Do not build array references of something that we can't - see the true number of array dimensions for. */ - if (!DECL_P (op0) - && !handled_component_p (op0)) - return NULL_TREE; - - if (gimple_assign_rhs_code (offset_def) == MULT_EXPR - && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST - && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz)) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)), - op0, gimple_assign_rhs1 (offset_def), - integer_zero_node, NULL_TREE)); - else if (integer_onep (elsz) - && gimple_assign_rhs_code (offset_def) != MULT_EXPR) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)), - op0, op1, - integer_zero_node, NULL_TREE)); - } - - return NULL_TREE; - } - - /* If the first operand is an ARRAY_REF, expand it so that we can fold - the offset into it. */ - while (TREE_CODE (op0) == ARRAY_REF) - { - tree array_obj = TREE_OPERAND (op0, 0); - tree array_idx = TREE_OPERAND (op0, 1); - tree elt_type = TREE_TYPE (op0); - tree elt_size = TYPE_SIZE_UNIT (elt_type); - tree min_idx; - - if (TREE_CODE (array_idx) != INTEGER_CST) - break; - if (TREE_CODE (elt_size) != INTEGER_CST) - break; - - /* Un-bias the index by the min index of the array type. 
*/ - min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj)); - if (min_idx) - { - min_idx = TYPE_MIN_VALUE (min_idx); - if (min_idx) - { - if (TREE_CODE (min_idx) != INTEGER_CST) - break; - - array_idx = fold_convert (TREE_TYPE (min_idx), array_idx); - if (!integer_zerop (min_idx)) - array_idx = int_const_binop (MINUS_EXPR, array_idx, - min_idx); - } - } - - /* Convert the index to a byte offset. */ - array_idx = fold_convert (sizetype, array_idx); - array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size); - - /* Update the operands for the next round, or for folding. */ - op1 = int_const_binop (PLUS_EXPR, - array_idx, op1); - op0 = array_obj; - } - - ptd_type = TREE_TYPE (res_type); - /* If we want a pointer to void, reconstruct the reference from the - array element type. A pointer to that can be trivially converted - to void *. This happens as we fold (void *)(ptr p+ off). */ - if (VOID_TYPE_P (ptd_type) - && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) - ptd_type = TREE_TYPE (TREE_TYPE (op0)); - - /* At which point we can try some of the same things as for indirects. */ - t = maybe_fold_offset_to_array_ref (loc, op0, op1); - if (t) - { - t = build_fold_addr_expr (t); - if (!useless_type_conversion_p (res_type, TREE_TYPE (t))) - return NULL_TREE; - SET_EXPR_LOCATION (t, loc); - } - - return t; -} /* Subroutine of fold_stmt. We perform several simplifications of the memory reference tree EXPR and make sure to re-gimplify them properly @@ -669,42 +359,7 @@ fold_gimple_assign (gimple_stmt_iterator *si) { tree rhs = gimple_assign_rhs1 (stmt); - /* Try to fold a conditional expression. */ - if (TREE_CODE (rhs) == COND_EXPR) - { - tree op0 = COND_EXPR_COND (rhs); - tree tem; - bool set = false; - location_t cond_loc = EXPR_LOCATION (rhs); - - if (COMPARISON_CLASS_P (op0)) - { - fold_defer_overflow_warnings (); - tem = fold_binary_loc (cond_loc, - TREE_CODE (op0), TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - TREE_OPERAND (op0, 1)); - /* This is actually a conditional expression, not a GIMPLE - conditional statement, however, the valid_gimple_rhs_p - test still applies. */ - set = (tem && is_gimple_condexpr (tem) - && valid_gimple_rhs_p (tem)); - fold_undefer_overflow_warnings (set, stmt, 0); - } - else if (is_gimple_min_invariant (op0)) - { - tem = op0; - set = true; - } - else - return NULL_TREE; - - if (set) - result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem, - COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); - } - - else if (REFERENCE_CLASS_P (rhs)) + if (REFERENCE_CLASS_P (rhs)) return maybe_fold_reference (rhs, false); else if (TREE_CODE (rhs) == ADDR_EXPR) @@ -743,7 +398,7 @@ fold_gimple_assign (gimple_stmt_iterator *si) } else if (DECL_P (rhs)) - return unshare_expr (get_symbol_constant_value (rhs)); + return get_symbol_constant_value (rhs); /* If we couldn't fold the RHS, hand over to the generic fold routines. */ @@ -783,87 +438,116 @@ fold_gimple_assign (gimple_stmt_iterator *si) if (valid_gimple_rhs_p (result)) return result; } - else if (CONVERT_EXPR_CODE_P (subcode) - && POINTER_TYPE_P (gimple_expr_type (stmt)) - && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))) - { - tree type = gimple_expr_type (stmt); - tree t = maybe_fold_offset_to_address (loc, - gimple_assign_rhs1 (stmt), - integer_zero_node, type); - if (t) - return t; - } } break; case GIMPLE_BINARY_RHS: - /* Try to fold pointer addition. 
*/ - if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) - { - tree type = TREE_TYPE (gimple_assign_rhs1 (stmt)); - if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE) + /* Try to canonicalize for boolean-typed X the comparisons + X == 0, X == 1, X != 0, and X != 1. */ + if (gimple_assign_rhs_code (stmt) == EQ_EXPR + || gimple_assign_rhs_code (stmt) == NE_EXPR) + { + tree lhs = gimple_assign_lhs (stmt); + tree op1 = gimple_assign_rhs1 (stmt); + tree op2 = gimple_assign_rhs2 (stmt); + tree type = TREE_TYPE (op1); + + /* Check whether the comparison operands are of the same boolean + type as the result type is. + Check that second operand is an integer-constant with value + one or zero. */ + if (TREE_CODE (op2) == INTEGER_CST + && (integer_zerop (op2) || integer_onep (op2)) + && useless_type_conversion_p (TREE_TYPE (lhs), type)) { - type = build_pointer_type (TREE_TYPE (TREE_TYPE (type))); - if (!useless_type_conversion_p - (TREE_TYPE (gimple_assign_lhs (stmt)), type)) - type = TREE_TYPE (gimple_assign_rhs1 (stmt)); + enum tree_code cmp_code = gimple_assign_rhs_code (stmt); + bool is_logical_not = false; + + /* X == 0 and X != 1 is a logical-not.of X + X == 1 and X != 0 is X */ + if ((cmp_code == EQ_EXPR && integer_zerop (op2)) + || (cmp_code == NE_EXPR && integer_onep (op2))) + is_logical_not = true; + + if (is_logical_not == false) + result = op1; + /* Only for one-bit precision typed X the transformation + !X -> ~X is valied. */ + else if (TYPE_PRECISION (type) == 1) + result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR, + type, op1); + /* Otherwise we use !X -> X ^ 1. */ + else + result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR, + type, op1, build_int_cst (type, 1)); + } - result = maybe_fold_stmt_addition (gimple_location (stmt), - type, - gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt)); } if (!result) result = fold_binary_loc (loc, subcode, - TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt)); + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt)); if (result) { STRIP_USELESS_TYPE_CONVERSION (result); if (valid_gimple_rhs_p (result)) return result; - - /* Fold might have produced non-GIMPLE, so if we trust it blindly - we lose canonicalization opportunities. Do not go again - through fold here though, or the same non-GIMPLE will be - produced. */ - if (commutative_tree_code (subcode) - && tree_swap_operands_p (gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt), false)) - return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs2 (stmt), - gimple_assign_rhs1 (stmt)); } break; case GIMPLE_TERNARY_RHS: - result = fold_ternary_loc (loc, subcode, - TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt), - gimple_assign_rhs3 (stmt)); + /* Try to fold a conditional expression. */ + if (gimple_assign_rhs_code (stmt) == COND_EXPR) + { + tree op0 = gimple_assign_rhs1 (stmt); + tree tem; + bool set = false; + location_t cond_loc = gimple_location (stmt); + + if (COMPARISON_CLASS_P (op0)) + { + fold_defer_overflow_warnings (); + tem = fold_binary_loc (cond_loc, + TREE_CODE (op0), TREE_TYPE (op0), + TREE_OPERAND (op0, 0), + TREE_OPERAND (op0, 1)); + /* This is actually a conditional expression, not a GIMPLE + conditional statement, however, the valid_gimple_rhs_p + test still applies. 
*/ + set = (tem && is_gimple_condexpr (tem) + && valid_gimple_rhs_p (tem)); + fold_undefer_overflow_warnings (set, stmt, 0); + } + else if (is_gimple_min_invariant (op0)) + { + tem = op0; + set = true; + } + else + return NULL_TREE; + + if (set) + result = fold_build3_loc (cond_loc, COND_EXPR, + TREE_TYPE (gimple_assign_lhs (stmt)), tem, + gimple_assign_rhs2 (stmt), + gimple_assign_rhs3 (stmt)); + } + + if (!result) + result = fold_ternary_loc (loc, subcode, + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), + gimple_assign_rhs3 (stmt)); if (result) { STRIP_USELESS_TYPE_CONVERSION (result); if (valid_gimple_rhs_p (result)) return result; - - /* Fold might have produced non-GIMPLE, so if we trust it blindly - we lose canonicalization opportunities. Do not go again - through fold here though, or the same non-GIMPLE will be - produced. */ - if (commutative_ternary_tree_code (subcode) - && tree_swap_operands_p (gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt), false)) - return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs2 (stmt), - gimple_assign_rhs1 (stmt), - gimple_assign_rhs3 (stmt)); } break; @@ -915,24 +599,21 @@ void gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) { tree lhs; - tree tmp = NULL_TREE; /* Silence warning. */ gimple stmt, new_stmt; gimple_stmt_iterator i; - gimple_seq stmts = gimple_seq_alloc(); + gimple_seq stmts = NULL; struct gimplify_ctx gctx; - gimple last = NULL; - gimple laststore = NULL; + gimple laststore; tree reaching_vuse; stmt = gsi_stmt (*si_p); gcc_assert (is_gimple_call (stmt)); - lhs = gimple_call_lhs (stmt); - reaching_vuse = gimple_vuse (stmt); - push_gimplify_context (&gctx); + gctx.into_ssa = gimple_in_ssa_p (cfun); + lhs = gimple_call_lhs (stmt); if (lhs == NULL_TREE) { gimplify_and_add (expr, &stmts); @@ -946,113 +627,79 @@ gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) unlink_stmt_vdef (stmt); release_defs (stmt); } - gsi_remove (si_p, true); + gsi_replace (si_p, gimple_build_nop (), true); return; } } else - tmp = get_initialized_tmp_var (expr, &stmts, NULL); + { + tree tmp = get_initialized_tmp_var (expr, &stmts, NULL); + new_stmt = gimple_build_assign (lhs, tmp); + i = gsi_last (stmts); + gsi_insert_after_without_update (&i, new_stmt, + GSI_CONTINUE_LINKING); + } pop_gimplify_context (NULL); if (gimple_has_location (stmt)) annotate_all_with_location (stmts, gimple_location (stmt)); - /* The replacement can expose previously unreferenced variables. */ - for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) + /* First iterate over the replacement statements backward, assigning + virtual operands to their defining statements. */ + laststore = NULL; + for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i)) { - if (last) - { - gsi_insert_before (si_p, last, GSI_NEW_STMT); - gsi_next (si_p); - } new_stmt = gsi_stmt (i); - if (gimple_in_ssa_p (cfun)) - { - find_new_referenced_vars (new_stmt); - mark_symbols_for_renaming (new_stmt); - } - /* If the new statement has a VUSE, update it with exact SSA name we - know will reach this one. */ - if (gimple_vuse (new_stmt)) - { - /* If we've also seen a previous store create a new VDEF for - the latter one, and make that the new reaching VUSE. 
*/ - if (laststore) - { - reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore); - gimple_set_vdef (laststore, reaching_vuse); - update_stmt (laststore); - laststore = NULL; - } - gimple_set_vuse (new_stmt, reaching_vuse); - gimple_set_modified (new_stmt, true); - } - if (gimple_assign_single_p (new_stmt) - && !is_gimple_reg (gimple_assign_lhs (new_stmt))) + if ((gimple_assign_single_p (new_stmt) + && !is_gimple_reg (gimple_assign_lhs (new_stmt))) + || (is_gimple_call (new_stmt) + && (gimple_call_flags (new_stmt) + & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0)) { + tree vdef; + if (!laststore) + vdef = gimple_vdef (stmt); + else + vdef = make_ssa_name (gimple_vop (cfun), new_stmt); + gimple_set_vdef (new_stmt, vdef); + if (vdef && TREE_CODE (vdef) == SSA_NAME) + SSA_NAME_DEF_STMT (vdef) = new_stmt; laststore = new_stmt; } - last = new_stmt; } - if (lhs == NULL_TREE) + /* Second iterate over the statements forward, assigning virtual + operands to their uses. */ + reaching_vuse = gimple_vuse (stmt); + for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) { - /* If we replace a call without LHS that has a VDEF and our new - sequence ends with a store we must make that store have the same - vdef in order not to break the sequencing. This can happen - for instance when folding memcpy calls into assignments. */ - if (gimple_vdef (stmt) && laststore) - { - gimple_set_vdef (laststore, gimple_vdef (stmt)); - if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) - SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore; - update_stmt (laststore); - } - else if (gimple_in_ssa_p (cfun)) - { - unlink_stmt_vdef (stmt); - release_defs (stmt); - } - new_stmt = last; + new_stmt = gsi_stmt (i); + /* If the new statement possibly has a VUSE, update it with exact SSA + name we know will reach this one. */ + if (gimple_has_mem_ops (new_stmt)) + gimple_set_vuse (new_stmt, reaching_vuse); + gimple_set_modified (new_stmt, true); + if (gimple_vdef (new_stmt)) + reaching_vuse = gimple_vdef (new_stmt); } - else + + /* If the new sequence does not do a store release the virtual + definition of the original statement. */ + if (reaching_vuse + && reaching_vuse == gimple_vuse (stmt)) { - if (last) - { - gsi_insert_before (si_p, last, GSI_NEW_STMT); - gsi_next (si_p); - } - if (laststore && is_gimple_reg (lhs)) - { - gimple_set_vdef (laststore, gimple_vdef (stmt)); - update_stmt (laststore); - if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) - SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore; - laststore = NULL; - } - else if (laststore) - { - reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore); - gimple_set_vdef (laststore, reaching_vuse); - update_stmt (laststore); - laststore = NULL; - } - new_stmt = gimple_build_assign (lhs, tmp); - if (!is_gimple_reg (tmp)) - gimple_set_vuse (new_stmt, reaching_vuse); - if (!is_gimple_reg (lhs)) + tree vdef = gimple_vdef (stmt); + if (vdef + && TREE_CODE (vdef) == SSA_NAME) { - gimple_set_vdef (new_stmt, gimple_vdef (stmt)); - if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME) - SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt; + unlink_stmt_vdef (stmt); + release_ssa_name (vdef); } - else if (reaching_vuse == gimple_vuse (stmt)) - unlink_stmt_vdef (stmt); } - gimple_set_location (new_stmt, gimple_location (stmt)); - gsi_replace (si_p, new_stmt, false); + /* Finally replace the original statement with the sequence. 
*/ + gsi_replace_with_seq (si_p, stmts, false); } /* Return the string length, maximum string length or maximum value of @@ -1072,13 +719,10 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) if (TREE_CODE (arg) != SSA_NAME) { - if (TREE_CODE (arg) == COND_EXPR) - return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type) - && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type); /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ - else if (TREE_CODE (arg) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF - && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) + if (TREE_CODE (arg) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF + && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) { tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); if (TREE_CODE (aop0) == INDIRECT_REF @@ -1119,6 +763,11 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) return true; } + /* If ARG is registered for SSA update we cannot look at its defining + statement. */ + if (name_registered_for_update_p (arg)) + return false; + /* If we were already here, break the infinite cycle. */ if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg))) return true; @@ -1138,6 +787,13 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) tree rhs = gimple_assign_rhs1 (def_stmt); return get_maxval_strlen (rhs, length, visited, type); } + else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR) + { + tree op2 = gimple_assign_rhs2 (def_stmt); + tree op3 = gimple_assign_rhs3 (def_stmt); + return get_maxval_strlen (op2, length, visited, type) + && get_maxval_strlen (op3, length, visited, type); + } return false; case GIMPLE_PHI: @@ -1208,6 +864,11 @@ gimple_fold_builtin (gimple stmt) if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) return NULL_TREE; + /* Give up for always_inline inline builtins until they are + inlined. */ + if (avoid_folding_inline_builtin (callee)) + return NULL_TREE; + /* If the builtin could not be folded, and it has no argument list, we're done. */ nargs = gimple_call_num_args (stmt); @@ -1233,6 +894,7 @@ gimple_fold_builtin (gimple stmt) case BUILT_IN_MEMMOVE_CHK: case BUILT_IN_MEMSET_CHK: case BUILT_IN_STRNCPY_CHK: + case BUILT_IN_STPNCPY_CHK: arg_idx = 2; type = 2; break; @@ -1339,12 +1001,14 @@ gimple_fold_builtin (gimple stmt) break; case BUILT_IN_STRNCPY_CHK: + case BUILT_IN_STPNCPY_CHK: if (val[2] && is_gimple_val (val[2]) && nargs == 4) - result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0), + result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0), gimple_call_arg (stmt, 1), gimple_call_arg (stmt, 2), gimple_call_arg (stmt, 3), - val[2]); + val[2], ignore, + DECL_FUNCTION_CODE (callee)); break; case BUILT_IN_SNPRINTF_CHK: @@ -1363,99 +1027,19 @@ gimple_fold_builtin (gimple stmt) return result; } -/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN - is integer form of OBJ_TYPE_REF_TOKEN of the reference expression. - KNOWN_BINFO carries the binfo describing the true type of - OBJ_TYPE_REF_OBJECT(REF). If a call to the function must be accompanied - with a this adjustment, the constant which should be added to this pointer - is stored to *DELTA. If REFUSE_THUNKS is true, return NULL if the function - is a thunk (other than a this adjustment which is dealt with by DELTA). 
*/ - -tree -gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo, - tree *delta, bool refuse_thunks) -{ - HOST_WIDE_INT i; - tree v, fndecl; - struct cgraph_node *node; - - v = BINFO_VIRTUALS (known_binfo); - /* If there is no virtual methods leave the OBJ_TYPE_REF alone. */ - if (!v) - return NULL_TREE; - i = 0; - while (i != token) - { - i += (TARGET_VTABLE_USES_DESCRIPTORS - ? TARGET_VTABLE_USES_DESCRIPTORS : 1); - v = TREE_CHAIN (v); - } - - /* If BV_VCALL_INDEX is non-NULL, give up. */ - if (TREE_TYPE (v)) - return NULL_TREE; - - fndecl = TREE_VALUE (v); - node = cgraph_get_node_or_alias (fndecl); - if (refuse_thunks - && (!node - /* Bail out if it is a thunk declaration. Since simple this_adjusting - thunks are represented by a constant in TREE_PURPOSE of items in - BINFO_VIRTUALS, this is a more complicate type which we cannot handle as - yet. - - FIXME: Remove the following condition once we are able to represent - thunk information on call graph edges. */ - || (node->same_body_alias && node->thunk.thunk_p))) - return NULL_TREE; - - /* When cgraph node is missing and function is not public, we cannot - devirtualize. This can happen in WHOPR when the actual method - ends up in other partition, because we found devirtualization - possibility too late. */ - if (!can_refer_decl_in_current_unit_p (TREE_VALUE (v))) - return NULL_TREE; - - *delta = TREE_PURPOSE (v); - gcc_checking_assert (host_integerp (*delta, 0)); - return fndecl; -} - -/* Generate code adjusting the first parameter of a call statement determined - by GSI by DELTA. */ - -void -gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta) -{ - gimple call_stmt = gsi_stmt (*gsi); - tree parm, tmp; - gimple new_stmt; - - delta = fold_convert (sizetype, delta); - gcc_assert (gimple_call_num_args (call_stmt) >= 1); - parm = gimple_call_arg (call_stmt, 0); - gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm))); - tmp = create_tmp_var (TREE_TYPE (parm), NULL); - add_referenced_var (tmp); - - tmp = make_ssa_name (tmp, NULL); - new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta); - SSA_NAME_DEF_STMT (tmp) = new_stmt; - gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); - gimple_call_set_arg (call_stmt, 0, tmp); -} /* Return a binfo to be used for devirtualization of calls based on an object represented by a declaration (i.e. a global or automatically allocated one) or NULL if it cannot be found or is not safe. CST is expected to be an ADDR_EXPR of such object or the function will return NULL. Currently it is - safe to use such binfo only if it has no base binfo (i.e. no ancestors). */ + safe to use such binfo only if it has no base binfo (i.e. no ancestors) + EXPECTED_TYPE is type of the class virtual belongs to. 
*/ tree -gimple_extract_devirt_binfo_from_cst (tree cst) +gimple_extract_devirt_binfo_from_cst (tree cst, tree expected_type) { HOST_WIDE_INT offset, size, max_size; - tree base, type, expected_type, binfo; + tree base, type, binfo; bool last_artificial = false; if (!flag_devirtualize @@ -1464,7 +1048,6 @@ gimple_extract_devirt_binfo_from_cst (tree cst) return NULL_TREE; cst = TREE_OPERAND (cst, 0); - expected_type = TREE_TYPE (cst); base = get_ref_base_and_extent (cst, &offset, &size, &max_size); type = TREE_TYPE (base); if (!DECL_P (base) @@ -1480,7 +1063,7 @@ gimple_extract_devirt_binfo_from_cst (tree cst) HOST_WIDE_INT pos, size; tree fld; - if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type)) + if (types_same_for_odr (type, expected_type)) break; if (offset < 0) return NULL_TREE; @@ -1502,7 +1085,7 @@ gimple_extract_devirt_binfo_from_cst (tree cst) type = TREE_TYPE (fld); offset -= pos; } - /* Artifical sub-objects are ancestors, we do not want to use them for + /* Artificial sub-objects are ancestors, we do not want to use them for devirtualization, at least not here. */ if (last_artificial) return NULL_TREE; @@ -1518,54 +1101,92 @@ gimple_extract_devirt_binfo_from_cst (tree cst) simplifies to a constant value. Return true if any changes were made. It is assumed that the operands have been previously folded. */ -bool +static bool gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace) { gimple stmt = gsi_stmt (*gsi); tree callee; + bool changed = false; + unsigned i; - /* Check for builtins that CCP can handle using information not - available in the generic fold routines. */ - callee = gimple_call_fndecl (stmt); - if (!inplace && callee && DECL_BUILT_IN (callee)) - { - tree result = gimple_fold_builtin (stmt); - - if (result) - { - if (!update_call_from_tree (gsi, result)) - gimplify_and_update_call_from_tree (gsi, result); - return true; - } - } + /* Fold *& in call arguments. */ + for (i = 0; i < gimple_call_num_args (stmt); ++i) + if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i))) + { + tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false); + if (tmp) + { + gimple_call_set_arg (stmt, i, tmp); + changed = true; + } + } /* Check for virtual calls that became direct calls. 
*/ callee = gimple_call_fn (stmt); if (callee && TREE_CODE (callee) == OBJ_TYPE_REF) { - tree binfo, fndecl, delta, obj; - HOST_WIDE_INT token; - if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE) { + if (dump_file && virtual_method_call_p (callee) + && !possible_polymorphic_call_target_p + (callee, cgraph_get_node (gimple_call_addr_fndecl + (OBJ_TYPE_REF_EXPR (callee))))) + { + fprintf (dump_file, + "Type inheritnace inconsistent devirtualization of "); + print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); + fprintf (dump_file, " to "); + print_generic_expr (dump_file, callee, TDF_SLIM); + fprintf (dump_file, "\n"); + } + gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee)); - return true; + changed = true; } + else if (virtual_method_call_p (callee)) + { + tree obj = OBJ_TYPE_REF_OBJECT (callee); + tree binfo = gimple_extract_devirt_binfo_from_cst + (obj, obj_type_ref_class (callee)); + if (binfo) + { + HOST_WIDE_INT token + = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee)); + tree fndecl = gimple_get_virt_method_for_binfo (token, binfo); + if (fndecl) + { +#ifdef ENABLE_CHECKING + gcc_assert (possible_polymorphic_call_target_p + (callee, cgraph_get_node (fndecl))); - obj = OBJ_TYPE_REF_OBJECT (callee); - binfo = gimple_extract_devirt_binfo_from_cst (obj); - if (!binfo) - return false; - token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee)); - fndecl = gimple_get_virt_method_for_binfo (token, binfo, &delta, false); - if (!fndecl) - return false; - gcc_assert (integer_zerop (delta)); - gimple_call_set_fndecl (stmt, fndecl); - return true; +#endif + gimple_call_set_fndecl (stmt, fndecl); + changed = true; + } + } + } } - return false; + if (inplace) + return changed; + + /* Check for builtins that CCP can handle using information not + available in the generic fold routines. */ + callee = gimple_call_fndecl (stmt); + if (callee && DECL_BUILT_IN (callee)) + { + tree result = gimple_fold_builtin (stmt); + if (result) + { + if (!update_call_from_tree (gsi, result)) + gimplify_and_update_call_from_tree (gsi, result); + changed = true; + } + else if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) + changed |= targetm.gimple_fold_builtin (gsi); + } + + return changed; } /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument @@ -1584,8 +1205,22 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) case GIMPLE_ASSIGN: { unsigned old_num_ops = gimple_num_ops (stmt); - tree new_rhs = fold_gimple_assign (gsi); + enum tree_code subcode = gimple_assign_rhs_code (stmt); tree lhs = gimple_assign_lhs (stmt); + tree new_rhs; + /* First canonicalize operand order. This avoids building new + trees if this is the only thing fold would later do. */ + if ((commutative_tree_code (subcode) + || commutative_ternary_tree_code (subcode)) + && tree_swap_operands_p (gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), false)) + { + tree tem = gimple_assign_rhs1 (stmt); + gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt)); + gimple_assign_set_rhs2 (stmt, tem); + changed = true; + } + new_rhs = fold_gimple_assign (gsi); if (new_rhs && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_rhs))) @@ -1605,44 +1240,50 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) break; case GIMPLE_CALL: - /* Fold *& in call arguments. 
*/ - for (i = 0; i < gimple_call_num_args (stmt); ++i) - if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i))) - { - tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false); - if (tmp) - { - gimple_call_set_arg (stmt, i, tmp); - changed = true; - } - } changed |= gimple_fold_call (gsi, inplace); break; case GIMPLE_ASM: /* Fold *& in asm operands. */ - for (i = 0; i < gimple_asm_noutputs (stmt); ++i) - { - tree link = gimple_asm_output_op (stmt, i); - tree op = TREE_VALUE (link); - if (REFERENCE_CLASS_P (op) - && (op = maybe_fold_reference (op, true)) != NULL_TREE) - { - TREE_VALUE (link) = op; - changed = true; - } - } - for (i = 0; i < gimple_asm_ninputs (stmt); ++i) - { - tree link = gimple_asm_input_op (stmt, i); - tree op = TREE_VALUE (link); - if (REFERENCE_CLASS_P (op) - && (op = maybe_fold_reference (op, false)) != NULL_TREE) - { - TREE_VALUE (link) = op; - changed = true; - } - } + { + size_t noutputs; + const char **oconstraints; + const char *constraint; + bool allows_mem, allows_reg; + + noutputs = gimple_asm_noutputs (stmt); + oconstraints = XALLOCAVEC (const char *, noutputs); + + for (i = 0; i < gimple_asm_noutputs (stmt); ++i) + { + tree link = gimple_asm_output_op (stmt, i); + tree op = TREE_VALUE (link); + oconstraints[i] + = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + if (REFERENCE_CLASS_P (op) + && (op = maybe_fold_reference (op, true)) != NULL_TREE) + { + TREE_VALUE (link) = op; + changed = true; + } + } + for (i = 0; i < gimple_asm_ninputs (stmt); ++i) + { + tree link = gimple_asm_input_op (stmt, i); + tree op = TREE_VALUE (link); + constraint + = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + parse_input_constraint (&constraint, 0, 0, noutputs, 0, + oconstraints, &allows_mem, &allows_reg); + if (REFERENCE_CLASS_P (op) + && (op = maybe_fold_reference (op, !allows_reg && allows_mem)) + != NULL_TREE) + { + TREE_VALUE (link) = op; + changed = true; + } + } + } break; case GIMPLE_DEBUG: @@ -1659,6 +1300,18 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) changed = true; } } + else if (val + && TREE_CODE (val) == ADDR_EXPR) + { + tree ref = TREE_OPERAND (val, 0); + tree tem = maybe_fold_reference (ref, false); + if (tem) + { + tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val)); + gimple_debug_bind_set_value (stmt, tem); + changed = true; + } + } } break; @@ -1698,20 +1351,20 @@ fold_stmt (gimple_stmt_iterator *gsi) return fold_stmt_1 (gsi, false); } -/* Perform the minimal folding on statement STMT. Only operations like +/* Perform the minimal folding on statement *GSI. Only operations like *&x created by constant propagation are handled. The statement cannot be replaced with a new one. Return true if the statement was changed, false otherwise. - The statement STMT should be in valid gimple form but may + The statement *GSI should be in valid gimple form but may be in unfolded state as resulting from for example constant propagation which can produce *&x = 0. */ bool -fold_stmt_inplace (gimple stmt) +fold_stmt_inplace (gimple_stmt_iterator *gsi) { - gimple_stmt_iterator gsi = gsi_for_stmt (stmt); - bool changed = fold_stmt_1 (&gsi, true); - gcc_assert (gsi_stmt (gsi) == stmt); + gimple stmt = gsi_stmt (*gsi); + bool changed = fold_stmt_1 (gsi, true); + gcc_assert (gsi_stmt (*gsi) == stmt); return changed; } @@ -1945,17 +1598,15 @@ and_var_with_comparison_1 (gimple stmt, /* If the definition is an AND or OR expression, we may be able to simplify by reassociating. 
*/ - if (innercode == TRUTH_AND_EXPR - || innercode == TRUTH_OR_EXPR - || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE - && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))) + if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE + && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) { tree inner1 = gimple_assign_rhs1 (stmt); tree inner2 = gimple_assign_rhs2 (stmt); gimple s; tree t; tree partial = NULL_TREE; - bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR); + bool is_and = (innercode == BIT_AND_EXPR); /* Check for boolean identities that don't require recursive examination of inner1/inner2: @@ -2073,13 +1724,16 @@ static tree and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, enum tree_code code2, tree op2a, tree op2b) { + tree truth_type = truth_type_for (TREE_TYPE (op1a)); + /* First check for ((x CODE1 y) AND (x CODE2 y)). */ if (operand_equal_p (op1a, op2a, 0) && operand_equal_p (op1b, op2b, 0)) { + /* Result will be either NULL_TREE, or a combined comparison. */ tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ANDIF_EXPR, code1, code2, - boolean_type_node, op1a, op1b); + truth_type, op1a, op1b); if (t) return t; } @@ -2088,10 +1742,11 @@ and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, if (operand_equal_p (op1a, op2b, 0) && operand_equal_p (op1b, op2a, 0)) { + /* Result will be either NULL_TREE, or a combined comparison. */ tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ANDIF_EXPR, code1, swap_tree_comparison (code2), - boolean_type_node, op1a, op1b); + truth_type, op1a, op1b); if (t) return t; } @@ -2406,17 +2061,15 @@ or_var_with_comparison_1 (gimple stmt, /* If the definition is an AND or OR expression, we may be able to simplify by reassociating. */ - if (innercode == TRUTH_AND_EXPR - || innercode == TRUTH_OR_EXPR - || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE - && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))) + if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE + && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) { tree inner1 = gimple_assign_rhs1 (stmt); tree inner2 = gimple_assign_rhs2 (stmt); gimple s; tree t; tree partial = NULL_TREE; - bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR); + bool is_or = (innercode == BIT_IOR_EXPR); /* Check for boolean identities that don't require recursive examination of inner1/inner2: @@ -2535,13 +2188,16 @@ static tree or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, enum tree_code code2, tree op2a, tree op2b) { + tree truth_type = truth_type_for (TREE_TYPE (op1a)); + /* First check for ((x CODE1 y) OR (x CODE2 y)). */ if (operand_equal_p (op1a, op2a, 0) && operand_equal_p (op1b, op2b, 0)) { + /* Result will be either NULL_TREE, or a combined comparison. */ tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR, code1, code2, - boolean_type_node, op1a, op1b); + truth_type, op1a, op1b); if (t) return t; } @@ -2550,10 +2206,11 @@ or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, if (operand_equal_p (op1a, op2b, 0) && operand_equal_p (op1b, op2a, 0)) { + /* Result will be either NULL_TREE, or a combined comparison. 
*/ tree t = combine_comparisons (UNKNOWN_LOCATION, TRUTH_ORIF_EXPR, code1, swap_tree_comparison (code2), - boolean_type_node, op1a, op1b); + truth_type, op1a, op1b); if (t) return t; } @@ -2830,7 +2487,7 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) else if (TREE_CODE (rhs) == ADDR_EXPR && !is_gimple_min_invariant (rhs)) { - HOST_WIDE_INT offset; + HOST_WIDE_INT offset = 0; tree base; base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0), &offset, @@ -2847,21 +2504,22 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) { unsigned i; - tree val, list; + tree val, *vec; - list = NULL_TREE; + vec = XALLOCAVEC (tree, + TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))); FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) { val = (*valueize) (val); if (TREE_CODE (val) == INTEGER_CST || TREE_CODE (val) == REAL_CST || TREE_CODE (val) == FIXED_CST) - list = tree_cons (NULL_TREE, val, list); + vec[i] = val; else return NULL_TREE; } - return build_vector (TREE_TYPE (rhs), nreverse (list)); + return build_vector (TREE_TYPE (rhs), vec); } if (kind == tcc_reference) @@ -2912,29 +2570,22 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) /* Handle unary operators that can appear in GIMPLE form. Note that we know the single operand must be a constant, so this should almost always return a simplified RHS. */ - tree lhs = gimple_assign_lhs (stmt); + tree lhs = gimple_assign_lhs (stmt); tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); /* Conversions are useless for CCP purposes if they are value-preserving. Thus the restrictions that - useless_type_conversion_p places for pointer type conversions - do not apply here. Substitution later will only substitute to - allowed places. */ + useless_type_conversion_p places for restrict qualification + of pointer types should not apply here. + Substitution later will only substitute to allowed places. */ if (CONVERT_EXPR_CODE_P (subcode) && POINTER_TYPE_P (TREE_TYPE (lhs)) - && POINTER_TYPE_P (TREE_TYPE (op0))) - { - tree tem; - /* Try to re-construct array references on-the-fly. */ - if (!useless_type_conversion_p (TREE_TYPE (lhs), - TREE_TYPE (op0)) - && ((tem = maybe_fold_offset_to_address - (loc, - op0, integer_zero_node, TREE_TYPE (lhs))) - != NULL_TREE)) - return tem; - return op0; - } + && POINTER_TYPE_P (TREE_TYPE (op0)) + && TYPE_ADDR_SPACE (TREE_TYPE (lhs)) + == TYPE_ADDR_SPACE (TREE_TYPE (op0)) + && TYPE_MODE (TREE_TYPE (lhs)) + == TYPE_MODE (TREE_TYPE (op0))) + return op0; return fold_unary_ignore_overflow_loc (loc, subcode, @@ -2954,8 +2605,9 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) && TREE_CODE (op1) == INTEGER_CST) { tree off = fold_convert (ptr_type_node, op1); - return build_fold_addr_expr - (fold_build2 (MEM_REF, + return build_fold_addr_expr_loc + (loc, + fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (op0)), unshare_expr (op0), off)); } @@ -2971,6 +2623,19 @@ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) tree op1 = (*valueize) (gimple_assign_rhs2 (stmt)); tree op2 = (*valueize) (gimple_assign_rhs3 (stmt)); + /* Fold embedded expressions in ternary codes. 
*/ + if ((subcode == COND_EXPR + || subcode == VEC_COND_EXPR) + && COMPARISON_CLASS_P (op0)) + { + tree op00 = (*valueize) (TREE_OPERAND (op0, 0)); + tree op01 = (*valueize) (TREE_OPERAND (op0, 1)); + tree tem = fold_binary_loc (loc, TREE_CODE (op0), + TREE_TYPE (op0), op00, op01); + if (tem) + op0 = tem; + } + return fold_ternary_loc (loc, subcode, gimple_expr_type (stmt), op0, op1, op2); } @@ -3034,7 +2699,7 @@ gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree)) static tree fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, - unsigned HOST_WIDE_INT size); + unsigned HOST_WIDE_INT size, tree); /* See if we can find constructor defining value of BASE. When we know the consructor with constant offset (such as @@ -3074,15 +2739,18 @@ get_base_constructor (tree base, HOST_WIDE_INT *bit_offset, switch (TREE_CODE (base)) { case VAR_DECL: - if (!const_value_known_p (base)) - return NULL_TREE; - - /* Fallthru. */ case CONST_DECL: - if (!DECL_INITIAL (base) - && (TREE_STATIC (base) || DECL_EXTERNAL (base))) - return error_mark_node; - return DECL_INITIAL (base); + { + tree init = ctor_for_folding (base); + + /* Our semantic is exact opposite of ctor_for_folding; + NULL means unknown, while error_mark_node is 0. */ + if (init == error_mark_node) + return NULL_TREE; + if (!init) + return error_mark_node; + return init; + } case ARRAY_REF: case COMPONENT_REF: @@ -3142,27 +2810,31 @@ fold_string_cst_ctor_reference (tree type, tree ctor, static tree fold_array_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, - unsigned HOST_WIDE_INT size) + unsigned HOST_WIDE_INT size, + tree from_decl) { unsigned HOST_WIDE_INT cnt; tree cfield, cval; double_int low_bound, elt_size; double_int index, max_index; double_int access_index; - tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor)); + tree domain_type = NULL_TREE, index_type = NULL_TREE; HOST_WIDE_INT inner_offset; /* Compute low bound and elt size. */ + if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE) + domain_type = TYPE_DOMAIN (TREE_TYPE (ctor)); if (domain_type && TYPE_MIN_VALUE (domain_type)) { /* Static constructors for variably sized objects makes no sense. */ gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST); + index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type)); low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type)); } else low_bound = double_int_zero; /* Static constructors for variably sized objects makes no sense. */ - gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) + gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) == INTEGER_CST); elt_size = tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))); @@ -3172,24 +2844,29 @@ fold_array_ctor_reference (tree type, tree ctor, be larger than size of array element. */ if (!TYPE_SIZE_UNIT (type) || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST - || double_int_cmp (elt_size, - tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0) + || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type)))) return NULL_TREE; /* Compute the array index we look for. 
*/ - access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT), - elt_size, TRUNC_DIV_EXPR); - access_index = double_int_add (access_index, low_bound); + access_index = double_int::from_uhwi (offset / BITS_PER_UNIT) + .udiv (elt_size, TRUNC_DIV_EXPR); + access_index += low_bound; + if (index_type) + access_index = access_index.ext (TYPE_PRECISION (index_type), + TYPE_UNSIGNED (index_type)); /* And offset within the access. */ - inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT); + inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT); /* See if the array field is large enough to span whole access. We do not care to fold accesses spanning multiple array indexes. */ - if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT) + if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT) return NULL_TREE; - index = double_int_sub (low_bound, double_int_one); + index = low_bound - double_int_one; + if (index_type) + index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type)); + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) { /* Array constructor might explicitely set index, or specify range @@ -3207,12 +2884,19 @@ fold_array_ctor_reference (tree type, tree ctor, } } else - max_index = index = double_int_add (index, double_int_one); + { + index += double_int_one; + if (index_type) + index = index.ext (TYPE_PRECISION (index_type), + TYPE_UNSIGNED (index_type)); + max_index = index; + } /* Do we have match? */ - if (double_int_cmp (access_index, index, 1) >= 0 - && double_int_cmp (access_index, max_index, 1) <= 0) - return fold_ctor_reference (type, cval, inner_offset, size); + if (access_index.cmp (index, 1) >= 0 + && access_index.cmp (max_index, 1) <= 0) + return fold_ctor_reference (type, cval, inner_offset, size, + from_decl); } /* When memory is not explicitely mentioned in constructor, it is 0 (or out of range). */ @@ -3225,7 +2909,8 @@ fold_array_ctor_reference (tree type, tree ctor, static tree fold_nonarray_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, - unsigned HOST_WIDE_INT size) + unsigned HOST_WIDE_INT size, + tree from_decl) { unsigned HOST_WIDE_INT cnt; tree cfield, cval; @@ -3238,8 +2923,8 @@ fold_nonarray_ctor_reference (tree type, tree ctor, tree field_size = DECL_SIZE (cfield); double_int bitoffset; double_int byte_offset_cst = tree_to_double_int (byte_offset); - double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT); - double_int bitoffset_end; + double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT); + double_int bitoffset_end, access_end; /* Variable sized objects in static constructors makes no sense, but field_size can be NULL for flexible array members. */ @@ -3250,33 +2935,34 @@ fold_nonarray_ctor_reference (tree type, tree ctor, : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE)); /* Compute bit offset of the field. */ - bitoffset = double_int_add (tree_to_double_int (field_offset), - double_int_mul (byte_offset_cst, - bits_per_unit_cst)); + bitoffset = tree_to_double_int (field_offset) + + byte_offset_cst * bits_per_unit_cst; /* Compute bit offset where the field ends. */ if (field_size != NULL_TREE) - bitoffset_end = double_int_add (bitoffset, - tree_to_double_int (field_size)); + bitoffset_end = bitoffset + tree_to_double_int (field_size); else bitoffset_end = double_int_zero; - /* Is OFFSET in the range (BITOFFSET, BITOFFSET_END)? 
*/ - if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0 + access_end = double_int::from_uhwi (offset) + + double_int::from_uhwi (size); + + /* Is there any overlap between [OFFSET, OFFSET+SIZE) and + [BITOFFSET, BITOFFSET_END)? */ + if (access_end.cmp (bitoffset, 0) > 0 && (field_size == NULL_TREE - || double_int_cmp (uhwi_to_double_int (offset), - bitoffset_end, 0) < 0)) + || double_int::from_uhwi (offset).slt (bitoffset_end))) { - double_int access_end = double_int_add (uhwi_to_double_int (offset), - uhwi_to_double_int (size)); - double_int inner_offset = double_int_sub (uhwi_to_double_int (offset), - bitoffset); + double_int inner_offset = double_int::from_uhwi (offset) - bitoffset; /* We do have overlap. Now see if field is large enough to cover the access. Give up for accesses spanning multiple fields. */ - if (double_int_cmp (access_end, bitoffset_end, 0) > 0) + if (access_end.cmp (bitoffset_end, 0) > 0) + return NULL_TREE; + if (double_int::from_uhwi (offset).slt (bitoffset)) return NULL_TREE; return fold_ctor_reference (type, cval, - double_int_to_uhwi (inner_offset), size); + inner_offset.to_uhwi (), size, + from_decl); } } /* When memory is not explicitely mentioned in constructor, it is 0. */ @@ -3288,14 +2974,14 @@ fold_nonarray_ctor_reference (tree type, tree ctor, static tree fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, - unsigned HOST_WIDE_INT size) + unsigned HOST_WIDE_INT size, tree from_decl) { tree ret; /* We found the field with exact match. */ if (useless_type_conversion_p (type, TREE_TYPE (ctor)) && !offset) - return canonicalize_constructor_val (ctor); + return canonicalize_constructor_val (unshare_expr (ctor), from_decl); /* We are at the end of walk, see if we can view convert the result. */ @@ -3304,7 +2990,7 @@ fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (ctor)), 0)) { - ret = canonicalize_constructor_val (ctor); + ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl); ret = fold_unary (VIEW_CONVERT_EXPR, type, ret); if (ret) STRIP_NOPS (ret); @@ -3315,10 +3001,13 @@ fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, if (TREE_CODE (ctor) == CONSTRUCTOR) { - if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE) - return fold_array_ctor_reference (type, ctor, offset, size); + if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE + || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE) + return fold_array_ctor_reference (type, ctor, offset, size, + from_decl); else - return fold_nonarray_ctor_reference (type, ctor, offset, size); + return fold_nonarray_ctor_reference (type, ctor, offset, size, + from_decl); } return NULL_TREE; @@ -3335,6 +3024,9 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) HOST_WIDE_INT offset, size, max_size; tree tem; + if (TREE_THIS_VOLATILE (t)) + return NULL_TREE; + if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) return get_symbol_constant_value (t); @@ -3354,18 +3046,21 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME && valueize && (idx = (*valueize) (TREE_OPERAND (t, 1))) - && host_integerp (idx, 0)) + && TREE_CODE (idx) == INTEGER_CST) { tree low_bound, unit_size; + double_int doffset; /* If the resulting bit-offset is constant, track it. 
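	   For example (illustrative values): for a[i] with i valueized to 3,
	   low_bound 0 and unit_size 4, the tracked offset below becomes
	   (3 - 0) * 4 * BITS_PER_UNIT = 96 bits.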
*/ if ((low_bound = array_ref_low_bound (t), - host_integerp (low_bound, 0)) + TREE_CODE (low_bound) == INTEGER_CST) && (unit_size = array_ref_element_size (t), - host_integerp (unit_size, 1))) + host_integerp (unit_size, 1)) + && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound)) + .sext (TYPE_PRECISION (TREE_TYPE (idx))), + doffset.fits_shwi ())) { - offset = TREE_INT_CST_LOW (idx); - offset -= TREE_INT_CST_LOW (low_bound); + offset = doffset.to_shwi (); offset *= TREE_INT_CST_LOW (unit_size); offset *= BITS_PER_UNIT; @@ -3383,7 +3078,8 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) return NULL_TREE; return fold_ctor_reference (TREE_TYPE (t), ctor, offset, TREE_INT_CST_LOW (unit_size) - * BITS_PER_UNIT); + * BITS_PER_UNIT, + base); } } /* Fallthru. */ @@ -3409,7 +3105,8 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) if (offset < 0) return NULL_TREE; - return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size); + return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, + base); case REALPART_EXPR: case IMAGPART_EXPR: @@ -3433,3 +3130,326 @@ fold_const_aggregate_ref (tree t) { return fold_const_aggregate_ref_1 (t, NULL); } + +/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN + is integer form of OBJ_TYPE_REF_TOKEN of the reference expression. + KNOWN_BINFO carries the binfo describing the true type of + OBJ_TYPE_REF_OBJECT(REF). */ + +tree +gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo) +{ + unsigned HOST_WIDE_INT offset, size; + tree v, fn, vtable, init; + + vtable = v = BINFO_VTABLE (known_binfo); + /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */ + if (!v) + return NULL_TREE; + + if (TREE_CODE (v) == POINTER_PLUS_EXPR) + { + offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT; + v = TREE_OPERAND (v, 0); + } + else + offset = 0; + + if (TREE_CODE (v) != ADDR_EXPR) + return NULL_TREE; + v = TREE_OPERAND (v, 0); + + if (TREE_CODE (v) != VAR_DECL + || !DECL_VIRTUAL_P (v)) + return NULL_TREE; + init = ctor_for_folding (v); + + /* The virtual tables should always be born with constructors. + and we always should assume that they are avaialble for + folding. At the moment we do not stream them in all cases, + but it should never happen that ctor seem unreachable. */ + gcc_assert (init); + if (init == error_mark_node) + { + gcc_assert (in_lto_p); + return NULL_TREE; + } + gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE); + size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1); + offset += token * size; + fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init, + offset, size, v); + if (!fn || integer_zerop (fn)) + return NULL_TREE; + gcc_assert (TREE_CODE (fn) == ADDR_EXPR + || TREE_CODE (fn) == FDESC_EXPR); + fn = TREE_OPERAND (fn, 0); + gcc_assert (TREE_CODE (fn) == FUNCTION_DECL); + + /* When cgraph node is missing and function is not public, we cannot + devirtualize. This can happen in WHOPR when the actual method + ends up in other partition, because we found devirtualization + possibility too late. */ + if (!can_refer_decl_in_current_unit_p (fn, vtable)) + return NULL_TREE; + + /* Make sure we create a cgraph node for functions we'll reference. + They can be non-existent if the reference comes from an entry + of an external vtable for example. */ + cgraph_get_create_node (fn); + + return fn; +} + +/* Return true iff VAL is a gimple expression that is known to be + non-negative. Restricted to floating-point inputs. 
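   For example (with illustrative SSA names): a VAL defined as ABS_EXPR <x_1>,
   as y_2 * y_2, or by a call to __builtin_exp is recognized as non-negative
   below, whereas an arbitrary function argument is not.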
*/ + +bool +gimple_val_nonnegative_real_p (tree val) +{ + gimple def_stmt; + + gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val))); + + /* Use existing logic for non-gimple trees. */ + if (tree_expr_nonnegative_p (val)) + return true; + + if (TREE_CODE (val) != SSA_NAME) + return false; + + /* Currently we look only at the immediately defining statement + to make this determination, since recursion on defining + statements of operands can lead to quadratic behavior in the + worst case. This is expected to catch almost all occurrences + in practice. It would be possible to implement limited-depth + recursion if important cases are lost. Alternatively, passes + that need this information (such as the pow/powi lowering code + in the cse_sincos pass) could be revised to provide it through + dataflow propagation. */ + + def_stmt = SSA_NAME_DEF_STMT (val); + + if (is_gimple_assign (def_stmt)) + { + tree op0, op1; + + /* See fold-const.c:tree_expr_nonnegative_p for additional + cases that could be handled with recursion. */ + + switch (gimple_assign_rhs_code (def_stmt)) + { + case ABS_EXPR: + /* Always true for floating-point operands. */ + return true; + + case MULT_EXPR: + /* True if the two operands are identical (since we are + restricted to floating-point inputs). */ + op0 = gimple_assign_rhs1 (def_stmt); + op1 = gimple_assign_rhs2 (def_stmt); + + if (op0 == op1 + || operand_equal_p (op0, op1, 0)) + return true; + + default: + return false; + } + } + else if (is_gimple_call (def_stmt)) + { + tree fndecl = gimple_call_fndecl (def_stmt); + if (fndecl + && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) + { + tree arg1; + + switch (DECL_FUNCTION_CODE (fndecl)) + { + CASE_FLT_FN (BUILT_IN_ACOS): + CASE_FLT_FN (BUILT_IN_ACOSH): + CASE_FLT_FN (BUILT_IN_CABS): + CASE_FLT_FN (BUILT_IN_COSH): + CASE_FLT_FN (BUILT_IN_ERFC): + CASE_FLT_FN (BUILT_IN_EXP): + CASE_FLT_FN (BUILT_IN_EXP10): + CASE_FLT_FN (BUILT_IN_EXP2): + CASE_FLT_FN (BUILT_IN_FABS): + CASE_FLT_FN (BUILT_IN_FDIM): + CASE_FLT_FN (BUILT_IN_HYPOT): + CASE_FLT_FN (BUILT_IN_POW10): + return true; + + CASE_FLT_FN (BUILT_IN_SQRT): + /* sqrt(-0.0) is -0.0, and sqrt is not defined over other + nonnegative inputs. */ + if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val)))) + return true; + + break; + + CASE_FLT_FN (BUILT_IN_POWI): + /* True if the second argument is an even integer. */ + arg1 = gimple_call_arg (def_stmt, 1); + + if (TREE_CODE (arg1) == INTEGER_CST + && (TREE_INT_CST_LOW (arg1) & 1) == 0) + return true; + + break; + + CASE_FLT_FN (BUILT_IN_POW): + /* True if the second argument is an even integer-valued + real. */ + arg1 = gimple_call_arg (def_stmt, 1); + + if (TREE_CODE (arg1) == REAL_CST) + { + REAL_VALUE_TYPE c; + HOST_WIDE_INT n; + + c = TREE_REAL_CST (arg1); + n = real_to_integer (&c); + + if ((n & 1) == 0) + { + REAL_VALUE_TYPE cint; + real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0); + if (real_identical (&c, &cint)) + return true; + } + } + + break; + + default: + return false; + } + } + } + + return false; +} + +/* Given a pointer value OP0, return a simplified version of an + indirection through OP0, or NULL_TREE if no simplification is + possible. Note that the resulting type may be different from + the type pointed to in the sense that it is still compatible + from the langhooks point of view. 
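   For instance (source-level equivalents, illustrative only): *&x folds back
   to x, and for  _Complex double c  the expression  *((double *)&c + 1)
   folds to  __imag__ c.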
*/ + +tree +gimple_fold_indirect_ref (tree t) +{ + tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype); + tree sub = t; + tree subtype; + + STRIP_NOPS (sub); + subtype = TREE_TYPE (sub); + if (!POINTER_TYPE_P (subtype)) + return NULL_TREE; + + if (TREE_CODE (sub) == ADDR_EXPR) + { + tree op = TREE_OPERAND (sub, 0); + tree optype = TREE_TYPE (op); + /* *&p => p */ + if (useless_type_conversion_p (type, optype)) + return op; + + /* *(foo *)&fooarray => fooarray[0] */ + if (TREE_CODE (optype) == ARRAY_TYPE + && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST + && useless_type_conversion_p (type, TREE_TYPE (optype))) + { + tree type_domain = TYPE_DOMAIN (optype); + tree min_val = size_zero_node; + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + if (TREE_CODE (min_val) == INTEGER_CST) + return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); + } + /* *(foo *)&complexfoo => __real__ complexfoo */ + else if (TREE_CODE (optype) == COMPLEX_TYPE + && useless_type_conversion_p (type, TREE_TYPE (optype))) + return fold_build1 (REALPART_EXPR, type, op); + /* *(foo *)&vectorfoo => BIT_FIELD_REF */ + else if (TREE_CODE (optype) == VECTOR_TYPE + && useless_type_conversion_p (type, TREE_TYPE (optype))) + { + tree part_width = TYPE_SIZE (type); + tree index = bitsize_int (0); + return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); + } + } + + /* *(p + CST) -> ... */ + if (TREE_CODE (sub) == POINTER_PLUS_EXPR + && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) + { + tree addr = TREE_OPERAND (sub, 0); + tree off = TREE_OPERAND (sub, 1); + tree addrtype; + + STRIP_NOPS (addr); + addrtype = TREE_TYPE (addr); + + /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF */ + if (TREE_CODE (addr) == ADDR_EXPR + && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE + && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))) + && host_integerp (off, 1)) + { + unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1); + tree part_width = TYPE_SIZE (type); + unsigned HOST_WIDE_INT part_widthi + = tree_low_cst (part_width, 0) / BITS_PER_UNIT; + unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; + tree index = bitsize_int (indexi); + if (offset / part_widthi + <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))) + return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0), + part_width, index); + } + + /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */ + if (TREE_CODE (addr) == ADDR_EXPR + && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE + && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))) + { + tree size = TYPE_SIZE_UNIT (type); + if (tree_int_cst_equal (size, off)) + return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0)); + } + + /* *(p + CST) -> MEM_REF . */ + if (TREE_CODE (addr) != ADDR_EXPR + || DECL_P (TREE_OPERAND (addr, 0))) + return fold_build2 (MEM_REF, type, + addr, + build_int_cst_wide (ptype, + TREE_INT_CST_LOW (off), + TREE_INT_CST_HIGH (off))); + } + + /* *(foo *)fooarrptr => (*fooarrptr)[0] */ + if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE + && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST + && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype)))) + { + tree type_domain; + tree min_val = size_zero_node; + tree osub = sub; + sub = gimple_fold_indirect_ref (sub); + if (! 
sub) + sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub); + type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); + if (type_domain && TYPE_MIN_VALUE (type_domain)) + min_val = TYPE_MIN_VALUE (type_domain); + if (TREE_CODE (min_val) == INTEGER_CST) + return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); + } + + return NULL_TREE; +}
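
As a source-level illustration of what the constructor folding above enables
(a hypothetical input, not a testcase belonging to this patch): a load from a
static, constant-initialized aggregate can be replaced by the constant stored
at the accessed position, via fold_const_aggregate_ref_1 and
fold_array_ctor_reference.

    /* Illustrative C input only.  */
    static const int tab[4] = { 1, 4, 9, 16 };

    int
    f (void)
    {
      return tab[2];   /* Can be folded to the constant 9.  */
    }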