2015-03-26 Jakub Jelinek <jakub@redhat.com>
+ PR tree-optimization/64715
+ * passes.def: Add another instance of pass_object_sizes before
+ ccp1.
+ * tree-object-size.c (pass_object_sizes::execute): In
+ first_pass_instance, only handle __bos (, 1) and __bos (, 3)
+ calls, and keep the call in the IL, as {MIN,MAX}_EXPR of the
+ __bos result and the computed constant. Remove redundant
+ checks, obsoleted by gimple_call_builtin_p test.
+
* var-tracking.c (variable_tracking_main_1): Don't track
variables for targetm.no_register_allocation targets.
--- /dev/null
+/* PR tree-optimization/64715 */
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+
+/* Fortified always-inline wrapper: redirect strcpy to __builtin___strcpy_chk,
+   bounding the copy by __builtin_object_size of the destination.  The second
+   argument "2 > 1" evaluates to 1, i.e. object-size type 1 (size of the
+   closest surrounding subobject), which is the mode PR64715 exercises.  */
+extern inline __attribute__ ((always_inline, gnu_inline, artificial, nothrow, leaf)) char *
+strcpy (char *__restrict dest, const char *__restrict src)
+{
+  return __builtin___strcpy_chk (dest, src, __builtin_object_size (dest, 2 > 1));
+}
+
+const char *str1 = "JIHGFEDCBA";
+void bar (char *);
+
+void
+foo ()
+{
+  struct A { char buf1[9]; char buf2[1]; } a;
+  /* Destination is buf1 at offset 4, so with __bos (, 1) only 5 bytes of
+     the buf1 subobject remain; str1 + 5 is "EDCBA" (6 bytes including the
+     terminating NUL).  The dump scan below expects the __strcpy_chk call
+     to survive with the constant bound 5 rather than being folded away
+     too early — presumably the regression PR64715 guards against.  */
+  strcpy (a.buf1 + (0 + 4), str1 + 5);
+  /* Pass &a to an external function so 'a' is address-taken and the
+     stores cannot be deleted as dead.  */
+  bar ((char *) &a);
+}
+
+/* { dg-final { scan-tree-dump "__builtin___strcpy_chk\[^;\n\r\]*, 5\\\);" "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
continue;
init_object_sizes ();
+
+ /* In the first pass instance, only attempt to fold
+ __builtin_object_size (x, 1) and __builtin_object_size (x, 3),
+ and rather than folding the builtin to the constant if any,
+ create a MIN_EXPR or MAX_EXPR of the __builtin_object_size
+ call result and the computed constant. */
+ if (first_pass_instance)
+ {
+ tree ost = gimple_call_arg (call, 1);
+ if (tree_fits_uhwi_p (ost))
+ {
+ unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
+ tree ptr = gimple_call_arg (call, 0);
+ tree lhs = gimple_call_lhs (call);
+ if ((object_size_type == 1 || object_size_type == 3)
+ && (TREE_CODE (ptr) == ADDR_EXPR
+ || TREE_CODE (ptr) == SSA_NAME)
+ && lhs)
+ {
+ tree type = TREE_TYPE (lhs);
+ unsigned HOST_WIDE_INT bytes
+ = compute_builtin_object_size (ptr, object_size_type);
+ if (bytes != (unsigned HOST_WIDE_INT) (object_size_type == 1
+ ? -1 : 0)
+ && wi::fits_to_tree_p (bytes, type))
+ {
+ tree tem = make_ssa_name (type);
+ gimple_call_set_lhs (call, tem);
+ enum tree_code code
+ = object_size_type == 1 ? MIN_EXPR : MAX_EXPR;
+ tree cst = build_int_cstu (type, bytes);
+ gimple g = gimple_build_assign (lhs, code, tem, cst);
+ gsi_insert_after (&i, g, GSI_NEW_STMT);
+ update_stmt (call);
+ }
+ }
+ }
+ continue;
+ }
+
result = fold_call_stmt (as_a <gcall *> (call), false);
if (!result)
{
- if (gimple_call_num_args (call) == 2
- && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
+ tree ost = gimple_call_arg (call, 1);
+
+ if (tree_fits_uhwi_p (ost))
{
- tree ost = gimple_call_arg (call, 1);
+ unsigned HOST_WIDE_INT object_size_type = tree_to_uhwi (ost);
- if (tree_fits_uhwi_p (ost))
- {
- unsigned HOST_WIDE_INT object_size_type
- = tree_to_uhwi (ost);
-
- if (object_size_type < 2)
- result = fold_convert (size_type_node,
- integer_minus_one_node);
- else if (object_size_type < 4)
- result = build_zero_cst (size_type_node);
- }
+ if (object_size_type < 2)
+ result = fold_convert (size_type_node,
+ integer_minus_one_node);
+ else if (object_size_type < 4)
+ result = build_zero_cst (size_type_node);
}
if (!result)