* tree.def (VTABLE_REF): Remove.
(OBJ_TYPE_REF): New.
(TRY_CATCH_EXPR, TRY_FINALLY_EXPR): Set type 's'.
* expr.c (expand_expr_real_1): Replace VTABLE_REF with OBJ_TYPE_REF.
* fold-const.c (non_lvalue): Likewise.
* gimplify.c (gimplify_expr): Likewise.
(gimplify_call_expr): Use is_gimple_call_addr.
* langhooks-def.h (LANG_HOOKS_FOLD_OBJ_TYPE_REF): New.
* langhooks.h (fold_obj_type_ref): New.
* tree-gimple.c (is_gimple_call_addr): New.
* tree-gimple.h (is_gimple_call_addr): Declare.
* tree-inline.c (inlinable_function_p): Fix merge error.
(estimate_num_insns_1): Replace VTABLE_REF with OBJ_TYPE_REF.
* tree-pretty-print.c (dump_generic_node): Likewise.
(print_call_name): Handle OBJ_TYPE_REF.
* tree-ssa-ccp.c (fold_stmt): Fold OBJ_TYPE_REF.
* tree-ssa-operands.c (get_expr_operands): Handle OBJ_TYPE_REF.
* tree.h (OBJ_TYPE_REF_EXPR): New.
(OBJ_TYPE_REF_OBJECT, OBJ_TYPE_REF_TOKEN): New.
* doc/c-tree.texi (VTABLE_REF): Remove.
* objc/objc-act.c (build_objc_method_call): Build an OBJ_TYPE_REF.
cp/
* class.c (build_vfn_ref): Take a pointer, not an object. Build
an OBJ_TYPE_REF.
(cp_fold_obj_type_ref): New.
* call.c (build_over_call): Update build_vfn_ref call.
* cp-lang.c (LANG_HOOKS_FOLD_OBJ_TYPE_REF): New.
* cp-tree.h (cp_fold_obj_type_ref): Declare.
testsuite/
* g++.dg/opt/devirt1.C: New.
From-SVN: r83531
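
As a rough illustration of what the change buys (hypothetical names; the pattern mirrors the new g++.dg/opt/devirt1.C test below): once inlining exposes the address of a local object in the OBJ_TYPE_REF_OBJECT slot, fold_stmt can ask the C++ front end, via cp_fold_obj_type_ref, to replace the vtable load with the address of the callee, turning the virtual call into a direct one.

  // Hypothetical sketch of the devirtualizable pattern (names invented).
  // At -O, use() is inlined into caller(), &w reaches the OBJ_TYPE_REF's
  // object operand as the address of a decl, and the indirect call is
  // folded to a direct call to Widget::f.
  struct Widget { Widget(); virtual void f(); };
  inline void use(Widget *p) { p->f(); }    // virtual call, carried as an OBJ_TYPE_REF
  void caller() { Widget w; use(&w); }      // dynamic type of w is statically known
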
+2004-06-22 Richard Henderson <rth@redhat.com>
+
+ * tree.def (VTABLE_REF): Remove.
+ (OBJ_TYPE_REF): New.
+ (TRY_CATCH_EXPR, TRY_FINALLY_EXPR): Set type 's'.
+ * expr.c (expand_expr_real_1): Replace VTABLE_REF with OBJ_TYPE_REF.
+ * fold-const.c (non_lvalue): Likewise.
+ * gimplify.c (gimplify_expr): Likewise.
+ (gimplify_call_expr): Use is_gimple_call_addr.
+ * langhooks-def.h (LANG_HOOKS_FOLD_OBJ_TYPE_REF): New.
+ * langhooks.h (fold_obj_type_ref): New.
+ * tree-gimple.c (is_gimple_call_addr): New.
+ * tree-gimple.h (is_gimple_call_addr): Declare.
+ * tree-inline.c (inlinable_function_p): Fix merge error.
+ (estimate_num_insns_1): Replace VTABLE_REF with OBJ_TYPE_REF.
+ * tree-pretty-print.c (dump_generic_node): Likewise.
+ (print_call_name): Handle OBJ_TYPE_REF.
+ * tree-ssa-ccp.c (fold_stmt): Fold OBJ_TYPE_REF.
+ * tree-ssa-operands.c (get_expr_operands): Handle OBJ_TYPE_REF.
+ * tree.h (OBJ_TYPE_REF_EXPR): New.
+ (OBJ_TYPE_REF_OBJECT, OBJ_TYPE_REF_TOKEN): New.
+ * doc/c-tree.texi (VTABLE_REF): Remove.
+ * objc/objc-act.c (build_objc_method_call): Build an OBJ_TYPE_REF.
+
2004-06-22 Richard Henderson <rth@redhat.com>
PR middle-end/16026
+2004-06-22 Richard Henderson <rth@redhat.com>
+
+ * class.c (build_vfn_ref): Take a pointer, not an object. Build
+ an OBJ_TYPE_REF.
+ (cp_fold_obj_type_ref): New.
+ * call.c (build_over_call): Update build_vfn_ref call.
+ * cp-lang.c (LANG_HOOKS_FOLD_OBJ_TYPE_REF): New.
+ * cp-tree.h (cp_fold_obj_type_ref): Declare.
+
2004-06-21 Jason Merrill <jason@redhat.com>
PR c++/16112
if (DECL_CONTEXT (fn) && TYPE_JAVA_INTERFACE (DECL_CONTEXT (fn)))
fn = build_java_interface_fn_ref (fn, *p);
else
- fn = build_vfn_ref (build_indirect_ref (*p, 0), DECL_VINDEX (fn));
+ fn = build_vfn_ref (*p, DECL_VINDEX (fn));
TREE_TYPE (fn) = t;
}
else if (DECL_INLINE (fn))
return aref;
}
-/* Given an object INSTANCE, return an expression which yields a
- function pointer corresponding to vtable element INDEX. */
+/* Given a stable object pointer INSTANCE_PTR, return an expression which
+ yields a function pointer corresponding to vtable element INDEX. */
tree
-build_vfn_ref (tree instance, tree idx)
+build_vfn_ref (tree instance_ptr, tree idx)
{
- tree aref = build_vtbl_ref_1 (instance, idx);
+ tree aref;
+
+ aref = build_vtbl_ref_1 (build_indirect_ref (instance_ptr, 0), idx);
/* When using function descriptors, the address of the
vtable entry is treated as a function pointer. */
aref = build1 (NOP_EXPR, TREE_TYPE (aref),
build_unary_op (ADDR_EXPR, aref, /*noconvert=*/1));
+ /* Remember this as a method reference, for later devirtualization. */
+ aref = build (OBJ_TYPE_REF, TREE_TYPE (aref), aref, instance_ptr, idx);
+
return aref;
}
*vid->last_init = build_tree_list (NULL_TREE, init);
vid->last_init = &TREE_CHAIN (*vid->last_init);
}
+
+/* Fold an OBJ_TYPE_REF expression to the address of a function.
+ KNOWN_TYPE carries the true type of OBJ_TYPE_REF_OBJECT(REF). */
+
+tree
+cp_fold_obj_type_ref (tree ref, tree known_type)
+{
+ HOST_WIDE_INT index = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
+ HOST_WIDE_INT i = 0;
+ tree v = TYPE_BINFO_VIRTUALS (known_type);
+ tree fndecl;
+
+ while (i != index)
+ {
+ i += (TARGET_VTABLE_USES_DESCRIPTORS
+ ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
+ v = TREE_CHAIN (v);
+ }
+
+ fndecl = BV_FN (v);
+
+#ifdef ENABLE_CHECKING
+ if (!tree_int_cst_equal (OBJ_TYPE_REF_TOKEN (ref), DECL_VINDEX (fndecl)))
+ abort ();
+#endif
+
+ return build_address (fndecl);
+}
#define LANG_HOOKS_REGISTER_BUILTIN_TYPE c_register_builtin_type
#undef LANG_HOOKS_GIMPLIFY_EXPR
#define LANG_HOOKS_GIMPLIFY_EXPR cp_gimplify_expr
+#undef LANG_HOOKS_FOLD_OBJ_TYPE_REF
+#define LANG_HOOKS_FOLD_OBJ_TYPE_REF cp_fold_obj_type_ref
/* Each front end provides its own hooks, for toplev.c. */
const struct lang_hooks lang_hooks = LANG_HOOKS_INITIALIZER;
extern tree get_primary_binfo (tree);
extern void debug_class (tree);
extern void debug_thunks (tree);
+extern tree cp_fold_obj_type_ref (tree, tree);
/* in cvt.c */
extern tree convert_to_reference (tree, tree, int, int, tree);
@tindex SAVE_EXPR
@tindex TARGET_EXPR
@tindex AGGR_INIT_EXPR
-@tindex VTABLE_REF
@tindex VA_ARG_EXPR
The internal representation for expressions is for the most part quite
In either case, the expression is void.
-@item VTABLE_REF
-A @code{VTABLE_REF} indicates that the interior expression computes
-a value that is a vtable entry. It is used with @option{-fvtable-gc}
-to track the reference through to front end to the middle end, at
-which point we transform this to a @code{REG_VTABLE_REF} note, which
-survives the balance of code generation.
-
-The first operand is the expression that computes the vtable reference.
-The second operand is the @code{VAR_DECL} of the vtable. The third
-operand is an @code{INTEGER_CST} of the byte offset into the vtable.
-
@item VA_ARG_EXPR
This node is used to implement support for the C/C++ variable argument-list
mechanism. It represents expressions like @code{va_arg (ap, type)}.
return target;
}
- case VTABLE_REF:
- {
- rtx insn, before = get_last_insn (), vtbl_ref;
-
- /* Evaluate the interior expression. */
- subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
- tmode, modifier);
-
- /* Get or create an instruction off which to hang a note. */
- if (REG_P (subtarget))
- {
- target = subtarget;
- insn = get_last_insn ();
- if (insn == before)
- abort ();
- if (! INSN_P (insn))
- insn = prev_nonnote_insn (insn);
- }
- else
- {
- target = gen_reg_rtx (GET_MODE (subtarget));
- insn = emit_move_insn (target, subtarget);
- }
-
- /* Collect the data for the note. */
- vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
- vtbl_ref = plus_constant (vtbl_ref,
- tree_low_cst (TREE_OPERAND (exp, 2), 0));
- /* Discard the initial CONST that was added. */
- vtbl_ref = XEXP (vtbl_ref, 0);
-
- REG_NOTES (insn)
- = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
-
- return target;
- }
+ case OBJ_TYPE_REF:
+ return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
/* Intended for a reference to a buffer of a file-object in Pascal.
But it's not certain that a special tree code will really be
case ARRAY_RANGE_REF:
case BIT_FIELD_REF:
case BUFFER_REF:
- case VTABLE_REF:
+ case OBJ_TYPE_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
the calling expression must occur before the actual call. Force
gimplify_expr to use an internal post queue. */
ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
- is_gimple_val, fb_rvalue);
+ is_gimple_call_addr, fb_rvalue);
if (PUSH_ARGS_REVERSED)
TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
ret = GS_ALL_DONE;
break;
- case VTABLE_REF:
- /* This moves much of the actual computation out of the
- VTABLE_REF. Perhaps this should be revisited once we want to
- do clever things with VTABLE_REFs. */
- ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
- is_gimple_min_lval, fb_lvalue);
+ case OBJ_TYPE_REF:
+ {
+ enum gimplify_status r0, r1;
+ r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
+ is_gimple_val, fb_rvalue);
+ r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
+ is_gimple_val, fb_rvalue);
+ ret = MIN (r0, r1);
+ }
break;
case MIN_EXPR:
/* Hooks for tree gimplification. */
#define LANG_HOOKS_GIMPLIFY_EXPR lhd_gimplify_expr
+#define LANG_HOOKS_FOLD_OBJ_TYPE_REF NULL
#define LANG_HOOKS_GIMPLE_BEFORE_INLINING true
/* Tree dump hooks. */
LANG_HOOKS_DECLS, \
LANG_HOOKS_FOR_TYPES_INITIALIZER, \
LANG_HOOKS_GIMPLIFY_EXPR, \
+ LANG_HOOKS_FOLD_OBJ_TYPE_REF, \
LANG_HOOKS_GIMPLE_BEFORE_INLINING \
}
enum gimplify_status, though we can't see that type here. */
int (*gimplify_expr) (tree *, tree *, tree *);
+ /* Fold an OBJ_TYPE_REF expression to the address of a function.
+ KNOWN_TYPE carries the true type of the OBJ_TYPE_REF_OBJECT. */
+ tree (*fold_obj_type_ref) (tree, tree);
+
/* True if the front end has gimplified the function before running the
inliner, false if the front end generates GENERIC directly. */
bool gimple_before_inlining;
(ret_type,
get_arg_type_list
(method_prototype, METHOD_REF, super_flag)));
+ tree method, t;
lookup_object = build_c_cast (rcv_p, lookup_object);
method_params = tree_cons (NULL_TREE, lookup_object,
tree_cons (NULL_TREE, selector,
method_params));
- TREE_USED (sender) = 1;
- assemble_external (sender);
- /* We want to cast the sender, not convert it. */
- return build_function_call (build_c_cast (sender_cast, sender),
- method_params);
+ method = build_fold_addr_expr (sender);
}
else
{
/* This is the portable (GNU) way. */
- tree method, object;
+ tree object;
/* First, call the lookup function to get a pointer to the method,
then cast the pointer, then call it with the method arguments.
Use SAVE_EXPR to avoid evaluating the receiver twice. */
lookup_object = save_expr (lookup_object);
object = (super_flag ? self_decl : lookup_object);
- TREE_USED (sender) = 1;
- assemble_external (sender);
- method
- = build_function_call (sender,
- tree_cons (NULL_TREE, lookup_object,
- tree_cons (NULL_TREE, selector,
- NULL_TREE)));
+
+ t = tree_cons (NULL_TREE, selector, NULL_TREE);
+ t = tree_cons (NULL_TREE, lookup_object, t);
+ method = build_function_call (sender, t);
/* Pass the object to the method. */
- TREE_USED (method) = 1;
- assemble_external (method);
- return build_function_call
- (build_c_cast (sender_cast, method),
- tree_cons (NULL_TREE, object,
- tree_cons (NULL_TREE, selector, method_params)));
+ method_params = tree_cons (NULL_TREE, object,
+ tree_cons (NULL_TREE, selector,
+ method_params));
}
+
+ /* ??? Selector is not at this point something we can use inside
+ the compiler itself. Set it to garbage for the nonce. */
+ t = build (OBJ_TYPE_REF, sender_cast, method, lookup_object, size_zero_node);
+ return build_function_call (t, method_params);
}
\f
static void
function1_template = make_node (FUNCTION_TYPE);
/* Install argument types - normally set by build_function_type. */
- TYPE_ARG_TYPES (function1_template) = get_arg_type_list (proto, METHOD_DEF, 0);
+ TYPE_ARG_TYPES (function1_template)
+ = get_arg_type_list (proto, METHOD_DEF, 0);
/* install return type */
TREE_TYPE (function1_template) = groktypename (TREE_TYPE (proto));
-2004-06-22 Kelley Cook <kcook@gcc.gnu.org>
+2004-06-22 Richard Henderson <rth@redhat.com>
+
+ * g++.dg/opt/devirt1.C: New.
+
+2004-06-22 Kelley Cook <kcook@gcc.gnu.org>
* g++.dg/opt/pr15551.C: New testcase.
--- /dev/null
+// { dg-do compile }
+// { dg-options "-O" }
+// { dg-final { scan-assembler "xyzzy" } }
+
+struct S { S(); virtual void xyzzy(); };
+inline void foo(S *s) { s->xyzzy(); }
+void bar() { S s; foo(&s); }
op0 -> lhs
op1 -> rhs
call-stmt: CALL_EXPR
- op0 -> ID | '&' ID
+ op0 -> ID | '&' ID | OBJ_TYPE_REF
op1 -> arglist
addr-expr-arg : compref | ID
| unop val
| val binop val
| '(' cast ')' val
+ | method_ref
(cast here stands for all valid C typecasts)
case STRING_CST:
case COMPLEX_CST:
case VECTOR_CST:
+ case OBJ_TYPE_REF:
return 1;
default:
|| TREE_CODE (t) == FIX_ROUND_EXPR);
}
+/* Return true if T is a valid op0 of a CALL_EXPR. */
+
+bool
+is_gimple_call_addr (tree t)
+{
+ return (TREE_CODE (t) == OBJ_TYPE_REF
+ || is_gimple_val (t));
+}
/* If T makes a function call, return the corresponding CALL_EXPR operand.
Otherwise, return NULL_TREE. */
/* Returns true iff T is a variable that does not need to live in memory. */
extern bool is_gimple_non_addressable (tree t);
+/* Returns true iff T is a valid call address expression. */
+extern bool is_gimple_call_addr (tree);
/* If T makes a function call, returns the CALL_EXPR operand. */
extern tree get_call_expr_in (tree t);
else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
inlinable = false;
-#ifdef INLINER_FOR_JAVA
- /* Synchronized methods can't be inlined. This is a bug. */
- else if (METHOD_SYNCHRONIZED (fn))
- inlinable = false;
-#endif /* INLINER_FOR_JAVA */
-
else if (inline_forbidden_p (fn))
{
/* See if we should warn about uninlinable functions. Previously,
case BUFFER_REF:
case ARRAY_REF:
case ARRAY_RANGE_REF:
- case VTABLE_REF:
+ case OBJ_TYPE_REF:
case EXC_PTR_EXPR: /* ??? */
case FILTER_EXPR: /* ??? */
case COMPOUND_EXPR:
pp_character (buffer, ':');
break;
- case VTABLE_REF:
- pp_string (buffer, "VTABLE_REF <(");
- dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
- pp_string (buffer, "),");
- dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
- pp_character (buffer, ',');
- dump_generic_node (buffer, TREE_OPERAND (node, 2), spc, flags, false);
+ case OBJ_TYPE_REF:
+ pp_string (buffer, "OBJ_TYPE_REF(");
+ dump_generic_node (buffer, OBJ_TYPE_REF_EXPR (node), spc, flags, false);
+ pp_character (buffer, ';');
+ dump_generic_node (buffer, OBJ_TYPE_REF_OBJECT (node), spc, flags, false);
+ pp_character (buffer, '-');
pp_character (buffer, '>');
+ dump_generic_node (buffer, OBJ_TYPE_REF_TOKEN (node), spc, flags, false);
+ pp_character (buffer, ')');
break;
case PHI_NODE:
break;
case SSA_NAME:
+ case OBJ_TYPE_REF:
dump_generic_node (buffer, op0, 0, 0, false);
break;
return changed;
result = NULL_TREE;
- /* Check for builtins that CCP can handle using information not
- available in the generic fold routines. */
if (TREE_CODE (rhs) == CALL_EXPR)
{
- tree callee = get_callee_fndecl (rhs);
+ tree callee;
+
+ /* Check for builtins that CCP can handle using information not
+ available in the generic fold routines. */
+ callee = get_callee_fndecl (rhs);
if (callee && DECL_BUILT_IN (callee))
result = ccp_fold_builtin (stmt, rhs);
+ else
+ {
+ /* Check for a resolvable OBJ_TYPE_REF. The only case we can resolve
+ here is when we've propagated the address of a decl into the
+ object slot. */
+ /* ??? Should perhaps do this in fold proper. However, doing it
+ there requires that we create a new CALL_EXPR, and that requires
+ copying EH region info to the new node. Easier to just do it
+ here where we can just smash the call operand. */
+ callee = TREE_OPERAND (rhs, 0);
+ if (TREE_CODE (callee) == OBJ_TYPE_REF
+ && lang_hooks.fold_obj_type_ref
+ && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR
+ && DECL_P (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0)))
+ {
+ tree t;
+
+ t = TREE_TYPE (TREE_OPERAND (OBJ_TYPE_REF_OBJECT (callee), 0));
+ t = lang_hooks.fold_obj_type_ref (callee, t);
+ if (t)
+ {
+ TREE_OPERAND (rhs, 0) = t;
+ changed = true;
+ }
+ }
+ }
}
/* If we couldn't fold the RHS, hand over to the generic fold routines. */
|| code == TRUTH_AND_EXPR
|| code == TRUTH_OR_EXPR
|| code == TRUTH_XOR_EXPR
- || code == COMPOUND_EXPR)
+ || code == COMPOUND_EXPR
+ || code == OBJ_TYPE_REF)
{
tree op0 = TREE_OPERAND (expr, 0);
tree op1 = TREE_OPERAND (expr, 1);
of the range is taken from the type of the expression. */
DEFTREECODE (ARRAY_RANGE_REF, "array_range_ref", 'r', 4)
-/* Vtable indexing. Carries data useful for emitting information
- for vtable garbage collection.
- Operand 0: an array_ref (or equivalent expression)
- Operand 1: the vtable base (must be a var_decl)
- Operand 2: index into vtable (must be an integer_cst). */
-DEFTREECODE (VTABLE_REF, "vtable_ref", 'r', 3)
+/* Used to represent lookup of runtime type dependent data. Often this is
+ a reference to a vtable, but it needn't be. Operands are:
+ OBJ_TYPE_REF_EXPR: An expression that evaluates the value to use.
+ OBJ_TYPE_REF_OBJECT: The object on whose behalf the lookup is
+ being performed. Through this the optimizers may be able to statically
+ determine the dynamic type of the object.
+ OBJ_TYPE_REF_TOKEN: Something front-end specific used to resolve the
+ reference to something simpler, usually to the address of a DECL.
+ Never touched by the middle-end. Good choices would be either an
+ identifier or a vtable index. */
+DEFTREECODE (OBJ_TYPE_REF, "obj_type_ref", 'e', 3)
/* The exception object from the runtime. */
DEFTREECODE (EXC_PTR_EXPR, "exc_ptr_expr", 'e', 0)
This differs from TRY_FINALLY_EXPR in that operand 2 is not evaluated
on a normal or jump exit, only on an exception. */
-DEFTREECODE (TRY_CATCH_EXPR, "try_catch_expr", 'e', 2)
+DEFTREECODE (TRY_CATCH_EXPR, "try_catch_expr", 's', 2)
/* Evaluate the first operand.
The second operand is a cleanup expression which is evaluated
on any exit (normal, exception, or jump out) from this expression. */
-DEFTREECODE (TRY_FINALLY_EXPR, "try_finally", 'e', 2)
+DEFTREECODE (TRY_FINALLY_EXPR, "try_finally", 's', 2)
\f
/* These types of expressions have no useful value,
and always have side effects. */
#define EH_FILTER_FAILURE(NODE) TREE_OPERAND (EH_FILTER_EXPR_CHECK (NODE), 1)
#define EH_FILTER_MUST_NOT_THROW(NODE) TREE_STATIC (EH_FILTER_EXPR_CHECK (NODE))
+/* OBJ_TYPE_REF accessors. */
+#define OBJ_TYPE_REF_EXPR(NODE) TREE_OPERAND (OBJ_TYPE_REF_CHECK (NODE), 0)
+#define OBJ_TYPE_REF_OBJECT(NODE) TREE_OPERAND (OBJ_TYPE_REF_CHECK (NODE), 1)
+#define OBJ_TYPE_REF_TOKEN(NODE) TREE_OPERAND (OBJ_TYPE_REF_CHECK (NODE), 2)
+
struct tree_exp GTY(())
{
struct tree_common common;