+2004-05-30  Kazu Hirata  <kazu@cs.umass.edu>
+
+ * bb-reorder.c, builtins.c, c-common.c, c-gimplify.c,
+ c-incpath.c, cgraphunit.c, ddg.c, defaults.h, dwarf2out.c,
+ expmed.c, flags.h, gcc.c, gensupport.c, gimplify.c, global.c,
+ passes.c, reg-stack.c, target.h, toplev.c, tree-alias-ander.c,
+ tree-alias-common.c, tree-cfg.c, tree-complex.c, tree-dfa.c,
+ tree-eh.c, tree-mudflap.c, tree-mudflap.h, tree-outof-ssa.c,
+ tree-phinodes.c, tree-pretty-print.c, tree-ssa-alias.c,
+ tree-ssa-ccp.c, tree-ssa-live.c, tree-ssa-live.h,
+ tree-ssa-pre.c, tree.h, value-prof.h, varasm.c: Fix comment
+ formatting.
+
2004-05-30  Steven Bosscher  <stevenb@suse.de>

* gimplify.c (sort_case_labels): New.  Split out from...
/* If any destination of a crossing edge does not have a label, add label;
Convert any fall-through crossing edges (for blocks that do not contain
- a jump) to unconditional jumps. */
+ a jump) to unconditional jumps.  */
static void
add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
&& cur_bb->rbi->next == cond_jump->dest)
{
/* Find label in fall_thru block. We've already added
- any missing labels, so there must be one. */
+ any missing labels, so there must be one.  */
fall_thru_label = block_label (fall_thru->dest);
/* Check each predecessor to see if it has a label, and contains
only one executable instruction, which is an unconditional jump.
- If so, we can use it. */
+ If so, we can use it.  */
if (GET_CODE (BB_HEAD (src)) == CODE_LABEL)
for (insn = BB_HEAD (src);
errno_set = false;
/* Check if sincos insn is available, otherwise fall back
- to sin or cos insn. */
+ to sin or cos insn.  */
if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
{
switch (DECL_FUNCTION_CODE (fndecl))
{
{
tree string_arg;
- /* If optimizing for size keep fputs. */
+ /* If optimizing for size keep fputs.  */
if (optimize_size)
return 0;
string_arg = TREE_VALUE (arglist);
/* Hash function for the problem of multiple type definitions in
different files. This must hash all types that will compare
equal via comptypes to the same value. In practice it hashes
- on some of the simple stuff and leaves the details to comptypes. */
+ on some of the simple stuff and leaves the details to comptypes.  */
static hashval_t
c_type_hash (const void *p)
tree t2;
switch (TREE_CODE (t))
{
- /* For pointers, hash on pointee type plus some swizzling. */
+ /* For pointers, hash on pointee type plus some swizzling.  */
case POINTER_TYPE:
return c_type_hash (TREE_TYPE (t)) ^ 0x3003003;
/* Hash on number of elements and total size. */
if (c_language != clk_c || flag_isoc99)
return -1;
- /* Save time if there's only one input file. */
+ /* Save time if there's only one input file.  */
if (!current_file_decl || TREE_CHAIN (current_file_decl) == NULL_TREE)
return -1;
{
/* Reorganize the statements so that we do the right thing with a
CLEANUP_STMT. We want the FOR_STMT and nothing else to be in the
- scope of the cleanup, so play with pointers to accomplish that. */
+ scope of the cleanup, so play with pointers to accomplish that.  */
FOR_INIT_STMT (stmt) = NULL_TREE;
chainon (init, stmt);
*stmt_p = init;
cpp_errno (pfile, CPP_DL_ERROR, cur->name);
else
{
- /* If -Wmissing-include-dirs is given, warn. */
+ /* If -Wmissing-include-dirs is given, warn.  */
cpp_options *opts = cpp_get_options (pfile);
if (opts->warn_missing_include_dirs && cur->user_supplied_p)
cpp_errno (pfile, CPP_DL_WARNING, cur->name);
what = e->callee;
- /* Now update size of caller and all functions caller is inlined into. */
+ /* Now update size of caller and all functions caller is inlined into.  */
for (;e && !e->inline_failed; e = e->caller->callers)
{
old_insns = e->caller->global.insns;
return;
/* We must not add ANTI dep when there is an intra-loop TRUE dep in
the opposite direction. If the first_def reaches the USE then there is
- such a dep. */
+ such a dep.  */
if (! bitmap_bit_p (bb_info->rd_gen, first_def->id))
create_ddg_dep_no_link (g, use_node, def_node, ANTI_DEP, REG_DEP, 1);
}
}
/* Perform intra-block Data Dependency analysis and connect the nodes in
- the DDG. We assume the loop has a single basic block. */
+ the DDG. We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
/* Determines whether explicit template instantiations should
be given link-once semantics. The C++ ABI requires this
- macro to be nonzero; see the documentation. */
+ macro to be nonzero; see the documentation.  */
#ifndef TARGET_EXPLICIT_INSTANTIATIONS_ONE_ONLY
# define TARGET_EXPLICIT_INSTANTIATIONS_ONE_ONLY 1
#endif
/* This determines whether this target supports hidden visibility.
This is a weaker condition than HAVE_GAS_HIDDEN, which probes for
- specific assembler syntax. */
+ specific assembler syntax.  */
#ifndef TARGET_SUPPORTS_HIDDEN
# ifdef HAVE_GAS_HIDDEN
# define TARGET_SUPPORTS_HIDDEN 1
#endif
/* Determines whether we may use common symbols to represent one-only
- semantics (a.k.a. "vague linkage"). */
+ semantics (a.k.a. "vague linkage").  */
#ifndef USE_COMMON_FOR_ONE_ONLY
# define USE_COMMON_FOR_ONE_ONLY 1
#endif
having an FDE kept around when the function it refers to is
discarded. (Example where this matters: a primary function
template in C++ requires EH information, but an explicit
- specialization doesn't. */
+ specialization doesn't.)  */
if (TARGET_USES_WEAK_UNWIND_INFO
&& ! flag_asynchronous_unwind_tables
&& for_eh)
tem = expand_mult_const (wider_mode, tem, cnst1, 0, &alg, variant);
tem = extract_high_half (mode, tem);
- /* Adjust result for signedness. */
+ /* Adjust result for signedness.  */
if (sign_adjust)
tem = force_operand (gen_rtx_MINUS (mode, tem, op0), tem);
extern int flag_shared_data;
-/* Controls the activation of SMS modulo scheduling. */
+/* Controls the activation of SMS modulo scheduling.  */
extern int flag_modulo_sched;
/* flag_schedule_insns means schedule insns within basic blocks (before
else
{
/* Since there is no compiler for this input file, assume it is a
- linker file. */
+ linker file.  */
explicit_link_files[i] = 1;
infiles[i].incompiler = NULL;
}
continue;
/* If the original insn came from a define_insn_and_split,
- generate a new split to handle the predicated insn. */
+ generate a new split to handle the predicated insn.  */
split = copy_rtx (insn_elem->split->data);
/* Predicate the pattern matched by the split. */
pattern = rtx_alloc (COND_EXEC);
else_ = build_empty_stmt ();
}
- /* If we aren't hijacking a label for the 'then' branch, it falls through. */
+ /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
if (true_label)
true_label_p = &true_label;
else
conflicts and as a consequence worse register allocation. The
typical example where the information can be different is a
register initialized in the loop at the basic block preceding the
- loop in CFG. */
+ loop in CFG.  */
/* The following structure contains basic block data flow information
used to calculate partial availability of registers. */
blocks. After the function call a register lives at a program
point only if it is initialized on a path from CFG entry to the
program point. The standard GCC life analysis permits registers to
- live uninitialized. */
+ live uninitialized.  */
static void
make_accurate_live_analysis (void)
/* The optimization to partition hot/cold basic blocks into separate
sections of the .o file does not work well with exception handling.
- Don't call it if there are exceptions. */
+ Don't call it if there are exceptions.  */
if (flag_reorder_blocks_and_partition && !flag_exceptions)
{
swap_to_top (insn, regstack, *src1, *src2);
/* Push the result back onto stack. Empty stack slot
- will be filled in second part of insn. */
+ will be filled in second part of insn.  */
if (STACK_REG_P (*dest))
{
regstack->reg[regstack->top] = REGNO (*dest);
SET_HARD_REG_BIT (regstack->reg_set, REGNO (*dest));
abort ();
/* Push the result back onto stack. Empty stack slot
- will be filled in second part of insn. */
+ will be filled in second part of insn.  */
if (STACK_REG_P (*dest))
{
regstack->reg[regstack->top + 1] = REGNO (*dest);
SET_HARD_REG_BIT (regstack->reg_set, REGNO (*dest));
target requires such labels. Second argument is the decl the
unwind info is associated with, third is a boolean: true if
this is for exception handling, fourth is a boolean: true if
- this is only a placeholder for an omitted FDE. */
+ this is only a placeholder for an omitted FDE.  */
void (* unwind_label) (FILE *, tree, int, int);
/* Output an internal label. */
int flag_reorder_blocks = 0;
/* Nonzero if blocks should be partitioned into hot and cold sections in
- addition to being reordered. */
+ addition to being reordered.  */
int flag_reorder_blocks_and_partition = 0;
int flag_sched_stalled_insns = 0;
int flag_sched_stalled_insns_dep = 1;
-/* The following flag controls the module scheduling activation. */
+/* The following flag controls the modulo scheduling activation.  */
int flag_modulo_sched = 0;
int flag_single_precision_constant;
/* Enable the SSA-PRE tree optimization. */
int flag_tree_pre = 0;
-/* Enable points-to analysis on trees. */
+/* Enable points-to analysis on trees.  */
enum pta_type flag_tree_points_to = PTA_NONE;
/* Enable SSA-CCP on trees. */
andersen_empty_points_to_set,
0, /* data */
0, /* Currently non-interprocedural */
- 1 /* Can do IP on all statics without help. */
+ 1 /* Can do IP on all statics without help.  */
};
struct tree_alias_ops *andersen_alias_ops = &andersen_ops;
deleteregion (scratch_rgn);
}
-/* Initialize Andersen alias analysis. */
+/* Initialize Andersen alias analysis.  */
static int initted = 0;
static void
andersen_init (struct tree_alias_ops *ops ATTRIBUTE_UNUSED)
{
#if 0
- /* Don't claim we can do ip partial unless we have unit_at_a_time on. */
+ /* Don't claim we can do ip partial unless we have unit_at_a_time on.  */
if (!flag_unit_at_a_time)
#endif
andersen_ops.ip_partial = 0;
/* Pointer destroying operations do not give us the same valid pointer
- back, and thus, are assignment to pta_bottom. */
+ back, and thus, are assignments to pta_bottom.  */
if (pointer_destroying_op (operation))
{
pta_assignment (ALIAS_VAR_ATERM (lhs), pta_rvalue (pta_bottom ()));
size_t l = VARRAY_ACTIVE_SIZE (params);
size_t i;
- /* Set up the arguments for the new function type. */
+ /* Set up the arguments for the new function type.  */
for (i = 0; i < l; i++)
{
alias_var tv = VARRAY_GENERIC_PTR (params, i);
/* We can handle functions we've got trees for. non-statics will
just have incoming parameters assigned to global_var if
- necessary. */
+ necessary.  */
if (TREE_CODE (decl) == FUNCTION_DECL
&& DECL_PTA_ALIASVAR (decl)
&& ops->ip_partial
}
-/* Get the points-to set for TV, caching if it we had to compute it. */
+/* Get the points-to set for TV, caching it if we had to compute it.  */
static aterm_list
get_ptset (alias_var tv)
}
-/* Determine if two aterm's have the same points-to set. */
+/* Determine if two aterms have the same points-to set.  */
static bool
andersen_same_points_to_set (struct tree_alias_ops *ops ATTRIBUTE_UNUSED,
#include "tree-alias-type.h"
#include "bitmap.h"
#include "tree-alias-common.h"
-/* If we have andersen's points-to analysis, include it. */
+/* If we have Andersen's points-to analysis, include it.  */
#ifdef HAVE_BANSHEE
#include "tree-alias-ander.h"
#endif
{
newvar = create_alias_var (decl);
/* Assign globals to global var for purposes of intraprocedural
- analysis. */
+ analysis.  */
if ((DECL_CONTEXT (decl) == NULL
|| TREE_PUBLIC (decl)
|| TREE_STATIC (decl)
get_alias_var (pta_global_var),
newvar);
/* If the global has some DECL_INITIAL, we need to process
- it here. */
+ it here.  */
if (DECL_INITIAL (decl))
find_func_aliases (decl);
}
/* If it's a decl, get the alias var of the decl. We farm this off
to get_alias_var_decl so it can abort if the alias var doesn't
exist, and in case something else *knows* it has a decl, and
- wants the alias var. */
+ wants the alias var.  */
if (DECL_P (expr))
return get_alias_var_decl (expr);
return get_alias_var (TREE_OPERAND (expr, 1));
}
#else
- /* Find the first non-component ref, and return its alias variable. */
+ /* Find the first non-component ref, and return its alias variable.  */
tree p;
for (p = expr; TREE_CODE (p) == COMPONENT_REF;
p = TREE_OPERAND (p, 0));
case INDIRECT_REF:
case BIT_FIELD_REF:
/* If it's a ref or cast or conversion of something, get the
- alias var of the something. */
+ alias var of the something.  */
return get_alias_var (TREE_OPERAND (expr, 0));
break;
size_t i;
alias_var av = get_alias_var (pta_global_var);
- /* We assume assignments among the actual parameters. */
+ /* We assume assignments among the actual parameters.  */
for (i = 0; i < l; i++)
{
alias_var argi = VARRAY_GENERIC_PTR (args, i);
continue;
argj = VARRAY_GENERIC_PTR (args, j);
/* Restricted pointers can't be aliased with other
- restricted pointers. */
+ restricted pointers.  */
if (!TYPE_RESTRICT (TREE_TYPE (ALIAS_VAR_DECL (argi)))
|| !TYPE_RESTRICT (TREE_TYPE (ALIAS_VAR_DECL (argj))))
- /* Do a bit of TBAA to avoid pointless assignments. */
+ /* Do a bit of TBAA to avoid pointless assignments.  */
if (alias_sets_conflict_p (get_alias_set (ALIAS_VAR_DECL (argi)),
get_alias_set (ALIAS_VAR_DECL (argj))))
current_alias_ops->simple_assign (current_alias_ops, argi, argj);
}
}
- /* We assume that an actual parameter can point to any global. */
+ /* We assume that an actual parameter can point to any global.  */
for (i = 0; i < l; i++)
{
alias_var argav = VARRAY_GENERIC_PTR (args, i);
/* Restricted pointers can't be aliased with other
- restricted pointers. */
+ restricted pointers.  */
if (!TYPE_RESTRICT (TREE_TYPE (ALIAS_VAR_DECL (argav)))
|| !TYPE_RESTRICT (TREE_TYPE (ALIAS_VAR_DECL (av))))
{
/* Arguments can alias globals, and whatever they point to
- can point to a global as well. */
+ can point to a global as well.  */
current_alias_ops->simple_assign (current_alias_ops, argav, av);
}
}
op1 = TREE_OPERAND (op1, 0);
}
/* Take care of fact that we may have multi-level component
- refs. */
+ refs.  */
if (TREE_CODE (op1) == COMPONENT_REF)
op1 = find_op_of_decl (op1);
#endif
/* You would think we could test rhsAV at the top, rather than
50 separate times, but we can't, because it can be NULL for
operator assignments, where we'd still collect the individual
- alias vars for the operator. */
+ alias vars for the operator.  */
/* Note that structures are treated as a single alias
variable, since we can disambiguate based on TBAA first,
- and fall back on points-to. */
+ and fall back on points-to.  */
/* x = <something> */
if (is_gimple_variable (op0))
{
else if (TREE_CODE (op1) == CALL_EXPR)
{
/* Heap assignment. These are __attribute__ malloc or
- something, i'll deal with it later. */
+ something, I'll deal with it later.  */
if (0)
{}
else
{
- /* NORETURN functions have no effect on aliasing. */
+ /* NORETURN functions have no effect on aliasing.  */
if (call_may_return (op1))
{
varray_type args;
|| TREE_CODE (op0) == ARRAY_REF)
&& TREE_CODE (op1) == ADDR_EXPR)
{
- /* This becomes temp = &y and *x = temp . */
+ /* This becomes temp = &y and *x = temp.  */
alias_var tempvar;
tree temp = create_tmp_var_raw (void_type_node, "aliastmp");
tempvar = current_alias_ops->add_var (current_alias_ops, temp);
&& (TREE_CODE (op1) == INDIRECT_REF
|| TREE_CODE (op1) == ARRAY_REF))
{
- /* This becomes temp = *y and *x = temp . */
+ /* This becomes temp = *y and *x = temp.  */
alias_var tempvar;
tree temp;
temp = create_tmp_var_raw (void_type_node, "aliastmp");
{
if (rhsAV != NULL)
{
- /* This becomes temp = (cast) y and *x = temp. */
+ /* This becomes temp = (cast) y and *x = temp.  */
alias_var tempvar;
tree temp;
temp = create_tmp_var_raw (void_type_node, "aliastmp");
}
}
}
- /* Calls without return values. */
+ /* Calls without return values.  */
else if (TREE_CODE (stp) == CALL_EXPR)
{
alias_var callvar;
/* Incoming pointers can point to pta_global_var, unless
either we are interprocedural, or we can do ip on all
statics + this function has been defined + it's not an
- external function. */
+ external function.  */
if (POINTER_TYPE_P (TREE_TYPE (arg))
&& !current_alias_ops->ip
- /* FIXME: Need to let analyzer decide in partial case. */
+ /* FIXME: Need to let analyzer decide in partial case.  */
&& (!current_alias_ops->ip_partial
|| !cgraph_local_info (decl)->local))
current_alias_ops->simple_assign (current_alias_ops, var,
/* Incoming pointers can point to pta_global_var, unless
either we are interprocedural, or we can do ip on all
statics + this function has been defined + it's not an
- external function. */
+ external function.  */
if (POINTER_TYPE_P (TREE_TYPE (fakedecl))
&& !current_alias_ops->ip
- /* FIXME: need to let analyzer decide in partial case. */
+ /* FIXME: need to let analyzer decide in partial case.  */
&& (!current_alias_ops->ip_partial
|| !TREE_STATIC (decl)
|| TREE_PUBLIC (decl)))
DECL_PTA_ALIASVAR (decl) = avar;
/* FIXME: Also, if this is a defining declaration then add the annotation
- to all extern definitions of the function. */
+ to all extern definitions of the function.  */
return avar;
}
/* 2 = UF
4 = the masked pointer
2 = the <> around it
- 1 = the terminator. */
+ 1 = the terminator.  */
namep = ggc_alloc (2 + 4 + 2 + 1);
sprintf (namep, "<UV%x>", MASK_POINTER (t));
return namep;
return;
}
- /* Nothing more to do for nonlocal gotos. */
+ /* Nothing more to do for nonlocal gotos.  */
if (TREE_CODE (dest) == LABEL_DECL)
return;
t1 = do_binop (bsi, MULT_EXPR, inner_type, min, ratio);
div = do_binop (bsi, PLUS_EXPR, inner_type, t1, max);
- /* Result is now ((ar + ai*ratio)/div) + i((ai - ar*ratio)/div). */
+ /* Result is now ((ar + ai*ratio)/div) + i((ai - ar*ratio)/div).  */
t1 = do_binop (bsi, MULT_EXPR, inner_type, ai, ratio);
t2 = do_binop (bsi, PLUS_EXPR, inner_type, ar, t1);
rr = do_binop (bsi, code, inner_type, t2, div);
/* Mark variables in BLOCK that have hidden uses. A hidden use can
- occur due to VLA declarations or nested functions. */
+ occur due to VLA declarations or nested functions.  */
static void
find_hidden_use_vars (tree block)
VLA parameter and that declaration is not seen by the SSA code.
Note get_pending_sizes clears the PENDING_SIZES chain, so we
- must restore it. */
+ must restore it.  */
tem = get_pending_sizes ();
put_pending_sizes (tem);
for (; tem; tem = TREE_CHAIN (tem))
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
whatever is needed to finish the return. If MOD is non-null, insert it
before the new branch. RETURN_VALUE_P is a cache containing a temporary
- variable to be used in manipulating the value returned from the function. */
+ variable to be used in manipulating the value returned from the function.  */
static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
/* extern const uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;
-/* Their function-scope local shadows, used in single-threaded mode only. */
+/* Their function-scope local shadows, used in single-threaded mode only.  */
/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;
/* Create and initialize local shadow variables for the lookup cache
globals. Put their decls in the *_l globals for use by
- mf_build_check_statement_for. */
+ mf_build_check_statement_for.  */
static void
mf_decl_cache_locals (tree* body)
static void
mf_decl_clear_locals (void)
{
- /* Unset local shadows. */
+ /* Unset local shadows.  */
mf_cache_shift_decl_l = NULL_TREE;
mf_cache_mask_decl_l = NULL_TREE;
}
}
pp_clear_output_area (buf);
- /* Add FILENAME[:LINENUMBER]. */
+ /* Add FILENAME[:LINENUMBER].  */
{
const char *sourcefile;
unsigned sourceline;
/* This struct is passed between mf_xform_decls to store state needed
during the traversal searching for objects that have their
- addresses taken. */
+ addresses taken.  */
struct mf_xform_decls_data
{
tree param_decls;
method by which VLA declarations turn into calls to
BUILT_IN_STACK_ALLOC. We assume that multiple
VLAs declared later in the same block get allocation
- code later than the others. */
+ code later than the others.  */
tree stack_alloc_call = NULL_TREE;
while (! tsi_end_p (initially_stmts))
/* Got it! */
size = stack_alloc_op2;
stack_alloc_call = call;
- /* Advance iterator to point past this allocation call. */
+ /* Advance iterator to point past this allocation call.  */
tsi_next (&initially_stmts);
break;
}
internal_error ("mudflap ran off end of BIND_EXPR body");
tsi_link_before (&initially_stmts, register_fncall, TSI_SAME_STMT);
- /* Accumulate the FINALLY piece. */
+ /* Accumulate the FINALLY piece.  */
append_to_statement_list (unregister_fncall, &finally_stmts);
mf_mark (decl);
#ifndef TREE_MUDFLAP_H
#define TREE_MUDFLAP_H
-/* Instrumentation. */
+/* Instrumentation.  */
extern void mudflap_c_function_decls (tree);
extern void mudflap_c_function_ops (tree);
extern void mudflap_enqueue_decl (tree);
extern void mudflap_enqueue_constant (tree);
extern void mudflap_finish_file (void);
-/* Tree node marking. */
+/* Tree node marking.  */
extern int mf_marked_p (tree);
extern tree mf_mark (tree);
-/* To be provided by a front-end interface module. */
+/* To be provided by a front-end interface module.  */
extern tree mflang_lookup_decl (const char *);
extern void mflang_flush_calls (tree);
/* List of nodes in the elimination graph. */
varray_type nodes;
- /* The predecessor and successor edge list. */
+ /* The predecessor and successor edge list.  */
varray_type edge_list;
/* Visited vector. */
root_var_decompact (rv);
/* First, coalesce all live on entry variables to their root variable.
- This will ensure the first use is coming from the correct location. */
+ This will ensure the first use is coming from the correct location.  */
live = sbitmap_alloc (num_var_partitions (map));
sbitmap_zero (live);
phi = make_phi_node (var, bb_ann (bb)->num_preds);
/* This is a new phi node, so note that it has not yet been
- rewritten. */
+ rewritten.  */
PHI_REWRITTEN (phi) = 0;
/* Add the new PHI node to the list of PHI nodes for block BB. */
if (TYPE_METHODS (TREE_TYPE (node)))
{
/* The type is a C++ class: all structures have at least
- 4 methods. */
+ 4 methods.  */
pp_string (buffer, "class ");
dump_generic_node (buffer, TREE_TYPE (node), spc, flags, false);
}
/* Note that VAR and TAG may be the same if the function has no
addressable variables (see the discussion at the end of
- setup_pointers_and_addressables). */
+ setup_pointers_and_addressables).  */
if (var != tag)
add_may_alias (var, tag);
else
for (i = 0; i < PHI_NUM_ARGS (phi); i++)
{
- /* Compute the meet operator over all the PHI arguments. */
+ /* Compute the meet operator over all the PHI arguments.  */
edge e = PHI_ARG_EDGE (phi, i);
if (dump_file && (dump_flags & TDF_DETAILS))
}
/* We may be able to fold away calls to builtin functions if their
- arguments are constants. */
+ arguments are constants.  */
else if (code == CALL_EXPR
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == ADDR_EXPR
&& (TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0))
/* A subroutine of fold_stmt_r. Attempts to fold *(A+O) to A[X].
BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE
- is the desired result type. */
+ is the desired result type.  */
static tree
maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
break;
}
}
- /* Fall through is an error; it will be detected in tree-sra. */
+ /* Fall through is an error; it will be detected in tree-sra.  */
}
break;
/* This function will combine the partitions in MAP for VAR1 and VAR2. It
returns the partition which represents the new partition. If the two
- partitions cannot be combined, NO_PARTITION is returned. */
+ partitions cannot be combined, NO_PARTITION is returned.  */
int
var_union (var_map map, tree var1, tree var2)
This is handled specially here since we may also be interested
in copies between real variables and SSA_NAME variables. We may
be interested in trying to coalesce SSA_NAME variables with
- root variables in some cases. */
+ root variables in some cases.  */
if (TREE_CODE (stmt) == MODIFY_EXPR)
{
Note we delete PHI nodes in this loop if they are
associated with virtual vars which are going to be
- renamed. */
+ renamed.  */
for (phi = phi_nodes (bb); phi; phi = next)
{
tree result = SSA_NAME_VAR (PHI_RESULT (phi));
/* Merge the live on entry information in LIVE for partitions P1 and P2. Place
- the result into P1. Clear P2. */
+ the result into P1. Clear P2.  */
static inline void
live_merge_and_clear (tree_live_info_p live, int p1, int p2)
/* Iterated dominance frontiers cache. */
static bitmap *idfs_cache;
-/* Partial redundancies statistics. */
+/* Partial redundancies statistics.  */
static struct pre_stats_d
{
int reloads;
{
/* The actual expression. */
tree expr;
- /* The occurrences. */
+ /* The occurrences.  */
varray_type occurs;
- /* The kills. */
+ /* The kills.  */
varray_type kills;
- /* The left occurrences. */
+ /* The left occurrences.  */
varray_type lefts;
- /* An array of real occurrences. */
+ /* An array of real occurrences.  */
varray_type reals;
- /* True if it's a strength reduction candidate. */
+ /* True if it's a strength reduction candidate.  */
bool strred_cand;
- /* True if it's a load PRE candidate. */
+ /* True if it's a load PRE candidate.  */
bool loadpre_cand;
- /* The euses/ephis in preorder dt order. */
+ /* The euses/ephis in preorder dt order.  */
varray_type euses_dt_order;
- /* The name of the temporary for this expression. */
+ /* The name of the temporary for this expression.  */
tree temp;
};
static bool
is_injuring_def (struct expr_info *ei, tree inj)
{
- /* Things that are never injuring definitions. */
+ /* Things that are never injuring definitions.  */
if (!inj || IS_EMPTY_STMT (inj) || TREE_CODE (inj) == PHI_NODE)
return false;
- /* Things we can't handle. */
+ /* Things we can't handle.  */
if (TREE_CODE (TREE_OPERAND (inj, 1)) != PLUS_EXPR
&& TREE_CODE (TREE_OPERAND (inj, 1)) != MINUS_EXPR)
return false;
for an expression like "a * 5".
This limitation only exists because we don't know how to repair
- other forms of increments/decrements. */
+ other forms of increments/decrements.  */
if (!names_match_p (TREE_OPERAND (inj, 0), TREE_OPERAND (ei->expr, 0))
|| !TREE_OPERAND (TREE_OPERAND (inj, 1), 0)
|| !names_match_p (TREE_OPERAND (TREE_OPERAND (inj, 1), 0),
/* If we are strength reducing a multiply, we have the additional
constraints that
1. {expr} is 1
- 2. {expr} and the RHS of the expression are constants. */
+ 2. {expr} and the RHS of the expression are constants.  */
if (TREE_CODE (ei->expr) == MULT_EXPR)
{
tree irhs1;
}
/* Find the statement defining VAR, ignoring injuries we can repair.
- START is the first potential injuring def. */
+ START is the first potential injuring def.  */
static tree
factor_through_injuries (struct expr_info *ei, tree start, tree var,
alteration reaches that merge point).
We do this recursively, because we have to figure out
- EPHI's for the variables in the PHI as well. */
+ EPHI's for the variables in the PHI as well.  */
static void
set_var_phis (struct expr_info *ei, tree phi)
{
basic_block bb = bb_for_stmt (phi);
/* If we've already got an EPHI set to be placed in PHI's BB, we
- don't need to do this again. */
+ don't need to do this again.  */
if (!bitmap_bit_p (varphis, bb->index)
&& !bitmap_bit_p (dfphis, bb->index))
{
{
phi_operand = PHI_ARG_DEF (phi, curr_phi_operand);
/* For strength reduction, factor through injuries we can
- repair. */
+ repair.  */
if (ei->strred_cand && TREE_CODE (phi_operand) != PHI_NODE)
{
phi_operand = factor_through_injuries (ei, phi_operand,
/* If our phi operand is defined by a phi, we need to
record where the phi operands alter the expression as
- well, and place EPHI's at each point. */
+ well, and place EPHI's at each point.  */
if (TREE_CODE (phi_operand) == PHI_NODE)
set_var_phis (ei, phi_operand);
}
}
}
/* Union the results of the dfphis and the varphis to get the
- answer to everywhere we need EPHIS. */
+ answer to everywhere we need EPHIS.  */
bitmap_a_or_b (dfphis, dfphis, varphis);
- /* Now create the EPHI's in each of these blocks. */
+ /* Now create the EPHI's in each of these blocks.  */
EXECUTE_IF_SET_IN_BITMAP (dfphis, 0, i,
{
tree ref = create_expr_ref (ei, ei->expr, EPHI_NODE, BASIC_BLOCK (i),
{
tree ephi = ephi_at_block (block);
/* The ordering for a given BB is EPHI's, real/left/kill
- occurrences, phi preds, exit occurrences. */
+ occurrences, phi preds, exit occurrences.  */
if (ephi != NULL_TREE)
VARRAY_PUSH_TREE (ei->euses_dt_order, ephi);
}
else if (succ->dest == EXIT_BLOCK_PTR && !(succ->flags & EDGE_FAKE))
{
/* No point in inserting exit blocks into heap first, since
- they'll never be anything on the stack. */
+ they'll never be anything on the stack.  */
tree newref;
newref = create_expr_ref (ei, ei->expr, EEXIT_NODE,
block,
}
/* Make a copy of Z as it would look in basic block PRED, using the PHIs
- in BB. */
+ in BB.  */
static tree
subst_phis (struct expr_info *ei, tree Z, basic_block pred, basic_block bb)
tree stmt_copy;
size_t i;
- /* Return the cached version, if we have one. */
+ /* Return the cached version, if we have one.  */
if (pred->index < n_phi_preds
&& phi_pred_cache[pred->index] != NULL_TREE)
return phi_pred_cache[pred->index];
basic_block use_bb ATTRIBUTE_UNUSED,
int opnd_num ATTRIBUTE_UNUSED)
{
- /* XXX: Implement. */
+ /* XXX: Implement.  */
return false;
}
Renaming is done like Open64 does it. Basically as the paper says,
except that we try to use earlier defined occurrences if they are
- available in order to keep the number of saves down. */
+ available in order to keep the number of saves down.  */
static void
rename_1 (struct expr_info *ei)
anything).
Otherwise, we have to assign a new version.
lvalue occurrences always need a new version,
- since they are definitions. */
+ since they are definitions.  */
if (!EUSE_LVAL (occur)
&& same_e_version_real_occ_real_occ (ei, tos, occur))
{
must change in between the ephi result and the next
occurrence), and we need a new version for the real
occurrence.
- lvalues always need a new version. */
+ lvalues always need a new version.  */
if (!EUSE_LVAL (occur)
&& same_e_version_phi_result (ei, tos, EREF_STMT (occur),
occur))
}
}
}
- /* EPHI occurrences always get new versions. */
+ /* EPHI occurrences always get new versions.  */
else if (TREE_CODE (occur) == EPHI_NODE)
{
assign_new_class (occur, &stack, NULL);
/* Determine if the EPHI has an argument we could never insert
or extend the lifetime of, such as an argument occurring on
- an abnormal edge. */
+ an abnormal edge.  */
static bool
ephi_has_unsafe_arg (tree ephi)
basic_block dom;
tree phi;
- /* Check phis first. */
+ /* Check phis first.  */
for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
{
if (phi == currstmt)
}
/* We can't walk BB's backwards right now, so we have to walk *all*
- the statements, and choose the last name we find. */
+ the statements, and choose the last name we find.  */
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
{
tree stmt = bsi_stmt (bsi);
tree newtemp;
basic_block bb = bb_for_stmt (x);
- /* Insert definition of expr at end of BB containing x. */
+ /* Insert definition of expr at end of BB containing x.  */
copy = TREE_OPERAND (EREF_STMT (ephi), 1);
copy = unshare_expr (copy);
expr = build (MODIFY_EXPR, TREE_TYPE (ei->expr),
do_ephi_df_search (ei, replacing_search);
}
-/* Perform a DFS on EPHI using the functions in SEARCH. */
+/* Perform a DFS on EPHI using the functions in SEARCH.  */
static void
do_ephi_df_search_1 (struct ephi_df_search search, tree ephi)
}
#if 0
-/* Calculate the increment necessary due to EXPR for the temporary. */
+/* Calculate the increment necessary due to EXPR for the temporary.  */
static tree
calculate_increment (struct expr_info *ei, tree expr)
{
basic_block bb;
/* First, add the phi node temporaries so the reaching defs are
- always right. */
+ always right.  */
for (euse_iter = 0;
euse_iter < VARRAY_ACTIVE_SIZE (ei->euses_dt_order);
euse_iter++)
}
}
}
- /* Now do the actual saves and reloads, plus repairs. */
+ /* Now do the actual saves and reloads, plus repairs.  */
for (euse_iter = 0;
euse_iter < VARRAY_ACTIVE_SIZE (ei->euses_dt_order);
euse_iter++)
}
}
- /* Now do the phi nodes. */
+ /* Now do the phi nodes.  */
for (euse_iter = 0;
euse_iter < VARRAY_ACTIVE_SIZE (ei->euses_dt_order);
euse_iter++)
}
-/* Return true if EXPR is a strength reduction candidate. */
+/* Return true if EXPR is a strength reduction candidate.  */
static bool
is_strred_cand (const tree expr ATTRIBUTE_UNUSED)
{
-/* Determine if two expressions are lexically equivalent. */
+/* Determine if two expressions are lexically equivalent.  */
static inline bool
expr_lexically_eq (const tree v1, const tree v2)
/* Compute immediate dominators. */
calculate_dominance_info (CDI_DOMINATORS);
- /* DCE screws the dom_children up, without bothering to fix it. So fix it. */
+ /* DCE screws the dom_children up, without bothering to fix it. So fix it.  */
currbbs = n_basic_blocks;
dfn = xcalloc (last_basic_block + 1, sizeof (int));
build_dfn_array (ENTRY_BLOCK_PTR, 0);
extern const char tree_code_type[];
#define TREE_CODE_CLASS(CODE) tree_code_type[(int) (CODE)]
-/* Returns nonzero iff CLASS is not the tree code of a type. */
+/* Returns nonzero iff CLASS is not the tree code of a type.  */
#define IS_NON_TYPE_CODE_CLASS(CLASS) (strchr ("xbcdr<12se", (CLASS)) != 0)
/* SSAPRE: The statement associated with this expression reference. */
tree stmt;
- /* SSAPRE: True if expression needs to be saved to a temporary. */
+ /* SSAPRE: True if expression needs to be saved to a temporary.  */
unsigned int save:1;
/* SSAPRE: True if expression needs to be reloaded from a temporary. */
/* SSAPRE: Redundancy class of expression. */
unsigned int class;
- /* SSAPRE: Processed flag 1. */
+ /* SSAPRE: Processed flag 1.  */
unsigned int processed:1;
/* SSAPRE: True if expression is injured. */
{
struct tree_eref_common common;
- /* SSAPRE: Definition for this use. */
+ /* SSAPRE: Definition for this use.  */
tree def;
- /* SSAPRE: True if this is an EPHI operand occurrence. */
+ /* SSAPRE: True if this is an EPHI operand occurrence.  */
unsigned int op_occurrence:1;
/* SSAPRE: True if expression was inserted as a PHI operand occurrence. */
/* SSAPRE: True if this phi argument is injured. */
unsigned int injured:1;
- /* SSAPRE: True if there is a real occurrence for this phi argument. */
+ /* SSAPRE: True if there is a real occurrence for this phi argument.  */
unsigned int has_real_use:1;
/* SSAPRE: True if delayed renaming is required on this phi argument. */
/* SSAPRE: True if this operand stops forward movement. */
unsigned int stops:1;
- /* SSAPRE: Definition of this phi operand. */
+ /* SSAPRE: Definition of this phi operand.  */
tree def;
/* SSAPRE: Phi predecessor for this phi operand. */
/* SSAPRE: True if PHI is cant_be_avail. */
unsigned int cant_be_avail:1;
- /* SSAPRE: True if PHI is dead. */
+ /* SSAPRE: True if PHI is dead.  */
unsigned int dead:1;
- /* SSAPRE: True if PHI is pointless or identical to some value. */
+ /* SSAPRE: True if PHI is pointless or identical to some value.  */
unsigned int identity:1;
/* SSAPRE: True if replacing occurrence known for ESSA minimization. */
#define DECL_POINTER_ALIAS_SET(NODE) \
(DECL_CHECK (NODE)->decl.pointer_alias_set)
-/* Used to store the alias_var for a DECL node. */
+/* Used to store the alias_var for a DECL node.  */
#define DECL_PTA_ALIASVAR(NODE) \
(DECL_CHECK (NODE)->decl.alias_var)
within it. */
TDI_vcg, /* create a VCG graph file for each
function's flowgraph. */
- TDI_xml, /* dump function call graph. */
+ TDI_xml, /* dump function call graph.  */
TDI_all, /* enable all the dumps. */
TDI_end
};
\f
/* External declarations for edge-based profiling. */
struct profile_hooks {
- /* Insert code to increment an edge count. */
+ /* Insert code to increment an edge count.  */
void (*gen_edge_profiler) (int, edge);
/* Insert code to increment the interval histogram counter. */
/* Insert code to increment the power of two histogram counter. */
void (*gen_pow2_profiler) (struct histogram_value *, unsigned, unsigned);
- /* Insert code to find the most common value. */
+ /* Insert code to find the most common value.  */
void (*gen_one_value_profiler) (struct histogram_value *, unsigned, unsigned);
/* Insert code to find the most common value of a difference between two
desc = ggc_alloc (sizeof (*desc));
desc->value = copy_constant (exp);
- /* Propagate marked-ness to copied constant. */
+ /* Propagate marked-ness to copied constant.  */
if (flag_mudflap && mf_marked_p (exp))
mf_mark (desc->value);
/* Default function to output a label for unwind information. The
default is to do nothing. A target that needs nonlocal labels for
- unwind information must provide its own function to do this. */
+ unwind information must provide its own function to do this.  */
void
default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
tree decl ATTRIBUTE_UNUSED,