+2004-09-05  Kazu Hirata  <kazu@cs.umass.edu>
+
+ * c-common.c, c-decl.c, combine.c, defaults.h, fold-const.c,
+ gimplify.c, gthr-nks.h, hooks.c, lambda-code.c, lambda-mat.c,
+ stor-layout.c, target.h, tree-cfg.c, tree-chrec.c,
+ tree-if-conv.c, tree-inline.c, tree-into-ssa.c,
+ tree-mudflap.c, tree-optimize.c, tree-scalar-evolution.c,
+ tree-ssa-alias.c, tree-ssa-ccp.c, tree-ssa-dce.c,
+ tree-ssa-pre.c, tree-vectorizer.c, tree-vectorizer.h, tree.h,
+ vec.h: Fix comment formatting.
+
2004-09-05  Kazu Hirata  <kazu@cs.umass.edu>

* c-common.c, cfgexpand.c, cgraphunit.c, defaults.h,
tree t2;
switch (TREE_CODE (t))
{
- /* For pointers, hash on pointee type plus some swizzling. */
+ /* For pointers, hash on pointee type plus some swizzling.  */
case POINTER_TYPE:
return c_type_hash (TREE_TYPE (t)) ^ 0x3003003;
/* Hash on number of elements and total size. */
record_builtin_type (RID_VOID, NULL, void_type_node);
- /* This node must not be shared. */
+ /* This node must not be shared.  */
void_zero_node = make_node (INTEGER_CST);
TREE_TYPE (void_zero_node) = void_type_node;
not undefined behavior, and is the most convenient way to get
some effects (see e.g. what unwind-dw2-fde-glibc.c does to
the definition of _Unwind_Find_FDE in unwind-dw2-fde.c), but
- we do diagnose it if -Wtraditional. */
+ we do diagnose it if -Wtraditional.  */
if (TREE_PUBLIC (olddecl) && !TREE_PUBLIC (newdecl))
{
/* Two exceptions to the rule. If olddecl is an extern
definition. */
&& !(TREE_CODE (newdecl) == FUNCTION_DECL
&& DECL_INITIAL (newdecl) && !DECL_INITIAL (olddecl))
- /* Don't warn about redundant redeclarations of builtins. */
+ /* Don't warn about redundant redeclarations of builtins.  */
&& !(TREE_CODE (newdecl) == FUNCTION_DECL
&& !DECL_BUILT_IN (newdecl)
&& DECL_BUILT_IN (olddecl)
if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
{
/* Don't substitute for a register intended as a clobberable
- operand. */
+ operand.  */
rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
if (rtx_equal_p (reg, dest))
return 0;
archive's table of contents. Defining this macro to be nonzero has
the consequence that certain symbols will not be made weak that
otherwise would be. The C++ ABI requires this macro to be zero;
- see the documentation. */
+ see the documentation.  */
#ifndef TARGET_WEAK_NOT_IN_ARCHIVE_TOC
#define TARGET_WEAK_NOT_IN_ARCHIVE_TOC 0
#endif
/* See if VALUE is already a multiple of DIVISOR. If so, we don't
have to do anything. Only do this when we are not given a const,
because in that case, this check is more expensive than just
- doing it. */
+ doing it.  */
if (TREE_CODE (value) != INTEGER_CST)
{
div = build_int_cst (TREE_TYPE (value), divisor);
/* See if VALUE is already a multiple of DIVISOR. If so, we don't
have to do anything. Only do this when we are not given a const,
because in that case, this check is more expensive than just
- doing it. */
+ doing it.  */
if (TREE_CODE (value) != INTEGER_CST)
{
div = build_int_cst (TREE_TYPE (value), divisor);
break;
case ARRAY_TYPE:
- /* These anonymous types don't have declarations, so handle them here. */
+ /* These anonymous types don't have declarations, so handle them here.  */
gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
break;
#define __gthr_nks_h
/* NKS threads specific definitions.
- Easy, since the interface is mostly one-to-one mapping. */
+ Easy, since the interface is mostly one-to-one mapping.  */
#define __GTHREADS 1
/* Backend initialization functions */
-/* Initialize the threads subsystem. */
+/* Initialize the threads subsystem.  */
static inline int
__gthread_objc_init_thread_system(void)
{
return -1;
}
-/* Close the threads subsystem. */
+/* Close the threads subsystem.  */
static inline int
__gthread_objc_close_thread_system(void)
{
/* Backend thread functions */
-/* Create a new thread of execution. */
+/* Create a new thread of execution.  */
static inline objc_thread_t
__gthread_objc_thread_detach(void (*func)(void *), void *arg)
{
return thread_id;
}
-/* Set the current thread's priority. */
+/* Set the current thread's priority.  */
static inline int
__gthread_objc_thread_set_priority(int priority)
{
return -1;
}
-/* Return the current thread's priority. */
+/* Return the current thread's priority.  */
static inline int
__gthread_objc_thread_get_priority(void)
{
return -1;
}
-/* Yield our process time to another thread. */
+/* Yield our process time to another thread.  */
static inline void
__gthread_objc_thread_yield(void)
{
NXThreadYield();
}
-/* Terminate the current thread. */
+/* Terminate the current thread.  */
static inline int
__gthread_objc_thread_exit(void)
{
return -1;
}
-/* Returns an integer value which uniquely describes a thread. */
+/* Returns an integer value which uniquely describes a thread.  */
static inline objc_thread_t
__gthread_objc_thread_id(void)
{
return (objc_thread_t) NXThreadGetId();
}
-/* Sets the thread's local storage pointer. */
+/* Sets the thread's local storage pointer.  */
static inline int
__gthread_objc_thread_set_data(void *value)
{
return NXKeySetValue(_objc_thread_storage, value);
}
-/* Returns the thread's local storage pointer. */
+/* Returns the thread's local storage pointer.  */
static inline void *
__gthread_objc_thread_get_data(void)
{
/* Backend mutex functions */
-/* Allocate a mutex. */
+/* Allocate a mutex.  */
static inline int
__gthread_objc_mutex_allocate(objc_mutex_t mutex)
{
return -1;
}
-/* Deallocate a mutex. */
+/* Deallocate a mutex.  */
static inline int
__gthread_objc_mutex_deallocate(objc_mutex_t mutex)
{
return 0;
}
-/* Grab a lock on a mutex. */
+/* Grab a lock on a mutex.  */
static inline int
__gthread_objc_mutex_lock(objc_mutex_t mutex)
{
return NXLock((NXMutex_t *)mutex->backend);
}
-/* Try to grab a lock on a mutex. */
+/* Try to grab a lock on a mutex.  */
static inline int
__gthread_objc_mutex_trylock(objc_mutex_t mutex)
{
/* Backend condition mutex functions */
-/* Allocate a condition. */
+/* Allocate a condition.  */
static inline int
__gthread_objc_condition_allocate(objc_condition_t condition)
{
return 0;
}
-/* Deallocate a condition. */
+/* Deallocate a condition.  */
static inline int
__gthread_objc_condition_deallocate(objc_condition_t condition)
{
return NXCondWait((NXCond_t *)condition->backend, (NXMutex_t *)mutex->backend);
}
-/* Wake up all threads waiting on this condition. */
+/* Wake up all threads waiting on this condition.  */
static inline int
__gthread_objc_condition_broadcast(objc_condition_t condition)
{
return NXCondBroadcast((NXCond_t *)condition->backend);
}
-/* Wake up one thread waiting on this condition. */
+/* Wake up one thread waiting on this condition.  */
static inline int
__gthread_objc_condition_signal(objc_condition_t condition)
{
static inline int
__gthread_key_dtor (__gthread_key_t key, void *ptr)
{
- /* Just reset the key value to zero. */
+ /* Just reset the key value to zero.  */
if (ptr)
return NXKeySetValue (key, NULL);
return 0;
return true;
}
-/* Generic hook that takes no arguments and returns NO_REGS. */
+/* Generic hook that takes no arguments and returns NO_REGS.  */
int
hook_int_void_no_regs (void)
{
/* Print a lambda loop structure LOOP to OUTFILE. The depth/number of
coefficients is given by DEPTH, the number of invariants is
given by INVARIANTS, and the character to start variable names with is given
- by START. */
+ by START.  */
void
print_lambda_loop (FILE * outfile, lambda_loop loop, int depth,
}
/* Compute the loop bounds for the auxiliary space NEST.
- Input system used is Ax <= b. TRANS is the unimodular transformation. */
+ Input system used is Ax <= b. TRANS is the unimodular transformation.  */
static lambda_loopnest
lambda_compute_auxillary_space (lambda_loopnest nest,
}
}
- /* creates a new system by deleting the i'th variable. */
+ /* creates a new system by deleting the i'th variable.  */
newsize = 0;
for (j = 0; j < size; j++)
{
if (VEC_length (tree, *invariants) > (unsigned int) (2 * depth))
abort ();
- /* We might have some leftover. */
+ /* We might have some leftover.  */
if (TREE_CODE (test) == LT_EXPR)
extra = -1 * stepint;
else if (TREE_CODE (test) == NE_EXPR)
newloop = LN_LOOPS (new_loopnest)[i];
/* Linear offset is a bit tricky to handle. Punt on the unhandled
- cases for now. */
+ cases for now.  */
offset = LL_LINEAR_OFFSET (newloop);
if (LLE_DENOMINATOR (offset) != 1
abort ();
/* Now build the new lower bounds, and insert the statements
- necessary to generate it on the loop preheader. */
+ necessary to generate it on the loop preheader.  */
newlowerbound = lle_to_gcc_expression (LL_LOWER_BOUND (newloop),
LL_LINEAR_OFFSET (newloop),
new_ivs,
vec[i] = mat[i][col];
}
-/* Delete rows r1 to r2 (not including r2). */
+/* Delete rows r1 to r2 (not including r2).  */
void
lambda_matrix_delete_rows (lambda_matrix mat, int rows, int from, int to)
"U.A = S". This decomposition is also known as "right Hermite".
Ref: Algorithm 2.1 page 33 in "Loop Transformations for
- Restructuring Compilers" Utpal Banerjee. */
+ Restructuring Compilers" Utpal Banerjee.  */
void
lambda_matrix_right_hermite (lambda_matrix A, int m, int n,
V.S". This decomposition is also known as "left Hermite".
Ref: Algorithm 2.2 page 36 in "Loop Transformations for
- Restructuring Compilers" Utpal Banerjee. */
+ Restructuring Compilers" Utpal Banerjee.  */
void
lambda_matrix_left_hermite (lambda_matrix A, int m, int n,
/* If nonzero, this is an upper limit on alignment of structure fields.
The value is measured in bits. */
unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
-/* ... and its original value in bytes, specified via -fpack-struct=<value>. */
+/* ... and its original value in bytes, specified via -fpack-struct=<value>.  */
unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
bool (* is_costly_dependence) (rtx, rtx, rtx, int, int);
} sched;
- /* Return machine mode for filter value. */
+ /* Return machine mode for filter value.  */
enum machine_mode (* eh_return_filter_mode) (void);
/* Given two decls, merge their attributes and return the result. */
/* Given a control block BB and a predicate VAL, return the edge that
will be taken out of the block. If VAL does not match a unique
- edge, NULL is returned. */
+ edge, NULL is returned.  */
edge
find_taken_edge (basic_block bb, tree val)
|| op1 == chrec_not_analyzed_yet)
return chrec_not_analyzed_yet;
- /* The default case produces a safe result. */
+ /* The default case produces a safe result.  */
return chrec_dont_know;
}
- Basic block is after exit block but before latch.
- Basic block edge(s) is not normal.
EXIT_BB_SEEN is true if basic block with exit edge is already seen.
- BB is inside loop LOOP. */
+ BB is inside loop LOOP.  */
static bool
if_convertable_bb_p (struct loop *loop, basic_block bb, bool exit_bb_seen)
}
/* Combine all basic block from the given LOOP into one or two super
- basic block. Replace PHI nodes with conditional modify expression. */
+ basic block. Replace PHI nodes with conditional modify expression.  */
static void
combine_blocks (struct loop *loop)
walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
- /* If fields, do likewise for offset and qualifier. */
+ /* If fields, do likewise for offset and qualifier.  */
if (TREE_CODE (t) == FIELD_DECL)
{
walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
/* Make an equivalent VAR_DECL. Note that we must NOT remap the type
here since the type of this decl must be visible to the calling
- function. */
+ function.  */
var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
/* Register the VAR_DECL as the equivalent for the PARM_DECL;
case ARRAY_TYPE:
/* Don't follow this nodes's type if a pointer for fear that we'll
- have infinite recursion. Those types are uninteresting anyway. */
+ have infinite recursion. Those types are uninteresting anyway.  */
if (!POINTER_TYPE_P (TREE_TYPE (type))
&& TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE)
WALK_SUBTREE (TREE_TYPE (type));
Ith block contains a definition of VAR. */
bitmap def_blocks;
- /* Blocks that contain a phi node for VAR. */
+ /* Blocks that contain a phi node for VAR.  */
bitmap phi_blocks;
/* Blocks where VAR is live-on-entry. Similar semantics as
char *string;
tree result;
- /* Add FILENAME[:LINENUMBER]. */
+ /* Add FILENAME[:LINENUMBER].  */
file = xloc.file;
if (file == NULL && current_function_decl != NULL_TREE)
file = DECL_SOURCE_FILE (current_function_decl);
/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;
-/* Their function-scope local shadows, used in single-threaded mode only. */
+/* Their function-scope local shadows, used in single-threaded mode only.  */
/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;
/* Create and initialize local shadow variables for the lookup cache
globals. Put their decls in the *_l globals for use by
- mf_build_check_statement_for. */
+ mf_build_check_statement_for.  */
static void
mf_decl_cache_locals (void)
static void
mf_decl_clear_locals (void)
{
- /* Unset local shadows. */
+ /* Unset local shadows.  */
mf_cache_shift_decl_l = NULL_TREE;
mf_cache_mask_decl_l = NULL_TREE;
}
{
/* A non-zero static_pass_number indicates that the
- pass is already in the list. */
+ pass is already in the list.  */
if (pass->static_pass_number)
{
struct tree_opt_pass *new;
its value in the nb_iter-th iteration. */
res = chrec_apply (inner_loop->num, evolution_fn, nb_iter);
- /* Continue the computation until ending on a parent of LOOP. */
+ /* Continue the computation until ending on a parent of LOOP.  */
return compute_overall_effect_of_inner_loop (loop, res);
}
}
ev_fn = chrec_dont_know;
/* When there are multiple back edges of the loop (which in fact never
- happens currently, but nevertheless), merge their evolutions. */
+ happens currently, but nevertheless), merge their evolutions.  */
evolution_function = chrec_merge (evolution_function, ev_fn);
}
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
/* If the pointer has already been found to point to arbitrary
- memory locations, it is unsafe to mark it as pointing to malloc. */
+ memory locations, it is unsafe to mark it as pointing to malloc.  */
if (pi->pt_anything)
return;
return UNKNOWN_VAL;
#ifdef ENABLE_CHECKING
- /* There should be no VUSE operands that are UNDEFINED. */
+ /* There should be no VUSE operands that are UNDEFINED.  */
if (val->lattice_val == UNDEFINED)
abort ();
#endif
fprintf (dump_file, "\n\n");
}
- /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED. */
+ /* Check for an invalid change from UNKNOWN_VAL to UNDEFINED.  */
if (old_val->lattice_val == UNKNOWN_VAL
&& new_val.lattice_val == UNDEFINED)
return SSA_PROP_NOT_INTERESTING;
}
/* If LHS is not a gimple register, then it cannot take on an
- UNDEFINED value. */
+ UNDEFINED value.  */
if (!is_gimple_reg (SSA_NAME_VAR (lhs))
&& val.lattice_val == UNDEFINED)
val.lattice_val = UNKNOWN_VAL;
{
/* If LHS is NULL, it means that we couldn't get the base
address of the reference. In which case, we should not
- remove this store. */
+ remove this store.  */
mark_stmt_necessary (stmt, true);
}
else if (DECL_P (lhs))
/* An unordered bitmap set. One bitmap tracks values, the other,
- expressions. */
+ expressions.  */
typedef struct bitmap_set
{
bitmap expressions;
typedef struct expr_pred_trans_d
{
- /* The expression. */
+ /* The expression.  */
tree e;
/* The predecessor block along which we translated the expression. */
/* Search in the phi translation table for the translation of
expression E in basic block PRED. Return the translated value, if
- found, NULL otherwise. */
+ found, NULL otherwise.  */
static inline tree
phi_trans_lookup (tree e, basic_block pred)
return ret;
}
-/* Return true if two sets are equal. */
+/* Return true if two sets are equal.  */
static bool
set_equal (value_set_t a, value_set_t b)
fprintf (dump_file, "\n");
}
- /* Make the necessary insertions. */
+ /* Make the necessary insertions.  */
for (pred = block->pred;
pred;
pred = pred->pred_next)
&loop_exit_bsi, false, &indx_before_incr, &indx_after_incr);
/* bsi_insert is using BSI_NEW_STMT. We need to bump it back
- to point to the exit condition. */
+ to point to the exit condition.  */
bsi_next (&loop_exit_bsi);
if (bsi_stmt (loop_exit_bsi) != orig_cond_expr)
abort ();
}
-/* Utility functions used by vect_mark_stmts_to_be_vectorized. */
+/* Utility functions used by vect_mark_stmts_to_be_vectorized.  */
/* Function vect_mark_relevant.
vect_pointer_var
};
-/* Defines type of operation: unary or binary. */
+/* Defines type of operation: unary or binary.  */
enum operation_type {
unary_op = 1,
binary_op
#define SET_EXPR_LOCATION(NODE, FROM) \
(EXPR_CHECK (NODE)->exp.locus = (FROM))
#define EXPR_HAS_LOCATION(NODE) (EXPR_LOCATION (NODE) != UNKNOWN_LOCATION)
-/* EXPR_LOCUS and SET_EXPR_LOCUS are deprecated. */
+/* EXPR_LOCUS and SET_EXPR_LOCUS are deprecated.  */
#define EXPR_LOCUS(NODE) \
(IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (NODE))) \
? &(NODE)->exp.locus \
#define TYPE_DEBUG_REPRESENTATION_TYPE(NODE) (VECTOR_TYPE_CHECK (NODE)->type.values)
/* For record and union types, information about this type, as a base type
- for itself. */
+ for itself.  */
#define TYPE_BINFO(NODE) (RECORD_OR_UNION_CHECK(NODE)->type.binfo)
/* For non record and union types, used in a language-dependent way. */
/* Unique ID for this value handle. IDs are handed out in a
conveniently dense form starting at 0, so that we can make
- bitmaps of value handles. */
+ bitmaps of value handles.  */
unsigned int id;
};
\f
/* If nonzero, an upper limit on alignment of structure fields, in bits, */
extern unsigned int maximum_field_alignment;
-/* and its original value in bytes, specified via -fpack-struct=<value>. */
+/* and its original value in bytes, specified via -fpack-struct=<value>.  */
extern unsigned int initial_max_fld_align;
/* If nonzero, the alignment of a bitstring or (power-)set value, in bits. */
Push a new element onto the end, returns a pointer to the slot
filled in. For object vectors, the new value can be NULL, in which
case NO initialization is performed. Aborts if there is
- insufficient space in the vector. */
+ insufficient space in the vector.  */
#define VEC_quick_push(TDEF,V,O) \
(VEC_OP(TDEF,quick_push)(V,O VEC_CHECK_INFO))