+2011-07-12 Eric Botcazou <ebotcazou@adacore.com>
+
+ * cse.c (insert_with_costs): Put semi-colon after empty loop body
+ on the next line.
+ * emit-rtl.c (push_to_sequence): Likewise.
+ * haifa-sched.c (max_issue): Likewise.
+ * matrix-reorg.c (add_allocation_site): Likewise.
+ * postreload-gcse.c (eliminate_partially_redundant_load): Likewise.
+ * reload.c (alternative_allows_const_pool_ref): Likewise.
+ * sched-rgn.c (rgn_add_block): Likewise.
+ (rgn_fix_recovery_cfg): Likewise.
+ * tree.c (attribute_list_contained): Likewise.
+
2011-07-12  Uros Bizjak  <ubizjak@gmail.com>

	* config/i386/i386.c: Tidy processor feature bitmasks.
+2011-07-12 Eric Botcazou <ebotcazou@adacore.com>
+
+ * c-ada-spec.c (dump_nested_types): Put semi-colon after empty loop
+ body on the next line.
+
2011-07-08  Jason Merrill  <jason@redhat.com>

	PR c++/45437
if (TREE_CODE (decl) == FUNCTION_TYPE)
for (decl = TREE_TYPE (decl);
decl && TREE_CODE (decl) == POINTER_TYPE;
- decl = TREE_TYPE (decl));
+ decl = TREE_TYPE (decl))
+ ;
decl = get_underlying_decl (decl);
/* Put it after the last element cheaper than X. */
struct table_elt *p, *next;
- for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
- p = next);
+ for (p = classp;
+ (next = p->next_same_value) && CHEAPER (next, elt);
+ p = next)
+ ;
/* Put it after P and before NEXT. */
elt->next_same_value = next;
start_sequence ();
- for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
+ for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
+ ;
set_first_insn (first);
set_last_insn (last);
{
n = privileged_n;
/* Try to find issued privileged insn. */
- while (n && !ready_try[--n]);
+ while (n && !ready_try[--n])
+ ;
}
if (/* If all insns are equally good... */
must be set accordingly. */
for (min_malloc_level = 0;
min_malloc_level < mi->max_malloced_level
- && mi->malloc_for_level[min_malloc_level]; min_malloc_level++);
+ && mi->malloc_for_level[min_malloc_level]; min_malloc_level++)
+ ;
if (level < min_malloc_level)
{
mi->allocation_function_decl = current_function_decl;
discover additional redundancies, so mark it for later deletion. */
for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
a_occr && (a_occr->insn != insn);
- a_occr = get_bb_avail_insn (bb, a_occr->next));
+ a_occr = get_bb_avail_insn (bb, a_occr->next))
+ ;
if (!a_occr)
{
/* Skip alternatives before the one requested. */
while (altnum > 0)
{
- while (*constraint++ != ',');
+ while (*constraint++ != ',')
+ ;
altnum--;
}
/* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
/* Now POS is the index of the last block in the region. */
/* Find index of basic block AFTER. */
- for (; rgn_bb_table[pos] != after->index; pos--);
+ for (; rgn_bb_table[pos] != after->index; pos--)
+ ;
pos++;
gcc_assert (pos > ebb_head[i - 1]);
for (old_pos = ebb_head[BLOCK_TO_BB (check_bbi) + 1] - 1;
rgn_bb_table[old_pos] != check_bb_nexti;
- old_pos--);
+ old_pos--)
+ ;
gcc_assert (old_pos > ebb_head[BLOCK_TO_BB (check_bbi)]);
for (new_pos = ebb_head[BLOCK_TO_BB (bbi) + 1] - 1;
rgn_bb_table[new_pos] != bbi;
- new_pos--);
+ new_pos--)
+ ;
new_pos++;
gcc_assert (new_pos > ebb_head[BLOCK_TO_BB (bbi)]);
t1 != 0 && t2 != 0
&& TREE_PURPOSE (t1) == TREE_PURPOSE (t2)
&& TREE_VALUE (t1) == TREE_VALUE (t2);
- t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2));
+ t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
+ ;
/* Maybe the lists are equal. */
if (t1 == 0 && t2 == 0)
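
For reference, every hunk above applies the same stylistic rule described in the
ChangeLog entries: when a loop has an empty body, the terminating semicolon is
placed alone on the next line instead of trailing the closing parenthesis, so
the emptiness is clearly deliberate.  Below is a minimal, self-contained sketch
of the resulting style; the `node' type and `find_last' helper are hypothetical
and exist only to illustrate the convention, they are not part of the patch.

    #include <stdio.h>

    struct node
    {
      int value;
      struct node *next;
    };

    /* Return the last node of a singly linked list, or NULL if the list
       is empty.  */
    static struct node *
    find_last (struct node *head)
    {
      struct node *p;

      /* The loop body is intentionally empty: all the work happens in the
	 loop header, and the lone semicolon on the next line makes that
	 explicit.  */
      for (p = head; p && p->next; p = p->next)
	;

      return p;
    }

    int
    main (void)
    {
      struct node c = { 3, NULL };
      struct node b = { 2, &c };
      struct node a = { 1, &b };

      printf ("%d\n", find_last (&a)->value);	/* prints 3 */
      return 0;
    }

Writing the semicolon on its own line makes it obvious that the empty body is
intentional rather than an accidental stray semicolon after the loop header.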