break;
case OMP_PARALLEL:
+ case OMP_TASK:
case OMP_FOR:
case OMP_SINGLE:
case OMP_MASTER:
break;
case OMP_PARALLEL:
+ case OMP_TASK:
/* Make sure the outermost BIND_EXPR in OMP_BODY isn't removed
as useless. */
- remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_BODY (*tp)), data);
+ remove_useless_stmts_1 (&BIND_EXPR_BODY (OMP_TASKREG_BODY (*tp)), data);
data->last_goto = NULL;
break;
case OMP_SECTIONS:
case OMP_SINGLE:
case OMP_SECTION:
- case OMP_MASTER :
+ case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
remove_useless_stmts_1 (&OMP_BODY (*tp), data);
there is no sign or zero extension involved. */
if (((POINTER_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op)))
|| (POINTER_TYPE_P (TREE_TYPE (op)) && INTEGRAL_TYPE_P (type)))
- && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op)))
+ && (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op))
+ /* For targets where the precision of sizetype doesn't
+ match that of pointers we need the following. */
+ || type == sizetype || TREE_TYPE (op) == sizetype))
return false;
/* Allow conversion from integer to offset type and vice versa. */
*loc = new_t;
}
else
- new_t = *loc;
+ new_t = (tree) *loc;
*tp = new_t;
}
*loc = new_name;
}
else
- new_name = *loc;
+ new_name = (tree) *loc;
return new_name;
}
{
struct tree_map in, *out;
in.base.from = t;
- out = htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
+ out = (struct tree_map *)
+ htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
if (out)
*tp = t = out->to;
}
gcc_assert (TREE_CODE (decl) == LABEL_DECL);
- m = xmalloc (sizeof (struct tree_map));
+ m = XNEW (struct tree_map);
m->hash = DECL_UID (decl);
m->base.from = decl;
m->to = create_artificial_label ();