* c-common.c (c_alignof_expr): Use DECL_ALIGN_UNIT and TYPE_ALIGN_UNIT.
(c_sizeof_or_alignof_type): Likewise.
* expr.c (array_ref_element_size): Likewise.
(highest_pow2_factor_for_target): Likewise.
* gimplify.c (canonicalize_addr_expr): Likewise.
(gimplify_compound_lval): Likewise.
* stor-layout.c (finalize_record_size, finalize_type_size): Likewise.
* tree-ssa-ccp.c (maybe_fold_offset_to_array_ref): Likewise.
* varasm.c (assemble_variable): Likewise.
(output_constant_def_contents): Alignments are unsigned.
From-SVN: r86848
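Note on the pattern applied throughout this patch: every hunk replaces an alignment expressed in bits divided by BITS_PER_UNIT with a single *_UNIT accessor. A standalone sketch of that equivalence, assuming tree.h defines DECL_ALIGN_UNIT and TYPE_ALIGN_UNIT as the bit alignment divided by BITS_PER_UNIT (the macros below are illustrative stand-ins, not the real tree accessors):

/* Standalone sketch of the rewrite (not GCC code).  It assumes the
   *_UNIT accessors are the bit alignment divided by BITS_PER_UNIT;
   the macros here are stand-ins only.  */
#include <assert.h>

#define BITS_PER_UNIT 8				/* stand-in value */
#define TYPE_ALIGN(BITS) (BITS)			/* stand-in: bit alignment */
#define TYPE_ALIGN_UNIT(BITS) (TYPE_ALIGN (BITS) / BITS_PER_UNIT)

int
main (void)
{
  unsigned int align_in_bits = 32;	/* e.g. a 32-bit-aligned type */

  /* The old spelling and the new accessor compute the same byte count.  */
  assert (TYPE_ALIGN (align_in_bits) / BITS_PER_UNIT
	  == TYPE_ALIGN_UNIT (align_in_bits));
  return 0;
}

With BITS_PER_UNIT of 8, a 32-bit alignment comes out as 4 bytes either way.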
+2004-08-31 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
+
+ * c-common.c (c_alignof_expr): Use DECL_ALIGN_UNIT and TYPE_ALIGN_UNIT.
+ (c_sizeof_or_alignof_type): Likewise.
+ * expr.c (array_ref_element_size): Likewise.
+ (highest_pow2_factor_for_target): Likewise.
+ * gimplify.c (canonicalize_addr_expr): Likewise.
+ (gimplify_compound_lval): Likewise.
+ * stor-layout.c (finalize_record_size, finalize_type_size): Likewise.
+ * tree-ssa-ccp.c (maybe_fold_offset_to_array_ref): Likewise.
+ * varasm.c (assemble_variable): Likewise.
+ (output_constant_def_contents): Alignments are unsigned.
+
2004-08-31 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
Jeff Law <law@redhat.com>
size_int (TYPE_PRECISION (char_type_node)
/ BITS_PER_UNIT));
else
- value = size_int (TYPE_ALIGN (type) / BITS_PER_UNIT);
+ value = size_int (TYPE_ALIGN_UNIT (type));
}
/* VALUE will have an integer type with TYPE_IS_SIZETYPE set.
tree t;
if (TREE_CODE (expr) == VAR_DECL)
- t = size_int (DECL_ALIGN (expr) / BITS_PER_UNIT);
+ t = size_int (DECL_ALIGN_UNIT (expr));
else if (TREE_CODE (expr) == COMPONENT_REF
&& DECL_C_BIT_FIELD (TREE_OPERAND (expr, 1)))
}
else if (TREE_CODE (expr) == COMPONENT_REF
&& TREE_CODE (TREE_OPERAND (expr, 1)) == FIELD_DECL)
- t = size_int (DECL_ALIGN (TREE_OPERAND (expr, 1)) / BITS_PER_UNIT);
+ t = size_int (DECL_ALIGN_UNIT (TREE_OPERAND (expr, 1)));
else if (TREE_CODE (expr) == INDIRECT_REF)
{
if (TREE_TYPE (aligned_size) != sizetype)
aligned_size = fold_convert (sizetype, aligned_size);
return size_binop (MULT_EXPR, aligned_size,
- size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
+ size_int (TYPE_ALIGN_UNIT (elmt_type)));
}
/* Otherwise, take the size from that of the element type. Substitute
factor = highest_pow2_factor (exp);
if (TREE_CODE (target) == COMPONENT_REF)
- target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
+ target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
else
- target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
+ target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
return MAX (factor, target_align);
}
\f
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
- size_int (TYPE_ALIGN (dctype)
- / BITS_PER_UNIT)));
+ size_int (TYPE_ALIGN_UNIT (dctype))));
*expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
}
{
tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
tree elmt_size = unshare_expr (array_ref_element_size (t));
- tree factor = size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT);
+ tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
/* Divide the element size by the alignment of the element
type (above). */
/* Round the size up to be a multiple of the required alignment. */
TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
- TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
- TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
+ TYPE_SIZE_UNIT (rli->t)
+ = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
if (warn_padded && TREE_CONSTANT (unpadded_size)
&& simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
if (TYPE_SIZE (type) != 0)
{
TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
- TYPE_SIZE_UNIT (type)
- = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
+ TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
+ TYPE_ALIGN_UNIT (type));
}
/* Evaluate nonconstant sizes only once, either now or as soon as safe. */
return build (ARRAY_REF, orig_type, base, idx, min_idx,
size_int (tree_low_cst (elt_size, 1)
- / (TYPE_ALIGN (elt_type) / BITS_PER_UNIT)));
+ / (TYPE_ALIGN_UNIT (elt_type))));
}
* (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
#if !defined(ASM_OUTPUT_ALIGNED_COMMON) && !defined(ASM_OUTPUT_ALIGNED_DECL_COMMON) && !defined(ASM_OUTPUT_ALIGNED_BSS)
- if ((unsigned HOST_WIDE_INT) DECL_ALIGN (decl) / BITS_PER_UNIT > rounded)
+ if ((unsigned HOST_WIDE_INT) DECL_ALIGN_UNIT (decl) > rounded)
warning ("%Jrequested alignment for '%D' is greater than "
"implemented alignment of %d", decl, decl, rounded);
#endif
/* Output the alignment of this data. */
if (align > BITS_PER_UNIT)
- {
- ASM_OUTPUT_ALIGN (asm_out_file,
- floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT));
- }
+ ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (DECL_ALIGN_UNIT (decl)));
/* Do any machine/system dependent processing of the object. */
#ifdef ASM_DECLARE_OBJECT_NAME
int reloc = compute_reloc_for_constant (exp);
/* Align the location counter as required by EXP's data type. */
- int align = TYPE_ALIGN (TREE_TYPE (exp));
+ unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
align = CONSTANT_ALIGNMENT (exp, align);
#endif
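On the final hunk (output_constant_def_contents), the "Alignments are unsigned" note refers to TYPE_ALIGN yielding an unsigned value; holding it in a plain int makes any later comparison against an unsigned quantity mixed-sign. A minimal illustration of that mismatch in plain C rather than GCC internals (the function and parameter names here are hypothetical):

/* Illustrative only: storing an unsigned alignment in a signed int turns
   later comparisons against unsigned values into mixed-sign comparisons,
   which -Wsign-compare flags; an unsigned temporary avoids that.  */
unsigned int
clamp_alignment (unsigned int type_align, unsigned int min_align)
{
  unsigned int align = type_align;	/* was: int align = type_align; */

  if (align < min_align)		/* now an unsigned comparison */
    align = min_align;
  return align;
}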