+2017-04-24 Martin Jambor <mjambor@suse.cz>
+
+ PR tree-optimization/80293
+ * tree-sra.c (scalarizable_type_p): New parameter const_decl, make
+ char arrays not totally scalarizable if it is false.
+ (analyze_all_variable_accesses): Pass correct value in the new
+ parameter. Add a statistics counter.
+
2017-04-24 Jan Hubicka <hubicka@ucw.cz>
PR middle-end/79931
--- /dev/null
+// { dg-do compile }
+// { dg-options "-O2 -std=gnu++11 -fdump-tree-optimized" }
+
+#include <array>
+#include <cstdint>
+#include <type_traits>
+
+// Return a copy of the underlying memory of an arbitrary value.
+template <
+ typename T,
+ typename = typename std::enable_if<std::is_trivially_copyable<T>::value>::type
+>
+auto getMem(
+ T const & value
+) -> std::array<char, sizeof(T)> {
+ auto ret = std::array<char, sizeof(T)>{};
+ __builtin_memcpy(ret.data(), &value, sizeof(T));
+ return ret;
+}
+
+template <
+ typename T,
+ typename = typename std::enable_if<std::is_trivially_copyable<T>::value>::type
+>
+auto fromMem(
+ std::array<char, sizeof(T)> const & buf
+) -> T {
+ auto ret = T{};
+ __builtin_memcpy(&ret, buf.data(), sizeof(T));
+ return ret;
+}
+
+double foo1(std::uint64_t arg) {
+ return fromMem<double>(getMem(arg));
+}
+
+double foo2(std::uint64_t arg) {
+ return *reinterpret_cast<double*>(&arg);
+}
+
+double foo3(std::uint64_t arg) {
+ double ret;
+ __builtin_memcpy(&ret, &arg, sizeof(arg));
+ return ret;
+}
+
+// { dg-final { scan-tree-dump-not "BIT_FIELD_REF" "optimized" } }
/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
ARRAY_TYPE with fields that are either of gimple register types (excluding
- bit-fields) or (recursively) scalarizable types. */
+ bit-fields) or (recursively) scalarizable types. CONST_DECL must be true if
+ we are considering a decl from constant pool. If it is false, char arrays
+ will be refused. */
static bool
-scalarizable_type_p (tree type)
+scalarizable_type_p (tree type, bool const_decl)
{
gcc_assert (!is_gimple_reg_type (type));
if (type_contains_placeholder_p (type))
return false;
if (!is_gimple_reg_type (ft)
- && !scalarizable_type_p (ft))
+ && !scalarizable_type_p (ft, const_decl))
return false;
}
case ARRAY_TYPE:
{
+ HOST_WIDE_INT min_elem_size;
+ if (const_decl)
+ min_elem_size = 0;
+ else
+ min_elem_size = BITS_PER_UNIT;
+
if (TYPE_DOMAIN (type) == NULL_TREE
|| !tree_fits_shwi_p (TYPE_SIZE (type))
|| !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
- || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= 0)
+ || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
|| !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
return false;
if (tree_to_shwi (TYPE_SIZE (type)) == 0
tree elem = TREE_TYPE (type);
if (!is_gimple_reg_type (elem)
- && !scalarizable_type_p (elem))
+ && !scalarizable_type_p (elem, const_decl))
return false;
return true;
}
{
tree var = candidate (i);
- if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var)))
+ if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
+ constant_decl_p (var)))
{
if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
<= max_scalarization_size)
{
create_total_scalarization_access (var);
completely_scalarize (var, TREE_TYPE (var), 0, var);
+ statistics_counter_event (cfun,
+ "Totally-scalarized aggregates", 1);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Will attempt to totally scalarize ");