--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
[...]
 	gcc_unreachable ();
case BIT_FIELD_REF:
- if ((TREE_CODE (arg0) == VECTOR_CST
- || (TREE_CODE (arg0) == CONSTRUCTOR
- && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
+ if (TREE_CODE (arg0) == VECTOR_CST
&& (type == TREE_TYPE (TREE_TYPE (arg0))
|| (TREE_CODE (type) == VECTOR_TYPE
&& TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
[...]
 		  vals[i] = VECTOR_CST_ELT (arg0, idx + i);
return build_vector (type, vals);
}
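
For illustration (not part of the patch): the VECTOR_CST path kept here
covers sub-vector extracts such as the following internals sketch, where
v2si_type and v4si_cst stand for an assumed V2SI vector type and a V4SI
VECTOR_CST:

    /* Extract the high half (bits 64..127) of a V4SI constant as a V2SI
       vector; the loop above collects VECTOR_CST elements 2 and 3 and
       rebuilds the result with build_vector.  */
    tree half = fold_build3 (BIT_FIELD_REF, v2si_type, v4si_cst,
			     bitsize_int (64), bitsize_int (64));
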
-
- /* Constructor elements can be subvectors. */
- unsigned HOST_WIDE_INT k = 1;
- if (CONSTRUCTOR_NELTS (arg0) != 0)
- {
- tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
- if (TREE_CODE (cons_elem) == VECTOR_TYPE)
- k = TYPE_VECTOR_SUBPARTS (cons_elem);
- }
-
- /* We keep an exact subset of the constructor elements. */
- if ((idx % k) == 0 && (n % k) == 0)
- {
- if (CONSTRUCTOR_NELTS (arg0) == 0)
- return build_constructor (type, NULL);
- idx /= k;
- n /= k;
- if (n == 1)
- {
- if (idx < CONSTRUCTOR_NELTS (arg0))
- return CONSTRUCTOR_ELT (arg0, idx)->value;
- return build_zero_cst (type);
- }
-
- vec<constructor_elt, va_gc> *vals;
- vec_alloc (vals, n);
- for (unsigned i = 0;
- i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
- ++i)
- CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
- CONSTRUCTOR_ELT
- (arg0, idx + i)->value);
- return build_constructor (type, vals);
- }
- /* The bitfield references a single constructor element. */
- else if (idx + n <= (idx / k + 1) * k)
- {
- if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
- return build_zero_cst (type);
- else if (n == k)
- return CONSTRUCTOR_ELT (arg0, idx / k)->value;
- else
- return fold_build3_loc (loc, code, type,
- CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
- build_int_cst (TREE_TYPE (op2), (idx % k) * width));
- }
}
}
- /* A bit-field-ref that referenced the full argument can be stripped. */
- if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
- && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
- && integer_zerop (op2))
- return fold_convert_loc (loc, type, arg0);
-
/* On constants we can use native encode/interpret to constant
fold (nearly) all BIT_FIELD_REFs. */
if (CONSTANT_CLASS_P (arg0)
&& can_native_interpret_type_p (type)
- && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
- /* This limitation should not be necessary, we just need to
- round this up to mode size. */
- && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
- /* Need bit-shifting of the buffer to relax the following. */
- && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
+ && BITS_PER_UNIT == 8)
{
unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
- unsigned HOST_WIDE_INT clen;
- clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
- /* ??? We cannot tell native_encode_expr to start at
- some random byte only. So limit us to a reasonable amount
- of work. */
- if (clen <= 4096)
+	  /* Limit us to a reasonable amount of work.  To relax the
+	     other two limitations we would need bit-shifting of the
+	     buffer and rounding the size up to mode size.  */
+ if (bitpos % BITS_PER_UNIT == 0
+ && bitsize % BITS_PER_UNIT == 0
+ && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
{
- unsigned char *b = XALLOCAVEC (unsigned char, clen);
- unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
+ unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
+ unsigned HOST_WIDE_INT len
+ = native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
+ bitpos / BITS_PER_UNIT);
if (len > 0
- && len * BITS_PER_UNIT >= bitpos + bitsize)
+ && len * BITS_PER_UNIT >= bitsize)
{
- tree v = native_interpret_expr (type,
- b + bitpos / BITS_PER_UNIT,
+ tree v = native_interpret_expr (type, b,
bitsize / BITS_PER_UNIT);
if (v)
return v;
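
To make the new buffer handling concrete: because native_encode_expr now
takes a byte offset, only the referenced bytes are encoded into the
fixed-size on-stack buffer, so the size of the constant's type no longer
matters.  A hedged sketch, where dbl_cst stands for an assumed REAL_CST
of type double:

    /* Reinterpret bits 32..63 of a double constant as a 32-bit integer:
       four bytes are encoded starting at byte offset 4 and handed to
       native_interpret_expr.  The old code encoded the whole object and
       gave up beyond 4096 bytes.  */
    tree word = fold_build3 (BIT_FIELD_REF, unsigned_type_node, dbl_cst,
			     bitsize_int (32), bitsize_int (32));
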
--- a/gcc/match.pd
+++ b/gcc/match.pd
[...]
 			      WARN_STRICT_OVERFLOW_COMPARISON);
}
(cmp @0 { res; })))))))))
+
+/* Canonicalizations of BIT_FIELD_REFs. */
+
+(simplify
+ (BIT_FIELD_REF @0 @1 @2)
+ (switch
+ (if (TREE_CODE (TREE_TYPE (@0)) == COMPLEX_TYPE
+ && tree_int_cst_equal (@1, TYPE_SIZE (TREE_TYPE (TREE_TYPE (@0)))))
+ (switch
+ (if (integer_zerop (@2))
+ (view_convert (realpart @0)))
+ (if (tree_int_cst_equal (@2, TYPE_SIZE (TREE_TYPE (TREE_TYPE (@0)))))
+ (view_convert (imagpart @0)))))
+ (if (INTEGRAL_TYPE_P (TREE_TYPE (@0))
+ && INTEGRAL_TYPE_P (type)
+ /* A bit-field-ref that referenced the full argument can be stripped. */
+ && ((compare_tree_int (@1, TYPE_PRECISION (TREE_TYPE (@0))) == 0
+ && integer_zerop (@2))
+ /* Low-parts can be reduced to integral conversions.
+ ??? The following doesn't work for PDP endian. */
+ || (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
+ /* Don't even think about BITS_BIG_ENDIAN. */
+ && TYPE_PRECISION (TREE_TYPE (@0)) % BITS_PER_UNIT == 0
+ && TYPE_PRECISION (type) % BITS_PER_UNIT == 0
+ && compare_tree_int (@2, (BYTES_BIG_ENDIAN
+ ? (TYPE_PRECISION (TREE_TYPE (@0))
+ - TYPE_PRECISION (type))
+ : 0)) == 0)))
+ (convert @0))))
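
For illustration (not part of the patch), the two arms of this switch
cover cases like the following sketch, where c and x stand for assumed
_Complex float and 32-bit unsigned int operands on a little-endian
target:

    /* First arm: BIT_FIELD_REF <c, 32, 32> selects the imaginary part,
       i.e. IMAGPART_EXPR <c> (wrapped in a VIEW_CONVERT_EXPR if the
       requested type differs).  */
    tree im = fold_build3 (BIT_FIELD_REF, float_type_node, c,
			   bitsize_int (32), bitsize_int (32));
    /* Second arm: BIT_FIELD_REF <x, 8, 0> is a low-part extract and
       becomes the conversion (unsigned char) x; on a big-endian target
       the offset would have to be 24 instead.  */
    tree low = fold_build3 (BIT_FIELD_REF, unsigned_char_type_node, x,
			    bitsize_int (8), bitsize_int (0));
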
+
+/* Simplify vector extracts. */
+
+(simplify
+ (BIT_FIELD_REF CONSTRUCTOR@0 @1 @2)
+ (if (VECTOR_TYPE_P (TREE_TYPE (@0))
+ && (types_match (type, TREE_TYPE (TREE_TYPE (@0)))
+ || (VECTOR_TYPE_P (type)
+ && types_match (TREE_TYPE (type), TREE_TYPE (TREE_TYPE (@0))))))
+ (with
+ {
+ tree ctor = (TREE_CODE (@0) == SSA_NAME
+ ? gimple_assign_rhs1 (SSA_NAME_DEF_STMT (@0)) : @0);
+ tree eltype = TREE_TYPE (TREE_TYPE (ctor));
+ unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
+ unsigned HOST_WIDE_INT n = tree_to_uhwi (@1);
+ unsigned HOST_WIDE_INT idx = tree_to_uhwi (@2);
+ }
+ (if (n != 0
+ && (idx % width) == 0
+ && (n % width) == 0
+ && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (ctor)))
+ (with
+ {
+ idx = idx / width;
+ n = n / width;
+ /* Constructor elements can be subvectors. */
+ unsigned HOST_WIDE_INT k = 1;
+ if (CONSTRUCTOR_NELTS (ctor) != 0)
+ {
+ tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (ctor, 0)->value);
+ if (TREE_CODE (cons_elem) == VECTOR_TYPE)
+ k = TYPE_VECTOR_SUBPARTS (cons_elem);
+ }
+ }
+ (switch
+ /* We keep an exact subset of the constructor elements. */
+ (if ((idx % k) == 0 && (n % k) == 0)
+ (if (CONSTRUCTOR_NELTS (ctor) == 0)
+ { build_constructor (type, NULL); }
+ (with
+ {
+ idx /= k;
+ n /= k;
+ }
+ (if (n == 1)
+ (if (idx < CONSTRUCTOR_NELTS (ctor))
+ { CONSTRUCTOR_ELT (ctor, idx)->value; }
+ { build_zero_cst (type); })
+ {
+ vec<constructor_elt, va_gc> *vals;
+ vec_alloc (vals, n);
+ for (unsigned i = 0;
+ i < n && idx + i < CONSTRUCTOR_NELTS (ctor); ++i)
+ CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
+ CONSTRUCTOR_ELT (ctor, idx + i)->value);
+ build_constructor (type, vals);
+ }))))
+ /* The bitfield references a single constructor element. */
+ (if (idx + n <= (idx / k + 1) * k)
+ (switch
+ (if (CONSTRUCTOR_NELTS (ctor) <= idx / k)
+ { build_zero_cst (type); })
+ (if (n == k)
+ { CONSTRUCTOR_ELT (ctor, idx / k)->value; })
+ (BIT_FIELD_REF { CONSTRUCTOR_ELT (ctor, idx / k)->value; }
+ @1 { bitsize_int ((idx % k) * width); })))))))))
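
Finally, two cases the constructor pattern is meant to catch, as a hedged
sketch (ctor stands for an assumed GENERIC CONSTRUCTOR of a V4SI type
with scalar int elements):

    /* Scalar extract (n == 1, k == 1): BIT_FIELD_REF <ctor, 32, 32>
       yields CONSTRUCTOR_ELT (ctor, 1)->value, or zero if the
       constructor has fewer than two elements.  */
    tree elt = fold_build3 (BIT_FIELD_REF, integer_type_node, ctor,
			    bitsize_int (32), bitsize_int (32));
    /* If ctor were instead built from two V2SI subvectors (k == 2), the
       same reference with size 64 and offset 64 would, after dividing
       idx and n by k, return the second subvector unchanged.  */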