From: Jakub Jelinek Date: Mon, 31 Aug 2020 08:27:00 +0000 (+0200) Subject: varasm: Optimize memory broadcast for constant vector under AVX512 [PR54201] X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=0106300f6c3f7bae5eb1c46dbd45aa07c94e1b15;p=gcc.git varasm: Optimize memory broadcast for constant vector under AVX512 [PR54201] I meant something like the following, which on e.g. a dumb: typedef float V __attribute__((vector_size (4 * sizeof (float)))); void foo (V *p, float *q) { p[0] += (V) { 1.0f, 2.0f, 3.0f, 4.0f }; q[0] += 4.0f; q[1] -= 3.0f; q[17] -= 2.0f; q[31] += 1.0f; } testcase merges all the 4 scalar constant pool entries into the CONST_VECTOR one. I'm punting for section anchors and not doing it in the per-function (i.e. non-shared) constant pools simply because I don't know them well enough, don't know whether backends use the offsets for something etc. For section anchors, I guess it would need to be done before (re)computing the offsets and arrange for the desc->mark < 0 entries not to be considered as objects in the object block, for non-shared pools, perhaps it would be enough to call the new function from output_constant_pool before calling recompute_pool_offsets and adjust recompute_pool_offsets to ignore desc->mark < 0. Here is an adjusted patch that ought to merge even the same sized different mode vectors with the same byte representation, etc. It won't really help with avoiding the multiple reads of the constant in the same function, but as you found, your patch doesn't help with that either. Your patch isn't really incompatible with what the patch below does, though I wonder whether a) it wouldn't be better to always canonicalize to an integral mode with as few elts as possible even e.g. for floats b) whether asserting that it simplify_rtx succeeds is safe, whether it shouldn't just canonicalize if the canonicalization works and just do what it previously did otherwise. 
The following patch puts all pool entries which can be natively encoded into a vector, sorts it by decreasing size, determines minimum size of a pool entry and adds hash elts for each (aligned) min_size or wider power of two-ish portion of the pool constant in addition to the whole pool constant byte representation. This is the version that passed bootstrap/regtest on both x86_64-linux and i686-linux. In both bootstraps/regtests together, it saved (from the statistics I've gathered) 63104 .rodata bytes (before constant merging), in 6814 hits of the data->desc->mark = ~(*slot)->desc->labelno;. 2020-08-31 Jakub Jelinek PR middle-end/54201 * varasm.c: Include alloc-pool.h. (output_constant_pool_contents): Emit desc->mark < 0 entries as aliases. (struct constant_descriptor_rtx_data): New type. (constant_descriptor_rtx_data_cmp): New function. (struct const_rtx_data_hasher): New type. (const_rtx_data_hasher::hash, const_rtx_data_hasher::equal): New methods. (optimize_constant_pool): New function. (output_shared_constant_pool): Call it if TARGET_SUPPORTS_ALIASES. --- diff --git a/gcc/varasm.c b/gcc/varasm.c index 84df52013d7..ea0b59cf44a 100644 --- a/gcc/varasm.c +++ b/gcc/varasm.c @@ -57,6 +57,7 @@ along with GCC; see the file COPYING3. If not see #include "asan.h" #include "rtl-iter.h" #include "file-prefix-map.h" /* remap_debug_filename() */ +#include "alloc-pool.h" #ifdef XCOFF_DEBUGGING_INFO #include "xcoffout.h" /* Needed for external data declarations. 
*/
@@ -4198,7 +4199,27 @@ output_constant_pool_contents (struct rtx_constant_pool *pool)
   class constant_descriptor_rtx *desc;

   for (desc = pool->first; desc ; desc = desc->next)
-    if (desc->mark)
+    if (desc->mark < 0)
+      {
+#ifdef ASM_OUTPUT_DEF
+	const char *name = targetm.strip_name_encoding (XSTR (desc->sym, 0));
+	char label[256];
+	char buffer[256 + 32];
+	const char *p;
+
+	ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
+	p = targetm.strip_name_encoding (label);
+	if (desc->offset)
+	  {
+	    sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
+	    p = buffer;
+	  }
+	ASM_OUTPUT_DEF (asm_out_file, name, p);
+#else
+	gcc_unreachable ();
+#endif
+      }
+    else if (desc->mark)
       {
	/* If the constant is part of an object_block, make sure that
	   the constant has been positioned within its block, but do not
@@ -4216,6 +4237,160 @@ output_constant_pool_contents (struct rtx_constant_pool *pool)
     }
 }

+struct constant_descriptor_rtx_data {
+  constant_descriptor_rtx *desc;
+  target_unit *bytes;
+  unsigned short size;
+  unsigned short offset;
+  unsigned int hash;
+};
+
+/* qsort callback to sort constant_descriptor_rtx_data * vector by
+   decreasing size.  */
+
+static int
+constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
+{
+  constant_descriptor_rtx_data *const data1
+    = *(constant_descriptor_rtx_data * const *) p1;
+  constant_descriptor_rtx_data *const data2
+    = *(constant_descriptor_rtx_data * const *) p2;
+  if (data1->size > data2->size)
+    return -1;
+  if (data1->size < data2->size)
+    return 1;
+  if (data1->hash < data2->hash)
+    return -1;
+  gcc_assert (data1->hash > data2->hash);
+  return 1;
+}
+
+struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
+{
+  static hashval_t hash (constant_descriptor_rtx_data *);
+  static bool equal (constant_descriptor_rtx_data *,
+		     constant_descriptor_rtx_data *);
+};
+
+/* Hash and compare functions for const_rtx_data_htab.
*/
+
+hashval_t
+const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
+{
+  return data->hash;
+}
+
+bool
+const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
+			      constant_descriptor_rtx_data *y)
+{
+  if (x->hash != y->hash || x->size != y->size)
+    return 0;
+  unsigned int align1 = x->desc->align;
+  unsigned int align2 = y->desc->align;
+  unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
+  unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
+  if (offset1)
+    align1 = least_bit_hwi (offset1);
+  if (offset2)
+    align2 = least_bit_hwi (offset2);
+  if (align2 > align1)
+    return 0;
+  if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
+    return 0;
+  return 1;
+}
+
+/* Attempt to optimize constant pool POOL.  If it contains both CONST_VECTOR
+   constants and scalar constants with the values of CONST_VECTOR elements,
+   try to alias the scalar constants with the CONST_VECTOR elements.  */
+
+static void
+optimize_constant_pool (struct rtx_constant_pool *pool)
+{
+  auto_vec<target_unit, 128> buffer;
+  auto_vec<constant_descriptor_rtx_data *, 128> vec;
+  object_allocator<constant_descriptor_rtx_data>
+    data_pool ("constant_descriptor_rtx_data_pool");
+  int idx = 0;
+  size_t size = 0;
+  for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
+    if (desc->mark > 0
+	&& !
(SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
+	      && SYMBOL_REF_BLOCK (desc->sym)))
+      {
+	buffer.truncate (0);
+	buffer.reserve (GET_MODE_SIZE (desc->mode));
+	if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
+			       GET_MODE_SIZE (desc->mode)))
+	  {
+	    constant_descriptor_rtx_data *data = data_pool.allocate ();
+	    data->desc = desc;
+	    data->bytes = NULL;
+	    data->size = GET_MODE_SIZE (desc->mode);
+	    data->offset = 0;
+	    data->hash = idx++;
+	    size += data->size;
+	    vec.safe_push (data);
+	  }
+      }
+  if (idx)
+    {
+      vec.qsort (constant_descriptor_rtx_data_cmp);
+      unsigned min_size = vec.last ()->size;
+      target_unit *bytes = XNEWVEC (target_unit, size);
+      unsigned int i;
+      constant_descriptor_rtx_data *data;
+      hash_table<const_rtx_data_hasher> *htab
+	= new hash_table<const_rtx_data_hasher> (31);
+      size = 0;
+      FOR_EACH_VEC_ELT (vec, i, data)
+	{
+	  buffer.truncate (0);
+	  native_encode_rtx (data->desc->mode, data->desc->constant,
+			     buffer, 0, data->size);
+	  memcpy (bytes + size, buffer.address (), data->size);
+	  data->bytes = bytes + size;
+	  data->hash = iterative_hash (data->bytes,
+				       data->size * sizeof (target_unit), 0);
+	  size += data->size;
+	  constant_descriptor_rtx_data **slot
+	    = htab->find_slot_with_hash (data, data->hash, INSERT);
+	  if (*slot)
+	    {
+	      data->desc->mark = ~(*slot)->desc->labelno;
+	      data->desc->offset = (*slot)->offset;
+	    }
+	  else
+	    {
+	      unsigned int sz = 1 << floor_log2 (data->size);
+
+	      *slot = data;
+	      for (sz >>= 1; sz >= min_size; sz >>= 1)
+		for (unsigned off = 0; off + sz <= data->size; off += sz)
+		  {
+		    constant_descriptor_rtx_data tmp;
+		    tmp.desc = data->desc;
+		    tmp.bytes = data->bytes + off;
+		    tmp.size = sz;
+		    tmp.offset = off;
+		    tmp.hash = iterative_hash (tmp.bytes,
+					       sz * sizeof (target_unit), 0);
+		    slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
+		    if (*slot == NULL)
+		      {
+			*slot = data_pool.allocate ();
+			**slot = tmp;
+		      }
+		  }
+	    }
+	}
+      delete htab;
+      XDELETE (bytes);
+    }
+  data_pool.release ();
+}
+
 /* Mark all constants that are used in the current function,
    then write out the
function's private constant pool. */ @@ -4251,6 +4426,10 @@ output_constant_pool (const char *fnname ATTRIBUTE_UNUSED, void output_shared_constant_pool (void) { + if (optimize + && TARGET_SUPPORTS_ALIASES) + optimize_constant_pool (shared_constant_pool); + output_constant_pool_contents (shared_constant_pool); }