LTO: get_section: add new argument
[gcc.git] / gcc / varasm.c
1 /* Output variables, constants and external declarations, for GNU compiler.
2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* This file handles generation of all the assembler code
22 *except* the instructions of a function.
23 This includes declarations of variables and their initial values.
24
25 We also output the assembler code for constants stored in memory
26 and are responsible for combining constants with the same value. */
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "backend.h"
32 #include "target.h"
33 #include "rtl.h"
34 #include "tree.h"
35 #include "predict.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
38 #include "stringpool.h"
39 #include "regs.h"
40 #include "emit-rtl.h"
41 #include "cgraph.h"
42 #include "diagnostic-core.h"
43 #include "fold-const.h"
44 #include "stor-layout.h"
45 #include "varasm.h"
46 #include "flags.h"
47 #include "stmt.h"
48 #include "expr.h"
49 #include "expmed.h"
50 #include "optabs.h"
51 #include "output.h"
52 #include "langhooks.h"
53 #include "debug.h"
54 #include "common/common-target.h"
55 #include "stringpool.h"
56 #include "attribs.h"
57 #include "asan.h"
58 #include "rtl-iter.h"
59 #include "file-prefix-map.h" /* remap_debug_filename() */
60 #include "alloc-pool.h"
61
62 #ifdef XCOFF_DEBUGGING_INFO
63 #include "xcoffout.h" /* Needed for external data declarations. */
64 #endif
65
66 /* The (assembler) name of the first globally-visible object output. */
67 extern GTY(()) const char *first_global_object_name;
68 extern GTY(()) const char *weak_global_object_name;
69
70 const char *first_global_object_name;
71 const char *weak_global_object_name;
72
73 class addr_const;
74 class constant_descriptor_rtx;
75 struct rtx_constant_pool;
76
77 #define n_deferred_constants (crtl->varasm.deferred_constants)
78
79 /* Number for making the label on the next
80 constant that is stored in memory. */
81
82 static GTY(()) int const_labelno;
83
84 /* Carry information from ASM_DECLARE_OBJECT_NAME
85 to ASM_FINISH_DECLARE_OBJECT. */
86
87 int size_directive_output;
88
89 /* The last decl for which assemble_variable was called,
90 if it did ASM_DECLARE_OBJECT_NAME.
91 If the last call to assemble_variable didn't do that,
92 this holds 0. */
93
94 tree last_assemble_variable_decl;
95
96 /* The following global variable indicates if the first basic block
97 in a function belongs to the cold partition or not. */
98
99 bool first_function_block_is_cold;
100
101 /* Whether we saw any functions with no_split_stack. */
102
103 static bool saw_no_split_stack;
104
105 static const char *strip_reg_name (const char *);
106 static int contains_pointers_p (tree);
107 #ifdef ASM_OUTPUT_EXTERNAL
108 static bool incorporeal_function_p (tree);
109 #endif
110 static void decode_addr_const (tree, class addr_const *);
111 static hashval_t const_hash_1 (const tree);
112 static int compare_constant (const tree, const tree);
113 static void output_constant_def_contents (rtx);
114 static void output_addressed_constants (tree, int);
115 static unsigned HOST_WIDE_INT output_constant (tree, unsigned HOST_WIDE_INT,
116 unsigned int, bool, bool);
117 static void globalize_decl (tree);
118 static bool decl_readonly_section_1 (enum section_category);
119 #ifdef BSS_SECTION_ASM_OP
120 #ifdef ASM_OUTPUT_ALIGNED_BSS
121 static void asm_output_aligned_bss (FILE *, tree, const char *,
122 unsigned HOST_WIDE_INT, int)
123 ATTRIBUTE_UNUSED;
124 #endif
125 #endif /* BSS_SECTION_ASM_OP */
126 static void mark_weak (tree);
127 static void output_constant_pool (const char *, tree);
128 static void handle_vtv_comdat_section (section *, const_tree);
129 \f
130 /* Well-known sections, each one associated with some sort of *_ASM_OP. */
131 section *text_section;
132 section *data_section;
133 section *readonly_data_section;
134 section *sdata_section;
135 section *ctors_section;
136 section *dtors_section;
137 section *bss_section;
138 section *sbss_section;
139
140 /* Various forms of common section. All are guaranteed to be nonnull. */
141 section *tls_comm_section;
142 section *comm_section;
143 section *lcomm_section;
144
145 /* A SECTION_NOSWITCH section used for declaring global BSS variables.
146 May be null. */
147 section *bss_noswitch_section;
148
149 /* The section that holds the main exception table, when known. The section
150 is set either by the target's init_sections hook or by the first call to
151 switch_to_exception_section. */
152 section *exception_section;
153
154 /* The section that holds the DWARF2 frame unwind information, when known.
155 The section is set either by the target's init_sections hook or by the
156 first call to switch_to_eh_frame_section. */
157 section *eh_frame_section;
158
159 /* asm_out_file's current section. This is NULL if no section has yet
160 been selected or if we lose track of what the current section is. */
161 section *in_section;
162
163 /* True if code for the current function is currently being directed
164 at the cold section. */
165 bool in_cold_section_p;
166
167 /* The following global holds the "function name" for the code in the
168 cold section of a function, if hot/cold function splitting is enabled
169 and there was actually code that went into the cold section. A
170 pseudo function name is needed for the cold section of code for some
171 debugging tools that perform symbolization. */
172 tree cold_function_name = NULL_TREE;
173
174 /* A linked list of all the unnamed sections. */
175 static GTY(()) section *unnamed_sections;
176
177 /* Return a nonzero value if DECL has a section attribute. */
178 #define IN_NAMED_SECTION(DECL) \
179 (VAR_OR_FUNCTION_DECL_P (DECL) && DECL_SECTION_NAME (DECL) != NULL)
180
181 struct section_hasher : ggc_ptr_hash<section>
182 {
183 typedef const char *compare_type;
184
185 static hashval_t hash (section *);
186 static bool equal (section *, const char *);
187 };
188
189 /* Hash table of named sections. */
190 static GTY(()) hash_table<section_hasher> *section_htab;
191
192 struct object_block_hasher : ggc_ptr_hash<object_block>
193 {
194 typedef const section *compare_type;
195
196 static hashval_t hash (object_block *);
197 static bool equal (object_block *, const section *);
198 };
199
200 /* A table of object_blocks, indexed by section. */
201 static GTY(()) hash_table<object_block_hasher> *object_block_htab;
202
203 /* The next number to use for internal anchor labels. */
204 static GTY(()) int anchor_labelno;
205
206 /* A pool of constants that can be shared between functions. */
207 static GTY(()) struct rtx_constant_pool *shared_constant_pool;
208
209 /* Helper routines for maintaining section_htab. */
210
211 bool
212 section_hasher::equal (section *old, const char *new_name)
213 {
214 return strcmp (old->named.name, new_name) == 0;
215 }
216
217 hashval_t
218 section_hasher::hash (section *old)
219 {
220 return htab_hash_string (old->named.name);
221 }
222
223 /* Return a hash value for section SECT. */
224
225 static hashval_t
226 hash_section (section *sect)
227 {
228 if (sect->common.flags & SECTION_NAMED)
229 return htab_hash_string (sect->named.name);
230 return sect->common.flags & ~SECTION_DECLARED;
231 }
232
233 /* Helper routines for maintaining object_block_htab. */
234
235 inline bool
236 object_block_hasher::equal (object_block *old, const section *new_section)
237 {
238 return old->sect == new_section;
239 }
240
241 hashval_t
242 object_block_hasher::hash (object_block *old)
243 {
244 return hash_section (old->sect);
245 }
246
247 /* Return a new unnamed section with the given fields. */
248
249 section *
250 get_unnamed_section (unsigned int flags, void (*callback) (const void *),
251 const void *data)
252 {
253 section *sect;
254
255 sect = ggc_alloc<section> ();
256 sect->unnamed.common.flags = flags | SECTION_UNNAMED;
257 sect->unnamed.callback = callback;
258 sect->unnamed.data = data;
259 sect->unnamed.next = unnamed_sections;
260
261 unnamed_sections = sect;
262 return sect;
263 }
264
265 /* Return a SECTION_NOSWITCH section with the given fields. */
266
267 static section *
268 get_noswitch_section (unsigned int flags, noswitch_section_callback callback)
269 {
270 section *sect;
271
272 sect = ggc_alloc<section> ();
273 sect->noswitch.common.flags = flags | SECTION_NOSWITCH;
274 sect->noswitch.callback = callback;
275
276 return sect;
277 }
278
279 /* Return the named section structure associated with NAME. Create
280 a new section with the given fields if no such structure exists.
281 When NOT_EXISTING, then fail if the section already exists. */
282
283 section *
284 get_section (const char *name, unsigned int flags, tree decl,
285 bool not_existing)
286 {
287 section *sect, **slot;
288
289 slot = section_htab->find_slot_with_hash (name, htab_hash_string (name),
290 INSERT);
291 flags |= SECTION_NAMED;
292 if (*slot == NULL)
293 {
294 sect = ggc_alloc<section> ();
295 sect->named.common.flags = flags;
296 sect->named.name = ggc_strdup (name);
297 sect->named.decl = decl;
298 *slot = sect;
299 }
300 else
301 {
302 if (not_existing)
303 internal_error ("Section already exists: %qs", name);
304
305 sect = *slot;
306 /* It is fine if one of the sections has SECTION_NOTYPE as long as
307 the other has none of the contrary flags (see the logic at the end
308 of default_section_type_flags, below). */
309 if (((sect->common.flags ^ flags) & SECTION_NOTYPE)
310 && !((sect->common.flags | flags)
311 & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE
312 | (HAVE_COMDAT_GROUP ? SECTION_LINKONCE : 0))))
313 {
314 sect->common.flags |= SECTION_NOTYPE;
315 flags |= SECTION_NOTYPE;
316 }
317 if ((sect->common.flags & ~SECTION_DECLARED) != flags
318 && ((sect->common.flags | flags) & SECTION_OVERRIDE) == 0)
319 {
320 /* It is fine if one of the section flags is
321 SECTION_WRITE | SECTION_RELRO and the other has none of these
322 flags (i.e. read-only) in named sections and either the
323 section hasn't been declared yet or has been declared as writable.
324 In that case just make sure the resulting flags are
325 SECTION_WRITE | SECTION_RELRO, ie. writable only because of
326 relocations. */
327 if (((sect->common.flags ^ flags) & (SECTION_WRITE | SECTION_RELRO))
328 == (SECTION_WRITE | SECTION_RELRO)
329 && (sect->common.flags
330 & ~(SECTION_DECLARED | SECTION_WRITE | SECTION_RELRO))
331 == (flags & ~(SECTION_WRITE | SECTION_RELRO))
332 && ((sect->common.flags & SECTION_DECLARED) == 0
333 || (sect->common.flags & SECTION_WRITE)))
334 {
335 sect->common.flags |= (SECTION_WRITE | SECTION_RELRO);
336 return sect;
337 }
338 /* Sanity check user variables for flag changes. */
339 if (sect->named.decl != NULL
340 && DECL_P (sect->named.decl)
341 && decl != sect->named.decl)
342 {
343 if (decl != NULL && DECL_P (decl))
344 error ("%+qD causes a section type conflict with %qD",
345 decl, sect->named.decl);
346 else
347 error ("section type conflict with %qD", sect->named.decl);
348 inform (DECL_SOURCE_LOCATION (sect->named.decl),
349 "%qD was declared here", sect->named.decl);
350 }
351 else if (decl != NULL && DECL_P (decl))
352 error ("%+qD causes a section type conflict", decl);
353 else
354 error ("section type conflict");
355 /* Make sure we don't error about one section multiple times. */
356 sect->common.flags |= SECTION_OVERRIDE;
357 }
358 }
359 return sect;
360 }
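
/* Illustrative sketch, not part of varasm.c: one way a caller might use
   get_section and its new NOT_EXISTING argument.  The section name and
   flag combination below are examples only, not taken from real callers.  */
#if 0
static void
example_get_section_usage (tree decl)
{
  /* Create or reuse a writable named section for DECL's data.  */
  section *sec = get_section (".mydata", SECTION_WRITE, decl, false);
  switch_to_section (sec);

  /* With NOT_EXISTING set, asking for a name that was already created
     triggers internal_error ("Section already exists: ...").  */
  section *fresh = get_section (".myunique", SECTION_WRITE, NULL_TREE, true);
  switch_to_section (fresh);
}
#endif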
361
362 /* Return true if the current compilation mode benefits from having
363 objects grouped into blocks. */
364
365 static bool
366 use_object_blocks_p (void)
367 {
368 return flag_section_anchors;
369 }
370
371 /* Return the object_block structure for section SECT. Create a new
372 structure if we haven't created one already. Return null if SECT
373 itself is null. Return also null for mergeable sections since
374 section anchors can't be used in mergeable sections anyway,
375 because the linker might move objects around, and using the
376 object blocks infrastructure in that case is both a waste and a
377 maintenance burden. */
378
379 static struct object_block *
380 get_block_for_section (section *sect)
381 {
382 struct object_block *block;
383
384 if (sect == NULL)
385 return NULL;
386
387 if (sect->common.flags & SECTION_MERGE)
388 return NULL;
389
390 object_block **slot
391 = object_block_htab->find_slot_with_hash (sect, hash_section (sect),
392 INSERT);
393 block = *slot;
394 if (block == NULL)
395 {
396 block = ggc_cleared_alloc<object_block> ();
397 block->sect = sect;
398 *slot = block;
399 }
400 return block;
401 }
402
403 /* Create a symbol with label LABEL and place it at byte offset
404 OFFSET in BLOCK. OFFSET can be negative if the symbol's offset
405 is not yet known. LABEL must be a garbage-collected string. */
406
407 static rtx
408 create_block_symbol (const char *label, struct object_block *block,
409 HOST_WIDE_INT offset)
410 {
411 rtx symbol;
412 unsigned int size;
413
414 /* Create the extended SYMBOL_REF. */
415 size = RTX_HDR_SIZE + sizeof (struct block_symbol);
416 symbol = (rtx) ggc_internal_alloc (size);
417
418 /* Initialize the normal SYMBOL_REF fields. */
419 memset (symbol, 0, size);
420 PUT_CODE (symbol, SYMBOL_REF);
421 PUT_MODE (symbol, Pmode);
422 XSTR (symbol, 0) = label;
423 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_HAS_BLOCK_INFO;
424
425 /* Initialize the block_symbol stuff. */
426 SYMBOL_REF_BLOCK (symbol) = block;
427 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
428
429 return symbol;
430 }
431
432 /* Return a section with a particular name and with whatever SECTION_*
433 flags section_type_flags deems appropriate. The name of the section
434 is taken from NAME if nonnull, otherwise it is taken from DECL's
435 DECL_SECTION_NAME. DECL is the decl associated with the section
436 (see the section comment for details) and RELOC is as for
437 section_type_flags. */
438
439 section *
440 get_named_section (tree decl, const char *name, int reloc)
441 {
442 unsigned int flags;
443
444 if (name == NULL)
445 {
446 gcc_assert (decl && DECL_P (decl) && DECL_SECTION_NAME (decl));
447 name = DECL_SECTION_NAME (decl);
448 }
449
450 flags = targetm.section_type_flags (decl, name, reloc);
451 return get_section (name, flags, decl);
452 }
453
454 /* Worker for resolve_unique_section. */
455
456 static bool
457 set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
458 {
459 n->implicit_section = true;
460 return false;
461 }
462
463 /* If required, set DECL_SECTION_NAME to a unique name. */
464
465 void
466 resolve_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED,
467 int flag_function_or_data_sections)
468 {
469 if (DECL_SECTION_NAME (decl) == NULL
470 && targetm_common.have_named_sections
471 && (flag_function_or_data_sections
472 || DECL_COMDAT_GROUP (decl)))
473 {
474 targetm.asm_out.unique_section (decl, reloc);
475 if (DECL_SECTION_NAME (decl))
476 symtab_node::get (decl)->call_for_symbol_and_aliases
477 (set_implicit_section, NULL, true);
478 }
479 }
480
481 #ifdef BSS_SECTION_ASM_OP
482
483 #ifdef ASM_OUTPUT_ALIGNED_BSS
484
485 /* Utility function for targets to use in implementing
486 ASM_OUTPUT_ALIGNED_BSS.
487 ??? It is believed that this function will work in most cases so such
488 support is localized here. */
489
490 static void
491 asm_output_aligned_bss (FILE *file, tree decl ATTRIBUTE_UNUSED,
492 const char *name, unsigned HOST_WIDE_INT size,
493 int align)
494 {
495 switch_to_section (bss_section);
496 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
497 #ifdef ASM_DECLARE_OBJECT_NAME
498 last_assemble_variable_decl = decl;
499 ASM_DECLARE_OBJECT_NAME (file, name, decl);
500 #else
501 /* Standard thing is just output label for the object. */
502 ASM_OUTPUT_LABEL (file, name);
503 #endif /* ASM_DECLARE_OBJECT_NAME */
504 ASM_OUTPUT_SKIP (file, size ? size : 1);
505 }
506
507 #endif
508
509 #endif /* BSS_SECTION_ASM_OP */
510
511 #ifndef USE_SELECT_SECTION_FOR_FUNCTIONS
512 /* Return the hot section for function DECL. Return text_section for
513 null DECLs. */
514
515 static section *
516 hot_function_section (tree decl)
517 {
518 if (decl != NULL_TREE
519 && DECL_SECTION_NAME (decl) != NULL
520 && targetm_common.have_named_sections)
521 return get_named_section (decl, NULL, 0);
522 else
523 return text_section;
524 }
525 #endif
526
 527 /* Return the section for TEXT_SECTION_NAME if DECL or DECL_SECTION_NAME (DECL)
 528    is NULL.
 529
 530    When DECL_SECTION_NAME is non-NULL and NAMED_SECTION_SUFFIX is non-NULL,
 531    produce a section named by concatenating DECL's section name with
 532    NAMED_SECTION_SUFFIX.  If instead DECL's section is implicit, produce
 533    "TEXT_SECTION_NAME.IMPLICIT_NAME".  */
534
535 section *
536 get_named_text_section (tree decl,
537 const char *text_section_name,
538 const char *named_section_suffix)
539 {
540 if (decl && DECL_SECTION_NAME (decl))
541 {
542 if (named_section_suffix)
543 {
544 const char *dsn = DECL_SECTION_NAME (decl);
545 const char *stripped_name;
546 char *name, *buffer;
547
548 name = (char *) alloca (strlen (dsn) + 1);
549 memcpy (name, dsn,
550 strlen (dsn) + 1);
551
552 stripped_name = targetm.strip_name_encoding (name);
553
554 buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
555 return get_named_section (decl, buffer, 0);
556 }
557 else if (symtab_node::get (decl)->implicit_section)
558 {
559 const char *name;
560
561 /* Do not try to split gnu_linkonce functions. This gets somewhat
 562 slippery.  */
563 if (DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP)
564 return NULL;
565 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
566 name = targetm.strip_name_encoding (name);
567 return get_named_section (decl, ACONCAT ((text_section_name, ".",
568 name, NULL)), 0);
569 }
570 else
571 return NULL;
572 }
573 return get_named_section (decl, text_section_name, 0);
574 }
575
576 /* Choose named function section based on its frequency. */
577
578 section *
579 default_function_section (tree decl, enum node_frequency freq,
580 bool startup, bool exit)
581 {
582 #if defined HAVE_LD_EH_GC_SECTIONS && defined HAVE_LD_EH_GC_SECTIONS_BUG
583 /* Old GNU linkers have buggy --gc-section support, which sometimes
584 results in .gcc_except_table* sections being garbage collected. */
585 if (decl
586 && symtab_node::get (decl)->implicit_section)
587 return NULL;
588 #endif
589
590 if (!flag_reorder_functions
591 || !targetm_common.have_named_sections)
592 return NULL;
 593 /* Startup code should go to the startup subsection unless it is
 594 unlikely executed (this happens especially with function splitting,
 595 where we can split away unnecessary parts of static constructors).  */
596 if (startup && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
597 {
 598 /* During LTO the tp_first_run profiling will naturally place all
 599 initialization code first.  Using a separate section is counter-productive
 600 because startup-only code may call functions which are no longer
 601 startup-only.  */
602 if (!in_lto_p
603 || !cgraph_node::get (decl)->tp_first_run
604 || !opt_for_fn (decl, flag_profile_reorder_functions))
605 return get_named_text_section (decl, ".text.startup", NULL);
606 else
607 return NULL;
608 }
609
610 /* Similarly for exit. */
611 if (exit && freq != NODE_FREQUENCY_UNLIKELY_EXECUTED)
612 return get_named_text_section (decl, ".text.exit", NULL);
613
614 /* Group cold functions together, similarly for hot code. */
615 switch (freq)
616 {
617 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
618 return get_named_text_section (decl, ".text.unlikely", NULL);
619 case NODE_FREQUENCY_HOT:
620 return get_named_text_section (decl, ".text.hot", NULL);
622 default:
623 return NULL;
624 }
625 }
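
/* Illustrative summary, not part of varasm.c, of the mapping implemented
   above when -freorder-functions is enabled and named sections are
   available:

     only_called_at_startup, not cold -> ".text.startup"
					 (skipped under LTO first-run
					  profile reordering)
     only_called_at_exit, not cold    -> ".text.exit"
     NODE_FREQUENCY_UNLIKELY_EXECUTED -> ".text.unlikely"
     NODE_FREQUENCY_HOT               -> ".text.hot"
     anything else                    -> NULL, i.e. the caller falls back
					 to the default text section.  */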
626
627 /* Return the section for function DECL.
628
629 If DECL is NULL_TREE, return the text section. We can be passed
630 NULL_TREE under some circumstances by dbxout.c at least.
631
632 If FORCE_COLD is true, return cold function section ignoring
633 the frequency info of cgraph_node. */
634
635 static section *
636 function_section_1 (tree decl, bool force_cold)
637 {
638 section *section = NULL;
639 enum node_frequency freq = NODE_FREQUENCY_NORMAL;
640 bool startup = false, exit = false;
641
642 if (decl)
643 {
644 struct cgraph_node *node = cgraph_node::get (decl);
645
646 if (node)
647 {
648 freq = node->frequency;
649 startup = node->only_called_at_startup;
650 exit = node->only_called_at_exit;
651 }
652 }
653 if (force_cold)
654 freq = NODE_FREQUENCY_UNLIKELY_EXECUTED;
655
656 #ifdef USE_SELECT_SECTION_FOR_FUNCTIONS
657 if (decl != NULL_TREE
658 && DECL_SECTION_NAME (decl) != NULL)
659 {
660 if (targetm.asm_out.function_section)
661 section = targetm.asm_out.function_section (decl, freq,
662 startup, exit);
663 if (section)
664 return section;
665 return get_named_section (decl, NULL, 0);
666 }
667 else
668 return targetm.asm_out.select_section
669 (decl, freq == NODE_FREQUENCY_UNLIKELY_EXECUTED,
670 symtab_node::get (decl)->definition_alignment ());
671 #else
672 if (targetm.asm_out.function_section)
673 section = targetm.asm_out.function_section (decl, freq, startup, exit);
674 if (section)
675 return section;
676 return hot_function_section (decl);
677 #endif
678 }
679
680 /* Return the section for function DECL.
681
682 If DECL is NULL_TREE, return the text section. We can be passed
683 NULL_TREE under some circumstances by dbxout.c at least. */
684
685 section *
686 function_section (tree decl)
687 {
688 /* Handle cases where function splitting code decides
689 to put function entry point into unlikely executed section
690 despite the fact that the function itself is not cold
691 (i.e. it is called rarely but contains a hot loop that is
692 better to live in hot subsection for the code locality). */
693 return function_section_1 (decl,
694 first_function_block_is_cold);
695 }
696
 697 /* Return the section for the current function, taking IN_COLD_SECTION_P
698 into account. */
699
700 section *
701 current_function_section (void)
702 {
703 return function_section_1 (current_function_decl, in_cold_section_p);
704 }
705
706 /* Tell assembler to switch to unlikely-to-be-executed text section. */
707
708 section *
709 unlikely_text_section (void)
710 {
711 return function_section_1 (current_function_decl, true);
712 }
713
714 /* When called within a function context, return true if the function
715 has been assigned a cold text section and if SECT is that section.
716 When called outside a function context, return true if SECT is the
717 default cold section. */
718
719 bool
720 unlikely_text_section_p (section *sect)
721 {
722 return sect == function_section_1 (current_function_decl, true);
723 }
724
725 /* Switch to the other function partition (if inside of hot section
726 into cold section, otherwise into the hot section). */
727
728 void
729 switch_to_other_text_partition (void)
730 {
731 in_cold_section_p = !in_cold_section_p;
732 switch_to_section (current_function_section ());
733 }
734
735 /* Return the read-only data section associated with function DECL. */
736
737 section *
738 default_function_rodata_section (tree decl)
739 {
740 if (decl != NULL_TREE && DECL_SECTION_NAME (decl))
741 {
742 const char *name = DECL_SECTION_NAME (decl);
743
744 if (DECL_COMDAT_GROUP (decl) && HAVE_COMDAT_GROUP)
745 {
746 const char *dot;
747 size_t len;
748 char* rname;
749
750 dot = strchr (name + 1, '.');
751 if (!dot)
752 dot = name;
753 len = strlen (dot) + 8;
754 rname = (char *) alloca (len);
755
756 strcpy (rname, ".rodata");
757 strcat (rname, dot);
758 return get_section (rname, SECTION_LINKONCE, decl);
759 }
760 /* For .gnu.linkonce.t.foo we want to use .gnu.linkonce.r.foo. */
761 else if (DECL_COMDAT_GROUP (decl)
762 && strncmp (name, ".gnu.linkonce.t.", 16) == 0)
763 {
764 size_t len = strlen (name) + 1;
765 char *rname = (char *) alloca (len);
766
767 memcpy (rname, name, len);
768 rname[14] = 'r';
769 return get_section (rname, SECTION_LINKONCE, decl);
770 }
771 /* For .text.foo we want to use .rodata.foo. */
772 else if (flag_function_sections && flag_data_sections
773 && strncmp (name, ".text.", 6) == 0)
774 {
775 size_t len = strlen (name) + 1;
776 char *rname = (char *) alloca (len + 2);
777
778 memcpy (rname, ".rodata", 7);
779 memcpy (rname + 7, name + 5, len - 5);
780 return get_section (rname, 0, decl);
781 }
782 }
783
784 return readonly_data_section;
785 }
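
/* Illustrative sketch, not part of varasm.c: the three renamings done by
   default_function_rodata_section above.  The function names are examples.

     COMDAT group:  ".text.unlikely.foo" -> ".rodata.unlikely.foo"
		    (".rodata" plus everything from the first dot onwards)
     legacy linkonce:  ".gnu.linkonce.t.foo" -> ".gnu.linkonce.r.foo"
     -ffunction-sections/-fdata-sections:  ".text.foo" -> ".rodata.foo"  */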
786
787 /* Return the read-only data section associated with function DECL
788 for targets where that section should be always the single
789 readonly data section. */
790
791 section *
792 default_no_function_rodata_section (tree decl ATTRIBUTE_UNUSED)
793 {
794 return readonly_data_section;
795 }
796
797 /* A subroutine of mergeable_string_section and mergeable_constant_section. */
798
799 static const char *
800 function_mergeable_rodata_prefix (void)
801 {
802 section *s = targetm.asm_out.function_rodata_section (current_function_decl);
803 if (SECTION_STYLE (s) == SECTION_NAMED)
804 return s->named.name;
805 else
806 return targetm.asm_out.mergeable_rodata_prefix;
807 }
808
809 /* Return the section to use for string merging. */
810
811 static section *
812 mergeable_string_section (tree decl ATTRIBUTE_UNUSED,
813 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
814 unsigned int flags ATTRIBUTE_UNUSED)
815 {
816 HOST_WIDE_INT len;
817
818 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
819 && TREE_CODE (decl) == STRING_CST
820 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
821 && align <= 256
822 && (len = int_size_in_bytes (TREE_TYPE (decl))) > 0
823 && TREE_STRING_LENGTH (decl) == len)
824 {
825 scalar_int_mode mode;
826 unsigned int modesize;
827 const char *str;
828 HOST_WIDE_INT i;
829 int j, unit;
830 const char *prefix = function_mergeable_rodata_prefix ();
831 char *name = (char *) alloca (strlen (prefix) + 30);
832
833 mode = SCALAR_INT_TYPE_MODE (TREE_TYPE (TREE_TYPE (decl)));
834 modesize = GET_MODE_BITSIZE (mode);
835 if (modesize >= 8 && modesize <= 256
836 && (modesize & (modesize - 1)) == 0)
837 {
838 if (align < modesize)
839 align = modesize;
840
841 if (!HAVE_LD_ALIGNED_SHF_MERGE && align > 8)
842 return readonly_data_section;
843
844 str = TREE_STRING_POINTER (decl);
845 unit = GET_MODE_SIZE (mode);
846
847 /* Check for embedded NUL characters. */
848 for (i = 0; i < len; i += unit)
849 {
850 for (j = 0; j < unit; j++)
851 if (str[i + j] != '\0')
852 break;
853 if (j == unit)
854 break;
855 }
856 if (i == len - unit || (unit == 1 && i == len))
857 {
858 sprintf (name, "%s.str%d.%d", prefix,
859 modesize / 8, (int) (align / 8));
860 flags |= (modesize / 8) | SECTION_MERGE | SECTION_STRINGS;
861 return get_section (name, flags, NULL);
862 }
863 }
864 }
865
866 return readonly_data_section;
867 }
868
869 /* Return the section to use for constant merging. */
870
871 section *
872 mergeable_constant_section (machine_mode mode ATTRIBUTE_UNUSED,
873 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED,
874 unsigned int flags ATTRIBUTE_UNUSED)
875 {
876 if (HAVE_GAS_SHF_MERGE && flag_merge_constants
877 && mode != VOIDmode
878 && mode != BLKmode
879 && known_le (GET_MODE_BITSIZE (mode), align)
880 && align >= 8
881 && align <= 256
882 && (align & (align - 1)) == 0
883 && (HAVE_LD_ALIGNED_SHF_MERGE ? 1 : align == 8))
884 {
885 const char *prefix = function_mergeable_rodata_prefix ();
886 char *name = (char *) alloca (strlen (prefix) + 30);
887
888 sprintf (name, "%s.cst%d", prefix, (int) (align / 8));
889 flags |= (align / 8) | SECTION_MERGE;
890 return get_section (name, flags, NULL);
891 }
892 return readonly_data_section;
893 }
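
/* Illustrative sketch, not part of varasm.c: the mergeable section names
   built by the two functions above follow a fixed pattern.  With a
   ".rodata" prefix, an 8-byte constant aligned to 8 bytes goes into
   ".rodata.cst8", and a string of 1-byte elements aligned to 1 byte goes
   into ".rodata.str1.1"; the entity size is also encoded in the low
   SECTION_ENTSIZE bits of the section flags.  */
#if 0
static void
example_mergeable_section_names (void)
{
  char name[32];
  sprintf (name, "%s.cst%d", ".rodata", 8);	    /* ".rodata.cst8" */
  sprintf (name, "%s.str%d.%d", ".rodata", 1, 1);   /* ".rodata.str1.1" */
}
#endif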
894 \f
895 /* Given NAME, a putative register name, discard any customary prefixes. */
896
897 static const char *
898 strip_reg_name (const char *name)
899 {
900 #ifdef REGISTER_PREFIX
901 if (!strncmp (name, REGISTER_PREFIX, strlen (REGISTER_PREFIX)))
902 name += strlen (REGISTER_PREFIX);
903 #endif
904 if (name[0] == '%' || name[0] == '#')
905 name++;
906 return name;
907 }
908 \f
909 /* The user has asked for a DECL to have a particular name. Set (or
910 change) it in such a way that we don't prefix an underscore to
911 it. */
912 void
913 set_user_assembler_name (tree decl, const char *name)
914 {
915 char *starred = (char *) alloca (strlen (name) + 2);
916 starred[0] = '*';
917 strcpy (starred + 1, name);
918 symtab->change_decl_assembler_name (decl, get_identifier (starred));
919 SET_DECL_RTL (decl, NULL_RTX);
920 }
921 \f
922 /* Decode an `asm' spec for a declaration as a register name.
923 Return the register number, or -1 if nothing specified,
924 or -2 if the ASMSPEC is not `cc' or `memory' and is not recognized,
925 or -3 if ASMSPEC is `cc' and is not recognized,
926 or -4 if ASMSPEC is `memory' and is not recognized.
927 Accept an exact spelling or a decimal number.
928 Prefixes such as % are optional. */
929
930 int
931 decode_reg_name_and_count (const char *asmspec, int *pnregs)
932 {
933 /* Presume just one register is clobbered. */
934 *pnregs = 1;
935
936 if (asmspec != 0)
937 {
938 int i;
939
940 /* Get rid of confusing prefixes. */
941 asmspec = strip_reg_name (asmspec);
942
943 /* Allow a decimal number as a "register name". */
944 for (i = strlen (asmspec) - 1; i >= 0; i--)
945 if (! ISDIGIT (asmspec[i]))
946 break;
947 if (asmspec[0] != 0 && i < 0)
948 {
949 i = atoi (asmspec);
950 if (i < FIRST_PSEUDO_REGISTER && i >= 0 && reg_names[i][0])
951 return i;
952 else
953 return -2;
954 }
955
956 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
957 if (reg_names[i][0]
958 && ! strcmp (asmspec, strip_reg_name (reg_names[i])))
959 return i;
960
961 #ifdef OVERLAPPING_REGISTER_NAMES
962 {
963 static const struct
964 {
965 const char *const name;
966 const int number;
967 const int nregs;
968 } table[] = OVERLAPPING_REGISTER_NAMES;
969
970 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
971 if (table[i].name[0]
972 && ! strcmp (asmspec, table[i].name))
973 {
974 *pnregs = table[i].nregs;
975 return table[i].number;
976 }
977 }
978 #endif /* OVERLAPPING_REGISTER_NAMES */
979
980 #ifdef ADDITIONAL_REGISTER_NAMES
981 {
982 static const struct { const char *const name; const int number; } table[]
983 = ADDITIONAL_REGISTER_NAMES;
984
985 for (i = 0; i < (int) ARRAY_SIZE (table); i++)
986 if (table[i].name[0]
987 && ! strcmp (asmspec, table[i].name)
988 && reg_names[table[i].number][0])
989 return table[i].number;
990 }
991 #endif /* ADDITIONAL_REGISTER_NAMES */
992
993 if (!strcmp (asmspec, "memory"))
994 return -4;
995
996 if (!strcmp (asmspec, "cc"))
997 return -3;
998
999 return -2;
1000 }
1001
1002 return -1;
1003 }
1004
1005 int
1006 decode_reg_name (const char *name)
1007 {
1008 int count;
1009 return decode_reg_name_and_count (name, &count);
1010 }
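
/* Illustrative sketch, not part of varasm.c: how a caller might interpret
   the return convention documented above decode_reg_name_and_count.
   The register name string is an example only.  */
#if 0
static void
example_decode_reg_name (void)
{
  int nregs;
  int regno = decode_reg_name_and_count ("%eax", &nregs);

  if (regno >= 0)
    ;		/* A hard register number; NREGS registers are covered.  */
  else if (regno == -1)
    ;		/* No register name was specified at all.  */
  else
    ;		/* -2, -3 or -4: unrecognized name, "cc", or "memory".  */
}
#endif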
1011
1012 \f
1013 /* Return true if DECL's initializer is suitable for a BSS section. */
1014
1015 bool
1016 bss_initializer_p (const_tree decl, bool named)
1017 {
 1018 /* Do not put non-common constants into the .bss section; they belong in
 1019 a readonly section, except when NAMED is true.  */
1020 return ((!TREE_READONLY (decl) || DECL_COMMON (decl) || named)
1021 && (DECL_INITIAL (decl) == NULL
1022 /* In LTO we have no errors in program; error_mark_node is used
1023 to mark offlined constructors. */
1024 || (DECL_INITIAL (decl) == error_mark_node
1025 && !in_lto_p)
1026 || (flag_zero_initialized_in_bss
1027 && initializer_zerop (DECL_INITIAL (decl)))));
1028 }
1029
1030 /* Compute the alignment of variable specified by DECL.
1031 DONT_OUTPUT_DATA is from assemble_variable. */
1032
1033 void
1034 align_variable (tree decl, bool dont_output_data)
1035 {
1036 unsigned int align = DECL_ALIGN (decl);
1037
 1038 /* In the case of initializing an array whose length isn't specified,
1039 where we have not yet been able to do the layout,
1040 figure out the proper alignment now. */
1041 if (dont_output_data && DECL_SIZE (decl) == 0
1042 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1043 align = MAX (align, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1044
1045 /* Some object file formats have a maximum alignment which they support.
1046 In particular, a.out format supports a maximum alignment of 4. */
1047 if (align > MAX_OFILE_ALIGNMENT)
1048 {
1049 error ("alignment of %q+D is greater than maximum object "
1050 "file alignment %d", decl,
1051 MAX_OFILE_ALIGNMENT/BITS_PER_UNIT);
1052 align = MAX_OFILE_ALIGNMENT;
1053 }
1054
1055 if (! DECL_USER_ALIGN (decl))
1056 {
1057 #ifdef DATA_ABI_ALIGNMENT
1058 unsigned int data_abi_align
1059 = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1060 /* For backwards compatibility, don't assume the ABI alignment for
1061 TLS variables. */
1062 if (! DECL_THREAD_LOCAL_P (decl) || data_abi_align <= BITS_PER_WORD)
1063 align = data_abi_align;
1064 #endif
1065
1066 /* On some machines, it is good to increase alignment sometimes.
1067 But as DECL_ALIGN is used both for actually emitting the variable
1068 and for code accessing the variable as guaranteed alignment, we
1069 can only increase the alignment if it is a performance optimization
1070 if the references to it must bind to the current definition. */
1071 if (decl_binds_to_current_def_p (decl)
1072 && !DECL_VIRTUAL_P (decl))
1073 {
1074 #ifdef DATA_ALIGNMENT
1075 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1076 /* Don't increase alignment too much for TLS variables - TLS space
1077 is too precious. */
1078 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1079 align = data_align;
1080 #endif
1081 if (DECL_INITIAL (decl) != 0
1082 /* In LTO we have no errors in program; error_mark_node is used
1083 to mark offlined constructors. */
1084 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1085 {
1086 unsigned int const_align
1087 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1088 /* Don't increase alignment too much for TLS variables - TLS
1089 space is too precious. */
1090 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1091 align = const_align;
1092 }
1093 }
1094 }
1095
1096 /* Reset the alignment in case we have made it tighter, so we can benefit
1097 from it in get_pointer_alignment. */
1098 SET_DECL_ALIGN (decl, align);
1099 }
1100
1101 /* Return DECL_ALIGN (decl), possibly increased for optimization purposes
1102 beyond what align_variable returned. */
1103
1104 static unsigned int
1105 get_variable_align (tree decl)
1106 {
1107 unsigned int align = DECL_ALIGN (decl);
1108
1109 /* For user aligned vars or static vars align_variable already did
1110 everything. */
1111 if (DECL_USER_ALIGN (decl) || !TREE_PUBLIC (decl))
1112 return align;
1113
1114 #ifdef DATA_ABI_ALIGNMENT
1115 if (DECL_THREAD_LOCAL_P (decl))
1116 align = DATA_ABI_ALIGNMENT (TREE_TYPE (decl), align);
1117 #endif
1118
 1119 /* For decls that bind to the current definition, align_variable
 1120 also did everything, except that it did not assume the ABI-required
 1121 alignment of TLS variables.  For other vars, increase the alignment
 1122 here as an optimization.  */
1123 if (!decl_binds_to_current_def_p (decl))
1124 {
1125 /* On some machines, it is good to increase alignment sometimes. */
1126 #ifdef DATA_ALIGNMENT
1127 unsigned int data_align = DATA_ALIGNMENT (TREE_TYPE (decl), align);
1128 /* Don't increase alignment too much for TLS variables - TLS space
1129 is too precious. */
1130 if (! DECL_THREAD_LOCAL_P (decl) || data_align <= BITS_PER_WORD)
1131 align = data_align;
1132 #endif
1133 if (DECL_INITIAL (decl) != 0
1134 /* In LTO we have no errors in program; error_mark_node is used
1135 to mark offlined constructors. */
1136 && (in_lto_p || DECL_INITIAL (decl) != error_mark_node))
1137 {
1138 unsigned int const_align
1139 = targetm.constant_alignment (DECL_INITIAL (decl), align);
1140 /* Don't increase alignment too much for TLS variables - TLS space
1141 is too precious. */
1142 if (! DECL_THREAD_LOCAL_P (decl) || const_align <= BITS_PER_WORD)
1143 align = const_align;
1144 }
1145 }
1146
1147 return align;
1148 }
1149
1150 /* Return the section into which the given VAR_DECL or CONST_DECL
1151 should be placed. PREFER_NOSWITCH_P is true if a noswitch
1152 section should be used wherever possible. */
1153
1154 section *
1155 get_variable_section (tree decl, bool prefer_noswitch_p)
1156 {
1157 addr_space_t as = ADDR_SPACE_GENERIC;
1158 int reloc;
1159 varpool_node *vnode = varpool_node::get (decl);
1160 if (vnode)
1161 {
1162 vnode = vnode->ultimate_alias_target ();
1163 decl = vnode->decl;
1164 }
1165
1166 if (TREE_TYPE (decl) != error_mark_node)
1167 as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1168
1169 /* We need the constructor to figure out reloc flag. */
1170 if (vnode)
1171 vnode->get_constructor ();
1172
1173 if (DECL_COMMON (decl))
1174 {
1175 /* If the decl has been given an explicit section name, or it resides
1176 in a non-generic address space, then it isn't common, and shouldn't
1177 be handled as such. */
1178 gcc_assert (DECL_SECTION_NAME (decl) == NULL
1179 && ADDR_SPACE_GENERIC_P (as));
1180 if (DECL_THREAD_LOCAL_P (decl))
1181 return tls_comm_section;
1182 else if (TREE_PUBLIC (decl) && bss_initializer_p (decl))
1183 return comm_section;
1184 }
1185
1186 if (DECL_INITIAL (decl) == error_mark_node)
1187 reloc = contains_pointers_p (TREE_TYPE (decl)) ? 3 : 0;
1188 else if (DECL_INITIAL (decl))
1189 reloc = compute_reloc_for_constant (DECL_INITIAL (decl));
1190 else
1191 reloc = 0;
1192
1193 resolve_unique_section (decl, reloc, flag_data_sections);
1194 if (IN_NAMED_SECTION (decl))
1195 {
1196 section *sect = get_named_section (decl, NULL, reloc);
1197
1198 if ((sect->common.flags & SECTION_BSS)
1199 && !bss_initializer_p (decl, true))
1200 {
1201 error_at (DECL_SOURCE_LOCATION (decl),
1202 "only zero initializers are allowed in section %qs",
1203 sect->named.name);
1204 DECL_INITIAL (decl) = error_mark_node;
1205 }
1206 return sect;
1207 }
1208
1209 if (ADDR_SPACE_GENERIC_P (as)
1210 && !DECL_THREAD_LOCAL_P (decl)
1211 && !(prefer_noswitch_p && targetm.have_switchable_bss_sections)
1212 && bss_initializer_p (decl))
1213 {
1214 if (!TREE_PUBLIC (decl)
1215 && !((flag_sanitize & SANITIZE_ADDRESS)
1216 && asan_protect_global (decl)))
1217 return lcomm_section;
1218 if (bss_noswitch_section)
1219 return bss_noswitch_section;
1220 }
1221
1222 return targetm.asm_out.select_section (decl, reloc,
1223 get_variable_align (decl));
1224 }
1225
1226 /* Return the block into which object_block DECL should be placed. */
1227
1228 static struct object_block *
1229 get_block_for_decl (tree decl)
1230 {
1231 section *sect;
1232
1233 if (VAR_P (decl))
1234 {
1235 /* The object must be defined in this translation unit. */
1236 if (DECL_EXTERNAL (decl))
1237 return NULL;
1238
1239 /* There's no point using object blocks for something that is
1240 isolated by definition. */
1241 if (DECL_COMDAT_GROUP (decl))
1242 return NULL;
1243 }
1244
1245 /* We can only calculate block offsets if the decl has a known
1246 constant size. */
1247 if (DECL_SIZE_UNIT (decl) == NULL)
1248 return NULL;
1249 if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (decl)))
1250 return NULL;
1251
1252 /* Find out which section should contain DECL. We cannot put it into
1253 an object block if it requires a standalone definition. */
1254 if (VAR_P (decl))
1255 align_variable (decl, 0);
1256 sect = get_variable_section (decl, true);
1257 if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
1258 return NULL;
1259
1260 return get_block_for_section (sect);
1261 }
1262
1263 /* Make sure block symbol SYMBOL is in block BLOCK. */
1264
1265 static void
1266 change_symbol_block (rtx symbol, struct object_block *block)
1267 {
1268 if (block != SYMBOL_REF_BLOCK (symbol))
1269 {
1270 gcc_assert (SYMBOL_REF_BLOCK_OFFSET (symbol) < 0);
1271 SYMBOL_REF_BLOCK (symbol) = block;
1272 }
1273 }
1274
1275 /* Return true if it is possible to put DECL in an object_block. */
1276
1277 static bool
1278 use_blocks_for_decl_p (tree decl)
1279 {
1280 struct symtab_node *snode;
1281
1282 /* Only data DECLs can be placed into object blocks. */
1283 if (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
1284 return false;
1285
1286 /* DECL_INITIAL (decl) set to decl is a hack used for some decls that
1287 are never used from code directly and we never want object block handling
1288 for those. */
1289 if (DECL_INITIAL (decl) == decl)
1290 return false;
1291
1292 /* If this decl is an alias, then we don't want to emit a
1293 definition. */
1294 if (VAR_P (decl)
1295 && (snode = symtab_node::get (decl)) != NULL
1296 && snode->alias)
1297 return false;
1298
1299 return targetm.use_blocks_for_decl_p (decl);
1300 }
1301
1302 /* Follow the IDENTIFIER_TRANSPARENT_ALIAS chain starting at *ALIAS
1303 until we find an identifier that is not itself a transparent alias.
1304 Modify the alias passed to it by reference (and all aliases on the
1305 way to the ultimate target), such that they do not have to be
1306 followed again, and return the ultimate target of the alias
1307 chain. */
1308
1309 static inline tree
1310 ultimate_transparent_alias_target (tree *alias)
1311 {
1312 tree target = *alias;
1313
1314 if (IDENTIFIER_TRANSPARENT_ALIAS (target))
1315 {
1316 gcc_assert (TREE_CHAIN (target));
1317 target = ultimate_transparent_alias_target (&TREE_CHAIN (target));
1318 gcc_assert (! IDENTIFIER_TRANSPARENT_ALIAS (target)
1319 && ! TREE_CHAIN (target));
1320 *alias = target;
1321 }
1322
1323 return target;
1324 }
1325
1326 /* Create the DECL_RTL for a VAR_DECL or FUNCTION_DECL. DECL should
1327 have static storage duration. In other words, it should not be an
1328 automatic variable, including PARM_DECLs.
1329
1330 There is, however, one exception: this function handles variables
1331 explicitly placed in a particular register by the user.
1332
1333 This is never called for PARM_DECL nodes. */
1334
1335 void
1336 make_decl_rtl (tree decl)
1337 {
1338 const char *name = 0;
1339 int reg_number;
1340 tree id;
1341 rtx x;
1342
1343 /* Check that we are not being given an automatic variable. */
1344 gcc_assert (TREE_CODE (decl) != PARM_DECL
1345 && TREE_CODE (decl) != RESULT_DECL);
1346
1347 /* A weak alias has TREE_PUBLIC set but not the other bits. */
1348 gcc_assert (!VAR_P (decl)
1349 || TREE_STATIC (decl)
1350 || TREE_PUBLIC (decl)
1351 || DECL_EXTERNAL (decl)
1352 || DECL_REGISTER (decl));
1353
1354 /* And that we were not given a type or a label. */
1355 gcc_assert (TREE_CODE (decl) != TYPE_DECL
1356 && TREE_CODE (decl) != LABEL_DECL);
1357
1358 /* For a duplicate declaration, we can be called twice on the
1359 same DECL node. Don't discard the RTL already made. */
1360 if (DECL_RTL_SET_P (decl))
1361 {
1362 /* If the old RTL had the wrong mode, fix the mode. */
1363 x = DECL_RTL (decl);
1364 if (GET_MODE (x) != DECL_MODE (decl))
1365 SET_DECL_RTL (decl, adjust_address_nv (x, DECL_MODE (decl), 0));
1366
1367 if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1368 return;
1369
1370 /* ??? Another way to do this would be to maintain a hashed
1371 table of such critters. Instead of adding stuff to a DECL
1372 to give certain attributes to it, we could use an external
1373 hash map from DECL to set of attributes. */
1374
1375 /* Let the target reassign the RTL if it wants.
1376 This is necessary, for example, when one machine specific
1377 decl attribute overrides another. */
1378 targetm.encode_section_info (decl, DECL_RTL (decl), false);
1379
1380 /* If the symbol has a SYMBOL_REF_BLOCK field, update it based
1381 on the new decl information. */
1382 if (MEM_P (x)
1383 && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
1384 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (x, 0)))
1385 change_symbol_block (XEXP (x, 0), get_block_for_decl (decl));
1386
1387 return;
1388 }
1389
1390 /* If this variable belongs to the global constant pool, retrieve the
1391 pre-computed RTL or recompute it in LTO mode. */
1392 if (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
1393 {
1394 SET_DECL_RTL (decl, output_constant_def (DECL_INITIAL (decl), 1));
1395 return;
1396 }
1397
1398 id = DECL_ASSEMBLER_NAME (decl);
1399 name = IDENTIFIER_POINTER (id);
1400
1401 if (name[0] != '*' && TREE_CODE (decl) != FUNCTION_DECL
1402 && DECL_REGISTER (decl))
1403 {
1404 error ("register name not specified for %q+D", decl);
1405 }
1406 else if (TREE_CODE (decl) != FUNCTION_DECL && DECL_REGISTER (decl))
1407 {
1408 const char *asmspec = name+1;
1409 machine_mode mode = DECL_MODE (decl);
1410 reg_number = decode_reg_name (asmspec);
1411 /* First detect errors in declaring global registers. */
1412 if (reg_number == -1)
1413 error ("register name not specified for %q+D", decl);
1414 else if (reg_number < 0)
1415 error ("invalid register name for %q+D", decl);
1416 else if (mode == BLKmode)
1417 error ("data type of %q+D isn%'t suitable for a register",
1418 decl);
1419 else if (!in_hard_reg_set_p (accessible_reg_set, mode, reg_number))
1420 error ("the register specified for %q+D cannot be accessed"
1421 " by the current target", decl);
1422 else if (!in_hard_reg_set_p (operand_reg_set, mode, reg_number))
1423 error ("the register specified for %q+D is not general enough"
1424 " to be used as a register variable", decl);
1425 else if (!targetm.hard_regno_mode_ok (reg_number, mode))
1426 error ("register specified for %q+D isn%'t suitable for data type",
1427 decl);
1428 /* Now handle properly declared static register variables. */
1429 else
1430 {
1431 int nregs;
1432
1433 if (DECL_INITIAL (decl) != 0 && TREE_STATIC (decl))
1434 {
1435 DECL_INITIAL (decl) = 0;
1436 error ("global register variable has initial value");
1437 }
1438 if (TREE_THIS_VOLATILE (decl))
1439 warning (OPT_Wvolatile_register_var,
1440 "optimization may eliminate reads and/or "
1441 "writes to register variables");
1442
1443 /* If the user specified one of the eliminables registers here,
1444 e.g., FRAME_POINTER_REGNUM, we don't want to get this variable
1445 confused with that register and be eliminated. This usage is
1446 somewhat suspect... */
1447
1448 SET_DECL_RTL (decl, gen_raw_REG (mode, reg_number));
1449 ORIGINAL_REGNO (DECL_RTL (decl)) = reg_number;
1450 REG_USERVAR_P (DECL_RTL (decl)) = 1;
1451
1452 if (TREE_STATIC (decl))
1453 {
1454 /* Make this register global, so not usable for anything
1455 else. */
1456 #ifdef ASM_DECLARE_REGISTER_GLOBAL
1457 name = IDENTIFIER_POINTER (DECL_NAME (decl));
1458 ASM_DECLARE_REGISTER_GLOBAL (asm_out_file, decl, reg_number, name);
1459 #endif
1460 nregs = hard_regno_nregs (reg_number, mode);
1461 while (nregs > 0)
1462 globalize_reg (decl, reg_number + --nregs);
1463 }
1464
1465 /* As a register variable, it has no section. */
1466 return;
1467 }
1468 /* Avoid internal errors from invalid register
1469 specifications. */
1470 SET_DECL_ASSEMBLER_NAME (decl, NULL_TREE);
1471 DECL_HARD_REGISTER (decl) = 0;
1472 /* Also avoid SSA inconsistencies by pretending this is an external
1473 decl now. */
1474 DECL_EXTERNAL (decl) = 1;
1475 return;
1476 }
1477 /* Now handle ordinary static variables and functions (in memory).
1478 Also handle vars declared register invalidly. */
1479 else if (name[0] == '*')
1480 {
1481 #ifdef REGISTER_PREFIX
1482 if (strlen (REGISTER_PREFIX) != 0)
1483 {
1484 reg_number = decode_reg_name (name);
1485 if (reg_number >= 0 || reg_number == -3)
1486 error ("register name given for non-register variable %q+D", decl);
1487 }
1488 #endif
1489 }
1490
1491 /* Specifying a section attribute on a variable forces it into a
1492 non-.bss section, and thus it cannot be common. */
 1493 /* FIXME: In general this code should not be necessary because
 1494 the visibility pass is doing the same work.  But notice_global_symbol
 1495 is called early and it needs to make DECL_RTL to get the name.
 1496 We take care of recomputing the DECL_RTL after visibility is changed.  */
1497 if (VAR_P (decl)
1498 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1499 && DECL_SECTION_NAME (decl) != NULL
1500 && DECL_INITIAL (decl) == NULL_TREE
1501 && DECL_COMMON (decl))
1502 DECL_COMMON (decl) = 0;
1503
1504 /* Variables can't be both common and weak. */
1505 if (VAR_P (decl) && DECL_WEAK (decl))
1506 DECL_COMMON (decl) = 0;
1507
1508 if (use_object_blocks_p () && use_blocks_for_decl_p (decl))
1509 x = create_block_symbol (name, get_block_for_decl (decl), -1);
1510 else
1511 {
1512 machine_mode address_mode = Pmode;
1513 if (TREE_TYPE (decl) != error_mark_node)
1514 {
1515 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
1516 address_mode = targetm.addr_space.address_mode (as);
1517 }
1518 x = gen_rtx_SYMBOL_REF (address_mode, name);
1519 }
1520 SYMBOL_REF_WEAK (x) = DECL_WEAK (decl);
1521 SET_SYMBOL_REF_DECL (x, decl);
1522
1523 x = gen_rtx_MEM (DECL_MODE (decl), x);
1524 if (TREE_CODE (decl) != FUNCTION_DECL)
1525 set_mem_attributes (x, decl, 1);
1526 SET_DECL_RTL (decl, x);
1527
1528 /* Optionally set flags or add text to the name to record information
1529 such as that it is a function name.
1530 If the name is changed, the macro ASM_OUTPUT_LABELREF
1531 will have to know how to strip this information. */
1532 targetm.encode_section_info (decl, DECL_RTL (decl), true);
1533 }
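
/* Illustrative note, not part of varasm.c: the DECL_REGISTER path in
   make_decl_rtl above handles user source such as

     register int counter asm ("r13");

   where "r13" is an example register name decoded by decode_reg_name;
   for a TREE_STATIC decl the chosen hard register is then globalized so
   the register allocator will not use it for anything else.  */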
1534
1535 /* Like make_decl_rtl, but inhibit creation of new alias sets when
1536 calling make_decl_rtl. Also, reset DECL_RTL before returning the
1537 rtl. */
1538
1539 rtx
1540 make_decl_rtl_for_debug (tree decl)
1541 {
1542 unsigned int save_aliasing_flag;
1543 rtx rtl;
1544
1545 if (DECL_RTL_SET_P (decl))
1546 return DECL_RTL (decl);
1547
1548 /* Kludge alert! Somewhere down the call chain, make_decl_rtl will
1549 call new_alias_set. If running with -fcompare-debug, sometimes
1550 we do not want to create alias sets that will throw the alias
1551 numbers off in the comparison dumps. So... clearing
1552 flag_strict_aliasing will keep new_alias_set() from creating a
1553 new set. */
1554 save_aliasing_flag = flag_strict_aliasing;
1555 flag_strict_aliasing = 0;
1556
1557 rtl = DECL_RTL (decl);
1558 /* Reset DECL_RTL back, as various parts of the compiler expects
1559 DECL_RTL set meaning it is actually going to be output. */
1560 SET_DECL_RTL (decl, NULL);
1561
1562 flag_strict_aliasing = save_aliasing_flag;
1563 return rtl;
1564 }
1565 \f
1566 /* Output a string of literal assembler code
1567 for an `asm' keyword used between functions. */
1568
1569 void
1570 assemble_asm (tree string)
1571 {
1572 const char *p;
1573 app_enable ();
1574
1575 if (TREE_CODE (string) == ADDR_EXPR)
1576 string = TREE_OPERAND (string, 0);
1577
1578 p = TREE_STRING_POINTER (string);
1579 fprintf (asm_out_file, "%s%s\n", p[0] == '\t' ? "" : "\t", p);
1580 }
1581
1582 /* Write the address of the entity given by SYMBOL to SEC. */
1583 void
1584 assemble_addr_to_section (rtx symbol, section *sec)
1585 {
1586 switch_to_section (sec);
1587 assemble_align (POINTER_SIZE);
1588 assemble_integer (symbol, POINTER_SIZE_UNITS, POINTER_SIZE, 1);
1589 }
1590
1591 /* Return the numbered .ctors.N (if CONSTRUCTOR_P) or .dtors.N (if
1592 not) section for PRIORITY. */
1593 section *
1594 get_cdtor_priority_section (int priority, bool constructor_p)
1595 {
1596 /* Buffer conservatively large enough for the full range of a 32-bit
1597 int plus the text below. */
1598 char buf[18];
1599
1600 /* ??? This only works reliably with the GNU linker. */
1601 sprintf (buf, "%s.%.5u",
1602 constructor_p ? ".ctors" : ".dtors",
1603 /* Invert the numbering so the linker puts us in the proper
1604 order; constructors are run from right to left, and the
1605 linker sorts in increasing order. */
1606 MAX_INIT_PRIORITY - priority);
1607 return get_section (buf, SECTION_WRITE, NULL);
1608 }
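
/* Illustrative sketch, not part of varasm.c: the numbered cdtor sections
   invert the priority so the linker's lexicographic sort runs constructors
   in the intended order.  Assuming MAX_INIT_PRIORITY is 65535, priority
   101 maps to ".ctors.65434", while the default priority uses plain
   ".ctors" (see default_named_section_asm_out_constructor below).  */
#if 0
static void
example_cdtor_section_name (void)
{
  char buf[18];
  sprintf (buf, "%s.%.5u", ".ctors", 65535 - 101);  /* ".ctors.65434" */
}
#endif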
1609
1610 void
1611 default_named_section_asm_out_destructor (rtx symbol, int priority)
1612 {
1613 section *sec;
1614
1615 if (priority != DEFAULT_INIT_PRIORITY)
1616 sec = get_cdtor_priority_section (priority,
1617 /*constructor_p=*/false);
1618 else
1619 sec = get_section (".dtors", SECTION_WRITE, NULL);
1620
1621 assemble_addr_to_section (symbol, sec);
1622 }
1623
1624 #ifdef DTORS_SECTION_ASM_OP
1625 void
1626 default_dtor_section_asm_out_destructor (rtx symbol,
1627 int priority ATTRIBUTE_UNUSED)
1628 {
1629 assemble_addr_to_section (symbol, dtors_section);
1630 }
1631 #endif
1632
1633 void
1634 default_named_section_asm_out_constructor (rtx symbol, int priority)
1635 {
1636 section *sec;
1637
1638 if (priority != DEFAULT_INIT_PRIORITY)
1639 sec = get_cdtor_priority_section (priority,
1640 /*constructor_p=*/true);
1641 else
1642 sec = get_section (".ctors", SECTION_WRITE, NULL);
1643
1644 assemble_addr_to_section (symbol, sec);
1645 }
1646
1647 #ifdef CTORS_SECTION_ASM_OP
1648 void
1649 default_ctor_section_asm_out_constructor (rtx symbol,
1650 int priority ATTRIBUTE_UNUSED)
1651 {
1652 assemble_addr_to_section (symbol, ctors_section);
1653 }
1654 #endif
1655 \f
1656 /* CONSTANT_POOL_BEFORE_FUNCTION may be defined as an expression with
1657 a nonzero value if the constant pool should be output before the
 1658 start of the function, or a zero value if the pool should be output
1659 after the end of the function. The default is to put it before the
1660 start. */
1661
1662 #ifndef CONSTANT_POOL_BEFORE_FUNCTION
1663 #define CONSTANT_POOL_BEFORE_FUNCTION 1
1664 #endif
1665
1666 /* DECL is an object (either VAR_DECL or FUNCTION_DECL) which is going
1667 to be output to assembler.
1668 Set first_global_object_name and weak_global_object_name as appropriate. */
1669
1670 void
1671 notice_global_symbol (tree decl)
1672 {
1673 const char **t = &first_global_object_name;
1674
1675 if (first_global_object_name
1676 || !TREE_PUBLIC (decl)
1677 || DECL_EXTERNAL (decl)
1678 || !DECL_NAME (decl)
1679 || (VAR_P (decl) && DECL_HARD_REGISTER (decl))
1680 || (TREE_CODE (decl) != FUNCTION_DECL
1681 && (!VAR_P (decl)
1682 || (DECL_COMMON (decl)
1683 && (DECL_INITIAL (decl) == 0
1684 || DECL_INITIAL (decl) == error_mark_node)))))
1685 return;
1686
 1687 /* We win when a global object is found, but it is useful to know about a weak
 1688 symbol as well so we can produce nicer unique names.  */
1689 if (DECL_WEAK (decl) || DECL_ONE_ONLY (decl) || flag_shlib)
1690 t = &weak_global_object_name;
1691
1692 if (!*t)
1693 {
1694 tree id = DECL_ASSEMBLER_NAME (decl);
1695 ultimate_transparent_alias_target (&id);
1696 *t = ggc_strdup (targetm.strip_name_encoding (IDENTIFIER_POINTER (id)));
1697 }
1698 }
1699
1700 /* If not using flag_reorder_blocks_and_partition, decide early whether the
1701 current function goes into the cold section, so that targets can use
1702 current_function_section during RTL expansion. DECL describes the
1703 function. */
1704
1705 void
1706 decide_function_section (tree decl)
1707 {
1708 first_function_block_is_cold = false;
1709
1710 if (DECL_SECTION_NAME (decl))
1711 {
1712 struct cgraph_node *node = cgraph_node::get (current_function_decl);
1713 /* Calls to function_section rely on first_function_block_is_cold
1714 being accurate. */
1715 first_function_block_is_cold = (node
1716 && node->frequency
1717 == NODE_FREQUENCY_UNLIKELY_EXECUTED);
1718 }
1719
1720 in_cold_section_p = first_function_block_is_cold;
1721 }
1722
1723 /* Get the function's name, as described by its RTL. This may be
1724 different from the DECL_NAME name used in the source file. */
1725 const char *
1726 get_fnname_from_decl (tree decl)
1727 {
1728 rtx x = DECL_RTL (decl);
1729 gcc_assert (MEM_P (x));
1730 x = XEXP (x, 0);
1731 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1732 return XSTR (x, 0);
1733 }
1734
 1735 /* Output assembler code for the constant pool of a function, and the code
 1736 associated with defining the name of the function.  DECL describes the function.
1737 NAME is the function's name. For the constant pool, we use the current
1738 constant pool data. */
1739
1740 void
1741 assemble_start_function (tree decl, const char *fnname)
1742 {
1743 int align;
1744 char tmp_label[100];
1745 bool hot_label_written = false;
1746
1747 if (crtl->has_bb_partition)
1748 {
1749 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTB", const_labelno);
1750 crtl->subsections.hot_section_label = ggc_strdup (tmp_label);
1751 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDB", const_labelno);
1752 crtl->subsections.cold_section_label = ggc_strdup (tmp_label);
1753 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LHOTE", const_labelno);
1754 crtl->subsections.hot_section_end_label = ggc_strdup (tmp_label);
1755 ASM_GENERATE_INTERNAL_LABEL (tmp_label, "LCOLDE", const_labelno);
1756 crtl->subsections.cold_section_end_label = ggc_strdup (tmp_label);
1757 const_labelno++;
1758 cold_function_name = NULL_TREE;
1759 }
1760 else
1761 {
1762 crtl->subsections.hot_section_label = NULL;
1763 crtl->subsections.cold_section_label = NULL;
1764 crtl->subsections.hot_section_end_label = NULL;
1765 crtl->subsections.cold_section_end_label = NULL;
1766 }
1767
1768 /* The following code does not need preprocessing in the assembler. */
1769
1770 app_disable ();
1771
1772 if (CONSTANT_POOL_BEFORE_FUNCTION)
1773 output_constant_pool (fnname, decl);
1774
1775 align = symtab_node::get (decl)->definition_alignment ();
1776
1777 /* Make sure the hot and cold text (code) sections are properly
1778 aligned. This is necessary here in the case where the function
1779 has both hot and cold sections, because we don't want to re-set
1780 the alignment when the section switch happens mid-function. */
1781
1782 if (crtl->has_bb_partition)
1783 {
1784 first_function_block_is_cold = false;
1785
1786 switch_to_section (unlikely_text_section ());
1787 assemble_align (align);
1788 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_label);
1789
1790 /* When the function starts with a cold section, we need to explicitly
1791 align the hot section and write out the hot section label.
1792 But if the current function is a thunk, we do not have a CFG. */
1793 if (!cfun->is_thunk
1794 && BB_PARTITION (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) == BB_COLD_PARTITION)
1795 {
1796 switch_to_section (text_section);
1797 assemble_align (align);
1798 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1799 hot_label_written = true;
1800 first_function_block_is_cold = true;
1801 }
1802 in_cold_section_p = first_function_block_is_cold;
1803 }
1804
1805
1806 /* Switch to the correct text section for the start of the function. */
1807
1808 switch_to_section (function_section (decl));
1809 if (crtl->has_bb_partition && !hot_label_written)
1810 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_label);
1811
1812 /* Tell assembler to move to target machine's alignment for functions. */
1813 align = floor_log2 (align / BITS_PER_UNIT);
1814 if (align > 0)
1815 {
1816 ASM_OUTPUT_ALIGN (asm_out_file, align);
1817 }
1818
1819 /* Handle a user-specified function alignment.
1820 Note that we still need to align to DECL_ALIGN, as above,
1821 because ASM_OUTPUT_MAX_SKIP_ALIGN might not do any alignment at all. */
1822 if (! DECL_USER_ALIGN (decl)
1823 && align_functions.levels[0].log > align
1824 && optimize_function_for_speed_p (cfun))
1825 {
1826 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1827 int align_log = align_functions.levels[0].log;
1828 #endif
1829 int max_skip = align_functions.levels[0].maxskip;
1830 if (flag_limit_function_alignment && crtl->max_insn_address > 0
1831 && max_skip >= crtl->max_insn_address)
1832 max_skip = crtl->max_insn_address - 1;
1833
1834 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1835 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file, align_log, max_skip);
1836 if (max_skip == align_functions.levels[0].maxskip)
1837 ASM_OUTPUT_MAX_SKIP_ALIGN (asm_out_file,
1838 align_functions.levels[1].log,
1839 align_functions.levels[1].maxskip);
1840 #else
1841 ASM_OUTPUT_ALIGN (asm_out_file, align_functions.levels[0].log);
1842 #endif
1843 }
1844
1845 #ifdef ASM_OUTPUT_FUNCTION_PREFIX
1846 ASM_OUTPUT_FUNCTION_PREFIX (asm_out_file, fnname);
1847 #endif
1848
1849 if (!DECL_IGNORED_P (decl))
1850 (*debug_hooks->begin_function) (decl);
1851
1852 /* Make function name accessible from other files, if appropriate. */
1853
1854 if (TREE_PUBLIC (decl))
1855 {
1856 notice_global_symbol (decl);
1857
1858 globalize_decl (decl);
1859
1860 maybe_assemble_visibility (decl);
1861 }
1862
1863 if (DECL_PRESERVE_P (decl))
1864 targetm.asm_out.mark_decl_preserved (fnname);
1865
1866 unsigned short patch_area_size = crtl->patch_area_size;
1867 unsigned short patch_area_entry = crtl->patch_area_entry;
1868
1869 /* Emit the patching area before the entry label, if any. */
1870 if (patch_area_entry > 0)
1871 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1872 patch_area_entry, true);
1873
1874 /* Do any machine/system dependent processing of the function name. */
1875 #ifdef ASM_DECLARE_FUNCTION_NAME
1876 ASM_DECLARE_FUNCTION_NAME (asm_out_file, fnname, current_function_decl);
1877 #else
1878 /* The standard thing is just to output a label for the function. */
1879 ASM_OUTPUT_FUNCTION_LABEL (asm_out_file, fnname, current_function_decl);
1880 #endif /* ASM_DECLARE_FUNCTION_NAME */
1881
1882 /* And the area after the label. Record it if we haven't done so yet. */
1883 if (patch_area_size > patch_area_entry)
1884 targetm.asm_out.print_patchable_function_entry (asm_out_file,
1885 patch_area_size
1886 - patch_area_entry,
1887 patch_area_entry == 0);
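  /* Worked example (option values hypothetical): with
     -fpatchable-function-entry=3,1 the middle end records
     crtl->patch_area_size == 3 and crtl->patch_area_entry == 1, so the
     first print_patchable_function_entry call above emitted one NOP
     before the function label and the call just above this comment
     emits the remaining two (3 - 1) after it.  */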
1888
1889 if (lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (decl)))
1890 saw_no_split_stack = true;
1891 }
1892
1893 /* Output assembler code associated with defining the size of the
1894 function. DECL describes the function. NAME is the function's name. */
1895
1896 void
1897 assemble_end_function (tree decl, const char *fnname ATTRIBUTE_UNUSED)
1898 {
1899 #ifdef ASM_DECLARE_FUNCTION_SIZE
1900 /* We could have switched section in the middle of the function. */
1901 if (crtl->has_bb_partition)
1902 switch_to_section (function_section (decl));
1903 ASM_DECLARE_FUNCTION_SIZE (asm_out_file, fnname, decl);
1904 #endif
1905 if (! CONSTANT_POOL_BEFORE_FUNCTION)
1906 {
1907 output_constant_pool (fnname, decl);
1908 switch_to_section (function_section (decl)); /* need to switch back */
1909 }
1910 /* Output labels for end of hot/cold text sections (to be used by
1911 debug info.) */
1912 if (crtl->has_bb_partition)
1913 {
1914 section *save_text_section;
1915
1916 save_text_section = in_section;
1917 switch_to_section (unlikely_text_section ());
1918 #ifdef ASM_DECLARE_COLD_FUNCTION_SIZE
1919 if (cold_function_name != NULL_TREE)
1920 ASM_DECLARE_COLD_FUNCTION_SIZE (asm_out_file,
1921 IDENTIFIER_POINTER (cold_function_name),
1922 decl);
1923 #endif
1924 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.cold_section_end_label);
1925 if (first_function_block_is_cold)
1926 switch_to_section (text_section);
1927 else
1928 switch_to_section (function_section (decl));
1929 ASM_OUTPUT_LABEL (asm_out_file, crtl->subsections.hot_section_end_label);
1930 switch_to_section (save_text_section);
1931 }
1932 }
1933 \f
1934 /* Assemble code to leave SIZE bytes of zeros. */
1935
1936 void
1937 assemble_zeros (unsigned HOST_WIDE_INT size)
1938 {
1939 /* Do no output if -fsyntax-only. */
1940 if (flag_syntax_only)
1941 return;
1942
1943 #ifdef ASM_NO_SKIP_IN_TEXT
1944 /* The `space' pseudo in the text section outputs nop insns rather than 0s,
1945 so we must output 0s explicitly in the text section. */
1946 if (ASM_NO_SKIP_IN_TEXT && (in_section->common.flags & SECTION_CODE) != 0)
1947 {
1948 unsigned HOST_WIDE_INT i;
1949 for (i = 0; i < size; i++)
1950 assemble_integer (const0_rtx, 1, BITS_PER_UNIT, 1);
1951 }
1952 else
1953 #endif
1954 if (size > 0)
1955 ASM_OUTPUT_SKIP (asm_out_file, size);
1956 }
1957
1958 /* Assemble an alignment pseudo op for an ALIGN-bit boundary. */
1959
1960 void
1961 assemble_align (unsigned int align)
1962 {
1963 if (align > BITS_PER_UNIT)
1964 {
1965 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
1966 }
1967 }
1968
1969 /* Assemble a string constant with the specified C string as contents. */
1970
1971 void
1972 assemble_string (const char *p, int size)
1973 {
1974 int pos = 0;
1975 int maximum = 2000;
1976
1977 /* If the string is very long, split it up. */
1978
1979 while (pos < size)
1980 {
1981 int thissize = size - pos;
1982 if (thissize > maximum)
1983 thissize = maximum;
1984
1985 ASM_OUTPUT_ASCII (asm_out_file, p, thissize);
1986
1987 pos += thissize;
1988 p += thissize;
1989 }
1990 }
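/* Worked example: a 5000-byte string constant is emitted by the loop
   above as three ASM_OUTPUT_ASCII chunks of 2000, 2000 and 1000 bytes.
   The split only keeps individual directives at a length every assembler
   is known to accept; the bytes reaching the object file are the same as
   for a single directive.  */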
1991
1992 \f
1993 /* A noswitch_section_callback for lcomm_section. */
1994
1995 static bool
1996 emit_local (tree decl ATTRIBUTE_UNUSED,
1997 const char *name ATTRIBUTE_UNUSED,
1998 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
1999 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2000 {
2001 #if defined ASM_OUTPUT_ALIGNED_DECL_LOCAL
2002 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2003 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, decl, name,
2004 size, align);
2005 return true;
2006 #elif defined ASM_OUTPUT_ALIGNED_LOCAL
2007 unsigned int align = symtab_node::get (decl)->definition_alignment ();
2008 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, align);
2009 return true;
2010 #else
2011 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2012 return false;
2013 #endif
2014 }
2015
2016 /* A noswitch_section_callback for bss_noswitch_section. */
2017
2018 #if defined ASM_OUTPUT_ALIGNED_BSS
2019 static bool
2020 emit_bss (tree decl ATTRIBUTE_UNUSED,
2021 const char *name ATTRIBUTE_UNUSED,
2022 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2023 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2024 {
2025 ASM_OUTPUT_ALIGNED_BSS (asm_out_file, decl, name, size,
2026 get_variable_align (decl));
2027 return true;
2028 }
2029 #endif
2030
2031 /* A noswitch_section_callback for comm_section. */
2032
2033 static bool
2034 emit_common (tree decl ATTRIBUTE_UNUSED,
2035 const char *name ATTRIBUTE_UNUSED,
2036 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2037 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2038 {
2039 #if defined ASM_OUTPUT_ALIGNED_DECL_COMMON
2040 ASM_OUTPUT_ALIGNED_DECL_COMMON (asm_out_file, decl, name,
2041 size, get_variable_align (decl));
2042 return true;
2043 #elif defined ASM_OUTPUT_ALIGNED_COMMON
2044 ASM_OUTPUT_ALIGNED_COMMON (asm_out_file, name, size,
2045 get_variable_align (decl));
2046 return true;
2047 #else
2048 ASM_OUTPUT_COMMON (asm_out_file, name, size, rounded);
2049 return false;
2050 #endif
2051 }
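/* Illustration only (the exact directive is target-defined): on a typical
   ELF target the ASM_OUTPUT_ALIGNED_COMMON branch above, given a 400-byte
   object with 32-byte alignment, prints something like

     .comm   buf,400,32

   while the plain ASM_OUTPUT_COMMON fallback can only pass ROUNDED, which
   is why it returns false so the caller can diagnose a stricter alignment
   request it could not honor.  */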
2052
2053 /* A noswitch_section_callback for tls_comm_section. */
2054
2055 static bool
2056 emit_tls_common (tree decl ATTRIBUTE_UNUSED,
2057 const char *name ATTRIBUTE_UNUSED,
2058 unsigned HOST_WIDE_INT size ATTRIBUTE_UNUSED,
2059 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED)
2060 {
2061 #ifdef ASM_OUTPUT_TLS_COMMON
2062 ASM_OUTPUT_TLS_COMMON (asm_out_file, decl, name, size);
2063 return true;
2064 #else
2065 sorry ("thread-local COMMON data not implemented");
2066 return true;
2067 #endif
2068 }
2069
2070 /* Assemble DECL given that it belongs in SECTION_NOSWITCH section SECT.
2071 NAME is the name of DECL's SYMBOL_REF. */
2072
2073 static void
2074 assemble_noswitch_variable (tree decl, const char *name, section *sect,
2075 unsigned int align)
2076 {
2077 unsigned HOST_WIDE_INT size, rounded;
2078
2079 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2080 rounded = size;
2081
2082 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_protect_global (decl))
2083 size += asan_red_zone_size (size);
2084
2085 /* Don't allocate zero bytes of common,
2086 since that means "undefined external" in the linker. */
2087 if (size == 0)
2088 rounded = 1;
2089
2090 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2091 so that each uninitialized object starts on such a boundary. */
2092 rounded += (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1;
2093 rounded = (rounded / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2094 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2095
2096 if (!sect->noswitch.callback (decl, name, size, rounded)
2097 && (unsigned HOST_WIDE_INT) (align / BITS_PER_UNIT) > rounded)
2098 error ("requested alignment for %q+D is greater than "
2099 "implemented alignment of %wu", decl, rounded);
2100 }
2101
2102 /* A subroutine of assemble_variable. Output the label and contents of
2103 DECL, whose address is a SYMBOL_REF with name NAME. DONT_OUTPUT_DATA
2104 is as for assemble_variable. */
2105
2106 static void
2107 assemble_variable_contents (tree decl, const char *name,
2108 bool dont_output_data, bool merge_strings)
2109 {
2110 /* Do any machine/system dependent processing of the object. */
2111 #ifdef ASM_DECLARE_OBJECT_NAME
2112 last_assemble_variable_decl = decl;
2113 ASM_DECLARE_OBJECT_NAME (asm_out_file, name, decl);
2114 #else
2115 /* The standard thing is just to output a label for the object. */
2116 ASM_OUTPUT_LABEL (asm_out_file, name);
2117 #endif /* ASM_DECLARE_OBJECT_NAME */
2118
2119 if (!dont_output_data)
2120 {
2121 /* Caller is supposed to use varpool_get_constructor when it wants
2122 to output the body. */
2123 gcc_assert (!in_lto_p || DECL_INITIAL (decl) != error_mark_node);
2124 if (DECL_INITIAL (decl)
2125 && DECL_INITIAL (decl) != error_mark_node
2126 && !initializer_zerop (DECL_INITIAL (decl)))
2127 /* Output the actual data. */
2128 output_constant (DECL_INITIAL (decl),
2129 tree_to_uhwi (DECL_SIZE_UNIT (decl)),
2130 get_variable_align (decl),
2131 false, merge_strings);
2132 else
2133 /* Leave space for it. */
2134 assemble_zeros (tree_to_uhwi (DECL_SIZE_UNIT (decl)));
2135 targetm.asm_out.decl_end ();
2136 }
2137 }
2138
2139 /* Write out assembly for the variable DECL, which is not defined in
2140 the current translation unit. */
2141 void
2142 assemble_undefined_decl (tree decl)
2143 {
2144 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
2145 targetm.asm_out.assemble_undefined_decl (asm_out_file, name, decl);
2146 }
2147
2148 /* Assemble everything that is needed for a variable or function declaration.
2149 Not used for automatic variables, and not used for function definitions.
2150 Should not be called for variables of incomplete structure type.
2151
2152 TOP_LEVEL is nonzero if this variable has file scope.
2153 AT_END is nonzero if this is the special handling, at end of compilation,
2154 to define things that have had only tentative definitions.
2155 DONT_OUTPUT_DATA if nonzero means don't actually output the
2156 initial value (that will be done by the caller). */
2157
2158 void
2159 assemble_variable (tree decl, int top_level ATTRIBUTE_UNUSED,
2160 int at_end ATTRIBUTE_UNUSED, int dont_output_data)
2161 {
2162 const char *name;
2163 rtx decl_rtl, symbol;
2164 section *sect;
2165 unsigned int align;
2166 bool asan_protected = false;
2167
2168 /* This function is supposed to handle VARIABLES. Ensure we have one. */
2169 gcc_assert (VAR_P (decl));
2170
2171 /* Emulated TLS had better not get this far. */
2172 gcc_checking_assert (targetm.have_tls || !DECL_THREAD_LOCAL_P (decl));
2173
2174 last_assemble_variable_decl = 0;
2175
2176 /* Normally no need to say anything here for external references,
2177 since assemble_external is called by the language-specific code
2178 when a declaration is first seen. */
2179
2180 if (DECL_EXTERNAL (decl))
2181 return;
2182
2183 /* Do nothing for global register variables. */
2184 if (DECL_RTL_SET_P (decl) && REG_P (DECL_RTL (decl)))
2185 {
2186 TREE_ASM_WRITTEN (decl) = 1;
2187 return;
2188 }
2189
2190 /* If type was incomplete when the variable was declared,
2191 see if it is complete now. */
2192
2193 if (DECL_SIZE (decl) == 0)
2194 layout_decl (decl, 0);
2195
2196 /* Still incomplete => don't allocate it; treat the tentative defn
2197 (which is what it must have been) as an `extern' reference. */
2198
2199 if (!dont_output_data && DECL_SIZE (decl) == 0)
2200 {
2201 error ("storage size of %q+D isn%'t known", decl);
2202 TREE_ASM_WRITTEN (decl) = 1;
2203 return;
2204 }
2205
2206 /* The first declaration of a variable that comes through this function
2207 decides whether it is global (in C, has external linkage)
2208 or local (in C, has internal linkage). So do nothing more
2209 if this function has already run. */
2210
2211 if (TREE_ASM_WRITTEN (decl))
2212 return;
2213
2214 /* Make sure targetm.encode_section_info is invoked before we set
2215 ASM_WRITTEN. */
2216 decl_rtl = DECL_RTL (decl);
2217
2218 TREE_ASM_WRITTEN (decl) = 1;
2219
2220 /* Do no output if -fsyntax-only. */
2221 if (flag_syntax_only)
2222 return;
2223
2224 if (! dont_output_data
2225 && ! valid_constant_size_p (DECL_SIZE_UNIT (decl)))
2226 {
2227 error ("size of variable %q+D is too large", decl);
2228 return;
2229 }
2230
2231 gcc_assert (MEM_P (decl_rtl));
2232 gcc_assert (GET_CODE (XEXP (decl_rtl, 0)) == SYMBOL_REF);
2233 symbol = XEXP (decl_rtl, 0);
2234
2235 /* If this symbol belongs to the tree constant pool, output the constant
2236 if it hasn't already been written. */
2237 if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
2238 {
2239 tree decl = SYMBOL_REF_DECL (symbol);
2240 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
2241 output_constant_def_contents (symbol);
2242 return;
2243 }
2244
2245 app_disable ();
2246
2247 name = XSTR (symbol, 0);
2248 if (TREE_PUBLIC (decl) && DECL_NAME (decl))
2249 notice_global_symbol (decl);
2250
2251 /* Compute the alignment of this data. */
2252
2253 align_variable (decl, dont_output_data);
2254
2255 if ((flag_sanitize & SANITIZE_ADDRESS)
2256 && asan_protect_global (decl))
2257 {
2258 asan_protected = true;
2259 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
2260 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
2261 }
2262
2263 set_mem_align (decl_rtl, DECL_ALIGN (decl));
2264
2265 align = get_variable_align (decl);
2266
2267 if (TREE_PUBLIC (decl))
2268 maybe_assemble_visibility (decl);
2269
2270 if (DECL_PRESERVE_P (decl))
2271 targetm.asm_out.mark_decl_preserved (name);
2272
2273 /* First make the assembler name(s) global if appropriate. */
2274 sect = get_variable_section (decl, false);
2275 if (TREE_PUBLIC (decl)
2276 && (sect->common.flags & SECTION_COMMON) == 0)
2277 globalize_decl (decl);
2278
2279 /* Output any data that we will need to use the address of. */
2280 if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node)
2281 output_addressed_constants (DECL_INITIAL (decl), 0);
2282
2283 /* dbxout.c needs to know this. */
2284 if (sect && (sect->common.flags & SECTION_CODE) != 0)
2285 DECL_IN_TEXT_SECTION (decl) = 1;
2286
2287 /* If the decl is part of an object_block, make sure that the decl
2288 has been positioned within its block, but do not write out its
2289 definition yet. output_object_blocks will do that later. */
2290 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
2291 {
2292 gcc_assert (!dont_output_data);
2293 place_block_symbol (symbol);
2294 }
2295 else if (SECTION_STYLE (sect) == SECTION_NOSWITCH)
2296 assemble_noswitch_variable (decl, name, sect, align);
2297 else
2298 {
2299 /* Special-case handling of vtv comdat sections. */
2300 if (sect->named.name
2301 && (strcmp (sect->named.name, ".vtable_map_vars") == 0))
2302 handle_vtv_comdat_section (sect, decl);
2303 else
2304 switch_to_section (sect);
2305 if (align > BITS_PER_UNIT)
2306 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
2307 assemble_variable_contents (decl, name, dont_output_data,
2308 (sect->common.flags & SECTION_MERGE)
2309 && (sect->common.flags & SECTION_STRINGS));
2310 if (asan_protected)
2311 {
2312 unsigned HOST_WIDE_INT int size
2313 = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2314 assemble_zeros (asan_red_zone_size (size));
2315 }
2316 }
2317 }
2318
2319
2320 /* Given a function declaration (FN_DECL), this function assembles the
2321 function's address into the .preinit_array section. */
2322
2323 void
2324 assemble_vtv_preinit_initializer (tree fn_decl)
2325 {
2326 section *sect;
2327 unsigned flags = SECTION_WRITE;
2328 rtx symbol = XEXP (DECL_RTL (fn_decl), 0);
2329
2330 flags |= SECTION_NOTYPE;
2331 sect = get_section (".preinit_array", flags, fn_decl);
2332 switch_to_section (sect);
2333 assemble_addr_to_section (symbol, sect);
2334 }
2335
2336 /* Return 1 if type TYPE contains any pointers. */
2337
2338 static int
2339 contains_pointers_p (tree type)
2340 {
2341 switch (TREE_CODE (type))
2342 {
2343 case POINTER_TYPE:
2344 case REFERENCE_TYPE:
2345 /* I'm not sure whether OFFSET_TYPE needs this treatment,
2346 so I'll play it safe and return 1. */
2347 case OFFSET_TYPE:
2348 return 1;
2349
2350 case RECORD_TYPE:
2351 case UNION_TYPE:
2352 case QUAL_UNION_TYPE:
2353 {
2354 tree fields;
2355 /* For a type that has fields, see if the fields have pointers. */
2356 for (fields = TYPE_FIELDS (type); fields; fields = DECL_CHAIN (fields))
2357 if (TREE_CODE (fields) == FIELD_DECL
2358 && contains_pointers_p (TREE_TYPE (fields)))
2359 return 1;
2360 return 0;
2361 }
2362
2363 case ARRAY_TYPE:
2364 /* An array type contains pointers if its element type does. */
2365 return contains_pointers_p (TREE_TYPE (type));
2366
2367 default:
2368 return 0;
2369 }
2370 }
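/* For example (sketch): given

     struct s { int len; char *data; };

   contains_pointers_p returns 1 for `struct s' and for an array of
   `struct s', but 0 for `int[8]'.  The answer is only a conservative
   yes/no, which is why OFFSET_TYPE above errs on the side of 1.  */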
2371
2372 /* We delay assemble_external processing until
2373 the compilation unit is finalized. This is the best we can do for
2374 right now (i.e. stage 3 of GCC 4.0) - the right thing is to delay
2375 it all the way to final. See PR 17982 for further discussion. */
2376 static GTY(()) tree pending_assemble_externals;
2377
2378 #ifdef ASM_OUTPUT_EXTERNAL
2379 /* Some targets delay some output to final using TARGET_ASM_FILE_END.
2380 As a result, assemble_external can be called after the list of externals
2381 is processed and the pointer set destroyed. */
2382 static bool pending_assemble_externals_processed;
2383
2384 /* Avoid O(external_decls**2) lookups in the pending_assemble_externals
2385 TREE_LIST in assemble_external. */
2386 static hash_set<tree> *pending_assemble_externals_set;
2387
2388 /* True if DECL is a function decl for which no out-of-line copy exists.
2389 It is assumed that DECL's assembler name has been set. */
2390
2391 static bool
2392 incorporeal_function_p (tree decl)
2393 {
2394 if (TREE_CODE (decl) == FUNCTION_DECL && fndecl_built_in_p (decl))
2395 {
2396 const char *name;
2397
2398 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2399 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2400 return true;
2401
2402 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2403 /* Atomic or sync builtins which have survived this far will be
2404 resolved externally and therefore are not incorporeal. */
2405 if (strncmp (name, "__builtin_", 10) == 0)
2406 return true;
2407 }
2408 return false;
2409 }
2410
2411 /* Actually do the tests to determine if this is necessary, and invoke
2412 ASM_OUTPUT_EXTERNAL. */
2413 static void
2414 assemble_external_real (tree decl)
2415 {
2416 rtx rtl = DECL_RTL (decl);
2417
2418 if (MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF
2419 && !SYMBOL_REF_USED (XEXP (rtl, 0))
2420 && !incorporeal_function_p (decl))
2421 {
2422 /* Some systems do require some output. */
2423 SYMBOL_REF_USED (XEXP (rtl, 0)) = 1;
2424 ASM_OUTPUT_EXTERNAL (asm_out_file, decl, XSTR (XEXP (rtl, 0), 0));
2425 }
2426 }
2427 #endif
2428
2429 void
2430 process_pending_assemble_externals (void)
2431 {
2432 #ifdef ASM_OUTPUT_EXTERNAL
2433 tree list;
2434 for (list = pending_assemble_externals; list; list = TREE_CHAIN (list))
2435 assemble_external_real (TREE_VALUE (list));
2436
2437 pending_assemble_externals = 0;
2438 pending_assemble_externals_processed = true;
2439 delete pending_assemble_externals_set;
2440 #endif
2441 }
2442
2443 /* This TREE_LIST contains any weak symbol declarations waiting
2444 to be emitted. */
2445 static GTY(()) tree weak_decls;
2446
2447 /* Output something to declare an external symbol to the assembler,
2448 and qualifiers such as weakness. (Most assemblers don't need
2449 an extern declaration, so we normally output nothing.) Do nothing if
2450 DECL is not external. */
2451
2452 void
2453 assemble_external (tree decl ATTRIBUTE_UNUSED)
2454 {
2455 /* Make sure that the ASM_OUT_FILE is open.
2456 If it's not, we should not be calling this function. */
2457 gcc_assert (asm_out_file);
2458
2459 /* In a perfect world, the following condition would be true.
2460 Sadly, the Go front end emits assembly *from the front end*,
2461 bypassing the call graph. See PR52739. Fix before GCC 4.8. */
2462 #if 0
2463 /* This function should only be called if we are expanding, or have
2464 expanded, to RTL.
2465 Ideally, only final.c would be calling this function, but it is
2466 not clear whether that would break things somehow. See PR 17982
2467 for further discussion. */
2468 gcc_assert (state == EXPANSION
2469 || state == FINISHED);
2470 #endif
2471
2472 if (!DECL_P (decl) || !DECL_EXTERNAL (decl) || !TREE_PUBLIC (decl))
2473 return;
2474
2475 /* We want to output annotations for weak and external symbols at the
2476 very end, to check whether they are referenced or not. */
2477
2478 if (TARGET_SUPPORTS_WEAK
2479 && DECL_WEAK (decl)
2480 /* TREE_STATIC is a weird and abused creature which is not
2481 generally the right test for whether an entity has been
2482 locally emitted, inlined or otherwise not-really-extern, but
2483 for declarations that can be weak, it happens to be a
2484 match. */
2485 && !TREE_STATIC (decl)
2486 && lookup_attribute ("weak", DECL_ATTRIBUTES (decl))
2487 && value_member (decl, weak_decls) == NULL_TREE)
2488 weak_decls = tree_cons (NULL, decl, weak_decls);
2489
2490 #ifdef ASM_OUTPUT_EXTERNAL
2491 if (pending_assemble_externals_processed)
2492 {
2493 assemble_external_real (decl);
2494 return;
2495 }
2496
2497 if (! pending_assemble_externals_set->add (decl))
2498 pending_assemble_externals = tree_cons (NULL, decl,
2499 pending_assemble_externals);
2500 #endif
2501 }
2502
2503 /* Similar, for calling a library function FUN. */
2504
2505 void
2506 assemble_external_libcall (rtx fun)
2507 {
2508 /* Declare the library function name external when first used, if necessary. */
2509 if (! SYMBOL_REF_USED (fun))
2510 {
2511 SYMBOL_REF_USED (fun) = 1;
2512 targetm.asm_out.external_libcall (fun);
2513 }
2514 }
2515
2516 /* Assemble a label named NAME. */
2517
2518 void
2519 assemble_label (FILE *file, const char *name)
2520 {
2521 ASM_OUTPUT_LABEL (file, name);
2522 }
2523
2524 /* Set the symbol_referenced flag for ID. */
2525 void
2526 mark_referenced (tree id)
2527 {
2528 TREE_SYMBOL_REFERENCED (id) = 1;
2529 }
2530
2531 /* Set the symbol_referenced flag for DECL and notify callgraph. */
2532 void
2533 mark_decl_referenced (tree decl)
2534 {
2535 if (TREE_CODE (decl) == FUNCTION_DECL)
2536 {
2537 /* Extern inline functions don't become needed when referenced.
2538 If we know a method will be emitted in another TU and no new
2539 functions can be marked reachable, just use the external
2540 definition. */
2541 struct cgraph_node *node = cgraph_node::get_create (decl);
2542 if (!DECL_EXTERNAL (decl)
2543 && !node->definition)
2544 node->mark_force_output ();
2545 }
2546 else if (VAR_P (decl))
2547 {
2548 varpool_node *node = varpool_node::get_create (decl);
2549 /* The C++ front end uses mark_decl_referenced to force COMDAT variables
2550 to be output that might otherwise appear dead. */
2551 node->force_output = true;
2552 }
2553 /* else do nothing - we can get various sorts of CST nodes here,
2554 which do not need to be marked. */
2555 }
2556
2557
2558 /* Output to FILE (an assembly file) a reference to NAME. If NAME
2559 starts with a *, the rest of NAME is output verbatim. Otherwise
2560 NAME is transformed in a target-specific way (usually by the
2561 addition of an underscore). */
2562
2563 void
2564 assemble_name_raw (FILE *file, const char *name)
2565 {
2566 if (name[0] == '*')
2567 fputs (&name[1], file);
2568 else
2569 ASM_OUTPUT_LABELREF (file, name);
2570 }
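/* Illustration only (target-dependent): assemble_name_raw (file, "*foo")
   writes `foo' verbatim, while assemble_name_raw (file, "foo") goes
   through ASM_OUTPUT_LABELREF and may therefore come out as `_foo' on
   targets that prepend a user-label underscore.  */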
2571
2572 /* Return NAME that should actually be emitted, looking through
2573 transparent aliases. If NAME refers to an entity that is also
2574 represented as a tree (like a function or variable), mark the entity
2575 as referenced. */
2576 const char *
2577 assemble_name_resolve (const char *name)
2578 {
2579 const char *real_name = targetm.strip_name_encoding (name);
2580 tree id = maybe_get_identifier (real_name);
2581
2582 if (id)
2583 {
2584 tree id_orig = id;
2585
2586 mark_referenced (id);
2587 ultimate_transparent_alias_target (&id);
2588 if (id != id_orig)
2589 name = IDENTIFIER_POINTER (id);
2590 gcc_assert (! TREE_CHAIN (id));
2591 }
2592
2593 return name;
2594 }
2595
2596 /* Like assemble_name_raw, but should be used when NAME might refer to
2597 an entity that is also represented as a tree (like a function or
2598 variable). If NAME does refer to such an entity, that entity will
2599 be marked as referenced. */
2600
2601 void
2602 assemble_name (FILE *file, const char *name)
2603 {
2604 assemble_name_raw (file, assemble_name_resolve (name));
2605 }
2606
2607 /* Allocate SIZE bytes of writable static space with a gensym name
2608 and return an RTX to refer to its address. */
2609
2610 rtx
2611 assemble_static_space (unsigned HOST_WIDE_INT size)
2612 {
2613 char name[17];
2614 const char *namestring;
2615 rtx x;
2616
2617 ASM_GENERATE_INTERNAL_LABEL (name, "LF", const_labelno);
2618 ++const_labelno;
2619 namestring = ggc_strdup (name);
2620
2621 x = gen_rtx_SYMBOL_REF (Pmode, namestring);
2622 SYMBOL_REF_FLAGS (x) = SYMBOL_FLAG_LOCAL;
2623
2624 #ifdef ASM_OUTPUT_ALIGNED_DECL_LOCAL
2625 ASM_OUTPUT_ALIGNED_DECL_LOCAL (asm_out_file, NULL_TREE, name, size,
2626 BIGGEST_ALIGNMENT);
2627 #else
2628 #ifdef ASM_OUTPUT_ALIGNED_LOCAL
2629 ASM_OUTPUT_ALIGNED_LOCAL (asm_out_file, name, size, BIGGEST_ALIGNMENT);
2630 #else
2631 {
2632 /* Round size up to multiple of BIGGEST_ALIGNMENT bits
2633 so that each uninitialized object starts on such a boundary. */
2634 /* Variable `rounded' might or might not be used in ASM_OUTPUT_LOCAL. */
2635 unsigned HOST_WIDE_INT rounded ATTRIBUTE_UNUSED
2636 = ((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1)
2637 / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2638 * (BIGGEST_ALIGNMENT / BITS_PER_UNIT));
2639 ASM_OUTPUT_LOCAL (asm_out_file, name, size, rounded);
2640 }
2641 #endif
2642 #endif
2643 return x;
2644 }
2645
2646 /* Assemble the static constant template for function entry trampolines.
2647 This is done at most once per compilation.
2648 Returns an RTX for the address of the template. */
2649
2650 static GTY(()) rtx initial_trampoline;
2651
2652 rtx
2653 assemble_trampoline_template (void)
2654 {
2655 char label[256];
2656 const char *name;
2657 int align;
2658 rtx symbol;
2659
2660 gcc_assert (targetm.asm_out.trampoline_template != NULL);
2661
2662 if (initial_trampoline)
2663 return initial_trampoline;
2664
2665 /* By default, put trampoline templates in read-only data section. */
2666
2667 #ifdef TRAMPOLINE_SECTION
2668 switch_to_section (TRAMPOLINE_SECTION);
2669 #else
2670 switch_to_section (readonly_data_section);
2671 #endif
2672
2673 /* Write the assembler code to define one. */
2674 align = floor_log2 (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
2675 if (align > 0)
2676 ASM_OUTPUT_ALIGN (asm_out_file, align);
2677
2678 targetm.asm_out.internal_label (asm_out_file, "LTRAMP", 0);
2679 targetm.asm_out.trampoline_template (asm_out_file);
2680
2681 /* Record the rtl to refer to it. */
2682 ASM_GENERATE_INTERNAL_LABEL (label, "LTRAMP", 0);
2683 name = ggc_strdup (label);
2684 symbol = gen_rtx_SYMBOL_REF (Pmode, name);
2685 SYMBOL_REF_FLAGS (symbol) = SYMBOL_FLAG_LOCAL;
2686
2687 initial_trampoline = gen_const_mem (BLKmode, symbol);
2688 set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
2689 set_mem_size (initial_trampoline, TRAMPOLINE_SIZE);
2690
2691 return initial_trampoline;
2692 }
2693 \f
2694 /* A and B are either alignments or offsets. Return the minimum alignment
2695 that may be assumed after adding the two together. */
2696
2697 static inline unsigned
2698 min_align (unsigned int a, unsigned int b)
2699 {
2700 return least_bit_hwi (a | b);
2701 }
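/* Worked example: min_align (16, 4) forms 16 | 4 == 20 (binary 10100)
   and returns its least significant set bit, 4: after adding a 4-byte
   offset to a 16-byte-aligned address only 4-byte alignment can still
   be assumed.  */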
2702
2703 /* Return the assembler directive for creating a given kind of integer
2704 object. SIZE is the number of bytes in the object and ALIGNED_P
2705 indicates whether it is known to be aligned. Return NULL if the
2706 assembly dialect has no such directive.
2707
2708 The returned string should be printed at the start of a new line and
2709 be followed immediately by the object's initial value. */
2710
2711 const char *
2712 integer_asm_op (int size, int aligned_p)
2713 {
2714 struct asm_int_op *ops;
2715
2716 if (aligned_p)
2717 ops = &targetm.asm_out.aligned_op;
2718 else
2719 ops = &targetm.asm_out.unaligned_op;
2720
2721 switch (size)
2722 {
2723 case 1:
2724 return targetm.asm_out.byte_op;
2725 case 2:
2726 return ops->hi;
2727 case 3:
2728 return ops->psi;
2729 case 4:
2730 return ops->si;
2731 case 5:
2732 case 6:
2733 case 7:
2734 return ops->pdi;
2735 case 8:
2736 return ops->di;
2737 case 9:
2738 case 10:
2739 case 11:
2740 case 12:
2741 case 13:
2742 case 14:
2743 case 15:
2744 return ops->pti;
2745 case 16:
2746 return ops->ti;
2747 default:
2748 return NULL;
2749 }
2750 }
2751
2752 /* Use directive OP to assemble an integer object X. Print OP at the
2753 start of the line, followed immediately by the value of X. */
2754
2755 void
2756 assemble_integer_with_op (const char *op, rtx x)
2757 {
2758 fputs (op, asm_out_file);
2759 output_addr_const (asm_out_file, x);
2760 fputc ('\n', asm_out_file);
2761 }
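/* A minimal sketch of how the two helpers above combine (the directive
   spelling is hypothetical and target-dependent): on a typical 64-bit
   ELF target integer_asm_op (8, 1) yields "\t.quad\t", so

     const char *op = integer_asm_op (8, 1);
     if (op)
       assemble_integer_with_op (op, gen_rtx_SYMBOL_REF (Pmode, "foo"));

   prints a line such as

     .quad   foo

   and a NULL return from integer_asm_op means the caller must split the
   value up itself, as assemble_integer below does.  */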
2762
2763 /* The default implementation of the asm_out.integer target hook. */
2764
2765 bool
2766 default_assemble_integer (rtx x ATTRIBUTE_UNUSED,
2767 unsigned int size ATTRIBUTE_UNUSED,
2768 int aligned_p ATTRIBUTE_UNUSED)
2769 {
2770 const char *op = integer_asm_op (size, aligned_p);
2771 /* Avoid GAS bugs for large values. Specifically negative values whose
2772 absolute value fits in a bfd_vma, but not in a bfd_signed_vma. */
2773 if (size > UNITS_PER_WORD && size > POINTER_SIZE_UNITS)
2774 return false;
2775 return op && (assemble_integer_with_op (op, x), true);
2776 }
2777
2778 /* Assemble the integer constant X into an object of SIZE bytes. ALIGN is
2779 the alignment of the integer in bits. Return 1 if we were able to output
2780 the constant, otherwise 0. We must be able to output the constant,
2781 if FORCE is nonzero. */
2782
2783 bool
2784 assemble_integer (rtx x, unsigned int size, unsigned int align, int force)
2785 {
2786 int aligned_p;
2787
2788 aligned_p = (align >= MIN (size * BITS_PER_UNIT, BIGGEST_ALIGNMENT));
2789
2790 /* See if the target hook can handle this kind of object. */
2791 if (targetm.asm_out.integer (x, size, aligned_p))
2792 return true;
2793
2794 /* If the object is a multi-byte one, try splitting it up. Split
2795 it into words if it is multi-word, otherwise split it into bytes. */
2796 if (size > 1)
2797 {
2798 machine_mode omode, imode;
2799 unsigned int subalign;
2800 unsigned int subsize, i;
2801 enum mode_class mclass;
2802
2803 subsize = size > UNITS_PER_WORD? UNITS_PER_WORD : 1;
2804 subalign = MIN (align, subsize * BITS_PER_UNIT);
2805 if (GET_CODE (x) == CONST_FIXED)
2806 mclass = GET_MODE_CLASS (GET_MODE (x));
2807 else
2808 mclass = MODE_INT;
2809
2810 omode = mode_for_size (subsize * BITS_PER_UNIT, mclass, 0).require ();
2811 imode = mode_for_size (size * BITS_PER_UNIT, mclass, 0).require ();
2812
2813 for (i = 0; i < size; i += subsize)
2814 {
2815 rtx partial = simplify_subreg (omode, x, imode, i);
2816 if (!partial || !assemble_integer (partial, subsize, subalign, 0))
2817 break;
2818 }
2819 if (i == size)
2820 return true;
2821
2822 /* If we've printed some of it, but not all of it, there's no going
2823 back now. */
2824 gcc_assert (!i);
2825 }
2826
2827 gcc_assert (!force);
2828
2829 return false;
2830 }
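/* Worked example of the splitting above: emitting a 16-byte integer on a
   target whose widest directive covers 8 bytes first asks the target
   hook, which declines, then emits two 8-byte halves obtained with
   simplify_subreg at byte offsets 0 and 8.  If a sub-emission fails only
   after some bytes were already printed, the gcc_assert (!i) fires,
   because there is no way to back the partial output out.  */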
2831 \f
2832 /* Assemble the floating-point constant D into an object of size MODE. ALIGN
2833 is the alignment of the constant in bits. If REVERSE is true, D is output
2834 in reverse storage order. */
2835
2836 void
2837 assemble_real (REAL_VALUE_TYPE d, scalar_float_mode mode, unsigned int align,
2838 bool reverse)
2839 {
2840 long data[4] = {0, 0, 0, 0};
2841 int bitsize, nelts, nunits, units_per;
2842 rtx elt;
2843
2844 /* This is hairy. We have a quantity of known size. real_to_target
2845 will put it into an array of *host* longs, 32 bits per element
2846 (even if long is more than 32 bits). We need to determine the
2847 number of array elements that are occupied (nelts) and the number
2848 of *target* min-addressable units that will be occupied in the
2849 object file (nunits). We cannot assume that 32 divides the
2850 mode's bitsize (size * BITS_PER_UNIT) evenly.
2851
2852 size * BITS_PER_UNIT is used here to make sure that padding bits
2853 (which might appear at either end of the value; real_to_target
2854 will include the padding bits in its output array) are included. */
2855
2856 nunits = GET_MODE_SIZE (mode);
2857 bitsize = nunits * BITS_PER_UNIT;
2858 nelts = CEIL (bitsize, 32);
2859 units_per = 32 / BITS_PER_UNIT;
2860
2861 real_to_target (data, &d, mode);
2862
2863 /* Put out the first word with the specified alignment. */
2864 unsigned int chunk_nunits = MIN (nunits, units_per);
2865 if (reverse)
2866 elt = flip_storage_order (SImode, gen_int_mode (data[nelts - 1], SImode));
2867 else
2868 elt = GEN_INT (sext_hwi (data[0], chunk_nunits * BITS_PER_UNIT));
2869 assemble_integer (elt, chunk_nunits, align, 1);
2870 nunits -= chunk_nunits;
2871
2872 /* Subsequent words need only 32-bit alignment. */
2873 align = min_align (align, 32);
2874
2875 for (int i = 1; i < nelts; i++)
2876 {
2877 chunk_nunits = MIN (nunits, units_per);
2878 if (reverse)
2879 elt = flip_storage_order (SImode,
2880 gen_int_mode (data[nelts - 1 - i], SImode));
2881 else
2882 elt = GEN_INT (sext_hwi (data[i], chunk_nunits * BITS_PER_UNIT));
2883 assemble_integer (elt, chunk_nunits, align, 1);
2884 nunits -= chunk_nunits;
2885 }
2886 }
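/* Worked example, assuming BITS_PER_UNIT == 8: for a DFmode constant
   GET_MODE_SIZE is 8, so bitsize == 64, nelts == 2 and units_per == 4;
   real_to_target fills data[0..1] and two 4-byte chunks are emitted, the
   first with the caller's alignment and the second with at most 32-bit
   alignment.  With REVERSE set the words are emitted from
   data[nelts - 1] downwards and each is byte-swapped by
   flip_storage_order.  */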
2887 \f
2888 /* Given an expression EXP with a constant value,
2889 reduce it to the sum of an assembler symbol and an integer.
2890 Store them both in the structure *VALUE.
2891 EXP must be reducible. */
2892
2893 class addr_const {
2894 public:
2895 rtx base;
2896 poly_int64 offset;
2897 };
2898
2899 static void
2900 decode_addr_const (tree exp, class addr_const *value)
2901 {
2902 tree target = TREE_OPERAND (exp, 0);
2903 poly_int64 offset = 0;
2904 rtx x;
2905
2906 while (1)
2907 {
2908 poly_int64 bytepos;
2909 if (TREE_CODE (target) == COMPONENT_REF
2910 && poly_int_tree_p (byte_position (TREE_OPERAND (target, 1)),
2911 &bytepos))
2912 {
2913 offset += bytepos;
2914 target = TREE_OPERAND (target, 0);
2915 }
2916 else if (TREE_CODE (target) == ARRAY_REF
2917 || TREE_CODE (target) == ARRAY_RANGE_REF)
2918 {
2919 /* Truncate big offset. */
2920 offset
2921 += (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (target)))
2922 * wi::to_poly_widest (TREE_OPERAND (target, 1)).force_shwi ());
2923 target = TREE_OPERAND (target, 0);
2924 }
2925 else if (TREE_CODE (target) == MEM_REF
2926 && TREE_CODE (TREE_OPERAND (target, 0)) == ADDR_EXPR)
2927 {
2928 offset += mem_ref_offset (target).force_shwi ();
2929 target = TREE_OPERAND (TREE_OPERAND (target, 0), 0);
2930 }
2931 else if (TREE_CODE (target) == INDIRECT_REF
2932 && TREE_CODE (TREE_OPERAND (target, 0)) == NOP_EXPR
2933 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (target, 0), 0))
2934 == ADDR_EXPR)
2935 target = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (target, 0), 0), 0);
2936 else
2937 break;
2938 }
2939
2940 switch (TREE_CODE (target))
2941 {
2942 case VAR_DECL:
2943 case FUNCTION_DECL:
2944 x = DECL_RTL (target);
2945 break;
2946
2947 case LABEL_DECL:
2948 x = gen_rtx_MEM (FUNCTION_MODE,
2949 gen_rtx_LABEL_REF (Pmode, force_label_rtx (target)));
2950 break;
2951
2952 case REAL_CST:
2953 case FIXED_CST:
2954 case STRING_CST:
2955 case COMPLEX_CST:
2956 case CONSTRUCTOR:
2957 case INTEGER_CST:
2958 x = lookup_constant_def (target);
2959 /* Should have been added by output_addressed_constants. */
2960 gcc_assert (x);
2961 break;
2962
2963 case INDIRECT_REF:
2964 /* This deals with absolute addresses. */
2965 offset += tree_to_shwi (TREE_OPERAND (target, 0));
2966 x = gen_rtx_MEM (QImode,
2967 gen_rtx_SYMBOL_REF (Pmode, "origin of addresses"));
2968 break;
2969
2970 case COMPOUND_LITERAL_EXPR:
2971 gcc_assert (COMPOUND_LITERAL_EXPR_DECL (target));
2972 x = DECL_RTL (COMPOUND_LITERAL_EXPR_DECL (target));
2973 break;
2974
2975 default:
2976 gcc_unreachable ();
2977 }
2978
2979 gcc_assert (MEM_P (x));
2980 x = XEXP (x, 0);
2981
2982 value->base = x;
2983 value->offset = offset;
2984 }
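/* A small example of the reduction (sketch): for

     static int arr[10];
     ... &arr[3] ...

   the ADDR_EXPR's operand is an ARRAY_REF, so the loop above adds
   3 * sizeof (int) == 12 (assuming 4-byte int) to the offset and peels
   down to the VAR_DECL, leaving VALUE->base as the SYMBOL_REF for `arr'
   and VALUE->offset as 12.  */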
2985 \f
2986 static GTY(()) hash_table<tree_descriptor_hasher> *const_desc_htab;
2987
2988 static void maybe_output_constant_def_contents (struct constant_descriptor_tree *, int);
2989
2990 /* Constant pool accessor function. */
2991
2992 hash_table<tree_descriptor_hasher> *
2993 constant_pool_htab (void)
2994 {
2995 return const_desc_htab;
2996 }
2997
2998 /* Compute a hash code for a constant expression. */
2999
3000 hashval_t
3001 tree_descriptor_hasher::hash (constant_descriptor_tree *ptr)
3002 {
3003 return ptr->hash;
3004 }
3005
3006 static hashval_t
3007 const_hash_1 (const tree exp)
3008 {
3009 const char *p;
3010 hashval_t hi;
3011 int len, i;
3012 enum tree_code code = TREE_CODE (exp);
3013
3014 /* Either set P and LEN to the address and len of something to hash and
3015 exit the switch or return a value. */
3016
3017 switch (code)
3018 {
3019 case INTEGER_CST:
3020 p = (char *) &TREE_INT_CST_ELT (exp, 0);
3021 len = TREE_INT_CST_NUNITS (exp) * sizeof (HOST_WIDE_INT);
3022 break;
3023
3024 case REAL_CST:
3025 return real_hash (TREE_REAL_CST_PTR (exp));
3026
3027 case FIXED_CST:
3028 return fixed_hash (TREE_FIXED_CST_PTR (exp));
3029
3030 case STRING_CST:
3031 p = TREE_STRING_POINTER (exp);
3032 len = TREE_STRING_LENGTH (exp);
3033 break;
3034
3035 case COMPLEX_CST:
3036 return (const_hash_1 (TREE_REALPART (exp)) * 5
3037 + const_hash_1 (TREE_IMAGPART (exp)));
3038
3039 case VECTOR_CST:
3040 {
3041 hi = 7 + VECTOR_CST_NPATTERNS (exp);
3042 hi = hi * 563 + VECTOR_CST_NELTS_PER_PATTERN (exp);
3043 unsigned int count = vector_cst_encoded_nelts (exp);
3044 for (unsigned int i = 0; i < count; ++i)
3045 hi = hi * 563 + const_hash_1 (VECTOR_CST_ENCODED_ELT (exp, i));
3046 return hi;
3047 }
3048
3049 case CONSTRUCTOR:
3050 {
3051 unsigned HOST_WIDE_INT idx;
3052 tree value;
3053
3054 hi = 5 + int_size_in_bytes (TREE_TYPE (exp));
3055
3056 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
3057 if (value)
3058 hi = hi * 603 + const_hash_1 (value);
3059
3060 return hi;
3061 }
3062
3063 case ADDR_EXPR:
3064 if (CONSTANT_CLASS_P (TREE_OPERAND (exp, 0)))
3065 return const_hash_1 (TREE_OPERAND (exp, 0));
3066
3067 /* Fallthru. */
3068 case FDESC_EXPR:
3069 {
3070 class addr_const value;
3071
3072 decode_addr_const (exp, &value);
3073 switch (GET_CODE (value.base))
3074 {
3075 case SYMBOL_REF:
3076 /* Don't hash the address of the SYMBOL_REF;
3077 only use the offset and the symbol name. */
3078 hi = value.offset.coeffs[0];
3079 p = XSTR (value.base, 0);
3080 for (i = 0; p[i] != 0; i++)
3081 hi = ((hi * 613) + (unsigned) (p[i]));
3082 break;
3083
3084 case LABEL_REF:
3085 hi = (value.offset.coeffs[0]
3086 + CODE_LABEL_NUMBER (label_ref_label (value.base)) * 13);
3087 break;
3088
3089 default:
3090 gcc_unreachable ();
3091 }
3092 }
3093 return hi;
3094
3095 case PLUS_EXPR:
3096 case POINTER_PLUS_EXPR:
3097 case MINUS_EXPR:
3098 return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
3099 + const_hash_1 (TREE_OPERAND (exp, 1)));
3100
3101 CASE_CONVERT:
3102 return const_hash_1 (TREE_OPERAND (exp, 0)) * 7 + 2;
3103
3104 default:
3105 /* A language specific constant. Just hash the code. */
3106 return code;
3107 }
3108
3109 /* Compute hashing function. */
3110 hi = len;
3111 for (i = 0; i < len; i++)
3112 hi = ((hi * 613) + (unsigned) (p[i]));
3113
3114 return hi;
3115 }
3116
3117 /* Wrapper of compare_constant, for the htab interface. */
3118 bool
3119 tree_descriptor_hasher::equal (constant_descriptor_tree *c1,
3120 constant_descriptor_tree *c2)
3121 {
3122 if (c1->hash != c2->hash)
3123 return 0;
3124 return compare_constant (c1->value, c2->value);
3125 }
3126
3127 /* Compare t1 and t2, and return 1 only if they are known to result in
3128 the same bit pattern on output. */
3129
3130 static int
3131 compare_constant (const tree t1, const tree t2)
3132 {
3133 enum tree_code typecode;
3134
3135 if (t1 == NULL_TREE)
3136 return t2 == NULL_TREE;
3137 if (t2 == NULL_TREE)
3138 return 0;
3139
3140 if (TREE_CODE (t1) != TREE_CODE (t2))
3141 return 0;
3142
3143 switch (TREE_CODE (t1))
3144 {
3145 case INTEGER_CST:
3146 /* Integer constants are the same only if their types have the same width. */
3147 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3148 return 0;
3149 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3150 return 0;
3151 return tree_int_cst_equal (t1, t2);
3152
3153 case REAL_CST:
3154 /* Real constants are the same only if their types have the same width. In
3155 addition to the same width, we need to check whether the modes are the
3156 same. There might be two floating point modes that are the same size
3157 but have different representations, such as the PowerPC that has 2
3158 different 128-bit floating point types (IBM extended double and IEEE
3159 128-bit floating point). */
3160 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3161 return 0;
3162 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2)))
3163 return 0;
3164 return real_identical (&TREE_REAL_CST (t1), &TREE_REAL_CST (t2));
3165
3166 case FIXED_CST:
3167 /* Fixed constants are the same only if their types have the same width. */
3168 if (TYPE_PRECISION (TREE_TYPE (t1)) != TYPE_PRECISION (TREE_TYPE (t2)))
3169 return 0;
3170
3171 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
3172
3173 case STRING_CST:
3174 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3175 || int_size_in_bytes (TREE_TYPE (t1))
3176 != int_size_in_bytes (TREE_TYPE (t2)))
3177 return 0;
3178
3179 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
3180 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
3181 TREE_STRING_LENGTH (t1)));
3182
3183 case COMPLEX_CST:
3184 return (compare_constant (TREE_REALPART (t1), TREE_REALPART (t2))
3185 && compare_constant (TREE_IMAGPART (t1), TREE_IMAGPART (t2)));
3186
3187 case VECTOR_CST:
3188 {
3189 if (VECTOR_CST_NPATTERNS (t1)
3190 != VECTOR_CST_NPATTERNS (t2))
3191 return 0;
3192
3193 if (VECTOR_CST_NELTS_PER_PATTERN (t1)
3194 != VECTOR_CST_NELTS_PER_PATTERN (t2))
3195 return 0;
3196
3197 unsigned int count = vector_cst_encoded_nelts (t1);
3198 for (unsigned int i = 0; i < count; ++i)
3199 if (!compare_constant (VECTOR_CST_ENCODED_ELT (t1, i),
3200 VECTOR_CST_ENCODED_ELT (t2, i)))
3201 return 0;
3202
3203 return 1;
3204 }
3205
3206 case CONSTRUCTOR:
3207 {
3208 vec<constructor_elt, va_gc> *v1, *v2;
3209 unsigned HOST_WIDE_INT idx;
3210
3211 typecode = TREE_CODE (TREE_TYPE (t1));
3212 if (typecode != TREE_CODE (TREE_TYPE (t2)))
3213 return 0;
3214
3215 if (typecode == ARRAY_TYPE)
3216 {
3217 HOST_WIDE_INT size_1 = int_size_in_bytes (TREE_TYPE (t1));
3218 /* For arrays, check that mode, size and storage order match. */
3219 if (TYPE_MODE (TREE_TYPE (t1)) != TYPE_MODE (TREE_TYPE (t2))
3220 || size_1 == -1
3221 || size_1 != int_size_in_bytes (TREE_TYPE (t2))
3222 || TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t1))
3223 != TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (t2)))
3224 return 0;
3225 }
3226 else
3227 {
3228 /* For record and union constructors, require exact type
3229 equality. */
3230 if (TREE_TYPE (t1) != TREE_TYPE (t2))
3231 return 0;
3232 }
3233
3234 v1 = CONSTRUCTOR_ELTS (t1);
3235 v2 = CONSTRUCTOR_ELTS (t2);
3236 if (vec_safe_length (v1) != vec_safe_length (v2))
3237 return 0;
3238
3239 for (idx = 0; idx < vec_safe_length (v1); ++idx)
3240 {
3241 constructor_elt *c1 = &(*v1)[idx];
3242 constructor_elt *c2 = &(*v2)[idx];
3243
3244 /* Check that each value is the same... */
3245 if (!compare_constant (c1->value, c2->value))
3246 return 0;
3247 /* ... and that they apply to the same fields! */
3248 if (typecode == ARRAY_TYPE)
3249 {
3250 if (!compare_constant (c1->index, c2->index))
3251 return 0;
3252 }
3253 else
3254 {
3255 if (c1->index != c2->index)
3256 return 0;
3257 }
3258 }
3259
3260 return 1;
3261 }
3262
3263 case ADDR_EXPR:
3264 case FDESC_EXPR:
3265 {
3266 class addr_const value1, value2;
3267 enum rtx_code code;
3268 int ret;
3269
3270 decode_addr_const (t1, &value1);
3271 decode_addr_const (t2, &value2);
3272
3273 if (maybe_ne (value1.offset, value2.offset))
3274 return 0;
3275
3276 code = GET_CODE (value1.base);
3277 if (code != GET_CODE (value2.base))
3278 return 0;
3279
3280 switch (code)
3281 {
3282 case SYMBOL_REF:
3283 ret = (strcmp (XSTR (value1.base, 0), XSTR (value2.base, 0)) == 0);
3284 break;
3285
3286 case LABEL_REF:
3287 ret = (CODE_LABEL_NUMBER (label_ref_label (value1.base))
3288 == CODE_LABEL_NUMBER (label_ref_label (value2.base)));
3289 break;
3290
3291 default:
3292 gcc_unreachable ();
3293 }
3294 return ret;
3295 }
3296
3297 case PLUS_EXPR:
3298 case POINTER_PLUS_EXPR:
3299 case MINUS_EXPR:
3300 case RANGE_EXPR:
3301 return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
3302 && compare_constant (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)));
3303
3304 CASE_CONVERT:
3305 case VIEW_CONVERT_EXPR:
3306 return compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
3307
3308 default:
3309 return 0;
3310 }
3311
3312 gcc_unreachable ();
3313 }
3314 \f
3315 /* Return the section into which constant EXP should be placed. */
3316
3317 static section *
3318 get_constant_section (tree exp, unsigned int align)
3319 {
3320 return targetm.asm_out.select_section (exp,
3321 compute_reloc_for_constant (exp),
3322 align);
3323 }
3324
3325 /* Return the size of constant EXP in bytes. */
3326
3327 static HOST_WIDE_INT
3328 get_constant_size (tree exp)
3329 {
3330 HOST_WIDE_INT size;
3331
3332 size = int_size_in_bytes (TREE_TYPE (exp));
3333 gcc_checking_assert (size >= 0);
3334 gcc_checking_assert (TREE_CODE (exp) != STRING_CST
3335 || size >= TREE_STRING_LENGTH (exp));
3336 return size;
3337 }
3338
3339 /* Subroutine of output_constant_def:
3340 No constant equal to EXP is known to have been output.
3341 Make a constant descriptor to enter EXP in the hash table.
3342 Assign the label number and construct RTL to refer to the
3343 constant's location in memory.
3344 Caller is responsible for updating the hash table. */
3345
3346 static struct constant_descriptor_tree *
3347 build_constant_desc (tree exp)
3348 {
3349 struct constant_descriptor_tree *desc;
3350 rtx symbol, rtl;
3351 char label[256];
3352 int labelno;
3353 tree decl;
3354
3355 desc = ggc_alloc<constant_descriptor_tree> ();
3356 desc->value = exp;
3357
3358 /* Create a string containing the label name, in LABEL. */
3359 labelno = const_labelno++;
3360 ASM_GENERATE_INTERNAL_LABEL (label, "LC", labelno);
3361
3362 /* Construct the VAR_DECL associated with the constant. */
3363 decl = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (label),
3364 TREE_TYPE (exp));
3365 DECL_ARTIFICIAL (decl) = 1;
3366 DECL_IGNORED_P (decl) = 1;
3367 TREE_READONLY (decl) = 1;
3368 TREE_STATIC (decl) = 1;
3369 TREE_ADDRESSABLE (decl) = 1;
3370 /* We don't set the RTL yet as this would cause varpool to assume that the
3371 variable is referenced. Moreover, it would just be dropped in LTO mode.
3372 Instead we set the flag that will be recognized in make_decl_rtl. */
3373 DECL_IN_CONSTANT_POOL (decl) = 1;
3374 DECL_INITIAL (decl) = desc->value;
3375 /* ??? targetm.constant_alignment hasn't been updated for vector types on
3376 most architectures so use DATA_ALIGNMENT as well, except for strings. */
3377 if (TREE_CODE (exp) == STRING_CST)
3378 SET_DECL_ALIGN (decl, targetm.constant_alignment (exp, DECL_ALIGN (decl)));
3379 else
3380 {
3381 align_variable (decl, 0);
3382 if (DECL_ALIGN (decl) < GET_MODE_ALIGNMENT (DECL_MODE (decl))
3383 && ((optab_handler (movmisalign_optab, DECL_MODE (decl))
3384 != CODE_FOR_nothing)
3385 || targetm.slow_unaligned_access (DECL_MODE (decl),
3386 DECL_ALIGN (decl))))
3387 SET_DECL_ALIGN (decl, GET_MODE_ALIGNMENT (DECL_MODE (decl)));
3388 }
3389
3390 /* Now construct the SYMBOL_REF and the MEM. */
3391 if (use_object_blocks_p ())
3392 {
3393 int align = (TREE_CODE (decl) == CONST_DECL
3394 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3395 ? DECL_ALIGN (decl)
3396 : symtab_node::get (decl)->definition_alignment ());
3397 section *sect = get_constant_section (exp, align);
3398 symbol = create_block_symbol (ggc_strdup (label),
3399 get_block_for_section (sect), -1);
3400 }
3401 else
3402 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3403 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3404 SET_SYMBOL_REF_DECL (symbol, decl);
3405 TREE_CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3406
3407 rtl = gen_const_mem (TYPE_MODE (TREE_TYPE (exp)), symbol);
3408 set_mem_alias_set (rtl, 0);
3409
3410 /* Putting EXP into the literal pool might have imposed a different
3411 alignment which should be visible in the RTX as well. */
3412 set_mem_align (rtl, DECL_ALIGN (decl));
3413
3414 /* We cannot share RTX'es in pool entries.
3415 Mark this piece of RTL as required for unsharing. */
3416 RTX_FLAG (rtl, used) = 1;
3417
3418 /* Set flags or add text to the name to record information, such as
3419 that it is a local symbol. If the name is changed, the macro
3420 ASM_OUTPUT_LABELREF will have to know how to strip this
3421 information. This call might invalidate our local variable
3422 SYMBOL; we can't use it afterward. */
3423 targetm.encode_section_info (exp, rtl, true);
3424
3425 desc->rtl = rtl;
3426
3427 return desc;
3428 }
3429
3430 /* Subroutine of output_constant_def and tree_output_constant_def:
3431 Add a constant to the hash table that tracks which constants
3432 already have labels. */
3433
3434 static constant_descriptor_tree *
3435 add_constant_to_table (tree exp, int defer)
3436 {
3437 /* The hash table methods may call output_constant_def for addressed
3438 constants, so handle them first. */
3439 output_addressed_constants (exp, defer);
3440
3441 /* Sanity check to catch recursive insertion. */
3442 static bool inserting;
3443 gcc_assert (!inserting);
3444 inserting = true;
3445
3446 /* Look up EXP in the table of constant descriptors. If we didn't
3447 find it, create a new one. */
3448 struct constant_descriptor_tree key;
3449 key.value = exp;
3450 key.hash = const_hash_1 (exp);
3451 constant_descriptor_tree **loc
3452 = const_desc_htab->find_slot_with_hash (&key, key.hash, INSERT);
3453
3454 inserting = false;
3455
3456 struct constant_descriptor_tree *desc = *loc;
3457 if (!desc)
3458 {
3459 desc = build_constant_desc (exp);
3460 desc->hash = key.hash;
3461 *loc = desc;
3462 }
3463
3464 return desc;
3465 }
3466
3467 /* Return an rtx representing a reference to constant data in memory
3468 for the constant expression EXP.
3469
3470 If assembler code for such a constant has already been output,
3471 return an rtx to refer to it.
3472 Otherwise, output such a constant in memory
3473 and generate an rtx for it.
3474
3475 If DEFER is nonzero, this constant can be deferred and output only
3476 if referenced in the function after all optimizations.
3477
3478 `const_desc_table' records which constants already have label strings. */
3479
3480 rtx
3481 output_constant_def (tree exp, int defer)
3482 {
3483 struct constant_descriptor_tree *desc = add_constant_to_table (exp, defer);
3484 maybe_output_constant_def_contents (desc, defer);
3485 return desc->rtl;
3486 }
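/* A minimal usage sketch (hypothetical caller): force a tree constant
   into static memory while deferring its output until it is known to be
   referenced after optimization:

     tree cst = build_int_cst (integer_type_node, 42);
     rtx mem = output_constant_def (cst, 1);

   With DEFER nonzero only the descriptor, label and RTL are created
   here; the bytes themselves are written by output_constant_def_contents
   once a surviving reference is found while the function's insns are
   scanned for the constant pool.  */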
3487
3488 /* Subroutine of output_constant_def: Decide whether or not we need to
3489 output the constant DESC now, and if so, do it. */
3490 static void
3491 maybe_output_constant_def_contents (struct constant_descriptor_tree *desc,
3492 int defer)
3493 {
3494 rtx symbol = XEXP (desc->rtl, 0);
3495 tree exp = desc->value;
3496
3497 if (flag_syntax_only)
3498 return;
3499
3500 if (TREE_ASM_WRITTEN (exp))
3501 /* Already output; don't do it again. */
3502 return;
3503
3504 /* We can always defer constants as long as the context allows
3505 doing so. */
3506 if (defer)
3507 {
3508 /* Increment n_deferred_constants if it exists. It needs to be at
3509 least as large as the number of constants actually referred to
3510 by the function. If it's too small we'll stop looking too early
3511 and fail to emit constants; if it's too large we'll only look
3512 through the entire function when we could have stopped earlier. */
3513 if (cfun)
3514 n_deferred_constants++;
3515 return;
3516 }
3517
3518 output_constant_def_contents (symbol);
3519 }
3520
3521 /* Subroutine of output_constant_def_contents. Output the definition
3522 of constant EXP, which is pointed to by label LABEL. ALIGN is the
3523 constant's alignment in bits. */
3524
3525 static void
3526 assemble_constant_contents (tree exp, const char *label, unsigned int align,
3527 bool merge_strings)
3528 {
3529 HOST_WIDE_INT size;
3530
3531 size = get_constant_size (exp);
3532
3533 /* Do any machine/system dependent processing of the constant. */
3534 targetm.asm_out.declare_constant_name (asm_out_file, label, exp, size);
3535
3536 /* Output the value of EXP. */
3537 output_constant (exp, size, align, false, merge_strings);
3538
3539 targetm.asm_out.decl_end ();
3540 }
3541
3542 /* We must output the constant data referred to by SYMBOL; do so. */
3543
3544 static void
3545 output_constant_def_contents (rtx symbol)
3546 {
3547 tree decl = SYMBOL_REF_DECL (symbol);
3548 tree exp = DECL_INITIAL (decl);
3549 bool asan_protected = false;
3550
3551 /* Make sure any other constants whose addresses appear in EXP
3552 are assigned label numbers. */
3553 output_addressed_constants (exp, 0);
3554
3555 /* We are no longer deferring this constant. */
3556 TREE_ASM_WRITTEN (decl) = TREE_ASM_WRITTEN (exp) = 1;
3557
3558 if ((flag_sanitize & SANITIZE_ADDRESS)
3559 && TREE_CODE (exp) == STRING_CST
3560 && asan_protect_global (exp))
3561 {
3562 asan_protected = true;
3563 SET_DECL_ALIGN (decl, MAX (DECL_ALIGN (decl),
3564 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT));
3565 }
3566
3567 /* If the constant is part of an object block, make sure that the
3568 decl has been positioned within its block, but do not write out
3569 its definition yet. output_object_blocks will do that later. */
3570 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol) && SYMBOL_REF_BLOCK (symbol))
3571 place_block_symbol (symbol);
3572 else
3573 {
3574 int align = (TREE_CODE (decl) == CONST_DECL
3575 || (VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl))
3576 ? DECL_ALIGN (decl)
3577 : symtab_node::get (decl)->definition_alignment ());
3578 section *sect = get_constant_section (exp, align);
3579 switch_to_section (sect);
3580 if (align > BITS_PER_UNIT)
3581 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
3582 assemble_constant_contents (exp, XSTR (symbol, 0), align,
3583 (sect->common.flags & SECTION_MERGE)
3584 && (sect->common.flags & SECTION_STRINGS));
3585 if (asan_protected)
3586 {
3587 HOST_WIDE_INT size = get_constant_size (exp);
3588 assemble_zeros (asan_red_zone_size (size));
3589 }
3590 }
3591 }
3592
3593 /* Look up EXP in the table of constant descriptors. Return the rtl
3594 if it has been emitted, else null. */
3595
3596 rtx
3597 lookup_constant_def (tree exp)
3598 {
3599 struct constant_descriptor_tree key;
3600
3601 key.value = exp;
3602 key.hash = const_hash_1 (exp);
3603 constant_descriptor_tree *desc
3604 = const_desc_htab->find_with_hash (&key, key.hash);
3605
3606 return (desc ? desc->rtl : NULL_RTX);
3607 }
3608
3609 /* Return a tree representing a reference to constant data in memory
3610 for the constant expression EXP.
3611
3612 This is the counterpart of output_constant_def at the Tree level. */
3613
3614 tree
3615 tree_output_constant_def (tree exp)
3616 {
3617 struct constant_descriptor_tree *desc = add_constant_to_table (exp, 1);
3618 tree decl = SYMBOL_REF_DECL (XEXP (desc->rtl, 0));
3619 varpool_node::finalize_decl (decl);
3620 return decl;
3621 }
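
/* Example (an illustrative sketch; CTOR stands for any TREE_CONSTANT
   CONSTRUCTOR the caller already has in hand):

	tree decl = tree_output_constant_def (ctor);
	tree addr = build_fold_addr_expr (decl);

   DECL is the artificial, read-only VAR_DECL backing the constant, so its
   address can be taken where the bare constructor could not be used.  */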
3622 \f
3623 class GTY((chain_next ("%h.next"), for_user)) constant_descriptor_rtx {
3624 public:
3625 class constant_descriptor_rtx *next;
3626 rtx mem;
3627 rtx sym;
3628 rtx constant;
3629 HOST_WIDE_INT offset;
3630 hashval_t hash;
3631 fixed_size_mode mode;
3632 unsigned int align;
3633 int labelno;
3634 int mark;
3635 };
3636
3637 struct const_rtx_desc_hasher : ggc_ptr_hash<constant_descriptor_rtx>
3638 {
3639 static hashval_t hash (constant_descriptor_rtx *);
3640 static bool equal (constant_descriptor_rtx *, constant_descriptor_rtx *);
3641 };
3642
3643 /* Used in the hash tables to avoid outputting the same constant
3644 twice. Unlike 'struct constant_descriptor_tree', RTX constants
3645 are output once per function, not once per file. */
3646 /* ??? Only a few targets need per-function constant pools. Most
3647 can use one per-file pool. Should add a targetm bit to tell the
3648 difference. */
3649
3650 struct GTY(()) rtx_constant_pool {
3651 /* Pointers to first and last constant in pool, as ordered by offset. */
3652 class constant_descriptor_rtx *first;
3653 class constant_descriptor_rtx *last;
3654
3655 /* Hash facility for making memory-constants from constant rtl-expressions.
3656 It is used on RISC machines where immediate integer arguments and
3657 constant addresses are restricted so that such constants must be stored
3658 in memory. */
3659 hash_table<const_rtx_desc_hasher> *const_rtx_htab;
3660
3661 /* Current offset in constant pool (does not include any
3662 machine-specific header). */
3663 HOST_WIDE_INT offset;
3664 };
3665
3666 /* Hash and compare functions for const_rtx_htab. */
3667
3668 hashval_t
3669 const_rtx_desc_hasher::hash (constant_descriptor_rtx *desc)
3670 {
3671 return desc->hash;
3672 }
3673
3674 bool
3675 const_rtx_desc_hasher::equal (constant_descriptor_rtx *x,
3676 constant_descriptor_rtx *y)
3677 {
3678 if (x->mode != y->mode)
3679 return 0;
3680 return rtx_equal_p (x->constant, y->constant);
3681 }
3682
3683 /* Hash one component of a constant. */
3684
3685 static hashval_t
3686 const_rtx_hash_1 (const_rtx x)
3687 {
3688 unsigned HOST_WIDE_INT hwi;
3689 machine_mode mode;
3690 enum rtx_code code;
3691 hashval_t h;
3692 int i;
3693
3694 code = GET_CODE (x);
3695 mode = GET_MODE (x);
3696 h = (hashval_t) code * 1048573 + mode;
3697
3698 switch (code)
3699 {
3700 case CONST_INT:
3701 hwi = INTVAL (x);
3702
3703 fold_hwi:
3704 {
3705 int shift = sizeof (hashval_t) * CHAR_BIT;
3706 const int n = sizeof (HOST_WIDE_INT) / sizeof (hashval_t);
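	/* For example, with a 32-bit hashval_t and a 64-bit HOST_WIDE_INT,
	   SHIFT is 32 and N is 2: the low half of HWI is XORed in below and
	   a single loop iteration then folds in the high half.  */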
3707
3708 h ^= (hashval_t) hwi;
3709 for (i = 1; i < n; ++i)
3710 {
3711 hwi >>= shift;
3712 h ^= (hashval_t) hwi;
3713 }
3714 }
3715 break;
3716
3717 case CONST_WIDE_INT:
3718 hwi = 0;
3719 {
3720 for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
3721 hwi ^= CONST_WIDE_INT_ELT (x, i);
3722 goto fold_hwi;
3723 }
3724
3725 case CONST_DOUBLE:
3726 if (TARGET_SUPPORTS_WIDE_INT == 0 && mode == VOIDmode)
3727 {
3728 hwi = CONST_DOUBLE_LOW (x) ^ CONST_DOUBLE_HIGH (x);
3729 goto fold_hwi;
3730 }
3731 else
3732 h ^= real_hash (CONST_DOUBLE_REAL_VALUE (x));
3733 break;
3734
3735 case CONST_FIXED:
3736 h ^= fixed_hash (CONST_FIXED_VALUE (x));
3737 break;
3738
3739 case SYMBOL_REF:
3740 h ^= htab_hash_string (XSTR (x, 0));
3741 break;
3742
3743 case LABEL_REF:
3744 h = h * 251 + CODE_LABEL_NUMBER (label_ref_label (x));
3745 break;
3746
3747 case UNSPEC:
3748 case UNSPEC_VOLATILE:
3749 h = h * 251 + XINT (x, 1);
3750 break;
3751
3752 default:
3753 break;
3754 }
3755
3756 return h;
3757 }
3758
3759 /* Compute a hash value for X, which should be a constant. */
3760
3761 static hashval_t
3762 const_rtx_hash (rtx x)
3763 {
3764 hashval_t h = 0;
3765 subrtx_iterator::array_type array;
3766 FOR_EACH_SUBRTX (iter, array, x, ALL)
3767 h = h * 509 + const_rtx_hash_1 (*iter);
3768 return h;
3769 }
3770
3771 \f
3772 /* Create and return a new rtx constant pool. */
3773
3774 static struct rtx_constant_pool *
3775 create_constant_pool (void)
3776 {
3777 struct rtx_constant_pool *pool;
3778
3779 pool = ggc_alloc<rtx_constant_pool> ();
3780 pool->const_rtx_htab = hash_table<const_rtx_desc_hasher>::create_ggc (31);
3781 pool->first = NULL;
3782 pool->last = NULL;
3783 pool->offset = 0;
3784 return pool;
3785 }
3786
3787 /* Initialize constant pool hashing for a new function. */
3788
3789 void
3790 init_varasm_status (void)
3791 {
3792 crtl->varasm.pool = create_constant_pool ();
3793 crtl->varasm.deferred_constants = 0;
3794 }
3795 \f
3796 /* Given a MINUS expression, simplify it if both sides
3797 include the same symbol. */
3798
3799 rtx
3800 simplify_subtraction (rtx x)
3801 {
3802 rtx r = simplify_rtx (x);
3803 return r ? r : x;
3804 }
3805 \f
3806 /* Given a constant rtx X, make (or find) a memory constant for its value
3807 and return a MEM rtx to refer to it in memory. IN_MODE is the mode
3808 of X. */
3809
3810 rtx
3811 force_const_mem (machine_mode in_mode, rtx x)
3812 {
3813 class constant_descriptor_rtx *desc, tmp;
3814 struct rtx_constant_pool *pool;
3815 char label[256];
3816 rtx def, symbol;
3817 hashval_t hash;
3818 unsigned int align;
3819 constant_descriptor_rtx **slot;
3820 fixed_size_mode mode;
3821
3822 /* We can't force variable-sized objects to memory. */
3823 if (!is_a <fixed_size_mode> (in_mode, &mode))
3824 return NULL_RTX;
3825
3826 /* If we're not allowed to drop X into the constant pool, don't. */
3827 if (targetm.cannot_force_const_mem (mode, x))
3828 return NULL_RTX;
3829
3830 /* Record that this function has used a constant pool entry. */
3831 crtl->uses_const_pool = 1;
3832
3833 /* Decide which pool to use. */
3834 pool = (targetm.use_blocks_for_constant_p (mode, x)
3835 ? shared_constant_pool
3836 : crtl->varasm.pool);
3837
3838 /* Lookup the value in the hashtable. */
3839 tmp.constant = x;
3840 tmp.mode = mode;
3841 hash = const_rtx_hash (x);
3842 slot = pool->const_rtx_htab->find_slot_with_hash (&tmp, hash, INSERT);
3843 desc = *slot;
3844
3845 /* If the constant was already present, return its memory. */
3846 if (desc)
3847 return copy_rtx (desc->mem);
3848
3849 /* Otherwise, create a new descriptor. */
3850 desc = ggc_alloc<constant_descriptor_rtx> ();
3851 *slot = desc;
3852
3853 /* Align the location counter as required by EXP's data type. */
3854 machine_mode align_mode = (mode == VOIDmode ? word_mode : mode);
3855 align = targetm.static_rtx_alignment (align_mode);
3856
3857 pool->offset += (align / BITS_PER_UNIT) - 1;
3858 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
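  /* For example, with pool->offset == 5 and ALIGN == 64 bits (8 bytes),
     the two statements above compute 5 + 7 == 12 and 12 & ~7 == 8, so
     this constant is placed at byte offset 8 in the pool.  */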
3859
3860 desc->next = NULL;
3861 desc->constant = copy_rtx (tmp.constant);
3862 desc->offset = pool->offset;
3863 desc->hash = hash;
3864 desc->mode = mode;
3865 desc->align = align;
3866 desc->labelno = const_labelno;
3867 desc->mark = 0;
3868
3869 pool->offset += GET_MODE_SIZE (mode);
3870 if (pool->last)
3871 pool->last->next = desc;
3872 else
3873 pool->first = pool->last = desc;
3874 pool->last = desc;
3875
3876 /* Create a string containing the label name, in LABEL. */
3877 ASM_GENERATE_INTERNAL_LABEL (label, "LC", const_labelno);
3878 ++const_labelno;
3879
3880 /* Construct the SYMBOL_REF. Make sure to mark it as belonging to
3881 the constants pool. */
3882 if (use_object_blocks_p () && targetm.use_blocks_for_constant_p (mode, x))
3883 {
3884 section *sect = targetm.asm_out.select_rtx_section (mode, x, align);
3885 symbol = create_block_symbol (ggc_strdup (label),
3886 get_block_for_section (sect), -1);
3887 }
3888 else
3889 symbol = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (label));
3890 desc->sym = symbol;
3891 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_LOCAL;
3892 CONSTANT_POOL_ADDRESS_P (symbol) = 1;
3893 SET_SYMBOL_REF_CONSTANT (symbol, desc);
3894
3895 /* Construct the MEM. */
3896 desc->mem = def = gen_const_mem (mode, symbol);
3897 set_mem_align (def, align);
3898
3899 /* If we're dropping a label to the constant pool, make sure we
3900 don't delete it. */
3901 if (GET_CODE (x) == LABEL_REF)
3902 LABEL_PRESERVE_P (XEXP (x, 0)) = 1;
3903
3904 return copy_rtx (def);
3905 }
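
/* Example (an illustrative sketch; TARGET stands for a pseudo register the
   caller already has):

	rtx x = gen_int_mode (123, DImode);
	rtx mem = force_const_mem (DImode, x);
	if (mem)
	  emit_move_insn (target, mem);

   A null result means the mode was not fixed-size or the target rejected
   the constant via targetm.cannot_force_const_mem, so the caller must
   materialize X some other way.  */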
3906 \f
3907 /* Given a constant pool SYMBOL_REF, return the corresponding constant. */
3908
3909 rtx
3910 get_pool_constant (const_rtx addr)
3911 {
3912 return SYMBOL_REF_CONSTANT (addr)->constant;
3913 }
3914
3915 /* Given a constant pool SYMBOL_REF, return the corresponding constant
3916 and whether it has been output or not. */
3917
3918 rtx
3919 get_pool_constant_mark (rtx addr, bool *pmarked)
3920 {
3921 class constant_descriptor_rtx *desc;
3922
3923 desc = SYMBOL_REF_CONSTANT (addr);
3924 *pmarked = (desc->mark != 0);
3925 return desc->constant;
3926 }
3927
3928 /* Similar, return the mode. */
3929
3930 fixed_size_mode
3931 get_pool_mode (const_rtx addr)
3932 {
3933 return SYMBOL_REF_CONSTANT (addr)->mode;
3934 }
3935
3936 /* Return TRUE if and only if the constant pool has no entries. Note
3937 that even entries we might end up choosing not to emit are counted
3938 here, so there is the potential for missed optimizations. */
3939
3940 bool
3941 constant_pool_empty_p (void)
3942 {
3943 return crtl->varasm.pool->first == NULL;
3944 }
3945 \f
3946 /* Worker function for output_constant_pool_1. Emit assembly for X
3947 in MODE with known alignment ALIGN. */
3948
3949 static void
3950 output_constant_pool_2 (fixed_size_mode mode, rtx x, unsigned int align)
3951 {
3952 switch (GET_MODE_CLASS (mode))
3953 {
3954 case MODE_FLOAT:
3955 case MODE_DECIMAL_FLOAT:
3956 {
3957 gcc_assert (CONST_DOUBLE_AS_FLOAT_P (x));
3958 assemble_real (*CONST_DOUBLE_REAL_VALUE (x),
3959 as_a <scalar_float_mode> (mode), align, false);
3960 break;
3961 }
3962
3963 case MODE_INT:
3964 case MODE_PARTIAL_INT:
3965 case MODE_FRACT:
3966 case MODE_UFRACT:
3967 case MODE_ACCUM:
3968 case MODE_UACCUM:
3969 assemble_integer (x, GET_MODE_SIZE (mode), align, 1);
3970 break;
3971
3972 case MODE_VECTOR_BOOL:
3973 {
3974 gcc_assert (GET_CODE (x) == CONST_VECTOR);
3975
3976 /* Pick the smallest integer mode that contains at least one
3977 whole element. Often this is byte_mode and contains more
3978 than one element. */
3979 unsigned int nelts = GET_MODE_NUNITS (mode);
3980 unsigned int elt_bits = GET_MODE_BITSIZE (mode) / nelts;
3981 unsigned int int_bits = MAX (elt_bits, BITS_PER_UNIT);
3982 scalar_int_mode int_mode = int_mode_for_size (int_bits, 0).require ();
3983
3984 /* Build the constant up one integer at a time. */
3985 unsigned int elts_per_int = int_bits / elt_bits;
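	/* For example, a 16-element mask whose elements occupy one bit each
	   gives elt_bits == 1, int_bits == 8 and elts_per_int == 8, so the
	   loop below emits two QImode values of 8 packed mask bits each.  */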
3986 for (unsigned int i = 0; i < nelts; i += elts_per_int)
3987 {
3988 unsigned HOST_WIDE_INT value = 0;
3989 unsigned int limit = MIN (nelts - i, elts_per_int);
3990 for (unsigned int j = 0; j < limit; ++j)
3991 if (INTVAL (CONST_VECTOR_ELT (x, i + j)) != 0)
3992 value |= 1 << (j * elt_bits);
3993 output_constant_pool_2 (int_mode, gen_int_mode (value, int_mode),
3994 i != 0 ? MIN (align, int_bits) : align);
3995 }
3996 break;
3997 }
3998 case MODE_VECTOR_FLOAT:
3999 case MODE_VECTOR_INT:
4000 case MODE_VECTOR_FRACT:
4001 case MODE_VECTOR_UFRACT:
4002 case MODE_VECTOR_ACCUM:
4003 case MODE_VECTOR_UACCUM:
4004 {
4005 int i, units;
4006 scalar_mode submode = GET_MODE_INNER (mode);
4007 unsigned int subalign = MIN (align, GET_MODE_BITSIZE (submode));
4008
4009 gcc_assert (GET_CODE (x) == CONST_VECTOR);
4010 units = GET_MODE_NUNITS (mode);
4011
4012 for (i = 0; i < units; i++)
4013 {
4014 rtx elt = CONST_VECTOR_ELT (x, i);
4015 output_constant_pool_2 (submode, elt, i ? subalign : align);
4016 }
4017 }
4018 break;
4019
4020 default:
4021 gcc_unreachable ();
4022 }
4023 }
4024
4025 /* Worker function for output_constant_pool. Emit constant DESC,
4026 giving it ALIGN bits of alignment. */
4027
4028 static void
4029 output_constant_pool_1 (class constant_descriptor_rtx *desc,
4030 unsigned int align)
4031 {
4032 rtx x, tmp;
4033
4034 x = desc->constant;
4035
4036 /* See if X is a LABEL_REF (or a CONST referring to a LABEL_REF)
4037 whose CODE_LABEL has been deleted. This can occur if a jump table
4038 is eliminated by optimization. If so, write a constant of zero
4039 instead. Note that this can also happen by turning the
4040 CODE_LABEL into a NOTE. */
4041 /* ??? This seems completely and utterly wrong. Certainly it's
4042 not true for NOTE_INSN_DELETED_LABEL, but I disbelieve proper
4043 functioning even with rtx_insn::deleted and friends. */
4044
4045 tmp = x;
4046 switch (GET_CODE (tmp))
4047 {
4048 case CONST:
4049 if (GET_CODE (XEXP (tmp, 0)) != PLUS
4050 || GET_CODE (XEXP (XEXP (tmp, 0), 0)) != LABEL_REF)
4051 break;
4052 tmp = XEXP (XEXP (tmp, 0), 0);
4053 /* FALLTHRU */
4054
4055 case LABEL_REF:
4056 {
4057 rtx_insn *insn = label_ref_label (tmp);
4058 gcc_assert (!insn->deleted ());
4059 gcc_assert (!NOTE_P (insn)
4060 || NOTE_KIND (insn) != NOTE_INSN_DELETED);
4061 break;
4062 }
4063
4064 default:
4065 break;
4066 }
4067
4068 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4069 ASM_OUTPUT_SPECIAL_POOL_ENTRY (asm_out_file, x, desc->mode,
4070 align, desc->labelno, done);
4071 #endif
4072
4073 assemble_align (align);
4074
4075 /* Output the label. */
4076 targetm.asm_out.internal_label (asm_out_file, "LC", desc->labelno);
4077
4078   /* Output the data.  Pass the constant's recorded alignment (desc->align)
4079      rather than ALIGN: callers may invoke output_constant_pool_1 with an
4080      alignment of 1 on the assumption that the data is already aligned, and
4081      passing the actual alignment when emitting string constants prevents
4082      the generation of fix-up table entries.  */
4083 output_constant_pool_2 (desc->mode, x, desc->align);
4084
4085 /* Make sure all constants in SECTION_MERGE and not SECTION_STRINGS
4086 sections have proper size. */
4087 if (align > GET_MODE_BITSIZE (desc->mode)
4088 && in_section
4089 && (in_section->common.flags & SECTION_MERGE))
4090 assemble_align (align);
4091
4092 #ifdef ASM_OUTPUT_SPECIAL_POOL_ENTRY
4093 done:
4094 #endif
4095 return;
4096 }
4097
4098 /* Recompute the offsets of entries in POOL, and the overall size of
4099 POOL. Do this after calling mark_constant_pool to ensure that we
4100 are computing the offset values for the pool which we will actually
4101 emit. */
4102
4103 static void
4104 recompute_pool_offsets (struct rtx_constant_pool *pool)
4105 {
4106 class constant_descriptor_rtx *desc;
4107 pool->offset = 0;
4108
4109 for (desc = pool->first; desc ; desc = desc->next)
4110 if (desc->mark)
4111 {
4112 /* Recalculate offset. */
4113 unsigned int align = desc->align;
4114 pool->offset += (align / BITS_PER_UNIT) - 1;
4115 pool->offset &= ~ ((align / BITS_PER_UNIT) - 1);
4116 desc->offset = pool->offset;
4117 pool->offset += GET_MODE_SIZE (desc->mode);
4118 }
4119 }
4120
4121 /* Mark all constants that are referenced by SYMBOL_REFs in the pattern
4122    of INSN.  Emit referenced deferred strings.  */
4123
4124 static void
4125 mark_constants_in_pattern (rtx insn)
4126 {
4127 subrtx_iterator::array_type array;
4128 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), ALL)
4129 {
4130 const_rtx x = *iter;
4131 if (GET_CODE (x) == SYMBOL_REF)
4132 {
4133 if (CONSTANT_POOL_ADDRESS_P (x))
4134 {
4135 class constant_descriptor_rtx *desc = SYMBOL_REF_CONSTANT (x);
4136 if (desc->mark == 0)
4137 {
4138 desc->mark = 1;
4139 iter.substitute (desc->constant);
4140 }
4141 }
4142 else if (TREE_CONSTANT_POOL_ADDRESS_P (x))
4143 {
4144 tree decl = SYMBOL_REF_DECL (x);
4145 if (!TREE_ASM_WRITTEN (DECL_INITIAL (decl)))
4146 {
4147 n_deferred_constants--;
4148 output_constant_def_contents (CONST_CAST_RTX (x));
4149 }
4150 }
4151 }
4152 }
4153 }
4154
4155 /* Look through appropriate parts of INSN, marking all entries in the
4156 constant pool which are actually being used. Entries that are only
4157 referenced by other constants are also marked as used. Emit
4158 deferred strings that are used. */
4159
4160 static void
4161 mark_constants (rtx_insn *insn)
4162 {
4163 if (!INSN_P (insn))
4164 return;
4165
4166 /* Insns may appear inside a SEQUENCE. Only check the patterns of
4167 insns, not any notes that may be attached. We don't want to mark
4168 a constant just because it happens to appear in a REG_EQUIV note. */
4169 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
4170 {
4171 int i, n = seq->len ();
4172 for (i = 0; i < n; ++i)
4173 {
4174 rtx subinsn = seq->element (i);
4175 if (INSN_P (subinsn))
4176 mark_constants_in_pattern (subinsn);
4177 }
4178 }
4179 else
4180 mark_constants_in_pattern (insn);
4181 }
4182
4183 /* Look through the instructions for this function, and mark all the
4184 entries in POOL which are actually being used. Emit deferred constants
4185 which have indeed been used. */
4186
4187 static void
4188 mark_constant_pool (void)
4189 {
4190 rtx_insn *insn;
4191
4192 if (!crtl->uses_const_pool && n_deferred_constants == 0)
4193 return;
4194
4195 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4196 mark_constants (insn);
4197 }
4198
4199 /* Write all the constants in POOL. */
4200
4201 static void
4202 output_constant_pool_contents (struct rtx_constant_pool *pool)
4203 {
4204 class constant_descriptor_rtx *desc;
4205
4206 for (desc = pool->first; desc ; desc = desc->next)
4207 if (desc->mark < 0)
4208 {
4209 #ifdef ASM_OUTPUT_DEF
4210 const char *name = targetm.strip_name_encoding (XSTR (desc->sym, 0));
4211 char label[256];
4212 char buffer[256 + 32];
4213 const char *p;
4214
4215 ASM_GENERATE_INTERNAL_LABEL (label, "LC", ~desc->mark);
4216 p = targetm.strip_name_encoding (label);
4217 if (desc->offset)
4218 {
4219 sprintf (buffer, "%s+%ld", p, (long) (desc->offset));
4220 p = buffer;
4221 }
4222 ASM_OUTPUT_DEF (asm_out_file, name, p);
4223 #else
4224 gcc_unreachable ();
4225 #endif
4226 }
4227 else if (desc->mark)
4228 {
4229 /* If the constant is part of an object_block, make sure that
4230 the constant has been positioned within its block, but do not
4231 write out its definition yet. output_object_blocks will do
4232 that later. */
4233 if (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4234 && SYMBOL_REF_BLOCK (desc->sym))
4235 place_block_symbol (desc->sym);
4236 else
4237 {
4238 switch_to_section (targetm.asm_out.select_rtx_section
4239 (desc->mode, desc->constant, desc->align));
4240 output_constant_pool_1 (desc, desc->align);
4241 }
4242 }
4243 }
4244
4245 struct constant_descriptor_rtx_data {
4246 constant_descriptor_rtx *desc;
4247 target_unit *bytes;
4248 unsigned short size;
4249 unsigned short offset;
4250 unsigned int hash;
4251 };
4252
4253 /* qsort callback to sort constant_descriptor_rtx_data * vector by
4254 decreasing size. */
4255
4256 static int
4257 constant_descriptor_rtx_data_cmp (const void *p1, const void *p2)
4258 {
4259 constant_descriptor_rtx_data *const data1
4260 = *(constant_descriptor_rtx_data * const *) p1;
4261 constant_descriptor_rtx_data *const data2
4262 = *(constant_descriptor_rtx_data * const *) p2;
4263 if (data1->size > data2->size)
4264 return -1;
4265 if (data1->size < data2->size)
4266 return 1;
4267 if (data1->hash < data2->hash)
4268 return -1;
4269 gcc_assert (data1->hash > data2->hash);
4270 return 1;
4271 }
4272
4273 struct const_rtx_data_hasher : nofree_ptr_hash<constant_descriptor_rtx_data>
4274 {
4275 static hashval_t hash (constant_descriptor_rtx_data *);
4276 static bool equal (constant_descriptor_rtx_data *,
4277 constant_descriptor_rtx_data *);
4278 };
4279
4280 /* Hash and compare functions for const_rtx_data_htab. */
4281
4282 hashval_t
4283 const_rtx_data_hasher::hash (constant_descriptor_rtx_data *data)
4284 {
4285 return data->hash;
4286 }
4287
4288 bool
4289 const_rtx_data_hasher::equal (constant_descriptor_rtx_data *x,
4290 constant_descriptor_rtx_data *y)
4291 {
4292 if (x->hash != y->hash || x->size != y->size)
4293 return 0;
4294 unsigned int align1 = x->desc->align;
4295 unsigned int align2 = y->desc->align;
4296 unsigned int offset1 = (x->offset * BITS_PER_UNIT) & (align1 - 1);
4297 unsigned int offset2 = (y->offset * BITS_PER_UNIT) & (align2 - 1);
4298 if (offset1)
4299 align1 = least_bit_hwi (offset1);
4300 if (offset2)
4301 align2 = least_bit_hwi (offset2);
4302 if (align2 > align1)
4303 return 0;
4304 if (memcmp (x->bytes, y->bytes, x->size * sizeof (target_unit)) != 0)
4305 return 0;
4306 return 1;
4307 }
4308
4309 /* Attempt to optimize constant pool POOL. If it contains both CONST_VECTOR
4310 constants and scalar constants with the values of CONST_VECTOR elements,
4311 try to alias the scalar constants with the CONST_VECTOR elements. */
4312
4313 static void
4314 optimize_constant_pool (struct rtx_constant_pool *pool)
4315 {
4316 auto_vec<target_unit, 128> buffer;
4317 auto_vec<constant_descriptor_rtx_data *, 128> vec;
4318 object_allocator<constant_descriptor_rtx_data>
4319 data_pool ("constant_descriptor_rtx_data_pool");
4320 int idx = 0;
4321 size_t size = 0;
4322 for (constant_descriptor_rtx *desc = pool->first; desc; desc = desc->next)
4323 if (desc->mark > 0
4324 && ! (SYMBOL_REF_HAS_BLOCK_INFO_P (desc->sym)
4325 && SYMBOL_REF_BLOCK (desc->sym)))
4326 {
4327 buffer.truncate (0);
4328 buffer.reserve (GET_MODE_SIZE (desc->mode));
4329 if (native_encode_rtx (desc->mode, desc->constant, buffer, 0,
4330 GET_MODE_SIZE (desc->mode)))
4331 {
4332 constant_descriptor_rtx_data *data = data_pool.allocate ();
4333 data->desc = desc;
4334 data->bytes = NULL;
4335 data->size = GET_MODE_SIZE (desc->mode);
4336 data->offset = 0;
4337 data->hash = idx++;
4338 size += data->size;
4339 vec.safe_push (data);
4340 }
4341 }
4342 if (idx)
4343 {
4344 vec.qsort (constant_descriptor_rtx_data_cmp);
4345 unsigned min_size = vec.last ()->size;
4346 target_unit *bytes = XNEWVEC (target_unit, size);
4347 unsigned int i;
4348 constant_descriptor_rtx_data *data;
4349 hash_table<const_rtx_data_hasher> * htab
4350 = new hash_table<const_rtx_data_hasher> (31);
4351 size = 0;
4352 FOR_EACH_VEC_ELT (vec, i, data)
4353 {
4354 buffer.truncate (0);
4355 native_encode_rtx (data->desc->mode, data->desc->constant,
4356 buffer, 0, data->size);
4357 memcpy (bytes + size, buffer.address (), data->size);
4358 data->bytes = bytes + size;
4359 data->hash = iterative_hash (data->bytes,
4360 data->size * sizeof (target_unit), 0);
4361 size += data->size;
4362 constant_descriptor_rtx_data **slot
4363 = htab->find_slot_with_hash (data, data->hash, INSERT);
4364 if (*slot)
4365 {
4366 data->desc->mark = ~(*slot)->desc->labelno;
4367 data->desc->offset = (*slot)->offset;
4368 }
4369 else
4370 {
4371 unsigned int sz = 1 << floor_log2 (data->size);
4372
4373 *slot = data;
4374 for (sz >>= 1; sz >= min_size; sz >>= 1)
4375 for (unsigned off = 0; off + sz <= data->size; off += sz)
4376 {
4377 constant_descriptor_rtx_data tmp;
4378 tmp.desc = data->desc;
4379 tmp.bytes = data->bytes + off;
4380 tmp.size = sz;
4381 tmp.offset = off;
4382 tmp.hash = iterative_hash (tmp.bytes,
4383 sz * sizeof (target_unit), 0);
4384 slot = htab->find_slot_with_hash (&tmp, tmp.hash, INSERT);
4385 if (*slot == NULL)
4386 {
4387 *slot = data_pool.allocate ();
4388 **slot = tmp;
4389 }
4390 }
4391 }
4392 }
4393 delete htab;
4394 XDELETE (bytes);
4395 }
4396 data_pool.release ();
4397 }
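
/* For example (with hypothetical pool contents), if the pool holds a
   16-byte V4SImode vector and an 8-byte DImode scalar whose byte image
   matches the first half of the vector, the scalar entry is not emitted
   separately: its label is instead defined via ASM_OUTPUT_DEF as the
   vector's label plus an offset, provided the alignment at that offset
   is sufficient.  */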
4398
4399 /* Mark all constants that are used in the current function, then write
4400 out the function's private constant pool. */
4401
4402 static void
4403 output_constant_pool (const char *fnname ATTRIBUTE_UNUSED,
4404 tree fndecl ATTRIBUTE_UNUSED)
4405 {
4406 struct rtx_constant_pool *pool = crtl->varasm.pool;
4407
4408 /* It is possible for gcc to call force_const_mem and then to later
4409 discard the instructions which refer to the constant. In such a
4410 case we do not need to output the constant. */
4411 mark_constant_pool ();
4412
4413 /* Having marked the constant pool entries we'll actually emit, we
4414 now need to rebuild the offset information, which may have become
4415 stale. */
4416 recompute_pool_offsets (pool);
4417
4418 #ifdef ASM_OUTPUT_POOL_PROLOGUE
4419 ASM_OUTPUT_POOL_PROLOGUE (asm_out_file, fnname, fndecl, pool->offset);
4420 #endif
4421
4422 output_constant_pool_contents (pool);
4423
4424 #ifdef ASM_OUTPUT_POOL_EPILOGUE
4425 ASM_OUTPUT_POOL_EPILOGUE (asm_out_file, fnname, fndecl, pool->offset);
4426 #endif
4427 }
4428 \f
4429 /* Write the contents of the shared constant pool. */
4430
4431 void
4432 output_shared_constant_pool (void)
4433 {
4434 if (optimize
4435 && TARGET_SUPPORTS_ALIASES)
4436 optimize_constant_pool (shared_constant_pool);
4437
4438 output_constant_pool_contents (shared_constant_pool);
4439 }
4440 \f
4441 /* Determine what kind of relocations EXP may need.  The result is a mask
   with bit 0 set for relocations against symbols that bind locally and
   bit 1 set for relocations against symbols that do not.  */
4442
4443 int
4444 compute_reloc_for_constant (tree exp)
4445 {
4446 int reloc = 0, reloc2;
4447 tree tem;
4448
4449 switch (TREE_CODE (exp))
4450 {
4451 case ADDR_EXPR:
4452 case FDESC_EXPR:
4453 /* Go inside any operations that get_inner_reference can handle and see
4454 if what's inside is a constant: no need to do anything here for
4455 addresses of variables or functions. */
4456 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4457 tem = TREE_OPERAND (tem, 0))
4458 ;
4459
4460 if (TREE_CODE (tem) == MEM_REF
4461 && TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR)
4462 {
4463 reloc = compute_reloc_for_constant (TREE_OPERAND (tem, 0));
4464 break;
4465 }
4466
4467 if (!targetm.binds_local_p (tem))
4468 reloc |= 2;
4469 else
4470 reloc |= 1;
4471 break;
4472
4473 case PLUS_EXPR:
4474 case POINTER_PLUS_EXPR:
4475 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4476 reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4477 break;
4478
4479 case MINUS_EXPR:
4480 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4481 reloc2 = compute_reloc_for_constant (TREE_OPERAND (exp, 1));
4482 /* The difference of two local labels is computable at link time. */
4483 if (reloc == 1 && reloc2 == 1)
4484 reloc = 0;
4485 else
4486 reloc |= reloc2;
4487 break;
4488
4489 CASE_CONVERT:
4490 case VIEW_CONVERT_EXPR:
4491 reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
4492 break;
4493
4494 case CONSTRUCTOR:
4495 {
4496 unsigned HOST_WIDE_INT idx;
4497 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4498 if (tem != 0)
4499 reloc |= compute_reloc_for_constant (tem);
4500 }
4501 break;
4502
4503 default:
4504 break;
4505 }
4506 return reloc;
4507 }
4508
4509 /* Find all the constants whose addresses are referenced inside of EXP,
4510 and make sure assembler code with a label has been output for each one.
4511    DEFER is passed on to output_constant_def.  */
4512
4513 static void
4514 output_addressed_constants (tree exp, int defer)
4515 {
4516 tree tem;
4517
4518 switch (TREE_CODE (exp))
4519 {
4520 case ADDR_EXPR:
4521 case FDESC_EXPR:
4522 /* Go inside any operations that get_inner_reference can handle and see
4523 if what's inside is a constant: no need to do anything here for
4524 addresses of variables or functions. */
4525 for (tem = TREE_OPERAND (exp, 0); handled_component_p (tem);
4526 tem = TREE_OPERAND (tem, 0))
4527 ;
4528
4529 /* If we have an initialized CONST_DECL, retrieve the initializer. */
4530 if (TREE_CODE (tem) == CONST_DECL && DECL_INITIAL (tem))
4531 tem = DECL_INITIAL (tem);
4532
4533 if (CONSTANT_CLASS_P (tem) || TREE_CODE (tem) == CONSTRUCTOR)
4534 output_constant_def (tem, defer);
4535
4536 if (TREE_CODE (tem) == MEM_REF)
4537 output_addressed_constants (TREE_OPERAND (tem, 0), defer);
4538 break;
4539
4540 case PLUS_EXPR:
4541 case POINTER_PLUS_EXPR:
4542 case MINUS_EXPR:
4543 output_addressed_constants (TREE_OPERAND (exp, 1), defer);
4544 gcc_fallthrough ();
4545
4546 CASE_CONVERT:
4547 case VIEW_CONVERT_EXPR:
4548 output_addressed_constants (TREE_OPERAND (exp, 0), defer);
4549 break;
4550
4551 case CONSTRUCTOR:
4552 {
4553 unsigned HOST_WIDE_INT idx;
4554 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, tem)
4555 if (tem != 0)
4556 output_addressed_constants (tem, defer);
4557 }
4558 break;
4559
4560 default:
4561 break;
4562 }
4563 }
4564 \f
4565 /* Whether a constructor CTOR is a valid static constant initializer if all
4566 its elements are. This used to be internal to initializer_constant_valid_p
4567 and has been exposed to let other functions like categorize_ctor_elements
4568 evaluate the property while walking a constructor for other purposes. */
4569
4570 bool
4571 constructor_static_from_elts_p (const_tree ctor)
4572 {
4573 return (TREE_CONSTANT (ctor)
4574 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4575 || TREE_CODE (TREE_TYPE (ctor)) == RECORD_TYPE
4576 || TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE));
4577 }
4578
4579 static tree initializer_constant_valid_p_1 (tree value, tree endtype,
4580 tree *cache);
4581
4582 /* A subroutine of initializer_constant_valid_p. VALUE is a MINUS_EXPR,
4583 PLUS_EXPR or POINTER_PLUS_EXPR. This looks for cases of VALUE
4584 which are valid when ENDTYPE is an integer of any size; in
4585 particular, this does not accept a pointer minus a constant. This
4586 returns null_pointer_node if the VALUE is an absolute constant
4587 which can be used to initialize a static variable. Otherwise it
4588 returns NULL. */
4589
4590 static tree
4591 narrowing_initializer_constant_valid_p (tree value, tree endtype, tree *cache)
4592 {
4593 tree op0, op1;
4594
4595 if (!INTEGRAL_TYPE_P (endtype))
4596 return NULL_TREE;
4597
4598 op0 = TREE_OPERAND (value, 0);
4599 op1 = TREE_OPERAND (value, 1);
4600
4601 /* Like STRIP_NOPS except allow the operand mode to widen. This
4602 works around a feature of fold that simplifies (int)(p1 - p2) to
4603 ((int)p1 - (int)p2) under the theory that the narrower operation
4604 is cheaper. */
4605
4606 while (CONVERT_EXPR_P (op0)
4607 || TREE_CODE (op0) == NON_LVALUE_EXPR)
4608 {
4609 tree inner = TREE_OPERAND (op0, 0);
4610 if (inner == error_mark_node
4611 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4612 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op0)))
4613 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4614 break;
4615 op0 = inner;
4616 }
4617
4618 while (CONVERT_EXPR_P (op1)
4619 || TREE_CODE (op1) == NON_LVALUE_EXPR)
4620 {
4621 tree inner = TREE_OPERAND (op1, 0);
4622 if (inner == error_mark_node
4623 || ! INTEGRAL_MODE_P (TYPE_MODE (TREE_TYPE (inner)))
4624 || (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (op1)))
4625 > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (inner)))))
4626 break;
4627 op1 = inner;
4628 }
4629
4630 op0 = initializer_constant_valid_p_1 (op0, endtype, cache);
4631 if (!op0)
4632 return NULL_TREE;
4633
4634 op1 = initializer_constant_valid_p_1 (op1, endtype,
4635 cache ? cache + 2 : NULL);
4636 /* Both initializers must be known. */
4637 if (op1)
4638 {
4639 if (op0 == op1
4640 && (op0 == null_pointer_node
4641 || TREE_CODE (value) == MINUS_EXPR))
4642 return null_pointer_node;
4643
4644 /* Support differences between labels. */
4645 if (TREE_CODE (op0) == LABEL_DECL
4646 && TREE_CODE (op1) == LABEL_DECL)
4647 return null_pointer_node;
4648
4649 if (TREE_CODE (op0) == STRING_CST
4650 && TREE_CODE (op1) == STRING_CST
4651 && operand_equal_p (op0, op1, 1))
4652 return null_pointer_node;
4653 }
4654
4655 return NULL_TREE;
4656 }
4657
4658 /* Helper function of initializer_constant_valid_p.
4659 Return nonzero if VALUE is a valid constant-valued expression
4660 for use in initializing a static variable; one that can be an
4661 element of a "constant" initializer.
4662
4663 Return null_pointer_node if the value is absolute;
4664 if it is relocatable, return the variable that determines the relocation.
4665 We assume that VALUE has been folded as much as possible;
4666 therefore, we do not need to check for such things as
4667 arithmetic-combinations of integers.
4668
4669 Use CACHE (pointer to 2 tree values) for caching if non-NULL. */
4670
4671 static tree
4672 initializer_constant_valid_p_1 (tree value, tree endtype, tree *cache)
4673 {
4674 tree ret;
4675
4676 switch (TREE_CODE (value))
4677 {
4678 case CONSTRUCTOR:
4679 if (constructor_static_from_elts_p (value))
4680 {
4681 unsigned HOST_WIDE_INT idx;
4682 tree elt;
4683 bool absolute = true;
4684
4685 if (cache && cache[0] == value)
4686 return cache[1];
4687 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4688 {
4689 tree reloc;
4690 reloc = initializer_constant_valid_p_1 (elt, TREE_TYPE (elt),
4691 NULL);
4692 if (!reloc
4693 /* An absolute value is required with reverse SSO. */
4694 || (reloc != null_pointer_node
4695 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (value))
4696 && !AGGREGATE_TYPE_P (TREE_TYPE (elt))))
4697 {
4698 if (cache)
4699 {
4700 cache[0] = value;
4701 cache[1] = NULL_TREE;
4702 }
4703 return NULL_TREE;
4704 }
4705 if (reloc != null_pointer_node)
4706 absolute = false;
4707 }
4708 /* For a non-absolute relocation, there is no single
4709 variable that can be "the variable that determines the
4710 relocation." */
4711 if (cache)
4712 {
4713 cache[0] = value;
4714 cache[1] = absolute ? null_pointer_node : error_mark_node;
4715 }
4716 return absolute ? null_pointer_node : error_mark_node;
4717 }
4718
4719 return TREE_STATIC (value) ? null_pointer_node : NULL_TREE;
4720
4721 case INTEGER_CST:
4722 case VECTOR_CST:
4723 case REAL_CST:
4724 case FIXED_CST:
4725 case STRING_CST:
4726 case COMPLEX_CST:
4727 return null_pointer_node;
4728
4729 case ADDR_EXPR:
4730 case FDESC_EXPR:
4731 {
4732 tree op0 = staticp (TREE_OPERAND (value, 0));
4733 if (op0)
4734 {
4735 /* "&(*a).f" is like unto pointer arithmetic. If "a" turns out
4736 to be a constant, this is old-skool offsetof-like nonsense. */
4737 if (TREE_CODE (op0) == INDIRECT_REF
4738 && TREE_CONSTANT (TREE_OPERAND (op0, 0)))
4739 return null_pointer_node;
4740 /* Taking the address of a nested function involves a trampoline,
4741 unless we don't need or want one. */
4742 if (TREE_CODE (op0) == FUNCTION_DECL
4743 && DECL_STATIC_CHAIN (op0)
4744 && !TREE_NO_TRAMPOLINE (value))
4745 return NULL_TREE;
4746 /* "&{...}" requires a temporary to hold the constructed
4747 object. */
4748 if (TREE_CODE (op0) == CONSTRUCTOR)
4749 return NULL_TREE;
4750 }
4751 return op0;
4752 }
4753
4754 case NON_LVALUE_EXPR:
4755 return initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4756 endtype, cache);
4757
4758 case VIEW_CONVERT_EXPR:
4759 {
4760 tree src = TREE_OPERAND (value, 0);
4761 tree src_type = TREE_TYPE (src);
4762 tree dest_type = TREE_TYPE (value);
4763
4764 /* Allow view-conversions from aggregate to non-aggregate type only
4765 if the bit pattern is fully preserved afterwards; otherwise, the
4766 RTL expander won't be able to apply a subsequent transformation
4767 to the underlying constructor. */
4768 if (AGGREGATE_TYPE_P (src_type) && !AGGREGATE_TYPE_P (dest_type))
4769 {
4770 if (TYPE_MODE (endtype) == TYPE_MODE (dest_type))
4771 return initializer_constant_valid_p_1 (src, endtype, cache);
4772 else
4773 return NULL_TREE;
4774 }
4775
4776 /* Allow all other kinds of view-conversion. */
4777 return initializer_constant_valid_p_1 (src, endtype, cache);
4778 }
4779
4780 CASE_CONVERT:
4781 {
4782 tree src = TREE_OPERAND (value, 0);
4783 tree src_type = TREE_TYPE (src);
4784 tree dest_type = TREE_TYPE (value);
4785
4786 /* Allow conversions between pointer types, floating-point
4787 types, and offset types. */
4788 if ((POINTER_TYPE_P (dest_type) && POINTER_TYPE_P (src_type))
4789 || (FLOAT_TYPE_P (dest_type) && FLOAT_TYPE_P (src_type))
4790 || (TREE_CODE (dest_type) == OFFSET_TYPE
4791 && TREE_CODE (src_type) == OFFSET_TYPE))
4792 return initializer_constant_valid_p_1 (src, endtype, cache);
4793
4794 /* Allow length-preserving conversions between integer types. */
4795 if (INTEGRAL_TYPE_P (dest_type) && INTEGRAL_TYPE_P (src_type)
4796 && (TYPE_PRECISION (dest_type) == TYPE_PRECISION (src_type)))
4797 return initializer_constant_valid_p_1 (src, endtype, cache);
4798
4799 	/* Allow conversions between other integer types only if the
4800 	   value is explicit.  Don't allow sign-extension to a type larger
4801 	   than both the word and pointer size; there are no relocations
4802 	   that could sign-extend the value to a wider type.  */
4803 if (INTEGRAL_TYPE_P (dest_type)
4804 && INTEGRAL_TYPE_P (src_type)
4805 && (TYPE_UNSIGNED (src_type)
4806 || TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type)
4807 || TYPE_PRECISION (dest_type) <= BITS_PER_WORD
4808 || TYPE_PRECISION (dest_type) <= POINTER_SIZE))
4809 {
4810 tree inner = initializer_constant_valid_p_1 (src, endtype, cache);
4811 if (inner == null_pointer_node)
4812 return null_pointer_node;
4813 break;
4814 }
4815
4816 /* Allow (int) &foo provided int is as wide as a pointer. */
4817 if (INTEGRAL_TYPE_P (dest_type) && POINTER_TYPE_P (src_type)
4818 && (TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type)))
4819 return initializer_constant_valid_p_1 (src, endtype, cache);
4820
4821 /* Likewise conversions from int to pointers, but also allow
4822 conversions from 0. */
4823 if ((POINTER_TYPE_P (dest_type)
4824 || TREE_CODE (dest_type) == OFFSET_TYPE)
4825 && INTEGRAL_TYPE_P (src_type))
4826 {
4827 if (TREE_CODE (src) == INTEGER_CST
4828 && TYPE_PRECISION (dest_type) >= TYPE_PRECISION (src_type))
4829 return null_pointer_node;
4830 if (integer_zerop (src))
4831 return null_pointer_node;
4832 else if (TYPE_PRECISION (dest_type) <= TYPE_PRECISION (src_type))
4833 return initializer_constant_valid_p_1 (src, endtype, cache);
4834 }
4835
4836 /* Allow conversions to struct or union types if the value
4837 inside is okay. */
4838 if (TREE_CODE (dest_type) == RECORD_TYPE
4839 || TREE_CODE (dest_type) == UNION_TYPE)
4840 return initializer_constant_valid_p_1 (src, endtype, cache);
4841 }
4842 break;
4843
4844 case POINTER_PLUS_EXPR:
4845 case PLUS_EXPR:
4846 /* Any valid floating-point constants will have been folded by now;
4847 with -frounding-math we hit this with addition of two constants. */
4848 if (TREE_CODE (endtype) == REAL_TYPE)
4849 return NULL_TREE;
4850 if (cache && cache[0] == value)
4851 return cache[1];
4852 if (! INTEGRAL_TYPE_P (endtype)
4853 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4854 {
4855 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4856 tree valid0
4857 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4858 endtype, ncache);
4859 tree valid1
4860 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4861 endtype, ncache + 2);
4862 /* If either term is absolute, use the other term's relocation. */
4863 if (valid0 == null_pointer_node)
4864 ret = valid1;
4865 else if (valid1 == null_pointer_node)
4866 ret = valid0;
4867 /* Support narrowing pointer differences. */
4868 else
4869 ret = narrowing_initializer_constant_valid_p (value, endtype,
4870 ncache);
4871 }
4872 else
4873 /* Support narrowing pointer differences. */
4874 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4875 if (cache)
4876 {
4877 cache[0] = value;
4878 cache[1] = ret;
4879 }
4880 return ret;
4881
4882 case POINTER_DIFF_EXPR:
4883 case MINUS_EXPR:
4884 if (TREE_CODE (endtype) == REAL_TYPE)
4885 return NULL_TREE;
4886 if (cache && cache[0] == value)
4887 return cache[1];
4888 if (! INTEGRAL_TYPE_P (endtype)
4889 || TYPE_PRECISION (endtype) >= TYPE_PRECISION (TREE_TYPE (value)))
4890 {
4891 tree ncache[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
4892 tree valid0
4893 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 0),
4894 endtype, ncache);
4895 tree valid1
4896 = initializer_constant_valid_p_1 (TREE_OPERAND (value, 1),
4897 endtype, ncache + 2);
4898 /* Win if second argument is absolute. */
4899 if (valid1 == null_pointer_node)
4900 ret = valid0;
4901 /* Win if both arguments have the same relocation.
4902 Then the value is absolute. */
4903 else if (valid0 == valid1 && valid0 != 0)
4904 ret = null_pointer_node;
4905 /* Since GCC guarantees that string constants are unique in the
4906 generated code, a subtraction between two copies of the same
4907 constant string is absolute. */
4908 else if (valid0 && TREE_CODE (valid0) == STRING_CST
4909 && valid1 && TREE_CODE (valid1) == STRING_CST
4910 && operand_equal_p (valid0, valid1, 1))
4911 ret = null_pointer_node;
4912 /* Support narrowing differences. */
4913 else
4914 ret = narrowing_initializer_constant_valid_p (value, endtype,
4915 ncache);
4916 }
4917 else
4918 /* Support narrowing differences. */
4919 ret = narrowing_initializer_constant_valid_p (value, endtype, NULL);
4920 if (cache)
4921 {
4922 cache[0] = value;
4923 cache[1] = ret;
4924 }
4925 return ret;
4926
4927 default:
4928 break;
4929 }
4930
4931 return NULL_TREE;
4932 }
4933
4934 /* Return nonzero if VALUE is a valid constant-valued expression
4935 for use in initializing a static variable; one that can be an
4936 element of a "constant" initializer.
4937
4938 Return null_pointer_node if the value is absolute;
4939 if it is relocatable, return the variable that determines the relocation.
4940 We assume that VALUE has been folded as much as possible;
4941 therefore, we do not need to check for such things as
4942 arithmetic-combinations of integers. */
4943 tree
4944 initializer_constant_valid_p (tree value, tree endtype, bool reverse)
4945 {
4946 tree reloc = initializer_constant_valid_p_1 (value, endtype, NULL);
4947
4948 /* An absolute value is required with reverse storage order. */
4949 if (reloc
4950 && reloc != null_pointer_node
4951 && reverse
4952 && !AGGREGATE_TYPE_P (endtype)
4953 && !VECTOR_TYPE_P (endtype))
4954 reloc = NULL_TREE;
4955
4956 return reloc;
4957 }
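
/* Example (an illustrative sketch; INIT and TYPE stand for an initializer
   and its type that the caller already has):

	tree reloc = initializer_constant_valid_p (init, type, false);

   A result of null_pointer_node means INIT is absolute, any other non-null
   result is the variable the initializer is relocated against, and
   NULL_TREE means INIT is not usable as a static initializer.  */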
4958 \f
4959 /* Return true if VALUE is a valid constant-valued expression
4960 for use in initializing a static bit-field; one that can be
4961 an element of a "constant" initializer. */
4962
4963 bool
4964 initializer_constant_valid_for_bitfield_p (tree value)
4965 {
4966 /* For bitfields we support integer constants or possibly nested aggregates
4967 of such. */
4968 switch (TREE_CODE (value))
4969 {
4970 case CONSTRUCTOR:
4971 {
4972 unsigned HOST_WIDE_INT idx;
4973 tree elt;
4974
4975 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (value), idx, elt)
4976 if (!initializer_constant_valid_for_bitfield_p (elt))
4977 return false;
4978 return true;
4979 }
4980
4981 case INTEGER_CST:
4982 case REAL_CST:
4983 return true;
4984
4985 case VIEW_CONVERT_EXPR:
4986 case NON_LVALUE_EXPR:
4987 return
4988 initializer_constant_valid_for_bitfield_p (TREE_OPERAND (value, 0));
4989
4990 default:
4991 break;
4992 }
4993
4994 return false;
4995 }
4996
4997 /* Check if a STRING_CST fits into the field.
4998 Tolerate only the case when the NUL termination
4999 does not fit into the field. */
5000
5001 static bool
5002 check_string_literal (tree string, unsigned HOST_WIDE_INT size)
5003 {
5004 tree type = TREE_TYPE (string);
5005 tree eltype = TREE_TYPE (type);
5006 unsigned HOST_WIDE_INT elts = tree_to_uhwi (TYPE_SIZE_UNIT (eltype));
5007 unsigned HOST_WIDE_INT mem_size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
5008 int len = TREE_STRING_LENGTH (string);
5009
5010 if (elts != 1 && elts != 2 && elts != 4)
5011 return false;
5012 if (len < 0 || len % elts != 0)
5013 return false;
5014 if (size < (unsigned)len)
5015 return false;
5016 if (mem_size != size)
5017 return false;
5018 return true;
5019 }
5020
5021 /* Outer state of output_constructor that is relevant in recursive calls,
5022    typically for nested aggregate bitfields.  */
5023
5024 struct oc_outer_state {
5025 unsigned int bit_offset; /* current position in ... */
5026 int byte; /* ... the outer byte buffer. */
5027 };
5028
5029 static unsigned HOST_WIDE_INT
5030 output_constructor (tree, unsigned HOST_WIDE_INT, unsigned int, bool,
5031 oc_outer_state *);
5032
5033 /* Output assembler code for constant EXP, with no label.
5034 This includes the pseudo-op such as ".int" or ".byte", and a newline.
5035 Assumes output_addressed_constants has been done on EXP already.
5036
5037 Generate at least SIZE bytes of assembler data, padding at the end
5038 with zeros if necessary. SIZE must always be specified. The returned
5039 value is the actual number of bytes of assembler data generated, which
5040 may be bigger than SIZE if the object contains a variable length field.
5041
5042 SIZE is important for structure constructors,
5043 since trailing members may have been omitted from the constructor.
5044 It is also important for initialization of arrays from string constants
5045 since the full length of the string constant might not be wanted.
5046 It is also needed for initialization of unions, where the initializer's
5047 type is just one member, and that may not be as long as the union.
5048
5049    There is a case in which we would fail to output exactly SIZE bytes:
5050 for a structure constructor that wants to produce more than SIZE bytes.
5051 But such constructors will never be generated for any possible input.
5052
5053 ALIGN is the alignment of the data in bits.
5054
5055 If REVERSE is true, EXP is output in reverse storage order. */
5056
5057 static unsigned HOST_WIDE_INT
5058 output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5059 bool reverse, bool merge_strings)
5060 {
5061 enum tree_code code;
5062 unsigned HOST_WIDE_INT thissize;
5063 rtx cst;
5064
5065 if (size == 0 || flag_syntax_only)
5066 return size;
5067
5068 /* See if we're trying to initialize a pointer in a non-default mode
5069 to the address of some declaration somewhere. If the target says
5070 the mode is valid for pointers, assume the target has a way of
5071 resolving it. */
5072 if (TREE_CODE (exp) == NOP_EXPR
5073 && POINTER_TYPE_P (TREE_TYPE (exp))
5074 && targetm.addr_space.valid_pointer_mode
5075 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5076 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5077 {
5078 tree saved_type = TREE_TYPE (exp);
5079
5080 /* Peel off any intermediate conversions-to-pointer for valid
5081 pointer modes. */
5082 while (TREE_CODE (exp) == NOP_EXPR
5083 && POINTER_TYPE_P (TREE_TYPE (exp))
5084 && targetm.addr_space.valid_pointer_mode
5085 (SCALAR_INT_TYPE_MODE (TREE_TYPE (exp)),
5086 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)))))
5087 exp = TREE_OPERAND (exp, 0);
5088
5089 /* If what we're left with is the address of something, we can
5090 convert the address to the final type and output it that
5091 way. */
5092 if (TREE_CODE (exp) == ADDR_EXPR)
5093 exp = build1 (ADDR_EXPR, saved_type, TREE_OPERAND (exp, 0));
5094 /* Likewise for constant ints. */
5095 else if (TREE_CODE (exp) == INTEGER_CST)
5096 exp = fold_convert (saved_type, exp);
5097
5098 }
5099
5100 /* Eliminate any conversions since we'll be outputting the underlying
5101 constant. */
5102 while (CONVERT_EXPR_P (exp)
5103 || TREE_CODE (exp) == NON_LVALUE_EXPR
5104 || TREE_CODE (exp) == VIEW_CONVERT_EXPR)
5105 {
5106 HOST_WIDE_INT type_size = int_size_in_bytes (TREE_TYPE (exp));
5107 HOST_WIDE_INT op_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0)));
5108
5109 /* Make sure eliminating the conversion is really a no-op, except with
5110 VIEW_CONVERT_EXPRs to allow for wild Ada unchecked conversions and
5111 union types to allow for Ada unchecked unions. */
5112 if (type_size > op_size
5113 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5114 && TREE_CODE (TREE_TYPE (exp)) != UNION_TYPE)
5115 /* Keep the conversion. */
5116 break;
5117 else
5118 exp = TREE_OPERAND (exp, 0);
5119 }
5120
5121 code = TREE_CODE (TREE_TYPE (exp));
5122 thissize = int_size_in_bytes (TREE_TYPE (exp));
5123
5124 /* Allow a constructor with no elements for any data type.
5125 This means to fill the space with zeros. */
5126 if (TREE_CODE (exp) == CONSTRUCTOR
5127 && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
5128 {
5129 assemble_zeros (size);
5130 return size;
5131 }
5132
5133 if (TREE_CODE (exp) == FDESC_EXPR)
5134 {
5135 #ifdef ASM_OUTPUT_FDESC
5136 HOST_WIDE_INT part = tree_to_shwi (TREE_OPERAND (exp, 1));
5137 tree decl = TREE_OPERAND (exp, 0);
5138 ASM_OUTPUT_FDESC (asm_out_file, decl, part);
5139 #else
5140 gcc_unreachable ();
5141 #endif
5142 return size;
5143 }
5144
5145   /* Now output the underlying data.  If we've handled the padding, return.
5146 Otherwise, break and ensure SIZE is the size written. */
5147 switch (code)
5148 {
5149 case BOOLEAN_TYPE:
5150 case INTEGER_TYPE:
5151 case ENUMERAL_TYPE:
5152 case POINTER_TYPE:
5153 case REFERENCE_TYPE:
5154 case OFFSET_TYPE:
5155 case FIXED_POINT_TYPE:
5156 case NULLPTR_TYPE:
5157 cst = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
5158 if (reverse)
5159 cst = flip_storage_order (TYPE_MODE (TREE_TYPE (exp)), cst);
5160 if (!assemble_integer (cst, MIN (size, thissize), align, 0))
5161 error ("initializer for integer/fixed-point value is too complicated");
5162 break;
5163
5164 case REAL_TYPE:
5165 if (TREE_CODE (exp) != REAL_CST)
5166 error ("initializer for floating value is not a floating constant");
5167 else
5168 assemble_real (TREE_REAL_CST (exp),
5169 SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (exp)),
5170 align, reverse);
5171 break;
5172
5173 case COMPLEX_TYPE:
5174 output_constant (TREE_REALPART (exp), thissize / 2, align,
5175 reverse, false);
5176 output_constant (TREE_IMAGPART (exp), thissize / 2,
5177 min_align (align, BITS_PER_UNIT * (thissize / 2)),
5178 reverse, false);
5179 break;
5180
5181 case ARRAY_TYPE:
5182 case VECTOR_TYPE:
5183 switch (TREE_CODE (exp))
5184 {
5185 case CONSTRUCTOR:
5186 return output_constructor (exp, size, align, reverse, NULL);
5187 case STRING_CST:
5188 thissize = (unsigned HOST_WIDE_INT)TREE_STRING_LENGTH (exp);
5189 if (merge_strings
5190 && (thissize == 0
5191 || TREE_STRING_POINTER (exp) [thissize - 1] != '\0'))
5192 thissize++;
5193 gcc_checking_assert (check_string_literal (exp, size));
5194 assemble_string (TREE_STRING_POINTER (exp), thissize);
5195 break;
5196 case VECTOR_CST:
5197 {
5198 scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5199 unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
5200 int elt_size = GET_MODE_SIZE (inner);
5201 output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
5202 reverse, false);
5203 thissize = elt_size;
5204 /* Static constants must have a fixed size. */
5205 unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
5206 for (unsigned int i = 1; i < nunits; i++)
5207 {
5208 output_constant (VECTOR_CST_ELT (exp, i), elt_size, nalign,
5209 reverse, false);
5210 thissize += elt_size;
5211 }
5212 break;
5213 }
5214 default:
5215 gcc_unreachable ();
5216 }
5217 break;
5218
5219 case RECORD_TYPE:
5220 case UNION_TYPE:
5221 gcc_assert (TREE_CODE (exp) == CONSTRUCTOR);
5222 return output_constructor (exp, size, align, reverse, NULL);
5223
5224 case ERROR_MARK:
5225 return 0;
5226
5227 default:
5228 gcc_unreachable ();
5229 }
5230
5231 if (size > thissize)
5232 assemble_zeros (size - thissize);
5233
5234 return size;
5235 }
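
/* For example, assuming the front end has given the string an array type
   covering all 16 bytes, calling output_constant on a STRING_CST whose
   TREE_STRING_LENGTH is 10 with SIZE == 16 emits the 10 string bytes
   followed by 6 zero bytes of padding and returns 16.  */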
5236 \f
5237 /* Subroutine of output_constructor, used for computing the size of
5238 arrays of unspecified length. VAL must be a CONSTRUCTOR of an array
5239 type with an unspecified upper bound. */
5240
5241 static unsigned HOST_WIDE_INT
5242 array_size_for_constructor (tree val)
5243 {
5244 tree max_index;
5245 unsigned HOST_WIDE_INT cnt;
5246 tree index, value, tmp;
5247 offset_int i;
5248
5249 /* This code used to attempt to handle string constants that are not
5250 arrays of single-bytes, but nothing else does, so there's no point in
5251 doing it here. */
5252 if (TREE_CODE (val) == STRING_CST)
5253 return TREE_STRING_LENGTH (val);
5254
5255 max_index = NULL_TREE;
5256 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (val), cnt, index, value)
5257 {
5258 if (TREE_CODE (index) == RANGE_EXPR)
5259 index = TREE_OPERAND (index, 1);
5260 if (max_index == NULL_TREE || tree_int_cst_lt (max_index, index))
5261 max_index = index;
5262 }
5263
5264 if (max_index == NULL_TREE)
5265 return 0;
5266
5267 /* Compute the total number of array elements. */
5268 tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
5269 i = wi::to_offset (max_index) - wi::to_offset (tmp) + 1;
5270
5271 /* Multiply by the array element unit size to find number of bytes. */
5272 i *= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
5273
5274 gcc_assert (wi::fits_uhwi_p (i));
5275 return i.to_uhwi ();
5276 }
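
/* For example, for a constructor whose largest index is 9 in an array type
   whose minimum index is 0 and whose elements are 4 bytes wide, the
   computation above yields (9 - 0 + 1) * 4 == 40 bytes.  */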
5277
5278 /* Other data structures and helpers for output_constructor.  */
5279
5280 /* output_constructor local state to support interaction with helpers. */
5281
5282 struct oc_local_state {
5283
5284 /* Received arguments. */
5285 tree exp; /* Constructor expression. */
5286 tree type; /* Type of constructor expression. */
5287 unsigned HOST_WIDE_INT size; /* # bytes to output - pad if necessary. */
5288 unsigned int align; /* Known initial alignment. */
5289 tree min_index; /* Lower bound if specified for an array. */
5290
5291 /* Output processing state. */
5292 HOST_WIDE_INT total_bytes; /* # bytes output so far / current position. */
5293 int byte; /* Part of a bitfield byte yet to be output. */
5294 int last_relative_index; /* Implicit or explicit index of the last
5295 array element output within a bitfield. */
5296 bool byte_buffer_in_use; /* Whether BYTE is in use. */
5297 bool reverse; /* Whether reverse storage order is in use. */
5298
5299 /* Current element. */
5300 tree field; /* Current field decl in a record. */
5301 tree val; /* Current element value. */
5302 tree index; /* Current element index. */
5303
5304 };
5305
5306 /* Helper for output_constructor. From the current LOCAL state, output a
5307 RANGE_EXPR element. */
5308
5309 static void
5310 output_constructor_array_range (oc_local_state *local)
5311 {
5312 /* Perform the index calculation in modulo arithmetic but
5313 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5314 but we are using an unsigned sizetype. */
5315 unsigned prec = TYPE_PRECISION (sizetype);
5316 offset_int idx = wi::sext (wi::to_offset (TREE_OPERAND (local->index, 0))
5317 - wi::to_offset (local->min_index), prec);
5318 tree valtype = TREE_TYPE (local->val);
5319 HOST_WIDE_INT fieldpos
5320 = (idx * wi::to_offset (TYPE_SIZE_UNIT (valtype))).to_short_addr ();
5321
5322 /* Advance to offset of this element. */
5323 if (fieldpos > local->total_bytes)
5324 {
5325 assemble_zeros (fieldpos - local->total_bytes);
5326 local->total_bytes = fieldpos;
5327 }
5328 else
5329 /* Must not go backwards. */
5330 gcc_assert (fieldpos == local->total_bytes);
5331
5332 unsigned HOST_WIDE_INT fieldsize
5333 = int_size_in_bytes (TREE_TYPE (local->type));
5334
5335 HOST_WIDE_INT lo_index
5336 = tree_to_shwi (TREE_OPERAND (local->index, 0));
5337 HOST_WIDE_INT hi_index
5338 = tree_to_shwi (TREE_OPERAND (local->index, 1));
5339 HOST_WIDE_INT index;
5340
5341 unsigned int align2
5342 = min_align (local->align, fieldsize * BITS_PER_UNIT);
5343
5344 for (index = lo_index; index <= hi_index; index++)
5345 {
5346 /* Output the element's initial value. */
5347 if (local->val == NULL_TREE)
5348 assemble_zeros (fieldsize);
5349 else
5350 fieldsize = output_constant (local->val, fieldsize, align2,
5351 local->reverse, false);
5352
5353 /* Count its size. */
5354 local->total_bytes += fieldsize;
5355 }
5356 }
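
/* An illustrative sketch (not from the original sources): a RANGE_EXPR
   index typically comes from the GNU designated-initializer range
   extension, e.g.

       static int a[8] = { [2 ... 5] = 42 };

   Here lo_index is 2, hi_index is 5 and fieldsize is sizeof (int), so the
   code above first zero-fills bytes 0..7 (elements 0 and 1) and then emits
   the value 42 four times; elements 6 and 7 are padded with zeros later by
   output_constructor.  */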
5357
5358 /* Helper for output_constructor. From the current LOCAL state, output a
5359    field element that is neither a true bitfield nor part of an outer one. */
5360
5361 static void
5362 output_constructor_regular_field (oc_local_state *local)
5363 {
5364 /* Field size and position. Since this structure is static, we know the
5365 positions are constant. */
5366 unsigned HOST_WIDE_INT fieldsize;
5367 HOST_WIDE_INT fieldpos;
5368
5369 unsigned int align2;
5370
5371 /* Output any buffered-up bit-fields preceding this element. */
5372 if (local->byte_buffer_in_use)
5373 {
5374 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5375 local->total_bytes++;
5376 local->byte_buffer_in_use = false;
5377 }
5378
5379 if (local->index != NULL_TREE)
5380 {
5381 /* Perform the index calculation in modulo arithmetic but
5382 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
5383 but we are using an unsigned sizetype. */
5384 unsigned prec = TYPE_PRECISION (sizetype);
5385 offset_int idx = wi::sext (wi::to_offset (local->index)
5386 - wi::to_offset (local->min_index), prec);
5387 fieldpos = (idx * wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (local->val))))
5388 .to_short_addr ();
5389 }
5390 else if (local->field != NULL_TREE)
5391 fieldpos = int_byte_position (local->field);
5392 else
5393 fieldpos = 0;
5394
5395 /* Advance to offset of this element.
5396 Note no alignment needed in an array, since that is guaranteed
5397 if each element has the proper size. */
5398 if (local->field != NULL_TREE || local->index != NULL_TREE)
5399 {
5400 if (fieldpos > local->total_bytes)
5401 {
5402 assemble_zeros (fieldpos - local->total_bytes);
5403 local->total_bytes = fieldpos;
5404 }
5405 else
5406 /* Must not go backwards. */
5407 gcc_assert (fieldpos == local->total_bytes);
5408 }
5409
5410 /* Find the alignment of this element. */
5411 align2 = min_align (local->align, BITS_PER_UNIT * fieldpos);
5412
5413 /* Determine size this element should occupy. */
5414 if (local->field)
5415 {
5416 fieldsize = 0;
5417
5418 /* If this is an array with an unspecified upper bound,
5419 the initializer determines the size. */
5420 	  /* ??? This ought to be checked only if DECL_SIZE_UNIT is NULL,
5421 but we cannot do this until the deprecated support for
5422 initializing zero-length array members is removed. */
5423 if (TREE_CODE (TREE_TYPE (local->field)) == ARRAY_TYPE
5424 && (!TYPE_DOMAIN (TREE_TYPE (local->field))
5425 || !TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (local->field)))))
5426 {
5427 fieldsize = array_size_for_constructor (local->val);
5428 /* Given a non-empty initialization, this field had better
5429 be last. Given a flexible array member, the next field
5430 on the chain is a TYPE_DECL of the enclosing struct. */
5431 const_tree next = DECL_CHAIN (local->field);
5432 gcc_assert (!fieldsize || !next || TREE_CODE (next) != FIELD_DECL);
5433 tree size = TYPE_SIZE_UNIT (TREE_TYPE (local->val));
5434 gcc_checking_assert (compare_tree_int (size, fieldsize) == 0);
5435 }
5436 else
5437 fieldsize = tree_to_uhwi (DECL_SIZE_UNIT (local->field));
5438 }
5439 else
5440 fieldsize = int_size_in_bytes (TREE_TYPE (local->type));
5441
5442 /* Output the element's initial value. */
5443 if (local->val == NULL_TREE)
5444 assemble_zeros (fieldsize);
5445 else
5446 fieldsize = output_constant (local->val, fieldsize, align2,
5447 local->reverse, false);
5448
5449 /* Count its size. */
5450 local->total_bytes += fieldsize;
5451 }
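
/* An illustrative sketch (assuming a target where int has size and
   alignment 4): for

       static struct { char c; int i; } x = { 'a', 1 };

   the first call outputs C at fieldpos 0; the second call computes
   fieldpos 4 for I while total_bytes is still 1, so assemble_zeros emits
   the three bytes of alignment padding before the value of I.  */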
5452
5453 /* Helper for output_constructor. From the LOCAL state, output an element
5454 that is a true bitfield or part of an outer one. BIT_OFFSET is the offset
5455 from the start of a possibly ongoing outer byte buffer. */
5456
5457 static void
5458 output_constructor_bitfield (oc_local_state *local, unsigned int bit_offset)
5459 {
5460 /* Bit size of this element. */
5461 HOST_WIDE_INT ebitsize
5462 = (local->field
5463 ? tree_to_uhwi (DECL_SIZE (local->field))
5464 : tree_to_uhwi (TYPE_SIZE (TREE_TYPE (local->type))));
5465
5466 /* Relative index of this element if this is an array component. */
5467 HOST_WIDE_INT relative_index
5468 = (!local->field
5469 ? (local->index
5470 ? (tree_to_shwi (local->index)
5471 - tree_to_shwi (local->min_index))
5472 : local->last_relative_index + 1)
5473 : 0);
5474
5475 /* Bit position of this element from the start of the containing
5476 constructor. */
5477 HOST_WIDE_INT constructor_relative_ebitpos
5478 = (local->field
5479 ? int_bit_position (local->field)
5480 : ebitsize * relative_index);
5481
5482 /* Bit position of this element from the start of a possibly ongoing
5483 outer byte buffer. */
5484 HOST_WIDE_INT byte_relative_ebitpos
5485 = bit_offset + constructor_relative_ebitpos;
5486
5487 /* From the start of a possibly ongoing outer byte buffer, offsets to
5488 the first bit of this element and to the first bit past the end of
5489 this element. */
5490 HOST_WIDE_INT next_offset = byte_relative_ebitpos;
5491 HOST_WIDE_INT end_offset = byte_relative_ebitpos + ebitsize;
5492
5493 local->last_relative_index = relative_index;
5494
5495 if (local->val == NULL_TREE)
5496 local->val = integer_zero_node;
5497
5498 while (TREE_CODE (local->val) == VIEW_CONVERT_EXPR
5499 || TREE_CODE (local->val) == NON_LVALUE_EXPR)
5500 local->val = TREE_OPERAND (local->val, 0);
5501
5502 if (TREE_CODE (local->val) != INTEGER_CST
5503 && TREE_CODE (local->val) != CONSTRUCTOR)
5504 {
5505 error ("invalid initial value for member %qE", DECL_NAME (local->field));
5506 return;
5507 }
5508
5509 /* If this field does not start in this (or next) byte, skip some bytes. */
5510 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5511 {
5512 /* Output remnant of any bit field in previous bytes. */
5513 if (local->byte_buffer_in_use)
5514 {
5515 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5516 local->total_bytes++;
5517 local->byte_buffer_in_use = false;
5518 }
5519
5520 /* If still not at proper byte, advance to there. */
5521 if (next_offset / BITS_PER_UNIT != local->total_bytes)
5522 {
5523 gcc_assert (next_offset / BITS_PER_UNIT >= local->total_bytes);
5524 assemble_zeros (next_offset / BITS_PER_UNIT - local->total_bytes);
5525 local->total_bytes = next_offset / BITS_PER_UNIT;
5526 }
5527 }
5528
5529 /* Set up the buffer if necessary. */
5530 if (!local->byte_buffer_in_use)
5531 {
5532 local->byte = 0;
5533 if (ebitsize > 0)
5534 local->byte_buffer_in_use = true;
5535 }
5536
5537   /* If this is a nested constructor, recurse passing the bit offset and the
5538 pending data, then retrieve the new pending data afterwards. */
5539 if (TREE_CODE (local->val) == CONSTRUCTOR)
5540 {
5541 oc_outer_state temp_state;
5542 temp_state.bit_offset = next_offset % BITS_PER_UNIT;
5543 temp_state.byte = local->byte;
5544 local->total_bytes
5545 += output_constructor (local->val, 0, 0, local->reverse, &temp_state);
5546 local->byte = temp_state.byte;
5547 return;
5548 }
5549
5550 /* Otherwise, we must split the element into pieces that fall within
5551 separate bytes, and combine each byte with previous or following
5552 bit-fields. */
5553 while (next_offset < end_offset)
5554 {
5555 int this_time;
5556 int shift;
5557 unsigned HOST_WIDE_INT value;
5558 HOST_WIDE_INT next_byte = next_offset / BITS_PER_UNIT;
5559 HOST_WIDE_INT next_bit = next_offset % BITS_PER_UNIT;
5560
5561 /* Advance from byte to byte within this element when necessary. */
5562 while (next_byte != local->total_bytes)
5563 {
5564 assemble_integer (GEN_INT (local->byte), 1, BITS_PER_UNIT, 1);
5565 local->total_bytes++;
5566 local->byte = 0;
5567 }
5568
5569 /* Number of bits we can process at once (all part of the same byte). */
5570 this_time = MIN (end_offset - next_offset, BITS_PER_UNIT - next_bit);
5571 if (local->reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5572 {
5573 /* For big-endian data, take the most significant bits (of the
5574 bits that are significant) first and put them into bytes from
5575 the most significant end. */
5576 shift = end_offset - next_offset - this_time;
5577
5578 /* Don't try to take a bunch of bits that cross
5579 the word boundary in the INTEGER_CST. We can
5580 only select bits from one element. */
5581 if ((shift / HOST_BITS_PER_WIDE_INT)
5582 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5583 {
5584 const int end = shift + this_time - 1;
5585 shift = end & -HOST_BITS_PER_WIDE_INT;
5586 this_time = end - shift + 1;
5587 }
5588
5589 /* Now get the bits we want to insert. */
5590 value = wi::extract_uhwi (wi::to_widest (local->val),
5591 shift, this_time);
5592
5593 /* Get the result. This works only when:
5594 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5595 local->byte |= value << (BITS_PER_UNIT - this_time - next_bit);
5596 }
5597 else
5598 {
5599 /* On little-endian machines, take the least significant bits of
5600 the value first and pack them starting at the least significant
5601 bits of the bytes. */
5602 shift = next_offset - byte_relative_ebitpos;
5603
5604 /* Don't try to take a bunch of bits that cross
5605 the word boundary in the INTEGER_CST. We can
5606 only select bits from one element. */
5607 if ((shift / HOST_BITS_PER_WIDE_INT)
5608 != ((shift + this_time - 1) / HOST_BITS_PER_WIDE_INT))
5609 this_time
5610 = HOST_BITS_PER_WIDE_INT - (shift & (HOST_BITS_PER_WIDE_INT - 1));
5611
5612 /* Now get the bits we want to insert. */
5613 value = wi::extract_uhwi (wi::to_widest (local->val),
5614 shift, this_time);
5615
5616 /* Get the result. This works only when:
5617 1 <= this_time <= HOST_BITS_PER_WIDE_INT. */
5618 local->byte |= value << next_bit;
5619 }
5620
5621 next_offset += this_time;
5622 local->byte_buffer_in_use = true;
5623 }
5624 }
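
/* A worked example (a sketch, assuming a little-endian target with 8-bit
   bytes): packing a 12-bit INTEGER_CST with value 0xABC at bit offset 4
   gives next_offset = 4 and end_offset = 16.  The first pass takes
   this_time = 4 bits (shift 0, value 0xC) and ORs in 0xC << 4; the second
   pass flushes that byte (0xC0, possibly combined with an earlier field's
   low bits), then takes this_time = 8 bits (shift 4, value 0xAB) and
   leaves 0xAB pending in the byte buffer for the next field or for the
   final flush in output_constructor.  */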
5625
5626 /* Subroutine of output_constant, used for CONSTRUCTORs (aggregate constants).
5627 Generate at least SIZE bytes, padding if necessary. OUTER designates the
5628 caller output state of relevance in recursive invocations. */
5629
5630 static unsigned HOST_WIDE_INT
5631 output_constructor (tree exp, unsigned HOST_WIDE_INT size, unsigned int align,
5632 bool reverse, oc_outer_state *outer)
5633 {
5634 unsigned HOST_WIDE_INT cnt;
5635 constructor_elt *ce;
5636 oc_local_state local;
5637
5638 /* Setup our local state to communicate with helpers. */
5639 local.exp = exp;
5640 local.type = TREE_TYPE (exp);
5641 local.size = size;
5642 local.align = align;
5643 if (TREE_CODE (local.type) == ARRAY_TYPE && TYPE_DOMAIN (local.type))
5644 local.min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (local.type));
5645 else
5646 local.min_index = integer_zero_node;
5647
5648 local.total_bytes = 0;
5649 local.byte_buffer_in_use = outer != NULL;
5650 local.byte = outer ? outer->byte : 0;
5651 local.last_relative_index = -1;
5652 /* The storage order is specified for every aggregate type. */
5653 if (AGGREGATE_TYPE_P (local.type))
5654 local.reverse = TYPE_REVERSE_STORAGE_ORDER (local.type);
5655 else
5656 local.reverse = reverse;
5657
5658 gcc_assert (HOST_BITS_PER_WIDE_INT >= BITS_PER_UNIT);
5659
5660 /* As CE goes through the elements of the constant, FIELD goes through the
5661 structure fields if the constant is a structure. If the constant is a
5662 union, we override this by getting the field from the TREE_LIST element.
5663 But the constant could also be an array. Then FIELD is zero.
5664
5665 There is always a maximum of one element in the chain LINK for unions
5666 (even if the initializer in a source program incorrectly contains
5667      more than one). */
5668
5669 if (TREE_CODE (local.type) == RECORD_TYPE)
5670 local.field = TYPE_FIELDS (local.type);
5671 else
5672 local.field = NULL_TREE;
5673
5674 for (cnt = 0;
5675 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
5676 cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
5677 {
5678 local.val = ce->value;
5679 local.index = NULL_TREE;
5680
5681 /* The element in a union constructor specifies the proper field
5682 or index. */
5683 if (RECORD_OR_UNION_TYPE_P (local.type) && ce->index != NULL_TREE)
5684 local.field = ce->index;
5685
5686 else if (TREE_CODE (local.type) == ARRAY_TYPE)
5687 local.index = ce->index;
5688
5689 if (local.field && flag_verbose_asm)
5690 fprintf (asm_out_file, "%s %s:\n",
5691 ASM_COMMENT_START,
5692 DECL_NAME (local.field)
5693 ? IDENTIFIER_POINTER (DECL_NAME (local.field))
5694 : "<anonymous>");
5695
5696 /* Eliminate the marker that makes a cast not be an lvalue. */
5697 if (local.val != NULL_TREE)
5698 STRIP_NOPS (local.val);
5699
5700 /* Output the current element, using the appropriate helper ... */
5701
5702 /* For an array slice not part of an outer bitfield. */
5703 if (!outer
5704 && local.index != NULL_TREE
5705 && TREE_CODE (local.index) == RANGE_EXPR)
5706 output_constructor_array_range (&local);
5707
5708 /* For a field that is neither a true bitfield nor part of an outer one,
5709 known to be at least byte aligned and multiple-of-bytes long. */
5710 else if (!outer
5711 && (local.field == NULL_TREE
5712 || !CONSTRUCTOR_BITFIELD_P (local.field)))
5713 output_constructor_regular_field (&local);
5714
5715 /* For a true bitfield or part of an outer one. Only INTEGER_CSTs are
5716 supported for scalar fields, so we may need to convert first. */
5717 else
5718 {
5719 if (TREE_CODE (local.val) == REAL_CST)
5720 local.val
5721 = fold_unary (VIEW_CONVERT_EXPR,
5722 build_nonstandard_integer_type
5723 (TYPE_PRECISION (TREE_TYPE (local.val)), 0),
5724 local.val);
5725 output_constructor_bitfield (&local, outer ? outer->bit_offset : 0);
5726 }
5727 }
5728
5729 /* If we are not at toplevel, save the pending data for our caller.
5730 Otherwise output the pending data and padding zeros as needed. */
5731 if (outer)
5732 outer->byte = local.byte;
5733 else
5734 {
5735 if (local.byte_buffer_in_use)
5736 {
5737 assemble_integer (GEN_INT (local.byte), 1, BITS_PER_UNIT, 1);
5738 local.total_bytes++;
5739 }
5740
5741       if ((unsigned HOST_WIDE_INT) local.total_bytes < local.size)
5742 {
5743 assemble_zeros (local.size - local.total_bytes);
5744 local.total_bytes = local.size;
5745 }
5746 }
5747
5748 return local.total_bytes;
5749 }
5750
5751 /* Mark DECL as weak. */
5752
5753 static void
5754 mark_weak (tree decl)
5755 {
5756 if (DECL_WEAK (decl))
5757 return;
5758
5759 struct symtab_node *n = symtab_node::get (decl);
5760 if (n && n->refuse_visibility_changes)
5761 error ("%+qD declared weak after being used", decl);
5762 DECL_WEAK (decl) = 1;
5763
5764 if (DECL_RTL_SET_P (decl)
5765 && MEM_P (DECL_RTL (decl))
5766 && XEXP (DECL_RTL (decl), 0)
5767 && GET_CODE (XEXP (DECL_RTL (decl), 0)) == SYMBOL_REF)
5768 SYMBOL_REF_WEAK (XEXP (DECL_RTL (decl), 0)) = 1;
5769 }
5770
5771 /* Merge weak status between NEWDECL and OLDDECL. */
5772
5773 void
5774 merge_weak (tree newdecl, tree olddecl)
5775 {
5776 if (DECL_WEAK (newdecl) == DECL_WEAK (olddecl))
5777 {
5778 if (DECL_WEAK (newdecl) && TARGET_SUPPORTS_WEAK)
5779 {
5780 tree *pwd;
5781 /* We put the NEWDECL on the weak_decls list at some point
5782 and OLDDECL as well. Keep just OLDDECL on the list. */
5783 for (pwd = &weak_decls; *pwd; pwd = &TREE_CHAIN (*pwd))
5784 if (TREE_VALUE (*pwd) == newdecl)
5785 {
5786 *pwd = TREE_CHAIN (*pwd);
5787 break;
5788 }
5789 }
5790 return;
5791 }
5792
5793 if (DECL_WEAK (newdecl))
5794 {
5795 tree wd;
5796
5797 /* NEWDECL is weak, but OLDDECL is not. */
5798
5799 /* If we already output the OLDDECL, we're in trouble; we can't
5800 go back and make it weak. This should never happen in
5801 unit-at-a-time compilation. */
5802 gcc_assert (!TREE_ASM_WRITTEN (olddecl));
5803
5804 /* If we've already generated rtl referencing OLDDECL, we may
5805 have done so in a way that will not function properly with
5806 a weak symbol. Again in unit-at-a-time this should be
5807 impossible. */
5808 gcc_assert (!TREE_USED (olddecl)
5809 || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (olddecl)));
5810
5811 /* PR 49899: You cannot convert a static function into a weak, public function. */
5812 if (! TREE_PUBLIC (olddecl) && TREE_PUBLIC (newdecl))
5813     error ("weak declaration of %q+D being applied to an already "
5814 "existing, static definition", newdecl);
5815
5816 if (TARGET_SUPPORTS_WEAK)
5817 {
5818 /* We put the NEWDECL on the weak_decls list at some point.
5819 Replace it with the OLDDECL. */
5820 for (wd = weak_decls; wd; wd = TREE_CHAIN (wd))
5821 if (TREE_VALUE (wd) == newdecl)
5822 {
5823 TREE_VALUE (wd) = olddecl;
5824 break;
5825 }
5826 /* We may not find the entry on the list. If NEWDECL is a
5827 weak alias, then we will have already called
5828 globalize_decl to remove the entry; in that case, we do
5829 not need to do anything. */
5830 }
5831
5832 /* Make the OLDDECL weak; it's OLDDECL that we'll be keeping. */
5833 mark_weak (olddecl);
5834 }
5835 else
5836 /* OLDDECL was weak, but NEWDECL was not explicitly marked as
5837 weak. Just update NEWDECL to indicate that it's weak too. */
5838 mark_weak (newdecl);
5839 }
5840
5841 /* Declare DECL to be a weak symbol. */
5842
5843 void
5844 declare_weak (tree decl)
5845 {
5846 gcc_assert (TREE_CODE (decl) != FUNCTION_DECL || !TREE_ASM_WRITTEN (decl));
5847 if (! TREE_PUBLIC (decl))
5848 {
5849 error ("weak declaration of %q+D must be public", decl);
5850 return;
5851 }
5852 else if (!TARGET_SUPPORTS_WEAK)
5853 warning (0, "weak declaration of %q+D not supported", decl);
5854
5855 mark_weak (decl);
5856 if (!lookup_attribute ("weak", DECL_ATTRIBUTES (decl)))
5857 DECL_ATTRIBUTES (decl)
5858 = tree_cons (get_identifier ("weak"), NULL, DECL_ATTRIBUTES (decl));
5859 }
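
/* An illustrative sketch (not from the original sources): declare_weak is
   what ultimately gives effect to a source-level declaration such as

       extern void hook (void) __attribute__ ((weak));

   On a typical ELF target the weak_finish machinery then emits a
   ".weak hook" directive, so an unresolved reference to HOOK binds to a
   null address at link time instead of causing an error.  */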
5860
5861 static void
5862 weak_finish_1 (tree decl)
5863 {
5864 #if defined (ASM_WEAKEN_DECL) || defined (ASM_WEAKEN_LABEL)
5865 const char *const name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5866 #endif
5867
5868 if (! TREE_USED (decl))
5869 return;
5870
5871 #ifdef ASM_WEAKEN_DECL
5872 ASM_WEAKEN_DECL (asm_out_file, decl, name, NULL);
5873 #else
5874 #ifdef ASM_WEAKEN_LABEL
5875 ASM_WEAKEN_LABEL (asm_out_file, name);
5876 #else
5877 #ifdef ASM_OUTPUT_WEAK_ALIAS
5878 {
5879 static bool warn_once = 0;
5880 if (! warn_once)
5881 {
5882 warning (0, "only weak aliases are supported in this configuration");
5883 warn_once = 1;
5884 }
5885 return;
5886 }
5887 #endif
5888 #endif
5889 #endif
5890 }
5891
5892 /* Given an assembly name, find the decl it is associated with. */
5893 static tree
5894 find_decl (tree target)
5895 {
5896 symtab_node *node = symtab_node::get_for_asmname (target);
5897 if (node)
5898 return node->decl;
5899 return NULL_TREE;
5900 }
5901
5902 /* This TREE_LIST contains weakref targets. */
5903
5904 static GTY(()) tree weakref_targets;
5905
5906 /* Emit any pending weak declarations. */
5907
5908 void
5909 weak_finish (void)
5910 {
5911 tree t;
5912
5913 for (t = weakref_targets; t; t = TREE_CHAIN (t))
5914 {
5915 tree alias_decl = TREE_PURPOSE (t);
5916 tree target = ultimate_transparent_alias_target (&TREE_VALUE (t));
5917
5918 if (! TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (alias_decl))
5919 || TREE_SYMBOL_REFERENCED (target))
5920 /* Remove alias_decl from the weak list, but leave entries for
5921 the target alone. */
5922 target = NULL_TREE;
5923 #ifndef ASM_OUTPUT_WEAKREF
5924 else if (! TREE_SYMBOL_REFERENCED (target))
5925 {
5926 /* Use ASM_WEAKEN_LABEL only if ASM_WEAKEN_DECL is not
5927 defined, otherwise we and weak_finish_1 would use
5928 different macros. */
5929 # if defined ASM_WEAKEN_LABEL && ! defined ASM_WEAKEN_DECL
5930 ASM_WEAKEN_LABEL (asm_out_file, IDENTIFIER_POINTER (target));
5931 # else
5932 tree decl = find_decl (target);
5933
5934 if (! decl)
5935 {
5936 decl = build_decl (DECL_SOURCE_LOCATION (alias_decl),
5937 TREE_CODE (alias_decl), target,
5938 TREE_TYPE (alias_decl));
5939
5940 DECL_EXTERNAL (decl) = 1;
5941 TREE_PUBLIC (decl) = 1;
5942 DECL_ARTIFICIAL (decl) = 1;
5943 TREE_NOTHROW (decl) = TREE_NOTHROW (alias_decl);
5944 TREE_USED (decl) = 1;
5945 }
5946
5947 weak_finish_1 (decl);
5948 # endif
5949 }
5950 #endif
5951
5952 {
5953 tree *p;
5954 tree t2;
5955
5956 /* Remove the alias and the target from the pending weak list
5957 so that we do not emit any .weak directives for the former,
5958 nor multiple .weak directives for the latter. */
5959 for (p = &weak_decls; (t2 = *p) ; )
5960 {
5961 if (TREE_VALUE (t2) == alias_decl
5962 || target == DECL_ASSEMBLER_NAME (TREE_VALUE (t2)))
5963 *p = TREE_CHAIN (t2);
5964 else
5965 p = &TREE_CHAIN (t2);
5966 }
5967
5968 /* Remove other weakrefs to the same target, to speed things up. */
5969 for (p = &TREE_CHAIN (t); (t2 = *p) ; )
5970 {
5971 if (target == ultimate_transparent_alias_target (&TREE_VALUE (t2)))
5972 *p = TREE_CHAIN (t2);
5973 else
5974 p = &TREE_CHAIN (t2);
5975 }
5976 }
5977 }
5978
5979 for (t = weak_decls; t; t = TREE_CHAIN (t))
5980 {
5981 tree decl = TREE_VALUE (t);
5982
5983 weak_finish_1 (decl);
5984 }
5985 }
5986
5987 /* Emit the assembly bits to indicate that DECL is globally visible. */
5988
5989 static void
5990 globalize_decl (tree decl)
5991 {
5992
5993 #if defined (ASM_WEAKEN_LABEL) || defined (ASM_WEAKEN_DECL)
5994 if (DECL_WEAK (decl))
5995 {
5996 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
5997 tree *p, t;
5998
5999 #ifdef ASM_WEAKEN_DECL
6000 ASM_WEAKEN_DECL (asm_out_file, decl, name, 0);
6001 #else
6002 ASM_WEAKEN_LABEL (asm_out_file, name);
6003 #endif
6004
6005 /* Remove this function from the pending weak list so that
6006 we do not emit multiple .weak directives for it. */
6007 for (p = &weak_decls; (t = *p) ; )
6008 {
6009 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6010 *p = TREE_CHAIN (t);
6011 else
6012 p = &TREE_CHAIN (t);
6013 }
6014
6015 /* Remove weakrefs to the same target from the pending weakref
6016 list, for the same reason. */
6017 for (p = &weakref_targets; (t = *p) ; )
6018 {
6019 if (DECL_ASSEMBLER_NAME (decl)
6020 == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6021 *p = TREE_CHAIN (t);
6022 else
6023 p = &TREE_CHAIN (t);
6024 }
6025
6026 return;
6027 }
6028 #endif
6029
6030 targetm.asm_out.globalize_decl_name (asm_out_file, decl);
6031 }
6032
6033 vec<alias_pair, va_gc> *alias_pairs;
6034
6035 /* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
6036 or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
6037 tree node is DECL to have the value of the tree node TARGET. */
6038
6039 void
6040 do_assemble_alias (tree decl, tree target)
6041 {
6042 tree id;
6043
6044 /* Emulated TLS had better not get this var. */
6045 gcc_assert (!(!targetm.have_tls
6046 && VAR_P (decl)
6047 && DECL_THREAD_LOCAL_P (decl)));
6048
6049 if (TREE_ASM_WRITTEN (decl))
6050 return;
6051
6052 id = DECL_ASSEMBLER_NAME (decl);
6053 ultimate_transparent_alias_target (&id);
6054 ultimate_transparent_alias_target (&target);
6055
6056 /* We must force creation of DECL_RTL for debug info generation, even though
6057 we don't use it here. */
6058 make_decl_rtl (decl);
6059
6060 TREE_ASM_WRITTEN (decl) = 1;
6061 TREE_ASM_WRITTEN (DECL_ASSEMBLER_NAME (decl)) = 1;
6062 TREE_ASM_WRITTEN (id) = 1;
6063
6064 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6065 {
6066 if (!TREE_SYMBOL_REFERENCED (target))
6067 weakref_targets = tree_cons (decl, target, weakref_targets);
6068
6069 #ifdef ASM_OUTPUT_WEAKREF
6070 ASM_OUTPUT_WEAKREF (asm_out_file, decl,
6071 IDENTIFIER_POINTER (id),
6072 IDENTIFIER_POINTER (target));
6073 #else
6074 if (!TARGET_SUPPORTS_WEAK)
6075 {
6076 error_at (DECL_SOURCE_LOCATION (decl),
6077 "weakref is not supported in this configuration");
6078 return;
6079 }
6080 #endif
6081 return;
6082 }
6083
6084 #ifdef ASM_OUTPUT_DEF
6085 tree orig_decl = decl;
6086
6087 /* Make name accessible from other files, if appropriate. */
6088
6089 if (TREE_PUBLIC (decl) || TREE_PUBLIC (orig_decl))
6090 {
6091 globalize_decl (decl);
6092 maybe_assemble_visibility (decl);
6093 }
6094 if (TREE_CODE (decl) == FUNCTION_DECL
6095 && cgraph_node::get (decl)->ifunc_resolver)
6096 {
6097 #if defined (ASM_OUTPUT_TYPE_DIRECTIVE)
6098 if (targetm.has_ifunc_p ())
6099 ASM_OUTPUT_TYPE_DIRECTIVE
6100 (asm_out_file, IDENTIFIER_POINTER (id),
6101 IFUNC_ASM_TYPE);
6102 else
6103 #endif
6104 error_at (DECL_SOURCE_LOCATION (decl),
6105 "%qs is not supported on this target", "ifunc");
6106 }
6107
6108 # ifdef ASM_OUTPUT_DEF_FROM_DECLS
6109 ASM_OUTPUT_DEF_FROM_DECLS (asm_out_file, decl, target);
6110 # else
6111 ASM_OUTPUT_DEF (asm_out_file,
6112 IDENTIFIER_POINTER (id),
6113 IDENTIFIER_POINTER (target));
6114 # endif
6115 #elif defined (ASM_OUTPUT_WEAK_ALIAS) || defined (ASM_WEAKEN_DECL)
6116 {
6117 const char *name;
6118 tree *p, t;
6119
6120 name = IDENTIFIER_POINTER (id);
6121 # ifdef ASM_WEAKEN_DECL
6122 ASM_WEAKEN_DECL (asm_out_file, decl, name, IDENTIFIER_POINTER (target));
6123 # else
6124 ASM_OUTPUT_WEAK_ALIAS (asm_out_file, name, IDENTIFIER_POINTER (target));
6125 # endif
6126 /* Remove this function from the pending weak list so that
6127 we do not emit multiple .weak directives for it. */
6128 for (p = &weak_decls; (t = *p) ; )
6129 if (DECL_ASSEMBLER_NAME (decl) == DECL_ASSEMBLER_NAME (TREE_VALUE (t))
6130 || id == DECL_ASSEMBLER_NAME (TREE_VALUE (t)))
6131 *p = TREE_CHAIN (t);
6132 else
6133 p = &TREE_CHAIN (t);
6134
6135 /* Remove weakrefs to the same target from the pending weakref
6136 list, for the same reason. */
6137 for (p = &weakref_targets; (t = *p) ; )
6138 {
6139 if (id == ultimate_transparent_alias_target (&TREE_VALUE (t)))
6140 *p = TREE_CHAIN (t);
6141 else
6142 p = &TREE_CHAIN (t);
6143 }
6144 }
6145 #endif
6146 }
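
/* An illustrative sketch (assuming an ELF target with ASM_OUTPUT_DEF):
   a pair of declarations such as

       int real_fn (void) { return 0; }
       int alias_fn (void) __attribute__ ((alias ("real_fn")));

   leads do_assemble_alias to emit roughly

       .globl  alias_fn
       .set    alias_fn, real_fn

   while the "weakref" attribute instead takes the ASM_OUTPUT_WEAKREF path
   above and produces a ".weakref" directive.  */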
6147
6148 /* Output .symver directive. */
6149
6150 void
6151 do_assemble_symver (tree decl, tree target)
6152 {
6153 tree id = DECL_ASSEMBLER_NAME (decl);
6154 ultimate_transparent_alias_target (&id);
6155 ultimate_transparent_alias_target (&target);
6156 #ifdef ASM_OUTPUT_SYMVER_DIRECTIVE
6157 ASM_OUTPUT_SYMVER_DIRECTIVE (asm_out_file,
6158 IDENTIFIER_POINTER (target),
6159 IDENTIFIER_POINTER (id));
6160 #else
6161 error ("symver is only supported on ELF platforms");
6162 #endif
6163 }
6164
6165 /* Emit an assembler directive to make the symbol for DECL an alias to
6166 the symbol for TARGET. */
6167
6168 void
6169 assemble_alias (tree decl, tree target)
6170 {
6171 tree target_decl;
6172
6173 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
6174 {
6175 tree alias = DECL_ASSEMBLER_NAME (decl);
6176
6177 ultimate_transparent_alias_target (&target);
6178
6179 if (alias == target)
6180 error ("%qs symbol %q+D ultimately targets itself", "weakref", decl);
6181 if (TREE_PUBLIC (decl))
6182 error ("%qs symbol %q+D must have static linkage", "weakref", decl);
6183 }
6184 else
6185 {
6186 #if !defined (ASM_OUTPUT_DEF)
6187 # if !defined(ASM_OUTPUT_WEAK_ALIAS) && !defined (ASM_WEAKEN_DECL)
6188 error_at (DECL_SOURCE_LOCATION (decl),
6189 "alias definitions not supported in this configuration");
6190 TREE_ASM_WRITTEN (decl) = 1;
6191 return;
6192 # else
6193 if (!DECL_WEAK (decl))
6194 {
6195 /* NB: ifunc_resolver isn't set when an error is detected. */
6196 if (TREE_CODE (decl) == FUNCTION_DECL
6197 && lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
6198 error_at (DECL_SOURCE_LOCATION (decl),
6199 "%qs is not supported in this configuration", "ifunc");
6200 else
6201 error_at (DECL_SOURCE_LOCATION (decl),
6202 "only weak aliases are supported in this configuration");
6203 TREE_ASM_WRITTEN (decl) = 1;
6204 return;
6205 }
6206 # endif
6207 #endif
6208 }
6209 TREE_USED (decl) = 1;
6210
6211 /* Allow aliases to aliases. */
6212 if (TREE_CODE (decl) == FUNCTION_DECL)
6213 cgraph_node::get_create (decl)->alias = true;
6214 else
6215 varpool_node::get_create (decl)->alias = true;
6216
6217 /* If the target has already been emitted, we don't have to queue the
6218 alias. This saves a tad of memory. */
6219 if (symtab->global_info_ready)
6220 target_decl = find_decl (target);
6221 else
6222     target_decl = NULL;
6223 if ((target_decl && TREE_ASM_WRITTEN (target_decl))
6224 || symtab->state >= EXPANSION)
6225 do_assemble_alias (decl, target);
6226 else
6227 {
6228 alias_pair p = {decl, target};
6229 vec_safe_push (alias_pairs, p);
6230 }
6231 }
6232
6233 /* Record and output a table of translations from original function
6234 to its transaction aware clone. Note that tm_pure functions are
6235 considered to be their own clone. */
6236
6237 struct tm_clone_hasher : ggc_cache_ptr_hash<tree_map>
6238 {
6239 static hashval_t hash (tree_map *m) { return tree_map_hash (m); }
6240 static bool equal (tree_map *a, tree_map *b) { return tree_map_eq (a, b); }
6241
6242 static int
6243 keep_cache_entry (tree_map *&e)
6244 {
6245 return ggc_marked_p (e->base.from);
6246 }
6247 };
6248
6249 static GTY((cache)) hash_table<tm_clone_hasher> *tm_clone_hash;
6250
6251 void
6252 record_tm_clone_pair (tree o, tree n)
6253 {
6254 struct tree_map **slot, *h;
6255
6256 if (tm_clone_hash == NULL)
6257 tm_clone_hash = hash_table<tm_clone_hasher>::create_ggc (32);
6258
6259 h = ggc_alloc<tree_map> ();
6260 h->hash = htab_hash_pointer (o);
6261 h->base.from = o;
6262 h->to = n;
6263
6264 slot = tm_clone_hash->find_slot_with_hash (h, h->hash, INSERT);
6265 *slot = h;
6266 }
6267
6268 tree
6269 get_tm_clone_pair (tree o)
6270 {
6271 if (tm_clone_hash)
6272 {
6273 struct tree_map *h, in;
6274
6275 in.base.from = o;
6276 in.hash = htab_hash_pointer (o);
6277 h = tm_clone_hash->find_with_hash (&in, in.hash);
6278 if (h)
6279 return h->to;
6280 }
6281 return NULL_TREE;
6282 }
6283
6284 struct tm_alias_pair
6285 {
6286 unsigned int uid;
6287 tree from;
6288 tree to;
6289 };
6290
6291
6292 /* Dump the actual pairs to the .tm_clone_table section. */
6293
6294 static void
6295 dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
6296 {
6297 unsigned i;
6298 tm_alias_pair *p;
6299 bool switched = false;
6300
6301 FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
6302 {
6303 tree src = p->from;
6304 tree dst = p->to;
6305 struct cgraph_node *src_n = cgraph_node::get (src);
6306 struct cgraph_node *dst_n = cgraph_node::get (dst);
6307
6308 /* The function ipa_tm_create_version() marks the clone as needed if
6309 the original function was needed. But we also mark the clone as
6310 needed if we ever called the clone indirectly through
6311 TM_GETTMCLONE. If neither of these are true, we didn't generate
6312 a clone, and we didn't call it indirectly... no sense keeping it
6313 in the clone table. */
6314 if (!dst_n || !dst_n->definition)
6315 continue;
6316
6317 /* This covers the case where we have optimized the original
6318 function away, and only access the transactional clone. */
6319 if (!src_n || !src_n->definition)
6320 continue;
6321
6322 if (!switched)
6323 {
6324 switch_to_section (targetm.asm_out.tm_clone_table_section ());
6325 assemble_align (POINTER_SIZE);
6326 switched = true;
6327 }
6328
6329 assemble_integer (XEXP (DECL_RTL (src), 0),
6330 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6331 assemble_integer (XEXP (DECL_RTL (dst), 0),
6332 POINTER_SIZE_UNITS, POINTER_SIZE, 1);
6333 }
6334 }
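
/* An illustrative sketch (not from the original sources): the pairs come
   from compiling with -fgnu-tm, where the TM IPA pass creates a
   transactional clone of, e.g., a transaction_safe function and records
   the (original, clone) pair.  The table emitted above is what allows the
   runtime to map a pointer to the original function to its clone when the
   function is called indirectly inside a transaction.  */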
6335
6336 /* Provide a default for the tm_clone_table section. */
6337
6338 section *
6339 default_clone_table_section (void)
6340 {
6341 return get_named_section (NULL, ".tm_clone_table", 3);
6342 }
6343
6344 /* Helper comparison function for qsorting by the DECL_UID stored in
6345    tm_alias_pair->uid. */
6346
6347 static int
6348 tm_alias_pair_cmp (const void *x, const void *y)
6349 {
6350 const tm_alias_pair *p1 = (const tm_alias_pair *) x;
6351 const tm_alias_pair *p2 = (const tm_alias_pair *) y;
6352 if (p1->uid < p2->uid)
6353 return -1;
6354 if (p1->uid > p2->uid)
6355 return 1;
6356 return 0;
6357 }
6358
6359 void
6360 finish_tm_clone_pairs (void)
6361 {
6362 vec<tm_alias_pair> tm_alias_pairs = vNULL;
6363
6364 if (tm_clone_hash == NULL)
6365 return;
6366
6367   /* We need a deterministic order for the .tm_clone_table, otherwise
6368 we will get bootstrap comparison failures, so dump the hash table
6369 to a vector, sort it, and dump the vector. */
6370
6371 /* Dump the hashtable to a vector. */
6372 tree_map *map;
6373 hash_table<tm_clone_hasher>::iterator iter;
6374 FOR_EACH_HASH_TABLE_ELEMENT (*tm_clone_hash, map, tree_map *, iter)
6375 {
6376 tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
6377 tm_alias_pairs.safe_push (p);
6378 }
6379 /* Sort it. */
6380 tm_alias_pairs.qsort (tm_alias_pair_cmp);
6381
6382 /* Dump it. */
6383 dump_tm_clone_pairs (tm_alias_pairs);
6384
6385 tm_clone_hash->empty ();
6386 tm_clone_hash = NULL;
6387 tm_alias_pairs.release ();
6388 }
6389
6390
6391 /* Emit an assembler directive to set symbol for DECL visibility to
6392 the visibility type VIS, which must not be VISIBILITY_DEFAULT. */
6393
6394 void
6395 default_assemble_visibility (tree decl ATTRIBUTE_UNUSED,
6396 int vis ATTRIBUTE_UNUSED)
6397 {
6398 #ifdef HAVE_GAS_HIDDEN
6399 static const char * const visibility_types[] = {
6400 NULL, "protected", "hidden", "internal"
6401 };
6402
6403 const char *name, *type;
6404 tree id;
6405
6406 id = DECL_ASSEMBLER_NAME (decl);
6407 ultimate_transparent_alias_target (&id);
6408 name = IDENTIFIER_POINTER (id);
6409
6410 type = visibility_types[vis];
6411
6412 fprintf (asm_out_file, "\t.%s\t", type);
6413 assemble_name (asm_out_file, name);
6414 fprintf (asm_out_file, "\n");
6415 #else
6416 if (!DECL_ARTIFICIAL (decl))
6417 warning (OPT_Wattributes, "visibility attribute not supported "
6418 "in this configuration; ignored");
6419 #endif
6420 }
6421
6422 /* A helper function to call assemble_visibility when needed for a decl. */
6423
6424 int
6425 maybe_assemble_visibility (tree decl)
6426 {
6427 enum symbol_visibility vis = DECL_VISIBILITY (decl);
6428 if (vis != VISIBILITY_DEFAULT)
6429 {
6430 targetm.asm_out.assemble_visibility (decl, vis);
6431 return 1;
6432 }
6433 else
6434 return 0;
6435 }
6436
6437 /* Returns 1 if the target configuration supports defining public symbols
6438 so that one of them will be chosen at link time instead of generating a
6439 multiply-defined symbol error, whether through the use of weak symbols or
6440 a target-specific mechanism for having duplicates discarded. */
6441
6442 int
6443 supports_one_only (void)
6444 {
6445 if (SUPPORTS_ONE_ONLY)
6446 return 1;
6447 return TARGET_SUPPORTS_WEAK;
6448 }
6449
6450 /* Set up DECL as a public symbol that can be defined in multiple
6451 translation units without generating a linker error. */
6452
6453 void
6454 make_decl_one_only (tree decl, tree comdat_group)
6455 {
6456 struct symtab_node *symbol;
6457 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6458
6459 TREE_PUBLIC (decl) = 1;
6460
6461 if (VAR_P (decl))
6462 symbol = varpool_node::get_create (decl);
6463 else
6464 symbol = cgraph_node::get_create (decl);
6465
6466 if (SUPPORTS_ONE_ONLY)
6467 {
6468 #ifdef MAKE_DECL_ONE_ONLY
6469 MAKE_DECL_ONE_ONLY (decl);
6470 #endif
6471 symbol->set_comdat_group (comdat_group);
6472 }
6473 else if (VAR_P (decl)
6474 && (DECL_INITIAL (decl) == 0
6475 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
6476 DECL_COMMON (decl) = 1;
6477 else
6478 {
6479 gcc_assert (TARGET_SUPPORTS_WEAK);
6480 DECL_WEAK (decl) = 1;
6481 }
6482 }
6483
6484 void
6485 init_varasm_once (void)
6486 {
6487 section_htab = hash_table<section_hasher>::create_ggc (31);
6488 object_block_htab = hash_table<object_block_hasher>::create_ggc (31);
6489 const_desc_htab = hash_table<tree_descriptor_hasher>::create_ggc (1009);
6490
6491 shared_constant_pool = create_constant_pool ();
6492
6493 #ifdef TEXT_SECTION_ASM_OP
6494 text_section = get_unnamed_section (SECTION_CODE, output_section_asm_op,
6495 TEXT_SECTION_ASM_OP);
6496 #endif
6497
6498 #ifdef DATA_SECTION_ASM_OP
6499 data_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6500 DATA_SECTION_ASM_OP);
6501 #endif
6502
6503 #ifdef SDATA_SECTION_ASM_OP
6504 sdata_section = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
6505 SDATA_SECTION_ASM_OP);
6506 #endif
6507
6508 #ifdef READONLY_DATA_SECTION_ASM_OP
6509 readonly_data_section = get_unnamed_section (0, output_section_asm_op,
6510 READONLY_DATA_SECTION_ASM_OP);
6511 #endif
6512
6513 #ifdef CTORS_SECTION_ASM_OP
6514 ctors_section = get_unnamed_section (0, output_section_asm_op,
6515 CTORS_SECTION_ASM_OP);
6516 #endif
6517
6518 #ifdef DTORS_SECTION_ASM_OP
6519 dtors_section = get_unnamed_section (0, output_section_asm_op,
6520 DTORS_SECTION_ASM_OP);
6521 #endif
6522
6523 #ifdef BSS_SECTION_ASM_OP
6524 bss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6525 output_section_asm_op,
6526 BSS_SECTION_ASM_OP);
6527 #endif
6528
6529 #ifdef SBSS_SECTION_ASM_OP
6530 sbss_section = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
6531 output_section_asm_op,
6532 SBSS_SECTION_ASM_OP);
6533 #endif
6534
6535 tls_comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6536 | SECTION_COMMON, emit_tls_common);
6537 lcomm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6538 | SECTION_COMMON, emit_local);
6539 comm_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS
6540 | SECTION_COMMON, emit_common);
6541
6542 #if defined ASM_OUTPUT_ALIGNED_BSS
6543 bss_noswitch_section = get_noswitch_section (SECTION_WRITE | SECTION_BSS,
6544 emit_bss);
6545 #endif
6546
6547 targetm.asm_out.init_sections ();
6548
6549 if (readonly_data_section == NULL)
6550 readonly_data_section = text_section;
6551
6552 #ifdef ASM_OUTPUT_EXTERNAL
6553 pending_assemble_externals_set = new hash_set<tree>;
6554 #endif
6555 }
6556
6557 enum tls_model
6558 decl_default_tls_model (const_tree decl)
6559 {
6560 enum tls_model kind;
6561 bool is_local;
6562
6563 is_local = targetm.binds_local_p (decl);
6564 if (!flag_shlib)
6565 {
6566 if (is_local)
6567 kind = TLS_MODEL_LOCAL_EXEC;
6568 else
6569 kind = TLS_MODEL_INITIAL_EXEC;
6570 }
6571
6572 /* Local dynamic is inefficient when we're not combining the
6573 parts of the address. */
6574 else if (optimize && is_local)
6575 kind = TLS_MODEL_LOCAL_DYNAMIC;
6576 else
6577 kind = TLS_MODEL_GLOBAL_DYNAMIC;
6578 if (kind < flag_tls_default)
6579 kind = flag_tls_default;
6580
6581 return kind;
6582 }
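
/* An illustrative sketch of the mapping above for "__thread int t;":
   compiled without -fpic it gets TLS_MODEL_LOCAL_EXEC when it binds
   locally and TLS_MODEL_INITIAL_EXEC otherwise; compiled as shared-library
   code it gets TLS_MODEL_LOCAL_DYNAMIC when optimizing and binding
   locally, and TLS_MODEL_GLOBAL_DYNAMIC otherwise, subject to the floor
   imposed by flag_tls_default.  */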
6583
6584 /* Select a set of attributes for section NAME based on the properties
6585 of DECL and whether or not RELOC indicates that DECL's initializer
6586 might contain runtime relocations.
6587
6588 We make the section read-only and executable for a function decl,
6589 read-only for a const data decl, and writable for a non-const data decl. */
6590
6591 unsigned int
6592 default_section_type_flags (tree decl, const char *name, int reloc)
6593 {
6594 unsigned int flags;
6595
6596 if (decl && TREE_CODE (decl) == FUNCTION_DECL)
6597 flags = SECTION_CODE;
6598 else if (decl)
6599 {
6600 enum section_category category
6601 = categorize_decl_for_section (decl, reloc);
6602 if (decl_readonly_section_1 (category))
6603 flags = 0;
6604 else if (category == SECCAT_DATA_REL_RO
6605 || category == SECCAT_DATA_REL_RO_LOCAL)
6606 flags = SECTION_WRITE | SECTION_RELRO;
6607 else
6608 flags = SECTION_WRITE;
6609 }
6610 else
6611 {
6612 flags = SECTION_WRITE;
6613 if (strcmp (name, ".data.rel.ro") == 0
6614 || strcmp (name, ".data.rel.ro.local") == 0)
6615 flags |= SECTION_RELRO;
6616 }
6617
6618 if (decl && DECL_P (decl) && DECL_COMDAT_GROUP (decl))
6619 flags |= SECTION_LINKONCE;
6620
6621 if (strcmp (name, ".vtable_map_vars") == 0)
6622 flags |= SECTION_LINKONCE;
6623
6624 if (decl && VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6625 flags |= SECTION_TLS | SECTION_WRITE;
6626
6627 if (strcmp (name, ".bss") == 0
6628 || strncmp (name, ".bss.", 5) == 0
6629 || strncmp (name, ".gnu.linkonce.b.", 16) == 0
6630 || strcmp (name, ".persistent.bss") == 0
6631 || strcmp (name, ".sbss") == 0
6632 || strncmp (name, ".sbss.", 6) == 0
6633 || strncmp (name, ".gnu.linkonce.sb.", 17) == 0)
6634 flags |= SECTION_BSS;
6635
6636 if (strcmp (name, ".tdata") == 0
6637 || strncmp (name, ".tdata.", 7) == 0
6638 || strncmp (name, ".gnu.linkonce.td.", 17) == 0)
6639 flags |= SECTION_TLS;
6640
6641 if (strcmp (name, ".tbss") == 0
6642 || strncmp (name, ".tbss.", 6) == 0
6643 || strncmp (name, ".gnu.linkonce.tb.", 17) == 0)
6644 flags |= SECTION_TLS | SECTION_BSS;
6645
6646 if (strcmp (name, ".noinit") == 0)
6647 flags |= SECTION_WRITE | SECTION_BSS | SECTION_NOTYPE;
6648
6649 /* Various sections have special ELF types that the assembler will
6650 assign by default based on the name. They are neither SHT_PROGBITS
6651 nor SHT_NOBITS, so when changing sections we don't want to print a
6652 section type (@progbits or @nobits). Rather than duplicating the
6653 assembler's knowledge of what those special name patterns are, just
6654 let the assembler choose the type if we don't know a specific
6655 reason to set it to something other than the default. SHT_PROGBITS
6656 is the default for sections whose name is not specially known to
6657 the assembler, so it does no harm to leave the choice to the
6658 assembler when @progbits is the best thing we know to use. If
6659 someone is silly enough to emit code or TLS variables to one of
6660 these sections, then don't handle them specially.
6661
6662 default_elf_asm_named_section (below) handles the BSS, TLS, ENTSIZE, and
6663 LINKONCE cases when NOTYPE is not set, so leave those to its logic. */
6664 if (!(flags & (SECTION_CODE | SECTION_BSS | SECTION_TLS | SECTION_ENTSIZE))
6665 && !(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE)))
6666 flags |= SECTION_NOTYPE;
6667
6668 return flags;
6669 }
6670
6671 /* Return true if the target supports some form of global BSS,
6672 either through bss_noswitch_section, or by selecting a BSS
6673 section in TARGET_ASM_SELECT_SECTION. */
6674
6675 bool
6676 have_global_bss_p (void)
6677 {
6678 return bss_noswitch_section || targetm.have_switchable_bss_sections;
6679 }
6680
6681 /* Output assembly to switch to section NAME with attribute FLAGS.
6682 Four variants for common object file formats. */
6683
6684 void
6685 default_no_named_section (const char *name ATTRIBUTE_UNUSED,
6686 unsigned int flags ATTRIBUTE_UNUSED,
6687 tree decl ATTRIBUTE_UNUSED)
6688 {
6689 /* Some object formats don't support named sections at all. The
6690 front-end should already have flagged this as an error. */
6691 gcc_unreachable ();
6692 }
6693
6694 #ifndef TLS_SECTION_ASM_FLAG
6695 #define TLS_SECTION_ASM_FLAG 'T'
6696 #endif
6697
6698 void
6699 default_elf_asm_named_section (const char *name, unsigned int flags,
6700 tree decl)
6701 {
6702 char flagchars[11], *f = flagchars;
6703 unsigned int numeric_value = 0;
6704
6705 /* If we have already declared this section, we can use an
6706 abbreviated form to switch back to it -- unless this section is
6707      part of a COMDAT group, in which case GAS requires the full
6708 declaration every time. */
6709 if (!(HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6710 && (flags & SECTION_DECLARED))
6711 {
6712 fprintf (asm_out_file, "\t.section\t%s\n", name);
6713 return;
6714 }
6715
6716 /* If we have a machine specific flag, then use the numeric value to pass
6717 this on to GAS. */
6718 if (targetm.asm_out.elf_flags_numeric (flags, &numeric_value))
6719 snprintf (f, sizeof (flagchars), "0x%08x", numeric_value);
6720 else
6721 {
6722 if (!(flags & SECTION_DEBUG))
6723 *f++ = 'a';
6724 #if HAVE_GAS_SECTION_EXCLUDE
6725 if (flags & SECTION_EXCLUDE)
6726 *f++ = 'e';
6727 #endif
6728 if (flags & SECTION_WRITE)
6729 *f++ = 'w';
6730 if (flags & SECTION_CODE)
6731 *f++ = 'x';
6732 if (flags & SECTION_SMALL)
6733 *f++ = 's';
6734 if (flags & SECTION_MERGE)
6735 *f++ = 'M';
6736 if (flags & SECTION_STRINGS)
6737 *f++ = 'S';
6738 if (flags & SECTION_TLS)
6739 *f++ = TLS_SECTION_ASM_FLAG;
6740 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6741 *f++ = 'G';
6742 #ifdef MACH_DEP_SECTION_ASM_FLAG
6743 if (flags & SECTION_MACH_DEP)
6744 *f++ = MACH_DEP_SECTION_ASM_FLAG;
6745 #endif
6746 *f = '\0';
6747 }
6748
6749 fprintf (asm_out_file, "\t.section\t%s,\"%s\"", name, flagchars);
6750
6751 /* default_section_type_flags (above) knows which flags need special
6752 handling here, and sets NOTYPE when none of these apply so that the
6753 assembler's logic for default types can apply to user-chosen
6754 section names. */
6755 if (!(flags & SECTION_NOTYPE))
6756 {
6757 const char *type;
6758 const char *format;
6759
6760 if (flags & SECTION_BSS)
6761 type = "nobits";
6762 else
6763 type = "progbits";
6764
6765 format = ",@%s";
6766 /* On platforms that use "@" as the assembly comment character,
6767 use "%" instead. */
6768 if (strcmp (ASM_COMMENT_START, "@") == 0)
6769 format = ",%%%s";
6770 fprintf (asm_out_file, format, type);
6771
6772 if (flags & SECTION_ENTSIZE)
6773 fprintf (asm_out_file, ",%d", flags & SECTION_ENTSIZE);
6774 if (HAVE_COMDAT_GROUP && (flags & SECTION_LINKONCE))
6775 {
6776 if (TREE_CODE (decl) == IDENTIFIER_NODE)
6777 fprintf (asm_out_file, ",%s,comdat", IDENTIFIER_POINTER (decl));
6778 else
6779 fprintf (asm_out_file, ",%s,comdat",
6780 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (decl)));
6781 }
6782 }
6783
6784 putc ('\n', asm_out_file);
6785 }
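
/* An illustrative sketch of typical directives produced above (assuming
   GAS syntax and HAVE_COMDAT_GROUP):

       .section  .data.foo,"aw"
       .section  .text.unlikely,"ax",@progbits
       .section  .text._Z3barv,"axG",@progbits,_Z3barv,comdat

   The first form relies on SECTION_NOTYPE to let the assembler pick the
   section type; the last form carries the COMDAT group name and the
   "comdat" keyword.  */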
6786
6787 void
6788 default_coff_asm_named_section (const char *name, unsigned int flags,
6789 tree decl ATTRIBUTE_UNUSED)
6790 {
6791 char flagchars[8], *f = flagchars;
6792
6793 if (flags & SECTION_WRITE)
6794 *f++ = 'w';
6795 if (flags & SECTION_CODE)
6796 *f++ = 'x';
6797 *f = '\0';
6798
6799 fprintf (asm_out_file, "\t.section\t%s,\"%s\"\n", name, flagchars);
6800 }
6801
6802 void
6803 default_pe_asm_named_section (const char *name, unsigned int flags,
6804 tree decl)
6805 {
6806 default_coff_asm_named_section (name, flags, decl);
6807
6808 if (flags & SECTION_LINKONCE)
6809 {
6810 /* Functions may have been compiled at various levels of
6811 optimization so we can't use `same_size' here.
6812 Instead, have the linker pick one. */
6813 fprintf (asm_out_file, "\t.linkonce %s\n",
6814 (flags & SECTION_CODE ? "discard" : "same_size"));
6815 }
6816 }
6817 \f
6818 /* The lame default section selector. */
6819
6820 section *
6821 default_select_section (tree decl, int reloc,
6822 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
6823 {
6824 if (DECL_P (decl))
6825 {
6826 if (decl_readonly_section (decl, reloc))
6827 return readonly_data_section;
6828 }
6829 else if (TREE_CODE (decl) == CONSTRUCTOR)
6830 {
6831 if (! ((flag_pic && reloc)
6832 || !TREE_READONLY (decl)
6833 || TREE_SIDE_EFFECTS (decl)
6834 || !TREE_CONSTANT (decl)))
6835 return readonly_data_section;
6836 }
6837 else if (TREE_CODE (decl) == STRING_CST)
6838 return readonly_data_section;
6839 else if (! (flag_pic && reloc))
6840 return readonly_data_section;
6841
6842 return data_section;
6843 }
6844
6845 enum section_category
6846 categorize_decl_for_section (const_tree decl, int reloc)
6847 {
6848 enum section_category ret;
6849
6850 if (TREE_CODE (decl) == FUNCTION_DECL)
6851 return SECCAT_TEXT;
6852 else if (TREE_CODE (decl) == STRING_CST)
6853 {
6854 if ((flag_sanitize & SANITIZE_ADDRESS)
6855 && asan_protect_global (CONST_CAST_TREE (decl)))
6856 /* or !flag_merge_constants */
6857 return SECCAT_RODATA;
6858 else
6859 return SECCAT_RODATA_MERGE_STR;
6860 }
6861 else if (VAR_P (decl))
6862 {
6863 tree d = CONST_CAST_TREE (decl);
6864 if (bss_initializer_p (decl))
6865 ret = SECCAT_BSS;
6866 else if (! TREE_READONLY (decl)
6867 || TREE_SIDE_EFFECTS (decl)
6868 || (DECL_INITIAL (decl)
6869 && ! TREE_CONSTANT (DECL_INITIAL (decl))))
6870 {
6871 /* Here the reloc_rw_mask is not testing whether the section should
6872 	     be read-only or not, but whether the dynamic linker will have to
6873 do something. If so, we wish to segregate the data in order to
6874 minimize cache misses inside the dynamic linker. */
6875 if (reloc & targetm.asm_out.reloc_rw_mask ())
6876 ret = reloc == 1 ? SECCAT_DATA_REL_LOCAL : SECCAT_DATA_REL;
6877 else
6878 ret = SECCAT_DATA;
6879 }
6880 else if (reloc & targetm.asm_out.reloc_rw_mask ())
6881 ret = reloc == 1 ? SECCAT_DATA_REL_RO_LOCAL : SECCAT_DATA_REL_RO;
6882 else if (reloc || flag_merge_constants < 2
6883 || ((flag_sanitize & SANITIZE_ADDRESS)
6884 /* PR 81697: for architectures that use section anchors we
6885 need to ignore DECL_RTL_SET_P (decl) for string constants
6886 inside this asan_protect_global call because otherwise
6887 we'll wrongly put them into SECCAT_RODATA_MERGE_CONST
6888 section, set DECL_RTL (decl) later on and add DECL to
6889 protected globals via successive asan_protect_global
6890 calls. In this scenario we'll end up with wrong
6891 alignment of these strings at runtime and possible ASan
6892 false positives. */
6893 && asan_protect_global (d, use_object_blocks_p ()
6894 && use_blocks_for_decl_p (d))))
6895 /* C and C++ don't allow different variables to share the same
6896 location. -fmerge-all-constants allows even that (at the
6897 expense of not conforming). */
6898 ret = SECCAT_RODATA;
6899 else if (DECL_INITIAL (decl)
6900 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST)
6901 ret = SECCAT_RODATA_MERGE_STR_INIT;
6902 else
6903 ret = SECCAT_RODATA_MERGE_CONST;
6904 }
6905 else if (TREE_CODE (decl) == CONSTRUCTOR)
6906 {
6907 if ((reloc & targetm.asm_out.reloc_rw_mask ())
6908 || TREE_SIDE_EFFECTS (decl)
6909 || ! TREE_CONSTANT (decl))
6910 ret = SECCAT_DATA;
6911 else
6912 ret = SECCAT_RODATA;
6913 }
6914 else
6915 ret = SECCAT_RODATA;
6916
6917 /* There are no read-only thread-local sections. */
6918 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
6919 {
6920 /* Note that this would be *just* SECCAT_BSS, except that there's
6921 no concept of a read-only thread-local-data section. */
6922 if (ret == SECCAT_BSS
6923 || DECL_INITIAL (decl) == NULL
6924 || (flag_zero_initialized_in_bss
6925 && initializer_zerop (DECL_INITIAL (decl))))
6926 ret = SECCAT_TBSS;
6927 else
6928 ret = SECCAT_TDATA;
6929 }
6930
6931 /* If the target uses small data sections, select it. */
6932 else if (targetm.in_small_data_p (decl))
6933 {
6934 if (ret == SECCAT_BSS)
6935 ret = SECCAT_SBSS;
6936 else if (targetm.have_srodata_section && ret == SECCAT_RODATA)
6937 ret = SECCAT_SRODATA;
6938 else
6939 ret = SECCAT_SDATA;
6940 }
6941
6942 return ret;
6943 }
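
/* An illustrative sketch of the categorization above for a few file-scope
   C declarations (non-PIC, default flags):

       int a;               ->  SECCAT_BSS
       int b = 1;           ->  SECCAT_DATA
       const int c = 1;     ->  SECCAT_RODATA
       __thread int t;      ->  SECCAT_TBSS

   With PIC, an initializer needing a runtime relocation moves the decl
   into one of the SECCAT_DATA_REL* categories instead.  */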
6944
6945 static bool
6946 decl_readonly_section_1 (enum section_category category)
6947 {
6948 switch (category)
6949 {
6950 case SECCAT_RODATA:
6951 case SECCAT_RODATA_MERGE_STR:
6952 case SECCAT_RODATA_MERGE_STR_INIT:
6953 case SECCAT_RODATA_MERGE_CONST:
6954 case SECCAT_SRODATA:
6955 return true;
6956 default:
6957 return false;
6958 }
6959 }
6960
6961 bool
6962 decl_readonly_section (const_tree decl, int reloc)
6963 {
6964 return decl_readonly_section_1 (categorize_decl_for_section (decl, reloc));
6965 }
6966
6967 /* Select a section based on the above categorization. */
6968
6969 section *
6970 default_elf_select_section (tree decl, int reloc,
6971 unsigned HOST_WIDE_INT align)
6972 {
6973 const char *sname;
6974
6975 switch (categorize_decl_for_section (decl, reloc))
6976 {
6977 case SECCAT_TEXT:
6978 /* We're not supposed to be called on FUNCTION_DECLs. */
6979 gcc_unreachable ();
6980 case SECCAT_RODATA:
6981 return readonly_data_section;
6982 case SECCAT_RODATA_MERGE_STR:
6983 return mergeable_string_section (decl, align, 0);
6984 case SECCAT_RODATA_MERGE_STR_INIT:
6985 return mergeable_string_section (DECL_INITIAL (decl), align, 0);
6986 case SECCAT_RODATA_MERGE_CONST:
6987 return mergeable_constant_section (DECL_MODE (decl), align, 0);
6988 case SECCAT_SRODATA:
6989 sname = ".sdata2";
6990 break;
6991 case SECCAT_DATA:
6992 return data_section;
6993 case SECCAT_DATA_REL:
6994 sname = ".data.rel";
6995 break;
6996 case SECCAT_DATA_REL_LOCAL:
6997 sname = ".data.rel.local";
6998 break;
6999 case SECCAT_DATA_REL_RO:
7000 sname = ".data.rel.ro";
7001 break;
7002 case SECCAT_DATA_REL_RO_LOCAL:
7003 sname = ".data.rel.ro.local";
7004 break;
7005 case SECCAT_SDATA:
7006 sname = ".sdata";
7007 break;
7008 case SECCAT_TDATA:
7009 sname = ".tdata";
7010 break;
7011 case SECCAT_BSS:
7012 if (DECL_P (decl)
7013 && lookup_attribute ("noinit", DECL_ATTRIBUTES (decl)) != NULL_TREE)
7014 {
7015 sname = ".noinit";
7016 break;
7017 }
7018
7019 if (bss_section)
7020 return bss_section;
7021 sname = ".bss";
7022 break;
7023 case SECCAT_SBSS:
7024 sname = ".sbss";
7025 break;
7026 case SECCAT_TBSS:
7027 sname = ".tbss";
7028 break;
7029 default:
7030 gcc_unreachable ();
7031 }
7032
7033 return get_named_section (decl, sname, reloc);
7034 }
7035
7036 /* Construct a unique section name based on the decl name and the
7037 categorization performed above. */
7038
7039 void
7040 default_unique_section (tree decl, int reloc)
7041 {
7042 /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
7043 bool one_only = DECL_ONE_ONLY (decl) && !HAVE_COMDAT_GROUP;
7044 const char *prefix, *name, *linkonce;
7045 char *string;
7046 tree id;
7047
7048 switch (categorize_decl_for_section (decl, reloc))
7049 {
7050 case SECCAT_TEXT:
7051 prefix = one_only ? ".t" : ".text";
7052 break;
7053 case SECCAT_RODATA:
7054 case SECCAT_RODATA_MERGE_STR:
7055 case SECCAT_RODATA_MERGE_STR_INIT:
7056 case SECCAT_RODATA_MERGE_CONST:
7057 prefix = one_only ? ".r" : ".rodata";
7058 break;
7059 case SECCAT_SRODATA:
7060 prefix = one_only ? ".s2" : ".sdata2";
7061 break;
7062 case SECCAT_DATA:
7063 prefix = one_only ? ".d" : ".data";
7064 break;
7065 case SECCAT_DATA_REL:
7066 prefix = one_only ? ".d.rel" : ".data.rel";
7067 break;
7068 case SECCAT_DATA_REL_LOCAL:
7069 prefix = one_only ? ".d.rel.local" : ".data.rel.local";
7070 break;
7071 case SECCAT_DATA_REL_RO:
7072 prefix = one_only ? ".d.rel.ro" : ".data.rel.ro";
7073 break;
7074 case SECCAT_DATA_REL_RO_LOCAL:
7075 prefix = one_only ? ".d.rel.ro.local" : ".data.rel.ro.local";
7076 break;
7077 case SECCAT_SDATA:
7078 prefix = one_only ? ".s" : ".sdata";
7079 break;
7080 case SECCAT_BSS:
7081 prefix = one_only ? ".b" : ".bss";
7082 break;
7083 case SECCAT_SBSS:
7084 prefix = one_only ? ".sb" : ".sbss";
7085 break;
7086 case SECCAT_TDATA:
7087 prefix = one_only ? ".td" : ".tdata";
7088 break;
7089 case SECCAT_TBSS:
7090 prefix = one_only ? ".tb" : ".tbss";
7091 break;
7092 default:
7093 gcc_unreachable ();
7094 }
7095
7096 id = DECL_ASSEMBLER_NAME (decl);
7097 ultimate_transparent_alias_target (&id);
7098 name = IDENTIFIER_POINTER (id);
7099 name = targetm.strip_name_encoding (name);
7100
7101 /* If we're using one_only, then there needs to be a .gnu.linkonce
7102 prefix to the section name. */
7103 linkonce = one_only ? ".gnu.linkonce" : "";
7104
7105 string = ACONCAT ((linkonce, prefix, ".", name, NULL));
7106
7107 set_decl_section_name (decl, string);
7108 }
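
/* An illustrative sketch: with -ffunction-sections/-fdata-sections on an
   ELF target, the code above produces section names such as ".text.main"
   or ".rodata.table"; when one_only must fall back to .gnu.linkonce
   (no COMDAT group support), a one-only function FOO instead lands in
   ".gnu.linkonce.t.foo".  */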
7109
7110 /* Subroutine of compute_reloc_for_rtx for leaf rtxes. */
7111
7112 static int
7113 compute_reloc_for_rtx_1 (const_rtx x)
7114 {
7115 switch (GET_CODE (x))
7116 {
7117 case SYMBOL_REF:
7118 return SYMBOL_REF_LOCAL_P (x) ? 1 : 2;
7119 case LABEL_REF:
7120 return 1;
7121 default:
7122 return 0;
7123 }
7124 }
7125
7126 /* Like compute_reloc_for_constant, except for an RTX. The return value
7127 is a mask for which bit 1 indicates a global relocation, and bit 0
7128 indicates a local relocation. */
7129
7130 static int
7131 compute_reloc_for_rtx (const_rtx x)
7132 {
7133 switch (GET_CODE (x))
7134 {
7135 case SYMBOL_REF:
7136 case LABEL_REF:
7137 return compute_reloc_for_rtx_1 (x);
7138
7139 case CONST:
7140 {
7141 int reloc = 0;
7142 subrtx_iterator::array_type array;
7143 FOR_EACH_SUBRTX (iter, array, x, ALL)
7144 reloc |= compute_reloc_for_rtx_1 (*iter);
7145 return reloc;
7146 }
7147
7148 default:
7149 return 0;
7150 }
7151 }
7152
7153 section *
7154 default_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
7155 rtx x,
7156 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
7157 {
7158 if (compute_reloc_for_rtx (x) & targetm.asm_out.reloc_rw_mask ())
7159 return data_section;
7160 else
7161 return readonly_data_section;
7162 }
7163
7164 section *
7165 default_elf_select_rtx_section (machine_mode mode, rtx x,
7166 unsigned HOST_WIDE_INT align)
7167 {
7168 int reloc = compute_reloc_for_rtx (x);
7169
7170 /* ??? Handle small data here somehow. */
7171
7172 if (reloc & targetm.asm_out.reloc_rw_mask ())
7173 {
7174 if (reloc == 1)
7175 return get_named_section (NULL, ".data.rel.ro.local", 1);
7176 else
7177 return get_named_section (NULL, ".data.rel.ro", 3);
7178 }
7179
7180 return mergeable_constant_section (mode, align, 0);
7181 }
7182
7183 /* Set the generally applicable flags on the SYMBOL_REF for EXP. */
7184
7185 void
7186 default_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
7187 {
7188 rtx symbol;
7189 int flags;
7190
7191 /* Careful not to prod global register variables. */
7192 if (!MEM_P (rtl))
7193 return;
7194 symbol = XEXP (rtl, 0);
7195 if (GET_CODE (symbol) != SYMBOL_REF)
7196 return;
7197
7198 flags = SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_HAS_BLOCK_INFO;
7199 if (TREE_CODE (decl) == FUNCTION_DECL)
7200 flags |= SYMBOL_FLAG_FUNCTION;
7201 if (targetm.binds_local_p (decl))
7202 flags |= SYMBOL_FLAG_LOCAL;
7203 if (VAR_P (decl) && DECL_THREAD_LOCAL_P (decl))
7204 flags |= DECL_TLS_MODEL (decl) << SYMBOL_FLAG_TLS_SHIFT;
7205 else if (targetm.in_small_data_p (decl))
7206 flags |= SYMBOL_FLAG_SMALL;
7207 /* ??? Why is DECL_EXTERNAL ever set for non-PUBLIC names? Without
7208 being PUBLIC, the thing *must* be defined in this translation unit.
7209 Prevent this buglet from being propagated into rtl code as well. */
7210 if (DECL_P (decl) && DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
7211 flags |= SYMBOL_FLAG_EXTERNAL;
7212
7213 SYMBOL_REF_FLAGS (symbol) = flags;
7214 }
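/* For instance, a locally binding function ends up with
SYMBOL_FLAG_FUNCTION | SYMBOL_FLAG_LOCAL set on its SYMBOL_REF, and a
thread-local variable additionally records its TLS model in the flag bits
starting at SYMBOL_FLAG_TLS_SHIFT. */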
7215
7216 /* By default, we do nothing for encode_section_info, so we need not
7217 do anything but discard the '*' marker. */
7218
7219 const char *
7220 default_strip_name_encoding (const char *str)
7221 {
7222 return str + (*str == '*');
7223 }
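/* E.g. "*foo" is returned as "foo", while a name without the '*' marker
is returned unchanged. */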
7224
7225 #ifdef ASM_OUTPUT_DEF
7226 /* The default implementation of TARGET_ASM_OUTPUT_ANCHOR. Define the
7227 anchor relative to ".", the current section position. */
7228
7229 void
7230 default_asm_output_anchor (rtx symbol)
7231 {
7232 char buffer[100];
7233
7234 sprintf (buffer, "*. + " HOST_WIDE_INT_PRINT_DEC,
7235 SYMBOL_REF_BLOCK_OFFSET (symbol));
7236 ASM_OUTPUT_DEF (asm_out_file, XSTR (symbol, 0), buffer);
7237 }
7238 #endif
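/* With the hook above, an anchor at block offset 16 is defined to the
value ". + 16"; the leading '*' only tells assemble_name to emit the rest
of the string verbatim, so a typical ELF target prints something like
".set .LANCHOR0, . + 16". */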
7239
7240 /* The default implementation of TARGET_USE_ANCHORS_FOR_SYMBOL_P. */
7241
7242 bool
7243 default_use_anchors_for_symbol_p (const_rtx symbol)
7244 {
7245 tree decl;
7246 section *sect = SYMBOL_REF_BLOCK (symbol)->sect;
7247
7248 /* This function should only be called with a non-null SYMBOL_REF_BLOCK;
7249 furthermore, get_block_for_section should not create object blocks
7250 for mergeable sections. */
7251 gcc_checking_assert (sect && !(sect->common.flags & SECTION_MERGE));
7252
7253 /* Don't use anchors for small data sections. The small data register
7254 acts as an anchor for such sections. */
7255 if (sect->common.flags & SECTION_SMALL)
7256 return false;
7257
7258 decl = SYMBOL_REF_DECL (symbol);
7259 if (decl && DECL_P (decl))
7260 {
7261 /* Don't use section anchors for decls that might be defined or
7262 usurped by other modules. */
7263 if (TREE_PUBLIC (decl) && !decl_binds_to_current_def_p (decl))
7264 return false;
7265
7266 /* Don't use section anchors for decls that will be placed in a
7267 small data section. */
7268 /* ??? Ideally, this check would be redundant with the SECTION_SMALL
7269 one above. The problem is that we only use SECTION_SMALL for
7270 sections that should be marked as small in the section directive. */
7271 if (targetm.in_small_data_p (decl))
7272 return false;
7273
7274 /* Don't use section anchors for decls that won't fit inside a single
7275 anchor range; this reduces the number of instructions required to
7276 refer to the entire declaration. */
7277 if (DECL_SIZE_UNIT (decl) == NULL_TREE
7278 || !tree_fits_uhwi_p (DECL_SIZE_UNIT (decl))
7279 || (tree_to_uhwi (DECL_SIZE_UNIT (decl))
7280 >= (unsigned HOST_WIDE_INT) targetm.max_anchor_offset))
7281 return false;
7282
7283 }
7284 return true;
7285 }
7286
7287 /* Return true when RESOLUTION indicates that the symbol will be bound
7288 to the definition provided by the current .o file. */
7289
7290 static bool
7291 resolution_to_local_definition_p (enum ld_plugin_symbol_resolution resolution)
7292 {
7293 return (resolution == LDPR_PREVAILING_DEF
7294 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7295 || resolution == LDPR_PREVAILING_DEF_IRONLY);
7296 }
7297
7298 /* Return true when RESOLUTION indicates that the symbol will be bound
7299 locally within the current executable or DSO. */
7300
7301 static bool
7302 resolution_local_p (enum ld_plugin_symbol_resolution resolution)
7303 {
7304 return (resolution == LDPR_PREVAILING_DEF
7305 || resolution == LDPR_PREVAILING_DEF_IRONLY
7306 || resolution == LDPR_PREVAILING_DEF_IRONLY_EXP
7307 || resolution == LDPR_PREEMPTED_REG
7308 || resolution == LDPR_PREEMPTED_IR
7309 || resolution == LDPR_RESOLVED_IR
7310 || resolution == LDPR_RESOLVED_EXEC);
7311 }
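/* Note that resolution_to_local_definition_p implies resolution_local_p:
a prevailing definition both binds within the module and is provided by
this object file, whereas the preempted/resolved cases only guarantee
module-local binding. */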
7312
7313 /* If COMMON_LOCAL_P is true, the linker can guarantee that an
7314 uninitialized common symbol in the executable will still be defined
7315 (through a COPY relocation) in the executable. */
7316
7317 bool
7318 default_binds_local_p_3 (const_tree exp, bool shlib, bool weak_dominate,
7319 bool extern_protected_data, bool common_local_p)
7320 {
7321 /* A non-decl is an entry in the constant pool. */
7322 if (!DECL_P (exp))
7323 return true;
7324
7325 /* Weakrefs may not bind locally, even though the weakref itself is always
7326 static and therefore local. Similarly, the resolver for ifunc functions
7327 might resolve to a non-local function.
7328 FIXME: We can resolve the weakref case more carefully by looking at the
7329 weakref alias. */
7330 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (exp))
7331 || (TREE_CODE (exp) == FUNCTION_DECL
7332 && cgraph_node::get (exp)
7333 && cgraph_node::get (exp)->ifunc_resolver))
7334 return false;
7335
7336 /* Static variables are always local. */
7337 if (! TREE_PUBLIC (exp))
7338 return true;
7339
7340 /* With a resolution file in hand, take a look at the resolutions.
7341 We can't just return true for resolved_locally symbols,
7342 because dynamic linking might overwrite symbols
7343 in shared libraries. */
7344 bool resolved_locally = false;
7345
7346 bool uninited_common = (DECL_COMMON (exp)
7347 && (DECL_INITIAL (exp) == NULL
7348 || (!in_lto_p
7349 && DECL_INITIAL (exp) == error_mark_node)));
7350
7351 /* A non-external variable is defined locally only if it isn't
7352 an uninitialized COMMON variable or common_local_p is true. */
7353 bool defined_locally = (!DECL_EXTERNAL (exp)
7354 && (!uninited_common || common_local_p));
7355 if (symtab_node *node = symtab_node::get (exp))
7356 {
7357 if (node->in_other_partition)
7358 defined_locally = true;
7359 if (node->can_be_discarded_p ())
7360 ;
7361 else if (resolution_to_local_definition_p (node->resolution))
7362 defined_locally = resolved_locally = true;
7363 else if (resolution_local_p (node->resolution))
7364 resolved_locally = true;
7365 }
7366 if (defined_locally && weak_dominate && !shlib)
7367 resolved_locally = true;
7368
7369 /* Undefined weak symbols are never defined locally. */
7370 if (DECL_WEAK (exp) && !defined_locally)
7371 return false;
7372
7373 /* A symbol is local if the user has said explicitly that it will be,
7374 or if we have a definition for the symbol. We cannot infer visibility
7375 for undefined symbols. */
7376 if (DECL_VISIBILITY (exp) != VISIBILITY_DEFAULT
7377 && (TREE_CODE (exp) == FUNCTION_DECL
7378 || !extern_protected_data
7379 || DECL_VISIBILITY (exp) != VISIBILITY_PROTECTED)
7380 && (DECL_VISIBILITY_SPECIFIED (exp) || defined_locally))
7381 return true;
7382
7383 /* If PIC, then assume that any global name can be overridden by
7384 symbols resolved from other modules. */
7385 if (shlib)
7386 return false;
7387
7388 /* Variables defined outside this object might not be local. */
7389 if (DECL_EXTERNAL (exp) && !resolved_locally)
7390 return false;
7391
7392 /* Non-dominant weak symbols are not defined locally. */
7393 if (DECL_WEAK (exp) && !resolved_locally)
7394 return false;
7395
7396 /* An uninitialized COMMON variable may be unified with symbols
7397 resolved from other modules. */
7398 if (uninited_common && !resolved_locally)
7399 return false;
7400
7401 /* Otherwise we're left with initialized (or non-common) global data
7402 which is of necessity defined locally. */
7403 return true;
7404 }
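/* A worked example with SHLIB set (e.g. when compiling for a DSO): a
public, non-weak, defined variable with default visibility reaches the
SHLIB check above and is treated as preemptible (false), while the same
variable with hidden visibility passes the visibility test and binds
locally (true). */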
7405
7406 /* Assume ELF-ish defaults, since that's pretty much the most liberal
7407 wrt cross-module name binding. */
7408
7409 bool
7410 default_binds_local_p (const_tree exp)
7411 {
7412 return default_binds_local_p_3 (exp, flag_shlib != 0, true, false, false);
7413 }
7414
7415 /* Similar to default_binds_local_p, but a common symbol may be local and
7416 extern protected data is non-local. */
7417
7418 bool
7419 default_binds_local_p_2 (const_tree exp)
7420 {
7421 return default_binds_local_p_3 (exp, flag_shlib != 0, true, true,
7422 !flag_pic);
7423 }
7424
7425 bool
7426 default_binds_local_p_1 (const_tree exp, int shlib)
7427 {
7428 return default_binds_local_p_3 (exp, shlib != 0, false, false, false);
7429 }
7430
7431 /* Return true when references to DECL must bind to the current definition
7432 in the final executable.
7433
7434 The condition is usually equivalent to whether the function binds to the
7435 current module (shared library or executable), that is, to binds_local_p.
7436 We use this fact to avoid the need for another target hook and implement
7437 the logic using binds_local_p plus the special cases where
7438 decl_binds_to_current_def_p is stronger than binds_local_p: in particular
7439 weak definitions (which can be overwritten at link time by a definition
7440 from a different object file). When resolution info is available,
7441 we simply use the knowledge passed to us by the linker plugin. */
7442 bool
7443 decl_binds_to_current_def_p (const_tree decl)
7444 {
7445 gcc_assert (DECL_P (decl));
7446 if (!targetm.binds_local_p (decl))
7447 return false;
7448 if (!TREE_PUBLIC (decl))
7449 return true;
7450
7451 /* When resolution is available, just use it. */
7452 if (symtab_node *node = symtab_node::get (decl))
7453 {
7454 if (node->resolution != LDPR_UNKNOWN
7455 && !node->can_be_discarded_p ())
7456 return resolution_to_local_definition_p (node->resolution);
7457 }
7458
7459 /* Otherwise we have to assume the worst for DECL_WEAK (hidden weaks
7460 bind locally but still can be overwritten), DECL_COMMON (can be merged
7461 with a non-common definition somewhere in the same module) or
7462 DECL_EXTERNAL.
7463 This relies on the fact that binds_local_p behaves as decl_replaceable_p
7464 for all other declaration types. */
7465 if (DECL_WEAK (decl))
7466 return false;
7467 if (DECL_COMMON (decl)
7468 && (DECL_INITIAL (decl) == NULL
7469 || (!in_lto_p && DECL_INITIAL (decl) == error_mark_node)))
7470 return false;
7471 if (DECL_EXTERNAL (decl))
7472 return false;
7473 return true;
7474 }
7475
7476 /* A replaceable function or variable is one which may be replaced
7477 at link-time with an entirely different definition, provided that the
7478 replacement has the same type. For example, functions declared
7479 with __attribute__((weak)) on most systems are replaceable.
7480
7481 COMDAT functions are not replaceable, since all definitions of the
7482 function must be equivalent. It is important that COMDAT functions
7483 not be treated as replaceable so that use of C++ template
7484 instantiations is not penalized. */
7485
7486 bool
7487 decl_replaceable_p (tree decl)
7488 {
7489 gcc_assert (DECL_P (decl));
7490 if (!TREE_PUBLIC (decl) || DECL_COMDAT (decl))
7491 return false;
7492 if (!flag_semantic_interposition
7493 && !DECL_WEAK (decl))
7494 return false;
7495 return !decl_binds_to_current_def_p (decl);
7496 }
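/* For instance, under -fno-semantic-interposition a public, non-weak,
non-COMDAT function is never treated as replaceable, even if it could
otherwise be interposed at link time. */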
7497
7498 /* Default function to output code that will globalize a label. A
7499 target must define GLOBAL_ASM_OP or provide its own function to
7500 globalize a label. */
7501 #ifdef GLOBAL_ASM_OP
7502 void
7503 default_globalize_label (FILE * stream, const char *name)
7504 {
7505 fputs (GLOBAL_ASM_OP, stream);
7506 assemble_name (stream, name);
7507 putc ('\n', stream);
7508 }
7509 #endif /* GLOBAL_ASM_OP */
7510
7511 /* Default function to output code that will globalize a declaration. */
7512 void
7513 default_globalize_decl_name (FILE * stream, tree decl)
7514 {
7515 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
7516 targetm.asm_out.globalize_label (stream, name);
7517 }
7518
7519 /* Default function to output a label for unwind information. The
7520 default is to do nothing. A target that needs nonlocal labels for
7521 unwind information must provide its own function to do this. */
7522 void
7523 default_emit_unwind_label (FILE * stream ATTRIBUTE_UNUSED,
7524 tree decl ATTRIBUTE_UNUSED,
7525 int for_eh ATTRIBUTE_UNUSED,
7526 int empty ATTRIBUTE_UNUSED)
7527 {
7528 }
7529
7530 /* Default function to output a label to divide up the exception table.
7531 The default is to do nothing. A target that needs/wants to divide
7532 up the table must provide its own function to do this. */
7533 void
7534 default_emit_except_table_label (FILE * stream ATTRIBUTE_UNUSED)
7535 {
7536 }
7537
7538 /* This is how to generate an internal numbered label in BUF, where PREFIX
7539 is the class of label and LABELNO is the number within the class. */
7540
7541 void
7542 default_generate_internal_label (char *buf, const char *prefix,
7543 unsigned long labelno)
7544 {
7545 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7546 }
7547
7548 /* This is how to output an internal numbered label where PREFIX is
7549 the class of label and LABELNO is the number within the class. */
7550
7551 void
7552 default_internal_label (FILE *stream, const char *prefix,
7553 unsigned long labelno)
7554 {
7555 char *const buf = (char *) alloca (40 + strlen (prefix));
7556 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, labelno);
7557 ASM_OUTPUT_INTERNAL_LABEL (stream, buf);
7558 }
7559
7560
7561 /* The default implementation of ASM_DECLARE_CONSTANT_NAME. */
7562
7563 void
7564 default_asm_declare_constant_name (FILE *file, const char *name,
7565 const_tree exp ATTRIBUTE_UNUSED,
7566 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
7567 {
7568 assemble_label (file, name);
7569 }
7570
7571 /* This is the default behavior at the beginning of a file. It's
7572 controlled by two other target-hook toggles. */
7573 void
7574 default_file_start (void)
7575 {
7576 if (targetm.asm_file_start_app_off
7577 && !(flag_verbose_asm || flag_debug_asm || flag_dump_rtl_in_asm))
7578 fputs (ASM_APP_OFF, asm_out_file);
7579
7580 if (targetm.asm_file_start_file_directive)
7581 {
7582 /* LTO produced units have no meaningful main_input_filename. */
7583 if (in_lto_p)
7584 output_file_directive (asm_out_file, "<artificial>");
7585 else
7586 output_file_directive (asm_out_file, main_input_filename);
7587 }
7588 }
7589
7590 /* This is a generic routine suitable for use as TARGET_ASM_FILE_END
7591 which emits a special section directive used to indicate whether or
7592 not this object file needs an executable stack. This is primarily
7593 a GNU extension to ELF but could be used on other targets. */
7594
7595 int trampolines_created;
7596
7597 void
7598 file_end_indicate_exec_stack (void)
7599 {
7600 unsigned int flags = SECTION_DEBUG;
7601 if (trampolines_created)
7602 flags |= SECTION_CODE;
7603
7604 switch_to_section (get_section (".note.GNU-stack", flags, NULL));
7605 }
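/* On a typical ELF target this results in a directive along the lines of
.section .note.GNU-stack,"",@progbits
with the "x" (executable) flag added when trampolines were created. */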
7606
7607 /* Emit a special section directive to indicate that this object file
7608 was compiled with -fsplit-stack. This is used to let the linker
7609 detect calls between split-stack code and non-split-stack code, so
7610 that it can modify the split-stack code to allocate a sufficiently
7611 large stack. We emit another special section if there are any
7612 functions in this file which have the no_split_stack attribute, to
7613 prevent the linker from warning about being unable to convert the
7614 functions if they call non-split-stack code. */
7615
7616 void
7617 file_end_indicate_split_stack (void)
7618 {
7619 if (flag_split_stack)
7620 {
7621 switch_to_section (get_section (".note.GNU-split-stack", SECTION_DEBUG,
7622 NULL));
7623 if (saw_no_split_stack)
7624 switch_to_section (get_section (".note.GNU-no-split-stack",
7625 SECTION_DEBUG, NULL));
7626 }
7627 }
7628
7629 /* Output DIRECTIVE (a C string) followed by a newline. This is used as
7630 a get_unnamed_section callback. */
7631
7632 void
7633 output_section_asm_op (const void *directive)
7634 {
7635 fprintf (asm_out_file, "%s\n", (const char *) directive);
7636 }
7637
7638 /* Emit assembly code to switch to section NEW_SECTION. Do nothing if
7639 the current section is NEW_SECTION. */
7640
7641 void
7642 switch_to_section (section *new_section)
7643 {
7644 if (in_section == new_section)
7645 return;
7646
7647 if (new_section->common.flags & SECTION_FORGET)
7648 in_section = NULL;
7649 else
7650 in_section = new_section;
7651
7652 switch (SECTION_STYLE (new_section))
7653 {
7654 case SECTION_NAMED:
7655 targetm.asm_out.named_section (new_section->named.name,
7656 new_section->named.common.flags,
7657 new_section->named.decl);
7658 break;
7659
7660 case SECTION_UNNAMED:
7661 new_section->unnamed.callback (new_section->unnamed.data);
7662 break;
7663
7664 case SECTION_NOSWITCH:
7665 gcc_unreachable ();
7666 break;
7667 }
7668
7669 new_section->common.flags |= SECTION_DECLARED;
7670 }
7671
7672 /* If block symbol SYMBOL has not yet been assigned an offset, place
7673 it at the end of its block. */
7674
7675 void
7676 place_block_symbol (rtx symbol)
7677 {
7678 unsigned HOST_WIDE_INT size, mask, offset;
7679 class constant_descriptor_rtx *desc;
7680 unsigned int alignment;
7681 struct object_block *block;
7682 tree decl;
7683
7684 gcc_assert (SYMBOL_REF_BLOCK (symbol));
7685 if (SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0)
7686 return;
7687
7688 /* Work out the symbol's size and alignment. */
7689 if (CONSTANT_POOL_ADDRESS_P (symbol))
7690 {
7691 desc = SYMBOL_REF_CONSTANT (symbol);
7692 alignment = desc->align;
7693 size = GET_MODE_SIZE (desc->mode);
7694 }
7695 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7696 {
7697 decl = SYMBOL_REF_DECL (symbol);
7698 gcc_checking_assert (DECL_IN_CONSTANT_POOL (decl));
7699 alignment = DECL_ALIGN (decl);
7700 size = get_constant_size (DECL_INITIAL (decl));
7701 if ((flag_sanitize & SANITIZE_ADDRESS)
7702 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7703 && asan_protect_global (DECL_INITIAL (decl)))
7704 {
7705 size += asan_red_zone_size (size);
7706 alignment = MAX (alignment,
7707 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7708 }
7709 }
7710 else
7711 {
7712 struct symtab_node *snode;
7713 decl = SYMBOL_REF_DECL (symbol);
7714
7715 snode = symtab_node::get (decl);
7716 if (snode->alias)
7717 {
7718 rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
7719
7720 gcc_assert (MEM_P (target)
7721 && GET_CODE (XEXP (target, 0)) == SYMBOL_REF
7722 && SYMBOL_REF_HAS_BLOCK_INFO_P (XEXP (target, 0)));
7723 target = XEXP (target, 0);
7724 place_block_symbol (target);
7725 SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
7726 return;
7727 }
7728 alignment = get_variable_align (decl);
7729 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7730 if ((flag_sanitize & SANITIZE_ADDRESS)
7731 && asan_protect_global (decl))
7732 {
7733 size += asan_red_zone_size (size);
7734 alignment = MAX (alignment,
7735 ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
7736 }
7737 }
7738
7739 /* Calculate the object's offset from the start of the block. */
7740 block = SYMBOL_REF_BLOCK (symbol);
7741 mask = alignment / BITS_PER_UNIT - 1;
7742 offset = (block->size + mask) & ~mask;
7743 SYMBOL_REF_BLOCK_OFFSET (symbol) = offset;
7744
7745 /* Record the block's new alignment and size. */
7746 block->alignment = MAX (block->alignment, alignment);
7747 block->size = offset + size;
7748
7749 vec_safe_push (block->objects, symbol);
7750 }
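/* For example, if the block currently ends at size 10 and the symbol
requires 8-byte (64-bit) alignment, MASK is 7 and the symbol is placed at
offset (10 + 7) & ~7 == 16, after which the block size becomes 16 plus
the symbol's size. */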
7751
7752 /* Return the anchor that should be used to address byte offset OFFSET
7753 from the first object in BLOCK. MODEL is the TLS model used
7754 to access it. */
7755
7756 rtx
7757 get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
7758 enum tls_model model)
7759 {
7760 char label[100];
7761 unsigned int begin, middle, end;
7762 unsigned HOST_WIDE_INT min_offset, max_offset, range, bias, delta;
7763 rtx anchor;
7764
7765 /* Work out the anchor's offset. Use an offset of 0 for the first
7766 anchor so that we don't pessimize the case where we take the address
7767 of a variable at the beginning of the block. This is particularly
7768 useful when a block has only one variable assigned to it.
7769
7770 We try to place anchors RANGE bytes apart, so there can then be
7771 anchors at +/-RANGE, +/-2 * RANGE, and so on, up to the limits of
7772 a ptr_mode offset. With some target settings, the lowest such
7773 anchor might be out of range for the lowest ptr_mode offset;
7774 likewise the highest anchor for the highest offset. Use anchors
7775 at the extreme ends of the ptr_mode range in such cases.
7776
7777 All arithmetic uses unsigned integers in order to avoid
7778 signed overflow. */
7779 max_offset = (unsigned HOST_WIDE_INT) targetm.max_anchor_offset;
7780 min_offset = (unsigned HOST_WIDE_INT) targetm.min_anchor_offset;
7781 range = max_offset - min_offset + 1;
7782 if (range == 0)
7783 offset = 0;
7784 else
7785 {
7786 bias = HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (ptr_mode) - 1);
7787 if (offset < 0)
7788 {
7789 delta = -(unsigned HOST_WIDE_INT) offset + max_offset;
7790 delta -= delta % range;
7791 if (delta > bias)
7792 delta = bias;
7793 offset = (HOST_WIDE_INT) (-delta);
7794 }
7795 else
7796 {
7797 delta = (unsigned HOST_WIDE_INT) offset - min_offset;
7798 delta -= delta % range;
7799 if (delta > bias - 1)
7800 delta = bias - 1;
7801 offset = (HOST_WIDE_INT) delta;
7802 }
7803 }
7804
7805 /* Do a binary search to see if there's already an anchor we can use.
7806 Set BEGIN to the new anchor's index if not. */
7807 begin = 0;
7808 end = vec_safe_length (block->anchors);
7809 while (begin != end)
7810 {
7811 middle = (end + begin) / 2;
7812 anchor = (*block->anchors)[middle];
7813 if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
7814 end = middle;
7815 else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
7816 begin = middle + 1;
7817 else if (SYMBOL_REF_TLS_MODEL (anchor) > model)
7818 end = middle;
7819 else if (SYMBOL_REF_TLS_MODEL (anchor) < model)
7820 begin = middle + 1;
7821 else
7822 return anchor;
7823 }
7824
7825 /* Create a new anchor with a unique label. */
7826 ASM_GENERATE_INTERNAL_LABEL (label, "LANCHOR", anchor_labelno++);
7827 anchor = create_block_symbol (ggc_strdup (label), block, offset);
7828 SYMBOL_REF_FLAGS (anchor) |= SYMBOL_FLAG_LOCAL | SYMBOL_FLAG_ANCHOR;
7829 SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
7830
7831 /* Insert it at index BEGIN. */
7832 vec_safe_insert (block->anchors, begin, anchor);
7833 return anchor;
7834 }
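/* A small worked example, assuming min_anchor_offset == -32768 and
max_anchor_offset == 32767 (so RANGE == 65536): a request for byte offset
70000 computes DELTA = 70000 - (-32768) = 102768, rounds it down to a
multiple of RANGE (65536), and so returns or creates the anchor at offset
65536, leaving the object reachable at anchor + 4464. */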
7835
7836 /* Output the objects in BLOCK. */
7837
7838 static void
7839 output_object_block (struct object_block *block)
7840 {
7841 class constant_descriptor_rtx *desc;
7842 unsigned int i;
7843 HOST_WIDE_INT offset;
7844 tree decl;
7845 rtx symbol;
7846
7847 if (!block->objects)
7848 return;
7849
7850 /* Switch to the section and make sure that the first byte is
7851 suitably aligned. */
7852 /* Special case VTV comdat sections similar to assemble_variable. */
7853 if (SECTION_STYLE (block->sect) == SECTION_NAMED
7854 && block->sect->named.name
7855 && (strcmp (block->sect->named.name, ".vtable_map_vars") == 0))
7856 handle_vtv_comdat_section (block->sect, block->sect->named.decl);
7857 else
7858 switch_to_section (block->sect);
7859
7860 gcc_checking_assert (!(block->sect->common.flags & SECTION_MERGE));
7861 assemble_align (block->alignment);
7862
7863 /* Define the values of all anchors relative to the current section
7864 position. */
7865 FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
7866 targetm.asm_out.output_anchor (symbol);
7867
7868 /* Output the objects themselves. */
7869 offset = 0;
7870 FOR_EACH_VEC_ELT (*block->objects, i, symbol)
7871 {
7872 /* Move to the object's offset, padding with zeros if necessary. */
7873 assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
7874 offset = SYMBOL_REF_BLOCK_OFFSET (symbol);
7875 if (CONSTANT_POOL_ADDRESS_P (symbol))
7876 {
7877 desc = SYMBOL_REF_CONSTANT (symbol);
7878 /* Pass 1 for align, as we have already laid out everything in the
7879 block, so aligning shouldn't be necessary. */
7880 output_constant_pool_1 (desc, 1);
7881 offset += GET_MODE_SIZE (desc->mode);
7882 }
7883 else if (TREE_CONSTANT_POOL_ADDRESS_P (symbol))
7884 {
7885 HOST_WIDE_INT size;
7886 decl = SYMBOL_REF_DECL (symbol);
7887 assemble_constant_contents (DECL_INITIAL (decl), XSTR (symbol, 0),
7888 DECL_ALIGN (decl), false);
7889
7890 size = get_constant_size (DECL_INITIAL (decl));
7891 offset += size;
7892 if ((flag_sanitize & SANITIZE_ADDRESS)
7893 && TREE_CODE (DECL_INITIAL (decl)) == STRING_CST
7894 && asan_protect_global (DECL_INITIAL (decl)))
7895 {
7896 size = asan_red_zone_size (size);
7897 assemble_zeros (size);
7898 offset += size;
7899 }
7900 }
7901 else
7902 {
7903 HOST_WIDE_INT size;
7904 decl = SYMBOL_REF_DECL (symbol);
7905 assemble_variable_contents (decl, XSTR (symbol, 0), false, false);
7906 size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
7907 offset += size;
7908 if ((flag_sanitize & SANITIZE_ADDRESS)
7909 && asan_protect_global (decl))
7910 {
7911 size = asan_red_zone_size (size);
7912 assemble_zeros (size);
7913 offset += size;
7914 }
7915 }
7916 }
7917 }
7918
7919 /* A callback for qsort to compare object_blocks. */
7920
7921 static int
7922 output_object_block_compare (const void *x, const void *y)
7923 {
7924 object_block *p1 = *(object_block * const*)x;
7925 object_block *p2 = *(object_block * const*)y;
7926
7927 if (p1->sect->common.flags & SECTION_NAMED
7928 && !(p2->sect->common.flags & SECTION_NAMED))
7929 return 1;
7930
7931 if (!(p1->sect->common.flags & SECTION_NAMED)
7932 && p2->sect->common.flags & SECTION_NAMED)
7933 return -1;
7934
7935 if (p1->sect->common.flags & SECTION_NAMED
7936 && p2->sect->common.flags & SECTION_NAMED)
7937 return strcmp (p1->sect->named.name, p2->sect->named.name);
7938
7939 unsigned f1 = p1->sect->common.flags;
7940 unsigned f2 = p2->sect->common.flags;
7941 if (f1 == f2)
7942 return 0;
7943 return f1 < f2 ? -1 : 1;
7944 }
7945
7946 /* Output the definitions of all object_blocks. */
7947
7948 void
7949 output_object_blocks (void)
7950 {
7951 vec<object_block *, va_heap> v;
7952 v.create (object_block_htab->elements ());
7953 object_block *obj;
7954 hash_table<object_block_hasher>::iterator hi;
7955
7956 FOR_EACH_HASH_TABLE_ELEMENT (*object_block_htab, obj, object_block *, hi)
7957 v.quick_push (obj);
7958
7959 /* Sort them in order to output them in a deterministic manner;
7960 otherwise we may get .rodata sections in different orders with
7961 and without -g. */
7962 v.qsort (output_object_block_compare);
7963 unsigned i;
7964 FOR_EACH_VEC_ELT (v, i, obj)
7965 output_object_block (obj);
7966
7967 v.release ();
7968 }
7969
7970 /* This function provides a possible implementation of the
7971 TARGET_ASM_RECORD_GCC_SWITCHES target hook for ELF targets. When triggered
7972 by -frecord-gcc-switches it creates a new mergeable, string section in the
7973 assembler output file called TARGET_ASM_RECORD_GCC_SWITCHES_SECTION which
7974 contains the switches in ASCII format.
7975
7976 FIXME: This code does not correctly handle double quote characters
7977 that appear inside strings (it strips them rather than preserving them).
7978 FIXME: ASM_OUTPUT_ASCII, as defined in config/elfos.h, will not emit NUL
7979 characters; instead it treats them as sub-string separators. Since
7980 we want to emit NUL string terminators into the object file, we have to use
7981 ASM_OUTPUT_SKIP. */
7982
7983 int
7984 elf_record_gcc_switches (print_switch_type type, const char * name)
7985 {
7986 switch (type)
7987 {
7988 case SWITCH_TYPE_PASSED:
7989 ASM_OUTPUT_ASCII (asm_out_file, name, strlen (name));
7990 ASM_OUTPUT_SKIP (asm_out_file, HOST_WIDE_INT_1U);
7991 break;
7992
7993 case SWITCH_TYPE_DESCRIPTIVE:
7994 if (name == NULL)
7995 {
7996 /* Distinguish between invocations where name is NULL. */
7997 static bool started = false;
7998
7999 if (!started)
8000 {
8001 section * sec;
8002
8003 sec = get_section (targetm.asm_out.record_gcc_switches_section,
8004 SECTION_DEBUG
8005 | SECTION_MERGE
8006 | SECTION_STRINGS
8007 | (SECTION_ENTSIZE & 1),
8008 NULL);
8009 switch_to_section (sec);
8010 started = true;
8011 }
8012 }
8013
8014 default:
8015 break;
8016 }
8017
8018 /* The return value is currently ignored by the caller, but must be 0.
8019 For -fverbose-asm the return value would be the number of characters
8020 emitted into the assembler file. */
8021 return 0;
8022 }
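/* With -frecord-gcc-switches, each passed switch therefore appears as a
NUL-terminated string in the mergeable string section named by
targetm.asm_out.record_gcc_switches_section (typically
".GNU.command.line" on ELF targets). */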
8023
8024 /* Emit text to declare externally defined symbols. It is needed to
8025 properly support non-default visibility. */
8026 void
8027 default_elf_asm_output_external (FILE *file ATTRIBUTE_UNUSED,
8028 tree decl,
8029 const char *name ATTRIBUTE_UNUSED)
8030 {
8031 /* We output the name if and only if TREE_SYMBOL_REFERENCED is
8032 set in order to avoid putting out names that are never really
8033 used. Always output visibility specified in the source. */
8034 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
8035 && (DECL_VISIBILITY_SPECIFIED (decl)
8036 || targetm.binds_local_p (decl)))
8037 maybe_assemble_visibility (decl);
8038 }
8039
8040 /* The default hook for TARGET_ASM_OUTPUT_SOURCE_FILENAME. */
8041
8042 void
8043 default_asm_output_source_filename (FILE *file, const char *name)
8044 {
8045 #ifdef ASM_OUTPUT_SOURCE_FILENAME
8046 ASM_OUTPUT_SOURCE_FILENAME (file, name);
8047 #else
8048 fprintf (file, "\t.file\t");
8049 output_quoted_string (file, name);
8050 putc ('\n', file);
8051 #endif
8052 }
8053
8054 /* Output a file name in the form wanted by System V. */
8055
8056 void
8057 output_file_directive (FILE *asm_file, const char *input_name)
8058 {
8059 int len;
8060 const char *na;
8061
8062 if (input_name == NULL)
8063 input_name = "<stdin>";
8064 else
8065 input_name = remap_debug_filename (input_name);
8066
8067 len = strlen (input_name);
8068 na = input_name + len;
8069
8070 /* NA gets INPUT_NAME sans directory names. */
8071 while (na > input_name)
8072 {
8073 if (IS_DIR_SEPARATOR (na[-1]))
8074 break;
8075 na--;
8076 }
8077
8078 targetm.asm_out.output_source_filename (asm_file, na);
8079 }
8080
8081 /* Create a DEBUG_EXPR_DECL / DEBUG_EXPR pair from RTL expression
8082 EXP. */
8083 rtx
8084 make_debug_expr_from_rtl (const_rtx exp)
8085 {
8086 tree ddecl = make_node (DEBUG_EXPR_DECL), type;
8087 machine_mode mode = GET_MODE (exp);
8088 rtx dval;
8089
8090 DECL_ARTIFICIAL (ddecl) = 1;
8091 if (REG_P (exp) && REG_EXPR (exp))
8092 type = TREE_TYPE (REG_EXPR (exp));
8093 else if (MEM_P (exp) && MEM_EXPR (exp))
8094 type = TREE_TYPE (MEM_EXPR (exp));
8095 else
8096 type = NULL_TREE;
8097 if (type && TYPE_MODE (type) == mode)
8098 TREE_TYPE (ddecl) = type;
8099 else
8100 TREE_TYPE (ddecl) = lang_hooks.types.type_for_mode (mode, 1);
8101 SET_DECL_MODE (ddecl, mode);
8102 dval = gen_rtx_DEBUG_EXPR (mode);
8103 DEBUG_EXPR_TREE_DECL (dval) = ddecl;
8104 SET_DECL_RTL (ddecl, dval);
8105 return dval;
8106 }
8107
8108 #ifdef ELF_ASCII_ESCAPES
8109 /* Default ASM_OUTPUT_LIMITED_STRING for ELF targets. */
8110
8111 void
8112 default_elf_asm_output_limited_string (FILE *f, const char *s)
8113 {
8114 int escape;
8115 unsigned char c;
8116
8117 fputs (STRING_ASM_OP, f);
8118 putc ('"', f);
8119 while (*s != '\0')
8120 {
8121 c = *s;
8122 escape = ELF_ASCII_ESCAPES[c];
8123 switch (escape)
8124 {
8125 case 0:
8126 putc (c, f);
8127 break;
8128 case 1:
8129 putc ('\\', f);
8130 putc ('0'+((c>>6)&7), f);
8131 putc ('0'+((c>>3)&7), f);
8132 putc ('0'+(c&7), f);
8133 break;
8134 default:
8135 putc ('\\', f);
8136 putc (escape, f);
8137 break;
8138 }
8139 s++;
8140 }
8141 putc ('\"', f);
8142 putc ('\n', f);
8143 }
8144
8145 /* Default ASM_OUTPUT_ASCII for ELF targets. */
8146
8147 void
8148 default_elf_asm_output_ascii (FILE *f, const char *s, unsigned int len)
8149 {
8150 const char *limit = s + len;
8151 const char *last_null = NULL;
8152 unsigned bytes_in_chunk = 0;
8153 unsigned char c;
8154 int escape;
8155
8156 for (; s < limit; s++)
8157 {
8158 const char *p;
8159
8160 if (bytes_in_chunk >= 60)
8161 {
8162 putc ('\"', f);
8163 putc ('\n', f);
8164 bytes_in_chunk = 0;
8165 }
8166
8167 if (s > last_null)
8168 {
8169 for (p = s; p < limit && *p != '\0'; p++)
8170 continue;
8171 last_null = p;
8172 }
8173 else
8174 p = last_null;
8175
8176 if (p < limit && (p - s) <= (long) ELF_STRING_LIMIT)
8177 {
8178 if (bytes_in_chunk > 0)
8179 {
8180 putc ('\"', f);
8181 putc ('\n', f);
8182 bytes_in_chunk = 0;
8183 }
8184
8185 default_elf_asm_output_limited_string (f, s);
8186 s = p;
8187 }
8188 else
8189 {
8190 if (bytes_in_chunk == 0)
8191 fputs (ASCII_DATA_ASM_OP "\"", f);
8192
8193 c = *s;
8194 escape = ELF_ASCII_ESCAPES[c];
8195 switch (escape)
8196 {
8197 case 0:
8198 putc (c, f);
8199 bytes_in_chunk++;
8200 break;
8201 case 1:
8202 putc ('\\', f);
8203 putc ('0'+((c>>6)&7), f);
8204 putc ('0'+((c>>3)&7), f);
8205 putc ('0'+(c&7), f);
8206 bytes_in_chunk += 4;
8207 break;
8208 default:
8209 putc ('\\', f);
8210 putc (escape, f);
8211 bytes_in_chunk += 2;
8212 break;
8213 }
8214
8215 }
8216 }
8217
8218 if (bytes_in_chunk > 0)
8219 {
8220 putc ('\"', f);
8221 putc ('\n', f);
8222 }
8223 }
8224 #endif
8225
8226 static GTY(()) section *elf_init_array_section;
8227 static GTY(()) section *elf_fini_array_section;
8228
8229 static section *
8230 get_elf_initfini_array_priority_section (int priority,
8231 bool constructor_p)
8232 {
8233 section *sec;
8234 if (priority != DEFAULT_INIT_PRIORITY)
8235 {
8236 char buf[18];
8237 sprintf (buf, "%s.%.5u",
8238 constructor_p ? ".init_array" : ".fini_array",
8239 priority);
8240 sec = get_section (buf, SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8241 }
8242 else
8243 {
8244 if (constructor_p)
8245 {
8246 if (elf_init_array_section == NULL)
8247 elf_init_array_section
8248 = get_section (".init_array",
8249 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8250 sec = elf_init_array_section;
8251 }
8252 else
8253 {
8254 if (elf_fini_array_section == NULL)
8255 elf_fini_array_section
8256 = get_section (".fini_array",
8257 SECTION_WRITE | SECTION_NOTYPE, NULL_TREE);
8258 sec = elf_fini_array_section;
8259 }
8260 }
8261 return sec;
8262 }
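/* E.g. a constructor with priority 101 is routed to ".init_array.00101",
while constructors with DEFAULT_INIT_PRIORITY share the plain
".init_array" section. */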
8263
8264 /* Use .init_array section for constructors. */
8265
8266 void
8267 default_elf_init_array_asm_out_constructor (rtx symbol, int priority)
8268 {
8269 section *sec = get_elf_initfini_array_priority_section (priority,
8270 true);
8271 assemble_addr_to_section (symbol, sec);
8272 }
8273
8274 /* Use .fini_array section for destructors. */
8275
8276 void
8277 default_elf_fini_array_asm_out_destructor (rtx symbol, int priority)
8278 {
8279 section *sec = get_elf_initfini_array_priority_section (priority,
8280 false);
8281 assemble_addr_to_section (symbol, sec);
8282 }
8283
8284 /* Default TARGET_ASM_OUTPUT_IDENT hook.
8285
8286 This is a bit of a cheat. The real default is a no-op, but this
8287 hook is the default for all targets with a .ident directive. */
8288
8289 void
8290 default_asm_output_ident_directive (const char *ident_str)
8291 {
8292 const char *ident_asm_op = "\t.ident\t";
8293
8294 /* If we are still in the front end, do not write out the string
8295 to asm_out_file. Instead, add a fake top-level asm statement.
8296 This allows the front ends to use this hook without actually
8297 writing to asm_out_file, to handle #ident or Pragma Ident. */
8298 if (symtab->state == PARSING)
8299 {
8300 char *buf = ACONCAT ((ident_asm_op, "\"", ident_str, "\"\n", NULL));
8301 symtab->finalize_toplevel_asm (build_string (strlen (buf), buf));
8302 }
8303 else
8304 fprintf (asm_out_file, "%s\"%s\"\n", ident_asm_op, ident_str);
8305 }
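/* For example, a front end handling #ident "foo" while symtab->state is
PARSING queues the text
.ident "foo"
as a top-level asm; after parsing, the same text is printed directly into
asm_out_file. */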
8306
8307
8308 /* This function ensures that vtable_map variables are not only
8309 in the comdat section, but that each variable has its own unique
8310 comdat name. Without this the variables end up in the same section
8311 with a single comdat name.
8312
8313 FIXME: resolve_unique_section needs to deal better with
8314 decls with both DECL_SECTION_NAME and DECL_ONE_ONLY. Once
8315 that is fixed, this if-else statement can be replaced with
8316 a single call to "switch_to_section (sect)". */
8317
8318 static void
8319 handle_vtv_comdat_section (section *sect, const_tree decl ATTRIBUTE_UNUSED)
8320 {
8321 #if defined (OBJECT_FORMAT_ELF)
8322 targetm.asm_out.named_section (sect->named.name,
8323 sect->named.common.flags
8324 | SECTION_LINKONCE,
8325 DECL_NAME (decl));
8326 in_section = sect;
8327 #else
8328 /* Neither OBJECT_FORMAT_PE nor OBJECT_FORMAT_COFF is set here,
8329 so the following check is used instead.
8330 In case the target is PE or COFF, a comdat group section
8331 is created, e.g. .vtable_map_vars$foo. The linker places
8332 everything in .vtable_map_vars at the end.
8333
8334 A fix could be made in
8335 gcc/config/i386/winnt.c: i386_pe_unique_section. */
8336 if (TARGET_PECOFF)
8337 {
8338 char *name;
8339
8340 if (TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE)
8341 name = ACONCAT ((sect->named.name, "$",
8342 IDENTIFIER_POINTER (DECL_NAME (decl)), NULL));
8343 else
8344 name = ACONCAT ((sect->named.name, "$",
8345 IDENTIFIER_POINTER (DECL_COMDAT_GROUP (DECL_NAME (decl))),
8346 NULL));
8347
8348 targetm.asm_out.named_section (name,
8349 sect->named.common.flags
8350 | SECTION_LINKONCE,
8351 DECL_NAME (decl));
8352 in_section = sect;
8353 }
8354 else
8355 switch_to_section (sect);
8356 #endif
8357 }
8358
8359 #include "gt-varasm.h"