[gcc.git] / gcc / final.c
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
 43    The code for the function prologue and epilogue is generated
 44    directly in assembler by the target functions function_prologue and
 45    function_epilogue.  Those instructions never exist as rtl.  */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74 #include "tree-pass.h"
75 #include "tree-flow.h"
76 #include "timevar.h"
77 #include "cgraph.h"
78 #include "coverage.h"
79 #include "df.h"
80 #include "vecprim.h"
81 #include "ggc.h"
82 #include "cfgloop.h"
83 #include "params.h"
84
85 #ifdef XCOFF_DEBUGGING_INFO
86 #include "xcoffout.h" /* Needed for external data
87 declarations for e.g. AIX 4.x. */
88 #endif
89
90 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
91 #include "dwarf2out.h"
92 #endif
93
94 #ifdef DBX_DEBUGGING_INFO
95 #include "dbxout.h"
96 #endif
97
98 #ifdef SDB_DEBUGGING_INFO
99 #include "sdbout.h"
100 #endif
101
102 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
103 null default for it to save conditionalization later. */
104 #ifndef CC_STATUS_INIT
105 #define CC_STATUS_INIT
106 #endif
107
108 /* How to start an assembler comment. */
109 #ifndef ASM_COMMENT_START
110 #define ASM_COMMENT_START ";#"
111 #endif
112
113 /* Is the given character a logical line separator for the assembler? */
114 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
115 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C, STR) ((C) == ';')
116 #endif
117
118 #ifndef JUMP_TABLES_IN_TEXT_SECTION
119 #define JUMP_TABLES_IN_TEXT_SECTION 0
120 #endif
121
122 /* Bitflags used by final_scan_insn. */
123 #define SEEN_BB 1
124 #define SEEN_NOTE 2
125 #define SEEN_EMITTED 4
126
127 /* Last insn processed by final_scan_insn. */
128 static rtx debug_insn;
129 rtx current_output_insn;
130
131 /* Line number of last NOTE. */
132 static int last_linenum;
133
134 /* Last discriminator written to assembly. */
135 static int last_discriminator;
136
137 /* Discriminator of current block. */
138 static int discriminator;
139
140 /* Highest line number in current block. */
141 static int high_block_linenum;
142
143 /* Likewise for function. */
144 static int high_function_linenum;
145
146 /* Filename of last NOTE. */
147 static const char *last_filename;
148
149 /* Override filename and line number. */
150 static const char *override_filename;
151 static int override_linenum;
152
153 /* Whether to force emission of a line note before the next insn. */
154 static bool force_source_line = false;
155
156 extern const int length_unit_log; /* This is defined in insn-attrtab.c. */
157
158 /* Nonzero while outputting an `asm' with operands.
159 This means that inconsistencies are the user's fault, so don't die.
160 The precise value is the insn being output, to pass to error_for_asm. */
161 rtx this_is_asm_operands;
162
163 /* Number of operands of this insn, for an `asm' with operands. */
164 static unsigned int insn_noperands;
165
 166 /* Last compare insn ignored because the condition codes already held its result.  */
167
168 static rtx last_ignored_compare = 0;
169
170 /* Assign a unique number to each insn that is output.
171 This can be used to generate unique local labels. */
172
173 static int insn_counter = 0;
174
175 #ifdef HAVE_cc0
176 /* This variable contains machine-dependent flags (defined in tm.h)
177 set and examined by output routines
178 that describe how to interpret the condition codes properly. */
179
180 CC_STATUS cc_status;
181
182 /* During output of an insn, this contains a copy of cc_status
183 from before the insn. */
184
185 CC_STATUS cc_prev_status;
186 #endif
187
188 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
189
190 static int block_depth;
191
 192 /* Nonzero if we have enabled APP processing of our assembler output.  */
193
194 static int app_on;
195
196 /* If we are outputting an insn sequence, this contains the sequence rtx.
197 Zero otherwise. */
198
199 rtx final_sequence;
200
201 #ifdef ASSEMBLER_DIALECT
202
203 /* Number of the assembler dialect to use, starting at 0. */
204 static int dialect_number;
205 #endif
206
207 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
208 rtx current_insn_predicate;
209
210 #ifdef HAVE_ATTR_length
211 static int asm_insn_count (rtx);
212 #endif
213 static void profile_function (FILE *);
214 static void profile_after_prologue (FILE *);
215 static bool notice_source_line (rtx, bool *);
216 static rtx walk_alter_subreg (rtx *, bool *);
217 static void output_asm_name (void);
218 static void output_alternate_entry_point (FILE *, rtx);
219 static tree get_mem_expr_from_op (rtx, int *);
220 static void output_asm_operand_names (rtx *, int *, int);
221 static void output_operand (rtx, int);
222 #ifdef LEAF_REGISTERS
223 static void leaf_renumber_regs (rtx);
224 #endif
225 #ifdef HAVE_cc0
226 static int alter_cond (rtx);
227 #endif
228 #ifndef ADDR_VEC_ALIGN
229 static int final_addr_vec_align (rtx);
230 #endif
231 #ifdef HAVE_ATTR_length
232 static int align_fuzz (rtx, rtx, int, unsigned);
233 #endif
234 \f
235 /* Initialize data in final at the beginning of a compilation. */
236
237 void
238 init_final (const char *filename ATTRIBUTE_UNUSED)
239 {
240 app_on = 0;
241 final_sequence = 0;
242
243 #ifdef ASSEMBLER_DIALECT
244 dialect_number = ASSEMBLER_DIALECT;
245 #endif
246 }
247
248 /* Default target function prologue and epilogue assembler output.
249
250 If not overridden for epilogue code, then the function body itself
251 contains return instructions wherever needed. */
252 void
253 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
254 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
255 {
256 }
257
258 /* Default target hook that outputs nothing to a stream. */
259 void
260 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
261 {
262 }
263
264 /* Enable APP processing of subsequent output.
265 Used before the output from an `asm' statement. */
266
267 void
268 app_enable (void)
269 {
270 if (! app_on)
271 {
272 fputs (ASM_APP_ON, asm_out_file);
273 app_on = 1;
274 }
275 }
276
277 /* Disable APP processing of subsequent output.
278 Called from varasm.c before most kinds of output. */
279
280 void
281 app_disable (void)
282 {
283 if (app_on)
284 {
285 fputs (ASM_APP_OFF, asm_out_file);
286 app_on = 0;
287 }
288 }
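/* With the common fallback definitions ASM_APP_ON == "#APP\n" and
   ASM_APP_OFF == "#NO_APP\n", the two functions above simply bracket the
   text of user `asm' statements between #APP / #NO_APP markers in the
   assembler output.  (The exact strings are target-configurable; the
   values shown are only the usual defaults.)  */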
289 \f
290 /* Return the number of slots filled in the current
291 delayed branch sequence (we don't count the insn needing the
292 delay slot). Zero if not in a delayed branch sequence. */
293
294 #ifdef DELAY_SLOTS
295 int
296 dbr_sequence_length (void)
297 {
298 if (final_sequence != 0)
299 return XVECLEN (final_sequence, 0) - 1;
300 else
301 return 0;
302 }
303 #endif
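/* For example, while final is emitting a SEQUENCE consisting of a branch
   and two insns in its delay slots, XVECLEN (final_sequence, 0) is 3 and
   dbr_sequence_length () therefore returns 2; outside of any delay-slot
   sequence it returns 0.  */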
304 \f
305 /* The next two pages contain routines used to compute the length of an insn
306 and to shorten branches. */
307
308 /* Arrays for insn lengths, and addresses. The latter is referenced by
309 `insn_current_length'. */
310
311 static int *insn_lengths;
312
313 VEC(int,heap) *insn_addresses_;
314
315 /* Max uid for which the above arrays are valid. */
316 static int insn_lengths_max_uid;
317
318 /* Address of insn being processed. Used by `insn_current_length'. */
319 int insn_current_address;
320
321 /* Address of insn being processed in previous iteration. */
322 int insn_last_address;
323
 324 /* Known invariant alignment of the insn being processed.  */
325 int insn_current_align;
326
327 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
328 gives the next following alignment insn that increases the known
329 alignment, or NULL_RTX if there is no such insn.
 330    For any alignment obtained this way, we can again index uid_align with
 331    its uid to obtain the next following align that in turn increases the
 332    alignment, until we reach NULL_RTX; in the comments below, the
 333    sequence obtained this way for an insn is called the alignment chain
 334    of that insn.  */
335
336 struct label_alignment
337 {
338 short alignment;
339 short max_skip;
340 };
341
342 static rtx *uid_align;
343 static int *uid_shuid;
344 static struct label_alignment *label_align;
345
346 /* Indicate that branch shortening hasn't yet been done. */
347
348 void
349 init_insn_lengths (void)
350 {
351 if (uid_shuid)
352 {
353 free (uid_shuid);
354 uid_shuid = 0;
355 }
356 if (insn_lengths)
357 {
358 free (insn_lengths);
359 insn_lengths = 0;
360 insn_lengths_max_uid = 0;
361 }
362 #ifdef HAVE_ATTR_length
363 INSN_ADDRESSES_FREE ();
364 #endif
365 if (uid_align)
366 {
367 free (uid_align);
368 uid_align = 0;
369 }
370 }
371
372 /* Obtain the current length of an insn. If branch shortening has been done,
373 get its actual length. Otherwise, use FALLBACK_FN to calculate the
374 length. */
375 static inline int
376 get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED,
377 int (*fallback_fn) (rtx) ATTRIBUTE_UNUSED)
378 {
379 #ifdef HAVE_ATTR_length
380 rtx body;
381 int i;
382 int length = 0;
383
384 if (insn_lengths_max_uid > INSN_UID (insn))
385 return insn_lengths[INSN_UID (insn)];
386 else
387 switch (GET_CODE (insn))
388 {
389 case NOTE:
390 case BARRIER:
391 case CODE_LABEL:
392 case DEBUG_INSN:
393 return 0;
394
395 case CALL_INSN:
396 length = fallback_fn (insn);
397 break;
398
399 case JUMP_INSN:
400 body = PATTERN (insn);
401 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
402 {
403 /* Alignment is machine-dependent and should be handled by
404 ADDR_VEC_ALIGN. */
405 }
406 else
407 length = fallback_fn (insn);
408 break;
409
410 case INSN:
411 body = PATTERN (insn);
412 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
413 return 0;
414
415 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
416 length = asm_insn_count (body) * fallback_fn (insn);
417 else if (GET_CODE (body) == SEQUENCE)
418 for (i = 0; i < XVECLEN (body, 0); i++)
419 length += get_attr_length_1 (XVECEXP (body, 0, i), fallback_fn);
420 else
421 length = fallback_fn (insn);
422 break;
423
424 default:
425 break;
426 }
427
428 #ifdef ADJUST_INSN_LENGTH
429 ADJUST_INSN_LENGTH (insn, length);
430 #endif
431 return length;
432 #else /* not HAVE_ATTR_length */
433 return 0;
434 #define insn_default_length 0
435 #define insn_min_length 0
436 #endif /* not HAVE_ATTR_length */
437 }
438
439 /* Obtain the current length of an insn. If branch shortening has been done,
440 get its actual length. Otherwise, get its maximum length. */
441 int
442 get_attr_length (rtx insn)
443 {
444 return get_attr_length_1 (insn, insn_default_length);
445 }
446
447 /* Obtain the current length of an insn. If branch shortening has been done,
448 get its actual length. Otherwise, get its minimum length. */
449 int
450 get_attr_min_length (rtx insn)
451 {
452 return get_attr_length_1 (insn, insn_min_length);
453 }
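/* A minimal sketch of how a back end might consult these queries when
   choosing between a short and a long form of an output template.  The
   mnemonics and the 4-byte threshold below are hypothetical and not taken
   from any real port.  */
#if 0
static const char *
example_output_branch (rtx insn)
{
  /* Before shorten_branches has run, get_attr_length returns the default
     (worst-case) length, so the long form is chosen conservatively.  */
  if (get_attr_length (insn) == 4)
    return "bshort\t%l0";
  return "blong\t%l0";
}
#endif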
454 \f
455 /* Code to handle alignment inside shorten_branches. */
456
 457 /* Here is an explanation of how the algorithm in align_fuzz can give
458 proper results:
459
460 Call a sequence of instructions beginning with alignment point X
461 and continuing until the next alignment point `block X'. When `X'
462 is used in an expression, it means the alignment value of the
463 alignment point.
464
465 Call the distance between the start of the first insn of block X, and
466 the end of the last insn of block X `IX', for the `inner size of X'.
467 This is clearly the sum of the instruction lengths.
468
469 Likewise with the next alignment-delimited block following X, which we
470 shall call block Y.
471
472 Call the distance between the start of the first insn of block X, and
473 the start of the first insn of block Y `OX', for the `outer size of X'.
474
475 The estimated padding is then OX - IX.
476
477 OX can be safely estimated as
478
479 if (X >= Y)
480 OX = round_up(IX, Y)
481 else
482 OX = round_up(IX, X) + Y - X
483
484 Clearly est(IX) >= real(IX), because that only depends on the
485 instruction lengths, and those being overestimated is a given.
486
487 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
488 we needn't worry about that when thinking about OX.
489
490 When X >= Y, the alignment provided by Y adds no uncertainty factor
491 for branch ranges starting before X, so we can just round what we have.
492 But when X < Y, we don't know anything about the, so to speak,
493 `middle bits', so we have to assume the worst when aligning up from an
494 address mod X to one mod Y, which is Y - X. */
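/* A worked instance of the estimate above, with hypothetical numbers:
   let X = 4, Y = 8 and IX = 10.  Since X < Y,

	OX = round_up (10, 4) + 8 - 4 = 12 + 4 = 16.

   This is safe: we only know the start of block X modulo 4, and in the
   worst case it is also 0 modulo 8, so the 10 bytes of insns end 2 bytes
   past an 8-byte boundary, 6 bytes of padding are inserted, and the real
   OX is likewise 16.  */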
495
496 #ifndef LABEL_ALIGN
497 #define LABEL_ALIGN(LABEL) align_labels_log
498 #endif
499
500 #ifndef LABEL_ALIGN_MAX_SKIP
501 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
502 #endif
503
504 #ifndef LOOP_ALIGN
505 #define LOOP_ALIGN(LABEL) align_loops_log
506 #endif
507
508 #ifndef LOOP_ALIGN_MAX_SKIP
509 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
510 #endif
511
512 #ifndef LABEL_ALIGN_AFTER_BARRIER
513 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
514 #endif
515
516 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
517 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
518 #endif
519
520 #ifndef JUMP_ALIGN
521 #define JUMP_ALIGN(LABEL) align_jumps_log
522 #endif
523
524 #ifndef JUMP_ALIGN_MAX_SKIP
525 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
526 #endif
527
528 #ifndef ADDR_VEC_ALIGN
529 static int
530 final_addr_vec_align (rtx addr_vec)
531 {
532 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
533
534 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
535 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
536 return exact_log2 (align);
537
538 }
539
540 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
541 #endif
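/* For example, an ADDR_VEC whose elements are HImode has
   GET_MODE_SIZE == 2, so the default above requests exact_log2 (2) == 1,
   i.e. 2-byte alignment for the jump table, capped at
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */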
542
543 #ifndef INSN_LENGTH_ALIGNMENT
544 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
545 #endif
546
547 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
548
549 static int min_labelno, max_labelno;
550
551 #define LABEL_TO_ALIGNMENT(LABEL) \
552 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
553
554 #define LABEL_TO_MAX_SKIP(LABEL) \
555 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
556
 557 /* For the benefit of port-specific code, do this also as a function.  */
558
559 int
560 label_to_alignment (rtx label)
561 {
562 if (CODE_LABEL_NUMBER (label) <= max_labelno)
563 return LABEL_TO_ALIGNMENT (label);
564 return 0;
565 }
566
567 int
568 label_to_max_skip (rtx label)
569 {
570 if (CODE_LABEL_NUMBER (label) <= max_labelno)
571 return LABEL_TO_MAX_SKIP (label);
572 return 0;
573 }
574
575 #ifdef HAVE_ATTR_length
576 /* The differences in addresses
577 between a branch and its target might grow or shrink depending on
578 the alignment the start insn of the range (the branch for a forward
579 branch or the label for a backward branch) starts out on; if these
580 differences are used naively, they can even oscillate infinitely.
581 We therefore want to compute a 'worst case' address difference that
 582    is independent of the alignment the start insn of the range ends
583 up on, and that is at least as large as the actual difference.
584 The function align_fuzz calculates the amount we have to add to the
585 naively computed difference, by traversing the part of the alignment
586 chain of the start insn of the range that is in front of the end insn
587 of the range, and considering for each alignment the maximum amount
588 that it might contribute to a size increase.
589
590 For casesi tables, we also want to know worst case minimum amounts of
591 address difference, in case a machine description wants to introduce
592 some common offset that is added to all offsets in a table.
593 For this purpose, align_fuzz with a growth argument of 0 computes the
594 appropriate adjustment. */
595
596 /* Compute the maximum delta by which the difference of the addresses of
597 START and END might grow / shrink due to a different address for start
598 which changes the size of alignment insns between START and END.
599 KNOWN_ALIGN_LOG is the alignment known for START.
600 GROWTH should be ~0 if the objective is to compute potential code size
601 increase, and 0 if the objective is to compute potential shrink.
602 The return value is undefined for any other value of GROWTH. */
603
604 static int
605 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
606 {
607 int uid = INSN_UID (start);
608 rtx align_label;
609 int known_align = 1 << known_align_log;
610 int end_shuid = INSN_SHUID (end);
611 int fuzz = 0;
612
613 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
614 {
615 int align_addr, new_align;
616
617 uid = INSN_UID (align_label);
618 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
619 if (uid_shuid[uid] > end_shuid)
620 break;
621 known_align_log = LABEL_TO_ALIGNMENT (align_label);
622 new_align = 1 << known_align_log;
623 if (new_align < known_align)
624 continue;
625 fuzz += (-align_addr ^ growth) & (new_align - known_align);
626 known_align = new_align;
627 }
628 return fuzz;
629 }
630
631 /* Compute a worst-case reference address of a branch so that it
632 can be safely used in the presence of aligned labels. Since the
633 size of the branch itself is unknown, the size of the branch is
634 not included in the range. I.e. for a forward branch, the reference
635 address is the end address of the branch as known from the previous
636 branch shortening pass, minus a value to account for possible size
637 increase due to alignment. For a backward branch, it is the start
638 address of the branch as known from the current pass, plus a value
639 to account for possible size increase due to alignment.
640 NB.: Therefore, the maximum offset allowed for backward branches needs
641 to exclude the branch size. */
642
643 int
644 insn_current_reference_address (rtx branch)
645 {
646 rtx dest, seq;
647 int seq_uid;
648
649 if (! INSN_ADDRESSES_SET_P ())
650 return 0;
651
652 seq = NEXT_INSN (PREV_INSN (branch));
653 seq_uid = INSN_UID (seq);
654 if (!JUMP_P (branch))
655 /* This can happen for example on the PA; the objective is to know the
656 offset to address something in front of the start of the function.
657 Thus, we can treat it like a backward branch.
658 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
659 any alignment we'd encounter, so we skip the call to align_fuzz. */
660 return insn_current_address;
661 dest = JUMP_LABEL (branch);
662
663 /* BRANCH has no proper alignment chain set, so use SEQ.
664 BRANCH also has no INSN_SHUID. */
665 if (INSN_SHUID (seq) < INSN_SHUID (dest))
666 {
667 /* Forward branch. */
668 return (insn_last_address + insn_lengths[seq_uid]
669 - align_fuzz (seq, dest, length_unit_log, ~0));
670 }
671 else
672 {
673 /* Backward branch. */
674 return (insn_current_address
675 + align_fuzz (dest, seq, length_unit_log, ~0));
676 }
677 }
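/* A worked instance with hypothetical numbers: suppose a forward branch's
   SEQ ended at address 100 in the previous pass, i.e.
   insn_last_address + insn_lengths[seq_uid] == 100, and align_fuzz reports
   that the alignment points between it and its target can add at most
   6 bytes.  The reference address used is then 94, so the distance to the
   target is over- rather than under-estimated, and a branch judged to be
   in range from address 94 stays in range however the padding turns out.  */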
678 #endif /* HAVE_ATTR_length */
679 \f
680 /* Compute branch alignments based on frequency information in the
681 CFG. */
682
683 unsigned int
684 compute_alignments (void)
685 {
686 int log, max_skip, max_log;
687 basic_block bb;
688 int freq_max = 0;
689 int freq_threshold = 0;
690
691 if (label_align)
692 {
693 free (label_align);
694 label_align = 0;
695 }
696
697 max_labelno = max_label_num ();
698 min_labelno = get_first_label_num ();
699 label_align = XCNEWVEC (struct label_alignment, max_labelno - min_labelno + 1);
700
701 /* If not optimizing or optimizing for size, don't assign any alignments. */
702 if (! optimize || optimize_function_for_size_p (cfun))
703 return 0;
704
705 if (dump_file)
706 {
707 dump_flow_info (dump_file, TDF_DETAILS);
708 flow_loops_dump (dump_file, NULL, 1);
709 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
710 }
711 FOR_EACH_BB (bb)
712 if (bb->frequency > freq_max)
713 freq_max = bb->frequency;
714 freq_threshold = freq_max / PARAM_VALUE (PARAM_ALIGN_THRESHOLD);
715
716 if (dump_file)
717 fprintf(dump_file, "freq_max: %i\n",freq_max);
718 FOR_EACH_BB (bb)
719 {
720 rtx label = BB_HEAD (bb);
721 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
722 edge e;
723 edge_iterator ei;
724
725 if (!LABEL_P (label)
726 || optimize_bb_for_size_p (bb))
727 {
728 if (dump_file)
729 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i skipped.\n",
730 bb->index, bb->frequency, bb->loop_father->num, bb->loop_depth);
731 continue;
732 }
733 max_log = LABEL_ALIGN (label);
734 max_skip = LABEL_ALIGN_MAX_SKIP;
735
736 FOR_EACH_EDGE (e, ei, bb->preds)
737 {
738 if (e->flags & EDGE_FALLTHRU)
739 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
740 else
741 branch_frequency += EDGE_FREQUENCY (e);
742 }
743 if (dump_file)
744 {
745 fprintf(dump_file, "BB %4i freq %4i loop %2i loop_depth %2i fall %4i branch %4i",
746 bb->index, bb->frequency, bb->loop_father->num,
747 bb->loop_depth,
748 fallthru_frequency, branch_frequency);
749 if (!bb->loop_father->inner && bb->loop_father->num)
750 fprintf (dump_file, " inner_loop");
751 if (bb->loop_father->header == bb)
752 fprintf (dump_file, " loop_header");
753 fprintf (dump_file, "\n");
754 }
755
 756	      /* There are two reasons to align a block that has no incoming fallthru edge:
 757		 1) to avoid fetch stalls when the branch destination is near a cache boundary
 758		 2) to improve cache efficiency in case the previous block is not executed
 759		    (so it does not need to be in the cache).
 760
 761		 To catch the first case, we align frequently executed blocks.
 762		 To catch the second, we align blocks that are executed more frequently
 763		 than the predecessor, when the predecessor is unlikely to be executed
 764		 at the time the function is called.  */
765
766 if (!has_fallthru
767 && (branch_frequency > freq_threshold
768 || (bb->frequency > bb->prev_bb->frequency * 10
769 && (bb->prev_bb->frequency
770 <= ENTRY_BLOCK_PTR->frequency / 2))))
771 {
772 log = JUMP_ALIGN (label);
773 if (dump_file)
774 fprintf(dump_file, " jump alignment added.\n");
775 if (max_log < log)
776 {
777 max_log = log;
778 max_skip = JUMP_ALIGN_MAX_SKIP;
779 }
780 }
 781	      /* If the block is frequent and reached mostly by non-fallthru edges,
 782		 align it.  It is most likely the first block of a loop.  */
783 if (has_fallthru
784 && optimize_bb_for_speed_p (bb)
785 && branch_frequency + fallthru_frequency > freq_threshold
786 && (branch_frequency
787 > fallthru_frequency * PARAM_VALUE (PARAM_ALIGN_LOOP_ITERATIONS)))
788 {
789 log = LOOP_ALIGN (label);
790 if (dump_file)
791 fprintf(dump_file, " internal loop alignment added.\n");
792 if (max_log < log)
793 {
794 max_log = log;
795 max_skip = LOOP_ALIGN_MAX_SKIP;
796 }
797 }
798 LABEL_TO_ALIGNMENT (label) = max_log;
799 LABEL_TO_MAX_SKIP (label) = max_skip;
800 }
801
802 if (dump_file)
803 {
804 loop_optimizer_finalize ();
805 free_dominance_info (CDI_DOMINATORS);
806 }
807 return 0;
808 }
809
810 struct rtl_opt_pass pass_compute_alignments =
811 {
812 {
813 RTL_PASS,
814 "alignments", /* name */
815 NULL, /* gate */
816 compute_alignments, /* execute */
817 NULL, /* sub */
818 NULL, /* next */
819 0, /* static_pass_number */
820 TV_NONE, /* tv_id */
821 0, /* properties_required */
822 0, /* properties_provided */
823 0, /* properties_destroyed */
824 0, /* todo_flags_start */
825 TODO_dump_func | TODO_verify_rtl_sharing
826 | TODO_ggc_collect /* todo_flags_finish */
827 }
828 };
829
830 \f
831 /* Make a pass over all insns and compute their actual lengths by shortening
832 any branches of variable length if possible. */
833
834 /* shorten_branches might be called multiple times: for example, the SH
835 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
836 In order to do this, it needs proper length information, which it obtains
837 by calling shorten_branches. This cannot be collapsed with
838 shorten_branches itself into a single pass unless we also want to integrate
839 reorg.c, since the branch splitting exposes new instructions with delay
840 slots. */
841
842 void
843 shorten_branches (rtx first ATTRIBUTE_UNUSED)
844 {
845 rtx insn;
846 int max_uid;
847 int i;
848 int max_log;
849 int max_skip;
850 #ifdef HAVE_ATTR_length
851 #define MAX_CODE_ALIGN 16
852 rtx seq;
853 int something_changed = 1;
854 char *varying_length;
855 rtx body;
856 int uid;
857 rtx align_tab[MAX_CODE_ALIGN];
858
859 #endif
860
861 /* Compute maximum UID and allocate label_align / uid_shuid. */
862 max_uid = get_max_uid ();
863
864 /* Free uid_shuid before reallocating it. */
865 free (uid_shuid);
866
867 uid_shuid = XNEWVEC (int, max_uid);
868
869 if (max_labelno != max_label_num ())
870 {
871 int old = max_labelno;
872 int n_labels;
873 int n_old_labels;
874
875 max_labelno = max_label_num ();
876
877 n_labels = max_labelno - min_labelno + 1;
878 n_old_labels = old - min_labelno + 1;
879
880 label_align = XRESIZEVEC (struct label_alignment, label_align, n_labels);
881
 882	  /* The range of labels grows monotonically in the function.  Failing here
 883	     means that the initialization of the array was lost.  */
884 gcc_assert (n_old_labels <= n_labels);
885
886 memset (label_align + n_old_labels, 0,
887 (n_labels - n_old_labels) * sizeof (struct label_alignment));
888 }
889
890 /* Initialize label_align and set up uid_shuid to be strictly
891 monotonically rising with insn order. */
892 /* We use max_log here to keep track of the maximum alignment we want to
893 impose on the next CODE_LABEL (or the current one if we are processing
894 the CODE_LABEL itself). */
895
896 max_log = 0;
897 max_skip = 0;
898
899 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
900 {
901 int log;
902
903 INSN_SHUID (insn) = i++;
904 if (INSN_P (insn))
905 continue;
906
907 if (LABEL_P (insn))
908 {
909 rtx next;
910 bool next_is_jumptable;
911
912 /* Merge in alignments computed by compute_alignments. */
913 log = LABEL_TO_ALIGNMENT (insn);
914 if (max_log < log)
915 {
916 max_log = log;
917 max_skip = LABEL_TO_MAX_SKIP (insn);
918 }
919
920 next = next_nonnote_insn (insn);
921 next_is_jumptable = next && JUMP_TABLE_DATA_P (next);
922 if (!next_is_jumptable)
923 {
924 log = LABEL_ALIGN (insn);
925 if (max_log < log)
926 {
927 max_log = log;
928 max_skip = LABEL_ALIGN_MAX_SKIP;
929 }
930 }
931 /* ADDR_VECs only take room if read-only data goes into the text
932 section. */
933 if ((JUMP_TABLES_IN_TEXT_SECTION
934 || readonly_data_section == text_section)
935 && next_is_jumptable)
936 {
937 log = ADDR_VEC_ALIGN (next);
938 if (max_log < log)
939 {
940 max_log = log;
941 max_skip = LABEL_ALIGN_MAX_SKIP;
942 }
943 }
944 LABEL_TO_ALIGNMENT (insn) = max_log;
945 LABEL_TO_MAX_SKIP (insn) = max_skip;
946 max_log = 0;
947 max_skip = 0;
948 }
949 else if (BARRIER_P (insn))
950 {
951 rtx label;
952
953 for (label = insn; label && ! INSN_P (label);
954 label = NEXT_INSN (label))
955 if (LABEL_P (label))
956 {
957 log = LABEL_ALIGN_AFTER_BARRIER (insn);
958 if (max_log < log)
959 {
960 max_log = log;
961 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
962 }
963 break;
964 }
965 }
966 }
967 #ifdef HAVE_ATTR_length
968
969 /* Allocate the rest of the arrays. */
970 insn_lengths = XNEWVEC (int, max_uid);
971 insn_lengths_max_uid = max_uid;
972 /* Syntax errors can lead to labels being outside of the main insn stream.
973 Initialize insn_addresses, so that we get reproducible results. */
974 INSN_ADDRESSES_ALLOC (max_uid);
975
976 varying_length = XCNEWVEC (char, max_uid);
977
978 /* Initialize uid_align. We scan instructions
979 from end to start, and keep in align_tab[n] the last seen insn
980 that does an alignment of at least n+1, i.e. the successor
981 in the alignment chain for an insn that does / has a known
982 alignment of n. */
983 uid_align = XCNEWVEC (rtx, max_uid);
984
985 for (i = MAX_CODE_ALIGN; --i >= 0;)
986 align_tab[i] = NULL_RTX;
987 seq = get_last_insn ();
988 for (; seq; seq = PREV_INSN (seq))
989 {
990 int uid = INSN_UID (seq);
991 int log;
992 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
993 uid_align[uid] = align_tab[0];
994 if (log)
995 {
996 /* Found an alignment label. */
997 uid_align[uid] = align_tab[log];
998 for (i = log - 1; i >= 0; i--)
999 align_tab[i] = seq;
1000 }
1001 }
1002 #ifdef CASE_VECTOR_SHORTEN_MODE
1003 if (optimize)
1004 {
1005 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
1006 label fields. */
1007
1008 int min_shuid = INSN_SHUID (get_insns ()) - 1;
1009 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
1010 int rel;
1011
1012 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
1013 {
1014 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
1015 int len, i, min, max, insn_shuid;
1016 int min_align;
1017 addr_diff_vec_flags flags;
1018
1019 if (!JUMP_P (insn)
1020 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
1021 continue;
1022 pat = PATTERN (insn);
1023 len = XVECLEN (pat, 1);
1024 gcc_assert (len > 0);
1025 min_align = MAX_CODE_ALIGN;
1026 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
1027 {
1028 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
1029 int shuid = INSN_SHUID (lab);
1030 if (shuid < min)
1031 {
1032 min = shuid;
1033 min_lab = lab;
1034 }
1035 if (shuid > max)
1036 {
1037 max = shuid;
1038 max_lab = lab;
1039 }
1040 if (min_align > LABEL_TO_ALIGNMENT (lab))
1041 min_align = LABEL_TO_ALIGNMENT (lab);
1042 }
1043 XEXP (pat, 2) = gen_rtx_LABEL_REF (Pmode, min_lab);
1044 XEXP (pat, 3) = gen_rtx_LABEL_REF (Pmode, max_lab);
1045 insn_shuid = INSN_SHUID (insn);
1046 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
1047 memset (&flags, 0, sizeof (flags));
1048 flags.min_align = min_align;
1049 flags.base_after_vec = rel > insn_shuid;
1050 flags.min_after_vec = min > insn_shuid;
1051 flags.max_after_vec = max > insn_shuid;
1052 flags.min_after_base = min > rel;
1053 flags.max_after_base = max > rel;
1054 ADDR_DIFF_VEC_FLAGS (pat) = flags;
1055 }
1056 }
1057 #endif /* CASE_VECTOR_SHORTEN_MODE */
1058
1059 /* Compute initial lengths, addresses, and varying flags for each insn. */
1060 for (insn_current_address = 0, insn = first;
1061 insn != 0;
1062 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
1063 {
1064 uid = INSN_UID (insn);
1065
1066 insn_lengths[uid] = 0;
1067
1068 if (LABEL_P (insn))
1069 {
1070 int log = LABEL_TO_ALIGNMENT (insn);
1071 if (log)
1072 {
1073 int align = 1 << log;
1074 int new_address = (insn_current_address + align - 1) & -align;
1075 insn_lengths[uid] = new_address - insn_current_address;
1076 }
1077 }
1078
1079 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
1080
1081 if (NOTE_P (insn) || BARRIER_P (insn)
1082 || LABEL_P (insn) || DEBUG_INSN_P(insn))
1083 continue;
1084 if (INSN_DELETED_P (insn))
1085 continue;
1086
1087 body = PATTERN (insn);
1088 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1089 {
1090 /* This only takes room if read-only data goes into the text
1091 section. */
1092 if (JUMP_TABLES_IN_TEXT_SECTION
1093 || readonly_data_section == text_section)
1094 insn_lengths[uid] = (XVECLEN (body,
1095 GET_CODE (body) == ADDR_DIFF_VEC)
1096 * GET_MODE_SIZE (GET_MODE (body)));
1097 /* Alignment is handled by ADDR_VEC_ALIGN. */
1098 }
1099 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1100 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1101 else if (GET_CODE (body) == SEQUENCE)
1102 {
1103 int i;
1104 int const_delay_slots;
1105 #ifdef DELAY_SLOTS
1106 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1107 #else
1108 const_delay_slots = 0;
1109 #endif
1110 /* Inside a delay slot sequence, we do not do any branch shortening
1111 if the shortening could change the number of delay slots
1112 of the branch. */
1113 for (i = 0; i < XVECLEN (body, 0); i++)
1114 {
1115 rtx inner_insn = XVECEXP (body, 0, i);
1116 int inner_uid = INSN_UID (inner_insn);
1117 int inner_length;
1118
1119 if (GET_CODE (body) == ASM_INPUT
1120 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1121 inner_length = (asm_insn_count (PATTERN (inner_insn))
1122 * insn_default_length (inner_insn));
1123 else
1124 inner_length = insn_default_length (inner_insn);
1125
1126 insn_lengths[inner_uid] = inner_length;
1127 if (const_delay_slots)
1128 {
1129 if ((varying_length[inner_uid]
1130 = insn_variable_length_p (inner_insn)) != 0)
1131 varying_length[uid] = 1;
1132 INSN_ADDRESSES (inner_uid) = (insn_current_address
1133 + insn_lengths[uid]);
1134 }
1135 else
1136 varying_length[inner_uid] = 0;
1137 insn_lengths[uid] += inner_length;
1138 }
1139 }
1140 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1141 {
1142 insn_lengths[uid] = insn_default_length (insn);
1143 varying_length[uid] = insn_variable_length_p (insn);
1144 }
1145
1146 /* If needed, do any adjustment. */
1147 #ifdef ADJUST_INSN_LENGTH
1148 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1149 if (insn_lengths[uid] < 0)
1150 fatal_insn ("negative insn length", insn);
1151 #endif
1152 }
1153
1154 /* Now loop over all the insns finding varying length insns. For each,
1155 get the current insn length. If it has changed, reflect the change.
1156 When nothing changes for a full pass, we are done. */
1157
1158 while (something_changed)
1159 {
1160 something_changed = 0;
1161 insn_current_align = MAX_CODE_ALIGN - 1;
1162 for (insn_current_address = 0, insn = first;
1163 insn != 0;
1164 insn = NEXT_INSN (insn))
1165 {
1166 int new_length;
1167 #ifdef ADJUST_INSN_LENGTH
1168 int tmp_length;
1169 #endif
1170 int length_align;
1171
1172 uid = INSN_UID (insn);
1173
1174 if (LABEL_P (insn))
1175 {
1176 int log = LABEL_TO_ALIGNMENT (insn);
1177 if (log > insn_current_align)
1178 {
1179 int align = 1 << log;
1180 int new_address= (insn_current_address + align - 1) & -align;
1181 insn_lengths[uid] = new_address - insn_current_address;
1182 insn_current_align = log;
1183 insn_current_address = new_address;
1184 }
1185 else
1186 insn_lengths[uid] = 0;
1187 INSN_ADDRESSES (uid) = insn_current_address;
1188 continue;
1189 }
1190
1191 length_align = INSN_LENGTH_ALIGNMENT (insn);
1192 if (length_align < insn_current_align)
1193 insn_current_align = length_align;
1194
1195 insn_last_address = INSN_ADDRESSES (uid);
1196 INSN_ADDRESSES (uid) = insn_current_address;
1197
1198 #ifdef CASE_VECTOR_SHORTEN_MODE
1199 if (optimize && JUMP_P (insn)
1200 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1201 {
1202 rtx body = PATTERN (insn);
1203 int old_length = insn_lengths[uid];
1204 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1205 rtx min_lab = XEXP (XEXP (body, 2), 0);
1206 rtx max_lab = XEXP (XEXP (body, 3), 0);
1207 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1208 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1209 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1210 rtx prev;
1211 int rel_align = 0;
1212 addr_diff_vec_flags flags;
1213
1214 /* Avoid automatic aggregate initialization. */
1215 flags = ADDR_DIFF_VEC_FLAGS (body);
1216
1217 /* Try to find a known alignment for rel_lab. */
1218 for (prev = rel_lab;
1219 prev
1220 && ! insn_lengths[INSN_UID (prev)]
1221 && ! (varying_length[INSN_UID (prev)] & 1);
1222 prev = PREV_INSN (prev))
1223 if (varying_length[INSN_UID (prev)] & 2)
1224 {
1225 rel_align = LABEL_TO_ALIGNMENT (prev);
1226 break;
1227 }
1228
1229 /* See the comment on addr_diff_vec_flags in rtl.h for the
1230 meaning of the flags values. base: REL_LAB vec: INSN */
 1231		  /* Anything after INSN still has addresses from the last
1232 pass; adjust these so that they reflect our current
1233 estimate for this pass. */
1234 if (flags.base_after_vec)
1235 rel_addr += insn_current_address - insn_last_address;
1236 if (flags.min_after_vec)
1237 min_addr += insn_current_address - insn_last_address;
1238 if (flags.max_after_vec)
1239 max_addr += insn_current_address - insn_last_address;
1240 /* We want to know the worst case, i.e. lowest possible value
1241 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1242 its offset is positive, and we have to be wary of code shrink;
 1243		     otherwise, it is negative, and we have to be wary of code
1244 size increase. */
1245 if (flags.min_after_base)
1246 {
1247 /* If INSN is between REL_LAB and MIN_LAB, the size
1248 changes we are about to make can change the alignment
1249 within the observed offset, therefore we have to break
1250 it up into two parts that are independent. */
1251 if (! flags.base_after_vec && flags.min_after_vec)
1252 {
1253 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1254 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1255 }
1256 else
1257 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1258 }
1259 else
1260 {
1261 if (flags.base_after_vec && ! flags.min_after_vec)
1262 {
1263 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1264 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1265 }
1266 else
1267 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1268 }
 1269		  /* Likewise, determine the worst case, i.e. highest possible value
1270 for the offset of MAX_LAB. */
1271 if (flags.max_after_base)
1272 {
1273 if (! flags.base_after_vec && flags.max_after_vec)
1274 {
1275 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1276 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1277 }
1278 else
1279 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1280 }
1281 else
1282 {
1283 if (flags.base_after_vec && ! flags.max_after_vec)
1284 {
1285 max_addr += align_fuzz (max_lab, insn, 0, 0);
1286 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1287 }
1288 else
1289 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1290 }
1291 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1292 max_addr - rel_addr,
1293 body));
1294 if (JUMP_TABLES_IN_TEXT_SECTION
1295 || readonly_data_section == text_section)
1296 {
1297 insn_lengths[uid]
1298 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1299 insn_current_address += insn_lengths[uid];
1300 if (insn_lengths[uid] != old_length)
1301 something_changed = 1;
1302 }
1303
1304 continue;
1305 }
1306 #endif /* CASE_VECTOR_SHORTEN_MODE */
1307
1308 if (! (varying_length[uid]))
1309 {
1310 if (NONJUMP_INSN_P (insn)
1311 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1312 {
1313 int i;
1314
1315 body = PATTERN (insn);
1316 for (i = 0; i < XVECLEN (body, 0); i++)
1317 {
1318 rtx inner_insn = XVECEXP (body, 0, i);
1319 int inner_uid = INSN_UID (inner_insn);
1320
1321 INSN_ADDRESSES (inner_uid) = insn_current_address;
1322
1323 insn_current_address += insn_lengths[inner_uid];
1324 }
1325 }
1326 else
1327 insn_current_address += insn_lengths[uid];
1328
1329 continue;
1330 }
1331
1332 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1333 {
1334 int i;
1335
1336 body = PATTERN (insn);
1337 new_length = 0;
1338 for (i = 0; i < XVECLEN (body, 0); i++)
1339 {
1340 rtx inner_insn = XVECEXP (body, 0, i);
1341 int inner_uid = INSN_UID (inner_insn);
1342 int inner_length;
1343
1344 INSN_ADDRESSES (inner_uid) = insn_current_address;
1345
1346 /* insn_current_length returns 0 for insns with a
1347 non-varying length. */
1348 if (! varying_length[inner_uid])
1349 inner_length = insn_lengths[inner_uid];
1350 else
1351 inner_length = insn_current_length (inner_insn);
1352
1353 if (inner_length != insn_lengths[inner_uid])
1354 {
1355 insn_lengths[inner_uid] = inner_length;
1356 something_changed = 1;
1357 }
1358 insn_current_address += insn_lengths[inner_uid];
1359 new_length += inner_length;
1360 }
1361 }
1362 else
1363 {
1364 new_length = insn_current_length (insn);
1365 insn_current_address += new_length;
1366 }
1367
1368 #ifdef ADJUST_INSN_LENGTH
1369 /* If needed, do any adjustment. */
1370 tmp_length = new_length;
1371 ADJUST_INSN_LENGTH (insn, new_length);
1372 insn_current_address += (new_length - tmp_length);
1373 #endif
1374
1375 if (new_length != insn_lengths[uid])
1376 {
1377 insn_lengths[uid] = new_length;
1378 something_changed = 1;
1379 }
1380 }
1381 /* For a non-optimizing compile, do only a single pass. */
1382 if (!optimize)
1383 break;
1384 }
1385
1386 free (varying_length);
1387
1388 #endif /* HAVE_ATTR_length */
1389 }
1390
1391 #ifdef HAVE_ATTR_length
1392 /* Given the body of an INSN known to be generated by an ASM statement, return
1393 the number of machine instructions likely to be generated for this insn.
1394 This is used to compute its length. */
1395
1396 static int
1397 asm_insn_count (rtx body)
1398 {
1399 const char *templ;
1400
1401 if (GET_CODE (body) == ASM_INPUT)
1402 templ = XSTR (body, 0);
1403 else
1404 templ = decode_asm_operands (body, NULL, NULL, NULL, NULL, NULL);
1405
1406 return asm_str_count (templ);
1407 }
1408 #endif
1409
1410 /* Return the number of machine instructions likely to be generated for the
1411 inline-asm template. */
1412 int
1413 asm_str_count (const char *templ)
1414 {
1415 int count = 1;
1416
1417 if (!*templ)
1418 return 0;
1419
1420 for (; *templ; templ++)
1421 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*templ, templ)
1422 || *templ == '\n')
1423 count++;
1424
1425 return count;
1426 }
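/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR defined near
   the top of this file, asm_str_count ("mov r0, r1; add r2, r3") and
   asm_str_count ("mov r0, r1\n\tadd r2, r3") both return 2, while
   asm_str_count ("") returns 0.  (The mnemonics are only illustrative.)  */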
1427 \f
1428 /* ??? This is probably the wrong place for these. */
1429 /* Structure recording the mapping from source file and directory
1430 names at compile time to those to be embedded in debug
1431 information. */
1432 typedef struct debug_prefix_map
1433 {
1434 const char *old_prefix;
1435 const char *new_prefix;
1436 size_t old_len;
1437 size_t new_len;
1438 struct debug_prefix_map *next;
1439 } debug_prefix_map;
1440
1441 /* Linked list of such structures. */
1442 debug_prefix_map *debug_prefix_maps;
1443
1444
1445 /* Record a debug file prefix mapping. ARG is the argument to
1446 -fdebug-prefix-map and must be of the form OLD=NEW. */
1447
1448 void
1449 add_debug_prefix_map (const char *arg)
1450 {
1451 debug_prefix_map *map;
1452 const char *p;
1453
1454 p = strchr (arg, '=');
1455 if (!p)
1456 {
1457 error ("invalid argument %qs to -fdebug-prefix-map", arg);
1458 return;
1459 }
1460 map = XNEW (debug_prefix_map);
1461 map->old_prefix = xstrndup (arg, p - arg);
1462 map->old_len = p - arg;
1463 p++;
1464 map->new_prefix = xstrdup (p);
1465 map->new_len = strlen (p);
1466 map->next = debug_prefix_maps;
1467 debug_prefix_maps = map;
1468 }
1469
1470 /* Perform user-specified mapping of debug filename prefixes. Return
1471 the new name corresponding to FILENAME. */
1472
1473 const char *
1474 remap_debug_filename (const char *filename)
1475 {
1476 debug_prefix_map *map;
1477 char *s;
1478 const char *name;
1479 size_t name_len;
1480
1481 for (map = debug_prefix_maps; map; map = map->next)
1482 if (strncmp (filename, map->old_prefix, map->old_len) == 0)
1483 break;
1484 if (!map)
1485 return filename;
1486 name = filename + map->old_len;
1487 name_len = strlen (name) + 1;
1488 s = (char *) alloca (name_len + map->new_len);
1489 memcpy (s, map->new_prefix, map->new_len);
1490 memcpy (s + map->new_len, name, name_len);
1491 return ggc_strdup (s);
1492 }
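/* A minimal usage sketch of the two routines above; the paths are
   hypothetical and serve only as an illustration.  */
#if 0
  const char *remapped;
  add_debug_prefix_map ("/tmp/build/gcc=/usr/src/gcc");
  remapped = remap_debug_filename ("/tmp/build/gcc/final.c");
  /* remapped now points to the GC-allocated string "/usr/src/gcc/final.c";
     file names that do not start with a registered old prefix are returned
     unchanged.  */
#endif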
1493 \f
1494 /* Return true if DWARF2 debug info can be emitted for DECL. */
1495
1496 static bool
1497 dwarf2_debug_info_emitted_p (tree decl)
1498 {
1499 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1500 return false;
1501
1502 if (DECL_IGNORED_P (decl))
1503 return false;
1504
1505 return true;
1506 }
1507
1508 /* Output assembler code for the start of a function,
1509 and initialize some of the variables in this file
1510 for the new function. The label for the function and associated
1511 assembler pseudo-ops have already been output in `assemble_start_function'.
1512
1513 FIRST is the first insn of the rtl for the function being compiled.
1514 FILE is the file to write assembler code to.
1515 OPTIMIZE is nonzero if we should eliminate redundant
1516 test and compare insns. */
1517
1518 void
1519 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1520 int optimize ATTRIBUTE_UNUSED)
1521 {
1522 block_depth = 0;
1523
1524 this_is_asm_operands = 0;
1525
1526 last_filename = locator_file (prologue_locator);
1527 last_linenum = locator_line (prologue_locator);
1528 last_discriminator = discriminator = 0;
1529
1530 high_block_linenum = high_function_linenum = last_linenum;
1531
1532 if (!DECL_IGNORED_P (current_function_decl))
1533 debug_hooks->begin_prologue (last_linenum, last_filename);
1534
1535 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1536 if (!dwarf2_debug_info_emitted_p (current_function_decl))
1537 dwarf2out_begin_prologue (0, NULL);
1538 #endif
1539
1540 #ifdef LEAF_REG_REMAP
1541 if (current_function_uses_only_leaf_regs)
1542 leaf_renumber_regs (first);
1543 #endif
1544
1545 /* The Sun386i and perhaps other machines don't work right
1546 if the profiling code comes after the prologue. */
1547 #ifdef PROFILE_BEFORE_PROLOGUE
1548 if (crtl->profile)
1549 profile_function (file);
1550 #endif /* PROFILE_BEFORE_PROLOGUE */
1551
1552 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1553 if (dwarf2out_do_frame ())
1554 dwarf2out_frame_debug (NULL_RTX, false);
1555 #endif
1556
1557 /* If debugging, assign block numbers to all of the blocks in this
1558 function. */
1559 if (write_symbols)
1560 {
1561 reemit_insn_block_notes ();
1562 number_blocks (current_function_decl);
1563 /* We never actually put out begin/end notes for the top-level
1564 block in the function. But, conceptually, that block is
1565 always needed. */
1566 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1567 }
1568
1569 if (warn_frame_larger_than
1570 && get_frame_size () > frame_larger_than_size)
1571 {
1572 /* Issue a warning */
1573 warning (OPT_Wframe_larger_than_,
1574 "the frame size of %wd bytes is larger than %wd bytes",
1575 get_frame_size (), frame_larger_than_size);
1576 }
1577
1578 /* First output the function prologue: code to set up the stack frame. */
1579 targetm.asm_out.function_prologue (file, get_frame_size ());
1580
1581 /* If the machine represents the prologue as RTL, the profiling code must
1582 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1583 #ifdef HAVE_prologue
1584 if (! HAVE_prologue)
1585 #endif
1586 profile_after_prologue (file);
1587 }
1588
1589 static void
1590 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1591 {
1592 #ifndef PROFILE_BEFORE_PROLOGUE
1593 if (crtl->profile)
1594 profile_function (file);
1595 #endif /* not PROFILE_BEFORE_PROLOGUE */
1596 }
1597
1598 static void
1599 profile_function (FILE *file ATTRIBUTE_UNUSED)
1600 {
1601 #ifndef NO_PROFILE_COUNTERS
1602 # define NO_PROFILE_COUNTERS 0
1603 #endif
1604 #ifdef ASM_OUTPUT_REG_PUSH
1605 rtx sval = NULL, chain = NULL;
1606
1607 if (cfun->returns_struct)
1608 sval = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl),
1609 true);
1610 if (cfun->static_chain_decl)
1611 chain = targetm.calls.static_chain (current_function_decl, true);
1612 #endif /* ASM_OUTPUT_REG_PUSH */
1613
1614 if (! NO_PROFILE_COUNTERS)
1615 {
1616 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1617 switch_to_section (data_section);
1618 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1619 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1620 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1621 }
1622
1623 switch_to_section (current_function_section ());
1624
1625 #ifdef ASM_OUTPUT_REG_PUSH
1626 if (sval && REG_P (sval))
1627 ASM_OUTPUT_REG_PUSH (file, REGNO (sval));
1628 if (chain && REG_P (chain))
1629 ASM_OUTPUT_REG_PUSH (file, REGNO (chain));
1630 #endif
1631
1632 FUNCTION_PROFILER (file, current_function_funcdef_no);
1633
1634 #ifdef ASM_OUTPUT_REG_PUSH
1635 if (chain && REG_P (chain))
1636 ASM_OUTPUT_REG_POP (file, REGNO (chain));
1637 if (sval && REG_P (sval))
1638 ASM_OUTPUT_REG_POP (file, REGNO (sval));
1639 #endif
1640 }
1641
 1642 /* Output assembler code for the end of a function.  This takes no
 1643    arguments; everything it needs has been recorded in file-scope
 1644    variables by `final_start_function' and the insn scan.  */
1645
1646 void
1647 final_end_function (void)
1648 {
1649 app_disable ();
1650
1651 if (!DECL_IGNORED_P (current_function_decl))
1652 debug_hooks->end_function (high_function_linenum);
1653
1654 /* Finally, output the function epilogue:
1655 code to restore the stack frame and return to the caller. */
1656 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1657
1658 /* And debug output. */
1659 if (!DECL_IGNORED_P (current_function_decl))
1660 debug_hooks->end_epilogue (last_linenum, last_filename);
1661
1662 #if defined (DWARF2_UNWIND_INFO)
1663 if (!dwarf2_debug_info_emitted_p (current_function_decl)
1664 && dwarf2out_do_frame ())
1665 dwarf2out_end_epilogue (last_linenum, last_filename);
1666 #endif
1667 }
1668 \f
1669 /* Output assembler code for some insns: all or part of a function.
1670 For description of args, see `final_start_function', above. */
1671
1672 void
1673 final (rtx first, FILE *file, int optimize)
1674 {
1675 rtx insn;
1676 int max_uid = 0;
1677 int seen = 0;
1678
1679 last_ignored_compare = 0;
1680
1681 for (insn = first; insn; insn = NEXT_INSN (insn))
1682 {
1683 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1684 max_uid = INSN_UID (insn);
1685 #ifdef HAVE_cc0
 1686       /* If CC tracking across branches is enabled, record the insn that
 1687	  jumps to each label that is reached from only one place.  */
1688 if (optimize && JUMP_P (insn))
1689 {
1690 rtx lab = JUMP_LABEL (insn);
1691 if (lab && LABEL_NUSES (lab) == 1)
1692 {
1693 LABEL_REFS (lab) = insn;
1694 }
1695 }
1696 #endif
1697 }
1698
1699 init_recog ();
1700
1701 CC_STATUS_INIT;
1702
1703 /* Output the insns. */
1704 for (insn = first; insn;)
1705 {
1706 #ifdef HAVE_ATTR_length
1707 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1708 {
1709 /* This can be triggered by bugs elsewhere in the compiler if
1710 new insns are created after init_insn_lengths is called. */
1711 gcc_assert (NOTE_P (insn));
1712 insn_current_address = -1;
1713 }
1714 else
1715 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1716 #endif /* HAVE_ATTR_length */
1717
1718 insn = final_scan_insn (insn, file, optimize, 0, &seen);
1719 }
1720 }
1721 \f
1722 const char *
1723 get_insn_template (int code, rtx insn)
1724 {
1725 switch (insn_data[code].output_format)
1726 {
1727 case INSN_OUTPUT_FORMAT_SINGLE:
1728 return insn_data[code].output.single;
1729 case INSN_OUTPUT_FORMAT_MULTI:
1730 return insn_data[code].output.multi[which_alternative];
1731 case INSN_OUTPUT_FORMAT_FUNCTION:
1732 gcc_assert (insn);
1733 return (*insn_data[code].output.function) (recog_data.operand, insn);
1734
1735 default:
1736 gcc_unreachable ();
1737 }
1738 }
1739
1740 /* Emit the appropriate declaration for an alternate-entry-point
1741 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1742 LABEL_KIND != LABEL_NORMAL.
1743
1744 The case fall-through in this function is intentional. */
1745 static void
1746 output_alternate_entry_point (FILE *file, rtx insn)
1747 {
1748 const char *name = LABEL_NAME (insn);
1749
1750 switch (LABEL_KIND (insn))
1751 {
1752 case LABEL_WEAK_ENTRY:
1753 #ifdef ASM_WEAKEN_LABEL
1754 ASM_WEAKEN_LABEL (file, name);
1755 #endif
1756 case LABEL_GLOBAL_ENTRY:
1757 targetm.asm_out.globalize_label (file, name);
1758 case LABEL_STATIC_ENTRY:
1759 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1760 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1761 #endif
1762 ASM_OUTPUT_LABEL (file, name);
1763 break;
1764
1765 case LABEL_NORMAL:
1766 default:
1767 gcc_unreachable ();
1768 }
1769 }
1770
1771 /* Given a CALL_INSN, find and return the nested CALL. */
1772 static rtx
1773 call_from_call_insn (rtx insn)
1774 {
1775 rtx x;
1776 gcc_assert (CALL_P (insn));
1777 x = PATTERN (insn);
1778
1779 while (GET_CODE (x) != CALL)
1780 {
1781 switch (GET_CODE (x))
1782 {
1783 default:
1784 gcc_unreachable ();
1785 case COND_EXEC:
1786 x = COND_EXEC_CODE (x);
1787 break;
1788 case PARALLEL:
1789 x = XVECEXP (x, 0, 0);
1790 break;
1791 case SET:
1792 x = XEXP (x, 1);
1793 break;
1794 }
1795 }
1796 return x;
1797 }
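/* For illustration, the loop above peels patterns of the forms

     (call ...)
     (set (reg) (call ...))
     (parallel [(set (reg) (call ...)) ...])
     (cond_exec (pred) (set (reg) (call ...)))

   down to the innermost CALL rtx; any other shape trips the
   gcc_unreachable above.  */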
1798
1799 /* The final scan for one insn, INSN.
1800 Args are same as in `final', except that INSN
1801 is the insn being scanned.
1802 Value returned is the next insn to be scanned.
1803
1804 NOPEEPHOLES is the flag to disallow peephole processing (currently
1805 used for within delayed branch sequence output).
1806
1807 SEEN is used to track the end of the prologue, for emitting
1808 debug information. We force the emission of a line note after
1809 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1810 at the beginning of the second basic block, whichever comes
1811 first. */
1812
1813 rtx
1814 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1815 int nopeepholes ATTRIBUTE_UNUSED, int *seen)
1816 {
1817 #ifdef HAVE_cc0
1818 rtx set;
1819 #endif
1820 rtx next;
1821
1822 insn_counter++;
1823
1824 /* Ignore deleted insns. These can occur when we split insns (due to a
1825 template of "#") while not optimizing. */
1826 if (INSN_DELETED_P (insn))
1827 return NEXT_INSN (insn);
1828
1829 switch (GET_CODE (insn))
1830 {
1831 case NOTE:
1832 switch (NOTE_KIND (insn))
1833 {
1834 case NOTE_INSN_DELETED:
1835 break;
1836
1837 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
1838 in_cold_section_p = !in_cold_section_p;
1839 #ifdef DWARF2_UNWIND_INFO
1840 if (dwarf2out_do_frame ())
1841 dwarf2out_switch_text_section ();
1842 else
1843 #endif
1844 if (!DECL_IGNORED_P (current_function_decl))
1845 debug_hooks->switch_text_section ();
1846
1847 switch_to_section (current_function_section ());
1848 break;
1849
1850 case NOTE_INSN_BASIC_BLOCK:
1851 #ifdef TARGET_UNWIND_INFO
1852 targetm.asm_out.unwind_emit (asm_out_file, insn);
1853 #endif
1854
1855 if (flag_debug_asm)
1856 fprintf (asm_out_file, "\t%s basic block %d\n",
1857 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1858
1859 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1860 {
1861 *seen |= SEEN_EMITTED;
1862 force_source_line = true;
1863 }
1864 else
1865 *seen |= SEEN_BB;
1866
1867 discriminator = NOTE_BASIC_BLOCK (insn)->discriminator;
1868
1869 break;
1870
1871 case NOTE_INSN_EH_REGION_BEG:
1872 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1873 NOTE_EH_HANDLER (insn));
1874 break;
1875
1876 case NOTE_INSN_EH_REGION_END:
1877 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1878 NOTE_EH_HANDLER (insn));
1879 break;
1880
1881 case NOTE_INSN_PROLOGUE_END:
1882 targetm.asm_out.function_end_prologue (file);
1883 profile_after_prologue (file);
1884
1885 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1886 {
1887 *seen |= SEEN_EMITTED;
1888 force_source_line = true;
1889 }
1890 else
1891 *seen |= SEEN_NOTE;
1892
1893 break;
1894
1895 case NOTE_INSN_EPILOGUE_BEG:
1896 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_epilogue)
1897 if (dwarf2out_do_frame ())
1898 dwarf2out_begin_epilogue (insn);
1899 #endif
1900 targetm.asm_out.function_begin_epilogue (file);
1901 break;
1902
1903 case NOTE_INSN_CFA_RESTORE_STATE:
1904 #if defined (DWARF2_UNWIND_INFO)
1905 dwarf2out_frame_debug_restore_state ();
1906 #endif
1907 break;
1908
1909 case NOTE_INSN_FUNCTION_BEG:
1910 app_disable ();
1911 if (!DECL_IGNORED_P (current_function_decl))
1912 debug_hooks->end_prologue (last_linenum, last_filename);
1913
1914 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1915 {
1916 *seen |= SEEN_EMITTED;
1917 force_source_line = true;
1918 }
1919 else
1920 *seen |= SEEN_NOTE;
1921
1922 break;
1923
1924 case NOTE_INSN_BLOCK_BEG:
1925 if (debug_info_level == DINFO_LEVEL_NORMAL
1926 || debug_info_level == DINFO_LEVEL_VERBOSE
1927 || write_symbols == DWARF2_DEBUG
1928 || write_symbols == VMS_AND_DWARF2_DEBUG
1929 || write_symbols == VMS_DEBUG)
1930 {
1931 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1932
1933 app_disable ();
1934 ++block_depth;
1935 high_block_linenum = last_linenum;
1936
1937 /* Output debugging info about the symbol-block beginning. */
1938 if (!DECL_IGNORED_P (current_function_decl))
1939 debug_hooks->begin_block (last_linenum, n);
1940
1941 /* Mark this block as output. */
1942 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1943 }
1944 if (write_symbols == DBX_DEBUG
1945 || write_symbols == SDB_DEBUG)
1946 {
1947 location_t *locus_ptr
1948 = block_nonartificial_location (NOTE_BLOCK (insn));
1949
1950 if (locus_ptr != NULL)
1951 {
1952 override_filename = LOCATION_FILE (*locus_ptr);
1953 override_linenum = LOCATION_LINE (*locus_ptr);
1954 }
1955 }
1956 break;
1957
1958 case NOTE_INSN_BLOCK_END:
1959 if (debug_info_level == DINFO_LEVEL_NORMAL
1960 || debug_info_level == DINFO_LEVEL_VERBOSE
1961 || write_symbols == DWARF2_DEBUG
1962 || write_symbols == VMS_AND_DWARF2_DEBUG
1963 || write_symbols == VMS_DEBUG)
1964 {
1965 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1966
1967 app_disable ();
1968
1969 /* End of a symbol-block. */
1970 --block_depth;
1971 gcc_assert (block_depth >= 0);
1972
1973 if (!DECL_IGNORED_P (current_function_decl))
1974 debug_hooks->end_block (high_block_linenum, n);
1975 }
1976 if (write_symbols == DBX_DEBUG
1977 || write_symbols == SDB_DEBUG)
1978 {
1979 tree outer_block = BLOCK_SUPERCONTEXT (NOTE_BLOCK (insn));
1980 location_t *locus_ptr
1981 = block_nonartificial_location (outer_block);
1982
1983 if (locus_ptr != NULL)
1984 {
1985 override_filename = LOCATION_FILE (*locus_ptr);
1986 override_linenum = LOCATION_LINE (*locus_ptr);
1987 }
1988 else
1989 {
1990 override_filename = NULL;
1991 override_linenum = 0;
1992 }
1993 }
1994 break;
1995
1996 case NOTE_INSN_DELETED_LABEL:
1997 /* Emit the label. We may have deleted the CODE_LABEL because
1998 the label could be proved to be unreachable, though still
1999 referenced (in the form of having its address taken). */
2000 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
2001 break;
2002
2003 case NOTE_INSN_VAR_LOCATION:
2004 if (!DECL_IGNORED_P (current_function_decl))
2005 debug_hooks->var_location (insn);
2006 break;
2007
2008 default:
2009 gcc_unreachable ();
2010 break;
2011 }
2012 break;
2013
2014 case BARRIER:
2015 #if defined (DWARF2_UNWIND_INFO)
2016 if (dwarf2out_do_frame ())
2017 dwarf2out_frame_debug (insn, false);
2018 #endif
2019 break;
2020
2021 case CODE_LABEL:
2022 /* The target port might emit labels in the output function for
2023 some insn, e.g. sh.c output_branchy_insn. */
2024 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
2025 {
2026 int align = LABEL_TO_ALIGNMENT (insn);
2027 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2028 int max_skip = LABEL_TO_MAX_SKIP (insn);
2029 #endif
2030
2031 if (align && NEXT_INSN (insn))
2032 {
2033 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
2034 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
2035 #else
2036 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
2037 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
2038 #else
2039 ASM_OUTPUT_ALIGN (file, align);
2040 #endif
2041 #endif
2042 }
2043 }
2044 #ifdef HAVE_cc0
2045 CC_STATUS_INIT;
2046 #endif
2047
2048 if (!DECL_IGNORED_P (current_function_decl) && LABEL_NAME (insn))
2049 debug_hooks->label (insn);
2050
2051 app_disable ();
2052
2053 next = next_nonnote_insn (insn);
2054 /* If this label is followed by a jump-table, make sure we put
2055 the label in the read-only section. Also possibly write the
2056 label and jump table together. */
2057 if (next != 0 && JUMP_TABLE_DATA_P (next))
2058 {
2059 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2060 /* In this case, the case vector is being moved by the
2061 target, so don't output the label at all. Leave that
2062 to the back end macros. */
2063 #else
2064 if (! JUMP_TABLES_IN_TEXT_SECTION)
2065 {
2066 int log_align;
2067
2068 switch_to_section (targetm.asm_out.function_rodata_section
2069 (current_function_decl));
2070
2071 #ifdef ADDR_VEC_ALIGN
2072 log_align = ADDR_VEC_ALIGN (next);
2073 #else
2074 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2075 #endif
2076 ASM_OUTPUT_ALIGN (file, log_align);
2077 }
2078 else
2079 switch_to_section (current_function_section ());
2080
2081 #ifdef ASM_OUTPUT_CASE_LABEL
2082 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
2083 next);
2084 #else
2085 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2086 #endif
2087 #endif
2088 break;
2089 }
2090 if (LABEL_ALT_ENTRY_P (insn))
2091 output_alternate_entry_point (file, insn);
2092 else
2093 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
2094 break;
2095
2096 default:
2097 {
2098 rtx body = PATTERN (insn);
2099 int insn_code_number;
2100 const char *templ;
2101 bool is_stmt;
2102
2103 /* Reset this early so it is correct for ASM statements. */
2104 current_insn_predicate = NULL_RTX;
2105
2106 /* An INSN, JUMP_INSN or CALL_INSN.
2107 First check for special kinds that recog doesn't recognize. */
2108
2109 if (GET_CODE (body) == USE /* These are just declarations. */
2110 || GET_CODE (body) == CLOBBER)
2111 break;
2112
2113 #ifdef HAVE_cc0
2114 {
2115 /* If there is a REG_CC_SETTER note on this insn, it means that
2116 the setting of the condition code was done in the delay slot
2117 of the insn that branched here. So recover the cc status
2118 from the insn that set it. */
2119
2120 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2121 if (note)
2122 {
2123 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2124 cc_prev_status = cc_status;
2125 }
2126 }
2127 #endif
2128
2129 /* Detect insns that are really jump-tables
2130 and output them as such. */
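      /* For illustration, such a table appears in the insn stream as either
	 (addr_vec:SI [(label_ref 23) (label_ref 24) ...]), holding absolute
	 label addresses, or
	 (addr_diff_vec:HI (label_ref 22) [(label_ref 23) (label_ref 24) ...] ...),
	 holding each label's offset from the base label; the label numbers
	 here are hypothetical.  The per-element macros used below emit one
	 table entry for each vector element.  */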
2131
2132 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2133 {
2134 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2135 int vlen, idx;
2136 #endif
2137
2138 if (! JUMP_TABLES_IN_TEXT_SECTION)
2139 switch_to_section (targetm.asm_out.function_rodata_section
2140 (current_function_decl));
2141 else
2142 switch_to_section (current_function_section ());
2143
2144 app_disable ();
2145
2146 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2147 if (GET_CODE (body) == ADDR_VEC)
2148 {
2149 #ifdef ASM_OUTPUT_ADDR_VEC
2150 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2151 #else
2152 gcc_unreachable ();
2153 #endif
2154 }
2155 else
2156 {
2157 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2158 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2159 #else
2160 gcc_unreachable ();
2161 #endif
2162 }
2163 #else
2164 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2165 for (idx = 0; idx < vlen; idx++)
2166 {
2167 if (GET_CODE (body) == ADDR_VEC)
2168 {
2169 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2170 ASM_OUTPUT_ADDR_VEC_ELT
2171 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2172 #else
2173 gcc_unreachable ();
2174 #endif
2175 }
2176 else
2177 {
2178 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2179 ASM_OUTPUT_ADDR_DIFF_ELT
2180 (file,
2181 body,
2182 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2183 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2184 #else
2185 gcc_unreachable ();
2186 #endif
2187 }
2188 }
2189 #ifdef ASM_OUTPUT_CASE_END
2190 ASM_OUTPUT_CASE_END (file,
2191 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2192 insn);
2193 #endif
2194 #endif
2195
2196 switch_to_section (current_function_section ());
2197
2198 break;
2199 }
2200 /* Output this line note if it is the first or the last line
2201 note in a row. */
2202 if (!DECL_IGNORED_P (current_function_decl)
2203 && notice_source_line (insn, &is_stmt))
2204 (*debug_hooks->source_line) (last_linenum, last_filename,
2205 last_discriminator, is_stmt);
2206
2207 if (GET_CODE (body) == ASM_INPUT)
2208 {
2209 const char *string = XSTR (body, 0);
2210
2211 /* There's no telling what that did to the condition codes. */
2212 CC_STATUS_INIT;
2213
2214 if (string[0])
2215 {
2216 expanded_location loc;
2217
2218 app_enable ();
2219 loc = expand_location (ASM_INPUT_SOURCE_LOCATION (body));
2220 if (*loc.file && loc.line)
2221 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2222 ASM_COMMENT_START, loc.line, loc.file);
2223 fprintf (asm_out_file, "\t%s\n", string);
2224 #if HAVE_AS_LINE_ZERO
2225 if (*loc.file && loc.line)
2226 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2227 #endif
2228 }
2229 break;
2230 }
2231
2232 /* Detect `asm' construct with operands. */
2233 if (asm_noperands (body) >= 0)
2234 {
2235 unsigned int noperands = asm_noperands (body);
2236 rtx *ops = XALLOCAVEC (rtx, noperands);
2237 const char *string;
2238 location_t loc;
2239 expanded_location expanded;
2240
2241 /* There's no telling what that did to the condition codes. */
2242 CC_STATUS_INIT;
2243
2244 /* Get out the operand values. */
2245 string = decode_asm_operands (body, ops, NULL, NULL, NULL, &loc);
2246 /* Inhibit dying on what would otherwise be compiler bugs. */
2247 insn_noperands = noperands;
2248 this_is_asm_operands = insn;
2249 expanded = expand_location (loc);
2250
2251 #ifdef FINAL_PRESCAN_INSN
2252 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2253 #endif
2254
2255 /* Output the insn using them. */
2256 if (string[0])
2257 {
2258 app_enable ();
2259 if (expanded.file && expanded.line)
2260 fprintf (asm_out_file, "%s %i \"%s\" 1\n",
2261 ASM_COMMENT_START, expanded.line, expanded.file);
2262 output_asm_insn (string, ops);
2263 #if HAVE_AS_LINE_ZERO
2264 if (expanded.file && expanded.line)
2265 fprintf (asm_out_file, "%s 0 \"\" 2\n", ASM_COMMENT_START);
2266 #endif
2267 }
2268
2269 if (targetm.asm_out.final_postscan_insn)
2270 targetm.asm_out.final_postscan_insn (file, insn, ops,
2271 insn_noperands);
2272
2273 this_is_asm_operands = 0;
2274 break;
2275 }
2276
2277 app_disable ();
2278
2279 if (GET_CODE (body) == SEQUENCE)
2280 {
2281 /* A delayed-branch sequence */
2282 int i;
2283
2284 final_sequence = body;
2285
2286 /* Record the delay slots' frame information before the branch.
2287 This is needed for delayed calls: see execute_cfa_program(). */
2288 #if defined (DWARF2_UNWIND_INFO)
2289 if (dwarf2out_do_frame ())
2290 for (i = 1; i < XVECLEN (body, 0); i++)
2291 dwarf2out_frame_debug (XVECEXP (body, 0, i), false);
2292 #endif
2293
2294 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2295 force the restoration of a comparison that was previously
2296 thought unnecessary. If that happens, cancel this sequence
2297 and cause that insn to be restored. */
2298
2299 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, 1, seen);
2300 if (next != XVECEXP (body, 0, 1))
2301 {
2302 final_sequence = 0;
2303 return next;
2304 }
2305
2306 for (i = 1; i < XVECLEN (body, 0); i++)
2307 {
2308 rtx insn = XVECEXP (body, 0, i);
2309 rtx next = NEXT_INSN (insn);
2310 /* We loop in case any instruction in a delay slot gets
2311 split. */
2312 do
2313 insn = final_scan_insn (insn, file, 0, 1, seen);
2314 while (insn != next);
2315 }
2316 #ifdef DBR_OUTPUT_SEQEND
2317 DBR_OUTPUT_SEQEND (file);
2318 #endif
2319 final_sequence = 0;
2320
2321 /* If the insn requiring the delay slot was a CALL_INSN, the
2322 insns in the delay slot are actually executed before the
2323 called function. Hence we don't preserve any CC-setting
2324 actions in these insns and the CC must be marked as being
2325 clobbered by the function. */
2326 if (CALL_P (XVECEXP (body, 0, 0)))
2327 {
2328 CC_STATUS_INIT;
2329 }
2330 break;
2331 }
2332
2333 /* We have a real machine instruction as rtl. */
2334
2335 body = PATTERN (insn);
2336
2337 #ifdef HAVE_cc0
2338 set = single_set (insn);
2339
2340 /* Check for redundant test and compare instructions
2341 (when the condition codes are already set up as desired).
2342 This is done only when optimizing; if not optimizing,
2343 it should be possible for the user to alter a variable
2344 with the debugger in between statements
2345 and the next statement should reexamine the variable
2346 to compute the condition codes. */
2347
2348 if (optimize)
2349 {
2350 if (set
2351 && GET_CODE (SET_DEST (set)) == CC0
2352 && insn != last_ignored_compare)
2353 {
2354 rtx src1, src2;
2355 if (GET_CODE (SET_SRC (set)) == SUBREG)
2356 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2357
2358 src1 = SET_SRC (set);
2359 src2 = NULL_RTX;
2360 if (GET_CODE (SET_SRC (set)) == COMPARE)
2361 {
2362 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2363 XEXP (SET_SRC (set), 0)
2364 = alter_subreg (&XEXP (SET_SRC (set), 0));
2365 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2366 XEXP (SET_SRC (set), 1)
2367 = alter_subreg (&XEXP (SET_SRC (set), 1));
2368 if (XEXP (SET_SRC (set), 1)
2369 == CONST0_RTX (GET_MODE (XEXP (SET_SRC (set), 0))))
2370 src2 = XEXP (SET_SRC (set), 0);
2371 }
2372 if ((cc_status.value1 != 0
2373 && rtx_equal_p (src1, cc_status.value1))
2374 || (cc_status.value2 != 0
2375 && rtx_equal_p (src1, cc_status.value2))
2376 || (src2 != 0 && cc_status.value1 != 0
2377 && rtx_equal_p (src2, cc_status.value1))
2378 || (src2 != 0 && cc_status.value2 != 0
2379 && rtx_equal_p (src2, cc_status.value2)))
2380 {
2381 /* Don't delete insn if it has an addressing side-effect. */
2382 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2383 /* or if anything in it is volatile. */
2384 && ! volatile_refs_p (PATTERN (insn)))
2385 {
2386 /* We don't really delete the insn; just ignore it. */
2387 last_ignored_compare = insn;
2388 break;
2389 }
2390 }
2391 }
2392 }
2393
2394 /* If this is a conditional branch, maybe modify it
2395 if the cc's are in a nonstandard state
2396 so that it accomplishes the same thing that it would
2397 do straightforwardly if the cc's were set up normally. */
2398
2399 if (cc_status.flags != 0
2400 && JUMP_P (insn)
2401 && GET_CODE (body) == SET
2402 && SET_DEST (body) == pc_rtx
2403 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2404 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2405 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx)
2406 {
2407 /* This function may alter the contents of its argument
2408 and clear some of the cc_status.flags bits.
2409 It may also return 1 meaning condition now always true
2410 or -1 meaning condition now always false
2411 or 2 meaning condition nontrivial but altered. */
2412 int result = alter_cond (XEXP (SET_SRC (body), 0));
2413 /* If condition now has fixed value, replace the IF_THEN_ELSE
2414 with its then-operand or its else-operand. */
2415 if (result == 1)
2416 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2417 if (result == -1)
2418 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2419
2420 /* The jump is now either unconditional or a no-op.
2421 If it has become a no-op, don't try to output it.
2422 (It would not be recognized.) */
2423 if (SET_SRC (body) == pc_rtx)
2424 {
2425 delete_insn (insn);
2426 break;
2427 }
2428 else if (GET_CODE (SET_SRC (body)) == RETURN)
2429 /* Replace (set (pc) (return)) with (return). */
2430 PATTERN (insn) = body = SET_SRC (body);
2431
2432 /* Rerecognize the instruction if it has changed. */
2433 if (result != 0)
2434 INSN_CODE (insn) = -1;
2435 }
2436
2437 /* If this is a conditional trap, maybe modify it if the cc's
2438 are in a nonstandard state so that it accomplishes the same
2439 thing that it would do straightforwardly if the cc's were
2440 set up normally. */
2441 if (cc_status.flags != 0
2442 && NONJUMP_INSN_P (insn)
2443 && GET_CODE (body) == TRAP_IF
2444 && COMPARISON_P (TRAP_CONDITION (body))
2445 && XEXP (TRAP_CONDITION (body), 0) == cc0_rtx)
2446 {
2447 /* This function may alter the contents of its argument
2448 and clear some of the cc_status.flags bits.
2449 It may also return 1 meaning condition now always true
2450 or -1 meaning condition now always false
2451 or 2 meaning condition nontrivial but altered. */
2452 int result = alter_cond (TRAP_CONDITION (body));
2453
2454 /* If TRAP_CONDITION has become always false, delete the
2455 instruction. */
2456 if (result == -1)
2457 {
2458 delete_insn (insn);
2459 break;
2460 }
2461
2462 /* If TRAP_CONDITION has become always true, replace
2463 TRAP_CONDITION with const_true_rtx. */
2464 if (result == 1)
2465 TRAP_CONDITION (body) = const_true_rtx;
2466
2467 /* Rerecognize the instruction if it has changed. */
2468 if (result != 0)
2469 INSN_CODE (insn) = -1;
2470 }
2471
2472 /* Make same adjustments to instructions that examine the
2473 condition codes without jumping and instructions that
2474 handle conditional moves (if this machine has either one). */
2475
2476 if (cc_status.flags != 0
2477 && set != 0)
2478 {
2479 rtx cond_rtx, then_rtx, else_rtx;
2480
2481 if (!JUMP_P (insn)
2482 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2483 {
2484 cond_rtx = XEXP (SET_SRC (set), 0);
2485 then_rtx = XEXP (SET_SRC (set), 1);
2486 else_rtx = XEXP (SET_SRC (set), 2);
2487 }
2488 else
2489 {
2490 cond_rtx = SET_SRC (set);
2491 then_rtx = const_true_rtx;
2492 else_rtx = const0_rtx;
2493 }
2494
2495 switch (GET_CODE (cond_rtx))
2496 {
2497 case GTU:
2498 case GT:
2499 case LTU:
2500 case LT:
2501 case GEU:
2502 case GE:
2503 case LEU:
2504 case LE:
2505 case EQ:
2506 case NE:
2507 {
2508 int result;
2509 if (XEXP (cond_rtx, 0) != cc0_rtx)
2510 break;
2511 result = alter_cond (cond_rtx);
2512 if (result == 1)
2513 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2514 else if (result == -1)
2515 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2516 else if (result == 2)
2517 INSN_CODE (insn) = -1;
2518 if (SET_DEST (set) == SET_SRC (set))
2519 delete_insn (insn);
2520 }
2521 break;
2522
2523 default:
2524 break;
2525 }
2526 }
2527
2528 #endif
2529
2530 #ifdef HAVE_peephole
2531 /* Do machine-specific peephole optimizations if desired. */
2532
2533 if (optimize && !flag_no_peephole && !nopeepholes)
2534 {
2535 rtx next = peephole (insn);
2536 /* When peepholing, if there were notes within the peephole,
2537 emit them before the peephole. */
2538 if (next != 0 && next != NEXT_INSN (insn))
2539 {
2540 rtx note, prev = PREV_INSN (insn);
2541
2542 for (note = NEXT_INSN (insn); note != next;
2543 note = NEXT_INSN (note))
2544 final_scan_insn (note, file, optimize, nopeepholes, seen);
2545
2546 /* Put the notes in the proper position for a later
2547 rescan. For example, the SH target can do this
2548 when generating a far jump in a delayed branch
2549 sequence. */
2550 note = NEXT_INSN (insn);
2551 PREV_INSN (note) = prev;
2552 NEXT_INSN (prev) = note;
2553 NEXT_INSN (PREV_INSN (next)) = insn;
2554 PREV_INSN (insn) = PREV_INSN (next);
2555 NEXT_INSN (insn) = next;
2556 PREV_INSN (next) = insn;
2557 }
2558
2559 /* PEEPHOLE might have changed this. */
2560 body = PATTERN (insn);
2561 }
2562 #endif
2563
2564 /* Try to recognize the instruction.
2565 If successful, verify that the operands satisfy the
2566 constraints for the instruction. Crash if they don't,
2567 since `reload' should have changed them so that they do. */
2568
2569 insn_code_number = recog_memoized (insn);
2570 cleanup_subreg_operands (insn);
2571
2572 /* Dump the insn in the assembly for debugging. */
2573 if (flag_dump_rtl_in_asm)
2574 {
2575 print_rtx_head = ASM_COMMENT_START;
2576 print_rtl_single (asm_out_file, insn);
2577 print_rtx_head = "";
2578 }
2579
2580 if (! constrain_operands_cached (1))
2581 fatal_insn_not_found (insn);
2582
2583 /* Some target machines need to prescan each insn before
2584 it is output. */
2585
2586 #ifdef FINAL_PRESCAN_INSN
2587 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2588 #endif
2589
2590 if (targetm.have_conditional_execution ()
2591 && GET_CODE (PATTERN (insn)) == COND_EXEC)
2592 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2593
2594 #ifdef HAVE_cc0
2595 cc_prev_status = cc_status;
2596
2597 /* Update `cc_status' for this instruction.
2598 The instruction's output routine may change it further.
2599 If the output routine for a jump insn needs to depend
2600 on the cc status, it should look at cc_prev_status. */
2601
2602 NOTICE_UPDATE_CC (body, insn);
2603 #endif
2604
2605 current_output_insn = debug_insn = insn;
2606
2607 #if defined (DWARF2_UNWIND_INFO)
2608 if (CALL_P (insn) && dwarf2out_do_frame ())
2609 dwarf2out_frame_debug (insn, false);
2610 #endif
2611
2612 /* Find the proper template for this insn. */
2613 templ = get_insn_template (insn_code_number, insn);
2614
2615 /* If the C code returns 0, it means that it is a jump insn
2616 which follows a deleted test insn, and that test insn
2617 needs to be reinserted. */
2618 if (templ == 0)
2619 {
2620 rtx prev;
2621
2622 gcc_assert (prev_nonnote_insn (insn) == last_ignored_compare);
2623
2624 /* We have already processed the notes between the setter and
2625 the user. Make sure we don't process them again, this is
2626 particularly important if one of the notes is a block
2627 scope note or an EH note. */
2628 for (prev = insn;
2629 prev != last_ignored_compare;
2630 prev = PREV_INSN (prev))
2631 {
2632 if (NOTE_P (prev))
2633 delete_insn (prev); /* Use delete_note. */
2634 }
2635
2636 return prev;
2637 }
2638
2639 /* If the template is the string "#", it means that this insn must
2640 be split. */
2641 if (templ[0] == '#' && templ[1] == '\0')
2642 {
2643 rtx new_rtx = try_split (body, insn, 0);
2644
2645 /* If we didn't split the insn, go away. */
2646 if (new_rtx == insn && PATTERN (new_rtx) == body)
2647 fatal_insn ("could not split insn", insn);
2648
2649 #ifdef HAVE_ATTR_length
2650 /* This instruction should have been split in shorten_branches,
2651 to ensure that we would have valid length info for the
2652 splitees. */
2653 gcc_unreachable ();
2654 #endif
2655
2656 return new_rtx;
2657 }
2658
2659 #ifdef TARGET_UNWIND_INFO
2660 /* ??? This will put the directives in the wrong place if
2661 get_insn_template outputs assembly directly. However, calling it
2662 before get_insn_template breaks if the insn is split. */
2663 targetm.asm_out.unwind_emit (asm_out_file, insn);
2664 #endif
2665
2666 if (CALL_P (insn))
2667 {
2668 rtx x = call_from_call_insn (insn);
2669 x = XEXP (x, 0);
2670 if (x && MEM_P (x) && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2671 {
2672 tree t;
2673 x = XEXP (x, 0);
2674 t = SYMBOL_REF_DECL (x);
2675 if (t)
2676 assemble_external (t);
2677 }
2678 }
2679
2680 /* Output assembler code from the template. */
2681 output_asm_insn (templ, recog_data.operand);
2682
2683 /* Record point-of-call information for ICF debugging. */
2684 if (flag_enable_icf_debug && CALL_P (insn))
2685 {
2686 rtx x = call_from_call_insn (insn);
2687 x = XEXP (x, 0);
2688 if (x && MEM_P (x))
2689 {
2690 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2691 {
2692 tree t;
2693 x = XEXP (x, 0);
2694 t = SYMBOL_REF_DECL (x);
2695 if (t)
2696 (*debug_hooks->direct_call) (t);
2697 }
2698 else
2699 (*debug_hooks->virtual_call) (INSN_UID (insn));
2700 }
2701 }
2702
2703 /* Some target machines need to postscan each insn after
2704 it is output. */
2705 if (targetm.asm_out.final_postscan_insn)
2706 targetm.asm_out.final_postscan_insn (file, insn, recog_data.operand,
2707 recog_data.n_operands);
2708
2709 /* If necessary, report the effect that the instruction has on
2710 the unwind info. We've already done this for delay slots
2711 and call instructions. */
2712 #if defined (DWARF2_UNWIND_INFO)
2713 if (final_sequence == 0
2714 #if !defined (HAVE_prologue)
2715 && !ACCUMULATE_OUTGOING_ARGS
2716 #endif
2717 && dwarf2out_do_frame ())
2718 dwarf2out_frame_debug (insn, true);
2719 #endif
2720
2721 current_output_insn = debug_insn = 0;
2722 }
2723 }
2724 return NEXT_INSN (insn);
2725 }
2726 \f
2727 /* Return whether a source line note needs to be emitted before INSN.
2728 Sets IS_STMT to TRUE if the line should be marked as a possible
2729 breakpoint location. */
2730
2731 static bool
2732 notice_source_line (rtx insn, bool *is_stmt)
2733 {
2734 const char *filename;
2735 int linenum;
2736
2737 if (override_filename)
2738 {
2739 filename = override_filename;
2740 linenum = override_linenum;
2741 }
2742 else
2743 {
2744 filename = insn_file (insn);
2745 linenum = insn_line (insn);
2746 }
2747
2748 if (filename == NULL)
2749 return false;
2750
2751 if (force_source_line
2752 || filename != last_filename
2753 || last_linenum != linenum)
2754 {
2755 force_source_line = false;
2756 last_filename = filename;
2757 last_linenum = linenum;
2758 last_discriminator = discriminator;
2759 *is_stmt = true;
2760 high_block_linenum = MAX (last_linenum, high_block_linenum);
2761 high_function_linenum = MAX (last_linenum, high_function_linenum);
2762 return true;
2763 }
2764
2765 if (SUPPORTS_DISCRIMINATOR && last_discriminator != discriminator)
2766 {
2767 /* If the discriminator changed, but the line number did not,
2768 output the line table entry with is_stmt false so the
2769 debugger does not treat this as a breakpoint location. */
2770 last_discriminator = discriminator;
2771 *is_stmt = false;
2772 return true;
2773 }
2774
2775 return false;
2776 }
2777 \f
2778 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2779 directly to the desired hard register. */
2780
2781 void
2782 cleanup_subreg_operands (rtx insn)
2783 {
2784 int i;
2785 bool changed = false;
2786 extract_insn_cached (insn);
2787 for (i = 0; i < recog_data.n_operands; i++)
2788 {
2789 /* The following test cannot use recog_data.operand when testing
2790 for a SUBREG: the underlying object might have been changed
2791 already if we are inside a match_operator expression that
2792 matches the else clause. Instead we test the underlying
2793 expression directly. */
2794 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2795 {
2796 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2797 changed = true;
2798 }
2799 else if (GET_CODE (recog_data.operand[i]) == PLUS
2800 || GET_CODE (recog_data.operand[i]) == MULT
2801 || MEM_P (recog_data.operand[i]))
2802 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i], &changed);
2803 }
2804
2805 for (i = 0; i < recog_data.n_dups; i++)
2806 {
2807 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2808 {
2809 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2810 changed = true;
2811 }
2812 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2813 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2814 || MEM_P (*recog_data.dup_loc[i]))
2815 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i], &changed);
2816 }
2817 if (changed)
2818 df_insn_rescan (insn);
2819 }
2820
2821 /* If X is a SUBREG, replace it with a REG or a MEM,
2822 based on the thing it is a subreg of. */
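/* For example (the operands are hypothetical): (subreg:SI (mem:DI X) 4)
   is rewritten via adjust_address into an SImode MEM at offset 4 from X,
   while a hard-register subreg such as (subreg:SI (reg:DI 2) 0) collapses
   into the hard register it actually occupies.  */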
2823
2824 rtx
2825 alter_subreg (rtx *xp)
2826 {
2827 rtx x = *xp;
2828 rtx y = SUBREG_REG (x);
2829
2830 /* simplify_subreg does not remove subreg from volatile references.
2831 We are required to. */
2832 if (MEM_P (y))
2833 {
2834 int offset = SUBREG_BYTE (x);
2835
2836 /* For paradoxical subregs on big-endian machines, SUBREG_BYTE
2837 contains 0 instead of the proper offset. See simplify_subreg. */
2838 if (offset == 0
2839 && GET_MODE_SIZE (GET_MODE (y)) < GET_MODE_SIZE (GET_MODE (x)))
2840 {
2841 int difference = GET_MODE_SIZE (GET_MODE (y))
2842 - GET_MODE_SIZE (GET_MODE (x));
2843 if (WORDS_BIG_ENDIAN)
2844 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
2845 if (BYTES_BIG_ENDIAN)
2846 offset += difference % UNITS_PER_WORD;
2847 }
2848
2849 *xp = adjust_address (y, GET_MODE (x), offset);
2850 }
2851 else
2852 {
2853 rtx new_rtx = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2854 SUBREG_BYTE (x));
2855
2856 if (new_rtx != 0)
2857 *xp = new_rtx;
2858 else if (REG_P (y))
2859 {
2860 /* Simplify_subreg can't handle some REG cases, but we have to. */
2861 unsigned int regno;
2862 HOST_WIDE_INT offset;
2863
2864 regno = subreg_regno (x);
2865 if (subreg_lowpart_p (x))
2866 offset = byte_lowpart_offset (GET_MODE (x), GET_MODE (y));
2867 else
2868 offset = SUBREG_BYTE (x);
2869 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, offset);
2870 }
2871 }
2872
2873 return *xp;
2874 }
2875
2876 /* Do alter_subreg on all the SUBREGs contained in X. */
2877
2878 static rtx
2879 walk_alter_subreg (rtx *xp, bool *changed)
2880 {
2881 rtx x = *xp;
2882 switch (GET_CODE (x))
2883 {
2884 case PLUS:
2885 case MULT:
2886 case AND:
2887 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2888 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1), changed);
2889 break;
2890
2891 case MEM:
2892 case ZERO_EXTEND:
2893 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0), changed);
2894 break;
2895
2896 case SUBREG:
2897 *changed = true;
2898 return alter_subreg (xp);
2899
2900 default:
2901 break;
2902 }
2903
2904 return *xp;
2905 }
2906 \f
2907 #ifdef HAVE_cc0
2908
2909 /* Given BODY, the body of a jump instruction, alter the jump condition
2910 as required by the bits that are set in cc_status.flags.
2911 Not all of the bits there can be handled at this level in all cases.
2912
2913 The value is normally 0.
2914 1 means that the condition has become always true.
2915 -1 means that the condition has become always false.
2916 2 means that COND has been altered. */
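/* As a worked example (assuming a target whose NOTICE_UPDATE_CC set
   CC_NOT_POSITIVE or CC_NOT_NEGATIVE): with CC_NOT_NEGATIVE, meaning the
   last result is known to be non-negative,
   (ge (cc0) (const_int 0)) is always true, so 1 is returned;
   (lt (cc0) (const_int 0)) is always false, so -1 is returned;
   (le (cc0) (const_int 0)) degenerates to (eq ...) and 2 is returned so
   the caller re-recognizes the altered insn.  */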
2917
2918 static int
2919 alter_cond (rtx cond)
2920 {
2921 int value = 0;
2922
2923 if (cc_status.flags & CC_REVERSED)
2924 {
2925 value = 2;
2926 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2927 }
2928
2929 if (cc_status.flags & CC_INVERTED)
2930 {
2931 value = 2;
2932 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2933 }
2934
2935 if (cc_status.flags & CC_NOT_POSITIVE)
2936 switch (GET_CODE (cond))
2937 {
2938 case LE:
2939 case LEU:
2940 case GEU:
2941 /* Jump becomes unconditional. */
2942 return 1;
2943
2944 case GT:
2945 case GTU:
2946 case LTU:
2947 /* Jump becomes no-op. */
2948 return -1;
2949
2950 case GE:
2951 PUT_CODE (cond, EQ);
2952 value = 2;
2953 break;
2954
2955 case LT:
2956 PUT_CODE (cond, NE);
2957 value = 2;
2958 break;
2959
2960 default:
2961 break;
2962 }
2963
2964 if (cc_status.flags & CC_NOT_NEGATIVE)
2965 switch (GET_CODE (cond))
2966 {
2967 case GE:
2968 case GEU:
2969 /* Jump becomes unconditional. */
2970 return 1;
2971
2972 case LT:
2973 case LTU:
2974 /* Jump becomes no-op. */
2975 return -1;
2976
2977 case LE:
2978 case LEU:
2979 PUT_CODE (cond, EQ);
2980 value = 2;
2981 break;
2982
2983 case GT:
2984 case GTU:
2985 PUT_CODE (cond, NE);
2986 value = 2;
2987 break;
2988
2989 default:
2990 break;
2991 }
2992
2993 if (cc_status.flags & CC_NO_OVERFLOW)
2994 switch (GET_CODE (cond))
2995 {
2996 case GEU:
2997 /* Jump becomes unconditional. */
2998 return 1;
2999
3000 case LEU:
3001 PUT_CODE (cond, EQ);
3002 value = 2;
3003 break;
3004
3005 case GTU:
3006 PUT_CODE (cond, NE);
3007 value = 2;
3008 break;
3009
3010 case LTU:
3011 /* Jump becomes no-op. */
3012 return -1;
3013
3014 default:
3015 break;
3016 }
3017
3018 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
3019 switch (GET_CODE (cond))
3020 {
3021 default:
3022 gcc_unreachable ();
3023
3024 case NE:
3025 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
3026 value = 2;
3027 break;
3028
3029 case EQ:
3030 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
3031 value = 2;
3032 break;
3033 }
3034
3035 if (cc_status.flags & CC_NOT_SIGNED)
3036 /* The flags are valid if signed condition operators are converted
3037 to unsigned. */
3038 switch (GET_CODE (cond))
3039 {
3040 case LE:
3041 PUT_CODE (cond, LEU);
3042 value = 2;
3043 break;
3044
3045 case LT:
3046 PUT_CODE (cond, LTU);
3047 value = 2;
3048 break;
3049
3050 case GT:
3051 PUT_CODE (cond, GTU);
3052 value = 2;
3053 break;
3054
3055 case GE:
3056 PUT_CODE (cond, GEU);
3057 value = 2;
3058 break;
3059
3060 default:
3061 break;
3062 }
3063
3064 return value;
3065 }
3066 #endif
3067 \f
3068 /* Report inconsistency between the assembler template and the operands.
3069 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
3070
3071 void
3072 output_operand_lossage (const char *cmsgid, ...)
3073 {
3074 char *fmt_string;
3075 char *new_message;
3076 const char *pfx_str;
3077 va_list ap;
3078
3079 va_start (ap, cmsgid);
3080
3081 pfx_str = this_is_asm_operands ? _("invalid 'asm': ") : "output_operand: ";
3082 asprintf (&fmt_string, "%s%s", pfx_str, _(cmsgid));
3083 vasprintf (&new_message, fmt_string, ap);
3084
3085 if (this_is_asm_operands)
3086 error_for_asm (this_is_asm_operands, "%s", new_message);
3087 else
3088 internal_error ("%s", new_message);
3089
3090 free (fmt_string);
3091 free (new_message);
3092 va_end (ap);
3093 }
3094 \f
3095 /* Output of assembler code from a template, and its subroutines. */
3096
3097 /* Annotate the assembly with a comment describing the pattern and
3098 alternative used. */
3099
3100 static void
3101 output_asm_name (void)
3102 {
3103 if (debug_insn)
3104 {
3105 int num = INSN_CODE (debug_insn);
3106 fprintf (asm_out_file, "\t%s %d\t%s",
3107 ASM_COMMENT_START, INSN_UID (debug_insn),
3108 insn_data[num].name);
3109 if (insn_data[num].n_alternatives > 1)
3110 fprintf (asm_out_file, "/%d", which_alternative + 1);
3111 #ifdef HAVE_ATTR_length
3112 fprintf (asm_out_file, "\t[length = %d]",
3113 get_attr_length (debug_insn));
3114 #endif
3115 /* Clear this so only the first assembler insn
3116 of any rtl insn will get the special comment for -dp. */
3117 debug_insn = 0;
3118 }
3119 }
3120
3121 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
3122 or its address, return that expr. Set *PADDRESSP to 1 if the expr
3123 corresponds to the address of the object and 0 if to the object. */
3124
3125 static tree
3126 get_mem_expr_from_op (rtx op, int *paddressp)
3127 {
3128 tree expr;
3129 int inner_addressp;
3130
3131 *paddressp = 0;
3132
3133 if (REG_P (op))
3134 return REG_EXPR (op);
3135 else if (!MEM_P (op))
3136 return 0;
3137
3138 if (MEM_EXPR (op) != 0)
3139 return MEM_EXPR (op);
3140
3141 /* Otherwise we have an address, so indicate it and look at the address. */
3142 *paddressp = 1;
3143 op = XEXP (op, 0);
3144
3145 /* First check if we have a decl for the address, then look at the right side
3146 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
3147 But don't allow the address itself to be indirect. */
3148 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
3149 return expr;
3150 else if (GET_CODE (op) == PLUS
3151 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
3152 return expr;
3153
3154 while (UNARY_P (op)
3155 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
3156 op = XEXP (op, 0);
3157
3158 expr = get_mem_expr_from_op (op, &inner_addressp);
3159 return inner_addressp ? 0 : expr;
3160 }
3161
3162 /* Output operand names for assembler instructions. OPERANDS is the
3163 operand vector, OPORDER is the order to write the operands, and NOPS
3164 is the number of operands to write. */
3165
3166 static void
3167 output_asm_operand_names (rtx *operands, int *oporder, int nops)
3168 {
3169 int wrote = 0;
3170 int i;
3171
3172 for (i = 0; i < nops; i++)
3173 {
3174 int addressp;
3175 rtx op = operands[oporder[i]];
3176 tree expr = get_mem_expr_from_op (op, &addressp);
3177
3178 fprintf (asm_out_file, "%c%s",
3179 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
3180 wrote = 1;
3181 if (expr)
3182 {
3183 fprintf (asm_out_file, "%s",
3184 addressp ? "*" : "");
3185 print_mem_expr (asm_out_file, expr);
3186 wrote = 1;
3187 }
3188 else if (REG_P (op) && ORIGINAL_REGNO (op)
3189 && ORIGINAL_REGNO (op) != REGNO (op))
3190 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
3191 }
3192 }
3193
3194 /* Output text from TEMPLATE to the assembler output file,
3195 obeying %-directions to substitute operands taken from
3196 the vector OPERANDS.
3197
3198 %N (for N a digit) means print operand N in usual manner.
3199 %lN means require operand N to be a CODE_LABEL or LABEL_REF
3200 and print the label name with no punctuation.
3201 %cN means require operand N to be a constant
3202 and print the constant expression with no punctuation.
3203 %aN means expect operand N to be a memory address
3204 (not a memory reference!) and print a reference
3205 to that address.
3206 %nN means expect operand N to be a constant
3207 and print a constant expression for minus the value
3208 of the operand, with no other punctuation. */
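/* A hypothetical template, purely for illustration (it is not taken from
   any particular machine description):

     "add{l}\t{%2, %0|%0, %2}"

   prints operands 0 and 2 through the normal PRINT_OPERAND path and, when
   ASSEMBLER_DIALECT is defined, chooses one of the brace-separated
   spellings according to the dialect in use; something like "b%l1" in a
   branch pattern would instead print the label operand 1, and "%%" always
   emits a literal '%'.  */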
3209
3210 void
3211 output_asm_insn (const char *templ, rtx *operands)
3212 {
3213 const char *p;
3214 int c;
3215 #ifdef ASSEMBLER_DIALECT
3216 int dialect = 0;
3217 #endif
3218 int oporder[MAX_RECOG_OPERANDS];
3219 char opoutput[MAX_RECOG_OPERANDS];
3220 int ops = 0;
3221
3222 /* An insn may return a null string template
3223 in a case where no assembler code is needed. */
3224 if (*templ == 0)
3225 return;
3226
3227 memset (opoutput, 0, sizeof opoutput);
3228 p = templ;
3229 putc ('\t', asm_out_file);
3230
3231 #ifdef ASM_OUTPUT_OPCODE
3232 ASM_OUTPUT_OPCODE (asm_out_file, p);
3233 #endif
3234
3235 while ((c = *p++))
3236 switch (c)
3237 {
3238 case '\n':
3239 if (flag_verbose_asm)
3240 output_asm_operand_names (operands, oporder, ops);
3241 if (flag_print_asm_name)
3242 output_asm_name ();
3243
3244 ops = 0;
3245 memset (opoutput, 0, sizeof opoutput);
3246
3247 putc (c, asm_out_file);
3248 #ifdef ASM_OUTPUT_OPCODE
3249 while ((c = *p) == '\t')
3250 {
3251 putc (c, asm_out_file);
3252 p++;
3253 }
3254 ASM_OUTPUT_OPCODE (asm_out_file, p);
3255 #endif
3256 break;
3257
3258 #ifdef ASSEMBLER_DIALECT
3259 case '{':
3260 {
3261 int i;
3262
3263 if (dialect)
3264 output_operand_lossage ("nested assembly dialect alternatives");
3265 else
3266 dialect = 1;
3267
3268 /* If we want the first dialect, do nothing. Otherwise, skip
3269 DIALECT_NUMBER strings ending with '|'. */
3270 for (i = 0; i < dialect_number; i++)
3271 {
3272 while (*p && *p != '}' && *p++ != '|')
3273 ;
3274 if (*p == '}')
3275 break;
3276 if (*p == '|')
3277 p++;
3278 }
3279
3280 if (*p == '\0')
3281 output_operand_lossage ("unterminated assembly dialect alternative");
3282 }
3283 break;
3284
3285 case '|':
3286 if (dialect)
3287 {
3288 /* Skip to close brace. */
3289 do
3290 {
3291 if (*p == '\0')
3292 {
3293 output_operand_lossage ("unterminated assembly dialect alternative");
3294 break;
3295 }
3296 }
3297 while (*p++ != '}');
3298 dialect = 0;
3299 }
3300 else
3301 putc (c, asm_out_file);
3302 break;
3303
3304 case '}':
3305 if (! dialect)
3306 putc (c, asm_out_file);
3307 dialect = 0;
3308 break;
3309 #endif
3310
3311 case '%':
3312 /* %% outputs a single %. */
3313 if (*p == '%')
3314 {
3315 p++;
3316 putc (c, asm_out_file);
3317 }
3318 /* %= outputs a number which is unique to each insn in the entire
3319 compilation. This is useful for making local labels that are
3320 referred to more than once in a given insn. */
3321 else if (*p == '=')
3322 {
3323 p++;
3324 fprintf (asm_out_file, "%d", insn_counter);
3325 }
3326 /* % followed by a letter and some digits
3327 outputs an operand in a special way depending on the letter.
3328 Letters `acln' are implemented directly.
3329 Other letters are passed to `output_operand' so that
3330 the PRINT_OPERAND macro can define them. */
3331 else if (ISALPHA (*p))
3332 {
3333 int letter = *p++;
3334 unsigned long opnum;
3335 char *endptr;
3336
3337 opnum = strtoul (p, &endptr, 10);
3338
3339 if (endptr == p)
3340 output_operand_lossage ("operand number missing "
3341 "after %%-letter");
3342 else if (this_is_asm_operands && opnum >= insn_noperands)
3343 output_operand_lossage ("operand number out of range");
3344 else if (letter == 'l')
3345 output_asm_label (operands[opnum]);
3346 else if (letter == 'a')
3347 output_address (operands[opnum]);
3348 else if (letter == 'c')
3349 {
3350 if (CONSTANT_ADDRESS_P (operands[opnum]))
3351 output_addr_const (asm_out_file, operands[opnum]);
3352 else
3353 output_operand (operands[opnum], 'c');
3354 }
3355 else if (letter == 'n')
3356 {
3357 if (CONST_INT_P (operands[opnum]))
3358 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3359 - INTVAL (operands[opnum]));
3360 else
3361 {
3362 putc ('-', asm_out_file);
3363 output_addr_const (asm_out_file, operands[opnum]);
3364 }
3365 }
3366 else
3367 output_operand (operands[opnum], letter);
3368
3369 if (!opoutput[opnum])
3370 oporder[ops++] = opnum;
3371 opoutput[opnum] = 1;
3372
3373 p = endptr;
3374 c = *p;
3375 }
3376 /* % followed by a digit outputs an operand the default way. */
3377 else if (ISDIGIT (*p))
3378 {
3379 unsigned long opnum;
3380 char *endptr;
3381
3382 opnum = strtoul (p, &endptr, 10);
3383 if (this_is_asm_operands && opnum >= insn_noperands)
3384 output_operand_lossage ("operand number out of range");
3385 else
3386 output_operand (operands[opnum], 0);
3387
3388 if (!opoutput[opnum])
3389 oporder[ops++] = opnum;
3390 opoutput[opnum] = 1;
3391
3392 p = endptr;
3393 c = *p;
3394 }
3395 /* % followed by punctuation: output something for that
3396 punctuation character alone, with no operand.
3397 The PRINT_OPERAND macro decides what is actually done. */
3398 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3399 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3400 output_operand (NULL_RTX, *p++);
3401 #endif
3402 else
3403 output_operand_lossage ("invalid %%-code");
3404 break;
3405
3406 default:
3407 putc (c, asm_out_file);
3408 }
3409
3410 /* Write out the variable names for operands, if we know them. */
3411 if (flag_verbose_asm)
3412 output_asm_operand_names (operands, oporder, ops);
3413 if (flag_print_asm_name)
3414 output_asm_name ();
3415
3416 putc ('\n', asm_out_file);
3417 }
3418 \f
3419 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3420
3421 void
3422 output_asm_label (rtx x)
3423 {
3424 char buf[256];
3425
3426 if (GET_CODE (x) == LABEL_REF)
3427 x = XEXP (x, 0);
3428 if (LABEL_P (x)
3429 || (NOTE_P (x)
3430 && NOTE_KIND (x) == NOTE_INSN_DELETED_LABEL))
3431 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3432 else
3433 output_operand_lossage ("'%%l' operand isn't a label");
3434
3435 assemble_name (asm_out_file, buf);
3436 }
3437
3438 /* Helper rtx-iteration-function for mark_symbol_refs_as_used and
3439 output_operand. Marks SYMBOL_REFs as referenced through use of
3440 assemble_external. */
3441
3442 static int
3443 mark_symbol_ref_as_used (rtx *xp, void *dummy ATTRIBUTE_UNUSED)
3444 {
3445 rtx x = *xp;
3446
3447 /* If we have a used symbol, we may have to emit assembly
3448 annotations corresponding to whether the symbol is external, weak
3449 or has non-default visibility. */
3450 if (GET_CODE (x) == SYMBOL_REF)
3451 {
3452 tree t;
3453
3454 t = SYMBOL_REF_DECL (x);
3455 if (t)
3456 assemble_external (t);
3457
3458 return -1;
3459 }
3460
3461 return 0;
3462 }
3463
3464 /* Marks SYMBOL_REFs in x as referenced through use of assemble_external. */
3465
3466 void
3467 mark_symbol_refs_as_used (rtx x)
3468 {
3469 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3470 }
3471
3472 /* Print operand X using machine-dependent assembler syntax.
3473 The macro PRINT_OPERAND is defined just to control this function.
3474 CODE is a non-digit that preceded the operand-number in the % spec,
3475 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3476 between the % and the digits.
3477 When CODE is a non-letter, X is 0.
3478
3479 The meanings of the letters are machine-dependent and controlled
3480 by PRINT_OPERAND. */
3481
3482 static void
3483 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3484 {
3485 if (x && GET_CODE (x) == SUBREG)
3486 x = alter_subreg (&x);
3487
3488 /* X must not be a pseudo reg. */
3489 gcc_assert (!x || !REG_P (x) || REGNO (x) < FIRST_PSEUDO_REGISTER);
3490
3491 PRINT_OPERAND (asm_out_file, x, code);
3492
3493 if (x == NULL_RTX)
3494 return;
3495
3496 for_each_rtx (&x, mark_symbol_ref_as_used, NULL);
3497 }
3498
3499 /* Print a memory reference operand for address X
3500 using machine-dependent assembler syntax.
3501 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3502
3503 void
3504 output_address (rtx x)
3505 {
3506 bool changed = false;
3507 walk_alter_subreg (&x, &changed);
3508 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3509 }
3510 \f
3511 /* Print an integer constant expression in assembler syntax.
3512 Addition and subtraction are the only arithmetic
3513 that may appear in these expressions. */
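/* For example (the symbol names are hypothetical):
   (const (plus (symbol_ref "foo") (const_int 4))) is printed as "foo+4",
   (const (minus (symbol_ref "a") (symbol_ref "b"))) is printed as "a-b",
   and a negative addend comes out as "foo-4" because the '+' is suppressed
   for negative CONST_INTs.  */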
3514
3515 void
3516 output_addr_const (FILE *file, rtx x)
3517 {
3518 char buf[256];
3519
3520 restart:
3521 switch (GET_CODE (x))
3522 {
3523 case PC:
3524 putc ('.', file);
3525 break;
3526
3527 case SYMBOL_REF:
3528 if (SYMBOL_REF_DECL (x))
3529 {
3530 mark_decl_referenced (SYMBOL_REF_DECL (x));
3531 assemble_external (SYMBOL_REF_DECL (x));
3532 }
3533 #ifdef ASM_OUTPUT_SYMBOL_REF
3534 ASM_OUTPUT_SYMBOL_REF (file, x);
3535 #else
3536 assemble_name (file, XSTR (x, 0));
3537 #endif
3538 break;
3539
3540 case LABEL_REF:
3541 x = XEXP (x, 0);
3542 /* Fall through. */
3543 case CODE_LABEL:
3544 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3545 #ifdef ASM_OUTPUT_LABEL_REF
3546 ASM_OUTPUT_LABEL_REF (file, buf);
3547 #else
3548 assemble_name (file, buf);
3549 #endif
3550 break;
3551
3552 case CONST_INT:
3553 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3554 break;
3555
3556 case CONST:
3557 /* This used to output parentheses around the expression,
3558 but that does not work on the 386 (either ATT or BSD assembler). */
3559 output_addr_const (file, XEXP (x, 0));
3560 break;
3561
3562 case CONST_DOUBLE:
3563 if (GET_MODE (x) == VOIDmode)
3564 {
3565 /* We can use %d if the number is one word and positive. */
3566 if (CONST_DOUBLE_HIGH (x))
3567 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3568 (unsigned HOST_WIDE_INT) CONST_DOUBLE_HIGH (x),
3569 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3570 else if (CONST_DOUBLE_LOW (x) < 0)
3571 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3572 (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x));
3573 else
3574 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3575 }
3576 else
3577 /* We can't handle floating point constants;
3578 PRINT_OPERAND must handle them. */
3579 output_operand_lossage ("floating constant misused");
3580 break;
3581
3582 case CONST_FIXED:
3583 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
3584 (unsigned HOST_WIDE_INT) CONST_FIXED_VALUE_LOW (x));
3585 break;
3586
3587 case PLUS:
3588 /* Some assemblers need integer constants to appear last (e.g. masm). */
3589 if (CONST_INT_P (XEXP (x, 0)))
3590 {
3591 output_addr_const (file, XEXP (x, 1));
3592 if (INTVAL (XEXP (x, 0)) >= 0)
3593 fprintf (file, "+");
3594 output_addr_const (file, XEXP (x, 0));
3595 }
3596 else
3597 {
3598 output_addr_const (file, XEXP (x, 0));
3599 if (!CONST_INT_P (XEXP (x, 1))
3600 || INTVAL (XEXP (x, 1)) >= 0)
3601 fprintf (file, "+");
3602 output_addr_const (file, XEXP (x, 1));
3603 }
3604 break;
3605
3606 case MINUS:
3607 /* Avoid outputting things like x-x or x+5-x,
3608 since some assemblers can't handle that. */
3609 x = simplify_subtraction (x);
3610 if (GET_CODE (x) != MINUS)
3611 goto restart;
3612
3613 output_addr_const (file, XEXP (x, 0));
3614 fprintf (file, "-");
3615 if ((CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0)
3616 || GET_CODE (XEXP (x, 1)) == PC
3617 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3618 output_addr_const (file, XEXP (x, 1));
3619 else
3620 {
3621 fputs (targetm.asm_out.open_paren, file);
3622 output_addr_const (file, XEXP (x, 1));
3623 fputs (targetm.asm_out.close_paren, file);
3624 }
3625 break;
3626
3627 case ZERO_EXTEND:
3628 case SIGN_EXTEND:
3629 case SUBREG:
3630 case TRUNCATE:
3631 output_addr_const (file, XEXP (x, 0));
3632 break;
3633
3634 default:
3635 #ifdef OUTPUT_ADDR_CONST_EXTRA
3636 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3637 break;
3638
3639 fail:
3640 #endif
3641 output_operand_lossage ("invalid expression as operand");
3642 }
3643 }
3644 \f
3645 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3646 %R prints the value of REGISTER_PREFIX.
3647 %L prints the value of LOCAL_LABEL_PREFIX.
3648 %U prints the value of USER_LABEL_PREFIX.
3649 %I prints the value of IMMEDIATE_PREFIX.
3650 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3651 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3652
3653 We handle alternate assembler dialects here, just like output_asm_insn. */
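/* A usage sketch (the operands here are hypothetical):

     asm_fprintf (file, "\tadd\t%R%s, %I%wd\n", reg_names[regno], delta);

   prints the target's REGISTER_PREFIX, the register name, the target's
   IMMEDIATE_PREFIX and a HOST_WIDE_INT value ('%w' widens the following
   'd' to HOST_WIDE_INT_PRINT).  */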
3654
3655 void
3656 asm_fprintf (FILE *file, const char *p, ...)
3657 {
3658 char buf[10];
3659 char *q, c;
3660 va_list argptr;
3661
3662 va_start (argptr, p);
3663
3664 buf[0] = '%';
3665
3666 while ((c = *p++))
3667 switch (c)
3668 {
3669 #ifdef ASSEMBLER_DIALECT
3670 case '{':
3671 {
3672 int i;
3673
3674 /* If we want the first dialect, do nothing. Otherwise, skip
3675 DIALECT_NUMBER strings ending with '|'. */
3676 for (i = 0; i < dialect_number; i++)
3677 {
3678 while (*p && *p++ != '|')
3679 ;
3680
3681 if (*p == '|')
3682 p++;
3683 }
3684 }
3685 break;
3686
3687 case '|':
3688 /* Skip to close brace. */
3689 while (*p && *p++ != '}')
3690 ;
3691 break;
3692
3693 case '}':
3694 break;
3695 #endif
3696
3697 case '%':
3698 c = *p++;
3699 q = &buf[1];
3700 while (strchr ("-+ #0", c))
3701 {
3702 *q++ = c;
3703 c = *p++;
3704 }
3705 while (ISDIGIT (c) || c == '.')
3706 {
3707 *q++ = c;
3708 c = *p++;
3709 }
3710 switch (c)
3711 {
3712 case '%':
3713 putc ('%', file);
3714 break;
3715
3716 case 'd': case 'i': case 'u':
3717 case 'x': case 'X': case 'o':
3718 case 'c':
3719 *q++ = c;
3720 *q = 0;
3721 fprintf (file, buf, va_arg (argptr, int));
3722 break;
3723
3724 case 'w':
3725 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3726 'o' cases, but we do not check for those cases. It
3727 means that the value is a HOST_WIDE_INT, which may be
3728 either `long' or `long long'. */
3729 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3730 q += strlen (HOST_WIDE_INT_PRINT);
3731 *q++ = *p++;
3732 *q = 0;
3733 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3734 break;
3735
3736 case 'l':
3737 *q++ = c;
3738 #ifdef HAVE_LONG_LONG
3739 if (*p == 'l')
3740 {
3741 *q++ = *p++;
3742 *q++ = *p++;
3743 *q = 0;
3744 fprintf (file, buf, va_arg (argptr, long long));
3745 }
3746 else
3747 #endif
3748 {
3749 *q++ = *p++;
3750 *q = 0;
3751 fprintf (file, buf, va_arg (argptr, long));
3752 }
3753
3754 break;
3755
3756 case 's':
3757 *q++ = c;
3758 *q = 0;
3759 fprintf (file, buf, va_arg (argptr, char *));
3760 break;
3761
3762 case 'O':
3763 #ifdef ASM_OUTPUT_OPCODE
3764 ASM_OUTPUT_OPCODE (asm_out_file, p);
3765 #endif
3766 break;
3767
3768 case 'R':
3769 #ifdef REGISTER_PREFIX
3770 fprintf (file, "%s", REGISTER_PREFIX);
3771 #endif
3772 break;
3773
3774 case 'I':
3775 #ifdef IMMEDIATE_PREFIX
3776 fprintf (file, "%s", IMMEDIATE_PREFIX);
3777 #endif
3778 break;
3779
3780 case 'L':
3781 #ifdef LOCAL_LABEL_PREFIX
3782 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3783 #endif
3784 break;
3785
3786 case 'U':
3787 fputs (user_label_prefix, file);
3788 break;
3789
3790 #ifdef ASM_FPRINTF_EXTENSIONS
3791 /* Uppercase letters are reserved for general use by asm_fprintf
3792 and so are not available to target specific code. In order to
3793 prevent the ASM_FPRINTF_EXTENSIONS macro from using them then,
3794 they are defined here. As they get turned into real extensions
3795 to asm_fprintf they should be removed from this list. */
3796 case 'A': case 'B': case 'C': case 'D': case 'E':
3797 case 'F': case 'G': case 'H': case 'J': case 'K':
3798 case 'M': case 'N': case 'P': case 'Q': case 'S':
3799 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3800 break;
3801
3802 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3803 #endif
3804 default:
3805 gcc_unreachable ();
3806 }
3807 break;
3808
3809 default:
3810 putc (c, file);
3811 }
3812 va_end (argptr);
3813 }
3814 \f
3815 /* Split up a CONST_DOUBLE or integer constant rtx
3816 into two rtx's for single words,
3817 storing in *FIRST the word that comes first in memory in the target
3818 and in *SECOND the other. */
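/* A worked example, assuming 32-bit words and a 64-bit HOST_WIDE_INT:
   splitting (const_int 0x100000002) gives a low word of 2 and a high
   word of 1, so with little-endian word order *FIRST is (const_int 2)
   and *SECOND is (const_int 1); WORDS_BIG_ENDIAN swaps the two.  */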
3819
3820 void
3821 split_double (rtx value, rtx *first, rtx *second)
3822 {
3823 if (CONST_INT_P (value))
3824 {
3825 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3826 {
3827 /* In this case the CONST_INT holds both target words.
3828 Extract the bits from it into two word-sized pieces.
3829 Sign extend each half to HOST_WIDE_INT. */
3830 unsigned HOST_WIDE_INT low, high;
3831 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3832
3833 /* Set sign_bit to the most significant bit of a word. */
3834 sign_bit = 1;
3835 sign_bit <<= BITS_PER_WORD - 1;
3836
3837 /* Set mask so that all bits of the word are set. We could
3838 have used 1 << BITS_PER_WORD instead of basing the
3839 calculation on sign_bit. However, on machines where
3840 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3841 compiler warning, even though the code would never be
3842 executed. */
3843 mask = sign_bit << 1;
3844 mask--;
3845
3846 /* Set sign_extend as any remaining bits. */
3847 sign_extend = ~mask;
3848
3849 /* Pick the lower word and sign-extend it. */
3850 low = INTVAL (value);
3851 low &= mask;
3852 if (low & sign_bit)
3853 low |= sign_extend;
3854
3855 /* Pick the higher word, shifted to the least significant
3856 bits, and sign-extend it. */
3857 high = INTVAL (value);
3858 high >>= BITS_PER_WORD - 1;
3859 high >>= 1;
3860 high &= mask;
3861 if (high & sign_bit)
3862 high |= sign_extend;
3863
3864 /* Store the words in the target machine order. */
3865 if (WORDS_BIG_ENDIAN)
3866 {
3867 *first = GEN_INT (high);
3868 *second = GEN_INT (low);
3869 }
3870 else
3871 {
3872 *first = GEN_INT (low);
3873 *second = GEN_INT (high);
3874 }
3875 }
3876 else
3877 {
3878 /* The rule for using CONST_INT for a wider mode
3879 is that we regard the value as signed.
3880 So sign-extend it. */
3881 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3882 if (WORDS_BIG_ENDIAN)
3883 {
3884 *first = high;
3885 *second = value;
3886 }
3887 else
3888 {
3889 *first = value;
3890 *second = high;
3891 }
3892 }
3893 }
3894 else if (GET_CODE (value) != CONST_DOUBLE)
3895 {
3896 if (WORDS_BIG_ENDIAN)
3897 {
3898 *first = const0_rtx;
3899 *second = value;
3900 }
3901 else
3902 {
3903 *first = value;
3904 *second = const0_rtx;
3905 }
3906 }
3907 else if (GET_MODE (value) == VOIDmode
3908 /* This is the old way we did CONST_DOUBLE integers. */
3909 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3910 {
3911 /* In an integer, the words are defined as most and least significant.
3912 So order them by the target's convention. */
3913 if (WORDS_BIG_ENDIAN)
3914 {
3915 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3916 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3917 }
3918 else
3919 {
3920 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3921 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3922 }
3923 }
3924 else
3925 {
3926 REAL_VALUE_TYPE r;
3927 long l[2];
3928 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3929
3930 /* Note, this converts the REAL_VALUE_TYPE to the target's
3931 format, splits up the floating point double and outputs
3932 exactly 32 bits of it into each of l[0] and l[1] --
3933 not necessarily BITS_PER_WORD bits. */
3934 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3935
3936 /* If 32 bits is an entire word for the target, but not for the host,
3937 then sign-extend on the host so that the number will look the same
3938 way on the host that it would on the target. See for instance
3939 simplify_unary_operation. The #if is needed to avoid compiler
3940 warnings. */
3941
3942 #if HOST_BITS_PER_LONG > 32
3943 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3944 {
3945 if (l[0] & ((long) 1 << 31))
3946 l[0] |= ((long) (-1) << 32);
3947 if (l[1] & ((long) 1 << 31))
3948 l[1] |= ((long) (-1) << 32);
3949 }
3950 #endif
3951
3952 *first = GEN_INT (l[0]);
3953 *second = GEN_INT (l[1]);
3954 }
3955 }
3956 \f
3957 /* Return nonzero if this function has no function calls. */
3958
3959 int
3960 leaf_function_p (void)
3961 {
3962 rtx insn;
3963 rtx link;
3964
3965 if (crtl->profile || profile_arc_flag)
3966 return 0;
3967
3968 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3969 {
3970 if (CALL_P (insn)
3971 && ! SIBLING_CALL_P (insn))
3972 return 0;
3973 if (NONJUMP_INSN_P (insn)
3974 && GET_CODE (PATTERN (insn)) == SEQUENCE
3975 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3976 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3977 return 0;
3978 }
3979 for (link = crtl->epilogue_delay_list;
3980 link;
3981 link = XEXP (link, 1))
3982 {
3983 insn = XEXP (link, 0);
3984
3985 if (CALL_P (insn)
3986 && ! SIBLING_CALL_P (insn))
3987 return 0;
3988 if (NONJUMP_INSN_P (insn)
3989 && GET_CODE (PATTERN (insn)) == SEQUENCE
3990 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3991 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3992 return 0;
3993 }
3994
3995 return 1;
3996 }
3997
3998 /* Return 1 if INSN is a forward branch.
3999 Uses the insn_shuid array, so it works only in the final pass.  May be used
4000 by output templates to add branch prediction hints.
4001 */
4002 int
4003 final_forward_branch_p (rtx insn)
4004 {
4005 int insn_id, label_id;
4006
4007 gcc_assert (uid_shuid);
4008 insn_id = INSN_SHUID (insn);
4009 label_id = INSN_SHUID (JUMP_LABEL (insn));
4010 /* We've hit some insns that do not have id information available.  */
4011 gcc_assert (insn_id && label_id);
4012 return insn_id < label_id;
4013 }
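
/* For illustration only: a target's insn output routine might use
   final_forward_branch_p to choose a static branch-prediction hint, along
   the lines of

     return final_forward_branch_p (insn) ? "br.pt\t%l0" : "br.pn\t%l0";

   where the mnemonics are hypothetical and merely stand in for whatever
   hint syntax the target's assembler actually expects.  */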
4014
4015 /* On some machines, a function with no call insns
4016 can run faster if it doesn't create its own register window.
4017 When output, the leaf function should use only the "output"
4018 registers. Ordinarily, the function would be compiled to use
4019 the "input" registers to find its arguments; it is a candidate
4020 for leaf treatment if it uses only the "input" registers.
4021 Leaf function treatment means renumbering so the function
4022 uses the "output" registers instead. */
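
/* For example (hypothetical, SPARC-like register numbering): a
   register-window target supplies LEAF_REGISTERS and LEAF_REG_REMAP in its
   target header, roughly

     #define LEAF_REG_REMAP(REGNO) \
       ((REGNO) >= 24 && (REGNO) <= 31 ? (REGNO) - 16 : (REGNO))

   so that incoming-argument ("input") registers are rewritten to the
   corresponding "output" registers.  The real mapping comes from the target
   headers (e.g. config/sparc/sparc.h), not from this file.  */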
4023
4024 #ifdef LEAF_REGISTERS
4025
4026 /* Return 1 if this function uses only the registers that can be
4027 safely renumbered. */
4028
4029 int
4030 only_leaf_regs_used (void)
4031 {
4032 int i;
4033 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
4034
4035 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4036 if ((df_regs_ever_live_p (i) || global_regs[i])
4037 && ! permitted_reg_in_leaf_functions[i])
4038 return 0;
4039
4040 if (crtl->uses_pic_offset_table
4041 && pic_offset_table_rtx != 0
4042 && REG_P (pic_offset_table_rtx)
4043 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
4044 return 0;
4045
4046 return 1;
4047 }
4048
4049 /* Scan all instructions and renumber all registers into those
4050 available in leaf functions. */
4051
4052 static void
4053 leaf_renumber_regs (rtx first)
4054 {
4055 rtx insn;
4056
4057 /* Renumber only the actual patterns.
4058 The reg-notes can contain frame pointer refs, and renumbering
4059 those could crash; it should not be needed anyway.  */
4060 for (insn = first; insn; insn = NEXT_INSN (insn))
4061 if (INSN_P (insn))
4062 leaf_renumber_regs_insn (PATTERN (insn));
4063 for (insn = crtl->epilogue_delay_list;
4064 insn;
4065 insn = XEXP (insn, 1))
4066 if (INSN_P (XEXP (insn, 0)))
4067 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
4068 }
4069
4070 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
4071 available in leaf functions. */
4072
4073 void
4074 leaf_renumber_regs_insn (rtx in_rtx)
4075 {
4076 int i, j;
4077 const char *format_ptr;
4078
4079 if (in_rtx == 0)
4080 return;
4081
4082 /* Renumber all input-registers into output-registers.
4083 The rtx's `used' flag is set once a register has been renumbered,
4084 so the same register is never renumbered twice.  */
4085
4086 if (REG_P (in_rtx))
4087 {
4088 int newreg;
4089
4090 /* Don't renumber the same reg twice. */
4091 if (in_rtx->used)
4092 return;
4093
4094 newreg = REGNO (in_rtx);
4095 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
4096 to reach here as part of a REG_NOTE. */
4097 if (newreg >= FIRST_PSEUDO_REGISTER)
4098 {
4099 in_rtx->used = 1;
4100 return;
4101 }
4102 newreg = LEAF_REG_REMAP (newreg);
4103 gcc_assert (newreg >= 0);
4104 df_set_regs_ever_live (REGNO (in_rtx), false);
4105 df_set_regs_ever_live (newreg, true);
4106 SET_REGNO (in_rtx, newreg);
4107 in_rtx->used = 1;
4108 }
4109
4110 if (INSN_P (in_rtx))
4111 {
4112 /* Inside a SEQUENCE, we find insns.
4113 Renumber just the patterns of these insns,
4114 just as we do for the top-level insns. */
4115 leaf_renumber_regs_insn (PATTERN (in_rtx));
4116 return;
4117 }
4118
4119 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
4120
4121 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
4122 switch (*format_ptr++)
4123 {
4124 case 'e':
4125 leaf_renumber_regs_insn (XEXP (in_rtx, i));
4126 break;
4127
4128 case 'E':
4129 if (NULL != XVEC (in_rtx, i))
4130 {
4131 for (j = 0; j < XVECLEN (in_rtx, i); j++)
4132 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
4133 }
4134 break;
4135
4136 case 'S':
4137 case 's':
4138 case '0':
4139 case 'i':
4140 case 'w':
4141 case 'n':
4142 case 'u':
4143 break;
4144
4145 default:
4146 gcc_unreachable ();
4147 }
4148 }
4149 #endif
4150
4151
4152 /* When -gused is in effect, emit debug info only for symbols that are
4153 actually used.  Besides the standard intercepted debug_hooks, there are
4154 some direct calls to dbxout_symbol, dbxout_parms, and dbxout_reg_params;
4155 those routines may also be called from a higher-level intercepted routine.
4156 So, to avoid recording data for an inner call made on behalf of an
4157 intercept, we maintain an intercept nesting counter (debug_nesting).
4158 We only save the intercepted arguments if the nesting is 1.  */
4159 int debug_nesting = 0;
4160
4161 static tree *symbol_queue;
4162 int symbol_queue_index = 0;
4163 static int symbol_queue_size = 0;
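
/* A minimal usage sketch (the real callers live elsewhere, e.g. in the dbx
   debug output code): an intercepted entry point is expected to do roughly

     debug_nesting++;
     ... emit the decl, calling debug_queue_symbol for each type it needs ...
     debug_nesting--;
     if (debug_nesting == 0)
       debug_flush_symbol_queue ();

   so that type symbols queued by nested calls are flushed exactly once.  */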
4164
4165 /* Output any type symbols that were queued up while generating the type
4166 info for some originally used symbol.  This may add further entries to
4167 the queue.  This routine is called only once the nesting depth drops
4168 back to 0.  */
4169
4170 void
4171 debug_flush_symbol_queue (void)
4172 {
4173 int i;
4174
4175 /* Make sure that additionally queued items are not flushed
4176 prematurely. */
4177
4178 ++debug_nesting;
4179
4180 for (i = 0; i < symbol_queue_index; ++i)
4181 {
4182 /* If we pushed queued symbols then such symbols must be
4183 output no matter what anyone else says. Specifically,
4184 we need to make sure dbxout_symbol() thinks the symbol was
4185 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
4186 which may be set for outside reasons. */
4187 int saved_tree_used = TREE_USED (symbol_queue[i]);
4188 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
4189 TREE_USED (symbol_queue[i]) = 1;
4190 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
4191
4192 #ifdef DBX_DEBUGGING_INFO
4193 dbxout_symbol (symbol_queue[i], 0);
4194 #endif
4195
4196 TREE_USED (symbol_queue[i]) = saved_tree_used;
4197 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
4198 }
4199
4200 symbol_queue_index = 0;
4201 --debug_nesting;
4202 }
4203
4204 /* Queue a type symbol needed as part of the definition of a decl
4205 symbol. These symbols are generated when debug_flush_symbol_queue()
4206 is called. */
4207
4208 void
4209 debug_queue_symbol (tree decl)
4210 {
4211 if (symbol_queue_index >= symbol_queue_size)
4212 {
4213 symbol_queue_size += 10;
4214 symbol_queue = XRESIZEVEC (tree, symbol_queue, symbol_queue_size);
4215 }
4216
4217 symbol_queue[symbol_queue_index++] = decl;
4218 }
4219
4220 /* Free symbol queue. */
4221 void
4222 debug_free_queue (void)
4223 {
4224 if (symbol_queue)
4225 {
4226 free (symbol_queue);
4227 symbol_queue = NULL;
4228 symbol_queue_size = 0;
4229 }
4230 }
4231 \f
4232 /* Turn the RTL into assembly. */
4233 static unsigned int
4234 rest_of_handle_final (void)
4235 {
4236 rtx x;
4237 const char *fnname;
4238
4239 /* Get the function's name, as described by its RTL. This may be
4240 different from the DECL_NAME name used in the source file. */
4241
4242 x = DECL_RTL (current_function_decl);
4243 gcc_assert (MEM_P (x));
4244 x = XEXP (x, 0);
4245 gcc_assert (GET_CODE (x) == SYMBOL_REF);
4246 fnname = XSTR (x, 0);
4247
4248 assemble_start_function (current_function_decl, fnname);
4249 final_start_function (get_insns (), asm_out_file, optimize);
4250 final (get_insns (), asm_out_file, optimize);
4251 final_end_function ();
4252
4253 #ifdef TARGET_UNWIND_INFO
4254 /* ??? The IA-64 ".handlerdata" directive must be issued before
4255 the ".endp" directive that closes the procedure descriptor. */
4256 output_function_exception_table (fnname);
4257 #endif
4258
4259 assemble_end_function (current_function_decl, fnname);
4260
4261 #ifndef TARGET_UNWIND_INFO
4262 /* Otherwise, it feels unclean to switch sections in the middle. */
4263 output_function_exception_table (fnname);
4264 #endif
4265
4266 user_defined_section_attribute = false;
4267
4268 /* Free up reg info memory. */
4269 free_reg_info ();
4270
4271 if (! quiet_flag)
4272 fflush (asm_out_file);
4273
4274 /* Write DBX symbols if requested. */
4275
4276 /* Note that for those inline functions where we don't initially
4277 know for certain that we will be generating an out-of-line copy,
4278 the first invocation of this routine (rest_of_compilation) will
4279 skip over this code by doing a `goto exit_rest_of_compilation;'.
4280 Later on, wrapup_global_declarations will (indirectly) call
4281 rest_of_compilation again for those inline functions that need
4282 to have out-of-line copies generated. During that call, we
4283 *will* be routed past here. */
4284
4285 timevar_push (TV_SYMOUT);
4286 if (!DECL_IGNORED_P (current_function_decl))
4287 debug_hooks->function_decl (current_function_decl);
4288 timevar_pop (TV_SYMOUT);
4289
4290 /* Release the blocks that are linked to DECL_INITIAL() to free the memory. */
4291 DECL_INITIAL (current_function_decl) = error_mark_node;
4292
4293 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
4294 && targetm.have_ctors_dtors)
4295 targetm.asm_out.constructor (XEXP (DECL_RTL (current_function_decl), 0),
4296 decl_init_priority_lookup
4297 (current_function_decl));
4298 if (DECL_STATIC_DESTRUCTOR (current_function_decl)
4299 && targetm.have_ctors_dtors)
4300 targetm.asm_out.destructor (XEXP (DECL_RTL (current_function_decl), 0),
4301 decl_fini_priority_lookup
4302 (current_function_decl));
4303 return 0;
4304 }
4305
4306 struct rtl_opt_pass pass_final =
4307 {
4308 {
4309 RTL_PASS,
4310 "final", /* name */
4311 NULL, /* gate */
4312 rest_of_handle_final, /* execute */
4313 NULL, /* sub */
4314 NULL, /* next */
4315 0, /* static_pass_number */
4316 TV_FINAL, /* tv_id */
4317 0, /* properties_required */
4318 0, /* properties_provided */
4319 0, /* properties_destroyed */
4320 0, /* todo_flags_start */
4321 TODO_ggc_collect /* todo_flags_finish */
4322 }
4323 };
4324
4325
4326 static unsigned int
4327 rest_of_handle_shorten_branches (void)
4328 {
4329 /* Shorten branches. */
4330 shorten_branches (get_insns ());
4331 return 0;
4332 }
4333
4334 struct rtl_opt_pass pass_shorten_branches =
4335 {
4336 {
4337 RTL_PASS,
4338 "shorten", /* name */
4339 NULL, /* gate */
4340 rest_of_handle_shorten_branches, /* execute */
4341 NULL, /* sub */
4342 NULL, /* next */
4343 0, /* static_pass_number */
4344 TV_FINAL, /* tv_id */
4345 0, /* properties_required */
4346 0, /* properties_provided */
4347 0, /* properties_destroyed */
4348 0, /* todo_flags_start */
4349 TODO_dump_func /* todo_flags_finish */
4350 }
4351 };
4352
4353
4354 static unsigned int
4355 rest_of_clean_state (void)
4356 {
4357 rtx insn, next;
4358 FILE *final_output = NULL;
4359 int save_unnumbered = flag_dump_unnumbered;
4360 int save_noaddr = flag_dump_noaddr;
4361
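  /* If a final-insns dump file was requested (this is how -fcompare-debug
     drives this code), append this function's final insn stream to it, with
     addresses and most uids suppressed so that two compilations can be
     compared textually.  */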
4362 if (flag_dump_final_insns)
4363 {
4364 final_output = fopen (flag_dump_final_insns, "a");
4365 if (!final_output)
4366 {
4367 error ("could not open final insn dump file %qs: %s",
4368 flag_dump_final_insns, strerror (errno));
4369 flag_dump_final_insns = NULL;
4370 }
4371 else
4372 {
4373 const char *aname;
4374
4375 aname = (IDENTIFIER_POINTER
4376 (DECL_ASSEMBLER_NAME (current_function_decl)));
4377 fprintf (final_output, "\n;; Function (%s) %s\n\n", aname,
4378 cfun->function_frequency == FUNCTION_FREQUENCY_HOT
4379 ? " (hot)"
4380 : cfun->function_frequency == FUNCTION_FREQUENCY_UNLIKELY_EXECUTED
4381 ? " (unlikely executed)"
4382 : "");
4383
4384 flag_dump_noaddr = flag_dump_unnumbered = 1;
4385
4386 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4387 if (LABEL_P (insn))
4388 INSN_UID (insn) = CODE_LABEL_NUMBER (insn);
4389 else
4390 INSN_UID (insn) = 0;
4391 }
4392 }
4393
4394 /* It is very important to decompose the RTL instruction chain here:
4395 debug information keeps pointing into CODE_LABEL insns inside the function
4396 body.  If those labels stayed linked to the other insns, the whole RTL
4397 chain and its attached detailed debug info would be kept in memory.  */
4398 for (insn = get_insns (); insn; insn = next)
4399 {
4400 next = NEXT_INSN (insn);
4401 NEXT_INSN (insn) = NULL;
4402 PREV_INSN (insn) = NULL;
4403
4404 if (final_output
4405 && (!NOTE_P (insn) ||
4406 (NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION
4407 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_BEG
4408 && NOTE_KIND (insn) != NOTE_INSN_BLOCK_END
4409 && NOTE_KIND (insn) != NOTE_INSN_CFA_RESTORE_STATE)))
4410 print_rtl_single (final_output, insn);
4411
4412 }
4413
4414 if (final_output)
4415 {
4416 flag_dump_noaddr = save_noaddr;
4417 flag_dump_unnumbered = save_unnumbered;
4418
4419 if (fclose (final_output))
4420 {
4421 error ("could not close final insn dump file %qs: %s",
4422 flag_dump_final_insns, strerror (errno));
4423 flag_dump_final_insns = NULL;
4424 }
4425 }
4426
4427 /* In case the function was not output,
4428 don't leave any temporary anonymous types
4429 queued up for sdb output. */
4430 #ifdef SDB_DEBUGGING_INFO
4431 if (write_symbols == SDB_DEBUG)
4432 sdbout_types (NULL_TREE);
4433 #endif
4434
4435 flag_rerun_cse_after_global_opts = 0;
4436 reload_completed = 0;
4437 epilogue_completed = 0;
4438 #ifdef STACK_REGS
4439 regstack_completed = 0;
4440 #endif
4441
4442 /* Clear out the insn_length contents now that they are no
4443 longer valid. */
4444 init_insn_lengths ();
4445
4446 /* Show no temporary slots allocated. */
4447 init_temp_slots ();
4448
4449 free_bb_for_insn ();
4450
4451 delete_tree_ssa ();
4452
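  /* Record the incoming stack boundary this function prefers in its cgraph
     node, so that callers elsewhere in this translation unit can take it
     into account; this only makes sense when the definition binds locally.  */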
4453 if (targetm.binds_local_p (current_function_decl))
4454 {
4455 unsigned int pref = crtl->preferred_stack_boundary;
4456 if (crtl->stack_alignment_needed > crtl->preferred_stack_boundary)
4457 pref = crtl->stack_alignment_needed;
4458 cgraph_rtl_info (current_function_decl)->preferred_incoming_stack_boundary
4459 = pref;
4460 }
4461
4462 /* Make sure volatile mem refs aren't considered valid operands for
4463 arithmetic insns. We must call this here if this is a nested inline
4464 function, since the above code leaves us in the init_recog state,
4465 and the function context push/pop code does not save/restore volatile_ok.
4466
4467 ??? Maybe it isn't necessary for expand_start_function to call this
4468 anymore if we do it here? */
4469
4470 init_recog_no_volatile ();
4471
4472 /* We're done with this function. Free up memory if we can. */
4473 free_after_parsing (cfun);
4474 free_after_compilation (cfun);
4475 return 0;
4476 }
4477
4478 struct rtl_opt_pass pass_clean_state =
4479 {
4480 {
4481 RTL_PASS,
4482 "*clean_state", /* name */
4483 NULL, /* gate */
4484 rest_of_clean_state, /* execute */
4485 NULL, /* sub */
4486 NULL, /* next */
4487 0, /* static_pass_number */
4488 TV_FINAL, /* tv_id */
4489 0, /* properties_required */
4490 0, /* properties_provided */
4491 PROP_rtl, /* properties_destroyed */
4492 0, /* todo_flags_start */
4493 0 /* todo_flags_finish */
4494 }
4495 };