final.c (output_in_slot): New global variable.
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Bitflags used by final_scan_insn. */
115 #define SEEN_BB 1
116 #define SEEN_NOTE 2
117 #define SEEN_EMITTED 4
118
119 /* Last insn processed by final_scan_insn. */
120 static rtx debug_insn;
121 rtx current_output_insn;
122
123 /* Line number of last NOTE. */
124 static int last_linenum;
125
126 /* Highest line number in current block. */
127 static int high_block_linenum;
128
129 /* Likewise for function. */
130 static int high_function_linenum;
131
132 /* Filename of last NOTE. */
133 static const char *last_filename;
134
135 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
136
137 /* Nonzero while outputting an `asm' with operands.
138 This means that inconsistencies are the user's fault, so don't abort.
139 The precise value is the insn being output, to pass to error_for_asm. */
140 rtx this_is_asm_operands;
141
142 /* Number of operands of this insn, for an `asm' with operands. */
143 static unsigned int insn_noperands;
144
145 /* Compare optimization flag. */
146
147 static rtx last_ignored_compare = 0;
148
149 /* Assign a unique number to each insn that is output.
150 This can be used to generate unique local labels. */
151
152 static int insn_counter = 0;
153
154 #ifdef HAVE_cc0
155 /* This variable contains machine-dependent flags (defined in tm.h)
156 set and examined by output routines
157 that describe how to interpret the condition codes properly. */
158
159 CC_STATUS cc_status;
160
161 /* During output of an insn, this contains a copy of cc_status
162 from before the insn. */
163
164 CC_STATUS cc_prev_status;
165 #endif
166
167 /* Indexed by hardware reg number, is 1 if that register is ever
168 used in the current function.
169
170 In life_analysis, or in stupid_life_analysis, this is set
171 up to record the hard regs used explicitly. Reload adds
172 in the hard regs used for holding pseudo regs. Final uses
173 it to generate the code in the function prologue and epilogue
174 to save and restore registers as needed. */
175
176 char regs_ever_live[FIRST_PSEUDO_REGISTER];
177
178 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
179 Unlike regs_ever_live, elements of this array corresponding to
180 eliminable regs like the frame pointer are set if an asm sets them. */
181
182 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
183
184 /* Nonzero means current function must be given a frame pointer.
185 Initialized in function.c to 0. Set only in reload1.c as per
186 the needs of the function. */
187
188 int frame_pointer_needed;
189
190 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
191
192 static int block_depth;
193
194 /* Nonzero if we have enabled APP processing of our assembler output. */
195
196 static int app_on;
197
198 /* If we are outputting an insn sequence, this contains the sequence rtx.
199 Zero otherwise. */
200
201 rtx final_sequence;
202
203 /* True if we are outputting insns in a delay slot. This is used
204 to prettify the assembly. */
205 static bool output_in_slot;
206
207 #ifdef ASSEMBLER_DIALECT
208
209 /* Number of the assembler dialect to use, starting at 0. */
210 static int dialect_number;
211 #endif
212
213 #ifdef HAVE_conditional_execution
214 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
215 rtx current_insn_predicate;
216 #endif
217
218 #ifdef HAVE_ATTR_length
219 static int asm_insn_count (rtx);
220 #endif
221 static void profile_function (FILE *);
222 static void profile_after_prologue (FILE *);
223 static bool notice_source_line (rtx);
224 static rtx walk_alter_subreg (rtx *);
225 static void output_asm_name (void);
226 static void output_alternate_entry_point (FILE *, rtx);
227 static tree get_mem_expr_from_op (rtx, int *);
228 static void output_asm_operand_names (rtx *, int *, int);
229 static void output_operand (rtx, int);
230 #ifdef LEAF_REGISTERS
231 static void leaf_renumber_regs (rtx);
232 #endif
233 #ifdef HAVE_cc0
234 static int alter_cond (rtx);
235 #endif
236 #ifndef ADDR_VEC_ALIGN
237 static int final_addr_vec_align (rtx);
238 #endif
239 #ifdef HAVE_ATTR_length
240 static int align_fuzz (rtx, rtx, int, unsigned);
241 #endif
242 \f
243 /* Initialize data in final at the beginning of a compilation. */
244
245 void
246 init_final (const char *filename ATTRIBUTE_UNUSED)
247 {
248 app_on = 0;
249 final_sequence = 0;
250
251 #ifdef ASSEMBLER_DIALECT
252 dialect_number = ASSEMBLER_DIALECT;
253 #endif
254 }
255
256 /* Default target function prologue and epilogue assembler output.
257
258 If not overridden for epilogue code, then the function body itself
259 contains return instructions wherever needed. */
260 void
261 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
262 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
263 {
264 }
265
266 /* Default target hook that outputs nothing to a stream. */
267 void
268 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
269 {
270 }
271
272 /* Enable APP processing of subsequent output.
273 Used before the output from an `asm' statement. */
274
275 void
276 app_enable (void)
277 {
278 if (! app_on)
279 {
280 fputs (ASM_APP_ON, asm_out_file);
281 app_on = 1;
282 }
283 }
284
285 /* Disable APP processing of subsequent output.
286 Called from varasm.c before most kinds of output. */
287
288 void
289 app_disable (void)
290 {
291 if (app_on)
292 {
293 fputs (ASM_APP_OFF, asm_out_file);
294 app_on = 0;
295 }
296 }
297 \f
298 /* Return the number of slots filled in the current
299 delayed branch sequence (we don't count the insn needing the
300 delay slot). Zero if not in a delayed branch sequence. */
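/* For instance (illustrative only): a SEQUENCE holding a branch and two
   delay slot insns has XVECLEN 3, so dbr_sequence_length returns 2.  */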
301
302 #ifdef DELAY_SLOTS
303 int
304 dbr_sequence_length (void)
305 {
306 if (final_sequence != 0)
307 return XVECLEN (final_sequence, 0) - 1;
308 else
309 return 0;
310 }
311 #endif
312 \f
313 /* The next two pages contain routines used to compute the length of an insn
314 and to shorten branches. */
315
316 /* Arrays for insn lengths, and addresses. The latter is referenced by
317 `insn_current_length'. */
318
319 static int *insn_lengths;
320
321 varray_type insn_addresses_;
322
323 /* Max uid for which the above arrays are valid. */
324 static int insn_lengths_max_uid;
325
326 /* Address of insn being processed. Used by `insn_current_length'. */
327 int insn_current_address;
328
329 /* Address of insn being processed in previous iteration. */
330 int insn_last_address;
331
332 /* Known invariant alignment of the insn being processed. */
333 int insn_current_align;
334
335 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
336 gives the next following alignment insn that increases the known
337 alignment, or NULL_RTX if there is no such insn.
338 For any alignment obtained this way, we can again index uid_align with
339 its uid to obtain the next following align that in turn increases the
340 alignment, till we reach NULL_RTX; in the comments below we call the
341 sequence obtained this way for an insn the alignment chain of that
342 insn. */
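/* An illustrative sketch (not from the original sources): given the insn
   stream
       insn A, 2-byte-aligned label L1, insn B, 4-byte-aligned label L2, insn C,
   uid_align maps A to L1 and B to L2 (the next label with any alignment),
   L1 to L2 (the next label with a stricter alignment than L1's own), and
   L2 to NULL_RTX, so the alignment chain of A is L1, L2.  */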
343
344 struct label_alignment
345 {
346 short alignment;
347 short max_skip;
348 };
349
350 static rtx *uid_align;
351 static int *uid_shuid;
352 static struct label_alignment *label_align;
353
354 /* Indicate that branch shortening hasn't yet been done. */
355
356 void
357 init_insn_lengths (void)
358 {
359 if (uid_shuid)
360 {
361 free (uid_shuid);
362 uid_shuid = 0;
363 }
364 if (insn_lengths)
365 {
366 free (insn_lengths);
367 insn_lengths = 0;
368 insn_lengths_max_uid = 0;
369 }
370 #ifdef HAVE_ATTR_length
371 INSN_ADDRESSES_FREE ();
372 #endif
373 if (uid_align)
374 {
375 free (uid_align);
376 uid_align = 0;
377 }
378 }
379
380 /* Obtain the current length of an insn. If branch shortening has been done,
381 get its actual length. Otherwise, get its maximum length. */
382
383 int
384 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
385 {
386 #ifdef HAVE_ATTR_length
387 rtx body;
388 int i;
389 int length = 0;
390
391 if (insn_lengths_max_uid > INSN_UID (insn))
392 return insn_lengths[INSN_UID (insn)];
393 else
394 switch (GET_CODE (insn))
395 {
396 case NOTE:
397 case BARRIER:
398 case CODE_LABEL:
399 return 0;
400
401 case CALL_INSN:
402 length = insn_default_length (insn);
403 break;
404
405 case JUMP_INSN:
406 body = PATTERN (insn);
407 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
408 {
409 /* Alignment is machine-dependent and should be handled by
410 ADDR_VEC_ALIGN. */
411 }
412 else
413 length = insn_default_length (insn);
414 break;
415
416 case INSN:
417 body = PATTERN (insn);
418 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
419 return 0;
420
421 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
422 length = asm_insn_count (body) * insn_default_length (insn);
423 else if (GET_CODE (body) == SEQUENCE)
424 for (i = 0; i < XVECLEN (body, 0); i++)
425 length += get_attr_length (XVECEXP (body, 0, i));
426 else
427 length = insn_default_length (insn);
428 break;
429
430 default:
431 break;
432 }
433
434 #ifdef ADJUST_INSN_LENGTH
435 ADJUST_INSN_LENGTH (insn, length);
436 #endif
437 return length;
438 #else /* not HAVE_ATTR_length */
439 return 0;
440 #endif /* not HAVE_ATTR_length */
441 }
442 \f
443 /* Code to handle alignment inside shorten_branches. */
444
445 /* Here is an explanation of how the algorithm in align_fuzz can give
446 proper results:
447
448 Call a sequence of instructions beginning with alignment point X
449 and continuing until the next alignment point `block X'. When `X'
450 is used in an expression, it means the alignment value of the
451 alignment point.
452
453 Call the distance between the start of the first insn of block X, and
454 the end of the last insn of block X `IX', for the `inner size of X'.
455 This is clearly the sum of the instruction lengths.
456
457 Likewise with the next alignment-delimited block following X, which we
458 shall call block Y.
459
460 Call the distance between the start of the first insn of block X, and
461 the start of the first insn of block Y `OX', for the `outer size of X'.
462
463 The estimated padding is then OX - IX.
464
465 OX can be safely estimated as
466
467 if (X >= Y)
468 OX = round_up(IX, Y)
469 else
470 OX = round_up(IX, X) + Y - X
471
472 Clearly est(IX) >= real(IX), because that only depends on the
473 instruction lengths, and those being overestimated is a given.
474
475 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
476 we needn't worry about that when thinking about OX.
477
478 When X >= Y, the alignment provided by Y adds no uncertainty factor
479 for branch ranges starting before X, so we can just round what we have.
480 But when X < Y, we don't know anything about the, so to speak,
481 `middle bits', so we have to assume the worst when aligning up from an
482 address mod X to one mod Y, which is Y - X. */
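/* A small worked example, using assumed numbers: with IX = 10, X = 4 and
   Y = 8 we have X < Y, so OX = round_up (10, 4) + 8 - 4 = 16 and the
   estimated padding is OX - IX = 6 bytes.  With X = 8 and Y = 4 instead,
   X >= Y gives OX = round_up (10, 4) = 12, i.e. at most 2 bytes of
   padding.  */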
483
484 #ifndef LABEL_ALIGN
485 #define LABEL_ALIGN(LABEL) align_labels_log
486 #endif
487
488 #ifndef LABEL_ALIGN_MAX_SKIP
489 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
490 #endif
491
492 #ifndef LOOP_ALIGN
493 #define LOOP_ALIGN(LABEL) align_loops_log
494 #endif
495
496 #ifndef LOOP_ALIGN_MAX_SKIP
497 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
498 #endif
499
500 #ifndef LABEL_ALIGN_AFTER_BARRIER
501 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
502 #endif
503
504 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
505 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
506 #endif
507
508 #ifndef JUMP_ALIGN
509 #define JUMP_ALIGN(LABEL) align_jumps_log
510 #endif
511
512 #ifndef JUMP_ALIGN_MAX_SKIP
513 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
514 #endif
515
516 #ifndef ADDR_VEC_ALIGN
517 static int
518 final_addr_vec_align (rtx addr_vec)
519 {
520 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
521
522 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
523 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
524 return exact_log2 (align);
525
526 }
527
528 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
529 #endif
530
531 #ifndef INSN_LENGTH_ALIGNMENT
532 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
533 #endif
534
535 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
536
537 static int min_labelno, max_labelno;
538
539 #define LABEL_TO_ALIGNMENT(LABEL) \
540 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
541
542 #define LABEL_TO_MAX_SKIP(LABEL) \
543 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
544
545 /* For the benefit of port-specific code, do this also as a function. */
546
547 int
548 label_to_alignment (rtx label)
549 {
550 return LABEL_TO_ALIGNMENT (label);
551 }
552
553 #ifdef HAVE_ATTR_length
554 /* The differences in addresses
555 between a branch and its target might grow or shrink depending on
556 the alignment the start insn of the range (the branch for a forward
557 branch or the label for a backward branch) starts out on; if these
558 differences are used naively, they can even oscillate infinitely.
559 We therefore want to compute a 'worst case' address difference that
560 is independent of the alignment the start insn of the range ends
561 up on, and that is at least as large as the actual difference.
562 The function align_fuzz calculates the amount we have to add to the
563 naively computed difference, by traversing the part of the alignment
564 chain of the start insn of the range that is in front of the end insn
565 of the range, and considering for each alignment the maximum amount
566 that it might contribute to a size increase.
567
568 For casesi tables, we also want to know worst case minimum amounts of
569 address difference, in case a machine description wants to introduce
570 some common offset that is added to all offsets in a table.
571 For this purpose, align_fuzz with a growth argument of 0 computes the
572 appropriate adjustment. */
573
574 /* Compute the maximum delta by which the difference of the addresses of
575 START and END might grow / shrink due to a different address for start
576 which changes the size of alignment insns between START and END.
577 KNOWN_ALIGN_LOG is the alignment known for START.
578 GROWTH should be ~0 if the objective is to compute potential code size
579 increase, and 0 if the objective is to compute potential shrink.
580 The return value is undefined for any other value of GROWTH. */
581
582 static int
583 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
584 {
585 int uid = INSN_UID (start);
586 rtx align_label;
587 int known_align = 1 << known_align_log;
588 int end_shuid = INSN_SHUID (end);
589 int fuzz = 0;
590
591 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
592 {
593 int align_addr, new_align;
594
595 uid = INSN_UID (align_label);
596 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
597 if (uid_shuid[uid] > end_shuid)
598 break;
599 known_align_log = LABEL_TO_ALIGNMENT (align_label);
600 new_align = 1 << known_align_log;
601 if (new_align < known_align)
602 continue;
603 fuzz += (-align_addr ^ growth) & (new_align - known_align);
604 known_align = new_align;
605 }
606 return fuzz;
607 }
608
609 /* Compute a worst-case reference address of a branch so that it
610 can be safely used in the presence of aligned labels. Since the
611 size of the branch itself is unknown, the size of the branch is
612 not included in the range. I.e. for a forward branch, the reference
613 address is the end address of the branch as known from the previous
614 branch shortening pass, minus a value to account for possible size
615 increase due to alignment. For a backward branch, it is the start
616 address of the branch as known from the current pass, plus a value
617 to account for possible size increase due to alignment.
618 NB.: Therefore, the maximum offset allowed for backward branches needs
619 to exclude the branch size. */
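/* Illustrative example with assumed numbers: for a forward branch whose
   end address from the previous pass is 100 and whose alignment fuzz up
   to the target is 6, the reference address is 100 - 6 = 94; for a
   backward branch currently starting at address 100 with a fuzz of 6,
   it is 100 + 6 = 106.  */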
620
621 int
622 insn_current_reference_address (rtx branch)
623 {
624 rtx dest, seq;
625 int seq_uid;
626
627 if (! INSN_ADDRESSES_SET_P ())
628 return 0;
629
630 seq = NEXT_INSN (PREV_INSN (branch));
631 seq_uid = INSN_UID (seq);
632 if (!JUMP_P (branch))
633 /* This can happen for example on the PA; the objective is to know the
634 offset to address something in front of the start of the function.
635 Thus, we can treat it like a backward branch.
636 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
637 any alignment we'd encounter, so we skip the call to align_fuzz. */
638 return insn_current_address;
639 dest = JUMP_LABEL (branch);
640
641 /* BRANCH has no proper alignment chain set, so use SEQ.
642 BRANCH also has no INSN_SHUID. */
643 if (INSN_SHUID (seq) < INSN_SHUID (dest))
644 {
645 /* Forward branch. */
646 return (insn_last_address + insn_lengths[seq_uid]
647 - align_fuzz (seq, dest, length_unit_log, ~0));
648 }
649 else
650 {
651 /* Backward branch. */
652 return (insn_current_address
653 + align_fuzz (dest, seq, length_unit_log, ~0));
654 }
655 }
656 #endif /* HAVE_ATTR_length */
657 \f
658 void
659 compute_alignments (void)
660 {
661 int log, max_skip, max_log;
662 basic_block bb;
663
664 if (label_align)
665 {
666 free (label_align);
667 label_align = 0;
668 }
669
670 max_labelno = max_label_num ();
671 min_labelno = get_first_label_num ();
672 label_align = xcalloc (max_labelno - min_labelno + 1,
673 sizeof (struct label_alignment));
674
675 /* If not optimizing or optimizing for size, don't assign any alignments. */
676 if (! optimize || optimize_size)
677 return;
678
679 FOR_EACH_BB (bb)
680 {
681 rtx label = BB_HEAD (bb);
682 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
683 edge e;
684
685 if (!LABEL_P (label)
686 || probably_never_executed_bb_p (bb))
687 continue;
688 max_log = LABEL_ALIGN (label);
689 max_skip = LABEL_ALIGN_MAX_SKIP;
690
691 for (e = bb->pred; e; e = e->pred_next)
692 {
693 if (e->flags & EDGE_FALLTHRU)
694 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
695 else
696 branch_frequency += EDGE_FREQUENCY (e);
697 }
698
699 /* There are two reasons to align a block with no incoming fallthru edge:
700 1) to avoid fetch stalls when the branch destination is near a cache boundary
701 2) to improve cache efficiency in case the previous block is not executed
702 (so it does not need to be in the cache).
703
704 To catch the first case, we align frequently executed blocks.
705 To catch the second, we align blocks that are executed more frequently
706 than their predecessor and whose predecessor is unlikely to be executed
707 when the function is called. */
708
709 if (!has_fallthru
710 && (branch_frequency > BB_FREQ_MAX / 10
711 || (bb->frequency > bb->prev_bb->frequency * 10
712 && (bb->prev_bb->frequency
713 <= ENTRY_BLOCK_PTR->frequency / 2))))
714 {
715 log = JUMP_ALIGN (label);
716 if (max_log < log)
717 {
718 max_log = log;
719 max_skip = JUMP_ALIGN_MAX_SKIP;
720 }
721 }
722 /* In case the block is frequent and reached mostly by non-fallthru edges,
723 align it. It is most likely the first block of a loop. */
724 if (has_fallthru
725 && maybe_hot_bb_p (bb)
726 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
727 && branch_frequency > fallthru_frequency * 2)
728 {
729 log = LOOP_ALIGN (label);
730 if (max_log < log)
731 {
732 max_log = log;
733 max_skip = LOOP_ALIGN_MAX_SKIP;
734 }
735 }
736 LABEL_TO_ALIGNMENT (label) = max_log;
737 LABEL_TO_MAX_SKIP (label) = max_skip;
738 }
739 }
740 \f
741 /* Make a pass over all insns and compute their actual lengths by shortening
742 any branches of variable length if possible. */
743
744 /* shorten_branches might be called multiple times: for example, the SH
745 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
746 In order to do this, it needs proper length information, which it obtains
747 by calling shorten_branches. This cannot be collapsed with
748 shorten_branches itself into a single pass unless we also want to integrate
749 reorg.c, since the branch splitting exposes new instructions with delay
750 slots. */
751
752 void
753 shorten_branches (rtx first ATTRIBUTE_UNUSED)
754 {
755 rtx insn;
756 int max_uid;
757 int i;
758 int max_log;
759 int max_skip;
760 #ifdef HAVE_ATTR_length
761 #define MAX_CODE_ALIGN 16
762 rtx seq;
763 int something_changed = 1;
764 char *varying_length;
765 rtx body;
766 int uid;
767 rtx align_tab[MAX_CODE_ALIGN];
768
769 #endif
770
771 /* Compute maximum UID and allocate label_align / uid_shuid. */
772 max_uid = get_max_uid ();
773
774 /* Free uid_shuid before reallocating it. */
775 free (uid_shuid);
776
777 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
778
779 if (max_labelno != max_label_num ())
780 {
781 int old = max_labelno;
782 int n_labels;
783 int n_old_labels;
784
785 max_labelno = max_label_num ();
786
787 n_labels = max_labelno - min_labelno + 1;
788 n_old_labels = old - min_labelno + 1;
789
790 label_align = xrealloc (label_align,
791 n_labels * sizeof (struct label_alignment));
792
793 /* The range of labels grows monotonically in the function. An abort here
794 means that the initialization of the array got lost. */
795 if (n_old_labels > n_labels)
796 abort ();
797
798 memset (label_align + n_old_labels, 0,
799 (n_labels - n_old_labels) * sizeof (struct label_alignment));
800 }
801
802 /* Initialize label_align and set up uid_shuid to be strictly
803 monotonically rising with insn order. */
804 /* We use max_log here to keep track of the maximum alignment we want to
805 impose on the next CODE_LABEL (or the current one if we are processing
806 the CODE_LABEL itself). */
807
808 max_log = 0;
809 max_skip = 0;
810
811 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
812 {
813 int log;
814
815 INSN_SHUID (insn) = i++;
816 if (INSN_P (insn))
817 {
818 /* reorg might make the first insn of a loop be run only once,
819 and delete the label in front of it. Then we want to apply
820 the loop alignment to the new label created by reorg, which
821 is separated by the former loop start insn from the
822 NOTE_INSN_LOOP_BEG. */
823 }
824 else if (LABEL_P (insn))
825 {
826 rtx next;
827
828 /* Merge in alignments computed by compute_alignments. */
829 log = LABEL_TO_ALIGNMENT (insn);
830 if (max_log < log)
831 {
832 max_log = log;
833 max_skip = LABEL_TO_MAX_SKIP (insn);
834 }
835
836 log = LABEL_ALIGN (insn);
837 if (max_log < log)
838 {
839 max_log = log;
840 max_skip = LABEL_ALIGN_MAX_SKIP;
841 }
842 next = NEXT_INSN (insn);
843 /* ADDR_VECs only take room if read-only data goes into the text
844 section. */
845 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
846 if (next && JUMP_P (next))
847 {
848 rtx nextbody = PATTERN (next);
849 if (GET_CODE (nextbody) == ADDR_VEC
850 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
851 {
852 log = ADDR_VEC_ALIGN (next);
853 if (max_log < log)
854 {
855 max_log = log;
856 max_skip = LABEL_ALIGN_MAX_SKIP;
857 }
858 }
859 }
860 LABEL_TO_ALIGNMENT (insn) = max_log;
861 LABEL_TO_MAX_SKIP (insn) = max_skip;
862 max_log = 0;
863 max_skip = 0;
864 }
865 else if (BARRIER_P (insn))
866 {
867 rtx label;
868
869 for (label = insn; label && ! INSN_P (label);
870 label = NEXT_INSN (label))
871 if (LABEL_P (label))
872 {
873 log = LABEL_ALIGN_AFTER_BARRIER (insn);
874 if (max_log < log)
875 {
876 max_log = log;
877 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
878 }
879 break;
880 }
881 }
882 }
883 #ifdef HAVE_ATTR_length
884
885 /* Allocate the rest of the arrays. */
886 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
887 insn_lengths_max_uid = max_uid;
888 /* Syntax errors can lead to labels being outside of the main insn stream.
889 Initialize insn_addresses, so that we get reproducible results. */
890 INSN_ADDRESSES_ALLOC (max_uid);
891
892 varying_length = xcalloc (max_uid, sizeof (char));
893
894 /* Initialize uid_align. We scan instructions
895 from end to start, and keep in align_tab[n] the last seen insn
896 that does an alignment of at least n+1, i.e. the successor
897 in the alignment chain for an insn that does / has a known
898 alignment of n. */
899 uid_align = xcalloc (max_uid, sizeof *uid_align);
900
901 for (i = MAX_CODE_ALIGN; --i >= 0;)
902 align_tab[i] = NULL_RTX;
903 seq = get_last_insn ();
904 for (; seq; seq = PREV_INSN (seq))
905 {
906 int uid = INSN_UID (seq);
907 int log;
908 log = (LABEL_P (seq) ? LABEL_TO_ALIGNMENT (seq) : 0);
909 uid_align[uid] = align_tab[0];
910 if (log)
911 {
912 /* Found an alignment label. */
913 uid_align[uid] = align_tab[log];
914 for (i = log - 1; i >= 0; i--)
915 align_tab[i] = seq;
916 }
917 }
918 #ifdef CASE_VECTOR_SHORTEN_MODE
919 if (optimize)
920 {
921 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
922 label fields. */
923
924 int min_shuid = INSN_SHUID (get_insns ()) - 1;
925 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
926 int rel;
927
928 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
929 {
930 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
931 int len, i, min, max, insn_shuid;
932 int min_align;
933 addr_diff_vec_flags flags;
934
935 if (!JUMP_P (insn)
936 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
937 continue;
938 pat = PATTERN (insn);
939 len = XVECLEN (pat, 1);
940 if (len <= 0)
941 abort ();
942 min_align = MAX_CODE_ALIGN;
943 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
944 {
945 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
946 int shuid = INSN_SHUID (lab);
947 if (shuid < min)
948 {
949 min = shuid;
950 min_lab = lab;
951 }
952 if (shuid > max)
953 {
954 max = shuid;
955 max_lab = lab;
956 }
957 if (min_align > LABEL_TO_ALIGNMENT (lab))
958 min_align = LABEL_TO_ALIGNMENT (lab);
959 }
960 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
961 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
962 insn_shuid = INSN_SHUID (insn);
963 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
964 flags.min_align = min_align;
965 flags.base_after_vec = rel > insn_shuid;
966 flags.min_after_vec = min > insn_shuid;
967 flags.max_after_vec = max > insn_shuid;
968 flags.min_after_base = min > rel;
969 flags.max_after_base = max > rel;
970 ADDR_DIFF_VEC_FLAGS (pat) = flags;
971 }
972 }
973 #endif /* CASE_VECTOR_SHORTEN_MODE */
974
975 /* Compute initial lengths, addresses, and varying flags for each insn. */
976 for (insn_current_address = 0, insn = first;
977 insn != 0;
978 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
979 {
980 uid = INSN_UID (insn);
981
982 insn_lengths[uid] = 0;
983
984 if (LABEL_P (insn))
985 {
986 int log = LABEL_TO_ALIGNMENT (insn);
987 if (log)
988 {
989 int align = 1 << log;
990 int new_address = (insn_current_address + align - 1) & -align;
991 insn_lengths[uid] = new_address - insn_current_address;
992 }
993 }
994
995 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
996
997 if (NOTE_P (insn) || BARRIER_P (insn)
998 || LABEL_P (insn))
999 continue;
1000 if (INSN_DELETED_P (insn))
1001 continue;
1002
1003 body = PATTERN (insn);
1004 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1005 {
1006 /* This only takes room if read-only data goes into the text
1007 section. */
1008 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1009 insn_lengths[uid] = (XVECLEN (body,
1010 GET_CODE (body) == ADDR_DIFF_VEC)
1011 * GET_MODE_SIZE (GET_MODE (body)));
1012 /* Alignment is handled by ADDR_VEC_ALIGN. */
1013 }
1014 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1015 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1016 else if (GET_CODE (body) == SEQUENCE)
1017 {
1018 int i;
1019 int const_delay_slots;
1020 #ifdef DELAY_SLOTS
1021 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1022 #else
1023 const_delay_slots = 0;
1024 #endif
1025 /* Inside a delay slot sequence, we do not do any branch shortening
1026 if the shortening could change the number of delay slots
1027 of the branch. */
1028 for (i = 0; i < XVECLEN (body, 0); i++)
1029 {
1030 rtx inner_insn = XVECEXP (body, 0, i);
1031 int inner_uid = INSN_UID (inner_insn);
1032 int inner_length;
1033
1034 if (GET_CODE (body) == ASM_INPUT
1035 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1036 inner_length = (asm_insn_count (PATTERN (inner_insn))
1037 * insn_default_length (inner_insn));
1038 else
1039 inner_length = insn_default_length (inner_insn);
1040
1041 insn_lengths[inner_uid] = inner_length;
1042 if (const_delay_slots)
1043 {
1044 if ((varying_length[inner_uid]
1045 = insn_variable_length_p (inner_insn)) != 0)
1046 varying_length[uid] = 1;
1047 INSN_ADDRESSES (inner_uid) = (insn_current_address
1048 + insn_lengths[uid]);
1049 }
1050 else
1051 varying_length[inner_uid] = 0;
1052 insn_lengths[uid] += inner_length;
1053 }
1054 }
1055 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1056 {
1057 insn_lengths[uid] = insn_default_length (insn);
1058 varying_length[uid] = insn_variable_length_p (insn);
1059 }
1060
1061 /* If needed, do any adjustment. */
1062 #ifdef ADJUST_INSN_LENGTH
1063 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1064 if (insn_lengths[uid] < 0)
1065 fatal_insn ("negative insn length", insn);
1066 #endif
1067 }
1068
1069 /* Now loop over all the insns finding varying length insns. For each,
1070 get the current insn length. If it has changed, reflect the change.
1071 When nothing changes for a full pass, we are done. */
1072
1073 while (something_changed)
1074 {
1075 something_changed = 0;
1076 insn_current_align = MAX_CODE_ALIGN - 1;
1077 for (insn_current_address = 0, insn = first;
1078 insn != 0;
1079 insn = NEXT_INSN (insn))
1080 {
1081 int new_length;
1082 #ifdef ADJUST_INSN_LENGTH
1083 int tmp_length;
1084 #endif
1085 int length_align;
1086
1087 uid = INSN_UID (insn);
1088
1089 if (LABEL_P (insn))
1090 {
1091 int log = LABEL_TO_ALIGNMENT (insn);
1092 if (log > insn_current_align)
1093 {
1094 int align = 1 << log;
1095 int new_address= (insn_current_address + align - 1) & -align;
1096 insn_lengths[uid] = new_address - insn_current_address;
1097 insn_current_align = log;
1098 insn_current_address = new_address;
1099 }
1100 else
1101 insn_lengths[uid] = 0;
1102 INSN_ADDRESSES (uid) = insn_current_address;
1103 continue;
1104 }
1105
1106 length_align = INSN_LENGTH_ALIGNMENT (insn);
1107 if (length_align < insn_current_align)
1108 insn_current_align = length_align;
1109
1110 insn_last_address = INSN_ADDRESSES (uid);
1111 INSN_ADDRESSES (uid) = insn_current_address;
1112
1113 #ifdef CASE_VECTOR_SHORTEN_MODE
1114 if (optimize && JUMP_P (insn)
1115 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1116 {
1117 rtx body = PATTERN (insn);
1118 int old_length = insn_lengths[uid];
1119 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1120 rtx min_lab = XEXP (XEXP (body, 2), 0);
1121 rtx max_lab = XEXP (XEXP (body, 3), 0);
1122 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1123 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1124 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1125 rtx prev;
1126 int rel_align = 0;
1127 addr_diff_vec_flags flags;
1128
1129 /* Avoid automatic aggregate initialization. */
1130 flags = ADDR_DIFF_VEC_FLAGS (body);
1131
1132 /* Try to find a known alignment for rel_lab. */
1133 for (prev = rel_lab;
1134 prev
1135 && ! insn_lengths[INSN_UID (prev)]
1136 && ! (varying_length[INSN_UID (prev)] & 1);
1137 prev = PREV_INSN (prev))
1138 if (varying_length[INSN_UID (prev)] & 2)
1139 {
1140 rel_align = LABEL_TO_ALIGNMENT (prev);
1141 break;
1142 }
1143
1144 /* See the comment on addr_diff_vec_flags in rtl.h for the
1145 meaning of the flags values. base: REL_LAB vec: INSN */
1146 /* Anything after INSN still has addresses from the last
1147 pass; adjust these so that they reflect our current
1148 estimate for this pass. */
1149 if (flags.base_after_vec)
1150 rel_addr += insn_current_address - insn_last_address;
1151 if (flags.min_after_vec)
1152 min_addr += insn_current_address - insn_last_address;
1153 if (flags.max_after_vec)
1154 max_addr += insn_current_address - insn_last_address;
1155 /* We want to know the worst case, i.e. lowest possible value
1156 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1157 its offset is positive, and we have to be wary of code shrink;
1158 otherwise, it is negative, and we have to be wary of code
1159 size increase. */
1160 if (flags.min_after_base)
1161 {
1162 /* If INSN is between REL_LAB and MIN_LAB, the size
1163 changes we are about to make can change the alignment
1164 within the observed offset; therefore we have to break
1165 it up into two parts that are independent. */
1166 if (! flags.base_after_vec && flags.min_after_vec)
1167 {
1168 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1169 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1170 }
1171 else
1172 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1173 }
1174 else
1175 {
1176 if (flags.base_after_vec && ! flags.min_after_vec)
1177 {
1178 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1179 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1180 }
1181 else
1182 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1183 }
1184 /* Likewise, determine the highest possible value
1185 for the offset of MAX_LAB. */
1186 if (flags.max_after_base)
1187 {
1188 if (! flags.base_after_vec && flags.max_after_vec)
1189 {
1190 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1191 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1192 }
1193 else
1194 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1195 }
1196 else
1197 {
1198 if (flags.base_after_vec && ! flags.max_after_vec)
1199 {
1200 max_addr += align_fuzz (max_lab, insn, 0, 0);
1201 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1202 }
1203 else
1204 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1205 }
1206 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1207 max_addr - rel_addr,
1208 body));
1209 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1210 {
1211 insn_lengths[uid]
1212 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1213 insn_current_address += insn_lengths[uid];
1214 if (insn_lengths[uid] != old_length)
1215 something_changed = 1;
1216 }
1217
1218 continue;
1219 }
1220 #endif /* CASE_VECTOR_SHORTEN_MODE */
1221
1222 if (! (varying_length[uid]))
1223 {
1224 if (NONJUMP_INSN_P (insn)
1225 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1226 {
1227 int i;
1228
1229 body = PATTERN (insn);
1230 for (i = 0; i < XVECLEN (body, 0); i++)
1231 {
1232 rtx inner_insn = XVECEXP (body, 0, i);
1233 int inner_uid = INSN_UID (inner_insn);
1234
1235 INSN_ADDRESSES (inner_uid) = insn_current_address;
1236
1237 insn_current_address += insn_lengths[inner_uid];
1238 }
1239 }
1240 else
1241 insn_current_address += insn_lengths[uid];
1242
1243 continue;
1244 }
1245
1246 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
1247 {
1248 int i;
1249
1250 body = PATTERN (insn);
1251 new_length = 0;
1252 for (i = 0; i < XVECLEN (body, 0); i++)
1253 {
1254 rtx inner_insn = XVECEXP (body, 0, i);
1255 int inner_uid = INSN_UID (inner_insn);
1256 int inner_length;
1257
1258 INSN_ADDRESSES (inner_uid) = insn_current_address;
1259
1260 /* insn_current_length returns 0 for insns with a
1261 non-varying length. */
1262 if (! varying_length[inner_uid])
1263 inner_length = insn_lengths[inner_uid];
1264 else
1265 inner_length = insn_current_length (inner_insn);
1266
1267 if (inner_length != insn_lengths[inner_uid])
1268 {
1269 insn_lengths[inner_uid] = inner_length;
1270 something_changed = 1;
1271 }
1272 insn_current_address += insn_lengths[inner_uid];
1273 new_length += inner_length;
1274 }
1275 }
1276 else
1277 {
1278 new_length = insn_current_length (insn);
1279 insn_current_address += new_length;
1280 }
1281
1282 #ifdef ADJUST_INSN_LENGTH
1283 /* If needed, do any adjustment. */
1284 tmp_length = new_length;
1285 ADJUST_INSN_LENGTH (insn, new_length);
1286 insn_current_address += (new_length - tmp_length);
1287 #endif
1288
1289 if (new_length != insn_lengths[uid])
1290 {
1291 insn_lengths[uid] = new_length;
1292 something_changed = 1;
1293 }
1294 }
1295 /* For a non-optimizing compile, do only a single pass. */
1296 if (!optimize)
1297 break;
1298 }
1299
1300 free (varying_length);
1301
1302 #endif /* HAVE_ATTR_length */
1303 }
1304
1305 #ifdef HAVE_ATTR_length
1306 /* Given the body of an INSN known to be generated by an ASM statement, return
1307 the number of machine instructions likely to be generated for this insn.
1308 This is used to compute its length. */
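/* For example (illustrative): with IS_ASM_LOGICAL_LINE_SEPARATOR matching
   ';', the template "mov %0,%1; add %0,%2\n\tsub %0,%3" contains one ';'
   and one '\n', so asm_insn_count returns 3.  */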
1309
1310 static int
1311 asm_insn_count (rtx body)
1312 {
1313 const char *template;
1314 int count = 1;
1315
1316 if (GET_CODE (body) == ASM_INPUT)
1317 template = XSTR (body, 0);
1318 else
1319 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1320
1321 for (; *template; template++)
1322 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1323 count++;
1324
1325 return count;
1326 }
1327 #endif
1328 \f
1329 /* Output assembler code for the start of a function,
1330 and initialize some of the variables in this file
1331 for the new function. The label for the function and associated
1332 assembler pseudo-ops have already been output in `assemble_start_function'.
1333
1334 FIRST is the first insn of the rtl for the function being compiled.
1335 FILE is the file to write assembler code to.
1336 OPTIMIZE is nonzero if we should eliminate redundant
1337 test and compare insns. */
1338
1339 void
1340 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1341 int optimize ATTRIBUTE_UNUSED)
1342 {
1343 block_depth = 0;
1344
1345 this_is_asm_operands = 0;
1346
1347 last_filename = locator_file (prologue_locator);
1348 last_linenum = locator_line (prologue_locator);
1349
1350 high_block_linenum = high_function_linenum = last_linenum;
1351
1352 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1353
1354 #if defined (DWARF2_UNWIND_INFO) || defined (TARGET_UNWIND_INFO)
1355 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1356 dwarf2out_begin_prologue (0, NULL);
1357 #endif
1358
1359 #ifdef LEAF_REG_REMAP
1360 if (current_function_uses_only_leaf_regs)
1361 leaf_renumber_regs (first);
1362 #endif
1363
1364 /* The Sun386i and perhaps other machines don't work right
1365 if the profiling code comes after the prologue. */
1366 #ifdef PROFILE_BEFORE_PROLOGUE
1367 if (current_function_profile)
1368 profile_function (file);
1369 #endif /* PROFILE_BEFORE_PROLOGUE */
1370
1371 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1372 if (dwarf2out_do_frame ())
1373 dwarf2out_frame_debug (NULL_RTX);
1374 #endif
1375
1376 /* If debugging, assign block numbers to all of the blocks in this
1377 function. */
1378 if (write_symbols)
1379 {
1380 remove_unnecessary_notes ();
1381 reemit_insn_block_notes ();
1382 number_blocks (current_function_decl);
1383 /* We never actually put out begin/end notes for the top-level
1384 block in the function. But, conceptually, that block is
1385 always needed. */
1386 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1387 }
1388
1389 /* First output the function prologue: code to set up the stack frame. */
1390 targetm.asm_out.function_prologue (file, get_frame_size ());
1391
1392 /* If the machine represents the prologue as RTL, the profiling code must
1393 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1394 #ifdef HAVE_prologue
1395 if (! HAVE_prologue)
1396 #endif
1397 profile_after_prologue (file);
1398 }
1399
1400 static void
1401 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1402 {
1403 #ifndef PROFILE_BEFORE_PROLOGUE
1404 if (current_function_profile)
1405 profile_function (file);
1406 #endif /* not PROFILE_BEFORE_PROLOGUE */
1407 }
1408
1409 static void
1410 profile_function (FILE *file ATTRIBUTE_UNUSED)
1411 {
1412 #ifndef NO_PROFILE_COUNTERS
1413 # define NO_PROFILE_COUNTERS 0
1414 #endif
1415 #if defined(ASM_OUTPUT_REG_PUSH)
1416 int sval = current_function_returns_struct;
1417 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1418 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1419 int cxt = cfun->static_chain_decl != NULL;
1420 #endif
1421 #endif /* ASM_OUTPUT_REG_PUSH */
1422
1423 if (! NO_PROFILE_COUNTERS)
1424 {
1425 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1426 data_section ();
1427 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1428 targetm.asm_out.internal_label (file, "LP", current_function_funcdef_no);
1429 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1430 }
1431
1432 function_section (current_function_decl);
1433
1434 #if defined(ASM_OUTPUT_REG_PUSH)
1435 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1436 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1437 #endif
1438
1439 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1440 if (cxt)
1441 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1442 #else
1443 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1444 if (cxt)
1445 {
1446 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1447 }
1448 #endif
1449 #endif
1450
1451 FUNCTION_PROFILER (file, current_function_funcdef_no);
1452
1453 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1454 if (cxt)
1455 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1456 #else
1457 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1458 if (cxt)
1459 {
1460 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1461 }
1462 #endif
1463 #endif
1464
1465 #if defined(ASM_OUTPUT_REG_PUSH)
1466 if (sval && svrtx != NULL_RTX && REG_P (svrtx))
1467 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1468 #endif
1469 }
1470
1471 /* Output assembler code for the end of a function.
1472 For clarity, args are same as those of `final_start_function'
1473 even though not all of them are needed. */
1474
1475 void
1476 final_end_function (void)
1477 {
1478 app_disable ();
1479
1480 (*debug_hooks->end_function) (high_function_linenum);
1481
1482 /* Finally, output the function epilogue:
1483 code to restore the stack frame and return to the caller. */
1484 targetm.asm_out.function_epilogue (asm_out_file, get_frame_size ());
1485
1486 /* And debug output. */
1487 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1488
1489 #if defined (DWARF2_UNWIND_INFO)
1490 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1491 && dwarf2out_do_frame ())
1492 dwarf2out_end_epilogue (last_linenum, last_filename);
1493 #endif
1494 }
1495 \f
1496 /* Output assembler code for some insns: all or part of a function.
1497 For description of args, see `final_start_function', above.
1498
1499 PRESCAN is 1 if we are not really outputting,
1500 just scanning as if we were outputting.
1501 Prescanning deletes and rearranges insns just like ordinary output.
1502 PRESCAN is -2 if we are outputting after having prescanned.
1503 In this case, don't try to delete or rearrange insns
1504 because that has already been done.
1505 Prescanning is done only on certain machines. */
1506
1507 void
1508 final (rtx first, FILE *file, int optimize, int prescan)
1509 {
1510 rtx insn;
1511 int max_uid = 0;
1512 int seen = 0;
1513
1514 last_ignored_compare = 0;
1515
1516 #ifdef SDB_DEBUGGING_INFO
1517 /* When producing SDB debugging info, delete troublesome line number
1518 notes from inlined functions in other files as well as duplicate
1519 line number notes. */
1520 if (write_symbols == SDB_DEBUG)
1521 {
1522 rtx last = 0;
1523 for (insn = first; insn; insn = NEXT_INSN (insn))
1524 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
1525 {
1526 if (last != 0
1527 #ifdef USE_MAPPED_LOCATION
1528 && NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last)
1529 #else
1530 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1531 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)
1532 #endif
1533 )
1534 {
1535 delete_insn (insn); /* Use delete_note. */
1536 continue;
1537 }
1538 last = insn;
1539 }
1540 }
1541 #endif
1542
1543 for (insn = first; insn; insn = NEXT_INSN (insn))
1544 {
1545 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1546 max_uid = INSN_UID (insn);
1547 #ifdef HAVE_cc0
1548 /* If CC tracking across branches is enabled, record the insn which
1549 jumps to each label reached from only one place. */
1550 if (optimize && JUMP_P (insn))
1551 {
1552 rtx lab = JUMP_LABEL (insn);
1553 if (lab && LABEL_NUSES (lab) == 1)
1554 {
1555 LABEL_REFS (lab) = insn;
1556 }
1557 }
1558 #endif
1559 }
1560
1561 init_recog ();
1562
1563 CC_STATUS_INIT;
1564
1565 /* Output the insns. */
1566 for (insn = NEXT_INSN (first); insn;)
1567 {
1568 #ifdef HAVE_ATTR_length
1569 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1570 {
1571 /* This can be triggered by bugs elsewhere in the compiler if
1572 new insns are created after init_insn_lengths is called. */
1573 if (NOTE_P (insn))
1574 insn_current_address = -1;
1575 else
1576 abort ();
1577 }
1578 else
1579 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1580 #endif /* HAVE_ATTR_length */
1581
1582 insn = final_scan_insn (insn, file, optimize, prescan, 0, &seen);
1583 }
1584 }
1585 \f
1586 const char *
1587 get_insn_template (int code, rtx insn)
1588 {
1589 switch (insn_data[code].output_format)
1590 {
1591 case INSN_OUTPUT_FORMAT_SINGLE:
1592 return insn_data[code].output.single;
1593 case INSN_OUTPUT_FORMAT_MULTI:
1594 return insn_data[code].output.multi[which_alternative];
1595 case INSN_OUTPUT_FORMAT_FUNCTION:
1596 if (insn == NULL)
1597 abort ();
1598 return (*insn_data[code].output.function) (recog_data.operand, insn);
1599
1600 default:
1601 abort ();
1602 }
1603 }
1604
1605 /* Emit the appropriate declaration for an alternate-entry-point
1606 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1607 LABEL_KIND != LABEL_NORMAL.
1608
1609 The case fall-through in this function is intentional. */
1610 static void
1611 output_alternate_entry_point (FILE *file, rtx insn)
1612 {
1613 const char *name = LABEL_NAME (insn);
1614
1615 switch (LABEL_KIND (insn))
1616 {
1617 case LABEL_WEAK_ENTRY:
1618 #ifdef ASM_WEAKEN_LABEL
1619 ASM_WEAKEN_LABEL (file, name);
1620 #endif
1621 case LABEL_GLOBAL_ENTRY:
1622 targetm.asm_out.globalize_label (file, name);
1623 case LABEL_STATIC_ENTRY:
1624 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1625 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1626 #endif
1627 ASM_OUTPUT_LABEL (file, name);
1628 break;
1629
1630 case LABEL_NORMAL:
1631 default:
1632 abort ();
1633 }
1634 }
1635
1636 /* Return a boolean indicating whether there is a NOTE_INSN_UNLIKELY_EXECUTED_CODE
1637 note in the instruction chain (going forward) between the current
1638 instruction and the next 'executable' instruction. */
1639
1640 bool
1641 scan_ahead_for_unlikely_executed_note (rtx insn)
1642 {
1643 rtx temp;
1644 int bb_note_count = 0;
1645
1646 for (temp = insn; temp; temp = NEXT_INSN (temp))
1647 {
1648 if (NOTE_P (temp)
1649 && NOTE_LINE_NUMBER (temp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
1650 return true;
1651 if (NOTE_P (temp)
1652 && NOTE_LINE_NUMBER (temp) == NOTE_INSN_BASIC_BLOCK)
1653 {
1654 bb_note_count++;
1655 if (bb_note_count > 1)
1656 return false;
1657 }
1658 if (INSN_P (temp))
1659 return false;
1660 }
1661
1662 return false;
1663 }
1664
1665 /* The final scan for one insn, INSN.
1666 Args are same as in `final', except that INSN
1667 is the insn being scanned.
1668 Value returned is the next insn to be scanned.
1669
1670 NOPEEPHOLES is used to disallow peephole processing:
1671 - 0: peepholes are allowed,
1672 - 1: peepholes are not allowed,
1673 - 2: peepholes are not allowed and we are in the
1674 slot of a delayed branch.
1675
1676 SEEN is used to track the end of the prologue, for emitting
1677 debug information. We force the emission of a line note after
1678 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1679 at the beginning of the second basic block, whichever comes
1680 first. */
1681
1682 rtx
1683 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1684 int prescan, int nopeepholes, int *seen)
1685 {
1686 #ifdef HAVE_cc0
1687 rtx set;
1688 #endif
1689
1690 insn_counter++;
1691
1692 /* Ignore deleted insns. These can occur when we split insns (due to a
1693 template of "#") while not optimizing. */
1694 if (INSN_DELETED_P (insn))
1695 return NEXT_INSN (insn);
1696
1697 switch (GET_CODE (insn))
1698 {
1699 case NOTE:
1700 if (prescan > 0)
1701 break;
1702
1703 switch (NOTE_LINE_NUMBER (insn))
1704 {
1705 case NOTE_INSN_DELETED:
1706 case NOTE_INSN_LOOP_BEG:
1707 case NOTE_INSN_LOOP_END:
1708 case NOTE_INSN_FUNCTION_END:
1709 case NOTE_INSN_REPEATED_LINE_NUMBER:
1710 case NOTE_INSN_EXPECTED_VALUE:
1711 break;
1712
1713 case NOTE_INSN_UNLIKELY_EXECUTED_CODE:
1714
1715 /* The presence of this note indicates that this basic block
1716 belongs in the "cold" section of the .o file. If we are
1717 not already writing to the cold section we need to change
1718 to it. */
1719
1720 unlikely_text_section ();
1721 break;
1722
1723 case NOTE_INSN_BASIC_BLOCK:
1724
1725 /* If we are performing the optimization that partitions
1726 basic blocks into hot & cold sections of the .o file,
1727 then at the start of each new basic block, before
1728 beginning to write code for the basic block, we need to
1729 check to see whether the basic block belongs in the hot
1730 or cold section of the .o file, and change the section we
1731 are writing to appropriately. */
1732
1733 if (flag_reorder_blocks_and_partition
1734 && !scan_ahead_for_unlikely_executed_note (insn))
1735 function_section (current_function_decl);
1736
1737 #ifdef TARGET_UNWIND_INFO
1738 targetm.asm_out.unwind_emit (asm_out_file, insn);
1739 #endif
1740
1741 if (flag_debug_asm)
1742 fprintf (asm_out_file, "\t%s basic block %d\n",
1743 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1744
1745 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1746 {
1747 *seen |= SEEN_EMITTED;
1748 last_filename = NULL;
1749 }
1750 else
1751 *seen |= SEEN_BB;
1752
1753 break;
1754
1755 case NOTE_INSN_EH_REGION_BEG:
1756 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1757 NOTE_EH_HANDLER (insn));
1758 break;
1759
1760 case NOTE_INSN_EH_REGION_END:
1761 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1762 NOTE_EH_HANDLER (insn));
1763 break;
1764
1765 case NOTE_INSN_PROLOGUE_END:
1766 targetm.asm_out.function_end_prologue (file);
1767 profile_after_prologue (file);
1768
1769 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1770 {
1771 *seen |= SEEN_EMITTED;
1772 last_filename = NULL;
1773 }
1774 else
1775 *seen |= SEEN_NOTE;
1776
1777 break;
1778
1779 case NOTE_INSN_EPILOGUE_BEG:
1780 targetm.asm_out.function_begin_epilogue (file);
1781 break;
1782
1783 case NOTE_INSN_FUNCTION_BEG:
1784 app_disable ();
1785 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1786
1787 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1788 {
1789 *seen |= SEEN_EMITTED;
1790 last_filename = NULL;
1791 }
1792 else
1793 *seen |= SEEN_NOTE;
1794
1795 break;
1796
1797 case NOTE_INSN_BLOCK_BEG:
1798 if (debug_info_level == DINFO_LEVEL_NORMAL
1799 || debug_info_level == DINFO_LEVEL_VERBOSE
1800 || write_symbols == DWARF2_DEBUG
1801 || write_symbols == VMS_AND_DWARF2_DEBUG
1802 || write_symbols == VMS_DEBUG)
1803 {
1804 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1805
1806 app_disable ();
1807 ++block_depth;
1808 high_block_linenum = last_linenum;
1809
1810 /* Output debugging info about the symbol-block beginning. */
1811 (*debug_hooks->begin_block) (last_linenum, n);
1812
1813 /* Mark this block as output. */
1814 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1815 }
1816 break;
1817
1818 case NOTE_INSN_BLOCK_END:
1819 if (debug_info_level == DINFO_LEVEL_NORMAL
1820 || debug_info_level == DINFO_LEVEL_VERBOSE
1821 || write_symbols == DWARF2_DEBUG
1822 || write_symbols == VMS_AND_DWARF2_DEBUG
1823 || write_symbols == VMS_DEBUG)
1824 {
1825 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1826
1827 app_disable ();
1828
1829 /* End of a symbol-block. */
1830 --block_depth;
1831 if (block_depth < 0)
1832 abort ();
1833
1834 (*debug_hooks->end_block) (high_block_linenum, n);
1835 }
1836 break;
1837
1838 case NOTE_INSN_DELETED_LABEL:
1839 /* Emit the label. We may have deleted the CODE_LABEL because
1840 the label could be proved to be unreachable, though still
1841 referenced (in the form of having its address taken). */
1842 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1843 break;
1844
1845 case NOTE_INSN_VAR_LOCATION:
1846 (*debug_hooks->var_location) (insn);
1847 break;
1848
1849 case 0:
1850 break;
1851
1852 default:
1853 if (NOTE_LINE_NUMBER (insn) <= 0)
1854 abort ();
1855 break;
1856 }
1857 break;
1858
1859 case BARRIER:
1860 #if defined (DWARF2_UNWIND_INFO)
1861 if (dwarf2out_do_frame ())
1862 dwarf2out_frame_debug (insn);
1863 #endif
1864 break;
1865
1866 case CODE_LABEL:
1867 /* The target port might emit labels in the output function for
1868 some insn, e.g. sh.c output_branchy_insn. */
1869 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1870 {
1871 int align = LABEL_TO_ALIGNMENT (insn);
1872 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1873 int max_skip = LABEL_TO_MAX_SKIP (insn);
1874 #endif
1875
1876 if (align && NEXT_INSN (insn))
1877 {
1878 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1879 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1880 #else
1881 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1882 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1883 #else
1884 ASM_OUTPUT_ALIGN (file, align);
1885 #endif
1886 #endif
1887 }
1888 }
1889 #ifdef HAVE_cc0
1890 CC_STATUS_INIT;
1891 /* If this label is reached from only one place, set the condition
1892 codes from the instruction just before the branch. */
1893
1894 /* Disabled because some insns set cc_status in the C output code
1895 and NOTICE_UPDATE_CC alone can set incorrect status. */
1896 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1897 {
1898 rtx jump = LABEL_REFS (insn);
1899 rtx barrier = prev_nonnote_insn (insn);
1900 rtx prev;
1901 /* If the LABEL_REFS field of this label has been set to point
1902 at a branch, the predecessor of the branch is a regular
1903 insn, and that branch is the only way to reach this label,
1904 set the condition codes based on the branch and its
1905 predecessor. */
1906 if (barrier && BARRIER_P (barrier)
1907 && jump && JUMP_P (jump)
1908 && (prev = prev_nonnote_insn (jump))
1909 && NONJUMP_INSN_P (prev))
1910 {
1911 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1912 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1913 }
1914 }
1915 #endif
1916 if (prescan > 0)
1917 break;
1918
1919 if (LABEL_NAME (insn))
1920 (*debug_hooks->label) (insn);
1921
1922 /* If we are doing the optimization that partitions hot & cold
1923 basic blocks into separate sections of the .o file, we need
1924 to ensure the jump table ends up in the correct section... */
1925
1926 if (flag_reorder_blocks_and_partition
1927 && targetm.have_named_sections)
1928 {
1929 rtx tmp_table, tmp_label;
1930 if (LABEL_P (insn)
1931 && tablejump_p (NEXT_INSN (insn), &tmp_label, &tmp_table))
1932 {
1933 /* Do nothing; Do NOT change the current section. */
1934 }
1935 else if (scan_ahead_for_unlikely_executed_note (insn))
1936 unlikely_text_section ();
1937 else if (in_unlikely_text_section ())
1938 function_section (current_function_decl);
1939 }
1940
1941 if (app_on)
1942 {
1943 fputs (ASM_APP_OFF, file);
1944 app_on = 0;
1945 }
1946 if (NEXT_INSN (insn) != 0
1947 && JUMP_P (NEXT_INSN (insn)))
1948 {
1949 rtx nextbody = PATTERN (NEXT_INSN (insn));
1950
1951 /* If this label is followed by a jump-table,
1952 make sure we put the label in the read-only section. Also
1953 possibly write the label and jump table together. */
1954
1955 if (GET_CODE (nextbody) == ADDR_VEC
1956 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1957 {
1958 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1959 /* In this case, the case vector is being moved by the
1960 target, so don't output the label at all. Leave that
1961 to the back end macros. */
1962 #else
1963 if (! JUMP_TABLES_IN_TEXT_SECTION)
1964 {
1965 int log_align;
1966
1967 targetm.asm_out.function_rodata_section (current_function_decl);
1968
1969 #ifdef ADDR_VEC_ALIGN
1970 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1971 #else
1972 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1973 #endif
1974 ASM_OUTPUT_ALIGN (file, log_align);
1975 }
1976 else
1977 function_section (current_function_decl);
1978
1979 #ifdef ASM_OUTPUT_CASE_LABEL
1980 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1981 NEXT_INSN (insn));
1982 #else
1983 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1984 #endif
1985 #endif
1986 break;
1987 }
1988 }
1989 if (LABEL_ALT_ENTRY_P (insn))
1990 output_alternate_entry_point (file, insn);
1991 else
1992 targetm.asm_out.internal_label (file, "L", CODE_LABEL_NUMBER (insn));
1993 break;
1994
1995 default:
1996 {
1997 rtx body = PATTERN (insn);
1998 int insn_code_number;
1999 const char *template;
2000
2001 /* An INSN, JUMP_INSN or CALL_INSN.
2002 First check for special kinds that recog doesn't recognize. */
2003
2004 if (GET_CODE (body) == USE /* These are just declarations. */
2005 || GET_CODE (body) == CLOBBER)
2006 break;
2007
2008 #ifdef HAVE_cc0
2009 {
2010 /* If there is a REG_CC_SETTER note on this insn, it means that
2011 the setting of the condition code was done in the delay slot
2012 of the insn that branched here. So recover the cc status
2013 from the insn that set it. */
2014
2015 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2016 if (note)
2017 {
2018 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
2019 cc_prev_status = cc_status;
2020 }
2021 }
2022 #endif
2023
2024 /* Detect insns that are really jump-tables
2025 and output them as such. */
2026
2027 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
2028 {
2029 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
2030 int vlen, idx;
2031 #endif
2032
2033 if (prescan > 0)
2034 break;
2035
2036 if (app_on)
2037 {
2038 fputs (ASM_APP_OFF, file);
2039 app_on = 0;
2040 }
2041
2042 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
2043 if (GET_CODE (body) == ADDR_VEC)
2044 {
2045 #ifdef ASM_OUTPUT_ADDR_VEC
2046 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
2047 #else
2048 abort ();
2049 #endif
2050 }
2051 else
2052 {
2053 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
2054 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
2055 #else
2056 abort ();
2057 #endif
2058 }
2059 #else
2060 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
2061 for (idx = 0; idx < vlen; idx++)
2062 {
2063 if (GET_CODE (body) == ADDR_VEC)
2064 {
2065 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
2066 ASM_OUTPUT_ADDR_VEC_ELT
2067 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
2068 #else
2069 abort ();
2070 #endif
2071 }
2072 else
2073 {
2074 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
2075 ASM_OUTPUT_ADDR_DIFF_ELT
2076 (file,
2077 body,
2078 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
2079 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
2080 #else
2081 abort ();
2082 #endif
2083 }
2084 }
2085 #ifdef ASM_OUTPUT_CASE_END
2086 ASM_OUTPUT_CASE_END (file,
2087 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2088 insn);
2089 #endif
2090 #endif
2091
2092 function_section (current_function_decl);
2093
2094 break;
2095 }
2096 	  /* Emit line-number debugging info when this insn's source position
2097 	     differs from the last position we recorded.  */
2098 if (notice_source_line (insn))
2099 {
2100 (*debug_hooks->source_line) (last_linenum, last_filename);
2101 }
2102
2103 if (GET_CODE (body) == ASM_INPUT)
2104 {
2105 const char *string = XSTR (body, 0);
2106
2107 /* There's no telling what that did to the condition codes. */
2108 CC_STATUS_INIT;
2109 if (prescan > 0)
2110 break;
2111
2112 if (string[0])
2113 {
2114 if (! app_on)
2115 {
2116 fputs (ASM_APP_ON, file);
2117 app_on = 1;
2118 }
2119 fprintf (asm_out_file, "\t%s\n", string);
2120 }
2121 break;
2122 }
2123
2124 /* Detect `asm' construct with operands. */
2125 if (asm_noperands (body) >= 0)
2126 {
2127 unsigned int noperands = asm_noperands (body);
2128 rtx *ops = alloca (noperands * sizeof (rtx));
2129 const char *string;
2130
2131 /* There's no telling what that did to the condition codes. */
2132 CC_STATUS_INIT;
2133 if (prescan > 0)
2134 break;
2135
2136 /* Get out the operand values. */
2137 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2138 /* Inhibit aborts on what would otherwise be compiler bugs. */
2139 insn_noperands = noperands;
2140 this_is_asm_operands = insn;
2141
2142 #ifdef FINAL_PRESCAN_INSN
2143 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2144 #endif
2145
2146 /* Output the insn using them. */
2147 if (string[0])
2148 {
2149 if (! app_on)
2150 {
2151 fputs (ASM_APP_ON, file);
2152 app_on = 1;
2153 }
2154 output_asm_insn (string, ops);
2155 }
2156
2157 this_is_asm_operands = 0;
2158 break;
2159 }
2160
2161 if (prescan <= 0 && app_on)
2162 {
2163 fputs (ASM_APP_OFF, file);
2164 app_on = 0;
2165 }
2166
2167 if (GET_CODE (body) == SEQUENCE)
2168 {
2169 /* A delayed-branch sequence */
2170 int i;
2171 rtx next;
2172
2173 if (prescan > 0)
2174 break;
2175 final_sequence = body;
2176
2177 /* Record the delay slots' frame information before the branch.
2178 This is needed for delayed calls: see execute_cfa_program(). */
2179 #if defined (DWARF2_UNWIND_INFO)
2180 if (dwarf2out_do_frame ())
2181 for (i = 1; i < XVECLEN (body, 0); i++)
2182 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2183 #endif
2184
2185 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2186 force the restoration of a comparison that was previously
2187 thought unnecessary. If that happens, cancel this sequence
2188 and cause that insn to be restored. */
2189
2190 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1, seen);
2191 if (next != XVECEXP (body, 0, 1))
2192 {
2193 final_sequence = 0;
2194 return next;
2195 }
2196
2197 for (i = 1; i < XVECLEN (body, 0); i++)
2198 {
2199 rtx insn = XVECEXP (body, 0, i);
2200 rtx next = NEXT_INSN (insn);
2201 /* We loop in case any instruction in a delay slot gets
2202 split. */
2203 do
2204 insn = final_scan_insn (insn, file, 0, prescan, 2, seen);
2205 while (insn != next);
2206 }
2207 #ifdef DBR_OUTPUT_SEQEND
2208 DBR_OUTPUT_SEQEND (file);
2209 #endif
2210 final_sequence = 0;
2211
2212 /* If the insn requiring the delay slot was a CALL_INSN, the
2213 insns in the delay slot are actually executed before the
2214 called function. Hence we don't preserve any CC-setting
2215 actions in these insns and the CC must be marked as being
2216 clobbered by the function. */
2217 if (CALL_P (XVECEXP (body, 0, 0)))
2218 {
2219 CC_STATUS_INIT;
2220 }
2221 break;
2222 }
2223
2224 /* We have a real machine instruction as rtl. */
2225
2226 body = PATTERN (insn);
2227
2228 #ifdef HAVE_cc0
2229 set = single_set (insn);
2230
2231 /* Check for redundant test and compare instructions
2232 (when the condition codes are already set up as desired).
2233 This is done only when optimizing; if not optimizing,
2234 it should be possible for the user to alter a variable
2235 with the debugger in between statements
2236 and the next statement should reexamine the variable
2237 to compute the condition codes. */
2238
2239 if (optimize)
2240 {
2241 if (set
2242 && GET_CODE (SET_DEST (set)) == CC0
2243 && insn != last_ignored_compare)
2244 {
2245 if (GET_CODE (SET_SRC (set)) == SUBREG)
2246 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2247 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2248 {
2249 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2250 XEXP (SET_SRC (set), 0)
2251 = alter_subreg (&XEXP (SET_SRC (set), 0));
2252 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2253 XEXP (SET_SRC (set), 1)
2254 = alter_subreg (&XEXP (SET_SRC (set), 1));
2255 }
2256 if ((cc_status.value1 != 0
2257 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2258 || (cc_status.value2 != 0
2259 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2260 {
2261 /* Don't delete insn if it has an addressing side-effect. */
2262 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2263 /* or if anything in it is volatile. */
2264 && ! volatile_refs_p (PATTERN (insn)))
2265 {
2266 /* We don't really delete the insn; just ignore it. */
2267 last_ignored_compare = insn;
2268 break;
2269 }
2270 }
2271 }
2272 }
2273 #endif
2274
2275 #ifndef STACK_REGS
2276 /* Don't bother outputting obvious no-ops, even without -O.
2277 This optimization is fast and doesn't interfere with debugging.
2278 Don't do this if the insn is in a delay slot, since this
2279 will cause an improper number of delay insns to be written. */
2280 if (final_sequence == 0
2281 && prescan >= 0
2282 && NONJUMP_INSN_P (insn) && GET_CODE (body) == SET
2283 && REG_P (SET_SRC (body))
2284 && REG_P (SET_DEST (body))
2285 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2286 break;
2287 #endif
2288
2289 #ifdef HAVE_cc0
2290 /* If this is a conditional branch, maybe modify it
2291 if the cc's are in a nonstandard state
2292 so that it accomplishes the same thing that it would
2293 do straightforwardly if the cc's were set up normally. */
2294
2295 if (cc_status.flags != 0
2296 && JUMP_P (insn)
2297 && GET_CODE (body) == SET
2298 && SET_DEST (body) == pc_rtx
2299 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2300 && COMPARISON_P (XEXP (SET_SRC (body), 0))
2301 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2302 /* This is done during prescan; it is not done again
2303 in final scan when prescan has been done. */
2304 && prescan >= 0)
2305 {
2306 /* This function may alter the contents of its argument
2307 and clear some of the cc_status.flags bits.
2308 It may also return 1 meaning condition now always true
2309 or -1 meaning condition now always false
2310 or 2 meaning condition nontrivial but altered. */
2311 int result = alter_cond (XEXP (SET_SRC (body), 0));
2312 /* If condition now has fixed value, replace the IF_THEN_ELSE
2313 with its then-operand or its else-operand. */
2314 if (result == 1)
2315 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2316 if (result == -1)
2317 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2318
2319 /* The jump is now either unconditional or a no-op.
2320 If it has become a no-op, don't try to output it.
2321 (It would not be recognized.) */
2322 if (SET_SRC (body) == pc_rtx)
2323 {
2324 delete_insn (insn);
2325 break;
2326 }
2327 else if (GET_CODE (SET_SRC (body)) == RETURN)
2328 /* Replace (set (pc) (return)) with (return). */
2329 PATTERN (insn) = body = SET_SRC (body);
2330
2331 /* Rerecognize the instruction if it has changed. */
2332 if (result != 0)
2333 INSN_CODE (insn) = -1;
2334 }
2335
2336 /* Make same adjustments to instructions that examine the
2337 condition codes without jumping and instructions that
2338 handle conditional moves (if this machine has either one). */
2339
2340 if (cc_status.flags != 0
2341 && set != 0)
2342 {
2343 rtx cond_rtx, then_rtx, else_rtx;
2344
2345 if (!JUMP_P (insn)
2346 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2347 {
2348 cond_rtx = XEXP (SET_SRC (set), 0);
2349 then_rtx = XEXP (SET_SRC (set), 1);
2350 else_rtx = XEXP (SET_SRC (set), 2);
2351 }
2352 else
2353 {
2354 cond_rtx = SET_SRC (set);
2355 then_rtx = const_true_rtx;
2356 else_rtx = const0_rtx;
2357 }
2358
2359 switch (GET_CODE (cond_rtx))
2360 {
2361 case GTU:
2362 case GT:
2363 case LTU:
2364 case LT:
2365 case GEU:
2366 case GE:
2367 case LEU:
2368 case LE:
2369 case EQ:
2370 case NE:
2371 {
2372 int result;
2373 if (XEXP (cond_rtx, 0) != cc0_rtx)
2374 break;
2375 result = alter_cond (cond_rtx);
2376 if (result == 1)
2377 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2378 else if (result == -1)
2379 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2380 else if (result == 2)
2381 INSN_CODE (insn) = -1;
2382 if (SET_DEST (set) == SET_SRC (set))
2383 delete_insn (insn);
2384 }
2385 break;
2386
2387 default:
2388 break;
2389 }
2390 }
2391
2392 #endif
2393
2394 #ifdef HAVE_peephole
2395 /* Do machine-specific peephole optimizations if desired. */
2396
2397 if (optimize && !flag_no_peephole && !nopeepholes)
2398 {
2399 rtx next = peephole (insn);
2400 /* When peepholing, if there were notes within the peephole,
2401 emit them before the peephole. */
2402 if (next != 0 && next != NEXT_INSN (insn))
2403 {
2404 rtx note, prev = PREV_INSN (insn);
2405
2406 for (note = NEXT_INSN (insn); note != next;
2407 note = NEXT_INSN (note))
2408 final_scan_insn (note, file, optimize, prescan, nopeepholes, seen);
2409
2410 /* In case this is prescan, put the notes
2411 in proper position for later rescan. */
2412 note = NEXT_INSN (insn);
2413 PREV_INSN (note) = prev;
2414 NEXT_INSN (prev) = note;
2415 NEXT_INSN (PREV_INSN (next)) = insn;
2416 PREV_INSN (insn) = PREV_INSN (next);
2417 NEXT_INSN (insn) = next;
2418 PREV_INSN (next) = insn;
2419 }
2420
2421 /* PEEPHOLE might have changed this. */
2422 body = PATTERN (insn);
2423 }
2424 #endif
2425
2426 /* Try to recognize the instruction.
2427 If successful, verify that the operands satisfy the
2428 constraints for the instruction. Crash if they don't,
2429 since `reload' should have changed them so that they do. */
2430
2431 insn_code_number = recog_memoized (insn);
2432 cleanup_subreg_operands (insn);
2433
2434 /* Dump the insn in the assembly for debugging. */
2435 if (flag_dump_rtl_in_asm)
2436 {
2437 print_rtx_head = ASM_COMMENT_START;
2438 print_rtl_single (asm_out_file, insn);
2439 print_rtx_head = "";
2440 }
2441
2442 if (! constrain_operands_cached (1))
2443 fatal_insn_not_found (insn);
2444
2445 /* Some target machines need to prescan each insn before
2446 it is output. */
2447
2448 #ifdef FINAL_PRESCAN_INSN
2449 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2450 #endif
2451
2452 #ifdef HAVE_conditional_execution
2453 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2454 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2455 else
2456 current_insn_predicate = NULL_RTX;
2457 #endif
2458
2459 #ifdef HAVE_cc0
2460 cc_prev_status = cc_status;
2461
2462 /* Update `cc_status' for this instruction.
2463 The instruction's output routine may change it further.
2464 If the output routine for a jump insn needs to depend
2465 on the cc status, it should look at cc_prev_status. */
2466
2467 NOTICE_UPDATE_CC (body, insn);
2468 #endif
2469
2470 current_output_insn = debug_insn = insn;
2471
2472 #if defined (DWARF2_UNWIND_INFO)
2473 if (CALL_P (insn) && dwarf2out_do_frame ())
2474 dwarf2out_frame_debug (insn);
2475 #endif
2476
2477 /* Find the proper template for this insn. */
2478 template = get_insn_template (insn_code_number, insn);
2479
2480 /* If the C code returns 0, it means that it is a jump insn
2481 which follows a deleted test insn, and that test insn
2482 needs to be reinserted. */
2483 if (template == 0)
2484 {
2485 rtx prev;
2486
2487 if (prev_nonnote_insn (insn) != last_ignored_compare)
2488 abort ();
2489
2490 /* We have already processed the notes between the setter and
2491 the user. Make sure we don't process them again, this is
2492 particularly important if one of the notes is a block
2493 scope note or an EH note. */
2494 for (prev = insn;
2495 prev != last_ignored_compare;
2496 prev = PREV_INSN (prev))
2497 {
2498 if (NOTE_P (prev))
2499 delete_insn (prev); /* Use delete_note. */
2500 }
2501
2502 return prev;
2503 }
2504
2505 /* If the template is the string "#", it means that this insn must
2506 be split. */
2507 if (template[0] == '#' && template[1] == '\0')
2508 {
2509 rtx new = try_split (body, insn, 0);
2510
2511 /* If we didn't split the insn, go away. */
2512 if (new == insn && PATTERN (new) == body)
2513 fatal_insn ("could not split insn", insn);
2514
2515 #ifdef HAVE_ATTR_length
2516 /* This instruction should have been split in shorten_branches,
2517 to ensure that we would have valid length info for the
2518 	     resulting split insns.  */
2519 abort ();
2520 #endif
2521
2522 return new;
2523 }
2524
2525 if (prescan > 0)
2526 break;
2527
2528 #ifdef TARGET_UNWIND_INFO
2529 /* ??? This will put the directives in the wrong place if
2530 	 get_insn_template outputs assembly directly.  However, calling it
2531 	 before get_insn_template breaks if the insn is split.  */
2532 targetm.asm_out.unwind_emit (asm_out_file, insn);
2533 #endif
2534
2535 /* Output assembler code from the template. */
2536 output_in_slot = (nopeepholes > 1);
2537 output_asm_insn (template, recog_data.operand);
2538 output_in_slot = false;
2539
2540 /* If necessary, report the effect that the instruction has on
2541 the unwind info. We've already done this for delay slots
2542 and call instructions. */
2543 #if defined (DWARF2_UNWIND_INFO)
2544 if (NONJUMP_INSN_P (insn)
2545 #if !defined (HAVE_prologue)
2546 && !ACCUMULATE_OUTGOING_ARGS
2547 #endif
2548 && final_sequence == 0
2549 && dwarf2out_do_frame ())
2550 dwarf2out_frame_debug (insn);
2551 #endif
2552
2553 current_output_insn = debug_insn = 0;
2554 }
2555 }
2556 return NEXT_INSN (insn);
2557 }
2558 \f
2559 /* Return true if INSN is at a new source position (its file or line differs
2560    from the last position recorded), and update the recorded position.  */
2561
2562 static bool
2563 notice_source_line (rtx insn)
2564 {
2565 const char *filename = insn_file (insn);
2566 int linenum = insn_line (insn);
2567
2568 if (filename && (filename != last_filename || last_linenum != linenum))
2569 {
2570 last_filename = filename;
2571 last_linenum = linenum;
2572 high_block_linenum = MAX (last_linenum, high_block_linenum);
2573 high_function_linenum = MAX (last_linenum, high_function_linenum);
2574 return true;
2575 }
2576 return false;
2577 }
2578 \f
2579 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2580 directly to the desired hard register. */
2581
2582 void
2583 cleanup_subreg_operands (rtx insn)
2584 {
2585 int i;
2586 extract_insn_cached (insn);
2587 for (i = 0; i < recog_data.n_operands; i++)
2588 {
2589 /* The following test cannot use recog_data.operand when testing
2590 for a SUBREG: the underlying object might have been changed
2591 already if we are inside a match_operator expression that
2592 matches the else clause. Instead we test the underlying
2593 expression directly. */
2594 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2595 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2596 else if (GET_CODE (recog_data.operand[i]) == PLUS
2597 || GET_CODE (recog_data.operand[i]) == MULT
2598 || MEM_P (recog_data.operand[i]))
2599 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2600 }
2601
2602 for (i = 0; i < recog_data.n_dups; i++)
2603 {
2604 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2605 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2606 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2607 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2608 || MEM_P (*recog_data.dup_loc[i]))
2609 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2610 }
2611 }
2612
2613 /* If X is a SUBREG, replace it with a REG or a MEM,
2614 based on the thing it is a subreg of. */
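/* For illustration only (the results depend on the target's word size and
   register layout): a memory subreg such as (subreg:SI (mem:DI ADDR) 4)
   becomes (mem:SI <ADDR offset by 4 bytes>), while a register subreg such
   as (subreg:SI (reg:DI 2) 4) becomes the hard register that holds the
   second word of register 2.  */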
2615
2616 rtx
2617 alter_subreg (rtx *xp)
2618 {
2619 rtx x = *xp;
2620 rtx y = SUBREG_REG (x);
2621
2622 /* simplify_subreg does not remove subreg from volatile references.
2623 We are required to. */
2624 if (MEM_P (y))
2625 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2626 else
2627 {
2628 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2629 SUBREG_BYTE (x));
2630
2631 if (new != 0)
2632 *xp = new;
2633 /* Simplify_subreg can't handle some REG cases, but we have to. */
2634 else if (REG_P (y))
2635 {
2636 unsigned int regno = subreg_hard_regno (x, 1);
2637 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2638 }
2639 else
2640 abort ();
2641 }
2642
2643 return *xp;
2644 }
2645
2646 /* Do alter_subreg on all the SUBREGs contained in X. */
2647
2648 static rtx
2649 walk_alter_subreg (rtx *xp)
2650 {
2651 rtx x = *xp;
2652 switch (GET_CODE (x))
2653 {
2654 case PLUS:
2655 case MULT:
2656 case AND:
2657 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2658 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2659 break;
2660
2661 case MEM:
2662 case ZERO_EXTEND:
2663 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2664 break;
2665
2666 case SUBREG:
2667 return alter_subreg (xp);
2668
2669 default:
2670 break;
2671 }
2672
2673 return *xp;
2674 }
2675 \f
2676 #ifdef HAVE_cc0
2677
2678 /* Given BODY, the body of a jump instruction, alter the jump condition
2679 as required by the bits that are set in cc_status.flags.
2680 Not all of the bits there can be handled at this level in all cases.
2681
2682 The value is normally 0.
2683 1 means that the condition has become always true.
2684 -1 means that the condition has become always false.
2685 2 means that COND has been altered. */
2686
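/* For example (illustrative only): with CC_REVERSED set and COND
   (gt (cc0) (const_int 0)), COND is rewritten to use LT and 2 is returned;
   with CC_NOT_NEGATIVE set and COND (ge (cc0) (const_int 0)), the branch
   is always taken, so 1 is returned.  */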
2687 static int
2688 alter_cond (rtx cond)
2689 {
2690 int value = 0;
2691
2692 if (cc_status.flags & CC_REVERSED)
2693 {
2694 value = 2;
2695 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2696 }
2697
2698 if (cc_status.flags & CC_INVERTED)
2699 {
2700 value = 2;
2701 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2702 }
2703
2704 if (cc_status.flags & CC_NOT_POSITIVE)
2705 switch (GET_CODE (cond))
2706 {
2707 case LE:
2708 case LEU:
2709 case GEU:
2710 /* Jump becomes unconditional. */
2711 return 1;
2712
2713 case GT:
2714 case GTU:
2715 case LTU:
2716 /* Jump becomes no-op. */
2717 return -1;
2718
2719 case GE:
2720 PUT_CODE (cond, EQ);
2721 value = 2;
2722 break;
2723
2724 case LT:
2725 PUT_CODE (cond, NE);
2726 value = 2;
2727 break;
2728
2729 default:
2730 break;
2731 }
2732
2733 if (cc_status.flags & CC_NOT_NEGATIVE)
2734 switch (GET_CODE (cond))
2735 {
2736 case GE:
2737 case GEU:
2738 /* Jump becomes unconditional. */
2739 return 1;
2740
2741 case LT:
2742 case LTU:
2743 /* Jump becomes no-op. */
2744 return -1;
2745
2746 case LE:
2747 case LEU:
2748 PUT_CODE (cond, EQ);
2749 value = 2;
2750 break;
2751
2752 case GT:
2753 case GTU:
2754 PUT_CODE (cond, NE);
2755 value = 2;
2756 break;
2757
2758 default:
2759 break;
2760 }
2761
2762 if (cc_status.flags & CC_NO_OVERFLOW)
2763 switch (GET_CODE (cond))
2764 {
2765 case GEU:
2766 /* Jump becomes unconditional. */
2767 return 1;
2768
2769 case LEU:
2770 PUT_CODE (cond, EQ);
2771 value = 2;
2772 break;
2773
2774 case GTU:
2775 PUT_CODE (cond, NE);
2776 value = 2;
2777 break;
2778
2779 case LTU:
2780 /* Jump becomes no-op. */
2781 return -1;
2782
2783 default:
2784 break;
2785 }
2786
2787 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2788 switch (GET_CODE (cond))
2789 {
2790 default:
2791 abort ();
2792
2793 case NE:
2794 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2795 value = 2;
2796 break;
2797
2798 case EQ:
2799 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2800 value = 2;
2801 break;
2802 }
2803
2804 if (cc_status.flags & CC_NOT_SIGNED)
2805 /* The flags are valid if signed condition operators are converted
2806 to unsigned. */
2807 switch (GET_CODE (cond))
2808 {
2809 case LE:
2810 PUT_CODE (cond, LEU);
2811 value = 2;
2812 break;
2813
2814 case LT:
2815 PUT_CODE (cond, LTU);
2816 value = 2;
2817 break;
2818
2819 case GT:
2820 PUT_CODE (cond, GTU);
2821 value = 2;
2822 break;
2823
2824 case GE:
2825 PUT_CODE (cond, GEU);
2826 value = 2;
2827 break;
2828
2829 default:
2830 break;
2831 }
2832
2833 return value;
2834 }
2835 #endif
2836 \f
2837 /* Report inconsistency between the assembler template and the operands.
2838 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
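/* A target's operand-printing code would typically call this as, say,
     output_operand_lossage ("invalid operand modifier `%%%c'", code);
   the message is a printf-style format and any arguments follow it
   (the modifier letter above is only an example).  */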
2839
2840 void
2841 output_operand_lossage (const char *msgid, ...)
2842 {
2843 char *fmt_string;
2844 char *new_message;
2845 const char *pfx_str;
2846 va_list ap;
2847
2848 va_start (ap, msgid);
2849
2850 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2851 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2852 vasprintf (&new_message, fmt_string, ap);
2853
2854 if (this_is_asm_operands)
2855 error_for_asm (this_is_asm_operands, "%s", new_message);
2856 else
2857 internal_error ("%s", new_message);
2858
2859 free (fmt_string);
2860 free (new_message);
2861 va_end (ap);
2862 }
2863 \f
2864 /* Output of assembler code from a template, and its subroutines. */
2865
2866 /* Annotate the assembly with a comment describing the pattern and
2867 alternative used. */
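/* The comment emitted with -dp looks roughly like
	# 42	*movsi_1/2	[length = 5]
   i.e. ASM_COMMENT_START, the insn UID, the pattern name, the alternative
   number (only when the pattern has several) and, if lengths are computed,
   the insn length.  The pattern name and numbers above are invented.  */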
2868
2869 static void
2870 output_asm_name (void)
2871 {
2872 if (debug_insn)
2873 {
2874 int num = INSN_CODE (debug_insn);
2875 fprintf (asm_out_file, "\t%s %d\t%s",
2876 ASM_COMMENT_START, INSN_UID (debug_insn),
2877 insn_data[num].name);
2878 if (insn_data[num].n_alternatives > 1)
2879 fprintf (asm_out_file, "/%d", which_alternative + 1);
2880 #ifdef HAVE_ATTR_length
2881 fprintf (asm_out_file, "\t[length = %d]",
2882 get_attr_length (debug_insn));
2883 #endif
2884 /* Clear this so only the first assembler insn
2885 of any rtl insn will get the special comment for -dp. */
2886 debug_insn = 0;
2887 }
2888 }
2889
2890 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2891    or its address, return that expr.  Set *PADDRESSP to 1 if the expr
2892 corresponds to the address of the object and 0 if to the object. */
2893
2894 static tree
2895 get_mem_expr_from_op (rtx op, int *paddressp)
2896 {
2897 tree expr;
2898 int inner_addressp;
2899
2900 *paddressp = 0;
2901
2902 if (REG_P (op))
2903 return REG_EXPR (op);
2904 else if (!MEM_P (op))
2905 return 0;
2906
2907 if (MEM_EXPR (op) != 0)
2908 return MEM_EXPR (op);
2909
2910 /* Otherwise we have an address, so indicate it and look at the address. */
2911 *paddressp = 1;
2912 op = XEXP (op, 0);
2913
2914 /* First check if we have a decl for the address, then look at the right side
2915 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2916    But don't allow the address itself to be indirect.  */
2917 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2918 return expr;
2919 else if (GET_CODE (op) == PLUS
2920 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2921 return expr;
2922
2923 while (GET_RTX_CLASS (GET_CODE (op)) == RTX_UNARY
2924 || GET_RTX_CLASS (GET_CODE (op)) == RTX_BIN_ARITH)
2925 op = XEXP (op, 0);
2926
2927 expr = get_mem_expr_from_op (op, &inner_addressp);
2928 return inner_addressp ? 0 : expr;
2929 }
2930
2931 /* Output operand names for assembler instructions. OPERANDS is the
2932 operand vector, OPORDER is the order to write the operands, and NOPS
2933 is the number of operands to write. */
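/* The resulting annotation looks roughly like "# x, *p, tmp64": the first
   operand written is preceded by a tab and ASM_COMMENT_START, later ones by
   commas; "*" marks an operand whose MEM_EXPR describes its address rather
   than the object itself.  (The names shown are invented.)  */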
2934
2935 static void
2936 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2937 {
2938 int wrote = 0;
2939 int i;
2940
2941 for (i = 0; i < nops; i++)
2942 {
2943 int addressp;
2944 rtx op = operands[oporder[i]];
2945 tree expr = get_mem_expr_from_op (op, &addressp);
2946
2947 fprintf (asm_out_file, "%c%s",
2948 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2949 wrote = 1;
2950 if (expr)
2951 {
2952 fprintf (asm_out_file, "%s",
2953 addressp ? "*" : "");
2954 print_mem_expr (asm_out_file, expr);
2955 wrote = 1;
2956 }
2957 else if (REG_P (op) && ORIGINAL_REGNO (op)
2958 && ORIGINAL_REGNO (op) != REGNO (op))
2959 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2960 }
2961 }
2962
2963 /* Output text from TEMPLATE to the assembler output file,
2964    obeying %-directives to substitute operands taken from
2965 the vector OPERANDS.
2966
2967 %N (for N a digit) means print operand N in usual manner.
2968 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2969 and print the label name with no punctuation.
2970 %cN means require operand N to be a constant
2971 and print the constant expression with no punctuation.
2972 %aN means expect operand N to be a memory address
2973 (not a memory reference!) and print a reference
2974 to that address.
2975 %nN means expect operand N to be a constant
2976 and print a constant expression for minus the value
2977 of the operand, with no other punctuation. */
2978
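/* Purely as an illustration (the exact text depends on the target's
   PRINT_OPERAND and assembler syntax): given the template "add %2,%1,%0"
   with operands 0..2 being hard registers printed as r3, r4 and r5, the
   line emitted is "\tadd r5,r4,r3".  */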
2979 void
2980 output_asm_insn (const char *template, rtx *operands)
2981 {
2982 const char *p;
2983 int c;
2984 #ifdef ASSEMBLER_DIALECT
2985 int dialect = 0;
2986 #endif
2987 int oporder[MAX_RECOG_OPERANDS];
2988 char opoutput[MAX_RECOG_OPERANDS];
2989 int ops = 0;
2990
2991 /* An insn may return a null string template
2992 in a case where no assembler code is needed. */
2993 if (*template == 0)
2994 return;
2995
2996 memset (opoutput, 0, sizeof opoutput);
2997 p = template;
2998 putc ('\t', asm_out_file);
2999 if (output_in_slot)
3000 putc (' ', asm_out_file);
3001
3002 #ifdef ASM_OUTPUT_OPCODE
3003 ASM_OUTPUT_OPCODE (asm_out_file, p);
3004 #endif
3005
3006 while ((c = *p++))
3007 switch (c)
3008 {
3009 case '\n':
3010 if (flag_verbose_asm)
3011 output_asm_operand_names (operands, oporder, ops);
3012 if (flag_print_asm_name)
3013 output_asm_name ();
3014
3015 ops = 0;
3016 memset (opoutput, 0, sizeof opoutput);
3017
3018 putc (c, asm_out_file);
3019 #ifdef ASM_OUTPUT_OPCODE
3020 while ((c = *p) == '\t')
3021 {
3022 putc (c, asm_out_file);
3023 p++;
3024 }
3025 ASM_OUTPUT_OPCODE (asm_out_file, p);
3026 #endif
3027 break;
3028
3029 #ifdef ASSEMBLER_DIALECT
3030 case '{':
3031 {
3032 int i;
3033
3034 if (dialect)
3035 output_operand_lossage ("nested assembly dialect alternatives");
3036 else
3037 dialect = 1;
3038
3039 /* If we want the first dialect, do nothing. Otherwise, skip
3040 DIALECT_NUMBER of strings ending with '|'. */
3041 for (i = 0; i < dialect_number; i++)
3042 {
3043 while (*p && *p != '}' && *p++ != '|')
3044 ;
3045 if (*p == '}')
3046 break;
3047 if (*p == '|')
3048 p++;
3049 }
3050
3051 if (*p == '\0')
3052 output_operand_lossage ("unterminated assembly dialect alternative");
3053 }
3054 break;
3055
3056 case '|':
3057 if (dialect)
3058 {
3059 /* Skip to close brace. */
3060 do
3061 {
3062 if (*p == '\0')
3063 {
3064 output_operand_lossage ("unterminated assembly dialect alternative");
3065 break;
3066 }
3067 }
3068 while (*p++ != '}');
3069 dialect = 0;
3070 }
3071 else
3072 putc (c, asm_out_file);
3073 break;
3074
3075 case '}':
3076 if (! dialect)
3077 putc (c, asm_out_file);
3078 dialect = 0;
3079 break;
3080 #endif
3081
3082 case '%':
3083 /* %% outputs a single %. */
3084 if (*p == '%')
3085 {
3086 p++;
3087 putc (c, asm_out_file);
3088 }
3089 /* %= outputs a number which is unique to each insn in the entire
3090 compilation. This is useful for making local labels that are
3091 referred to more than once in a given insn. */
3092 else if (*p == '=')
3093 {
3094 p++;
3095 fprintf (asm_out_file, "%d", insn_counter);
3096 }
3097 /* % followed by a letter and some digits
3098 outputs an operand in a special way depending on the letter.
3099 Letters `acln' are implemented directly.
3100 Other letters are passed to `output_operand' so that
3101 the PRINT_OPERAND macro can define them. */
3102 else if (ISALPHA (*p))
3103 {
3104 int letter = *p++;
3105 c = atoi (p);
3106
3107 if (! ISDIGIT (*p))
3108 output_operand_lossage ("operand number missing after %%-letter");
3109 else if (this_is_asm_operands
3110 && (c < 0 || (unsigned int) c >= insn_noperands))
3111 output_operand_lossage ("operand number out of range");
3112 else if (letter == 'l')
3113 output_asm_label (operands[c]);
3114 else if (letter == 'a')
3115 output_address (operands[c]);
3116 else if (letter == 'c')
3117 {
3118 if (CONSTANT_ADDRESS_P (operands[c]))
3119 output_addr_const (asm_out_file, operands[c]);
3120 else
3121 output_operand (operands[c], 'c');
3122 }
3123 else if (letter == 'n')
3124 {
3125 if (GET_CODE (operands[c]) == CONST_INT)
3126 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3127 - INTVAL (operands[c]));
3128 else
3129 {
3130 putc ('-', asm_out_file);
3131 output_addr_const (asm_out_file, operands[c]);
3132 }
3133 }
3134 else
3135 output_operand (operands[c], letter);
3136
3137 if (!opoutput[c])
3138 oporder[ops++] = c;
3139 opoutput[c] = 1;
3140
3141 while (ISDIGIT (c = *p))
3142 p++;
3143 }
3144 /* % followed by a digit outputs an operand the default way. */
3145 else if (ISDIGIT (*p))
3146 {
3147 c = atoi (p);
3148 if (this_is_asm_operands
3149 && (c < 0 || (unsigned int) c >= insn_noperands))
3150 output_operand_lossage ("operand number out of range");
3151 else
3152 output_operand (operands[c], 0);
3153
3154 if (!opoutput[c])
3155 oporder[ops++] = c;
3156 opoutput[c] = 1;
3157
3158 while (ISDIGIT (c = *p))
3159 p++;
3160 }
3161 /* % followed by punctuation: output something for that
3162 punctuation character alone, with no operand.
3163 The PRINT_OPERAND macro decides what is actually done. */
3164 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3165 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3166 output_operand (NULL_RTX, *p++);
3167 #endif
3168 else
3169 output_operand_lossage ("invalid %%-code");
3170 break;
3171
3172 default:
3173 putc (c, asm_out_file);
3174 }
3175
3176 /* Write out the variable names for operands, if we know them. */
3177 if (flag_verbose_asm)
3178 output_asm_operand_names (operands, oporder, ops);
3179 if (flag_print_asm_name)
3180 output_asm_name ();
3181
3182 putc ('\n', asm_out_file);
3183 }
3184 \f
3185 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3186
3187 void
3188 output_asm_label (rtx x)
3189 {
3190 char buf[256];
3191
3192 if (GET_CODE (x) == LABEL_REF)
3193 x = XEXP (x, 0);
3194 if (LABEL_P (x)
3195 || (NOTE_P (x)
3196 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3197 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3198 else
3199 output_operand_lossage ("`%%l' operand isn't a label");
3200
3201 assemble_name (asm_out_file, buf);
3202 }
3203
3204 /* Print operand X using machine-dependent assembler syntax.
3205 The macro PRINT_OPERAND is defined just to control this function.
3206 CODE is a non-digit that preceded the operand-number in the % spec,
3207 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3208 between the % and the digits.
3209 When CODE is a non-letter, X is 0.
3210
3211 The meanings of the letters are machine-dependent and controlled
3212 by PRINT_OPERAND. */
3213
3214 static void
3215 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3216 {
3217 if (x && GET_CODE (x) == SUBREG)
3218 x = alter_subreg (&x);
3219
3220 /* If X is a pseudo-register, abort now rather than writing trash to the
3221 assembler file. */
3222
3223 if (x && REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3224 abort ();
3225
3226 PRINT_OPERAND (asm_out_file, x, code);
3227 }
3228
3229 /* Print a memory reference operand for address X
3230 using machine-dependent assembler syntax.
3231 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3232
3233 void
3234 output_address (rtx x)
3235 {
3236 walk_alter_subreg (&x);
3237 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3238 }
3239 \f
3240 /* Print an integer constant expression in assembler syntax.
3241 Addition and subtraction are the only arithmetic
3242 that may appear in these expressions. */
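/* For example, (const:SI (plus:SI (symbol_ref:SI ("x")) (const_int 8)))
   is printed as "x+8" (modulo any target-specific name prefixing done by
   assemble_name), and (const_int -4) is printed simply as "-4".  */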
3243
3244 void
3245 output_addr_const (FILE *file, rtx x)
3246 {
3247 char buf[256];
3248
3249 restart:
3250 switch (GET_CODE (x))
3251 {
3252 case PC:
3253 putc ('.', file);
3254 break;
3255
3256 case SYMBOL_REF:
3257 if (SYMBOL_REF_DECL (x))
3258 mark_decl_referenced (SYMBOL_REF_DECL (x));
3259 #ifdef ASM_OUTPUT_SYMBOL_REF
3260 ASM_OUTPUT_SYMBOL_REF (file, x);
3261 #else
3262 assemble_name (file, XSTR (x, 0));
3263 #endif
3264 break;
3265
3266 case LABEL_REF:
3267 x = XEXP (x, 0);
3268 /* Fall through. */
3269 case CODE_LABEL:
3270 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3271 #ifdef ASM_OUTPUT_LABEL_REF
3272 ASM_OUTPUT_LABEL_REF (file, buf);
3273 #else
3274 assemble_name (file, buf);
3275 #endif
3276 break;
3277
3278 case CONST_INT:
3279 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3280 break;
3281
3282 case CONST:
3283 /* This used to output parentheses around the expression,
3284 but that does not work on the 386 (either ATT or BSD assembler). */
3285 output_addr_const (file, XEXP (x, 0));
3286 break;
3287
3288 case CONST_DOUBLE:
3289 if (GET_MODE (x) == VOIDmode)
3290 {
3291 /* We can use %d if the number is one word and positive. */
3292 if (CONST_DOUBLE_HIGH (x))
3293 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3294 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3295 else if (CONST_DOUBLE_LOW (x) < 0)
3296 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3297 else
3298 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3299 }
3300 else
3301 /* We can't handle floating point constants;
3302 PRINT_OPERAND must handle them. */
3303 output_operand_lossage ("floating constant misused");
3304 break;
3305
3306 case PLUS:
3307       /* Some assemblers need integer constants to appear last (e.g. masm).  */
3308 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3309 {
3310 output_addr_const (file, XEXP (x, 1));
3311 if (INTVAL (XEXP (x, 0)) >= 0)
3312 fprintf (file, "+");
3313 output_addr_const (file, XEXP (x, 0));
3314 }
3315 else
3316 {
3317 output_addr_const (file, XEXP (x, 0));
3318 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3319 || INTVAL (XEXP (x, 1)) >= 0)
3320 fprintf (file, "+");
3321 output_addr_const (file, XEXP (x, 1));
3322 }
3323 break;
3324
3325 case MINUS:
3326 /* Avoid outputting things like x-x or x+5-x,
3327 since some assemblers can't handle that. */
3328 x = simplify_subtraction (x);
3329 if (GET_CODE (x) != MINUS)
3330 goto restart;
3331
3332 output_addr_const (file, XEXP (x, 0));
3333 fprintf (file, "-");
3334 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3335 || GET_CODE (XEXP (x, 1)) == PC
3336 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3337 output_addr_const (file, XEXP (x, 1));
3338 else
3339 {
3340 fputs (targetm.asm_out.open_paren, file);
3341 output_addr_const (file, XEXP (x, 1));
3342 fputs (targetm.asm_out.close_paren, file);
3343 }
3344 break;
3345
3346 case ZERO_EXTEND:
3347 case SIGN_EXTEND:
3348 case SUBREG:
3349 output_addr_const (file, XEXP (x, 0));
3350 break;
3351
3352 default:
3353 #ifdef OUTPUT_ADDR_CONST_EXTRA
3354 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3355 break;
3356
3357 fail:
3358 #endif
3359 output_operand_lossage ("invalid expression as operand");
3360 }
3361 }
3362 \f
3363 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3364 %R prints the value of REGISTER_PREFIX.
3365 %L prints the value of LOCAL_LABEL_PREFIX.
3366 %U prints the value of USER_LABEL_PREFIX.
3367 %I prints the value of IMMEDIATE_PREFIX.
3368 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3369 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3370
3371 We handle alternate assembler dialects here, just like output_asm_insn. */
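/* For example, on a hypothetical target where REGISTER_PREFIX is "%" and
   LOCAL_LABEL_PREFIX is ".",
     asm_fprintf (file, "\tbne %R%s,%LL%d\n", "g0", 42);
   writes "\tbne %g0,.L42\n" to FILE.  */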
3372
3373 void
3374 asm_fprintf (FILE *file, const char *p, ...)
3375 {
3376 char buf[10];
3377 char *q, c;
3378 va_list argptr;
3379
3380 va_start (argptr, p);
3381
3382 buf[0] = '%';
3383
3384 while ((c = *p++))
3385 switch (c)
3386 {
3387 #ifdef ASSEMBLER_DIALECT
3388 case '{':
3389 {
3390 int i;
3391
3392 /* If we want the first dialect, do nothing. Otherwise, skip
3393 DIALECT_NUMBER of strings ending with '|'. */
3394 for (i = 0; i < dialect_number; i++)
3395 {
3396 while (*p && *p++ != '|')
3397 ;
3398
3399 if (*p == '|')
3400 p++;
3401 }
3402 }
3403 break;
3404
3405 case '|':
3406 /* Skip to close brace. */
3407 while (*p && *p++ != '}')
3408 ;
3409 break;
3410
3411 case '}':
3412 break;
3413 #endif
3414
3415 case '%':
3416 c = *p++;
3417 q = &buf[1];
3418 while (strchr ("-+ #0", c))
3419 {
3420 *q++ = c;
3421 c = *p++;
3422 }
3423 while (ISDIGIT (c) || c == '.')
3424 {
3425 *q++ = c;
3426 c = *p++;
3427 }
3428 switch (c)
3429 {
3430 case '%':
3431 putc ('%', file);
3432 break;
3433
3434 case 'd': case 'i': case 'u':
3435 case 'x': case 'X': case 'o':
3436 case 'c':
3437 *q++ = c;
3438 *q = 0;
3439 fprintf (file, buf, va_arg (argptr, int));
3440 break;
3441
3442 case 'w':
3443 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3444 'o' cases, but we do not check for those cases. It
3445 means that the value is a HOST_WIDE_INT, which may be
3446 either `long' or `long long'. */
3447 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3448 q += strlen (HOST_WIDE_INT_PRINT);
3449 *q++ = *p++;
3450 *q = 0;
3451 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3452 break;
3453
3454 case 'l':
3455 *q++ = c;
3456 #ifdef HAVE_LONG_LONG
3457 if (*p == 'l')
3458 {
3459 *q++ = *p++;
3460 *q++ = *p++;
3461 *q = 0;
3462 fprintf (file, buf, va_arg (argptr, long long));
3463 }
3464 else
3465 #endif
3466 {
3467 *q++ = *p++;
3468 *q = 0;
3469 fprintf (file, buf, va_arg (argptr, long));
3470 }
3471
3472 break;
3473
3474 case 's':
3475 *q++ = c;
3476 *q = 0;
3477 fprintf (file, buf, va_arg (argptr, char *));
3478 break;
3479
3480 case 'O':
3481 #ifdef ASM_OUTPUT_OPCODE
3482 ASM_OUTPUT_OPCODE (asm_out_file, p);
3483 #endif
3484 break;
3485
3486 case 'R':
3487 #ifdef REGISTER_PREFIX
3488 fprintf (file, "%s", REGISTER_PREFIX);
3489 #endif
3490 break;
3491
3492 case 'I':
3493 #ifdef IMMEDIATE_PREFIX
3494 fprintf (file, "%s", IMMEDIATE_PREFIX);
3495 #endif
3496 break;
3497
3498 case 'L':
3499 #ifdef LOCAL_LABEL_PREFIX
3500 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3501 #endif
3502 break;
3503
3504 case 'U':
3505 fputs (user_label_prefix, file);
3506 break;
3507
3508 #ifdef ASM_FPRINTF_EXTENSIONS
3509 /* Uppercase letters are reserved for general use by asm_fprintf
3510 	     and so are not available to target-specific code.  To prevent
3511 	     the ASM_FPRINTF_EXTENSIONS macro from using them, they are
3512 	     listed here.  As they get turned into real extensions
3513 to asm_fprintf they should be removed from this list. */
3514 case 'A': case 'B': case 'C': case 'D': case 'E':
3515 case 'F': case 'G': case 'H': case 'J': case 'K':
3516 case 'M': case 'N': case 'P': case 'Q': case 'S':
3517 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3518 break;
3519
3520 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3521 #endif
3522 default:
3523 abort ();
3524 }
3525 break;
3526
3527 default:
3528 putc (c, file);
3529 }
3530 va_end (argptr);
3531 }
3532 \f
3533 /* Split up a CONST_DOUBLE or integer constant rtx
3534 into two rtx's for single words,
3535 storing in *FIRST the word that comes first in memory in the target
3536 and in *SECOND the other. */
3537
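/* For example, with 32-bit words and a 64-bit HOST_WIDE_INT, the CONST_INT
   0x100000002 splits into a low word of 2 and a high word of 1; on a
   !WORDS_BIG_ENDIAN target *FIRST gets (const_int 2) and *SECOND gets
   (const_int 1), and the other way round if WORDS_BIG_ENDIAN.  */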
3538 void
3539 split_double (rtx value, rtx *first, rtx *second)
3540 {
3541 if (GET_CODE (value) == CONST_INT)
3542 {
3543 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3544 {
3545 /* In this case the CONST_INT holds both target words.
3546 Extract the bits from it into two word-sized pieces.
3547 Sign extend each half to HOST_WIDE_INT. */
3548 unsigned HOST_WIDE_INT low, high;
3549 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3550
3551 /* Set sign_bit to the most significant bit of a word. */
3552 sign_bit = 1;
3553 sign_bit <<= BITS_PER_WORD - 1;
3554
3555 /* Set mask so that all bits of the word are set. We could
3556 have used 1 << BITS_PER_WORD instead of basing the
3557 calculation on sign_bit. However, on machines where
3558 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3559 compiler warning, even though the code would never be
3560 executed. */
3561 mask = sign_bit << 1;
3562 mask--;
3563
3564 /* Set sign_extend as any remaining bits. */
3565 sign_extend = ~mask;
3566
3567 /* Pick the lower word and sign-extend it. */
3568 low = INTVAL (value);
3569 low &= mask;
3570 if (low & sign_bit)
3571 low |= sign_extend;
3572
3573 /* Pick the higher word, shifted to the least significant
3574 bits, and sign-extend it. */
3575 high = INTVAL (value);
3576 high >>= BITS_PER_WORD - 1;
3577 high >>= 1;
3578 high &= mask;
3579 if (high & sign_bit)
3580 high |= sign_extend;
3581
3582 /* Store the words in the target machine order. */
3583 if (WORDS_BIG_ENDIAN)
3584 {
3585 *first = GEN_INT (high);
3586 *second = GEN_INT (low);
3587 }
3588 else
3589 {
3590 *first = GEN_INT (low);
3591 *second = GEN_INT (high);
3592 }
3593 }
3594 else
3595 {
3596 /* The rule for using CONST_INT for a wider mode
3597 is that we regard the value as signed.
3598 So sign-extend it. */
3599 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3600 if (WORDS_BIG_ENDIAN)
3601 {
3602 *first = high;
3603 *second = value;
3604 }
3605 else
3606 {
3607 *first = value;
3608 *second = high;
3609 }
3610 }
3611 }
3612 else if (GET_CODE (value) != CONST_DOUBLE)
3613 {
3614 if (WORDS_BIG_ENDIAN)
3615 {
3616 *first = const0_rtx;
3617 *second = value;
3618 }
3619 else
3620 {
3621 *first = value;
3622 *second = const0_rtx;
3623 }
3624 }
3625 else if (GET_MODE (value) == VOIDmode
3626 /* This is the old way we did CONST_DOUBLE integers. */
3627 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3628 {
3629 /* In an integer, the words are defined as most and least significant.
3630 So order them by the target's convention. */
3631 if (WORDS_BIG_ENDIAN)
3632 {
3633 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3634 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3635 }
3636 else
3637 {
3638 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3639 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3640 }
3641 }
3642 else
3643 {
3644 REAL_VALUE_TYPE r;
3645 long l[2];
3646 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3647
3648 /* Note, this converts the REAL_VALUE_TYPE to the target's
3649 format, splits up the floating point double and outputs
3650 exactly 32 bits of it into each of l[0] and l[1] --
3651 not necessarily BITS_PER_WORD bits. */
3652 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3653
3654 /* If 32 bits is an entire word for the target, but not for the host,
3655 then sign-extend on the host so that the number will look the same
3656 way on the host that it would on the target. See for instance
3657 simplify_unary_operation. The #if is needed to avoid compiler
3658 warnings. */
3659
3660 #if HOST_BITS_PER_LONG > 32
3661 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3662 {
3663 if (l[0] & ((long) 1 << 31))
3664 l[0] |= ((long) (-1) << 32);
3665 if (l[1] & ((long) 1 << 31))
3666 l[1] |= ((long) (-1) << 32);
3667 }
3668 #endif
3669
3670 *first = GEN_INT (l[0]);
3671 *second = GEN_INT (l[1]);
3672 }
3673 }
3674 \f
3675 /* Return nonzero if this function has no function calls. */
3676
3677 int
3678 leaf_function_p (void)
3679 {
3680 rtx insn;
3681 rtx link;
3682
3683 if (current_function_profile || profile_arc_flag)
3684 return 0;
3685
3686 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3687 {
3688 if (CALL_P (insn)
3689 && ! SIBLING_CALL_P (insn))
3690 return 0;
3691 if (NONJUMP_INSN_P (insn)
3692 && GET_CODE (PATTERN (insn)) == SEQUENCE
3693 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3694 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3695 return 0;
3696 }
3697 for (link = current_function_epilogue_delay_list;
3698 link;
3699 link = XEXP (link, 1))
3700 {
3701 insn = XEXP (link, 0);
3702
3703 if (CALL_P (insn)
3704 && ! SIBLING_CALL_P (insn))
3705 return 0;
3706 if (NONJUMP_INSN_P (insn)
3707 && GET_CODE (PATTERN (insn)) == SEQUENCE
3708 && CALL_P (XVECEXP (PATTERN (insn), 0, 0))
3709 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3710 return 0;
3711 }
3712
3713 return 1;
3714 }
3715
3716 /* Return 1 if branch is a forward branch.
3717    Uses the insn_shuid array, so it works only in the final pass.  May be used
3718    by output templates to add customary branch prediction hints.
3719 */
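/* For instance, a target's branch output code might do something like
     fputs (final_forward_branch_p (insn) ? ",pn" : ",pt", file);
   to choose a static prediction suffix (the suffixes shown are only
   illustrative).  */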
3720 int
3721 final_forward_branch_p (rtx insn)
3722 {
3723 int insn_id, label_id;
3724 if (!uid_shuid)
3725 abort ();
3726 insn_id = INSN_SHUID (insn);
3727 label_id = INSN_SHUID (JUMP_LABEL (insn));
3728   /* We've hit an insn that does not have id information available.  */
3729 if (!insn_id || !label_id)
3730 abort ();
3731 return insn_id < label_id;
3732 }
3733
3734 /* On some machines, a function with no call insns
3735 can run faster if it doesn't create its own register window.
3736 When output, the leaf function should use only the "output"
3737 registers. Ordinarily, the function would be compiled to use
3738 the "input" registers to find its arguments; it is a candidate
3739 for leaf treatment if it uses only the "input" registers.
3740 Leaf function treatment means renumbering so the function
3741 uses the "output" registers instead. */
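/* For instance, on a register-window target such as SPARC the idea is that
   LEAF_REG_REMAP maps the incoming-argument registers %i0-%i5 onto %o0-%o5
   so no register window has to be allocated; the details are entirely up to
   the target's LEAF_REGISTERS and LEAF_REG_REMAP macros.  */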
3742
3743 #ifdef LEAF_REGISTERS
3744
3745 /* Return 1 if this function uses only the registers that can be
3746 safely renumbered. */
3747
3748 int
3749 only_leaf_regs_used (void)
3750 {
3751 int i;
3752 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3753
3754 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3755 if ((regs_ever_live[i] || global_regs[i])
3756 && ! permitted_reg_in_leaf_functions[i])
3757 return 0;
3758
3759 if (current_function_uses_pic_offset_table
3760 && pic_offset_table_rtx != 0
3761 && REG_P (pic_offset_table_rtx)
3762 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3763 return 0;
3764
3765 return 1;
3766 }
3767
3768 /* Scan all instructions and renumber all registers into those
3769 available in leaf functions. */
3770
3771 static void
3772 leaf_renumber_regs (rtx first)
3773 {
3774 rtx insn;
3775
3776 /* Renumber only the actual patterns.
3777 The reg-notes can contain frame pointer refs,
3778 and renumbering them could crash, and should not be needed. */
3779 for (insn = first; insn; insn = NEXT_INSN (insn))
3780 if (INSN_P (insn))
3781 leaf_renumber_regs_insn (PATTERN (insn));
3782 for (insn = current_function_epilogue_delay_list;
3783 insn;
3784 insn = XEXP (insn, 1))
3785 if (INSN_P (XEXP (insn, 0)))
3786 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3787 }
3788
3789 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3790 available in leaf functions. */
3791
3792 void
3793 leaf_renumber_regs_insn (rtx in_rtx)
3794 {
3795 int i, j;
3796 const char *format_ptr;
3797
3798 if (in_rtx == 0)
3799 return;
3800
3801 /* Renumber all input-registers into output-registers.
3802      A register rtx is renumbered at most once; the rtx's `used' flag,
3803      set below, marks registers we have already handled.  */
3804
3805 if (REG_P (in_rtx))
3806 {
3807 int newreg;
3808
3809 /* Don't renumber the same reg twice. */
3810 if (in_rtx->used)
3811 return;
3812
3813 newreg = REGNO (in_rtx);
3814 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3815 to reach here as part of a REG_NOTE. */
3816 if (newreg >= FIRST_PSEUDO_REGISTER)
3817 {
3818 in_rtx->used = 1;
3819 return;
3820 }
3821 newreg = LEAF_REG_REMAP (newreg);
3822 if (newreg < 0)
3823 abort ();
3824 regs_ever_live[REGNO (in_rtx)] = 0;
3825 regs_ever_live[newreg] = 1;
3826 REGNO (in_rtx) = newreg;
3827 in_rtx->used = 1;
3828 }
3829
3830 if (INSN_P (in_rtx))
3831 {
3832 /* Inside a SEQUENCE, we find insns.
3833 Renumber just the patterns of these insns,
3834 just as we do for the top-level insns. */
3835 leaf_renumber_regs_insn (PATTERN (in_rtx));
3836 return;
3837 }
3838
3839 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3840
3841 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3842 switch (*format_ptr++)
3843 {
3844 case 'e':
3845 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3846 break;
3847
3848 case 'E':
3849 if (NULL != XVEC (in_rtx, i))
3850 {
3851 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3852 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3853 }
3854 break;
3855
3856 case 'S':
3857 case 's':
3858 case '0':
3859 case 'i':
3860 case 'w':
3861 case 'n':
3862 case 'u':
3863 break;
3864
3865 default:
3866 abort ();
3867 }
3868 }
3869 #endif
3870
3871
3872 /* When -gused is used, emit debug info for only used symbols. But in
3873 addition to the standard intercepted debug_hooks there are some direct
3874 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3875    Those routines may also be called from a higher-level intercepted routine.  To
3876    avoid recording data for such an inner call,
3877 we maintain an intercept nesting counter (debug_nesting). We only save the
3878 intercepted arguments if the nesting is 1. */
3879 int debug_nesting = 0;
3880
3881 static tree *symbol_queue;
3882 int symbol_queue_index = 0;
3883 static int symbol_queue_size = 0;
3884
3885 /* Generate the symbols for any queued up type symbols we encountered
3886 while generating the type info for some originally used symbol.
3887 This might generate additional entries in the queue. Only when
3888 the nesting depth goes to 0 is this routine called. */
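/* The expected calling pattern is roughly: each intercepted debug hook
   increments debug_nesting on entry, and on exit does something like
     if (--debug_nesting == 0)
       debug_flush_symbol_queue ();
   so queued type symbols are emitted only once the outermost intercepted
   call finishes.  */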
3889
3890 void
3891 debug_flush_symbol_queue (void)
3892 {
3893 int i;
3894
3895 /* Make sure that additionally queued items are not flushed
3896 prematurely. */
3897
3898 ++debug_nesting;
3899
3900 for (i = 0; i < symbol_queue_index; ++i)
3901 {
3902       /* If we pushed queued symbols then such symbols must be
3903 output no matter what anyone else says. Specifically,
3904 we need to make sure dbxout_symbol() thinks the symbol was
3905 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3906 which may be set for outside reasons. */
3907 int saved_tree_used = TREE_USED (symbol_queue[i]);
3908 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3909 TREE_USED (symbol_queue[i]) = 1;
3910 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3911
3912 #ifdef DBX_DEBUGGING_INFO
3913 dbxout_symbol (symbol_queue[i], 0);
3914 #endif
3915
3916 TREE_USED (symbol_queue[i]) = saved_tree_used;
3917 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3918 }
3919
3920 symbol_queue_index = 0;
3921 --debug_nesting;
3922 }
3923
3924 /* Queue a type symbol needed as part of the definition of a decl
3925 symbol. These symbols are generated when debug_flush_symbol_queue()
3926 is called. */
3927
3928 void
3929 debug_queue_symbol (tree decl)
3930 {
3931 if (symbol_queue_index >= symbol_queue_size)
3932 {
3933 symbol_queue_size += 10;
3934 symbol_queue = xrealloc (symbol_queue,
3935 symbol_queue_size * sizeof (tree));
3936 }
3937
3938 symbol_queue[symbol_queue_index++] = decl;
3939 }
3940
3941 /* Free symbol queue. */
3942 void
3943 debug_free_queue (void)
3944 {
3945 if (symbol_queue)
3946 {
3947 free (symbol_queue);
3948 symbol_queue = NULL;
3949 symbol_queue_size = 0;
3950 }
3951 }