final.c (final_scan_insn): Don't use FINAL_PRESCAN_LABEL.
1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Last insn processed by final_scan_insn. */
115 static rtx debug_insn;
116 rtx current_output_insn;
117
118 /* Line number of last NOTE. */
119 static int last_linenum;
120
121 /* Highest line number in current block. */
122 static int high_block_linenum;
123
124 /* Likewise for function. */
125 static int high_function_linenum;
126
127 /* Filename of last NOTE. */
128 static const char *last_filename;
129
130 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
131
132 /* Nonzero while outputting an `asm' with operands.
133 This means that inconsistencies are the user's fault, so don't abort.
134 The precise value is the insn being output, to pass to error_for_asm. */
135 rtx this_is_asm_operands;
136
137 /* Number of operands of this insn, for an `asm' with operands. */
138 static unsigned int insn_noperands;
139
140 /* Compare optimization flag. */
141
142 static rtx last_ignored_compare = 0;
143
144 /* Assign a unique number to each insn that is output.
145 This can be used to generate unique local labels. */
146
147 static int insn_counter = 0;
148
149 #ifdef HAVE_cc0
150 /* This variable contains machine-dependent flags (defined in tm.h)
151 set and examined by output routines
152 that describe how to interpret the condition codes properly. */
153
154 CC_STATUS cc_status;
155
156 /* During output of an insn, this contains a copy of cc_status
157 from before the insn. */
158
159 CC_STATUS cc_prev_status;
160 #endif
161
162 /* Indexed by hardware reg number, is 1 if that register is ever
163 used in the current function.
164
165 In life_analysis, or in stupid_life_analysis, this is set
166 up to record the hard regs used explicitly. Reload adds
167 in the hard regs used for holding pseudo regs. Final uses
168 it to generate the code in the function prologue and epilogue
169 to save and restore registers as needed. */
170
171 char regs_ever_live[FIRST_PSEUDO_REGISTER];
172
173 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
174 Unlike regs_ever_live, elements of this array corresponding to
175 eliminable regs like the frame pointer are set if an asm sets them. */
176
177 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
178
179 /* Nonzero means current function must be given a frame pointer.
180 Initialized in function.c to 0. Set only in reload1.c as per
181 the needs of the function. */
182
183 int frame_pointer_needed;
184
185 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
186
187 static int block_depth;
188
189 /* Nonzero if we have enabled APP processing of our assembler output. */
190
191 static int app_on;
192
193 /* If we are outputting an insn sequence, this contains the sequence rtx.
194 Zero otherwise. */
195
196 rtx final_sequence;
197
198 #ifdef ASSEMBLER_DIALECT
199
200 /* Number of the assembler dialect to use, starting at 0. */
201 static int dialect_number;
202 #endif
203
204 /* Indexed by line number, nonzero if there is a note for that line. */
205
206 static char *line_note_exists;
207
208 #ifdef HAVE_conditional_execution
209 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
210 rtx current_insn_predicate;
211 #endif
212
213 #ifdef HAVE_ATTR_length
214 static int asm_insn_count (rtx);
215 #endif
216 static void profile_function (FILE *);
217 static void profile_after_prologue (FILE *);
218 static bool notice_source_line (rtx);
219 static rtx walk_alter_subreg (rtx *);
220 static void output_asm_name (void);
221 static void output_alternate_entry_point (FILE *, rtx);
222 static tree get_mem_expr_from_op (rtx, int *);
223 static void output_asm_operand_names (rtx *, int *, int);
224 static void output_operand (rtx, int);
225 #ifdef LEAF_REGISTERS
226 static void leaf_renumber_regs (rtx);
227 #endif
228 #ifdef HAVE_cc0
229 static int alter_cond (rtx);
230 #endif
231 #ifndef ADDR_VEC_ALIGN
232 static int final_addr_vec_align (rtx);
233 #endif
234 #ifdef HAVE_ATTR_length
235 static int align_fuzz (rtx, rtx, int, unsigned);
236 #endif
237 \f
238 /* Initialize data in final at the beginning of a compilation. */
239
240 void
241 init_final (const char *filename ATTRIBUTE_UNUSED)
242 {
243 app_on = 0;
244 final_sequence = 0;
245
246 #ifdef ASSEMBLER_DIALECT
247 dialect_number = ASSEMBLER_DIALECT;
248 #endif
249 }
250
251 /* Default target function prologue and epilogue assembler output.
252
253 If not overridden for epilogue code, then the function body itself
254 contains return instructions wherever needed. */
255 void
256 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
257 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
258 {
259 }
260
261 /* Default target hook that outputs nothing to a stream. */
262 void
263 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
264 {
265 }
266
267 /* Enable APP processing of subsequent output.
268 Used before the output from an `asm' statement. */
269
270 void
271 app_enable (void)
272 {
273 if (! app_on)
274 {
275 fputs (ASM_APP_ON, asm_out_file);
276 app_on = 1;
277 }
278 }
279
280 /* Disable APP processing of subsequent output.
281 Called from varasm.c before most kinds of output. */
282
283 void
284 app_disable (void)
285 {
286 if (app_on)
287 {
288 fputs (ASM_APP_OFF, asm_out_file);
289 app_on = 0;
290 }
291 }
292 \f
293 /* Return the number of slots filled in the current
294 delayed branch sequence (we don't count the insn needing the
295 delay slot). Zero if not in a delayed branch sequence. */
296
297 #ifdef DELAY_SLOTS
298 int
299 dbr_sequence_length (void)
300 {
301 if (final_sequence != 0)
302 return XVECLEN (final_sequence, 0) - 1;
303 else
304 return 0;
305 }
306 #endif
307 \f
308 /* The next two pages contain routines used to compute the length of an insn
309 and to shorten branches. */
310
311 /* Arrays for insn lengths, and addresses. The latter is referenced by
312 `insn_current_length'. */
313
314 static int *insn_lengths;
315
316 varray_type insn_addresses_;
317
318 /* Max uid for which the above arrays are valid. */
319 static int insn_lengths_max_uid;
320
321 /* Address of insn being processed. Used by `insn_current_length'. */
322 int insn_current_address;
323
324 /* Address of insn being processed in previous iteration. */
325 int insn_last_address;
326
327 /* known invariant alignment of insn being processed. */
328 int insn_current_align;
329
330 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
331 gives the next following alignment insn that increases the known
332 alignment, or NULL_RTX if there is no such insn.
333 For any alignment obtained this way, we can again index uid_align with
334 its uid to obtain the next following align that in turn increases the
335 alignment, till we reach NULL_RTX; the sequence obtained this way
336 for each insn we'll call the alignment chain of this insn in the following
337 comments. */
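/* A hypothetical illustration (not taken from any real target): if the
   insn stream contains, in order, an ordinary insn I, a label A aligned
   to 2**2, a label B aligned to 2**1 and a label C aligned to 2**3,
   then uid_align maps I to A, A to C (B does not increase the alignment
   known after A) and C to NULL_RTX, so the alignment chain of I is
   A, C.  */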
338
339 struct label_alignment
340 {
341 short alignment;
342 short max_skip;
343 };
344
345 static rtx *uid_align;
346 static int *uid_shuid;
347 static struct label_alignment *label_align;
348
349 /* Indicate that branch shortening hasn't yet been done. */
350
351 void
352 init_insn_lengths (void)
353 {
354 if (uid_shuid)
355 {
356 free (uid_shuid);
357 uid_shuid = 0;
358 }
359 if (insn_lengths)
360 {
361 free (insn_lengths);
362 insn_lengths = 0;
363 insn_lengths_max_uid = 0;
364 }
365 #ifdef HAVE_ATTR_length
366 INSN_ADDRESSES_FREE ();
367 #endif
368 if (uid_align)
369 {
370 free (uid_align);
371 uid_align = 0;
372 }
373 }
374
375 /* Obtain the current length of an insn. If branch shortening has been done,
376 get its actual length. Otherwise, get its maximum length. */
377
378 int
379 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
380 {
381 #ifdef HAVE_ATTR_length
382 rtx body;
383 int i;
384 int length = 0;
385
386 if (insn_lengths_max_uid > INSN_UID (insn))
387 return insn_lengths[INSN_UID (insn)];
388 else
389 switch (GET_CODE (insn))
390 {
391 case NOTE:
392 case BARRIER:
393 case CODE_LABEL:
394 return 0;
395
396 case CALL_INSN:
397 length = insn_default_length (insn);
398 break;
399
400 case JUMP_INSN:
401 body = PATTERN (insn);
402 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
403 {
404 /* Alignment is machine-dependent and should be handled by
405 ADDR_VEC_ALIGN. */
406 }
407 else
408 length = insn_default_length (insn);
409 break;
410
411 case INSN:
412 body = PATTERN (insn);
413 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
414 return 0;
415
416 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
417 length = asm_insn_count (body) * insn_default_length (insn);
418 else if (GET_CODE (body) == SEQUENCE)
419 for (i = 0; i < XVECLEN (body, 0); i++)
420 length += get_attr_length (XVECEXP (body, 0, i));
421 else
422 length = insn_default_length (insn);
423 break;
424
425 default:
426 break;
427 }
428
429 #ifdef ADJUST_INSN_LENGTH
430 ADJUST_INSN_LENGTH (insn, length);
431 #endif
432 return length;
433 #else /* not HAVE_ATTR_length */
434 return 0;
435 #endif /* not HAVE_ATTR_length */
436 }
437 \f
438 /* Code to handle alignment inside shorten_branches. */
439
440 /* Here is an explanation of how the algorithm in align_fuzz can give
441 proper results:
442
443 Call a sequence of instructions beginning with alignment point X
444 and continuing until the next alignment point `block X'. When `X'
445 is used in an expression, it means the alignment value of the
446 alignment point.
447
448 Call the distance between the start of the first insn of block X, and
449 the end of the last insn of block X `IX', for the `inner size of X'.
450 This is clearly the sum of the instruction lengths.
451
452 Likewise with the next alignment-delimited block following X, which we
453 shall call block Y.
454
455 Call the distance between the start of the first insn of block X, and
456 the start of the first insn of block Y `OX', for the `outer size of X'.
457
458 The estimated padding is then OX - IX.
459
460 OX can be safely estimated as
461
462 if (X >= Y)
463 OX = round_up(IX, Y)
464 else
465 OX = round_up(IX, X) + Y - X
466
467 Clearly est(IX) >= real(IX), because that only depends on the
468 instruction lengths, and those being overestimated is a given.
469
470 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
471 we needn't worry about that when thinking about OX.
472
473 When X >= Y, the alignment provided by Y adds no uncertainty factor
474 for branch ranges starting before X, so we can just round what we have.
475 But when X < Y, we don't know anything about the, so to speak,
476 `middle bits', so we have to assume the worst when aligning up from an
477 address mod X to one mod Y, which is Y - X. */
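/* A worked example of the estimate above, with made-up numbers: let
   block X be aligned to X = 4, let the following alignment point have
   Y = 8, and let the estimated inner size be IX = 10.  Since X < Y,
   OX = round_up (10, 4) + 8 - 4 = 16, so the estimated padding is
   OX - IX = 6.  With X = 8 and Y = 4 instead, OX = round_up (10, 4)
   = 12 and the padding estimate is 2.  */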
478
479 #ifndef LABEL_ALIGN
480 #define LABEL_ALIGN(LABEL) align_labels_log
481 #endif
482
483 #ifndef LABEL_ALIGN_MAX_SKIP
484 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
485 #endif
486
487 #ifndef LOOP_ALIGN
488 #define LOOP_ALIGN(LABEL) align_loops_log
489 #endif
490
491 #ifndef LOOP_ALIGN_MAX_SKIP
492 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
493 #endif
494
495 #ifndef LABEL_ALIGN_AFTER_BARRIER
496 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
497 #endif
498
499 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
500 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
501 #endif
502
503 #ifndef JUMP_ALIGN
504 #define JUMP_ALIGN(LABEL) align_jumps_log
505 #endif
506
507 #ifndef JUMP_ALIGN_MAX_SKIP
508 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
509 #endif
510
511 #ifndef ADDR_VEC_ALIGN
512 static int
513 final_addr_vec_align (rtx addr_vec)
514 {
515 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
516
517 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
518 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
519 return exact_log2 (align);
520
521 }
522
523 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
524 #endif
525
526 #ifndef INSN_LENGTH_ALIGNMENT
527 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
528 #endif
529
530 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
531
532 static int min_labelno, max_labelno;
533
534 #define LABEL_TO_ALIGNMENT(LABEL) \
535 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
536
537 #define LABEL_TO_MAX_SKIP(LABEL) \
538 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
539
540 /* For the benefit of port-specific code, this is also provided as a function. */
541
542 int
543 label_to_alignment (rtx label)
544 {
545 return LABEL_TO_ALIGNMENT (label);
546 }
547
548 #ifdef HAVE_ATTR_length
549 /* The differences in addresses
550 between a branch and its target might grow or shrink depending on
551 the alignment the start insn of the range (the branch for a forward
552 branch or the label for a backward branch) starts out on; if these
553 differences are used naively, they can even oscillate infinitely.
554 We therefore want to compute a 'worst case' address difference that
556 is independent of the alignment the start insn of the range ends
557 up on, and that is at least as large as the actual difference.
557 The function align_fuzz calculates the amount we have to add to the
558 naively computed difference, by traversing the part of the alignment
559 chain of the start insn of the range that is in front of the end insn
560 of the range, and considering for each alignment the maximum amount
561 that it might contribute to a size increase.
562
563 For casesi tables, we also want to know worst case minimum amounts of
564 address difference, in case a machine description wants to introduce
565 some common offset that is added to all offsets in a table.
566 For this purpose, align_fuzz with a growth argument of 0 computes the
567 appropriate adjustment. */
568
569 /* Compute the maximum delta by which the difference of the addresses of
570 START and END might grow / shrink due to a different address for start
571 which changes the size of alignment insns between START and END.
572 KNOWN_ALIGN_LOG is the alignment known for START.
573 GROWTH should be ~0 if the objective is to compute potential code size
574 increase, and 0 if the objective is to compute potential shrink.
575 The return value is undefined for any other value of GROWTH. */
576
577 static int
578 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
579 {
580 int uid = INSN_UID (start);
581 rtx align_label;
582 int known_align = 1 << known_align_log;
583 int end_shuid = INSN_SHUID (end);
584 int fuzz = 0;
585
586 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
587 {
588 int align_addr, new_align;
589
590 uid = INSN_UID (align_label);
591 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
592 if (uid_shuid[uid] > end_shuid)
593 break;
594 known_align_log = LABEL_TO_ALIGNMENT (align_label);
595 new_align = 1 << known_align_log;
596 if (new_align < known_align)
597 continue;
598 fuzz += (-align_addr ^ growth) & (new_align - known_align);
599 known_align = new_align;
600 }
601 return fuzz;
602 }
603
604 /* Compute a worst-case reference address of a branch so that it
605 can be safely used in the presence of aligned labels. Since the
606 size of the branch itself is unknown, the size of the branch is
607 not included in the range. I.e. for a forward branch, the reference
608 address is the end address of the branch as known from the previous
609 branch shortening pass, minus a value to account for possible size
610 increase due to alignment. For a backward branch, it is the start
611 address of the branch as known from the current pass, plus a value
612 to account for possible size increase due to alignment.
613 NB.: Therefore, the maximum offset allowed for backward branches needs
614 to exclude the branch size. */
615
616 int
617 insn_current_reference_address (rtx branch)
618 {
619 rtx dest, seq;
620 int seq_uid;
621
622 if (! INSN_ADDRESSES_SET_P ())
623 return 0;
624
625 seq = NEXT_INSN (PREV_INSN (branch));
626 seq_uid = INSN_UID (seq);
627 if (GET_CODE (branch) != JUMP_INSN)
628 /* This can happen for example on the PA; the objective is to know the
629 offset to address something in front of the start of the function.
630 Thus, we can treat it like a backward branch.
631 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
632 any alignment we'd encounter, so we skip the call to align_fuzz. */
633 return insn_current_address;
634 dest = JUMP_LABEL (branch);
635
636 /* BRANCH has no proper alignment chain set, so use SEQ.
637 BRANCH also has no INSN_SHUID. */
638 if (INSN_SHUID (seq) < INSN_SHUID (dest))
639 {
640 /* Forward branch. */
641 return (insn_last_address + insn_lengths[seq_uid]
642 - align_fuzz (seq, dest, length_unit_log, ~0));
643 }
644 else
645 {
646 /* Backward branch. */
647 return (insn_current_address
648 + align_fuzz (dest, seq, length_unit_log, ~0));
649 }
650 }
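/* A small made-up example of the computation above: for a forward
   branch with insn_last_address == 100, insn_lengths[seq_uid] == 4 and
   an align_fuzz result of 6, the worst-case reference address is
   100 + 4 - 6 = 98.  Lowering the reference address by the fuzz makes
   the branch-to-target distance look larger, which is the conservative
   direction for a forward branch; a backward branch instead adds the
   fuzz to insn_current_address, again enlarging the assumed distance.  */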
651 #endif /* HAVE_ATTR_length */
652 \f
653 void
654 compute_alignments (void)
655 {
656 int log, max_skip, max_log;
657 basic_block bb;
658
659 if (label_align)
660 {
661 free (label_align);
662 label_align = 0;
663 }
664
665 max_labelno = max_label_num ();
666 min_labelno = get_first_label_num ();
667 label_align = xcalloc (max_labelno - min_labelno + 1,
668 sizeof (struct label_alignment));
669
670 /* If not optimizing or optimizing for size, don't assign any alignments. */
671 if (! optimize || optimize_size)
672 return;
673
674 FOR_EACH_BB (bb)
675 {
676 rtx label = bb->head;
677 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
678 edge e;
679
680 if (GET_CODE (label) != CODE_LABEL
681 || probably_never_executed_bb_p (bb))
682 continue;
683 max_log = LABEL_ALIGN (label);
684 max_skip = LABEL_ALIGN_MAX_SKIP;
685
686 for (e = bb->pred; e; e = e->pred_next)
687 {
688 if (e->flags & EDGE_FALLTHRU)
689 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
690 else
691 branch_frequency += EDGE_FREQUENCY (e);
692 }
693
694 /* There are two purposes for aligning a block with no fallthru incoming edge:
695 1) to avoid fetch stalls when the branch destination is near a cache boundary;
696 2) to improve cache efficiency in case the previous block is not executed
697 (so it does not need to be in the cache).
698 
699 To catch the first case, we align frequently executed blocks.
700 To catch the second, we align blocks that are executed more frequently
701 than the predecessor and whose predecessor is likely not to be executed
702 when the function is called. */
703
704 if (!has_fallthru
705 && (branch_frequency > BB_FREQ_MAX / 10
706 || (bb->frequency > bb->prev_bb->frequency * 10
707 && (bb->prev_bb->frequency
708 <= ENTRY_BLOCK_PTR->frequency / 2))))
709 {
710 log = JUMP_ALIGN (label);
711 if (max_log < log)
712 {
713 max_log = log;
714 max_skip = JUMP_ALIGN_MAX_SKIP;
715 }
716 }
717 /* If the block is frequently executed and reached mostly by a non-fallthru
718 edge, align it. It is most likely the first block of a loop. */
719 if (has_fallthru
720 && maybe_hot_bb_p (bb)
721 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
722 && branch_frequency > fallthru_frequency * 2)
723 {
724 log = LOOP_ALIGN (label);
725 if (max_log < log)
726 {
727 max_log = log;
728 max_skip = LOOP_ALIGN_MAX_SKIP;
729 }
730 }
731 LABEL_TO_ALIGNMENT (label) = max_log;
732 LABEL_TO_MAX_SKIP (label) = max_skip;
733 }
734 }
735 \f
736 /* Make a pass over all insns and compute their actual lengths by shortening
737 any branches of variable length if possible. */
738
739 /* Give a default value for the lowest address in a function. */
740
741 #ifndef FIRST_INSN_ADDRESS
742 #define FIRST_INSN_ADDRESS 0
743 #endif
744
745 /* shorten_branches might be called multiple times: for example, the SH
746 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
747 In order to do this, it needs proper length information, which it obtains
748 by calling shorten_branches. This cannot be collapsed with
749 shorten_branches itself into a single pass unless we also want to integrate
750 reorg.c, since the branch splitting exposes new instructions with delay
751 slots. */
752
753 void
754 shorten_branches (rtx first ATTRIBUTE_UNUSED)
755 {
756 rtx insn;
757 int max_uid;
758 int i;
759 int max_log;
760 int max_skip;
761 #ifdef HAVE_ATTR_length
762 #define MAX_CODE_ALIGN 16
763 rtx seq;
764 int something_changed = 1;
765 char *varying_length;
766 rtx body;
767 int uid;
768 rtx align_tab[MAX_CODE_ALIGN];
769
770 #endif
771
772 /* Compute maximum UID and allocate label_align / uid_shuid. */
773 max_uid = get_max_uid ();
774
775 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
776
777 if (max_labelno != max_label_num ())
778 {
779 int old = max_labelno;
780 int n_labels;
781 int n_old_labels;
782
783 max_labelno = max_label_num ();
784
785 n_labels = max_labelno - min_labelno + 1;
786 n_old_labels = old - min_labelno + 1;
787
788 label_align = xrealloc (label_align,
789 n_labels * sizeof (struct label_alignment));
790
791 /* The range of labels grows monotonically in the function. An abort here
792 means that the initialization of the array got lost. */
793 if (n_old_labels > n_labels)
794 abort ();
795
796 memset (label_align + n_old_labels, 0,
797 (n_labels - n_old_labels) * sizeof (struct label_alignment));
798 }
799
800 /* Initialize label_align and set up uid_shuid to be strictly
801 monotonically rising with insn order. */
802 /* We use max_log here to keep track of the maximum alignment we want to
803 impose on the next CODE_LABEL (or the current one if we are processing
804 the CODE_LABEL itself). */
805
806 max_log = 0;
807 max_skip = 0;
808
809 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
810 {
811 int log;
812
813 INSN_SHUID (insn) = i++;
814 if (INSN_P (insn))
815 {
816 /* reorg might make the first insn of a loop run only once,
817 and delete the label in front of it. Then we want to apply
818 the loop alignment to the new label created by reorg, which
819 is separated from the NOTE_INSN_LOOP_BEG by the former
820 loop start insn. */
821 }
822 else if (GET_CODE (insn) == CODE_LABEL)
823 {
824 rtx next;
825
826 /* Merge in alignments computed by compute_alignments. */
827 log = LABEL_TO_ALIGNMENT (insn);
828 if (max_log < log)
829 {
830 max_log = log;
831 max_skip = LABEL_TO_MAX_SKIP (insn);
832 }
833
834 log = LABEL_ALIGN (insn);
835 if (max_log < log)
836 {
837 max_log = log;
838 max_skip = LABEL_ALIGN_MAX_SKIP;
839 }
840 next = NEXT_INSN (insn);
841 /* ADDR_VECs only take room if read-only data goes into the text
842 section. */
843 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
844 if (next && GET_CODE (next) == JUMP_INSN)
845 {
846 rtx nextbody = PATTERN (next);
847 if (GET_CODE (nextbody) == ADDR_VEC
848 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
849 {
850 log = ADDR_VEC_ALIGN (next);
851 if (max_log < log)
852 {
853 max_log = log;
854 max_skip = LABEL_ALIGN_MAX_SKIP;
855 }
856 }
857 }
858 LABEL_TO_ALIGNMENT (insn) = max_log;
859 LABEL_TO_MAX_SKIP (insn) = max_skip;
860 max_log = 0;
861 max_skip = 0;
862 }
863 else if (GET_CODE (insn) == BARRIER)
864 {
865 rtx label;
866
867 for (label = insn; label && ! INSN_P (label);
868 label = NEXT_INSN (label))
869 if (GET_CODE (label) == CODE_LABEL)
870 {
871 log = LABEL_ALIGN_AFTER_BARRIER (insn);
872 if (max_log < log)
873 {
874 max_log = log;
875 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
876 }
877 break;
878 }
879 }
880 }
881 #ifdef HAVE_ATTR_length
882
883 /* Allocate the rest of the arrays. */
884 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
885 insn_lengths_max_uid = max_uid;
886 /* Syntax errors can lead to labels being outside of the main insn stream.
887 Initialize insn_addresses, so that we get reproducible results. */
888 INSN_ADDRESSES_ALLOC (max_uid);
889
890 varying_length = xcalloc (max_uid, sizeof (char));
891
892 /* Initialize uid_align. We scan instructions
893 from end to start, and keep in align_tab[n] the last seen insn
894 that does an alignment of at least n+1, i.e. the successor
895 in the alignment chain for an insn that does / has a known
896 alignment of n. */
897 uid_align = xcalloc (max_uid, sizeof *uid_align);
898
899 for (i = MAX_CODE_ALIGN; --i >= 0;)
900 align_tab[i] = NULL_RTX;
901 seq = get_last_insn ();
902 for (; seq; seq = PREV_INSN (seq))
903 {
904 int uid = INSN_UID (seq);
905 int log;
906 log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
907 uid_align[uid] = align_tab[0];
908 if (log)
909 {
910 /* Found an alignment label. */
911 uid_align[uid] = align_tab[log];
912 for (i = log - 1; i >= 0; i--)
913 align_tab[i] = seq;
914 }
915 }
916 #ifdef CASE_VECTOR_SHORTEN_MODE
917 if (optimize)
918 {
919 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
920 label fields. */
921
922 int min_shuid = INSN_SHUID (get_insns ()) - 1;
923 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
924 int rel;
925
926 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
927 {
928 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
929 int len, i, min, max, insn_shuid;
930 int min_align;
931 addr_diff_vec_flags flags;
932
933 if (GET_CODE (insn) != JUMP_INSN
934 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
935 continue;
936 pat = PATTERN (insn);
937 len = XVECLEN (pat, 1);
938 if (len <= 0)
939 abort ();
940 min_align = MAX_CODE_ALIGN;
941 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
942 {
943 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
944 int shuid = INSN_SHUID (lab);
945 if (shuid < min)
946 {
947 min = shuid;
948 min_lab = lab;
949 }
950 if (shuid > max)
951 {
952 max = shuid;
953 max_lab = lab;
954 }
955 if (min_align > LABEL_TO_ALIGNMENT (lab))
956 min_align = LABEL_TO_ALIGNMENT (lab);
957 }
958 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
959 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
960 insn_shuid = INSN_SHUID (insn);
961 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
962 flags.min_align = min_align;
963 flags.base_after_vec = rel > insn_shuid;
964 flags.min_after_vec = min > insn_shuid;
965 flags.max_after_vec = max > insn_shuid;
966 flags.min_after_base = min > rel;
967 flags.max_after_base = max > rel;
968 ADDR_DIFF_VEC_FLAGS (pat) = flags;
969 }
970 }
971 #endif /* CASE_VECTOR_SHORTEN_MODE */
972
973 /* Compute initial lengths, addresses, and varying flags for each insn. */
974 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
975 insn != 0;
976 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
977 {
978 uid = INSN_UID (insn);
979
980 insn_lengths[uid] = 0;
981
982 if (GET_CODE (insn) == CODE_LABEL)
983 {
984 int log = LABEL_TO_ALIGNMENT (insn);
985 if (log)
986 {
987 int align = 1 << log;
988 int new_address = (insn_current_address + align - 1) & -align;
989 insn_lengths[uid] = new_address - insn_current_address;
990 }
991 }
992
993 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
994
995 if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
996 || GET_CODE (insn) == CODE_LABEL)
997 continue;
998 if (INSN_DELETED_P (insn))
999 continue;
1000
1001 body = PATTERN (insn);
1002 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1003 {
1004 /* This only takes room if read-only data goes into the text
1005 section. */
1006 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1007 insn_lengths[uid] = (XVECLEN (body,
1008 GET_CODE (body) == ADDR_DIFF_VEC)
1009 * GET_MODE_SIZE (GET_MODE (body)));
1010 /* Alignment is handled by ADDR_VEC_ALIGN. */
1011 }
1012 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1013 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1014 else if (GET_CODE (body) == SEQUENCE)
1015 {
1016 int i;
1017 int const_delay_slots;
1018 #ifdef DELAY_SLOTS
1019 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1020 #else
1021 const_delay_slots = 0;
1022 #endif
1023 /* Inside a delay slot sequence, we do not do any branch shortening
1024 if the shortening could change the number of delay slots
1025 of the branch. */
1026 for (i = 0; i < XVECLEN (body, 0); i++)
1027 {
1028 rtx inner_insn = XVECEXP (body, 0, i);
1029 int inner_uid = INSN_UID (inner_insn);
1030 int inner_length;
1031
1032 if (GET_CODE (body) == ASM_INPUT
1033 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1034 inner_length = (asm_insn_count (PATTERN (inner_insn))
1035 * insn_default_length (inner_insn));
1036 else
1037 inner_length = insn_default_length (inner_insn);
1038
1039 insn_lengths[inner_uid] = inner_length;
1040 if (const_delay_slots)
1041 {
1042 if ((varying_length[inner_uid]
1043 = insn_variable_length_p (inner_insn)) != 0)
1044 varying_length[uid] = 1;
1045 INSN_ADDRESSES (inner_uid) = (insn_current_address
1046 + insn_lengths[uid]);
1047 }
1048 else
1049 varying_length[inner_uid] = 0;
1050 insn_lengths[uid] += inner_length;
1051 }
1052 }
1053 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1054 {
1055 insn_lengths[uid] = insn_default_length (insn);
1056 varying_length[uid] = insn_variable_length_p (insn);
1057 }
1058
1059 /* If needed, do any adjustment. */
1060 #ifdef ADJUST_INSN_LENGTH
1061 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1062 if (insn_lengths[uid] < 0)
1063 fatal_insn ("negative insn length", insn);
1064 #endif
1065 }
1066
1067 /* Now loop over all the insns finding varying length insns. For each,
1068 get the current insn length. If it has changed, reflect the change.
1069 When nothing changes for a full pass, we are done. */
1070
1071 while (something_changed)
1072 {
1073 something_changed = 0;
1074 insn_current_align = MAX_CODE_ALIGN - 1;
1075 for (insn_current_address = FIRST_INSN_ADDRESS, insn = first;
1076 insn != 0;
1077 insn = NEXT_INSN (insn))
1078 {
1079 int new_length;
1080 #ifdef ADJUST_INSN_LENGTH
1081 int tmp_length;
1082 #endif
1083 int length_align;
1084
1085 uid = INSN_UID (insn);
1086
1087 if (GET_CODE (insn) == CODE_LABEL)
1088 {
1089 int log = LABEL_TO_ALIGNMENT (insn);
1090 if (log > insn_current_align)
1091 {
1092 int align = 1 << log;
1093 int new_address= (insn_current_address + align - 1) & -align;
1094 insn_lengths[uid] = new_address - insn_current_address;
1095 insn_current_align = log;
1096 insn_current_address = new_address;
1097 }
1098 else
1099 insn_lengths[uid] = 0;
1100 INSN_ADDRESSES (uid) = insn_current_address;
1101 continue;
1102 }
1103
1104 length_align = INSN_LENGTH_ALIGNMENT (insn);
1105 if (length_align < insn_current_align)
1106 insn_current_align = length_align;
1107
1108 insn_last_address = INSN_ADDRESSES (uid);
1109 INSN_ADDRESSES (uid) = insn_current_address;
1110
1111 #ifdef CASE_VECTOR_SHORTEN_MODE
1112 if (optimize && GET_CODE (insn) == JUMP_INSN
1113 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1114 {
1115 rtx body = PATTERN (insn);
1116 int old_length = insn_lengths[uid];
1117 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1118 rtx min_lab = XEXP (XEXP (body, 2), 0);
1119 rtx max_lab = XEXP (XEXP (body, 3), 0);
1120 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1121 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1122 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1123 rtx prev;
1124 int rel_align = 0;
1125 addr_diff_vec_flags flags;
1126
1127 /* Avoid automatic aggregate initialization. */
1128 flags = ADDR_DIFF_VEC_FLAGS (body);
1129
1130 /* Try to find a known alignment for rel_lab. */
1131 for (prev = rel_lab;
1132 prev
1133 && ! insn_lengths[INSN_UID (prev)]
1134 && ! (varying_length[INSN_UID (prev)] & 1);
1135 prev = PREV_INSN (prev))
1136 if (varying_length[INSN_UID (prev)] & 2)
1137 {
1138 rel_align = LABEL_TO_ALIGNMENT (prev);
1139 break;
1140 }
1141
1142 /* See the comment on addr_diff_vec_flags in rtl.h for the
1143 meaning of the flags values. base: REL_LAB vec: INSN */
1144 /* Anything after INSN still has addresses from the last
1145 pass; adjust these so that they reflect our current
1146 estimate for this pass. */
1147 if (flags.base_after_vec)
1148 rel_addr += insn_current_address - insn_last_address;
1149 if (flags.min_after_vec)
1150 min_addr += insn_current_address - insn_last_address;
1151 if (flags.max_after_vec)
1152 max_addr += insn_current_address - insn_last_address;
1153 /* We want to know the worst case, i.e. lowest possible value
1154 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1155 its offset is positive, and we have to be wary of code shrink;
1156 otherwise, it is negative, and we have to be wary of code
1157 size increase. */
1158 if (flags.min_after_base)
1159 {
1160 /* If INSN is between REL_LAB and MIN_LAB, the size
1161 changes we are about to make can change the alignment
1162 within the observed offset, therefore we have to break
1163 it up into two parts that are independent. */
1164 if (! flags.base_after_vec && flags.min_after_vec)
1165 {
1166 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1167 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1168 }
1169 else
1170 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1171 }
1172 else
1173 {
1174 if (flags.base_after_vec && ! flags.min_after_vec)
1175 {
1176 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1177 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1178 }
1179 else
1180 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1181 }
1182 /* Likewise, determine the worst case, i.e. the highest possible
1183 value for the offset of MAX_LAB. */
1184 if (flags.max_after_base)
1185 {
1186 if (! flags.base_after_vec && flags.max_after_vec)
1187 {
1188 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1189 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1190 }
1191 else
1192 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1193 }
1194 else
1195 {
1196 if (flags.base_after_vec && ! flags.max_after_vec)
1197 {
1198 max_addr += align_fuzz (max_lab, insn, 0, 0);
1199 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1200 }
1201 else
1202 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1203 }
1204 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1205 max_addr - rel_addr,
1206 body));
1207 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1208 {
1209 insn_lengths[uid]
1210 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1211 insn_current_address += insn_lengths[uid];
1212 if (insn_lengths[uid] != old_length)
1213 something_changed = 1;
1214 }
1215
1216 continue;
1217 }
1218 #endif /* CASE_VECTOR_SHORTEN_MODE */
1219
1220 if (! (varying_length[uid]))
1221 {
1222 if (GET_CODE (insn) == INSN
1223 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1224 {
1225 int i;
1226
1227 body = PATTERN (insn);
1228 for (i = 0; i < XVECLEN (body, 0); i++)
1229 {
1230 rtx inner_insn = XVECEXP (body, 0, i);
1231 int inner_uid = INSN_UID (inner_insn);
1232
1233 INSN_ADDRESSES (inner_uid) = insn_current_address;
1234
1235 insn_current_address += insn_lengths[inner_uid];
1236 }
1237 }
1238 else
1239 insn_current_address += insn_lengths[uid];
1240
1241 continue;
1242 }
1243
1244 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1245 {
1246 int i;
1247
1248 body = PATTERN (insn);
1249 new_length = 0;
1250 for (i = 0; i < XVECLEN (body, 0); i++)
1251 {
1252 rtx inner_insn = XVECEXP (body, 0, i);
1253 int inner_uid = INSN_UID (inner_insn);
1254 int inner_length;
1255
1256 INSN_ADDRESSES (inner_uid) = insn_current_address;
1257
1258 /* insn_current_length returns 0 for insns with a
1259 non-varying length. */
1260 if (! varying_length[inner_uid])
1261 inner_length = insn_lengths[inner_uid];
1262 else
1263 inner_length = insn_current_length (inner_insn);
1264
1265 if (inner_length != insn_lengths[inner_uid])
1266 {
1267 insn_lengths[inner_uid] = inner_length;
1268 something_changed = 1;
1269 }
1270 insn_current_address += insn_lengths[inner_uid];
1271 new_length += inner_length;
1272 }
1273 }
1274 else
1275 {
1276 new_length = insn_current_length (insn);
1277 insn_current_address += new_length;
1278 }
1279
1280 #ifdef ADJUST_INSN_LENGTH
1281 /* If needed, do any adjustment. */
1282 tmp_length = new_length;
1283 ADJUST_INSN_LENGTH (insn, new_length);
1284 insn_current_address += (new_length - tmp_length);
1285 #endif
1286
1287 if (new_length != insn_lengths[uid])
1288 {
1289 insn_lengths[uid] = new_length;
1290 something_changed = 1;
1291 }
1292 }
1293 /* For a non-optimizing compile, do only a single pass. */
1294 if (!optimize)
1295 break;
1296 }
1297
1298 free (varying_length);
1299
1300 #endif /* HAVE_ATTR_length */
1301 }
1302
1303 #ifdef HAVE_ATTR_length
1304 /* Given the body of an INSN known to be generated by an ASM statement, return
1305 the number of machine instructions likely to be generated for this insn.
1306 This is used to compute its length. */
1307
1308 static int
1309 asm_insn_count (rtx body)
1310 {
1311 const char *template;
1312 int count = 1;
1313
1314 if (GET_CODE (body) == ASM_INPUT)
1315 template = XSTR (body, 0);
1316 else
1317 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1318
1319 for (; *template; template++)
1320 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1321 count++;
1322
1323 return count;
1324 }
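/* For example, with the default IS_ASM_LOGICAL_LINE_SEPARATOR (';'),
   a made-up template such as "mov r1,r2; add r1,r1,r3\n nop" is
   counted as three instructions: the initial count of one, plus one
   for the ';' and one for the '\n'.  */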
1325 #endif
1326 \f
1327 /* Output assembler code for the start of a function,
1328 and initialize some of the variables in this file
1329 for the new function. The label for the function and associated
1330 assembler pseudo-ops have already been output in `assemble_start_function'.
1331
1332 FIRST is the first insn of the rtl for the function being compiled.
1333 FILE is the file to write assembler code to.
1334 OPTIMIZE is nonzero if we should eliminate redundant
1335 test and compare insns. */
1336
1337 void
1338 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1339 int optimize ATTRIBUTE_UNUSED)
1340 {
1341 block_depth = 0;
1342
1343 this_is_asm_operands = 0;
1344
1345 last_filename = locator_file (prologue_locator);
1346 last_linenum = locator_line (prologue_locator);
1347
1348 high_block_linenum = high_function_linenum = last_linenum;
1349
1350 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1351
1352 #if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
1353 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1354 dwarf2out_begin_prologue (0, NULL);
1355 #endif
1356
1357 #ifdef LEAF_REG_REMAP
1358 if (current_function_uses_only_leaf_regs)
1359 leaf_renumber_regs (first);
1360 #endif
1361
1362 /* The Sun386i and perhaps other machines don't work right
1363 if the profiling code comes after the prologue. */
1364 #ifdef PROFILE_BEFORE_PROLOGUE
1365 if (current_function_profile)
1366 profile_function (file);
1367 #endif /* PROFILE_BEFORE_PROLOGUE */
1368
1369 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1370 if (dwarf2out_do_frame ())
1371 dwarf2out_frame_debug (NULL_RTX);
1372 #endif
1373
1374 /* If debugging, assign block numbers to all of the blocks in this
1375 function. */
1376 if (write_symbols)
1377 {
1378 remove_unnecessary_notes ();
1379 reemit_insn_block_notes ();
1380 number_blocks (current_function_decl);
1381 /* We never actually put out begin/end notes for the top-level
1382 block in the function. But, conceptually, that block is
1383 always needed. */
1384 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1385 }
1386
1387 /* First output the function prologue: code to set up the stack frame. */
1388 (*targetm.asm_out.function_prologue) (file, get_frame_size ());
1389
1390 /* If the machine represents the prologue as RTL, the profiling code must
1391 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1392 #ifdef HAVE_prologue
1393 if (! HAVE_prologue)
1394 #endif
1395 profile_after_prologue (file);
1396 }
1397
1398 static void
1399 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1400 {
1401 #ifndef PROFILE_BEFORE_PROLOGUE
1402 if (current_function_profile)
1403 profile_function (file);
1404 #endif /* not PROFILE_BEFORE_PROLOGUE */
1405 }
1406
1407 static void
1408 profile_function (FILE *file ATTRIBUTE_UNUSED)
1409 {
1410 #ifndef NO_PROFILE_COUNTERS
1411 # define NO_PROFILE_COUNTERS 0
1412 #endif
1413 #if defined(ASM_OUTPUT_REG_PUSH)
1414 int sval = current_function_returns_struct;
1415 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1416 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1417 int cxt = current_function_needs_context;
1418 #endif
1419 #endif /* ASM_OUTPUT_REG_PUSH */
1420
1421 if (! NO_PROFILE_COUNTERS)
1422 {
1423 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1424 data_section ();
1425 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1426 (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
1427 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1428 }
1429
1430 function_section (current_function_decl);
1431
1432 #if defined(ASM_OUTPUT_REG_PUSH)
1433 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1434 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1435 #endif
1436
1437 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1438 if (cxt)
1439 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1440 #else
1441 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1442 if (cxt)
1443 {
1444 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1445 }
1446 #endif
1447 #endif
1448
1449 FUNCTION_PROFILER (file, current_function_funcdef_no);
1450
1451 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1452 if (cxt)
1453 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1454 #else
1455 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1456 if (cxt)
1457 {
1458 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1459 }
1460 #endif
1461 #endif
1462
1463 #if defined(ASM_OUTPUT_REG_PUSH)
1464 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1465 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1466 #endif
1467 }
1468
1469 /* Output assembler code for the end of a function.
1470 For clarity, args are same as those of `final_start_function'
1471 even though not all of them are needed. */
1472
1473 void
1474 final_end_function (void)
1475 {
1476 app_disable ();
1477
1478 (*debug_hooks->end_function) (high_function_linenum);
1479
1480 /* Finally, output the function epilogue:
1481 code to restore the stack frame and return to the caller. */
1482 (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());
1483
1484 /* And debug output. */
1485 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1486
1487 #if defined (DWARF2_UNWIND_INFO)
1488 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1489 && dwarf2out_do_frame ())
1490 dwarf2out_end_epilogue (last_linenum, last_filename);
1491 #endif
1492 }
1493 \f
1494 /* Output assembler code for some insns: all or part of a function.
1495 For description of args, see `final_start_function', above.
1496
1497 PRESCAN is 1 if we are not really outputting,
1498 just scanning as if we were outputting.
1499 Prescanning deletes and rearranges insns just like ordinary output.
1500 PRESCAN is -2 if we are outputting after having prescanned.
1501 In this case, don't try to delete or rearrange insns
1502 because that has already been done.
1503 Prescanning is done only on certain machines. */
1504
1505 void
1506 final (rtx first, FILE *file, int optimize, int prescan)
1507 {
1508 rtx insn;
1509 int max_line = 0;
1510 int max_uid = 0;
1511
1512 last_ignored_compare = 0;
1513
1514 /* Make a map indicating which line numbers appear in this function.
1515 When producing SDB debugging info, delete troublesome line number
1516 notes from inlined functions in other files as well as duplicate
1517 line number notes. */
1518 #ifdef SDB_DEBUGGING_INFO
1519 if (write_symbols == SDB_DEBUG)
1520 {
1521 rtx last = 0;
1522 for (insn = first; insn; insn = NEXT_INSN (insn))
1523 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1524 {
1525 if ((RTX_INTEGRATED_P (insn)
1526 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
1527 || (last != 0
1528 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1529 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
1530 {
1531 delete_insn (insn); /* Use delete_note. */
1532 continue;
1533 }
1534 last = insn;
1535 if (NOTE_LINE_NUMBER (insn) > max_line)
1536 max_line = NOTE_LINE_NUMBER (insn);
1537 }
1538 }
1539 else
1540 #endif
1541 {
1542 for (insn = first; insn; insn = NEXT_INSN (insn))
1543 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > max_line)
1544 max_line = NOTE_LINE_NUMBER (insn);
1545 }
1546
1547 line_note_exists = xcalloc (max_line + 1, sizeof (char));
1548
1549 for (insn = first; insn; insn = NEXT_INSN (insn))
1550 {
1551 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1552 max_uid = INSN_UID (insn);
1553 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1554 line_note_exists[NOTE_LINE_NUMBER (insn)] = 1;
1555 #ifdef HAVE_cc0
1556 /* If CC tracking across branches is enabled, record the insn which
1557 jumps to each label that is reached from only one place. */
1558 if (optimize && GET_CODE (insn) == JUMP_INSN)
1559 {
1560 rtx lab = JUMP_LABEL (insn);
1561 if (lab && LABEL_NUSES (lab) == 1)
1562 {
1563 LABEL_REFS (lab) = insn;
1564 }
1565 }
1566 #endif
1567 }
1568
1569 init_recog ();
1570
1571 CC_STATUS_INIT;
1572
1573 /* Output the insns. */
1574 for (insn = NEXT_INSN (first); insn;)
1575 {
1576 #ifdef HAVE_ATTR_length
1577 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1578 {
1579 /* This can be triggered by bugs elsewhere in the compiler if
1580 new insns are created after init_insn_lengths is called. */
1581 if (GET_CODE (insn) == NOTE)
1582 insn_current_address = -1;
1583 else
1584 abort ();
1585 }
1586 else
1587 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1588 #endif /* HAVE_ATTR_length */
1589
1590 insn = final_scan_insn (insn, file, optimize, prescan, 0);
1591 }
1592
1593 free (line_note_exists);
1594 line_note_exists = NULL;
1595 }
1596 \f
1597 const char *
1598 get_insn_template (int code, rtx insn)
1599 {
1600 const void *output = insn_data[code].output;
1601 switch (insn_data[code].output_format)
1602 {
1603 case INSN_OUTPUT_FORMAT_SINGLE:
1604 return (const char *) output;
1605 case INSN_OUTPUT_FORMAT_MULTI:
1606 return ((const char *const *) output)[which_alternative];
1607 case INSN_OUTPUT_FORMAT_FUNCTION:
1608 if (insn == NULL)
1609 abort ();
1610 return (*(insn_output_fn) output) (recog_data.operand, insn);
1611
1612 default:
1613 abort ();
1614 }
1615 }
1616
1617 /* Emit the appropriate declaration for an alternate-entry-point
1618 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1619 LABEL_KIND != LABEL_NORMAL.
1620
1621 The case fall-through in this function is intentional. */
1622 static void
1623 output_alternate_entry_point (FILE *file, rtx insn)
1624 {
1625 const char *name = LABEL_NAME (insn);
1626
1627 switch (LABEL_KIND (insn))
1628 {
1629 case LABEL_WEAK_ENTRY:
1630 #ifdef ASM_WEAKEN_LABEL
1631 ASM_WEAKEN_LABEL (file, name);
1632 #endif
1633 case LABEL_GLOBAL_ENTRY:
1634 (*targetm.asm_out.globalize_label) (file, name);
1635 case LABEL_STATIC_ENTRY:
1636 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1637 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1638 #endif
1639 ASM_OUTPUT_LABEL (file, name);
1640 break;
1641
1642 case LABEL_NORMAL:
1643 default:
1644 abort ();
1645 }
1646 }
1647
1648 /* The final scan for one insn, INSN.
1649 Args are same as in `final', except that INSN
1650 is the insn being scanned.
1651 Value returned is the next insn to be scanned.
1652
1653 NOPEEPHOLES is the flag to disallow peephole processing (currently
1654 used within delayed branch sequence output). */
1655
1656 rtx
1657 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1658 int prescan, int nopeepholes ATTRIBUTE_UNUSED)
1659 {
1660 #ifdef HAVE_cc0
1661 rtx set;
1662 #endif
1663
1664 insn_counter++;
1665
1666 /* Ignore deleted insns. These can occur when we split insns (due to a
1667 template of "#") while not optimizing. */
1668 if (INSN_DELETED_P (insn))
1669 return NEXT_INSN (insn);
1670
1671 switch (GET_CODE (insn))
1672 {
1673 case NOTE:
1674 if (prescan > 0)
1675 break;
1676
1677 switch (NOTE_LINE_NUMBER (insn))
1678 {
1679 case NOTE_INSN_DELETED:
1680 case NOTE_INSN_LOOP_BEG:
1681 case NOTE_INSN_LOOP_END:
1682 case NOTE_INSN_LOOP_END_TOP_COND:
1683 case NOTE_INSN_LOOP_CONT:
1684 case NOTE_INSN_LOOP_VTOP:
1685 case NOTE_INSN_FUNCTION_END:
1686 case NOTE_INSN_REPEATED_LINE_NUMBER:
1687 case NOTE_INSN_EXPECTED_VALUE:
1688 break;
1689
1690 case NOTE_INSN_BASIC_BLOCK:
1691 #ifdef IA64_UNWIND_INFO
1692 IA64_UNWIND_EMIT (asm_out_file, insn);
1693 #endif
1694 if (flag_debug_asm)
1695 fprintf (asm_out_file, "\t%s basic block %d\n",
1696 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1697 break;
1698
1699 case NOTE_INSN_EH_REGION_BEG:
1700 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1701 NOTE_EH_HANDLER (insn));
1702 break;
1703
1704 case NOTE_INSN_EH_REGION_END:
1705 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1706 NOTE_EH_HANDLER (insn));
1707 break;
1708
1709 case NOTE_INSN_PROLOGUE_END:
1710 (*targetm.asm_out.function_end_prologue) (file);
1711 profile_after_prologue (file);
1712 break;
1713
1714 case NOTE_INSN_EPILOGUE_BEG:
1715 (*targetm.asm_out.function_begin_epilogue) (file);
1716 break;
1717
1718 case NOTE_INSN_FUNCTION_BEG:
1719 app_disable ();
1720 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1721 break;
1722
1723 case NOTE_INSN_BLOCK_BEG:
1724 if (debug_info_level == DINFO_LEVEL_NORMAL
1725 || debug_info_level == DINFO_LEVEL_VERBOSE
1726 || write_symbols == DWARF_DEBUG
1727 || write_symbols == DWARF2_DEBUG
1728 || write_symbols == VMS_AND_DWARF2_DEBUG
1729 || write_symbols == VMS_DEBUG)
1730 {
1731 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1732
1733 app_disable ();
1734 ++block_depth;
1735 high_block_linenum = last_linenum;
1736
1737 /* Output debugging info about the symbol-block beginning. */
1738 (*debug_hooks->begin_block) (last_linenum, n);
1739
1740 /* Mark this block as output. */
1741 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1742 }
1743 break;
1744
1745 case NOTE_INSN_BLOCK_END:
1746 if (debug_info_level == DINFO_LEVEL_NORMAL
1747 || debug_info_level == DINFO_LEVEL_VERBOSE
1748 || write_symbols == DWARF_DEBUG
1749 || write_symbols == DWARF2_DEBUG
1750 || write_symbols == VMS_AND_DWARF2_DEBUG
1751 || write_symbols == VMS_DEBUG)
1752 {
1753 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1754
1755 app_disable ();
1756
1757 /* End of a symbol-block. */
1758 --block_depth;
1759 if (block_depth < 0)
1760 abort ();
1761
1762 (*debug_hooks->end_block) (high_block_linenum, n);
1763 }
1764 break;
1765
1766 case NOTE_INSN_DELETED_LABEL:
1767 /* Emit the label. We may have deleted the CODE_LABEL because
1768 the label could be proved to be unreachable, though still
1769 referenced (in the form of having its address taken). */
1770 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1771 break;
1772
1773 case 0:
1774 break;
1775
1776 default:
1777 if (NOTE_LINE_NUMBER (insn) <= 0)
1778 abort ();
1779 break;
1780 }
1781 break;
1782
1783 case BARRIER:
1784 #if defined (DWARF2_UNWIND_INFO)
1785 if (dwarf2out_do_frame ())
1786 dwarf2out_frame_debug (insn);
1787 #endif
1788 break;
1789
1790 case CODE_LABEL:
1791 /* The target port might emit labels in the output function for
1792 some insn, e.g. sh.c output_branchy_insn. */
1793 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1794 {
1795 int align = LABEL_TO_ALIGNMENT (insn);
1796 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1797 int max_skip = LABEL_TO_MAX_SKIP (insn);
1798 #endif
1799
1800 if (align && NEXT_INSN (insn))
1801 {
1802 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1803 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1804 #else
1805 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1806 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1807 #else
1808 ASM_OUTPUT_ALIGN (file, align);
1809 #endif
1810 #endif
1811 }
1812 }
1813 #ifdef HAVE_cc0
1814 CC_STATUS_INIT;
1815 /* If this label is reached from only one place, set the condition
1816 codes from the instruction just before the branch. */
1817
1818 /* Disabled because some insns set cc_status in the C output code
1819 and NOTICE_UPDATE_CC alone can set incorrect status. */
1820 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1821 {
1822 rtx jump = LABEL_REFS (insn);
1823 rtx barrier = prev_nonnote_insn (insn);
1824 rtx prev;
1825 /* If the LABEL_REFS field of this label has been set to point
1826 at a branch, the predecessor of the branch is a regular
1827 insn, and that branch is the only way to reach this label,
1828 set the condition codes based on the branch and its
1829 predecessor. */
1830 if (barrier && GET_CODE (barrier) == BARRIER
1831 && jump && GET_CODE (jump) == JUMP_INSN
1832 && (prev = prev_nonnote_insn (jump))
1833 && GET_CODE (prev) == INSN)
1834 {
1835 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1836 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1837 }
1838 }
1839 #endif
1840 if (prescan > 0)
1841 break;
1842
1843 if (LABEL_NAME (insn))
1844 (*debug_hooks->label) (insn);
1845
1846 if (app_on)
1847 {
1848 fputs (ASM_APP_OFF, file);
1849 app_on = 0;
1850 }
1851 if (NEXT_INSN (insn) != 0
1852 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
1853 {
1854 rtx nextbody = PATTERN (NEXT_INSN (insn));
1855
1856 /* If this label is followed by a jump-table,
1857 make sure we put the label in the read-only section. Also
1858 possibly write the label and jump table together. */
1859
1860 if (GET_CODE (nextbody) == ADDR_VEC
1861 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1862 {
1863 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1864 /* In this case, the case vector is being moved by the
1865 target, so don't output the label at all. Leave that
1866 to the back end macros. */
1867 #else
1868 if (! JUMP_TABLES_IN_TEXT_SECTION)
1869 {
1870 int log_align;
1871
1872 readonly_data_section ();
1873
1874 #ifdef ADDR_VEC_ALIGN
1875 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1876 #else
1877 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1878 #endif
1879 ASM_OUTPUT_ALIGN (file, log_align);
1880 }
1881 else
1882 function_section (current_function_decl);
1883
1884 #ifdef ASM_OUTPUT_CASE_LABEL
1885 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1886 NEXT_INSN (insn));
1887 #else
1888 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1889 #endif
1890 #endif
1891 break;
1892 }
1893 }
1894 if (LABEL_ALT_ENTRY_P (insn))
1895 output_alternate_entry_point (file, insn);
1896 else
1897 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1898 break;
1899
1900 default:
1901 {
1902 rtx body = PATTERN (insn);
1903 int insn_code_number;
1904 const char *template;
1905 rtx note;
1906
1907 /* An INSN, JUMP_INSN or CALL_INSN.
1908 First check for special kinds that recog doesn't recognize. */
1909
1910 if (GET_CODE (body) == USE /* These are just declarations */
1911 || GET_CODE (body) == CLOBBER)
1912 break;
1913
1914 #ifdef HAVE_cc0
1915 /* If there is a REG_CC_SETTER note on this insn, it means that
1916 the setting of the condition code was done in the delay slot
1917 of the insn that branched here. So recover the cc status
1918 from the insn that set it. */
1919
1920 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1921 if (note)
1922 {
1923 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1924 cc_prev_status = cc_status;
1925 }
1926 #endif
1927
1928 /* Detect insns that are really jump-tables
1929 and output them as such. */
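   /* Illustrative note (not in the original source): a dispatch table
      reaches this point as a JUMP_INSN whose body is, roughly, either

          (addr_vec:SI [ (label_ref L1) (label_ref L2) ... ])

      holding absolute label addresses, or

          (addr_diff_vec:HI (label_ref Lbase) [ (label_ref L1) ... ])

      holding label-minus-base differences; the CODE_LABEL at
      PREV_INSN (insn) names the table itself.  */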
1930
1931 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1932 {
1933 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1934 int vlen, idx;
1935 #endif
1936
1937 if (prescan > 0)
1938 break;
1939
1940 if (app_on)
1941 {
1942 fputs (ASM_APP_OFF, file);
1943 app_on = 0;
1944 }
1945
1946 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1947 if (GET_CODE (body) == ADDR_VEC)
1948 {
1949 #ifdef ASM_OUTPUT_ADDR_VEC
1950 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
1951 #else
1952 abort ();
1953 #endif
1954 }
1955 else
1956 {
1957 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
1958 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
1959 #else
1960 abort ();
1961 #endif
1962 }
1963 #else
1964 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
1965 for (idx = 0; idx < vlen; idx++)
1966 {
1967 if (GET_CODE (body) == ADDR_VEC)
1968 {
1969 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
1970 ASM_OUTPUT_ADDR_VEC_ELT
1971 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
1972 #else
1973 abort ();
1974 #endif
1975 }
1976 else
1977 {
1978 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
1979 ASM_OUTPUT_ADDR_DIFF_ELT
1980 (file,
1981 body,
1982 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
1983 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
1984 #else
1985 abort ();
1986 #endif
1987 }
1988 }
1989 #ifdef ASM_OUTPUT_CASE_END
1990 ASM_OUTPUT_CASE_END (file,
1991 CODE_LABEL_NUMBER (PREV_INSN (insn)),
1992 insn);
1993 #endif
1994 #endif
1995
1996 function_section (current_function_decl);
1997
1998 break;
1999 }
2000 /* Output this line note if it is the first or the last line
2001 note in a row. */
2002 if (notice_source_line (insn))
2003 {
2004 (*debug_hooks->source_line) (last_linenum, last_filename);
2005 }
2006
2007 if (GET_CODE (body) == ASM_INPUT)
2008 {
2009 const char *string = XSTR (body, 0);
2010
2011 /* There's no telling what that did to the condition codes. */
2012 CC_STATUS_INIT;
2013 if (prescan > 0)
2014 break;
2015
2016 if (string[0])
2017 {
2018 if (! app_on)
2019 {
2020 fputs (ASM_APP_ON, file);
2021 app_on = 1;
2022 }
2023 fprintf (asm_out_file, "\t%s\n", string);
2024 }
2025 break;
2026 }
2027
2028 /* Detect `asm' construct with operands. */
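	  /* Illustrative example (not in the original source): for an
	     extended asm such as

	         asm ("add %1,%0" : "=r" (x) : "r" (y));

	     asm_noperands returns 2, and decode_asm_operands below fills
	     OPS with the rtxes for x and y and returns the template
	     "add %1,%0", which output_asm_insn then expands.  */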
2029 if (asm_noperands (body) >= 0)
2030 {
2031 unsigned int noperands = asm_noperands (body);
2032 rtx *ops = alloca (noperands * sizeof (rtx));
2033 const char *string;
2034
2035 /* There's no telling what that did to the condition codes. */
2036 CC_STATUS_INIT;
2037 if (prescan > 0)
2038 break;
2039
2040 /* Get out the operand values. */
2041 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2042 /* Inhibit aborts on what would otherwise be compiler bugs. */
2043 insn_noperands = noperands;
2044 this_is_asm_operands = insn;
2045
2046 #ifdef FINAL_PRESCAN_INSN
2047 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2048 #endif
2049
2050 /* Output the insn using them. */
2051 if (string[0])
2052 {
2053 if (! app_on)
2054 {
2055 fputs (ASM_APP_ON, file);
2056 app_on = 1;
2057 }
2058 output_asm_insn (string, ops);
2059 }
2060
2061 this_is_asm_operands = 0;
2062 break;
2063 }
2064
2065 if (prescan <= 0 && app_on)
2066 {
2067 fputs (ASM_APP_OFF, file);
2068 app_on = 0;
2069 }
2070
2071 if (GET_CODE (body) == SEQUENCE)
2072 {
2073 /* A delayed-branch sequence */
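	  /* Layout note (not in the original source): in a SEQUENCE body,
	     XVECEXP (body, 0, 0) is the branch or call that required the
	     delay slots, and elements 1 .. XVECLEN (body, 0) - 1 are the
	     insns that fill them; they are emitted here in that order.  */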
2074 int i;
2075 rtx next;
2076
2077 if (prescan > 0)
2078 break;
2079 final_sequence = body;
2080
2081 /* Record the delay slots' frame information before the branch.
2082 This is needed for delayed calls: see execute_cfa_program(). */
2083 #if defined (DWARF2_UNWIND_INFO)
2084 if (dwarf2out_do_frame ())
2085 for (i = 1; i < XVECLEN (body, 0); i++)
2086 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2087 #endif
2088
2089 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2090 force the restoration of a comparison that was previously
2091 thought unnecessary. If that happens, cancel this sequence
2092 and cause that insn to be restored. */
2093
2094 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1);
2095 if (next != XVECEXP (body, 0, 1))
2096 {
2097 final_sequence = 0;
2098 return next;
2099 }
2100
2101 for (i = 1; i < XVECLEN (body, 0); i++)
2102 {
2103 rtx insn = XVECEXP (body, 0, i);
2104 rtx next = NEXT_INSN (insn);
2105 /* We loop in case any instruction in a delay slot gets
2106 split. */
2107 do
2108 insn = final_scan_insn (insn, file, 0, prescan, 1);
2109 while (insn != next);
2110 }
2111 #ifdef DBR_OUTPUT_SEQEND
2112 DBR_OUTPUT_SEQEND (file);
2113 #endif
2114 final_sequence = 0;
2115
2116 /* If the insn requiring the delay slot was a CALL_INSN, the
2117 insns in the delay slot are actually executed before the
2118 called function. Hence we don't preserve any CC-setting
2119 actions in these insns and the CC must be marked as being
2120 clobbered by the function. */
2121 if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
2122 {
2123 CC_STATUS_INIT;
2124 }
2125 break;
2126 }
2127
2128 /* We have a real machine instruction as rtl. */
2129
2130 body = PATTERN (insn);
2131
2132 #ifdef HAVE_cc0
2133 set = single_set (insn);
2134
2135 /* Check for redundant test and compare instructions
2136 (when the condition codes are already set up as desired).
2137 This is done only when optimizing; if not optimizing,
2138 it should be possible for the user to alter a variable
2139 with the debugger in between statements
2140 and the next statement should reexamine the variable
2141 to compute the condition codes. */
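	/* Worked example (not in the original source): if the previous
	   arithmetic insn left cc_status.value1 = (reg:SI 2) via
	   NOTICE_UPDATE_CC, then a following test whose SET_SRC is
	   (reg:SI 2) with cc0 as SET_DEST adds nothing; the code below
	   records it in last_ignored_compare and emits no assembly.  */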
2142
2143 if (optimize)
2144 {
2145 if (set
2146 && GET_CODE (SET_DEST (set)) == CC0
2147 && insn != last_ignored_compare)
2148 {
2149 if (GET_CODE (SET_SRC (set)) == SUBREG)
2150 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2151 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2152 {
2153 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2154 XEXP (SET_SRC (set), 0)
2155 = alter_subreg (&XEXP (SET_SRC (set), 0));
2156 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2157 XEXP (SET_SRC (set), 1)
2158 = alter_subreg (&XEXP (SET_SRC (set), 1));
2159 }
2160 if ((cc_status.value1 != 0
2161 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2162 || (cc_status.value2 != 0
2163 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2164 {
2165 /* Don't delete insn if it has an addressing side-effect. */
2166 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2167 /* or if anything in it is volatile. */
2168 && ! volatile_refs_p (PATTERN (insn)))
2169 {
2170 /* We don't really delete the insn; just ignore it. */
2171 last_ignored_compare = insn;
2172 break;
2173 }
2174 }
2175 }
2176 }
2177 #endif
2178
2179 #ifndef STACK_REGS
2180 /* Don't bother outputting obvious no-ops, even without -O.
2181 This optimization is fast and doesn't interfere with debugging.
2182 Don't do this if the insn is in a delay slot, since this
2183 will cause an improper number of delay insns to be written. */
2184 if (final_sequence == 0
2185 && prescan >= 0
2186 && GET_CODE (insn) == INSN && GET_CODE (body) == SET
2187 && GET_CODE (SET_SRC (body)) == REG
2188 && GET_CODE (SET_DEST (body)) == REG
2189 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2190 break;
2191 #endif
2192
2193 #ifdef HAVE_cc0
2194 /* If this is a conditional branch, maybe modify it
2195 if the cc's are in a nonstandard state
2196 so that it accomplishes the same thing that it would
2197 do straightforwardly if the cc's were set up normally. */
2198
2199 if (cc_status.flags != 0
2200 && GET_CODE (insn) == JUMP_INSN
2201 && GET_CODE (body) == SET
2202 && SET_DEST (body) == pc_rtx
2203 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2204 && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
2205 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2206 /* This is done during prescan; it is not done again
2207 in final scan when prescan has been done. */
2208 && prescan >= 0)
2209 {
2210 /* This function may alter the contents of its argument
2211 and clear some of the cc_status.flags bits.
2212 It may also return 1 meaning condition now always true
2213 or -1 meaning condition now always false
2214 or 2 meaning condition nontrivial but altered. */
2215 int result = alter_cond (XEXP (SET_SRC (body), 0));
2216 /* If condition now has fixed value, replace the IF_THEN_ELSE
2217 with its then-operand or its else-operand. */
2218 if (result == 1)
2219 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2220 if (result == -1)
2221 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2222
2223 /* The jump is now either unconditional or a no-op.
2224 If it has become a no-op, don't try to output it.
2225 (It would not be recognized.) */
2226 if (SET_SRC (body) == pc_rtx)
2227 {
2228 delete_insn (insn);
2229 break;
2230 }
2231 else if (GET_CODE (SET_SRC (body)) == RETURN)
2232 /* Replace (set (pc) (return)) with (return). */
2233 PATTERN (insn) = body = SET_SRC (body);
2234
2235 /* Rerecognize the instruction if it has changed. */
2236 if (result != 0)
2237 INSN_CODE (insn) = -1;
2238 }
2239
2240 /* Make same adjustments to instructions that examine the
2241 condition codes without jumping and instructions that
2242 handle conditional moves (if this machine has either one). */
2243
2244 if (cc_status.flags != 0
2245 && set != 0)
2246 {
2247 rtx cond_rtx, then_rtx, else_rtx;
2248
2249 if (GET_CODE (insn) != JUMP_INSN
2250 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2251 {
2252 cond_rtx = XEXP (SET_SRC (set), 0);
2253 then_rtx = XEXP (SET_SRC (set), 1);
2254 else_rtx = XEXP (SET_SRC (set), 2);
2255 }
2256 else
2257 {
2258 cond_rtx = SET_SRC (set);
2259 then_rtx = const_true_rtx;
2260 else_rtx = const0_rtx;
2261 }
2262
2263 switch (GET_CODE (cond_rtx))
2264 {
2265 case GTU:
2266 case GT:
2267 case LTU:
2268 case LT:
2269 case GEU:
2270 case GE:
2271 case LEU:
2272 case LE:
2273 case EQ:
2274 case NE:
2275 {
2276 int result;
2277 if (XEXP (cond_rtx, 0) != cc0_rtx)
2278 break;
2279 result = alter_cond (cond_rtx);
2280 if (result == 1)
2281 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2282 else if (result == -1)
2283 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2284 else if (result == 2)
2285 INSN_CODE (insn) = -1;
2286 if (SET_DEST (set) == SET_SRC (set))
2287 delete_insn (insn);
2288 }
2289 break;
2290
2291 default:
2292 break;
2293 }
2294 }
2295
2296 #endif
2297
2298 #ifdef HAVE_peephole
2299 /* Do machine-specific peephole optimizations if desired. */
2300
2301 if (optimize && !flag_no_peephole && !nopeepholes)
2302 {
2303 rtx next = peephole (insn);
2304 /* When peepholing, if there were notes within the peephole,
2305 emit them before the peephole. */
2306 if (next != 0 && next != NEXT_INSN (insn))
2307 {
2308 rtx prev = PREV_INSN (insn);
2309
2310 for (note = NEXT_INSN (insn); note != next;
2311 note = NEXT_INSN (note))
2312 final_scan_insn (note, file, optimize, prescan, nopeepholes);
2313
2314 /* In case this is prescan, put the notes
2315 in proper position for later rescan. */
2316 note = NEXT_INSN (insn);
2317 PREV_INSN (note) = prev;
2318 NEXT_INSN (prev) = note;
2319 NEXT_INSN (PREV_INSN (next)) = insn;
2320 PREV_INSN (insn) = PREV_INSN (next);
2321 NEXT_INSN (insn) = next;
2322 PREV_INSN (next) = insn;
2323 }
2324
2325 /* PEEPHOLE might have changed this. */
2326 body = PATTERN (insn);
2327 }
2328 #endif
2329
2330 /* Try to recognize the instruction.
2331 If successful, verify that the operands satisfy the
2332 constraints for the instruction. Crash if they don't,
2333 since `reload' should have changed them so that they do. */
2334
2335 insn_code_number = recog_memoized (insn);
2336 cleanup_subreg_operands (insn);
2337
2338 /* Dump the insn in the assembly for debugging. */
2339 if (flag_dump_rtl_in_asm)
2340 {
2341 print_rtx_head = ASM_COMMENT_START;
2342 print_rtl_single (asm_out_file, insn);
2343 print_rtx_head = "";
2344 }
2345
2346 if (! constrain_operands_cached (1))
2347 fatal_insn_not_found (insn);
2348
2349 /* Some target machines need to prescan each insn before
2350 it is output. */
2351
2352 #ifdef FINAL_PRESCAN_INSN
2353 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2354 #endif
2355
2356 #ifdef HAVE_conditional_execution
2357 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2358 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2359 else
2360 current_insn_predicate = NULL_RTX;
2361 #endif
2362
2363 #ifdef HAVE_cc0
2364 cc_prev_status = cc_status;
2365
2366 /* Update `cc_status' for this instruction.
2367 The instruction's output routine may change it further.
2368 If the output routine for a jump insn needs to depend
2369 on the cc status, it should look at cc_prev_status. */
2370
2371 NOTICE_UPDATE_CC (body, insn);
2372 #endif
2373
2374 current_output_insn = debug_insn = insn;
2375
2376 #if defined (DWARF2_UNWIND_INFO)
2377 if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
2378 dwarf2out_frame_debug (insn);
2379 #endif
2380
2381 /* Find the proper template for this insn. */
2382 template = get_insn_template (insn_code_number, insn);
2383
2384 /* If the C code returns 0, it means that it is a jump insn
2385 which follows a deleted test insn, and that test insn
2386 needs to be reinserted. */
2387 if (template == 0)
2388 {
2389 rtx prev;
2390
2391 if (prev_nonnote_insn (insn) != last_ignored_compare)
2392 abort ();
2393
2394 /* We have already processed the notes between the setter and
2395 the user. Make sure we don't process them again; this is
2396 particularly important if one of the notes is a block
2397 scope note or an EH note. */
2398 for (prev = insn;
2399 prev != last_ignored_compare;
2400 prev = PREV_INSN (prev))
2401 {
2402 if (GET_CODE (prev) == NOTE)
2403 delete_insn (prev); /* Use delete_note. */
2404 }
2405
2406 return prev;
2407 }
2408
2409 /* If the template is the string "#", it means that this insn must
2410 be split. */
2411 if (template[0] == '#' && template[1] == '\0')
2412 {
2413 rtx new = try_split (body, insn, 0);
2414
2415 /* If we didn't split the insn, go away. */
2416 if (new == insn && PATTERN (new) == body)
2417 fatal_insn ("could not split insn", insn);
2418
2419 #ifdef HAVE_ATTR_length
2420 /* This instruction should have been split in shorten_branches,
2421 to ensure that we would have valid length info for the
2422 splitees. */
2423 abort ();
2424 #endif
2425
2426 return new;
2427 }
2428
2429 if (prescan > 0)
2430 break;
2431
2432 #ifdef IA64_UNWIND_INFO
2433 IA64_UNWIND_EMIT (asm_out_file, insn);
2434 #endif
2435 /* Output assembler code from the template. */
2436
2437 output_asm_insn (template, recog_data.operand);
2438
2439 /* If necessary, report the effect that the instruction has on
2440 the unwind info. We've already done this for delay slots
2441 and call instructions. */
2442 #if defined (DWARF2_UNWIND_INFO)
2443 if (GET_CODE (insn) == INSN
2444 #if !defined (HAVE_prologue)
2445 && !ACCUMULATE_OUTGOING_ARGS
2446 #endif
2447 && final_sequence == 0
2448 && dwarf2out_do_frame ())
2449 dwarf2out_frame_debug (insn);
2450 #endif
2451
2452 #if 0
2453 /* It's not at all clear why we did this and doing so used to
2454 interfere with tests that used REG_WAS_0 notes, which are
2455 now gone, so let's try leaving this out. */
2456
2457 /* Mark this insn as having been output. */
2458 INSN_DELETED_P (insn) = 1;
2459 #endif
2460
2461 /* Emit information for vtable gc. */
2462 note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
2463
2464 current_output_insn = debug_insn = 0;
2465 }
2466 }
2467 return NEXT_INSN (insn);
2468 }
2469 \f
2470 /* Output debugging info to the assembler file FILE
2471 based on the NOTE-insn INSN, assumed to be a line number. */
2472
2473 static bool
2474 notice_source_line (rtx insn)
2475 {
2476 const char *filename = insn_file (insn);
2477 int linenum = insn_line (insn);
2478
2479 if (filename && (filename != last_filename || last_linenum != linenum))
2480 {
2481 last_filename = filename;
2482 last_linenum = linenum;
2483 high_block_linenum = MAX (last_linenum, high_block_linenum);
2484 high_function_linenum = MAX (last_linenum, high_function_linenum);
2485 return true;
2486 }
2487 return false;
2488 }
2489 \f
2490 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2491 directly to the desired hard register. */
2492
2493 void
2494 cleanup_subreg_operands (rtx insn)
2495 {
2496 int i;
2497 extract_insn_cached (insn);
2498 for (i = 0; i < recog_data.n_operands; i++)
2499 {
2500 /* The following test cannot use recog_data.operand when testing
2501 for a SUBREG: the underlying object might have been changed
2502 already if we are inside a match_operator expression that
2503 matches the else clause. Instead we test the underlying
2504 expression directly. */
2505 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2506 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2507 else if (GET_CODE (recog_data.operand[i]) == PLUS
2508 || GET_CODE (recog_data.operand[i]) == MULT
2509 || GET_CODE (recog_data.operand[i]) == MEM)
2510 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2511 }
2512
2513 for (i = 0; i < recog_data.n_dups; i++)
2514 {
2515 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2516 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2517 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2518 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2519 || GET_CODE (*recog_data.dup_loc[i]) == MEM)
2520 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2521 }
2522 }
2523
2524 /* If X is a SUBREG, replace it with a REG or a MEM,
2525 based on the thing it is a subreg of. */
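/* Illustrative sketch (not in the original source): after reload,
   (subreg:QI (reg:SI 0) 0) simplifies to (reg:QI 0), while
   (subreg:SI (mem:DI A) 4) becomes the original MEM with its mode and
   address adjusted by adjust_address, i.e. an SImode MEM at A plus 4.
   Only hard registers and MEMs should appear under a SUBREG here.  */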
2526
2527 rtx
2528 alter_subreg (rtx *xp)
2529 {
2530 rtx x = *xp;
2531 rtx y = SUBREG_REG (x);
2532
2533 /* simplify_subreg does not remove subreg from volatile references.
2534 We are required to. */
2535 if (GET_CODE (y) == MEM)
2536 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2537 else
2538 {
2539 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2540 SUBREG_BYTE (x));
2541
2542 if (new != 0)
2543 *xp = new;
2544 /* Simplify_subreg can't handle some REG cases, but we have to. */
2545 else if (GET_CODE (y) == REG)
2546 {
2547 unsigned int regno = subreg_hard_regno (x, 1);
2548 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2549 }
2550 else
2551 abort ();
2552 }
2553
2554 return *xp;
2555 }
2556
2557 /* Do alter_subreg on all the SUBREGs contained in X. */
2558
2559 static rtx
2560 walk_alter_subreg (rtx *xp)
2561 {
2562 rtx x = *xp;
2563 switch (GET_CODE (x))
2564 {
2565 case PLUS:
2566 case MULT:
2567 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2568 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2569 break;
2570
2571 case MEM:
2572 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2573 break;
2574
2575 case SUBREG:
2576 return alter_subreg (xp);
2577
2578 default:
2579 break;
2580 }
2581
2582 return *xp;
2583 }
2584 \f
2585 #ifdef HAVE_cc0
2586
2587 /* Given BODY, the body of a jump instruction, alter the jump condition
2588 as required by the bits that are set in cc_status.flags.
2589 Not all of the bits there can be handled at this level in all cases.
2590
2591 The value is normally 0.
2592 1 means that the condition has become always true.
2593 -1 means that the condition has become always false.
2594 2 means that COND has been altered. */
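/* Worked example (not in the original source): with CC_REVERSED set,
   a COND of (gt (cc0) (const_int 0)) is rewritten in place to
   (lt (cc0) (const_int 0)) and 2 is returned, so the caller knows to
   re-recognize the jump.  With CC_NOT_NEGATIVE set instead, a COND of
   (lt ...) can never hold and -1 is returned: the jump is a no-op.  */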
2595
2596 static int
2597 alter_cond (rtx cond)
2598 {
2599 int value = 0;
2600
2601 if (cc_status.flags & CC_REVERSED)
2602 {
2603 value = 2;
2604 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2605 }
2606
2607 if (cc_status.flags & CC_INVERTED)
2608 {
2609 value = 2;
2610 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2611 }
2612
2613 if (cc_status.flags & CC_NOT_POSITIVE)
2614 switch (GET_CODE (cond))
2615 {
2616 case LE:
2617 case LEU:
2618 case GEU:
2619 /* Jump becomes unconditional. */
2620 return 1;
2621
2622 case GT:
2623 case GTU:
2624 case LTU:
2625 /* Jump becomes no-op. */
2626 return -1;
2627
2628 case GE:
2629 PUT_CODE (cond, EQ);
2630 value = 2;
2631 break;
2632
2633 case LT:
2634 PUT_CODE (cond, NE);
2635 value = 2;
2636 break;
2637
2638 default:
2639 break;
2640 }
2641
2642 if (cc_status.flags & CC_NOT_NEGATIVE)
2643 switch (GET_CODE (cond))
2644 {
2645 case GE:
2646 case GEU:
2647 /* Jump becomes unconditional. */
2648 return 1;
2649
2650 case LT:
2651 case LTU:
2652 /* Jump becomes no-op. */
2653 return -1;
2654
2655 case LE:
2656 case LEU:
2657 PUT_CODE (cond, EQ);
2658 value = 2;
2659 break;
2660
2661 case GT:
2662 case GTU:
2663 PUT_CODE (cond, NE);
2664 value = 2;
2665 break;
2666
2667 default:
2668 break;
2669 }
2670
2671 if (cc_status.flags & CC_NO_OVERFLOW)
2672 switch (GET_CODE (cond))
2673 {
2674 case GEU:
2675 /* Jump becomes unconditional. */
2676 return 1;
2677
2678 case LEU:
2679 PUT_CODE (cond, EQ);
2680 value = 2;
2681 break;
2682
2683 case GTU:
2684 PUT_CODE (cond, NE);
2685 value = 2;
2686 break;
2687
2688 case LTU:
2689 /* Jump becomes no-op. */
2690 return -1;
2691
2692 default:
2693 break;
2694 }
2695
2696 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2697 switch (GET_CODE (cond))
2698 {
2699 default:
2700 abort ();
2701
2702 case NE:
2703 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2704 value = 2;
2705 break;
2706
2707 case EQ:
2708 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2709 value = 2;
2710 break;
2711 }
2712
2713 if (cc_status.flags & CC_NOT_SIGNED)
2714 /* The flags are valid if signed condition operators are converted
2715 to unsigned. */
2716 switch (GET_CODE (cond))
2717 {
2718 case LE:
2719 PUT_CODE (cond, LEU);
2720 value = 2;
2721 break;
2722
2723 case LT:
2724 PUT_CODE (cond, LTU);
2725 value = 2;
2726 break;
2727
2728 case GT:
2729 PUT_CODE (cond, GTU);
2730 value = 2;
2731 break;
2732
2733 case GE:
2734 PUT_CODE (cond, GEU);
2735 value = 2;
2736 break;
2737
2738 default:
2739 break;
2740 }
2741
2742 return value;
2743 }
2744 #endif
2745 \f
2746 /* Report inconsistency between the assembler template and the operands.
2747 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2748
2749 void
2750 output_operand_lossage (const char *msgid, ...)
2751 {
2752 char *fmt_string;
2753 char *new_message;
2754 const char *pfx_str;
2755 va_list ap;
2756
2757 va_start (ap, msgid);
2758
2759 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2760 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2761 vasprintf (&new_message, fmt_string, ap);
2762
2763 if (this_is_asm_operands)
2764 error_for_asm (this_is_asm_operands, "%s", new_message);
2765 else
2766 internal_error ("%s", new_message);
2767
2768 free (fmt_string);
2769 free (new_message);
2770 va_end (ap);
2771 }
2772 \f
2773 /* Output of assembler code from a template, and its subroutines. */
2774
2775 /* Annotate the assembly with a comment describing the pattern and
2776 alternative used. */
2777
2778 static void
2779 output_asm_name (void)
2780 {
2781 if (debug_insn)
2782 {
2783 int num = INSN_CODE (debug_insn);
2784 fprintf (asm_out_file, "\t%s %d\t%s",
2785 ASM_COMMENT_START, INSN_UID (debug_insn),
2786 insn_data[num].name);
2787 if (insn_data[num].n_alternatives > 1)
2788 fprintf (asm_out_file, "/%d", which_alternative + 1);
2789 #ifdef HAVE_ATTR_length
2790 fprintf (asm_out_file, "\t[length = %d]",
2791 get_attr_length (debug_insn));
2792 #endif
2793 /* Clear this so only the first assembler insn
2794 of any rtl insn will get the special comment for -dp. */
2795 debug_insn = 0;
2796 }
2797 }
2798
2799 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2800 or its address, return that expr. Set *PADDRESSP to 1 if the expr
2801 corresponds to the address of the object and 0 if to the object. */
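/* Example (not in the original source): a MEM whose MEM_EXPR is the
   VAR_DECL of a local `x' yields that decl with *PADDRESSP set to 0.
   A MEM with no MEM_EXPR whose address is a REG carrying the REG_EXPR
   of a pointer parameter `p' yields p's decl with *PADDRESSP set to 1,
   since the decl then describes the address rather than the object.  */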
2802
2803 static tree
2804 get_mem_expr_from_op (rtx op, int *paddressp)
2805 {
2806 tree expr;
2807 int inner_addressp;
2808
2809 *paddressp = 0;
2810
2811 if (GET_CODE (op) == REG)
2812 return REG_EXPR (op);
2813 else if (GET_CODE (op) != MEM)
2814 return 0;
2815
2816 if (MEM_EXPR (op) != 0)
2817 return MEM_EXPR (op);
2818
2819 /* Otherwise we have an address, so indicate it and look at the address. */
2820 *paddressp = 1;
2821 op = XEXP (op, 0);
2822
2823 /* First check if we have a decl for the address, then look at the right side
2824 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2825 But don't allow the address itself to be indirect. */
2826 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2827 return expr;
2828 else if (GET_CODE (op) == PLUS
2829 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2830 return expr;
2831
2832 while (GET_RTX_CLASS (GET_CODE (op)) == '1'
2833 || GET_RTX_CLASS (GET_CODE (op)) == '2')
2834 op = XEXP (op, 0);
2835
2836 expr = get_mem_expr_from_op (op, &inner_addressp);
2837 return inner_addressp ? 0 : expr;
2838 }
2839
2840 /* Output operand names for assembler instructions. OPERANDS is the
2841 operand vector, OPORDER is the order to write the operands, and NOPS
2842 is the number of operands to write. */
2843
2844 static void
2845 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2846 {
2847 int wrote = 0;
2848 int i;
2849
2850 for (i = 0; i < nops; i++)
2851 {
2852 int addressp;
2853 rtx op = operands[oporder[i]];
2854 tree expr = get_mem_expr_from_op (op, &addressp);
2855
2856 fprintf (asm_out_file, "%c%s",
2857 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2858 wrote = 1;
2859 if (expr)
2860 {
2861 fprintf (asm_out_file, "%s",
2862 addressp ? "*" : "");
2863 print_mem_expr (asm_out_file, expr);
2864 wrote = 1;
2865 }
2866 else if (REG_P (op) && ORIGINAL_REGNO (op)
2867 && ORIGINAL_REGNO (op) != REGNO (op))
2868 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2869 }
2870 }
2871
2872 /* Output text from TEMPLATE to the assembler output file,
2873 obeying %-directions to substitute operands taken from
2874 the vector OPERANDS.
2875
2876 %N (for N a digit) means print operand N in usual manner.
2877 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2878 and print the label name with no punctuation.
2879 %cN means require operand N to be a constant
2880 and print the constant expression with no punctuation.
2881 %aN means expect operand N to be a memory address
2882 (not a memory reference!) and print a reference
2883 to that address.
2884 %nN means expect operand N to be a constant
2885 and print a constant expression for minus the value
2886 of the operand, with no other punctuation. */
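/* Example (not in the original source): given a template such as
   "add %2,%1,%0" with OPERANDS[0..2] set to register and constant
   rtxes, each %N is printed through output_operand and hence through
   the target's PRINT_OPERAND macro, while %l, %c, %a and %n are
   handled directly here; a hypothetical RISC port might thus emit
   "add r3,r2,#5".  */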
2887
2888 void
2889 output_asm_insn (const char *template, rtx *operands)
2890 {
2891 const char *p;
2892 int c;
2893 #ifdef ASSEMBLER_DIALECT
2894 int dialect = 0;
2895 #endif
2896 int oporder[MAX_RECOG_OPERANDS];
2897 char opoutput[MAX_RECOG_OPERANDS];
2898 int ops = 0;
2899
2900 /* An insn may return a null string template
2901 in a case where no assembler code is needed. */
2902 if (*template == 0)
2903 return;
2904
2905 memset (opoutput, 0, sizeof opoutput);
2906 p = template;
2907 putc ('\t', asm_out_file);
2908
2909 #ifdef ASM_OUTPUT_OPCODE
2910 ASM_OUTPUT_OPCODE (asm_out_file, p);
2911 #endif
2912
2913 while ((c = *p++))
2914 switch (c)
2915 {
2916 case '\n':
2917 if (flag_verbose_asm)
2918 output_asm_operand_names (operands, oporder, ops);
2919 if (flag_print_asm_name)
2920 output_asm_name ();
2921
2922 ops = 0;
2923 memset (opoutput, 0, sizeof opoutput);
2924
2925 putc (c, asm_out_file);
2926 #ifdef ASM_OUTPUT_OPCODE
2927 while ((c = *p) == '\t')
2928 {
2929 putc (c, asm_out_file);
2930 p++;
2931 }
2932 ASM_OUTPUT_OPCODE (asm_out_file, p);
2933 #endif
2934 break;
2935
2936 #ifdef ASSEMBLER_DIALECT
2937 case '{':
2938 {
2939 int i;
2940
2941 if (dialect)
2942 output_operand_lossage ("nested assembly dialect alternatives");
2943 else
2944 dialect = 1;
2945
2946 /* If we want the first dialect, do nothing. Otherwise, skip
2947 DIALECT_NUMBER of strings ending with '|'. */
2948 for (i = 0; i < dialect_number; i++)
2949 {
2950 while (*p && *p != '}' && *p++ != '|')
2951 ;
2952 if (*p == '}')
2953 break;
2954 if (*p == '|')
2955 p++;
2956 }
2957
2958 if (*p == '\0')
2959 output_operand_lossage ("unterminated assembly dialect alternative");
2960 }
2961 break;
2962
2963 case '|':
2964 if (dialect)
2965 {
2966 /* Skip to close brace. */
2967 do
2968 {
2969 if (*p == '\0')
2970 {
2971 output_operand_lossage ("unterminated assembly dialect alternative");
2972 break;
2973 }
2974 }
2975 while (*p++ != '}');
2976 dialect = 0;
2977 }
2978 else
2979 putc (c, asm_out_file);
2980 break;
2981
2982 case '}':
2983 if (! dialect)
2984 putc (c, asm_out_file);
2985 dialect = 0;
2986 break;
2987 #endif
2988
2989 case '%':
2990 /* %% outputs a single %. */
2991 if (*p == '%')
2992 {
2993 p++;
2994 putc (c, asm_out_file);
2995 }
2996 /* %= outputs a number which is unique to each insn in the entire
2997 compilation. This is useful for making local labels that are
2998 referred to more than once in a given insn. */
2999 else if (*p == '=')
3000 {
3001 p++;
3002 fprintf (asm_out_file, "%d", insn_counter);
3003 }
3004 /* % followed by a letter and some digits
3005 outputs an operand in a special way depending on the letter.
3006 Letters `acln' are implemented directly.
3007 Other letters are passed to `output_operand' so that
3008 the PRINT_OPERAND macro can define them. */
3009 else if (ISALPHA (*p))
3010 {
3011 int letter = *p++;
3012 c = atoi (p);
3013
3014 if (! ISDIGIT (*p))
3015 output_operand_lossage ("operand number missing after %%-letter");
3016 else if (this_is_asm_operands
3017 && (c < 0 || (unsigned int) c >= insn_noperands))
3018 output_operand_lossage ("operand number out of range");
3019 else if (letter == 'l')
3020 output_asm_label (operands[c]);
3021 else if (letter == 'a')
3022 output_address (operands[c]);
3023 else if (letter == 'c')
3024 {
3025 if (CONSTANT_ADDRESS_P (operands[c]))
3026 output_addr_const (asm_out_file, operands[c]);
3027 else
3028 output_operand (operands[c], 'c');
3029 }
3030 else if (letter == 'n')
3031 {
3032 if (GET_CODE (operands[c]) == CONST_INT)
3033 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3034 - INTVAL (operands[c]));
3035 else
3036 {
3037 putc ('-', asm_out_file);
3038 output_addr_const (asm_out_file, operands[c]);
3039 }
3040 }
3041 else
3042 output_operand (operands[c], letter);
3043
3044 if (!opoutput[c])
3045 oporder[ops++] = c;
3046 opoutput[c] = 1;
3047
3048 while (ISDIGIT (c = *p))
3049 p++;
3050 }
3051 /* % followed by a digit outputs an operand the default way. */
3052 else if (ISDIGIT (*p))
3053 {
3054 c = atoi (p);
3055 if (this_is_asm_operands
3056 && (c < 0 || (unsigned int) c >= insn_noperands))
3057 output_operand_lossage ("operand number out of range");
3058 else
3059 output_operand (operands[c], 0);
3060
3061 if (!opoutput[c])
3062 oporder[ops++] = c;
3063 opoutput[c] = 1;
3064
3065 while (ISDIGIT (c = *p))
3066 p++;
3067 }
3068 /* % followed by punctuation: output something for that
3069 punctuation character alone, with no operand.
3070 The PRINT_OPERAND macro decides what is actually done. */
3071 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3072 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3073 output_operand (NULL_RTX, *p++);
3074 #endif
3075 else
3076 output_operand_lossage ("invalid %%-code");
3077 break;
3078
3079 default:
3080 putc (c, asm_out_file);
3081 }
3082
3083 /* Write out the variable names for operands, if we know them. */
3084 if (flag_verbose_asm)
3085 output_asm_operand_names (operands, oporder, ops);
3086 if (flag_print_asm_name)
3087 output_asm_name ();
3088
3089 putc ('\n', asm_out_file);
3090 }
3091 \f
3092 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
3093
3094 void
3095 output_asm_label (rtx x)
3096 {
3097 char buf[256];
3098
3099 if (GET_CODE (x) == LABEL_REF)
3100 x = XEXP (x, 0);
3101 if (GET_CODE (x) == CODE_LABEL
3102 || (GET_CODE (x) == NOTE
3103 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3104 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3105 else
3106 output_operand_lossage ("`%%l' operand isn't a label");
3107
3108 assemble_name (asm_out_file, buf);
3109 }
3110
3111 /* Print operand X using machine-dependent assembler syntax.
3112 The macro PRINT_OPERAND is defined just to control this function.
3113 CODE is a non-digit that preceded the operand-number in the % spec,
3114 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3115 between the % and the digits.
3116 When CODE is a non-letter, X is 0.
3117
3118 The meanings of the letters are machine-dependent and controlled
3119 by PRINT_OPERAND. */
3120
3121 static void
3122 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3123 {
3124 if (x && GET_CODE (x) == SUBREG)
3125 x = alter_subreg (&x);
3126
3127 /* If X is a pseudo-register, abort now rather than writing trash to the
3128 assembler file. */
3129
3130 if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3131 abort ();
3132
3133 PRINT_OPERAND (asm_out_file, x, code);
3134 }
3135
3136 /* Print a memory reference operand for address X
3137 using machine-dependent assembler syntax.
3138 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3139
3140 void
3141 output_address (rtx x)
3142 {
3143 walk_alter_subreg (&x);
3144 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3145 }
3146 \f
3147 /* Print an integer constant expression in assembler syntax.
3148 Addition and subtraction are the only arithmetic
3149 that may appear in these expressions. */
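/* Example (not in the original source): the address constant
   (const:SI (plus:SI (symbol_ref:SI "table") (const_int 8)))
   prints (modulo any target symbol prefix) as "table+8"; with a
   negative offset such as (const_int -8) no '+' is emitted and the
   result is "table-8".  */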
3150
3151 void
3152 output_addr_const (FILE *file, rtx x)
3153 {
3154 char buf[256];
3155
3156 restart:
3157 switch (GET_CODE (x))
3158 {
3159 case PC:
3160 putc ('.', file);
3161 break;
3162
3163 case SYMBOL_REF:
3164 #ifdef ASM_OUTPUT_SYMBOL_REF
3165 ASM_OUTPUT_SYMBOL_REF (file, x);
3166 #else
3167 assemble_name (file, XSTR (x, 0));
3168 #endif
3169 break;
3170
3171 case LABEL_REF:
3172 x = XEXP (x, 0);
3173 /* Fall through. */
3174 case CODE_LABEL:
3175 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3176 #ifdef ASM_OUTPUT_LABEL_REF
3177 ASM_OUTPUT_LABEL_REF (file, buf);
3178 #else
3179 assemble_name (file, buf);
3180 #endif
3181 break;
3182
3183 case CONST_INT:
3184 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3185 break;
3186
3187 case CONST:
3188 /* This used to output parentheses around the expression,
3189 but that does not work on the 386 (either ATT or BSD assembler). */
3190 output_addr_const (file, XEXP (x, 0));
3191 break;
3192
3193 case CONST_DOUBLE:
3194 if (GET_MODE (x) == VOIDmode)
3195 {
3196 /* We can use %d if the number is one word and positive. */
3197 if (CONST_DOUBLE_HIGH (x))
3198 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3199 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3200 else if (CONST_DOUBLE_LOW (x) < 0)
3201 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3202 else
3203 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3204 }
3205 else
3206 /* We can't handle floating point constants;
3207 PRINT_OPERAND must handle them. */
3208 output_operand_lossage ("floating constant misused");
3209 break;
3210
3211 case PLUS:
3212 /* Some assemblers need integer constants to appear last (e.g. masm). */
3213 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3214 {
3215 output_addr_const (file, XEXP (x, 1));
3216 if (INTVAL (XEXP (x, 0)) >= 0)
3217 fprintf (file, "+");
3218 output_addr_const (file, XEXP (x, 0));
3219 }
3220 else
3221 {
3222 output_addr_const (file, XEXP (x, 0));
3223 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3224 || INTVAL (XEXP (x, 1)) >= 0)
3225 fprintf (file, "+");
3226 output_addr_const (file, XEXP (x, 1));
3227 }
3228 break;
3229
3230 case MINUS:
3231 /* Avoid outputting things like x-x or x+5-x,
3232 since some assemblers can't handle that. */
3233 x = simplify_subtraction (x);
3234 if (GET_CODE (x) != MINUS)
3235 goto restart;
3236
3237 output_addr_const (file, XEXP (x, 0));
3238 fprintf (file, "-");
3239 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3240 || GET_CODE (XEXP (x, 1)) == PC
3241 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3242 output_addr_const (file, XEXP (x, 1));
3243 else
3244 {
3245 fputs (targetm.asm_out.open_paren, file);
3246 output_addr_const (file, XEXP (x, 1));
3247 fputs (targetm.asm_out.close_paren, file);
3248 }
3249 break;
3250
3251 case ZERO_EXTEND:
3252 case SIGN_EXTEND:
3253 case SUBREG:
3254 output_addr_const (file, XEXP (x, 0));
3255 break;
3256
3257 default:
3258 #ifdef OUTPUT_ADDR_CONST_EXTRA
3259 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3260 break;
3261
3262 fail:
3263 #endif
3264 output_operand_lossage ("invalid expression as operand");
3265 }
3266 }
3267 \f
3268 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3269 %R prints the value of REGISTER_PREFIX.
3270 %L prints the value of LOCAL_LABEL_PREFIX.
3271 %U prints the value of USER_LABEL_PREFIX.
3272 %I prints the value of IMMEDIATE_PREFIX.
3273 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3274 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3275
3276 We handle alternate assembler dialects here, just like output_asm_insn. */
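/* Usage sketch (not in the original source; regno and offset are
   hypothetical locals in some target backend):

       asm_fprintf (asm_out_file, "\tpush\t%R%s, %I%wd\n",
		    reg_names[regno], offset);

   prints the register prefix, the register name, the immediate prefix
   and a HOST_WIDE_INT value, giving e.g. "push %r3, #16" on a target
   whose prefixes are "%" and "#".  */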
3277
3278 void
3279 asm_fprintf (FILE *file, const char *p, ...)
3280 {
3281 char buf[10];
3282 char *q, c;
3283 va_list argptr;
3284
3285 va_start (argptr, p);
3286
3287 buf[0] = '%';
3288
3289 while ((c = *p++))
3290 switch (c)
3291 {
3292 #ifdef ASSEMBLER_DIALECT
3293 case '{':
3294 {
3295 int i;
3296
3297 /* If we want the first dialect, do nothing. Otherwise, skip
3298 DIALECT_NUMBER of strings ending with '|'. */
3299 for (i = 0; i < dialect_number; i++)
3300 {
3301 while (*p && *p++ != '|')
3302 ;
3303
3304 if (*p == '|')
3305 p++;
3306 }
3307 }
3308 break;
3309
3310 case '|':
3311 /* Skip to close brace. */
3312 while (*p && *p++ != '}')
3313 ;
3314 break;
3315
3316 case '}':
3317 break;
3318 #endif
3319
3320 case '%':
3321 c = *p++;
3322 q = &buf[1];
3323 while (strchr ("-+ #0", c))
3324 {
3325 *q++ = c;
3326 c = *p++;
3327 }
3328 while (ISDIGIT (c) || c == '.')
3329 {
3330 *q++ = c;
3331 c = *p++;
3332 }
3333 switch (c)
3334 {
3335 case '%':
3336 putc ('%', file);
3337 break;
3338
3339 case 'd': case 'i': case 'u':
3340 case 'x': case 'X': case 'o':
3341 case 'c':
3342 *q++ = c;
3343 *q = 0;
3344 fprintf (file, buf, va_arg (argptr, int));
3345 break;
3346
3347 case 'w':
3348 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3349 'o' cases, but we do not check for those cases. It
3350 means that the value is a HOST_WIDE_INT, which may be
3351 either `long' or `long long'. */
3352 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3353 q += strlen (HOST_WIDE_INT_PRINT);
3354 *q++ = *p++;
3355 *q = 0;
3356 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3357 break;
3358
3359 case 'l':
3360 *q++ = c;
3361 #ifdef HAVE_LONG_LONG
3362 if (*p == 'l')
3363 {
3364 *q++ = *p++;
3365 *q++ = *p++;
3366 *q = 0;
3367 fprintf (file, buf, va_arg (argptr, long long));
3368 }
3369 else
3370 #endif
3371 {
3372 *q++ = *p++;
3373 *q = 0;
3374 fprintf (file, buf, va_arg (argptr, long));
3375 }
3376
3377 break;
3378
3379 case 's':
3380 *q++ = c;
3381 *q = 0;
3382 fprintf (file, buf, va_arg (argptr, char *));
3383 break;
3384
3385 case 'O':
3386 #ifdef ASM_OUTPUT_OPCODE
3387 ASM_OUTPUT_OPCODE (asm_out_file, p);
3388 #endif
3389 break;
3390
3391 case 'R':
3392 #ifdef REGISTER_PREFIX
3393 fprintf (file, "%s", REGISTER_PREFIX);
3394 #endif
3395 break;
3396
3397 case 'I':
3398 #ifdef IMMEDIATE_PREFIX
3399 fprintf (file, "%s", IMMEDIATE_PREFIX);
3400 #endif
3401 break;
3402
3403 case 'L':
3404 #ifdef LOCAL_LABEL_PREFIX
3405 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3406 #endif
3407 break;
3408
3409 case 'U':
3410 fputs (user_label_prefix, file);
3411 break;
3412
3413 #ifdef ASM_FPRINTF_EXTENSIONS
3414 /* Uppercase letters are reserved for general use by asm_fprintf
3415 and so are not available to target specific code. In order to
3416 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3417 they are defined here. As they get turned into real extensions
3418 to asm_fprintf they should be removed from this list. */
3419 case 'A': case 'B': case 'C': case 'D': case 'E':
3420 case 'F': case 'G': case 'H': case 'J': case 'K':
3421 case 'M': case 'N': case 'P': case 'Q': case 'S':
3422 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3423 break;
3424
3425 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3426 #endif
3427 default:
3428 abort ();
3429 }
3430 break;
3431
3432 default:
3433 putc (c, file);
3434 }
3435 va_end (argptr);
3436 }
3437 \f
3438 /* Split up a CONST_DOUBLE or integer constant rtx
3439 into two rtx's for single words,
3440 storing in *FIRST the word that comes first in memory in the target
3441 and in *SECOND the other. */
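/* Worked example (not in the original source): with 32-bit words, a
   64-bit HOST_WIDE_INT and !WORDS_BIG_ENDIAN, splitting
   (const_int 0x123456789abcdef0) gives
       *FIRST  = (const_int 0xffffffff9abcdef0)   [low word, sign-extended]
       *SECOND = (const_int 0x12345678)           [high word]
   On a WORDS_BIG_ENDIAN target the two results are swapped.  */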
3442
3443 void
3444 split_double (rtx value, rtx *first, rtx *second)
3445 {
3446 if (GET_CODE (value) == CONST_INT)
3447 {
3448 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3449 {
3450 /* In this case the CONST_INT holds both target words.
3451 Extract the bits from it into two word-sized pieces.
3452 Sign extend each half to HOST_WIDE_INT. */
3453 unsigned HOST_WIDE_INT low, high;
3454 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3455
3456 /* Set sign_bit to the most significant bit of a word. */
3457 sign_bit = 1;
3458 sign_bit <<= BITS_PER_WORD - 1;
3459
3460 /* Set mask so that all bits of the word are set. We could
3461 have used 1 << BITS_PER_WORD instead of basing the
3462 calculation on sign_bit. However, on machines where
3463 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3464 compiler warning, even though the code would never be
3465 executed. */
3466 mask = sign_bit << 1;
3467 mask--;
3468
3469 /* Set sign_extend as any remaining bits. */
3470 sign_extend = ~mask;
3471
3472 /* Pick the lower word and sign-extend it. */
3473 low = INTVAL (value);
3474 low &= mask;
3475 if (low & sign_bit)
3476 low |= sign_extend;
3477
3478 /* Pick the higher word, shifted to the least significant
3479 bits, and sign-extend it. */
3480 high = INTVAL (value);
3481 high >>= BITS_PER_WORD - 1;
3482 high >>= 1;
3483 high &= mask;
3484 if (high & sign_bit)
3485 high |= sign_extend;
3486
3487 /* Store the words in the target machine order. */
3488 if (WORDS_BIG_ENDIAN)
3489 {
3490 *first = GEN_INT (high);
3491 *second = GEN_INT (low);
3492 }
3493 else
3494 {
3495 *first = GEN_INT (low);
3496 *second = GEN_INT (high);
3497 }
3498 }
3499 else
3500 {
3501 /* The rule for using CONST_INT for a wider mode
3502 is that we regard the value as signed.
3503 So sign-extend it. */
3504 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3505 if (WORDS_BIG_ENDIAN)
3506 {
3507 *first = high;
3508 *second = value;
3509 }
3510 else
3511 {
3512 *first = value;
3513 *second = high;
3514 }
3515 }
3516 }
3517 else if (GET_CODE (value) != CONST_DOUBLE)
3518 {
3519 if (WORDS_BIG_ENDIAN)
3520 {
3521 *first = const0_rtx;
3522 *second = value;
3523 }
3524 else
3525 {
3526 *first = value;
3527 *second = const0_rtx;
3528 }
3529 }
3530 else if (GET_MODE (value) == VOIDmode
3531 /* This is the old way we did CONST_DOUBLE integers. */
3532 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3533 {
3534 /* In an integer, the words are defined as most and least significant.
3535 So order them by the target's convention. */
3536 if (WORDS_BIG_ENDIAN)
3537 {
3538 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3539 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3540 }
3541 else
3542 {
3543 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3544 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3545 }
3546 }
3547 else
3548 {
3549 REAL_VALUE_TYPE r;
3550 long l[2];
3551 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3552
3553 /* Note, this converts the REAL_VALUE_TYPE to the target's
3554 format, splits up the floating point double and outputs
3555 exactly 32 bits of it into each of l[0] and l[1] --
3556 not necessarily BITS_PER_WORD bits. */
3557 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3558
3559 /* If 32 bits is an entire word for the target, but not for the host,
3560 then sign-extend on the host so that the number will look the same
3561 way on the host that it would on the target. See for instance
3562 simplify_unary_operation. The #if is needed to avoid compiler
3563 warnings. */
3564
3565 #if HOST_BITS_PER_LONG > 32
3566 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3567 {
3568 if (l[0] & ((long) 1 << 31))
3569 l[0] |= ((long) (-1) << 32);
3570 if (l[1] & ((long) 1 << 31))
3571 l[1] |= ((long) (-1) << 32);
3572 }
3573 #endif
3574
3575 *first = GEN_INT ((HOST_WIDE_INT) l[0]);
3576 *second = GEN_INT ((HOST_WIDE_INT) l[1]);
3577 }
3578 }
3579 \f
3580 /* Return nonzero if this function has no function calls. */
3581
3582 int
3583 leaf_function_p (void)
3584 {
3585 rtx insn;
3586 rtx link;
3587
3588 if (current_function_profile || profile_arc_flag)
3589 return 0;
3590
3591 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3592 {
3593 if (GET_CODE (insn) == CALL_INSN
3594 && ! SIBLING_CALL_P (insn))
3595 return 0;
3596 if (GET_CODE (insn) == INSN
3597 && GET_CODE (PATTERN (insn)) == SEQUENCE
3598 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3599 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3600 return 0;
3601 }
3602 for (link = current_function_epilogue_delay_list;
3603 link;
3604 link = XEXP (link, 1))
3605 {
3606 insn = XEXP (link, 0);
3607
3608 if (GET_CODE (insn) == CALL_INSN
3609 && ! SIBLING_CALL_P (insn))
3610 return 0;
3611 if (GET_CODE (insn) == INSN
3612 && GET_CODE (PATTERN (insn)) == SEQUENCE
3613 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3614 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3615 return 0;
3616 }
3617
3618 return 1;
3619 }
3620
3621 /* Return 1 if the branch is a forward branch.
3622 Uses the insn_shuid array, so it works only in the final pass. May be used
3623 by output templates to add customary branch prediction hints.
3624 */
3625 int
3626 final_forward_branch_p (rtx insn)
3627 {
3628 int insn_id, label_id;
3629 if (!uid_shuid)
3630 abort ();
3631 insn_id = INSN_SHUID (insn);
3632 label_id = INSN_SHUID (JUMP_LABEL (insn));
3633 /* We've hit some insns that do not have id information available. */
3634 if (!insn_id || !label_id)
3635 abort ();
3636 return insn_id < label_id;
3637 }
3638
3639 /* On some machines, a function with no call insns
3640 can run faster if it doesn't create its own register window.
3641 When output, the leaf function should use only the "output"
3642 registers. Ordinarily, the function would be compiled to use
3643 the "input" registers to find its arguments; it is a candidate
3644 for leaf treatment if it uses only the "input" registers.
3645 Leaf function treatment means renumbering so the function
3646 uses the "output" registers instead. */
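/* Illustrative note (not in the original source): the classic user of
   this machinery is a register-window target in the SPARC style, where
   LEAF_REGISTERS permits only the "output" and global registers and
   LEAF_REG_REMAP maps each "input" register %iN to the corresponding
   "output" register %oN, letting the function run without allocating
   a register window of its own.  */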
3647
3648 #ifdef LEAF_REGISTERS
3649
3650 /* Return 1 if this function uses only the registers that can be
3651 safely renumbered. */
3652
3653 int
3654 only_leaf_regs_used (void)
3655 {
3656 int i;
3657 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3658
3659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3660 if ((regs_ever_live[i] || global_regs[i])
3661 && ! permitted_reg_in_leaf_functions[i])
3662 return 0;
3663
3664 if (current_function_uses_pic_offset_table
3665 && pic_offset_table_rtx != 0
3666 && GET_CODE (pic_offset_table_rtx) == REG
3667 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3668 return 0;
3669
3670 return 1;
3671 }
3672
3673 /* Scan all instructions and renumber all registers into those
3674 available in leaf functions. */
3675
3676 static void
3677 leaf_renumber_regs (rtx first)
3678 {
3679 rtx insn;
3680
3681 /* Renumber only the actual patterns.
3682 The reg-notes can contain frame pointer refs,
3683 and renumbering them could crash, and should not be needed. */
3684 for (insn = first; insn; insn = NEXT_INSN (insn))
3685 if (INSN_P (insn))
3686 leaf_renumber_regs_insn (PATTERN (insn));
3687 for (insn = current_function_epilogue_delay_list;
3688 insn;
3689 insn = XEXP (insn, 1))
3690 if (INSN_P (XEXP (insn, 0)))
3691 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3692 }
3693
3694 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3695 available in leaf functions. */
3696
3697 void
3698 leaf_renumber_regs_insn (rtx in_rtx)
3699 {
3700 int i, j;
3701 const char *format_ptr;
3702
3703 if (in_rtx == 0)
3704 return;
3705
3706 /* Renumber all input-registers into output-registers.
3707 The `used' bit on a REG marks registers that have already been
3708 renumbered, so each register rtx is rewritten only once. */
3709
3710 if (GET_CODE (in_rtx) == REG)
3711 {
3712 int newreg;
3713
3714 /* Don't renumber the same reg twice. */
3715 if (in_rtx->used)
3716 return;
3717
3718 newreg = REGNO (in_rtx);
3719 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3720 to reach here as part of a REG_NOTE. */
3721 if (newreg >= FIRST_PSEUDO_REGISTER)
3722 {
3723 in_rtx->used = 1;
3724 return;
3725 }
3726 newreg = LEAF_REG_REMAP (newreg);
3727 if (newreg < 0)
3728 abort ();
3729 regs_ever_live[REGNO (in_rtx)] = 0;
3730 regs_ever_live[newreg] = 1;
3731 REGNO (in_rtx) = newreg;
3732 in_rtx->used = 1;
3733 }
3734
3735 if (INSN_P (in_rtx))
3736 {
3737 /* Inside a SEQUENCE, we find insns.
3738 Renumber just the patterns of these insns,
3739 just as we do for the top-level insns. */
3740 leaf_renumber_regs_insn (PATTERN (in_rtx));
3741 return;
3742 }
3743
3744 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3745
3746 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3747 switch (*format_ptr++)
3748 {
3749 case 'e':
3750 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3751 break;
3752
3753 case 'E':
3754 if (NULL != XVEC (in_rtx, i))
3755 {
3756 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3757 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3758 }
3759 break;
3760
3761 case 'S':
3762 case 's':
3763 case '0':
3764 case 'i':
3765 case 'w':
3766 case 'n':
3767 case 'u':
3768 break;
3769
3770 default:
3771 abort ();
3772 }
3773 }
3774 #endif
3775
3776
3777 /* When -gused is used, emit debug info for only used symbols. But in
3778 addition to the standard intercepted debug_hooks there are some direct
3779 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3780 Those routines may also be called from a higher level intercepted routine. So
3781 to prevent recording data for an inner call to one of these for an intercept,
3782 we maintain an intercept nesting counter (debug_nesting). We only save the
3783 intercepted arguments if the nesting is 1. */
3784 int debug_nesting = 0;
3785
3786 static tree *symbol_queue;
3787 int symbol_queue_index = 0;
3788 static int symbol_queue_size = 0;
3789
3790 /* Generate the symbols for any queued up type symbols we encountered
3791 while generating the type info for some originally used symbol.
3792 This might generate additional entries in the queue. Only when
3793 the nesting depth goes to 0 is this routine called. */
3794
3795 void
3796 debug_flush_symbol_queue (void)
3797 {
3798 int i;
3799
3800 /* Make sure that additionally queued items are not flushed
3801 prematurely. */
3802
3803 ++debug_nesting;
3804
3805 for (i = 0; i < symbol_queue_index; ++i)
3806 {
3807 /* If we pushed queued symbols then such symbols must be
3808 output no matter what anyone else says. Specifically,
3809 we need to make sure dbxout_symbol() thinks the symbol was
3810 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3811 which may be set for outside reasons. */
3812 int saved_tree_used = TREE_USED (symbol_queue[i]);
3813 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3814 TREE_USED (symbol_queue[i]) = 1;
3815 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3816
3817 #ifdef DBX_DEBUGGING_INFO
3818 dbxout_symbol (symbol_queue[i], 0);
3819 #endif
3820
3821 TREE_USED (symbol_queue[i]) = saved_tree_used;
3822 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3823 }
3824
3825 symbol_queue_index = 0;
3826 --debug_nesting;
3827 }
3828
3829 /* Queue a type symbol needed as part of the definition of a decl
3830 symbol. These symbols are generated when debug_flush_symbol_queue()
3831 is called. */
3832
3833 void
3834 debug_queue_symbol (tree decl)
3835 {
3836 if (symbol_queue_index >= symbol_queue_size)
3837 {
3838 symbol_queue_size += 10;
3839 symbol_queue = xrealloc (symbol_queue,
3840 symbol_queue_size * sizeof (tree));
3841 }
3842
3843 symbol_queue[symbol_queue_index++] = decl;
3844 }
3845
3846 /* Free symbol queue. */
3847 void
3848 debug_free_queue (void)
3849 {
3850 if (symbol_queue)
3851 {
3852 free (symbol_queue);
3853 symbol_queue = NULL;
3854 symbol_queue_size = 0;
3855 }
3856 }