1 /* Convert RTL to assembler code and output it, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* This is the final pass of the compiler.
23 It looks at the rtl code for a function and outputs assembler code.
24
25 Call `final_start_function' to output the assembler code for function entry,
26 `final' to output assembler code for some RTL code,
27 `final_end_function' to output assembler code for function exit.
28 If a function is compiled in several pieces, each piece is
29 output separately with `final'.
30
31 Some optimizations are also done at this level.
32 Move instructions that were made unnecessary by good register allocation
33 are detected and omitted from the output. (Though most of these
34 are removed by the last jump pass.)
35
36 Instructions to set the condition codes are omitted when it can be
37 seen that the condition codes already had the desired values.
38
39 In some cases it is sufficient if the inherited condition codes
40 have related values, but this may require the following insn
41 (the one that tests the condition codes) to be modified.
42
43 The code for the function prologue and epilogue is generated
44 directly in assembler by the target functions function_prologue and
45 function_epilogue. Those instructions never exist as rtl. */
46
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51
52 #include "tree.h"
53 #include "rtl.h"
54 #include "tm_p.h"
55 #include "regs.h"
56 #include "insn-config.h"
57 #include "insn-attr.h"
58 #include "recog.h"
59 #include "conditions.h"
60 #include "flags.h"
61 #include "real.h"
62 #include "hard-reg-set.h"
63 #include "output.h"
64 #include "except.h"
65 #include "function.h"
66 #include "toplev.h"
67 #include "reload.h"
68 #include "intl.h"
69 #include "basic-block.h"
70 #include "target.h"
71 #include "debug.h"
72 #include "expr.h"
73 #include "cfglayout.h"
74
75 #ifdef XCOFF_DEBUGGING_INFO
76 #include "xcoffout.h" /* Needed for external data
77 declarations for e.g. AIX 4.x. */
78 #endif
79
80 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
81 #include "dwarf2out.h"
82 #endif
83
84 #ifdef DBX_DEBUGGING_INFO
85 #include "dbxout.h"
86 #endif
87
88 /* If we aren't using cc0, CC_STATUS_INIT shouldn't exist. So define a
89 null default for it to save conditionalization later. */
90 #ifndef CC_STATUS_INIT
91 #define CC_STATUS_INIT
92 #endif
93
94 /* How to start an assembler comment. */
95 #ifndef ASM_COMMENT_START
96 #define ASM_COMMENT_START ";#"
97 #endif
98
99 /* Is the given character a logical line separator for the assembler? */
100 #ifndef IS_ASM_LOGICAL_LINE_SEPARATOR
101 #define IS_ASM_LOGICAL_LINE_SEPARATOR(C) ((C) == ';')
102 #endif
103
104 #ifndef JUMP_TABLES_IN_TEXT_SECTION
105 #define JUMP_TABLES_IN_TEXT_SECTION 0
106 #endif
107
108 #if defined(READONLY_DATA_SECTION) || defined(READONLY_DATA_SECTION_ASM_OP)
109 #define HAVE_READONLY_DATA_SECTION 1
110 #else
111 #define HAVE_READONLY_DATA_SECTION 0
112 #endif
113
114 /* Bitflags used by final_scan_insn. */
115 #define SEEN_BB 1
116 #define SEEN_NOTE 2
117 #define SEEN_EMITTED 4
118
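/* Illustrative sketch, added for clarity and not part of the original
   source: final_scan_insn combines these bits so that the first of a pair
   of related events (NOTE_INSN_PROLOGUE_END / NOTE_INSN_FUNCTION_BEG, or
   the first two basic-block notes) merely records itself, while the second
   sets SEEN_EMITTED and forces a fresh line note by clearing last_filename.
   A minimal model of that check, with a made-up helper name, guarded out
   of the build:  */
#if 0
static int
seen_should_force_line_note (int *seen, int this_bit)
{
  if ((*seen & (SEEN_EMITTED | this_bit)) == this_bit)
    {
      /* The companion event already happened: force a line note.  */
      *seen |= SEEN_EMITTED;
      return 1;
    }
  /* First event of the pair: just record it.  */
  *seen |= this_bit;
  return 0;
}
#endif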
119 /* Last insn processed by final_scan_insn. */
120 static rtx debug_insn;
121 rtx current_output_insn;
122
123 /* Line number of last NOTE. */
124 static int last_linenum;
125
126 /* Highest line number in current block. */
127 static int high_block_linenum;
128
129 /* Likewise for function. */
130 static int high_function_linenum;
131
132 /* Filename of last NOTE. */
133 static const char *last_filename;
134
135 extern int length_unit_log; /* This is defined in insn-attrtab.c. */
136
137 /* Nonzero while outputting an `asm' with operands.
138 This means that inconsistencies are the user's fault, so don't abort.
139 The precise value is the insn being output, to pass to error_for_asm. */
140 rtx this_is_asm_operands;
141
142 /* Number of operands of this insn, for an `asm' with operands. */
143 static unsigned int insn_noperands;
144
145 /* Compare optimization flag. */
146
147 static rtx last_ignored_compare = 0;
148
149 /* Assign a unique number to each insn that is output.
150 This can be used to generate unique local labels. */
151
152 static int insn_counter = 0;
153
154 #ifdef HAVE_cc0
155 /* This variable contains machine-dependent flags (defined in tm.h)
156 set and examined by output routines
157 that describe how to interpret the condition codes properly. */
158
159 CC_STATUS cc_status;
160
161 /* During output of an insn, this contains a copy of cc_status
162 from before the insn. */
163
164 CC_STATUS cc_prev_status;
165 #endif
166
167 /* Indexed by hardware reg number, is 1 if that register is ever
168 used in the current function.
169
170 In life_analysis, or in stupid_life_analysis, this is set
171 up to record the hard regs used explicitly. Reload adds
172 in the hard regs used for holding pseudo regs. Final uses
173 it to generate the code in the function prologue and epilogue
174 to save and restore registers as needed. */
175
176 char regs_ever_live[FIRST_PSEUDO_REGISTER];
177
178 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
179 Unlike regs_ever_live, elements of this array corresponding to
180 eliminable regs like the frame pointer are set if an asm sets them. */
181
182 char regs_asm_clobbered[FIRST_PSEUDO_REGISTER];
183
184 /* Nonzero means current function must be given a frame pointer.
185 Initialized in function.c to 0. Set only in reload1.c as per
186 the needs of the function. */
187
188 int frame_pointer_needed;
189
190 /* Number of unmatched NOTE_INSN_BLOCK_BEG notes we have seen. */
191
192 static int block_depth;
193
194 /* Nonzero if have enabled APP processing of our assembler output. */
195
196 static int app_on;
197
198 /* If we are outputting an insn sequence, this contains the sequence rtx.
199 Zero otherwise. */
200
201 rtx final_sequence;
202
203 #ifdef ASSEMBLER_DIALECT
204
205 /* Number of the assembler dialect to use, starting at 0. */
206 static int dialect_number;
207 #endif
208
209 #ifdef HAVE_conditional_execution
210 /* Nonnull if the insn currently being emitted was a COND_EXEC pattern. */
211 rtx current_insn_predicate;
212 #endif
213
214 #ifdef HAVE_ATTR_length
215 static int asm_insn_count (rtx);
216 #endif
217 static void profile_function (FILE *);
218 static void profile_after_prologue (FILE *);
219 static bool notice_source_line (rtx);
220 static rtx walk_alter_subreg (rtx *);
221 static void output_asm_name (void);
222 static void output_alternate_entry_point (FILE *, rtx);
223 static tree get_mem_expr_from_op (rtx, int *);
224 static void output_asm_operand_names (rtx *, int *, int);
225 static void output_operand (rtx, int);
226 #ifdef LEAF_REGISTERS
227 static void leaf_renumber_regs (rtx);
228 #endif
229 #ifdef HAVE_cc0
230 static int alter_cond (rtx);
231 #endif
232 #ifndef ADDR_VEC_ALIGN
233 static int final_addr_vec_align (rtx);
234 #endif
235 #ifdef HAVE_ATTR_length
236 static int align_fuzz (rtx, rtx, int, unsigned);
237 #endif
238 static rtx final_scan_insn (rtx, FILE *, int, int, int, int *);
239 \f
240 /* Initialize data in final at the beginning of a compilation. */
241
242 void
243 init_final (const char *filename ATTRIBUTE_UNUSED)
244 {
245 app_on = 0;
246 final_sequence = 0;
247
248 #ifdef ASSEMBLER_DIALECT
249 dialect_number = ASSEMBLER_DIALECT;
250 #endif
251 }
252
253 /* Default target function prologue and epilogue assembler output.
254
255 If not overridden for epilogue code, then the function body itself
256 contains return instructions wherever needed. */
257 void
258 default_function_pro_epilogue (FILE *file ATTRIBUTE_UNUSED,
259 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
260 {
261 }
262
263 /* Default target hook that outputs nothing to a stream. */
264 void
265 no_asm_to_stream (FILE *file ATTRIBUTE_UNUSED)
266 {
267 }
268
269 /* Enable APP processing of subsequent output.
270 Used before the output from an `asm' statement. */
271
272 void
273 app_enable (void)
274 {
275 if (! app_on)
276 {
277 fputs (ASM_APP_ON, asm_out_file);
278 app_on = 1;
279 }
280 }
281
282 /* Disable APP processing of subsequent output.
283 Called from varasm.c before most kinds of output. */
284
285 void
286 app_disable (void)
287 {
288 if (app_on)
289 {
290 fputs (ASM_APP_OFF, asm_out_file);
291 app_on = 0;
292 }
293 }
294 \f
295 /* Return the number of slots filled in the current
296 delayed branch sequence (we don't count the insn needing the
297 delay slot). Zero if not in a delayed branch sequence. */
298
299 #ifdef DELAY_SLOTS
300 int
301 dbr_sequence_length (void)
302 {
303 if (final_sequence != 0)
304 return XVECLEN (final_sequence, 0) - 1;
305 else
306 return 0;
307 }
308 #endif
309 \f
310 /* The next two pages contain routines used to compute the length of an insn
311 and to shorten branches. */
312
313 /* Arrays for insn lengths, and addresses. The latter is referenced by
314 `insn_current_length'. */
315
316 static int *insn_lengths;
317
318 varray_type insn_addresses_;
319
320 /* Max uid for which the above arrays are valid. */
321 static int insn_lengths_max_uid;
322
323 /* Address of insn being processed. Used by `insn_current_length'. */
324 int insn_current_address;
325
326 /* Address of insn being processed in previous iteration. */
327 int insn_last_address;
328
329 /* Known invariant alignment of the insn being processed. */
330 int insn_current_align;
331
332 /* After shorten_branches, for any insn, uid_align[INSN_UID (insn)]
333 gives the next following alignment insn that increases the known
334 alignment, or NULL_RTX if there is no such insn.
335 For any alignment obtained this way, we can again index uid_align with
336 its uid to obtain the next following align that in turn increases the
337 alignment, till we reach NULL_RTX; the sequence obtained this way
338 for each insn we'll call the alignment chain of this insn in the following
339 comments. */
340
341 struct label_alignment
342 {
343 short alignment;
344 short max_skip;
345 };
346
347 static rtx *uid_align;
348 static int *uid_shuid;
349 static struct label_alignment *label_align;
350
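/* Illustrative sketch, added for clarity and not part of the original
   source: once shorten_branches has filled in uid_align, the alignment
   chain described above is walked like this (align_fuzz below traverses
   it in essentially this way).  Guarded out of the build.  */
#if 0
static void
example_walk_alignment_chain (rtx insn)
{
  rtx align_label;

  for (align_label = uid_align[INSN_UID (insn)];
       align_label != NULL_RTX;
       align_label = uid_align[INSN_UID (align_label)])
    {
      /* Each iteration visits the next insn that increases the known
         alignment; the chain ends at NULL_RTX.  */
    }
}
#endif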
351 /* Indicate that branch shortening hasn't yet been done. */
352
353 void
354 init_insn_lengths (void)
355 {
356 if (uid_shuid)
357 {
358 free (uid_shuid);
359 uid_shuid = 0;
360 }
361 if (insn_lengths)
362 {
363 free (insn_lengths);
364 insn_lengths = 0;
365 insn_lengths_max_uid = 0;
366 }
367 #ifdef HAVE_ATTR_length
368 INSN_ADDRESSES_FREE ();
369 #endif
370 if (uid_align)
371 {
372 free (uid_align);
373 uid_align = 0;
374 }
375 }
376
377 /* Obtain the current length of an insn. If branch shortening has been done,
378 get its actual length. Otherwise, get its maximum length. */
379
380 int
381 get_attr_length (rtx insn ATTRIBUTE_UNUSED)
382 {
383 #ifdef HAVE_ATTR_length
384 rtx body;
385 int i;
386 int length = 0;
387
388 if (insn_lengths_max_uid > INSN_UID (insn))
389 return insn_lengths[INSN_UID (insn)];
390 else
391 switch (GET_CODE (insn))
392 {
393 case NOTE:
394 case BARRIER:
395 case CODE_LABEL:
396 return 0;
397
398 case CALL_INSN:
399 length = insn_default_length (insn);
400 break;
401
402 case JUMP_INSN:
403 body = PATTERN (insn);
404 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
405 {
406 /* Alignment is machine-dependent and should be handled by
407 ADDR_VEC_ALIGN. */
408 }
409 else
410 length = insn_default_length (insn);
411 break;
412
413 case INSN:
414 body = PATTERN (insn);
415 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
416 return 0;
417
418 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
419 length = asm_insn_count (body) * insn_default_length (insn);
420 else if (GET_CODE (body) == SEQUENCE)
421 for (i = 0; i < XVECLEN (body, 0); i++)
422 length += get_attr_length (XVECEXP (body, 0, i));
423 else
424 length = insn_default_length (insn);
425 break;
426
427 default:
428 break;
429 }
430
431 #ifdef ADJUST_INSN_LENGTH
432 ADJUST_INSN_LENGTH (insn, length);
433 #endif
434 return length;
435 #else /* not HAVE_ATTR_length */
436 return 0;
437 #endif /* not HAVE_ATTR_length */
438 }
439 \f
440 /* Code to handle alignment inside shorten_branches. */
441
442 /* Here is an explanation of how the algorithm in align_fuzz can give
443 proper results:
444
445 Call a sequence of instructions beginning with alignment point X
446 and continuing until the next alignment point `block X'. When `X'
447 is used in an expression, it means the alignment value of the
448 alignment point.
449
450 Call the distance between the start of the first insn of block X, and
451 the end of the last insn of block X `IX', for the `inner size of X'.
452 This is clearly the sum of the instruction lengths.
453
454 Likewise with the next alignment-delimited block following X, which we
455 shall call block Y.
456
457 Call the distance between the start of the first insn of block X, and
458 the start of the first insn of block Y `OX', for the `outer size of X'.
459
460 The estimated padding is then OX - IX.
461
462 OX can be safely estimated as
463
464 if (X >= Y)
465 OX = round_up(IX, Y)
466 else
467 OX = round_up(IX, X) + Y - X
468
469 Clearly est(IX) >= real(IX), because that only depends on the
470 instruction lengths, and those being overestimated is a given.
471
472 Clearly round_up(foo, Z) >= round_up(bar, Z) if foo >= bar, so
473 we needn't worry about that when thinking about OX.
474
475 When X >= Y, the alignment provided by Y adds no uncertainty factor
476 for branch ranges starting before X, so we can just round what we have.
477 But when X < Y, we don't know anything about the, so to speak,
478 `middle bits', so we have to assume the worst when aligning up from an
479 address mod X to one mod Y, which is Y - X. */
480
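/* Illustrative sketch, added for clarity and not part of the original
   comment: the OX estimate above written out as code, with a made-up
   round-up macro.  For example, with IX = 10, X = 4 and Y = 8 we are in
   the X < Y case, so OX = round_up (10, 4) + 8 - 4 = 16 and the estimated
   padding is OX - IX = 6.  Guarded out of the build.  */
#if 0
#define EXAMPLE_ROUND_UP(N, ALIGN) (((N) + (ALIGN) - 1) & -(ALIGN))

static int
example_outer_size (int ix, int x, int y)
{
  if (x >= y)
    return EXAMPLE_ROUND_UP (ix, y);
  else
    return EXAMPLE_ROUND_UP (ix, x) + y - x;
}
#endif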
481 #ifndef LABEL_ALIGN
482 #define LABEL_ALIGN(LABEL) align_labels_log
483 #endif
484
485 #ifndef LABEL_ALIGN_MAX_SKIP
486 #define LABEL_ALIGN_MAX_SKIP align_labels_max_skip
487 #endif
488
489 #ifndef LOOP_ALIGN
490 #define LOOP_ALIGN(LABEL) align_loops_log
491 #endif
492
493 #ifndef LOOP_ALIGN_MAX_SKIP
494 #define LOOP_ALIGN_MAX_SKIP align_loops_max_skip
495 #endif
496
497 #ifndef LABEL_ALIGN_AFTER_BARRIER
498 #define LABEL_ALIGN_AFTER_BARRIER(LABEL) 0
499 #endif
500
501 #ifndef LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
502 #define LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP 0
503 #endif
504
505 #ifndef JUMP_ALIGN
506 #define JUMP_ALIGN(LABEL) align_jumps_log
507 #endif
508
509 #ifndef JUMP_ALIGN_MAX_SKIP
510 #define JUMP_ALIGN_MAX_SKIP align_jumps_max_skip
511 #endif
512
513 #ifndef ADDR_VEC_ALIGN
514 static int
515 final_addr_vec_align (rtx addr_vec)
516 {
517 int align = GET_MODE_SIZE (GET_MODE (PATTERN (addr_vec)));
518
519 if (align > BIGGEST_ALIGNMENT / BITS_PER_UNIT)
520 align = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
521 return exact_log2 (align);
522
523 }
524
525 #define ADDR_VEC_ALIGN(ADDR_VEC) final_addr_vec_align (ADDR_VEC)
526 #endif
527
528 #ifndef INSN_LENGTH_ALIGNMENT
529 #define INSN_LENGTH_ALIGNMENT(INSN) length_unit_log
530 #endif
531
532 #define INSN_SHUID(INSN) (uid_shuid[INSN_UID (INSN)])
533
534 static int min_labelno, max_labelno;
535
536 #define LABEL_TO_ALIGNMENT(LABEL) \
537 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].alignment)
538
539 #define LABEL_TO_MAX_SKIP(LABEL) \
540 (label_align[CODE_LABEL_NUMBER (LABEL) - min_labelno].max_skip)
541
542 /* For the benefit of port-specific code, do this also as a function. */
543
544 int
545 label_to_alignment (rtx label)
546 {
547 return LABEL_TO_ALIGNMENT (label);
548 }
549
550 #ifdef HAVE_ATTR_length
551 /* The differences in addresses
552 between a branch and its target might grow or shrink depending on
553 the alignment the start insn of the range (the branch for a forward
554 branch or the label for a backward branch) starts out on; if these
555 differences are used naively, they can even oscillate infinitely.
556 We therefore want to compute a 'worst case' address difference that
557 is independent of the alignment the start insn of the range ends
558 up on, and that is at least as large as the actual difference.
559 The function align_fuzz calculates the amount we have to add to the
560 naively computed difference, by traversing the part of the alignment
561 chain of the start insn of the range that is in front of the end insn
562 of the range, and considering for each alignment the maximum amount
563 that it might contribute to a size increase.
564
565 For casesi tables, we also want to know worst case minimum amounts of
566 address difference, in case a machine description wants to introduce
567 some common offset that is added to all offsets in a table.
568 For this purpose, align_fuzz with a growth argument of 0 computes the
569 appropriate adjustment. */
570
571 /* Compute the maximum delta by which the difference of the addresses of
572 START and END might grow / shrink due to a different address for start
573 which changes the size of alignment insns between START and END.
574 KNOWN_ALIGN_LOG is the alignment known for START.
575 GROWTH should be ~0 if the objective is to compute potential code size
576 increase, and 0 if the objective is to compute potential shrink.
577 The return value is undefined for any other value of GROWTH. */
578
579 static int
580 align_fuzz (rtx start, rtx end, int known_align_log, unsigned int growth)
581 {
582 int uid = INSN_UID (start);
583 rtx align_label;
584 int known_align = 1 << known_align_log;
585 int end_shuid = INSN_SHUID (end);
586 int fuzz = 0;
587
588 for (align_label = uid_align[uid]; align_label; align_label = uid_align[uid])
589 {
590 int align_addr, new_align;
591
592 uid = INSN_UID (align_label);
593 align_addr = INSN_ADDRESSES (uid) - insn_lengths[uid];
594 if (uid_shuid[uid] > end_shuid)
595 break;
596 known_align_log = LABEL_TO_ALIGNMENT (align_label);
597 new_align = 1 << known_align_log;
598 if (new_align < known_align)
599 continue;
600 fuzz += (-align_addr ^ growth) & (new_align - known_align);
601 known_align = new_align;
602 }
603 return fuzz;
604 }
605
606 /* Compute a worst-case reference address of a branch so that it
607 can be safely used in the presence of aligned labels. Since the
608 size of the branch itself is unknown, the size of the branch is
609 not included in the range. I.e. for a forward branch, the reference
610 address is the end address of the branch as known from the previous
611 branch shortening pass, minus a value to account for possible size
612 increase due to alignment. For a backward branch, it is the start
613 address of the branch as known from the current pass, plus a value
614 to account for possible size increase due to alignment.
615 NB.: Therefore, the maximum offset allowed for backward branches needs
616 to exclude the branch size. */
617
618 int
619 insn_current_reference_address (rtx branch)
620 {
621 rtx dest, seq;
622 int seq_uid;
623
624 if (! INSN_ADDRESSES_SET_P ())
625 return 0;
626
627 seq = NEXT_INSN (PREV_INSN (branch));
628 seq_uid = INSN_UID (seq);
629 if (GET_CODE (branch) != JUMP_INSN)
630 /* This can happen for example on the PA; the objective is to know the
631 offset to address something in front of the start of the function.
632 Thus, we can treat it like a backward branch.
633 We assume here that FUNCTION_BOUNDARY / BITS_PER_UNIT is larger than
634 any alignment we'd encounter, so we skip the call to align_fuzz. */
635 return insn_current_address;
636 dest = JUMP_LABEL (branch);
637
638 /* BRANCH has no proper alignment chain set, so use SEQ.
639 BRANCH also has no INSN_SHUID. */
640 if (INSN_SHUID (seq) < INSN_SHUID (dest))
641 {
642 /* Forward branch. */
643 return (insn_last_address + insn_lengths[seq_uid]
644 - align_fuzz (seq, dest, length_unit_log, ~0));
645 }
646 else
647 {
648 /* Backward branch. */
649 return (insn_current_address
650 + align_fuzz (dest, seq, length_unit_log, ~0));
651 }
652 }
653 #endif /* HAVE_ATTR_length */
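/* Illustrative sketch, added for clarity and not part of the original
   source: how a port's length computation might use
   insn_current_reference_address to choose between a short and a long
   form of a conditional branch.  The 256-byte range and the lengths are
   made up for the example.  Guarded out of the build.  */
#if 0
static int
example_branch_length (rtx branch)
{
  int ref = insn_current_reference_address (branch);
  int dest = INSN_ADDRESSES (INSN_UID (JUMP_LABEL (branch)));
  int offset = dest - ref;

  /* Because REF is a worst-case address, a branch judged in range here
     stays in range whatever alignment padding is finally emitted.  */
  return (offset < -256 || offset >= 256) ? 8 : 4;
}
#endif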
654 \f
655 void
656 compute_alignments (void)
657 {
658 int log, max_skip, max_log;
659 basic_block bb;
660
661 if (label_align)
662 {
663 free (label_align);
664 label_align = 0;
665 }
666
667 max_labelno = max_label_num ();
668 min_labelno = get_first_label_num ();
669 label_align = xcalloc (max_labelno - min_labelno + 1,
670 sizeof (struct label_alignment));
671
672 /* If not optimizing or optimizing for size, don't assign any alignments. */
673 if (! optimize || optimize_size)
674 return;
675
676 FOR_EACH_BB (bb)
677 {
678 rtx label = BB_HEAD (bb);
679 int fallthru_frequency = 0, branch_frequency = 0, has_fallthru = 0;
680 edge e;
681
682 if (GET_CODE (label) != CODE_LABEL
683 || probably_never_executed_bb_p (bb))
684 continue;
685 max_log = LABEL_ALIGN (label);
686 max_skip = LABEL_ALIGN_MAX_SKIP;
687
688 for (e = bb->pred; e; e = e->pred_next)
689 {
690 if (e->flags & EDGE_FALLTHRU)
691 has_fallthru = 1, fallthru_frequency += EDGE_FREQUENCY (e);
692 else
693 branch_frequency += EDGE_FREQUENCY (e);
694 }
695
696 /* There are two reasons to align a block that has no incoming fallthru edge:
697 1) to avoid fetch stalls when the branch destination is near a cache boundary
698 2) to improve cache efficiency in case the previous block is not executed
699 (so it does not need to be in the cache).
700
701 To catch the first case, we align frequently executed blocks.
702 To catch the second, we align blocks that are executed more frequently
703 than their predecessor, when that predecessor is unlikely to be executed
704 at all when the function is called. */
705
706 if (!has_fallthru
707 && (branch_frequency > BB_FREQ_MAX / 10
708 || (bb->frequency > bb->prev_bb->frequency * 10
709 && (bb->prev_bb->frequency
710 <= ENTRY_BLOCK_PTR->frequency / 2))))
711 {
712 log = JUMP_ALIGN (label);
713 if (max_log < log)
714 {
715 max_log = log;
716 max_skip = JUMP_ALIGN_MAX_SKIP;
717 }
718 }
719 /* If the block is frequent and reached mostly by non-fallthru edges,
720 align it. It is most likely the first block of a loop. */
721 if (has_fallthru
722 && maybe_hot_bb_p (bb)
723 && branch_frequency + fallthru_frequency > BB_FREQ_MAX / 10
724 && branch_frequency > fallthru_frequency * 2)
725 {
726 log = LOOP_ALIGN (label);
727 if (max_log < log)
728 {
729 max_log = log;
730 max_skip = LOOP_ALIGN_MAX_SKIP;
731 }
732 }
733 LABEL_TO_ALIGNMENT (label) = max_log;
734 LABEL_TO_MAX_SKIP (label) = max_skip;
735 }
736 }
737 \f
738 /* Make a pass over all insns and compute their actual lengths by shortening
739 any branches of variable length if possible. */
740
741 /* shorten_branches might be called multiple times: for example, the SH
742 port splits out-of-range conditional branches in MACHINE_DEPENDENT_REORG.
743 In order to do this, it needs proper length information, which it obtains
744 by calling shorten_branches. This cannot be collapsed with
745 shorten_branches itself into a single pass unless we also want to integrate
746 reorg.c, since the branch splitting exposes new instructions with delay
747 slots. */
748
749 void
750 shorten_branches (rtx first ATTRIBUTE_UNUSED)
751 {
752 rtx insn;
753 int max_uid;
754 int i;
755 int max_log;
756 int max_skip;
757 #ifdef HAVE_ATTR_length
758 #define MAX_CODE_ALIGN 16
759 rtx seq;
760 int something_changed = 1;
761 char *varying_length;
762 rtx body;
763 int uid;
764 rtx align_tab[MAX_CODE_ALIGN];
765
766 #endif
767
768 /* Compute maximum UID and allocate label_align / uid_shuid. */
769 max_uid = get_max_uid ();
770
771 uid_shuid = xmalloc (max_uid * sizeof *uid_shuid);
772
773 if (max_labelno != max_label_num ())
774 {
775 int old = max_labelno;
776 int n_labels;
777 int n_old_labels;
778
779 max_labelno = max_label_num ();
780
781 n_labels = max_labelno - min_labelno + 1;
782 n_old_labels = old - min_labelno + 1;
783
784 label_align = xrealloc (label_align,
785 n_labels * sizeof (struct label_alignment));
786
787 /* The range of labels grows monotonically within a function. An abort
788 here means that the initialization of the array got lost. */
789 if (n_old_labels > n_labels)
790 abort ();
791
792 memset (label_align + n_old_labels, 0,
793 (n_labels - n_old_labels) * sizeof (struct label_alignment));
794 }
795
796 /* Initialize label_align and set up uid_shuid to be strictly
797 monotonically rising with insn order. */
798 /* We use max_log here to keep track of the maximum alignment we want to
799 impose on the next CODE_LABEL (or the current one if we are processing
800 the CODE_LABEL itself). */
801
802 max_log = 0;
803 max_skip = 0;
804
805 for (insn = get_insns (), i = 1; insn; insn = NEXT_INSN (insn))
806 {
807 int log;
808
809 INSN_SHUID (insn) = i++;
810 if (INSN_P (insn))
811 {
812 /* reorg might make the first insn of a loop be run only once,
813 and delete the label in front of it. Then we want to apply
814 the loop alignment to the new label created by reorg, which
815 is separated by the former loop start insn from the
816 NOTE_INSN_LOOP_BEG. */
817 }
818 else if (GET_CODE (insn) == CODE_LABEL)
819 {
820 rtx next;
821
822 /* Merge in alignments computed by compute_alignments. */
823 log = LABEL_TO_ALIGNMENT (insn);
824 if (max_log < log)
825 {
826 max_log = log;
827 max_skip = LABEL_TO_MAX_SKIP (insn);
828 }
829
830 log = LABEL_ALIGN (insn);
831 if (max_log < log)
832 {
833 max_log = log;
834 max_skip = LABEL_ALIGN_MAX_SKIP;
835 }
836 next = NEXT_INSN (insn);
837 /* ADDR_VECs only take room if read-only data goes into the text
838 section. */
839 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
840 if (next && GET_CODE (next) == JUMP_INSN)
841 {
842 rtx nextbody = PATTERN (next);
843 if (GET_CODE (nextbody) == ADDR_VEC
844 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
845 {
846 log = ADDR_VEC_ALIGN (next);
847 if (max_log < log)
848 {
849 max_log = log;
850 max_skip = LABEL_ALIGN_MAX_SKIP;
851 }
852 }
853 }
854 LABEL_TO_ALIGNMENT (insn) = max_log;
855 LABEL_TO_MAX_SKIP (insn) = max_skip;
856 max_log = 0;
857 max_skip = 0;
858 }
859 else if (GET_CODE (insn) == BARRIER)
860 {
861 rtx label;
862
863 for (label = insn; label && ! INSN_P (label);
864 label = NEXT_INSN (label))
865 if (GET_CODE (label) == CODE_LABEL)
866 {
867 log = LABEL_ALIGN_AFTER_BARRIER (insn);
868 if (max_log < log)
869 {
870 max_log = log;
871 max_skip = LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP;
872 }
873 break;
874 }
875 }
876 }
877 #ifdef HAVE_ATTR_length
878
879 /* Allocate the rest of the arrays. */
880 insn_lengths = xmalloc (max_uid * sizeof (*insn_lengths));
881 insn_lengths_max_uid = max_uid;
882 /* Syntax errors can lead to labels being outside of the main insn stream.
883 Initialize insn_addresses, so that we get reproducible results. */
884 INSN_ADDRESSES_ALLOC (max_uid);
885
886 varying_length = xcalloc (max_uid, sizeof (char));
887
888 /* Initialize uid_align. We scan instructions
889 from end to start, and keep in align_tab[n] the last seen insn
890 that does an alignment of at least n+1, i.e. the successor
891 in the alignment chain for an insn that does / has a known
892 alignment of n. */
893 uid_align = xcalloc (max_uid, sizeof *uid_align);
894
895 for (i = MAX_CODE_ALIGN; --i >= 0;)
896 align_tab[i] = NULL_RTX;
897 seq = get_last_insn ();
898 for (; seq; seq = PREV_INSN (seq))
899 {
900 int uid = INSN_UID (seq);
901 int log;
902 log = (GET_CODE (seq) == CODE_LABEL ? LABEL_TO_ALIGNMENT (seq) : 0);
903 uid_align[uid] = align_tab[0];
904 if (log)
905 {
906 /* Found an alignment label. */
907 uid_align[uid] = align_tab[log];
908 for (i = log - 1; i >= 0; i--)
909 align_tab[i] = seq;
910 }
911 }
912 #ifdef CASE_VECTOR_SHORTEN_MODE
913 if (optimize)
914 {
915 /* Look for ADDR_DIFF_VECs, and initialize their minimum and maximum
916 label fields. */
917
918 int min_shuid = INSN_SHUID (get_insns ()) - 1;
919 int max_shuid = INSN_SHUID (get_last_insn ()) + 1;
920 int rel;
921
922 for (insn = first; insn != 0; insn = NEXT_INSN (insn))
923 {
924 rtx min_lab = NULL_RTX, max_lab = NULL_RTX, pat;
925 int len, i, min, max, insn_shuid;
926 int min_align;
927 addr_diff_vec_flags flags;
928
929 if (GET_CODE (insn) != JUMP_INSN
930 || GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
931 continue;
932 pat = PATTERN (insn);
933 len = XVECLEN (pat, 1);
934 if (len <= 0)
935 abort ();
936 min_align = MAX_CODE_ALIGN;
937 for (min = max_shuid, max = min_shuid, i = len - 1; i >= 0; i--)
938 {
939 rtx lab = XEXP (XVECEXP (pat, 1, i), 0);
940 int shuid = INSN_SHUID (lab);
941 if (shuid < min)
942 {
943 min = shuid;
944 min_lab = lab;
945 }
946 if (shuid > max)
947 {
948 max = shuid;
949 max_lab = lab;
950 }
951 if (min_align > LABEL_TO_ALIGNMENT (lab))
952 min_align = LABEL_TO_ALIGNMENT (lab);
953 }
954 XEXP (pat, 2) = gen_rtx_LABEL_REF (VOIDmode, min_lab);
955 XEXP (pat, 3) = gen_rtx_LABEL_REF (VOIDmode, max_lab);
956 insn_shuid = INSN_SHUID (insn);
957 rel = INSN_SHUID (XEXP (XEXP (pat, 0), 0));
958 flags.min_align = min_align;
959 flags.base_after_vec = rel > insn_shuid;
960 flags.min_after_vec = min > insn_shuid;
961 flags.max_after_vec = max > insn_shuid;
962 flags.min_after_base = min > rel;
963 flags.max_after_base = max > rel;
964 ADDR_DIFF_VEC_FLAGS (pat) = flags;
965 }
966 }
967 #endif /* CASE_VECTOR_SHORTEN_MODE */
968
969 /* Compute initial lengths, addresses, and varying flags for each insn. */
970 for (insn_current_address = 0, insn = first;
971 insn != 0;
972 insn_current_address += insn_lengths[uid], insn = NEXT_INSN (insn))
973 {
974 uid = INSN_UID (insn);
975
976 insn_lengths[uid] = 0;
977
978 if (GET_CODE (insn) == CODE_LABEL)
979 {
980 int log = LABEL_TO_ALIGNMENT (insn);
981 if (log)
982 {
983 int align = 1 << log;
984 int new_address = (insn_current_address + align - 1) & -align;
985 insn_lengths[uid] = new_address - insn_current_address;
986 }
987 }
988
989 INSN_ADDRESSES (uid) = insn_current_address + insn_lengths[uid];
990
991 if (GET_CODE (insn) == NOTE || GET_CODE (insn) == BARRIER
992 || GET_CODE (insn) == CODE_LABEL)
993 continue;
994 if (INSN_DELETED_P (insn))
995 continue;
996
997 body = PATTERN (insn);
998 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
999 {
1000 /* This only takes room if read-only data goes into the text
1001 section. */
1002 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1003 insn_lengths[uid] = (XVECLEN (body,
1004 GET_CODE (body) == ADDR_DIFF_VEC)
1005 * GET_MODE_SIZE (GET_MODE (body)));
1006 /* Alignment is handled by ADDR_VEC_ALIGN. */
1007 }
1008 else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0)
1009 insn_lengths[uid] = asm_insn_count (body) * insn_default_length (insn);
1010 else if (GET_CODE (body) == SEQUENCE)
1011 {
1012 int i;
1013 int const_delay_slots;
1014 #ifdef DELAY_SLOTS
1015 const_delay_slots = const_num_delay_slots (XVECEXP (body, 0, 0));
1016 #else
1017 const_delay_slots = 0;
1018 #endif
1019 /* Inside a delay slot sequence, we do not do any branch shortening
1020 if the shortening could change the number of delay slots
1021 of the branch. */
1022 for (i = 0; i < XVECLEN (body, 0); i++)
1023 {
1024 rtx inner_insn = XVECEXP (body, 0, i);
1025 int inner_uid = INSN_UID (inner_insn);
1026 int inner_length;
1027
1028 if (GET_CODE (body) == ASM_INPUT
1029 || asm_noperands (PATTERN (XVECEXP (body, 0, i))) >= 0)
1030 inner_length = (asm_insn_count (PATTERN (inner_insn))
1031 * insn_default_length (inner_insn));
1032 else
1033 inner_length = insn_default_length (inner_insn);
1034
1035 insn_lengths[inner_uid] = inner_length;
1036 if (const_delay_slots)
1037 {
1038 if ((varying_length[inner_uid]
1039 = insn_variable_length_p (inner_insn)) != 0)
1040 varying_length[uid] = 1;
1041 INSN_ADDRESSES (inner_uid) = (insn_current_address
1042 + insn_lengths[uid]);
1043 }
1044 else
1045 varying_length[inner_uid] = 0;
1046 insn_lengths[uid] += inner_length;
1047 }
1048 }
1049 else if (GET_CODE (body) != USE && GET_CODE (body) != CLOBBER)
1050 {
1051 insn_lengths[uid] = insn_default_length (insn);
1052 varying_length[uid] = insn_variable_length_p (insn);
1053 }
1054
1055 /* If needed, do any adjustment. */
1056 #ifdef ADJUST_INSN_LENGTH
1057 ADJUST_INSN_LENGTH (insn, insn_lengths[uid]);
1058 if (insn_lengths[uid] < 0)
1059 fatal_insn ("negative insn length", insn);
1060 #endif
1061 }
1062
1063 /* Now loop over all the insns finding varying length insns. For each,
1064 get the current insn length. If it has changed, reflect the change.
1065 When nothing changes for a full pass, we are done. */
1066
1067 while (something_changed)
1068 {
1069 something_changed = 0;
1070 insn_current_align = MAX_CODE_ALIGN - 1;
1071 for (insn_current_address = 0, insn = first;
1072 insn != 0;
1073 insn = NEXT_INSN (insn))
1074 {
1075 int new_length;
1076 #ifdef ADJUST_INSN_LENGTH
1077 int tmp_length;
1078 #endif
1079 int length_align;
1080
1081 uid = INSN_UID (insn);
1082
1083 if (GET_CODE (insn) == CODE_LABEL)
1084 {
1085 int log = LABEL_TO_ALIGNMENT (insn);
1086 if (log > insn_current_align)
1087 {
1088 int align = 1 << log;
1089 int new_address = (insn_current_address + align - 1) & -align;
1090 insn_lengths[uid] = new_address - insn_current_address;
1091 insn_current_align = log;
1092 insn_current_address = new_address;
1093 }
1094 else
1095 insn_lengths[uid] = 0;
1096 INSN_ADDRESSES (uid) = insn_current_address;
1097 continue;
1098 }
1099
1100 length_align = INSN_LENGTH_ALIGNMENT (insn);
1101 if (length_align < insn_current_align)
1102 insn_current_align = length_align;
1103
1104 insn_last_address = INSN_ADDRESSES (uid);
1105 INSN_ADDRESSES (uid) = insn_current_address;
1106
1107 #ifdef CASE_VECTOR_SHORTEN_MODE
1108 if (optimize && GET_CODE (insn) == JUMP_INSN
1109 && GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
1110 {
1111 rtx body = PATTERN (insn);
1112 int old_length = insn_lengths[uid];
1113 rtx rel_lab = XEXP (XEXP (body, 0), 0);
1114 rtx min_lab = XEXP (XEXP (body, 2), 0);
1115 rtx max_lab = XEXP (XEXP (body, 3), 0);
1116 int rel_addr = INSN_ADDRESSES (INSN_UID (rel_lab));
1117 int min_addr = INSN_ADDRESSES (INSN_UID (min_lab));
1118 int max_addr = INSN_ADDRESSES (INSN_UID (max_lab));
1119 rtx prev;
1120 int rel_align = 0;
1121 addr_diff_vec_flags flags;
1122
1123 /* Avoid automatic aggregate initialization. */
1124 flags = ADDR_DIFF_VEC_FLAGS (body);
1125
1126 /* Try to find a known alignment for rel_lab. */
1127 for (prev = rel_lab;
1128 prev
1129 && ! insn_lengths[INSN_UID (prev)]
1130 && ! (varying_length[INSN_UID (prev)] & 1);
1131 prev = PREV_INSN (prev))
1132 if (varying_length[INSN_UID (prev)] & 2)
1133 {
1134 rel_align = LABEL_TO_ALIGNMENT (prev);
1135 break;
1136 }
1137
1138 /* See the comment on addr_diff_vec_flags in rtl.h for the
1139 meaning of the flags values. base: REL_LAB vec: INSN */
1140 /* Anything after INSN still has addresses from the last
1141 pass; adjust these so that they reflect our current
1142 estimate for this pass. */
1143 if (flags.base_after_vec)
1144 rel_addr += insn_current_address - insn_last_address;
1145 if (flags.min_after_vec)
1146 min_addr += insn_current_address - insn_last_address;
1147 if (flags.max_after_vec)
1148 max_addr += insn_current_address - insn_last_address;
1149 /* We want to know the worst case, i.e. lowest possible value
1150 for the offset of MIN_LAB. If MIN_LAB is after REL_LAB,
1151 its offset is positive, and we have to be wary of code shrink;
1152 otherwise, it is negative, and we have to be wary of code
1153 size increase. */
1154 if (flags.min_after_base)
1155 {
1156 /* If INSN is between REL_LAB and MIN_LAB, the size
1157 changes we are about to make can change the alignment
1158 within the observed offset, therefore we have to break
1159 it up into two parts that are independent. */
1160 if (! flags.base_after_vec && flags.min_after_vec)
1161 {
1162 min_addr -= align_fuzz (rel_lab, insn, rel_align, 0);
1163 min_addr -= align_fuzz (insn, min_lab, 0, 0);
1164 }
1165 else
1166 min_addr -= align_fuzz (rel_lab, min_lab, rel_align, 0);
1167 }
1168 else
1169 {
1170 if (flags.base_after_vec && ! flags.min_after_vec)
1171 {
1172 min_addr -= align_fuzz (min_lab, insn, 0, ~0);
1173 min_addr -= align_fuzz (insn, rel_lab, 0, ~0);
1174 }
1175 else
1176 min_addr -= align_fuzz (min_lab, rel_lab, 0, ~0);
1177 }
1178 /* Likewise, determine the highest possible value
1179 for the offset of MAX_LAB. */
1180 if (flags.max_after_base)
1181 {
1182 if (! flags.base_after_vec && flags.max_after_vec)
1183 {
1184 max_addr += align_fuzz (rel_lab, insn, rel_align, ~0);
1185 max_addr += align_fuzz (insn, max_lab, 0, ~0);
1186 }
1187 else
1188 max_addr += align_fuzz (rel_lab, max_lab, rel_align, ~0);
1189 }
1190 else
1191 {
1192 if (flags.base_after_vec && ! flags.max_after_vec)
1193 {
1194 max_addr += align_fuzz (max_lab, insn, 0, 0);
1195 max_addr += align_fuzz (insn, rel_lab, 0, 0);
1196 }
1197 else
1198 max_addr += align_fuzz (max_lab, rel_lab, 0, 0);
1199 }
1200 PUT_MODE (body, CASE_VECTOR_SHORTEN_MODE (min_addr - rel_addr,
1201 max_addr - rel_addr,
1202 body));
1203 if (JUMP_TABLES_IN_TEXT_SECTION || !HAVE_READONLY_DATA_SECTION)
1204 {
1205 insn_lengths[uid]
1206 = (XVECLEN (body, 1) * GET_MODE_SIZE (GET_MODE (body)));
1207 insn_current_address += insn_lengths[uid];
1208 if (insn_lengths[uid] != old_length)
1209 something_changed = 1;
1210 }
1211
1212 continue;
1213 }
1214 #endif /* CASE_VECTOR_SHORTEN_MODE */
1215
1216 if (! (varying_length[uid]))
1217 {
1218 if (GET_CODE (insn) == INSN
1219 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1220 {
1221 int i;
1222
1223 body = PATTERN (insn);
1224 for (i = 0; i < XVECLEN (body, 0); i++)
1225 {
1226 rtx inner_insn = XVECEXP (body, 0, i);
1227 int inner_uid = INSN_UID (inner_insn);
1228
1229 INSN_ADDRESSES (inner_uid) = insn_current_address;
1230
1231 insn_current_address += insn_lengths[inner_uid];
1232 }
1233 }
1234 else
1235 insn_current_address += insn_lengths[uid];
1236
1237 continue;
1238 }
1239
1240 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1241 {
1242 int i;
1243
1244 body = PATTERN (insn);
1245 new_length = 0;
1246 for (i = 0; i < XVECLEN (body, 0); i++)
1247 {
1248 rtx inner_insn = XVECEXP (body, 0, i);
1249 int inner_uid = INSN_UID (inner_insn);
1250 int inner_length;
1251
1252 INSN_ADDRESSES (inner_uid) = insn_current_address;
1253
1254 /* insn_current_length returns 0 for insns with a
1255 non-varying length. */
1256 if (! varying_length[inner_uid])
1257 inner_length = insn_lengths[inner_uid];
1258 else
1259 inner_length = insn_current_length (inner_insn);
1260
1261 if (inner_length != insn_lengths[inner_uid])
1262 {
1263 insn_lengths[inner_uid] = inner_length;
1264 something_changed = 1;
1265 }
1266 insn_current_address += insn_lengths[inner_uid];
1267 new_length += inner_length;
1268 }
1269 }
1270 else
1271 {
1272 new_length = insn_current_length (insn);
1273 insn_current_address += new_length;
1274 }
1275
1276 #ifdef ADJUST_INSN_LENGTH
1277 /* If needed, do any adjustment. */
1278 tmp_length = new_length;
1279 ADJUST_INSN_LENGTH (insn, new_length);
1280 insn_current_address += (new_length - tmp_length);
1281 #endif
1282
1283 if (new_length != insn_lengths[uid])
1284 {
1285 insn_lengths[uid] = new_length;
1286 something_changed = 1;
1287 }
1288 }
1289 /* For a non-optimizing compile, do only a single pass. */
1290 if (!optimize)
1291 break;
1292 }
1293
1294 free (varying_length);
1295
1296 #endif /* HAVE_ATTR_length */
1297 }
1298
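/* Illustrative sketch, added for clarity and not part of the original
   source: the repeated-call pattern described in the comment before
   shorten_branches.  The splitting pass named here is hypothetical; the
   SH port does something similar in its machine-dependent reorg.
   Guarded out of the build.  */
#if 0
static void
example_machine_dependent_reorg (void)
{
  /* First pass: get usable lengths and addresses.  */
  shorten_branches (get_insns ());

  /* Hypothetical port-specific pass that splits out-of-range branches,
     possibly creating new insns with delay slots.  */
  example_split_out_of_range_branches ();

  /* Lengths and addresses are stale now; recompute them.  */
  shorten_branches (get_insns ());
}
#endif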
1299 #ifdef HAVE_ATTR_length
1300 /* Given the body of an INSN known to be generated by an ASM statement, return
1301 the number of machine instructions likely to be generated for this insn.
1302 This is used to compute its length. */
1303
1304 static int
1305 asm_insn_count (rtx body)
1306 {
1307 const char *template;
1308 int count = 1;
1309
1310 if (GET_CODE (body) == ASM_INPUT)
1311 template = XSTR (body, 0);
1312 else
1313 template = decode_asm_operands (body, NULL, NULL, NULL, NULL);
1314
1315 for (; *template; template++)
1316 if (IS_ASM_LOGICAL_LINE_SEPARATOR (*template) || *template == '\n')
1317 count++;
1318
1319 return count;
1320 }
1321 #endif
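/* Worked example, added for clarity and not part of the original source:
   with the default IS_ASM_LOGICAL_LINE_SEPARATOR defined above, the
   template "mov r1,r0\n\tadd r2,r0" and the template "mov r1,r0; add r2,r0"
   each yield a count of 2, so such an asm insn is assumed to occupy
   asm_insn_count times insn_default_length units.  */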
1322 \f
1323 /* Output assembler code for the start of a function,
1324 and initialize some of the variables in this file
1325 for the new function. The label for the function and associated
1326 assembler pseudo-ops have already been output in `assemble_start_function'.
1327
1328 FIRST is the first insn of the rtl for the function being compiled.
1329 FILE is the file to write assembler code to.
1330 OPTIMIZE is nonzero if we should eliminate redundant
1331 test and compare insns. */
1332
1333 void
1334 final_start_function (rtx first ATTRIBUTE_UNUSED, FILE *file,
1335 int optimize ATTRIBUTE_UNUSED)
1336 {
1337 block_depth = 0;
1338
1339 this_is_asm_operands = 0;
1340
1341 last_filename = locator_file (prologue_locator);
1342 last_linenum = locator_line (prologue_locator);
1343
1344 high_block_linenum = high_function_linenum = last_linenum;
1345
1346 (*debug_hooks->begin_prologue) (last_linenum, last_filename);
1347
1348 #if defined (DWARF2_UNWIND_INFO) || defined (IA64_UNWIND_INFO)
1349 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG)
1350 dwarf2out_begin_prologue (0, NULL);
1351 #endif
1352
1353 #ifdef LEAF_REG_REMAP
1354 if (current_function_uses_only_leaf_regs)
1355 leaf_renumber_regs (first);
1356 #endif
1357
1358 /* The Sun386i and perhaps other machines don't work right
1359 if the profiling code comes after the prologue. */
1360 #ifdef PROFILE_BEFORE_PROLOGUE
1361 if (current_function_profile)
1362 profile_function (file);
1363 #endif /* PROFILE_BEFORE_PROLOGUE */
1364
1365 #if defined (DWARF2_UNWIND_INFO) && defined (HAVE_prologue)
1366 if (dwarf2out_do_frame ())
1367 dwarf2out_frame_debug (NULL_RTX);
1368 #endif
1369
1370 /* If debugging, assign block numbers to all of the blocks in this
1371 function. */
1372 if (write_symbols)
1373 {
1374 remove_unnecessary_notes ();
1375 reemit_insn_block_notes ();
1376 number_blocks (current_function_decl);
1377 /* We never actually put out begin/end notes for the top-level
1378 block in the function. But, conceptually, that block is
1379 always needed. */
1380 TREE_ASM_WRITTEN (DECL_INITIAL (current_function_decl)) = 1;
1381 }
1382
1383 /* First output the function prologue: code to set up the stack frame. */
1384 (*targetm.asm_out.function_prologue) (file, get_frame_size ());
1385
1386 /* If the machine represents the prologue as RTL, the profiling code must
1387 be emitted when NOTE_INSN_PROLOGUE_END is scanned. */
1388 #ifdef HAVE_prologue
1389 if (! HAVE_prologue)
1390 #endif
1391 profile_after_prologue (file);
1392 }
1393
1394 static void
1395 profile_after_prologue (FILE *file ATTRIBUTE_UNUSED)
1396 {
1397 #ifndef PROFILE_BEFORE_PROLOGUE
1398 if (current_function_profile)
1399 profile_function (file);
1400 #endif /* not PROFILE_BEFORE_PROLOGUE */
1401 }
1402
1403 static void
1404 profile_function (FILE *file ATTRIBUTE_UNUSED)
1405 {
1406 #ifndef NO_PROFILE_COUNTERS
1407 # define NO_PROFILE_COUNTERS 0
1408 #endif
1409 #if defined(ASM_OUTPUT_REG_PUSH)
1410 int sval = current_function_returns_struct;
1411 rtx svrtx = targetm.calls.struct_value_rtx (TREE_TYPE (current_function_decl), 1);
1412 #if defined(STATIC_CHAIN_INCOMING_REGNUM) || defined(STATIC_CHAIN_REGNUM)
1413 int cxt = current_function_needs_context;
1414 #endif
1415 #endif /* ASM_OUTPUT_REG_PUSH */
1416
1417 if (! NO_PROFILE_COUNTERS)
1418 {
1419 int align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
1420 data_section ();
1421 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
1422 (*targetm.asm_out.internal_label) (file, "LP", current_function_funcdef_no);
1423 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
1424 }
1425
1426 function_section (current_function_decl);
1427
1428 #if defined(ASM_OUTPUT_REG_PUSH)
1429 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1430 ASM_OUTPUT_REG_PUSH (file, REGNO (svrtx));
1431 #endif
1432
1433 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1434 if (cxt)
1435 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_INCOMING_REGNUM);
1436 #else
1437 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1438 if (cxt)
1439 {
1440 ASM_OUTPUT_REG_PUSH (file, STATIC_CHAIN_REGNUM);
1441 }
1442 #endif
1443 #endif
1444
1445 FUNCTION_PROFILER (file, current_function_funcdef_no);
1446
1447 #if defined(STATIC_CHAIN_INCOMING_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1448 if (cxt)
1449 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_INCOMING_REGNUM);
1450 #else
1451 #if defined(STATIC_CHAIN_REGNUM) && defined(ASM_OUTPUT_REG_PUSH)
1452 if (cxt)
1453 {
1454 ASM_OUTPUT_REG_POP (file, STATIC_CHAIN_REGNUM);
1455 }
1456 #endif
1457 #endif
1458
1459 #if defined(ASM_OUTPUT_REG_PUSH)
1460 if (sval && svrtx != NULL_RTX && GET_CODE (svrtx) == REG)
1461 ASM_OUTPUT_REG_POP (file, REGNO (svrtx));
1462 #endif
1463 }
1464
1465 /* Output assembler code for the end of a function.
1466 For clarity, args are same as those of `final_start_function'
1467 even though not all of them are needed. */
1468
1469 void
1470 final_end_function (void)
1471 {
1472 app_disable ();
1473
1474 (*debug_hooks->end_function) (high_function_linenum);
1475
1476 /* Finally, output the function epilogue:
1477 code to restore the stack frame and return to the caller. */
1478 (*targetm.asm_out.function_epilogue) (asm_out_file, get_frame_size ());
1479
1480 /* And debug output. */
1481 (*debug_hooks->end_epilogue) (last_linenum, last_filename);
1482
1483 #if defined (DWARF2_UNWIND_INFO)
1484 if (write_symbols != DWARF2_DEBUG && write_symbols != VMS_AND_DWARF2_DEBUG
1485 && dwarf2out_do_frame ())
1486 dwarf2out_end_epilogue (last_linenum, last_filename);
1487 #endif
1488 }
1489 \f
1490 /* Output assembler code for some insns: all or part of a function.
1491 For description of args, see `final_start_function', above.
1492
1493 PRESCAN is 1 if we are not really outputting,
1494 just scanning as if we were outputting.
1495 Prescanning deletes and rearranges insns just like ordinary output.
1496 PRESCAN is -2 if we are outputting after having prescanned.
1497 In this case, don't try to delete or rearrange insns
1498 because that has already been done.
1499 Prescanning is done only on certain machines. */
1500
1501 void
1502 final (rtx first, FILE *file, int optimize, int prescan)
1503 {
1504 rtx insn;
1505 int max_uid = 0;
1506 int seen = 0;
1507
1508 last_ignored_compare = 0;
1509
1510 #ifdef SDB_DEBUGGING_INFO
1511 /* When producing SDB debugging info, delete troublesome line number
1512 notes from inlined functions in other files as well as duplicate
1513 line number notes. */
1514 if (write_symbols == SDB_DEBUG)
1515 {
1516 rtx last = 0;
1517 for (insn = first; insn; insn = NEXT_INSN (insn))
1518 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
1519 {
1520 if ((RTX_INTEGRATED_P (insn)
1521 && strcmp (NOTE_SOURCE_FILE (insn), main_input_filename) != 0)
1522 || (last != 0
1523 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last)
1524 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last)))
1525 {
1526 delete_insn (insn); /* Use delete_note. */
1527 continue;
1528 }
1529 last = insn;
1530 }
1531 }
1532 #endif
1533
1534 for (insn = first; insn; insn = NEXT_INSN (insn))
1535 {
1536 if (INSN_UID (insn) > max_uid) /* Find largest UID. */
1537 max_uid = INSN_UID (insn);
1538 #ifdef HAVE_cc0
1539 /* If CC tracking across branches is enabled, record the insn which
1540 jumps to each branch only reached from one place. */
1541 if (optimize && GET_CODE (insn) == JUMP_INSN)
1542 {
1543 rtx lab = JUMP_LABEL (insn);
1544 if (lab && LABEL_NUSES (lab) == 1)
1545 {
1546 LABEL_REFS (lab) = insn;
1547 }
1548 }
1549 #endif
1550 }
1551
1552 init_recog ();
1553
1554 CC_STATUS_INIT;
1555
1556 /* Output the insns. */
1557 for (insn = NEXT_INSN (first); insn;)
1558 {
1559 #ifdef HAVE_ATTR_length
1560 if ((unsigned) INSN_UID (insn) >= INSN_ADDRESSES_SIZE ())
1561 {
1562 /* This can be triggered by bugs elsewhere in the compiler if
1563 new insns are created after init_insn_lengths is called. */
1564 if (GET_CODE (insn) == NOTE)
1565 insn_current_address = -1;
1566 else
1567 abort ();
1568 }
1569 else
1570 insn_current_address = INSN_ADDRESSES (INSN_UID (insn));
1571 #endif /* HAVE_ATTR_length */
1572
1573 insn = final_scan_insn (insn, file, optimize, prescan, 0, &seen);
1574 }
1575 }
1576 \f
1577 const char *
1578 get_insn_template (int code, rtx insn)
1579 {
1580 switch (insn_data[code].output_format)
1581 {
1582 case INSN_OUTPUT_FORMAT_SINGLE:
1583 return insn_data[code].output.single;
1584 case INSN_OUTPUT_FORMAT_MULTI:
1585 return insn_data[code].output.multi[which_alternative];
1586 case INSN_OUTPUT_FORMAT_FUNCTION:
1587 if (insn == NULL)
1588 abort ();
1589 return (*insn_data[code].output.function) (recog_data.operand, insn);
1590
1591 default:
1592 abort ();
1593 }
1594 }
1595
1596 /* Emit the appropriate declaration for an alternate-entry-point
1597 symbol represented by INSN, to FILE. INSN is a CODE_LABEL with
1598 LABEL_KIND != LABEL_NORMAL.
1599
1600 The case fall-through in this function is intentional. */
1601 static void
1602 output_alternate_entry_point (FILE *file, rtx insn)
1603 {
1604 const char *name = LABEL_NAME (insn);
1605
1606 switch (LABEL_KIND (insn))
1607 {
1608 case LABEL_WEAK_ENTRY:
1609 #ifdef ASM_WEAKEN_LABEL
1610 ASM_WEAKEN_LABEL (file, name);
1611 #endif
1612 case LABEL_GLOBAL_ENTRY:
1613 (*targetm.asm_out.globalize_label) (file, name);
1614 case LABEL_STATIC_ENTRY:
1615 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
1616 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
1617 #endif
1618 ASM_OUTPUT_LABEL (file, name);
1619 break;
1620
1621 case LABEL_NORMAL:
1622 default:
1623 abort ();
1624 }
1625 }
1626
1627 /* The final scan for one insn, INSN.
1628 Args are same as in `final', except that INSN
1629 is the insn being scanned.
1630 Value returned is the next insn to be scanned.
1631
1632 NOPEEPHOLES is the flag to disallow peephole processing (currently
1633 used for within delayed branch sequence output).
1634
1635 SEEN is used to track the end of the prologue, for emitting
1636 debug information. We force the emission of a line note after
1637 both NOTE_INSN_PROLOGUE_END and NOTE_INSN_FUNCTION_BEG, or
1638 at the beginning of the second basic block, whichever comes
1639 first. */
1640
1641 static rtx
1642 final_scan_insn (rtx insn, FILE *file, int optimize ATTRIBUTE_UNUSED,
1643 int prescan, int nopeepholes ATTRIBUTE_UNUSED,
1644 int *seen)
1645 {
1646 #ifdef HAVE_cc0
1647 rtx set;
1648 #endif
1649
1650 insn_counter++;
1651
1652 /* Ignore deleted insns. These can occur when we split insns (due to a
1653 template of "#") while not optimizing. */
1654 if (INSN_DELETED_P (insn))
1655 return NEXT_INSN (insn);
1656
1657 switch (GET_CODE (insn))
1658 {
1659 case NOTE:
1660 if (prescan > 0)
1661 break;
1662
1663 switch (NOTE_LINE_NUMBER (insn))
1664 {
1665 case NOTE_INSN_DELETED:
1666 case NOTE_INSN_LOOP_BEG:
1667 case NOTE_INSN_LOOP_END:
1668 case NOTE_INSN_LOOP_END_TOP_COND:
1669 case NOTE_INSN_LOOP_CONT:
1670 case NOTE_INSN_LOOP_VTOP:
1671 case NOTE_INSN_FUNCTION_END:
1672 case NOTE_INSN_REPEATED_LINE_NUMBER:
1673 case NOTE_INSN_EXPECTED_VALUE:
1674 break;
1675
1676 case NOTE_INSN_BASIC_BLOCK:
1677 #ifdef IA64_UNWIND_INFO
1678 IA64_UNWIND_EMIT (asm_out_file, insn);
1679 #endif
1680 if (flag_debug_asm)
1681 fprintf (asm_out_file, "\t%s basic block %d\n",
1682 ASM_COMMENT_START, NOTE_BASIC_BLOCK (insn)->index);
1683
1684 if ((*seen & (SEEN_EMITTED | SEEN_BB)) == SEEN_BB)
1685 {
1686 *seen |= SEEN_EMITTED;
1687 last_filename = NULL;
1688 }
1689 else
1690 *seen |= SEEN_BB;
1691
1692 break;
1693
1694 case NOTE_INSN_EH_REGION_BEG:
1695 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHB",
1696 NOTE_EH_HANDLER (insn));
1697 break;
1698
1699 case NOTE_INSN_EH_REGION_END:
1700 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LEHE",
1701 NOTE_EH_HANDLER (insn));
1702 break;
1703
1704 case NOTE_INSN_PROLOGUE_END:
1705 (*targetm.asm_out.function_end_prologue) (file);
1706 profile_after_prologue (file);
1707
1708 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1709 {
1710 *seen |= SEEN_EMITTED;
1711 last_filename = NULL;
1712 }
1713 else
1714 *seen |= SEEN_NOTE;
1715
1716 break;
1717
1718 case NOTE_INSN_EPILOGUE_BEG:
1719 (*targetm.asm_out.function_begin_epilogue) (file);
1720 break;
1721
1722 case NOTE_INSN_FUNCTION_BEG:
1723 app_disable ();
1724 (*debug_hooks->end_prologue) (last_linenum, last_filename);
1725
1726 if ((*seen & (SEEN_EMITTED | SEEN_NOTE)) == SEEN_NOTE)
1727 {
1728 *seen |= SEEN_EMITTED;
1729 last_filename = NULL;
1730 }
1731 else
1732 *seen |= SEEN_NOTE;
1733
1734 break;
1735
1736 case NOTE_INSN_BLOCK_BEG:
1737 if (debug_info_level == DINFO_LEVEL_NORMAL
1738 || debug_info_level == DINFO_LEVEL_VERBOSE
1739 || write_symbols == DWARF_DEBUG
1740 || write_symbols == DWARF2_DEBUG
1741 || write_symbols == VMS_AND_DWARF2_DEBUG
1742 || write_symbols == VMS_DEBUG)
1743 {
1744 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1745
1746 app_disable ();
1747 ++block_depth;
1748 high_block_linenum = last_linenum;
1749
1750 /* Output debugging info about the symbol-block beginning. */
1751 (*debug_hooks->begin_block) (last_linenum, n);
1752
1753 /* Mark this block as output. */
1754 TREE_ASM_WRITTEN (NOTE_BLOCK (insn)) = 1;
1755 }
1756 break;
1757
1758 case NOTE_INSN_BLOCK_END:
1759 if (debug_info_level == DINFO_LEVEL_NORMAL
1760 || debug_info_level == DINFO_LEVEL_VERBOSE
1761 || write_symbols == DWARF_DEBUG
1762 || write_symbols == DWARF2_DEBUG
1763 || write_symbols == VMS_AND_DWARF2_DEBUG
1764 || write_symbols == VMS_DEBUG)
1765 {
1766 int n = BLOCK_NUMBER (NOTE_BLOCK (insn));
1767
1768 app_disable ();
1769
1770 /* End of a symbol-block. */
1771 --block_depth;
1772 if (block_depth < 0)
1773 abort ();
1774
1775 (*debug_hooks->end_block) (high_block_linenum, n);
1776 }
1777 break;
1778
1779 case NOTE_INSN_DELETED_LABEL:
1780 /* Emit the label. We may have deleted the CODE_LABEL because
1781 the label could be proved to be unreachable, though still
1782 referenced (in the form of having its address taken). */
1783 ASM_OUTPUT_DEBUG_LABEL (file, "L", CODE_LABEL_NUMBER (insn));
1784 break;
1785
1786 case 0:
1787 break;
1788
1789 default:
1790 if (NOTE_LINE_NUMBER (insn) <= 0)
1791 abort ();
1792 break;
1793 }
1794 break;
1795
1796 case BARRIER:
1797 #if defined (DWARF2_UNWIND_INFO)
1798 if (dwarf2out_do_frame ())
1799 dwarf2out_frame_debug (insn);
1800 #endif
1801 break;
1802
1803 case CODE_LABEL:
1804 /* The target port might emit labels in the output function for
1805 some insn, e.g. sh.c output_branchy_insn. */
1806 if (CODE_LABEL_NUMBER (insn) <= max_labelno)
1807 {
1808 int align = LABEL_TO_ALIGNMENT (insn);
1809 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1810 int max_skip = LABEL_TO_MAX_SKIP (insn);
1811 #endif
1812
1813 if (align && NEXT_INSN (insn))
1814 {
1815 #ifdef ASM_OUTPUT_MAX_SKIP_ALIGN
1816 ASM_OUTPUT_MAX_SKIP_ALIGN (file, align, max_skip);
1817 #else
1818 #ifdef ASM_OUTPUT_ALIGN_WITH_NOP
1819 ASM_OUTPUT_ALIGN_WITH_NOP (file, align);
1820 #else
1821 ASM_OUTPUT_ALIGN (file, align);
1822 #endif
1823 #endif
1824 }
1825 }
1826 #ifdef HAVE_cc0
1827 CC_STATUS_INIT;
1828 /* If this label is reached from only one place, set the condition
1829 codes from the instruction just before the branch. */
1830
1831 /* Disabled because some insns set cc_status in the C output code
1832 and NOTICE_UPDATE_CC alone can set incorrect status. */
1833 if (0 /* optimize && LABEL_NUSES (insn) == 1*/)
1834 {
1835 rtx jump = LABEL_REFS (insn);
1836 rtx barrier = prev_nonnote_insn (insn);
1837 rtx prev;
1838 /* If the LABEL_REFS field of this label has been set to point
1839 at a branch, the predecessor of the branch is a regular
1840 insn, and that branch is the only way to reach this label,
1841 set the condition codes based on the branch and its
1842 predecessor. */
1843 if (barrier && GET_CODE (barrier) == BARRIER
1844 && jump && GET_CODE (jump) == JUMP_INSN
1845 && (prev = prev_nonnote_insn (jump))
1846 && GET_CODE (prev) == INSN)
1847 {
1848 NOTICE_UPDATE_CC (PATTERN (prev), prev);
1849 NOTICE_UPDATE_CC (PATTERN (jump), jump);
1850 }
1851 }
1852 #endif
1853 if (prescan > 0)
1854 break;
1855
1856 if (LABEL_NAME (insn))
1857 (*debug_hooks->label) (insn);
1858
1859 if (app_on)
1860 {
1861 fputs (ASM_APP_OFF, file);
1862 app_on = 0;
1863 }
1864 if (NEXT_INSN (insn) != 0
1865 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN)
1866 {
1867 rtx nextbody = PATTERN (NEXT_INSN (insn));
1868
1869 /* If this label is followed by a jump-table,
1870 make sure we put the label in the read-only section. Also
1871 possibly write the label and jump table together. */
1872
1873 if (GET_CODE (nextbody) == ADDR_VEC
1874 || GET_CODE (nextbody) == ADDR_DIFF_VEC)
1875 {
1876 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1877 /* In this case, the case vector is being moved by the
1878 target, so don't output the label at all. Leave that
1879 to the back end macros. */
1880 #else
1881 if (! JUMP_TABLES_IN_TEXT_SECTION)
1882 {
1883 int log_align;
1884
1885 readonly_data_section ();
1886
1887 #ifdef ADDR_VEC_ALIGN
1888 log_align = ADDR_VEC_ALIGN (NEXT_INSN (insn));
1889 #else
1890 log_align = exact_log2 (BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1891 #endif
1892 ASM_OUTPUT_ALIGN (file, log_align);
1893 }
1894 else
1895 function_section (current_function_decl);
1896
1897 #ifdef ASM_OUTPUT_CASE_LABEL
1898 ASM_OUTPUT_CASE_LABEL (file, "L", CODE_LABEL_NUMBER (insn),
1899 NEXT_INSN (insn));
1900 #else
1901 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1902 #endif
1903 #endif
1904 break;
1905 }
1906 }
1907 if (LABEL_ALT_ENTRY_P (insn))
1908 output_alternate_entry_point (file, insn);
1909 else
1910 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (insn));
1911 break;
1912
1913 default:
1914 {
1915 rtx body = PATTERN (insn);
1916 int insn_code_number;
1917 const char *template;
1918 rtx note;
1919
1920 /* An INSN, JUMP_INSN or CALL_INSN.
1921 First check for special kinds that recog doesn't recognize. */
1922
1923 if (GET_CODE (body) == USE /* These are just declarations. */
1924 || GET_CODE (body) == CLOBBER)
1925 break;
1926
1927 #ifdef HAVE_cc0
1928 /* If there is a REG_CC_SETTER note on this insn, it means that
1929 the setting of the condition code was done in the delay slot
1930 of the insn that branched here. So recover the cc status
1931 from the insn that set it. */
1932
1933 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1934 if (note)
1935 {
1936 NOTICE_UPDATE_CC (PATTERN (XEXP (note, 0)), XEXP (note, 0));
1937 cc_prev_status = cc_status;
1938 }
1939 #endif
1940
1941 /* Detect insns that are really jump-tables
1942 and output them as such. */
1943
1944 if (GET_CODE (body) == ADDR_VEC || GET_CODE (body) == ADDR_DIFF_VEC)
1945 {
1946 #if !(defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC))
1947 int vlen, idx;
1948 #endif
1949
1950 if (prescan > 0)
1951 break;
1952
1953 if (app_on)
1954 {
1955 fputs (ASM_APP_OFF, file);
1956 app_on = 0;
1957 }
1958
1959 #if defined(ASM_OUTPUT_ADDR_VEC) || defined(ASM_OUTPUT_ADDR_DIFF_VEC)
1960 if (GET_CODE (body) == ADDR_VEC)
1961 {
1962 #ifdef ASM_OUTPUT_ADDR_VEC
1963 ASM_OUTPUT_ADDR_VEC (PREV_INSN (insn), body);
1964 #else
1965 abort ();
1966 #endif
1967 }
1968 else
1969 {
1970 #ifdef ASM_OUTPUT_ADDR_DIFF_VEC
1971 ASM_OUTPUT_ADDR_DIFF_VEC (PREV_INSN (insn), body);
1972 #else
1973 abort ();
1974 #endif
1975 }
1976 #else
1977 vlen = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
1978 for (idx = 0; idx < vlen; idx++)
1979 {
1980 if (GET_CODE (body) == ADDR_VEC)
1981 {
1982 #ifdef ASM_OUTPUT_ADDR_VEC_ELT
1983 ASM_OUTPUT_ADDR_VEC_ELT
1984 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
1985 #else
1986 abort ();
1987 #endif
1988 }
1989 else
1990 {
1991 #ifdef ASM_OUTPUT_ADDR_DIFF_ELT
1992 ASM_OUTPUT_ADDR_DIFF_ELT
1993 (file,
1994 body,
1995 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
1996 CODE_LABEL_NUMBER (XEXP (XEXP (body, 0), 0)));
1997 #else
1998 abort ();
1999 #endif
2000 }
2001 }
2002 #ifdef ASM_OUTPUT_CASE_END
2003 ASM_OUTPUT_CASE_END (file,
2004 CODE_LABEL_NUMBER (PREV_INSN (insn)),
2005 insn);
2006 #endif
2007 #endif
2008
2009 function_section (current_function_decl);
2010
2011 break;
2012 }
2013 /* Emit debug info for this insn's source position if it differs
2014 from the last position emitted. */
2015 if (notice_source_line (insn))
2016 {
2017 (*debug_hooks->source_line) (last_linenum, last_filename);
2018 }
2019
2020 if (GET_CODE (body) == ASM_INPUT)
2021 {
2022 const char *string = XSTR (body, 0);
2023
2024 /* There's no telling what that did to the condition codes. */
2025 CC_STATUS_INIT;
2026 if (prescan > 0)
2027 break;
2028
2029 if (string[0])
2030 {
2031 if (! app_on)
2032 {
2033 fputs (ASM_APP_ON, file);
2034 app_on = 1;
2035 }
2036 fprintf (asm_out_file, "\t%s\n", string);
2037 }
2038 break;
2039 }
2040
2041 /* Detect `asm' construct with operands. */
2042 if (asm_noperands (body) >= 0)
2043 {
2044 unsigned int noperands = asm_noperands (body);
2045 rtx *ops = alloca (noperands * sizeof (rtx));
2046 const char *string;
2047
2048 /* There's no telling what that did to the condition codes. */
2049 CC_STATUS_INIT;
2050 if (prescan > 0)
2051 break;
2052
2053 /* Get out the operand values. */
2054 string = decode_asm_operands (body, ops, NULL, NULL, NULL);
2055 /* Inhibit aborts on what would otherwise be compiler bugs. */
2056 insn_noperands = noperands;
2057 this_is_asm_operands = insn;
2058
2059 #ifdef FINAL_PRESCAN_INSN
2060 FINAL_PRESCAN_INSN (insn, ops, insn_noperands);
2061 #endif
2062
2063 /* Output the insn using them. */
2064 if (string[0])
2065 {
2066 if (! app_on)
2067 {
2068 fputs (ASM_APP_ON, file);
2069 app_on = 1;
2070 }
2071 output_asm_insn (string, ops);
2072 }
2073
2074 this_is_asm_operands = 0;
2075 break;
2076 }
2077
2078 if (prescan <= 0 && app_on)
2079 {
2080 fputs (ASM_APP_OFF, file);
2081 app_on = 0;
2082 }
2083
2084 if (GET_CODE (body) == SEQUENCE)
2085 {
2086 /* A delayed-branch sequence */
2087 int i;
2088 rtx next;
2089
2090 if (prescan > 0)
2091 break;
2092 final_sequence = body;
2093
2094 /* Record the delay slots' frame information before the branch.
2095 This is needed for delayed calls: see execute_cfa_program(). */
2096 #if defined (DWARF2_UNWIND_INFO)
2097 if (dwarf2out_do_frame ())
2098 for (i = 1; i < XVECLEN (body, 0); i++)
2099 dwarf2out_frame_debug (XVECEXP (body, 0, i));
2100 #endif
2101
2102 /* The first insn in this SEQUENCE might be a JUMP_INSN that will
2103 force the restoration of a comparison that was previously
2104 thought unnecessary. If that happens, cancel this sequence
2105 and cause that insn to be restored. */
2106
2107 next = final_scan_insn (XVECEXP (body, 0, 0), file, 0, prescan, 1, seen);
2108 if (next != XVECEXP (body, 0, 1))
2109 {
2110 final_sequence = 0;
2111 return next;
2112 }
2113
2114 for (i = 1; i < XVECLEN (body, 0); i++)
2115 {
2116 rtx insn = XVECEXP (body, 0, i);
2117 rtx next = NEXT_INSN (insn);
2118 /* We loop in case any instruction in a delay slot gets
2119 split. */
2120 do
2121 insn = final_scan_insn (insn, file, 0, prescan, 1, seen);
2122 while (insn != next);
2123 }
2124 #ifdef DBR_OUTPUT_SEQEND
2125 DBR_OUTPUT_SEQEND (file);
2126 #endif
2127 final_sequence = 0;
2128
2129 /* If the insn requiring the delay slot was a CALL_INSN, the
2130 insns in the delay slot are actually executed before the
2131 called function. Hence we don't preserve any CC-setting
2132 actions in these insns and the CC must be marked as being
2133 clobbered by the function. */
2134 if (GET_CODE (XVECEXP (body, 0, 0)) == CALL_INSN)
2135 {
2136 CC_STATUS_INIT;
2137 }
2138 break;
2139 }
2140
2141 /* We have a real machine instruction as rtl. */
2142
2143 body = PATTERN (insn);
2144
2145 #ifdef HAVE_cc0
2146 set = single_set (insn);
2147
2148 /* Check for redundant test and compare instructions
2149 (when the condition codes are already set up as desired).
2150 This is done only when optimizing; if not optimizing,
2151 it should be possible for the user to alter a variable
2152 with the debugger in between statements
2153 and the next statement should reexamine the variable
2154 to compute the condition codes. */
2155
2156 if (optimize)
2157 {
2158 if (set
2159 && GET_CODE (SET_DEST (set)) == CC0
2160 && insn != last_ignored_compare)
2161 {
2162 if (GET_CODE (SET_SRC (set)) == SUBREG)
2163 SET_SRC (set) = alter_subreg (&SET_SRC (set));
2164 else if (GET_CODE (SET_SRC (set)) == COMPARE)
2165 {
2166 if (GET_CODE (XEXP (SET_SRC (set), 0)) == SUBREG)
2167 XEXP (SET_SRC (set), 0)
2168 = alter_subreg (&XEXP (SET_SRC (set), 0));
2169 if (GET_CODE (XEXP (SET_SRC (set), 1)) == SUBREG)
2170 XEXP (SET_SRC (set), 1)
2171 = alter_subreg (&XEXP (SET_SRC (set), 1));
2172 }
2173 if ((cc_status.value1 != 0
2174 && rtx_equal_p (SET_SRC (set), cc_status.value1))
2175 || (cc_status.value2 != 0
2176 && rtx_equal_p (SET_SRC (set), cc_status.value2)))
2177 {
2178 /* Don't delete insn if it has an addressing side-effect. */
2179 if (! FIND_REG_INC_NOTE (insn, NULL_RTX)
2180 /* or if anything in it is volatile. */
2181 && ! volatile_refs_p (PATTERN (insn)))
2182 {
2183 /* We don't really delete the insn; just ignore it. */
2184 last_ignored_compare = insn;
2185 break;
2186 }
2187 }
2188 }
2189 }
2190 #endif
2191
2192 #ifndef STACK_REGS
2193 /* Don't bother outputting obvious no-ops, even without -O.
2194 This optimization is fast and doesn't interfere with debugging.
2195 Don't do this if the insn is in a delay slot, since this
2196 will cause an improper number of delay insns to be written. */
2197 if (final_sequence == 0
2198 && prescan >= 0
2199 && GET_CODE (insn) == INSN && GET_CODE (body) == SET
2200 && GET_CODE (SET_SRC (body)) == REG
2201 && GET_CODE (SET_DEST (body)) == REG
2202 && REGNO (SET_SRC (body)) == REGNO (SET_DEST (body)))
2203 break;
2204 #endif
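/* (Illustrative: a no-op of the kind skipped above would be something
   like (set (reg:SI 1) (reg:SI 1)), which register allocation can
   leave behind.)  */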
2205
2206 #ifdef HAVE_cc0
2207 /* If this is a conditional branch, maybe modify it
2208 if the cc's are in a nonstandard state
2209 so that it accomplishes the same thing that it would
2210 do straightforwardly if the cc's were set up normally. */
2211
2212 if (cc_status.flags != 0
2213 && GET_CODE (insn) == JUMP_INSN
2214 && GET_CODE (body) == SET
2215 && SET_DEST (body) == pc_rtx
2216 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2217 && GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (body), 0))) == '<'
2218 && XEXP (XEXP (SET_SRC (body), 0), 0) == cc0_rtx
2219 /* This is done during prescan; it is not done again
2220 in final scan when prescan has been done. */
2221 && prescan >= 0)
2222 {
2223 /* This function may alter the contents of its argument
2224 and clear some of the cc_status.flags bits.
2225 It may also return 1 meaning condition now always true
2226 or -1 meaning condition now always false
2227 or 2 meaning condition nontrivial but altered. */
2228 int result = alter_cond (XEXP (SET_SRC (body), 0));
2229 /* If condition now has fixed value, replace the IF_THEN_ELSE
2230 with its then-operand or its else-operand. */
2231 if (result == 1)
2232 SET_SRC (body) = XEXP (SET_SRC (body), 1);
2233 if (result == -1)
2234 SET_SRC (body) = XEXP (SET_SRC (body), 2);
2235
2236 /* The jump is now either unconditional or a no-op.
2237 If it has become a no-op, don't try to output it.
2238 (It would not be recognized.) */
2239 if (SET_SRC (body) == pc_rtx)
2240 {
2241 delete_insn (insn);
2242 break;
2243 }
2244 else if (GET_CODE (SET_SRC (body)) == RETURN)
2245 /* Replace (set (pc) (return)) with (return). */
2246 PATTERN (insn) = body = SET_SRC (body);
2247
2248 /* Rerecognize the instruction if it has changed. */
2249 if (result != 0)
2250 INSN_CODE (insn) = -1;
2251 }
2252
2253 /* Make same adjustments to instructions that examine the
2254 condition codes without jumping and instructions that
2255 handle conditional moves (if this machine has either one). */
2256
2257 if (cc_status.flags != 0
2258 && set != 0)
2259 {
2260 rtx cond_rtx, then_rtx, else_rtx;
2261
2262 if (GET_CODE (insn) != JUMP_INSN
2263 && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
2264 {
2265 cond_rtx = XEXP (SET_SRC (set), 0);
2266 then_rtx = XEXP (SET_SRC (set), 1);
2267 else_rtx = XEXP (SET_SRC (set), 2);
2268 }
2269 else
2270 {
2271 cond_rtx = SET_SRC (set);
2272 then_rtx = const_true_rtx;
2273 else_rtx = const0_rtx;
2274 }
2275
2276 switch (GET_CODE (cond_rtx))
2277 {
2278 case GTU:
2279 case GT:
2280 case LTU:
2281 case LT:
2282 case GEU:
2283 case GE:
2284 case LEU:
2285 case LE:
2286 case EQ:
2287 case NE:
2288 {
2289 int result;
2290 if (XEXP (cond_rtx, 0) != cc0_rtx)
2291 break;
2292 result = alter_cond (cond_rtx);
2293 if (result == 1)
2294 validate_change (insn, &SET_SRC (set), then_rtx, 0);
2295 else if (result == -1)
2296 validate_change (insn, &SET_SRC (set), else_rtx, 0);
2297 else if (result == 2)
2298 INSN_CODE (insn) = -1;
2299 if (SET_DEST (set) == SET_SRC (set))
2300 delete_insn (insn);
2301 }
2302 break;
2303
2304 default:
2305 break;
2306 }
2307 }
2308
2309 #endif
2310
2311 #ifdef HAVE_peephole
2312 /* Do machine-specific peephole optimizations if desired. */
2313
2314 if (optimize && !flag_no_peephole && !nopeepholes)
2315 {
2316 rtx next = peephole (insn);
2317 /* When peepholing, if there were notes within the peephole,
2318 emit them before the peephole. */
2319 if (next != 0 && next != NEXT_INSN (insn))
2320 {
2321 rtx prev = PREV_INSN (insn);
2322
2323 for (note = NEXT_INSN (insn); note != next;
2324 note = NEXT_INSN (note))
2325 final_scan_insn (note, file, optimize, prescan, nopeepholes, seen);
2326
2327 /* In case this is prescan, put the notes
2328 in proper position for later rescan. */
2329 note = NEXT_INSN (insn);
2330 PREV_INSN (note) = prev;
2331 NEXT_INSN (prev) = note;
2332 NEXT_INSN (PREV_INSN (next)) = insn;
2333 PREV_INSN (insn) = PREV_INSN (next);
2334 NEXT_INSN (insn) = next;
2335 PREV_INSN (next) = insn;
2336 }
2337
2338 /* PEEPHOLE might have changed this. */
2339 body = PATTERN (insn);
2340 }
2341 #endif
2342
2343 /* Try to recognize the instruction.
2344 If successful, verify that the operands satisfy the
2345 constraints for the instruction. Crash if they don't,
2346 since `reload' should have changed them so that they do. */
2347
2348 insn_code_number = recog_memoized (insn);
2349 cleanup_subreg_operands (insn);
2350
2351 /* Dump the insn in the assembly for debugging. */
2352 if (flag_dump_rtl_in_asm)
2353 {
2354 print_rtx_head = ASM_COMMENT_START;
2355 print_rtl_single (asm_out_file, insn);
2356 print_rtx_head = "";
2357 }
2358
2359 if (! constrain_operands_cached (1))
2360 fatal_insn_not_found (insn);
2361
2362 /* Some target machines need to prescan each insn before
2363 it is output. */
2364
2365 #ifdef FINAL_PRESCAN_INSN
2366 FINAL_PRESCAN_INSN (insn, recog_data.operand, recog_data.n_operands);
2367 #endif
2368
2369 #ifdef HAVE_conditional_execution
2370 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
2371 current_insn_predicate = COND_EXEC_TEST (PATTERN (insn));
2372 else
2373 current_insn_predicate = NULL_RTX;
2374 #endif
2375
2376 #ifdef HAVE_cc0
2377 cc_prev_status = cc_status;
2378
2379 /* Update `cc_status' for this instruction.
2380 The instruction's output routine may change it further.
2381 If the output routine for a jump insn needs to depend
2382 on the cc status, it should look at cc_prev_status. */
2383
2384 NOTICE_UPDATE_CC (body, insn);
2385 #endif
2386
2387 current_output_insn = debug_insn = insn;
2388
2389 #if defined (DWARF2_UNWIND_INFO)
2390 if (GET_CODE (insn) == CALL_INSN && dwarf2out_do_frame ())
2391 dwarf2out_frame_debug (insn);
2392 #endif
2393
2394 /* Find the proper template for this insn. */
2395 template = get_insn_template (insn_code_number, insn);
2396
2397 /* If the C code returns 0, it means that it is a jump insn
2398 which follows a deleted test insn, and that test insn
2399 needs to be reinserted. */
2400 if (template == 0)
2401 {
2402 rtx prev;
2403
2404 if (prev_nonnote_insn (insn) != last_ignored_compare)
2405 abort ();
2406
2407 /* We have already processed the notes between the setter and
2408 the user. Make sure we don't process them again; this is
2409 particularly important if one of the notes is a block
2410 scope note or an EH note. */
2411 for (prev = insn;
2412 prev != last_ignored_compare;
2413 prev = PREV_INSN (prev))
2414 {
2415 if (GET_CODE (prev) == NOTE)
2416 delete_insn (prev); /* Use delete_note. */
2417 }
2418
2419 return prev;
2420 }
2421
2422 /* If the template is the string "#", it means that this insn must
2423 be split. */
2424 if (template[0] == '#' && template[1] == '\0')
2425 {
2426 rtx new = try_split (body, insn, 0);
2427
2428 /* If we didn't split the insn, go away. */
2429 if (new == insn && PATTERN (new) == body)
2430 fatal_insn ("could not split insn", insn);
2431
2432 #ifdef HAVE_ATTR_length
2433 /* This instruction should have been split in shorten_branches,
2434 to ensure that we would have valid length info for the
2435 splitees. */
2436 abort ();
2437 #endif
2438
2439 return new;
2440 }
2441
2442 if (prescan > 0)
2443 break;
2444
2445 #ifdef IA64_UNWIND_INFO
2446 IA64_UNWIND_EMIT (asm_out_file, insn);
2447 #endif
2448 /* Output assembler code from the template. */
2449
2450 output_asm_insn (template, recog_data.operand);
2451
2452 /* If necessary, report the effect that the instruction has on
2453 the unwind info. We've already done this for delay slots
2454 and call instructions. */
2455 #if defined (DWARF2_UNWIND_INFO)
2456 if (GET_CODE (insn) == INSN
2457 #if !defined (HAVE_prologue)
2458 && !ACCUMULATE_OUTGOING_ARGS
2459 #endif
2460 && final_sequence == 0
2461 && dwarf2out_do_frame ())
2462 dwarf2out_frame_debug (insn);
2463 #endif
2464
2465 #if 0
2466 /* It's not at all clear why we did this, and doing so used to
2467 interfere with tests that used REG_WAS_0 notes, which are
2468 now gone, so let's try leaving this out. */
2469
2470 /* Mark this insn as having been output. */
2471 INSN_DELETED_P (insn) = 1;
2472 #endif
2473
2474 /* Emit information for vtable gc. */
2475 note = find_reg_note (insn, REG_VTABLE_REF, NULL_RTX);
2476
2477 current_output_insn = debug_insn = 0;
2478 }
2479 }
2480 return NEXT_INSN (insn);
2481 }
2482 \f
2483 /* Output debugging info to the assembler file FILE
2484 based on the NOTE-insn INSN, assumed to be a line number. */
2485
2486 static bool
2487 notice_source_line (rtx insn)
2488 {
2489 const char *filename = insn_file (insn);
2490 int linenum = insn_line (insn);
2491
2492 if (filename && (filename != last_filename || last_linenum != linenum))
2493 {
2494 last_filename = filename;
2495 last_linenum = linenum;
2496 high_block_linenum = MAX (last_linenum, high_block_linenum);
2497 high_function_linenum = MAX (last_linenum, high_function_linenum);
2498 return true;
2499 }
2500 return false;
2501 }
2502 \f
2503 /* For each operand in INSN, simplify (subreg (reg)) so that it refers
2504 directly to the desired hard register. */
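/* Illustrative example: an operand recorded as (subreg:QI (reg:SI 0) 0)
   is rewritten by alter_subreg below into the corresponding QImode hard
   register, so the output templates never see SUBREGs of hard regs.  */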
2505
2506 void
2507 cleanup_subreg_operands (rtx insn)
2508 {
2509 int i;
2510 extract_insn_cached (insn);
2511 for (i = 0; i < recog_data.n_operands; i++)
2512 {
2513 /* The following test cannot use recog_data.operand when testing
2514 for a SUBREG: the underlying object might have been changed
2515 already if we are inside a match_operator expression that
2516 matches the else clause. Instead we test the underlying
2517 expression directly. */
2518 if (GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2519 recog_data.operand[i] = alter_subreg (recog_data.operand_loc[i]);
2520 else if (GET_CODE (recog_data.operand[i]) == PLUS
2521 || GET_CODE (recog_data.operand[i]) == MULT
2522 || GET_CODE (recog_data.operand[i]) == MEM)
2523 recog_data.operand[i] = walk_alter_subreg (recog_data.operand_loc[i]);
2524 }
2525
2526 for (i = 0; i < recog_data.n_dups; i++)
2527 {
2528 if (GET_CODE (*recog_data.dup_loc[i]) == SUBREG)
2529 *recog_data.dup_loc[i] = alter_subreg (recog_data.dup_loc[i]);
2530 else if (GET_CODE (*recog_data.dup_loc[i]) == PLUS
2531 || GET_CODE (*recog_data.dup_loc[i]) == MULT
2532 || GET_CODE (*recog_data.dup_loc[i]) == MEM)
2533 *recog_data.dup_loc[i] = walk_alter_subreg (recog_data.dup_loc[i]);
2534 }
2535 }
2536
2537 /* If X is a SUBREG, replace it with a REG or a MEM,
2538 based on the thing it is a subreg of. */
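/* Illustrative examples (details depend on the target's endianness and
   register layout): (subreg:SI (mem:DI ...) 4) becomes an SImode MEM at
   the address adjusted by 4 bytes, while (subreg:SI (reg:DI 2) 4)
   becomes the single hard register that holds that word.  */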
2539
2540 rtx
2541 alter_subreg (rtx *xp)
2542 {
2543 rtx x = *xp;
2544 rtx y = SUBREG_REG (x);
2545
2546 /* simplify_subreg does not remove subreg from volatile references.
2547 We are required to. */
2548 if (GET_CODE (y) == MEM)
2549 *xp = adjust_address (y, GET_MODE (x), SUBREG_BYTE (x));
2550 else
2551 {
2552 rtx new = simplify_subreg (GET_MODE (x), y, GET_MODE (y),
2553 SUBREG_BYTE (x));
2554
2555 if (new != 0)
2556 *xp = new;
2557 /* Simplify_subreg can't handle some REG cases, but we have to. */
2558 else if (GET_CODE (y) == REG)
2559 {
2560 unsigned int regno = subreg_hard_regno (x, 1);
2561 *xp = gen_rtx_REG_offset (y, GET_MODE (x), regno, SUBREG_BYTE (x));
2562 }
2563 else
2564 abort ();
2565 }
2566
2567 return *xp;
2568 }
2569
2570 /* Do alter_subreg on all the SUBREGs contained in X. */
2571
2572 static rtx
2573 walk_alter_subreg (rtx *xp)
2574 {
2575 rtx x = *xp;
2576 switch (GET_CODE (x))
2577 {
2578 case PLUS:
2579 case MULT:
2580 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2581 XEXP (x, 1) = walk_alter_subreg (&XEXP (x, 1));
2582 break;
2583
2584 case MEM:
2585 XEXP (x, 0) = walk_alter_subreg (&XEXP (x, 0));
2586 break;
2587
2588 case SUBREG:
2589 return alter_subreg (xp);
2590
2591 default:
2592 break;
2593 }
2594
2595 return *xp;
2596 }
2597 \f
2598 #ifdef HAVE_cc0
2599
2600 /* Given BODY, the body of a jump instruction, alter the jump condition
2601 as required by the bits that are set in cc_status.flags.
2602 Not all of the bits there can be handled at this level in all cases.
2603
2604 The value is normally 0.
2605 1 means that the condition has become always true.
2606 -1 means that the condition has become always false.
2607 2 means that COND has been altered. */
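/* For instance, if cc_status.flags has CC_REVERSED set, a condition
   such as (gt (cc0) (const_int 0)) is rewritten as
   (lt (cc0) (const_int 0)) and 2 is returned.  (Illustrative sketch
   only; the individual flag cases are handled below.)  */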
2608
2609 static int
2610 alter_cond (rtx cond)
2611 {
2612 int value = 0;
2613
2614 if (cc_status.flags & CC_REVERSED)
2615 {
2616 value = 2;
2617 PUT_CODE (cond, swap_condition (GET_CODE (cond)));
2618 }
2619
2620 if (cc_status.flags & CC_INVERTED)
2621 {
2622 value = 2;
2623 PUT_CODE (cond, reverse_condition (GET_CODE (cond)));
2624 }
2625
2626 if (cc_status.flags & CC_NOT_POSITIVE)
2627 switch (GET_CODE (cond))
2628 {
2629 case LE:
2630 case LEU:
2631 case GEU:
2632 /* Jump becomes unconditional. */
2633 return 1;
2634
2635 case GT:
2636 case GTU:
2637 case LTU:
2638 /* Jump becomes no-op. */
2639 return -1;
2640
2641 case GE:
2642 PUT_CODE (cond, EQ);
2643 value = 2;
2644 break;
2645
2646 case LT:
2647 PUT_CODE (cond, NE);
2648 value = 2;
2649 break;
2650
2651 default:
2652 break;
2653 }
2654
2655 if (cc_status.flags & CC_NOT_NEGATIVE)
2656 switch (GET_CODE (cond))
2657 {
2658 case GE:
2659 case GEU:
2660 /* Jump becomes unconditional. */
2661 return 1;
2662
2663 case LT:
2664 case LTU:
2665 /* Jump becomes no-op. */
2666 return -1;
2667
2668 case LE:
2669 case LEU:
2670 PUT_CODE (cond, EQ);
2671 value = 2;
2672 break;
2673
2674 case GT:
2675 case GTU:
2676 PUT_CODE (cond, NE);
2677 value = 2;
2678 break;
2679
2680 default:
2681 break;
2682 }
2683
2684 if (cc_status.flags & CC_NO_OVERFLOW)
2685 switch (GET_CODE (cond))
2686 {
2687 case GEU:
2688 /* Jump becomes unconditional. */
2689 return 1;
2690
2691 case LEU:
2692 PUT_CODE (cond, EQ);
2693 value = 2;
2694 break;
2695
2696 case GTU:
2697 PUT_CODE (cond, NE);
2698 value = 2;
2699 break;
2700
2701 case LTU:
2702 /* Jump becomes no-op. */
2703 return -1;
2704
2705 default:
2706 break;
2707 }
2708
2709 if (cc_status.flags & (CC_Z_IN_NOT_N | CC_Z_IN_N))
2710 switch (GET_CODE (cond))
2711 {
2712 default:
2713 abort ();
2714
2715 case NE:
2716 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? GE : LT);
2717 value = 2;
2718 break;
2719
2720 case EQ:
2721 PUT_CODE (cond, cc_status.flags & CC_Z_IN_N ? LT : GE);
2722 value = 2;
2723 break;
2724 }
2725
2726 if (cc_status.flags & CC_NOT_SIGNED)
2727 /* The flags are valid if signed condition operators are converted
2728 to unsigned. */
2729 switch (GET_CODE (cond))
2730 {
2731 case LE:
2732 PUT_CODE (cond, LEU);
2733 value = 2;
2734 break;
2735
2736 case LT:
2737 PUT_CODE (cond, LTU);
2738 value = 2;
2739 break;
2740
2741 case GT:
2742 PUT_CODE (cond, GTU);
2743 value = 2;
2744 break;
2745
2746 case GE:
2747 PUT_CODE (cond, GEU);
2748 value = 2;
2749 break;
2750
2751 default:
2752 break;
2753 }
2754
2755 return value;
2756 }
2757 #endif
2758 \f
2759 /* Report inconsistency between the assembler template and the operands.
2760 In an `asm', it's the user's fault; otherwise, the compiler's fault. */
2761
2762 void
2763 output_operand_lossage (const char *msgid, ...)
2764 {
2765 char *fmt_string;
2766 char *new_message;
2767 const char *pfx_str;
2768 va_list ap;
2769
2770 va_start (ap, msgid);
2771
2772 pfx_str = this_is_asm_operands ? _("invalid `asm': ") : "output_operand: ";
2773 asprintf (&fmt_string, "%s%s", pfx_str, _(msgid));
2774 vasprintf (&new_message, fmt_string, ap);
2775
2776 if (this_is_asm_operands)
2777 error_for_asm (this_is_asm_operands, "%s", new_message);
2778 else
2779 internal_error ("%s", new_message);
2780
2781 free (fmt_string);
2782 free (new_message);
2783 va_end (ap);
2784 }
2785 \f
2786 /* Output of assembler code from a template, and its subroutines. */
2787
2788 /* Annotate the assembly with a comment describing the pattern and
2789 alternative used. */
2790
2791 static void
2792 output_asm_name (void)
2793 {
2794 if (debug_insn)
2795 {
2796 int num = INSN_CODE (debug_insn);
2797 fprintf (asm_out_file, "\t%s %d\t%s",
2798 ASM_COMMENT_START, INSN_UID (debug_insn),
2799 insn_data[num].name);
2800 if (insn_data[num].n_alternatives > 1)
2801 fprintf (asm_out_file, "/%d", which_alternative + 1);
2802 #ifdef HAVE_ATTR_length
2803 fprintf (asm_out_file, "\t[length = %d]",
2804 get_attr_length (debug_insn));
2805 #endif
2806 /* Clear this so only the first assembler insn
2807 of any rtl insn will get the special comment for -dp. */
2808 debug_insn = 0;
2809 }
2810 }
2811
2812 /* If OP is a REG or MEM and we can find a MEM_EXPR corresponding to it
2813 or its address, return that expr. Set *PADDRESSP to 1 if the expr
2814 corresponds to the address of the object and 0 if to the object. */
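/* For example (illustrative), given a (mem:SI ...) whose MEM_EXPR is the
   VAR_DECL for a variable `x', that decl is returned with *PADDRESSP
   set to 0; when only the address expression can be identified, the
   decl found there is returned with *PADDRESSP set to 1 instead.  */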
2815
2816 static tree
2817 get_mem_expr_from_op (rtx op, int *paddressp)
2818 {
2819 tree expr;
2820 int inner_addressp;
2821
2822 *paddressp = 0;
2823
2824 if (GET_CODE (op) == REG)
2825 return REG_EXPR (op);
2826 else if (GET_CODE (op) != MEM)
2827 return 0;
2828
2829 if (MEM_EXPR (op) != 0)
2830 return MEM_EXPR (op);
2831
2832 /* Otherwise we have an address, so indicate it and look at the address. */
2833 *paddressp = 1;
2834 op = XEXP (op, 0);
2835
2836 /* First check if we have a decl for the address, then look at the right side
2837 if it is a PLUS. Otherwise, strip off arithmetic and keep looking.
2838 But don't allow the address itself to be indirect. */
2839 if ((expr = get_mem_expr_from_op (op, &inner_addressp)) && ! inner_addressp)
2840 return expr;
2841 else if (GET_CODE (op) == PLUS
2842 && (expr = get_mem_expr_from_op (XEXP (op, 1), &inner_addressp)))
2843 return expr;
2844
2845 while (GET_RTX_CLASS (GET_CODE (op)) == '1'
2846 || GET_RTX_CLASS (GET_CODE (op)) == '2')
2847 op = XEXP (op, 0);
2848
2849 expr = get_mem_expr_from_op (op, &inner_addressp);
2850 return inner_addressp ? 0 : expr;
2851 }
2852
2853 /* Output operand names for assembler instructions. OPERANDS is the
2854 operand vector, OPORDER is the order to write the operands, and NOPS
2855 is the number of operands to write. */
2856
2857 static void
2858 output_asm_operand_names (rtx *operands, int *oporder, int nops)
2859 {
2860 int wrote = 0;
2861 int i;
2862
2863 for (i = 0; i < nops; i++)
2864 {
2865 int addressp;
2866 rtx op = operands[oporder[i]];
2867 tree expr = get_mem_expr_from_op (op, &addressp);
2868
2869 fprintf (asm_out_file, "%c%s",
2870 wrote ? ',' : '\t', wrote ? "" : ASM_COMMENT_START);
2871 wrote = 1;
2872 if (expr)
2873 {
2874 fprintf (asm_out_file, "%s",
2875 addressp ? "*" : "");
2876 print_mem_expr (asm_out_file, expr);
2877 wrote = 1;
2878 }
2879 else if (REG_P (op) && ORIGINAL_REGNO (op)
2880 && ORIGINAL_REGNO (op) != REGNO (op))
2881 fprintf (asm_out_file, " tmp%i", ORIGINAL_REGNO (op));
2882 }
2883 }
2884
2885 /* Output text from TEMPLATE to the assembler output file,
2886 obeying %-directions to substitute operands taken from
2887 the vector OPERANDS.
2888
2889 %N (for N a digit) means print operand N in usual manner.
2890 %lN means require operand N to be a CODE_LABEL or LABEL_REF
2891 and print the label name with no punctuation.
2892 %cN means require operand N to be a constant
2893 and print the constant expression with no punctuation.
2894 %aN means expect operand N to be a memory address
2895 (not a memory reference!) and print a reference
2896 to that address.
2897 %nN means expect operand N to be a constant
2898 and print a constant expression for minus the value
2899 of the operand, with no other punctuation. */
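/* As an illustration (not a real pattern from any particular port), a
   template such as "addl %2,%0" with operands[0] = (reg:SI 0) and
   operands[2] = (const_int 4) might be emitted as "addl $4,%eax" on an
   x86-style target; the exact operand syntax comes from the target's
   PRINT_OPERAND macro.  */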
2900
2901 void
2902 output_asm_insn (const char *template, rtx *operands)
2903 {
2904 const char *p;
2905 int c;
2906 #ifdef ASSEMBLER_DIALECT
2907 int dialect = 0;
2908 #endif
2909 int oporder[MAX_RECOG_OPERANDS];
2910 char opoutput[MAX_RECOG_OPERANDS];
2911 int ops = 0;
2912
2913 /* An insn may return a null string template
2914 in a case where no assembler code is needed. */
2915 if (*template == 0)
2916 return;
2917
2918 memset (opoutput, 0, sizeof opoutput);
2919 p = template;
2920 putc ('\t', asm_out_file);
2921
2922 #ifdef ASM_OUTPUT_OPCODE
2923 ASM_OUTPUT_OPCODE (asm_out_file, p);
2924 #endif
2925
2926 while ((c = *p++))
2927 switch (c)
2928 {
2929 case '\n':
2930 if (flag_verbose_asm)
2931 output_asm_operand_names (operands, oporder, ops);
2932 if (flag_print_asm_name)
2933 output_asm_name ();
2934
2935 ops = 0;
2936 memset (opoutput, 0, sizeof opoutput);
2937
2938 putc (c, asm_out_file);
2939 #ifdef ASM_OUTPUT_OPCODE
2940 while ((c = *p) == '\t')
2941 {
2942 putc (c, asm_out_file);
2943 p++;
2944 }
2945 ASM_OUTPUT_OPCODE (asm_out_file, p);
2946 #endif
2947 break;
2948
2949 #ifdef ASSEMBLER_DIALECT
2950 case '{':
2951 {
2952 int i;
2953
2954 if (dialect)
2955 output_operand_lossage ("nested assembly dialect alternatives");
2956 else
2957 dialect = 1;
2958
2959 /* If we want the first dialect, do nothing. Otherwise, skip
2960 DIALECT_NUMBER of strings ending with '|'. */
2961 for (i = 0; i < dialect_number; i++)
2962 {
2963 while (*p && *p != '}' && *p++ != '|')
2964 ;
2965 if (*p == '}')
2966 break;
2967 if (*p == '|')
2968 p++;
2969 }
2970
2971 if (*p == '\0')
2972 output_operand_lossage ("unterminated assembly dialect alternative");
2973 }
2974 break;
2975
2976 case '|':
2977 if (dialect)
2978 {
2979 /* Skip to close brace. */
2980 do
2981 {
2982 if (*p == '\0')
2983 {
2984 output_operand_lossage ("unterminated assembly dialect alternative");
2985 break;
2986 }
2987 }
2988 while (*p++ != '}');
2989 dialect = 0;
2990 }
2991 else
2992 putc (c, asm_out_file);
2993 break;
2994
2995 case '}':
2996 if (! dialect)
2997 putc (c, asm_out_file);
2998 dialect = 0;
2999 break;
3000 #endif
3001
3002 case '%':
3003 /* %% outputs a single %. */
3004 if (*p == '%')
3005 {
3006 p++;
3007 putc (c, asm_out_file);
3008 }
3009 /* %= outputs a number which is unique to each insn in the entire
3010 compilation. This is useful for making local labels that are
3011 referred to more than once in a given insn. */
3012 else if (*p == '=')
3013 {
3014 p++;
3015 fprintf (asm_out_file, "%d", insn_counter);
3016 }
3017 /* % followed by a letter and some digits
3018 outputs an operand in a special way depending on the letter.
3019 Letters `acln' are implemented directly.
3020 Other letters are passed to `output_operand' so that
3021 the PRINT_OPERAND macro can define them. */
3022 else if (ISALPHA (*p))
3023 {
3024 int letter = *p++;
3025 c = atoi (p);
3026
3027 if (! ISDIGIT (*p))
3028 output_operand_lossage ("operand number missing after %%-letter");
3029 else if (this_is_asm_operands
3030 && (c < 0 || (unsigned int) c >= insn_noperands))
3031 output_operand_lossage ("operand number out of range");
3032 else if (letter == 'l')
3033 output_asm_label (operands[c]);
3034 else if (letter == 'a')
3035 output_address (operands[c]);
3036 else if (letter == 'c')
3037 {
3038 if (CONSTANT_ADDRESS_P (operands[c]))
3039 output_addr_const (asm_out_file, operands[c]);
3040 else
3041 output_operand (operands[c], 'c');
3042 }
3043 else if (letter == 'n')
3044 {
3045 if (GET_CODE (operands[c]) == CONST_INT)
3046 fprintf (asm_out_file, HOST_WIDE_INT_PRINT_DEC,
3047 - INTVAL (operands[c]));
3048 else
3049 {
3050 putc ('-', asm_out_file);
3051 output_addr_const (asm_out_file, operands[c]);
3052 }
3053 }
3054 else
3055 output_operand (operands[c], letter);
3056
3057 if (!opoutput[c])
3058 oporder[ops++] = c;
3059 opoutput[c] = 1;
3060
3061 while (ISDIGIT (c = *p))
3062 p++;
3063 }
3064 /* % followed by a digit outputs an operand the default way. */
3065 else if (ISDIGIT (*p))
3066 {
3067 c = atoi (p);
3068 if (this_is_asm_operands
3069 && (c < 0 || (unsigned int) c >= insn_noperands))
3070 output_operand_lossage ("operand number out of range");
3071 else
3072 output_operand (operands[c], 0);
3073
3074 if (!opoutput[c])
3075 oporder[ops++] = c;
3076 opoutput[c] = 1;
3077
3078 while (ISDIGIT (c = *p))
3079 p++;
3080 }
3081 /* % followed by punctuation: output something for that
3082 punctuation character alone, with no operand.
3083 The PRINT_OPERAND macro decides what is actually done. */
3084 #ifdef PRINT_OPERAND_PUNCT_VALID_P
3085 else if (PRINT_OPERAND_PUNCT_VALID_P ((unsigned char) *p))
3086 output_operand (NULL_RTX, *p++);
3087 #endif
3088 else
3089 output_operand_lossage ("invalid %%-code");
3090 break;
3091
3092 default:
3093 putc (c, asm_out_file);
3094 }
3095
3096 /* Write out the variable names for operands, if we know them. */
3097 if (flag_verbose_asm)
3098 output_asm_operand_names (operands, oporder, ops);
3099 if (flag_print_asm_name)
3100 output_asm_name ();
3101
3102 putc ('\n', asm_out_file);
3103 }
3104 \f
3105 /* Output a LABEL_REF, or a bare CODE_LABEL, as an assembler symbol. */
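/* For a CODE_LABEL numbered 42 this typically assembles a name such as
   ".L42" or "L42", depending on the target's LOCAL_LABEL_PREFIX
   (illustrative only).  */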
3106
3107 void
3108 output_asm_label (rtx x)
3109 {
3110 char buf[256];
3111
3112 if (GET_CODE (x) == LABEL_REF)
3113 x = XEXP (x, 0);
3114 if (GET_CODE (x) == CODE_LABEL
3115 || (GET_CODE (x) == NOTE
3116 && NOTE_LINE_NUMBER (x) == NOTE_INSN_DELETED_LABEL))
3117 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3118 else
3119 output_operand_lossage ("`%%l' operand isn't a label");
3120
3121 assemble_name (asm_out_file, buf);
3122 }
3123
3124 /* Print operand X using machine-dependent assembler syntax.
3125 The macro PRINT_OPERAND is defined just to control this function.
3126 CODE is a non-digit that preceded the operand-number in the % spec,
3127 such as 'z' if the spec was `%z3'. CODE is 0 if there was no char
3128 between the % and the digits.
3129 When CODE is a non-letter, X is 0.
3130
3131 The meanings of the letters are machine-dependent and controlled
3132 by PRINT_OPERAND. */
3133
3134 static void
3135 output_operand (rtx x, int code ATTRIBUTE_UNUSED)
3136 {
3137 if (x && GET_CODE (x) == SUBREG)
3138 x = alter_subreg (&x);
3139
3140 /* If X is a pseudo-register, abort now rather than writing trash to the
3141 assembler file. */
3142
3143 if (x && GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
3144 abort ();
3145
3146 PRINT_OPERAND (asm_out_file, x, code);
3147 }
3148
3149 /* Print a memory reference operand for address X
3150 using machine-dependent assembler syntax.
3151 The macro PRINT_OPERAND_ADDRESS exists just to control this function. */
3152
3153 void
3154 output_address (rtx x)
3155 {
3156 walk_alter_subreg (&x);
3157 PRINT_OPERAND_ADDRESS (asm_out_file, x);
3158 }
3159 \f
3160 /* Print an integer constant expression in assembler syntax.
3161 Addition and subtraction are the only arithmetic
3162 that may appear in these expressions. */
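/* For example (illustrative), (const (plus (symbol_ref "foo")
   (const_int 8))) is printed as "foo+8".  */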
3163
3164 void
3165 output_addr_const (FILE *file, rtx x)
3166 {
3167 char buf[256];
3168
3169 restart:
3170 switch (GET_CODE (x))
3171 {
3172 case PC:
3173 putc ('.', file);
3174 break;
3175
3176 case SYMBOL_REF:
3177 #ifdef ASM_OUTPUT_SYMBOL_REF
3178 ASM_OUTPUT_SYMBOL_REF (file, x);
3179 #else
3180 assemble_name (file, XSTR (x, 0));
3181 #endif
3182 break;
3183
3184 case LABEL_REF:
3185 x = XEXP (x, 0);
3186 /* Fall through. */
3187 case CODE_LABEL:
3188 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (x));
3189 #ifdef ASM_OUTPUT_LABEL_REF
3190 ASM_OUTPUT_LABEL_REF (file, buf);
3191 #else
3192 assemble_name (file, buf);
3193 #endif
3194 break;
3195
3196 case CONST_INT:
3197 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
3198 break;
3199
3200 case CONST:
3201 /* This used to output parentheses around the expression,
3202 but that does not work on the 386 (either ATT or BSD assembler). */
3203 output_addr_const (file, XEXP (x, 0));
3204 break;
3205
3206 case CONST_DOUBLE:
3207 if (GET_MODE (x) == VOIDmode)
3208 {
3209 /* We can use %d if the number is one word and positive. */
3210 if (CONST_DOUBLE_HIGH (x))
3211 fprintf (file, HOST_WIDE_INT_PRINT_DOUBLE_HEX,
3212 CONST_DOUBLE_HIGH (x), CONST_DOUBLE_LOW (x));
3213 else if (CONST_DOUBLE_LOW (x) < 0)
3214 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
3215 else
3216 fprintf (file, HOST_WIDE_INT_PRINT_DEC, CONST_DOUBLE_LOW (x));
3217 }
3218 else
3219 /* We can't handle floating point constants;
3220 PRINT_OPERAND must handle them. */
3221 output_operand_lossage ("floating constant misused");
3222 break;
3223
3224 case PLUS:
3225 /* Some assemblers need integer constants to appear last (e.g. masm). */
3226 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3227 {
3228 output_addr_const (file, XEXP (x, 1));
3229 if (INTVAL (XEXP (x, 0)) >= 0)
3230 fprintf (file, "+");
3231 output_addr_const (file, XEXP (x, 0));
3232 }
3233 else
3234 {
3235 output_addr_const (file, XEXP (x, 0));
3236 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3237 || INTVAL (XEXP (x, 1)) >= 0)
3238 fprintf (file, "+");
3239 output_addr_const (file, XEXP (x, 1));
3240 }
3241 break;
3242
3243 case MINUS:
3244 /* Avoid outputting things like x-x or x+5-x,
3245 since some assemblers can't handle that. */
3246 x = simplify_subtraction (x);
3247 if (GET_CODE (x) != MINUS)
3248 goto restart;
3249
3250 output_addr_const (file, XEXP (x, 0));
3251 fprintf (file, "-");
3252 if ((GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0)
3253 || GET_CODE (XEXP (x, 1)) == PC
3254 || GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
3255 output_addr_const (file, XEXP (x, 1));
3256 else
3257 {
3258 fputs (targetm.asm_out.open_paren, file);
3259 output_addr_const (file, XEXP (x, 1));
3260 fputs (targetm.asm_out.close_paren, file);
3261 }
3262 break;
3263
3264 case ZERO_EXTEND:
3265 case SIGN_EXTEND:
3266 case SUBREG:
3267 output_addr_const (file, XEXP (x, 0));
3268 break;
3269
3270 default:
3271 #ifdef OUTPUT_ADDR_CONST_EXTRA
3272 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
3273 break;
3274
3275 fail:
3276 #endif
3277 output_operand_lossage ("invalid expression as operand");
3278 }
3279 }
3280 \f
3281 /* A poor man's fprintf, with the added features of %I, %R, %L, and %U.
3282 %R prints the value of REGISTER_PREFIX.
3283 %L prints the value of LOCAL_LABEL_PREFIX.
3284 %U prints the value of USER_LABEL_PREFIX.
3285 %I prints the value of IMMEDIATE_PREFIX.
3286 %O runs ASM_OUTPUT_OPCODE to transform what follows in the string.
3287 Also supported are %d, %i, %u, %x, %X, %o, %c, %s and %%.
3288
3289 We handle alternate assembler dialects here, just like output_asm_insn. */
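/* A hypothetical use might be

     asm_fprintf (file, "\tpush %R%s\n", reg_names[regno]);

   which, on targets that define REGISTER_PREFIX, emits that prefix
   before the register name.  */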
3290
3291 void
3292 asm_fprintf (FILE *file, const char *p, ...)
3293 {
3294 char buf[10];
3295 char *q, c;
3296 va_list argptr;
3297
3298 va_start (argptr, p);
3299
3300 buf[0] = '%';
3301
3302 while ((c = *p++))
3303 switch (c)
3304 {
3305 #ifdef ASSEMBLER_DIALECT
3306 case '{':
3307 {
3308 int i;
3309
3310 /* If we want the first dialect, do nothing. Otherwise, skip
3311 DIALECT_NUMBER of strings ending with '|'. */
3312 for (i = 0; i < dialect_number; i++)
3313 {
3314 while (*p && *p++ != '|')
3315 ;
3316
3317 if (*p == '|')
3318 p++;
3319 }
3320 }
3321 break;
3322
3323 case '|':
3324 /* Skip to close brace. */
3325 while (*p && *p++ != '}')
3326 ;
3327 break;
3328
3329 case '}':
3330 break;
3331 #endif
3332
3333 case '%':
3334 c = *p++;
3335 q = &buf[1];
3336 while (strchr ("-+ #0", c))
3337 {
3338 *q++ = c;
3339 c = *p++;
3340 }
3341 while (ISDIGIT (c) || c == '.')
3342 {
3343 *q++ = c;
3344 c = *p++;
3345 }
3346 switch (c)
3347 {
3348 case '%':
3349 putc ('%', file);
3350 break;
3351
3352 case 'd': case 'i': case 'u':
3353 case 'x': case 'X': case 'o':
3354 case 'c':
3355 *q++ = c;
3356 *q = 0;
3357 fprintf (file, buf, va_arg (argptr, int));
3358 break;
3359
3360 case 'w':
3361 /* This is a prefix to the 'd', 'i', 'u', 'x', 'X', and
3362 'o' cases, but we do not check for those cases. It
3363 means that the value is a HOST_WIDE_INT, which may be
3364 either `long' or `long long'. */
3365 memcpy (q, HOST_WIDE_INT_PRINT, strlen (HOST_WIDE_INT_PRINT));
3366 q += strlen (HOST_WIDE_INT_PRINT);
3367 *q++ = *p++;
3368 *q = 0;
3369 fprintf (file, buf, va_arg (argptr, HOST_WIDE_INT));
3370 break;
3371
3372 case 'l':
3373 *q++ = c;
3374 #ifdef HAVE_LONG_LONG
3375 if (*p == 'l')
3376 {
3377 *q++ = *p++;
3378 *q++ = *p++;
3379 *q = 0;
3380 fprintf (file, buf, va_arg (argptr, long long));
3381 }
3382 else
3383 #endif
3384 {
3385 *q++ = *p++;
3386 *q = 0;
3387 fprintf (file, buf, va_arg (argptr, long));
3388 }
3389
3390 break;
3391
3392 case 's':
3393 *q++ = c;
3394 *q = 0;
3395 fprintf (file, buf, va_arg (argptr, char *));
3396 break;
3397
3398 case 'O':
3399 #ifdef ASM_OUTPUT_OPCODE
3400 ASM_OUTPUT_OPCODE (asm_out_file, p);
3401 #endif
3402 break;
3403
3404 case 'R':
3405 #ifdef REGISTER_PREFIX
3406 fprintf (file, "%s", REGISTER_PREFIX);
3407 #endif
3408 break;
3409
3410 case 'I':
3411 #ifdef IMMEDIATE_PREFIX
3412 fprintf (file, "%s", IMMEDIATE_PREFIX);
3413 #endif
3414 break;
3415
3416 case 'L':
3417 #ifdef LOCAL_LABEL_PREFIX
3418 fprintf (file, "%s", LOCAL_LABEL_PREFIX);
3419 #endif
3420 break;
3421
3422 case 'U':
3423 fputs (user_label_prefix, file);
3424 break;
3425
3426 #ifdef ASM_FPRINTF_EXTENSIONS
3427 /* Uppercase letters are reserved for general use by asm_fprintf
3428 and so are not available to target specific code. In order to
3429 prevent the ASM_FPRINTF_EXTENSIONS macro from using them,
3430 they are listed here. As they get turned into real extensions
3431 to asm_fprintf they should be removed from this list. */
3432 case 'A': case 'B': case 'C': case 'D': case 'E':
3433 case 'F': case 'G': case 'H': case 'J': case 'K':
3434 case 'M': case 'N': case 'P': case 'Q': case 'S':
3435 case 'T': case 'V': case 'W': case 'Y': case 'Z':
3436 break;
3437
3438 ASM_FPRINTF_EXTENSIONS (file, argptr, p)
3439 #endif
3440 default:
3441 abort ();
3442 }
3443 break;
3444
3445 default:
3446 putc (c, file);
3447 }
3448 va_end (argptr);
3449 }
3450 \f
3451 /* Split up a CONST_DOUBLE or integer constant rtx
3452 into two rtx's for single words,
3453 storing in *FIRST the word that comes first in memory in the target
3454 and in *SECOND the other. */
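/* For example (illustrative), splitting the 64-bit integer constant
   0x0000000100000002 for a 32-bit little-endian target sets *FIRST to
   (const_int 2) and *SECOND to (const_int 1); when WORDS_BIG_ENDIAN,
   the two are swapped.  */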
3455
3456 void
3457 split_double (rtx value, rtx *first, rtx *second)
3458 {
3459 if (GET_CODE (value) == CONST_INT)
3460 {
3461 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
3462 {
3463 /* In this case the CONST_INT holds both target words.
3464 Extract the bits from it into two word-sized pieces.
3465 Sign extend each half to HOST_WIDE_INT. */
3466 unsigned HOST_WIDE_INT low, high;
3467 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
3468
3469 /* Set sign_bit to the most significant bit of a word. */
3470 sign_bit = 1;
3471 sign_bit <<= BITS_PER_WORD - 1;
3472
3473 /* Set mask so that all bits of the word are set. We could
3474 have used 1 << BITS_PER_WORD instead of basing the
3475 calculation on sign_bit. However, on machines where
3476 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
3477 compiler warning, even though the code would never be
3478 executed. */
3479 mask = sign_bit << 1;
3480 mask--;
3481
3482 /* Set sign_extend as any remaining bits. */
3483 sign_extend = ~mask;
3484
3485 /* Pick the lower word and sign-extend it. */
3486 low = INTVAL (value);
3487 low &= mask;
3488 if (low & sign_bit)
3489 low |= sign_extend;
3490
3491 /* Pick the higher word, shifted to the least significant
3492 bits, and sign-extend it. */
3493 high = INTVAL (value);
3494 high >>= BITS_PER_WORD - 1;
3495 high >>= 1;
3496 high &= mask;
3497 if (high & sign_bit)
3498 high |= sign_extend;
3499
3500 /* Store the words in the target machine order. */
3501 if (WORDS_BIG_ENDIAN)
3502 {
3503 *first = GEN_INT (high);
3504 *second = GEN_INT (low);
3505 }
3506 else
3507 {
3508 *first = GEN_INT (low);
3509 *second = GEN_INT (high);
3510 }
3511 }
3512 else
3513 {
3514 /* The rule for using CONST_INT for a wider mode
3515 is that we regard the value as signed.
3516 So sign-extend it. */
3517 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
3518 if (WORDS_BIG_ENDIAN)
3519 {
3520 *first = high;
3521 *second = value;
3522 }
3523 else
3524 {
3525 *first = value;
3526 *second = high;
3527 }
3528 }
3529 }
3530 else if (GET_CODE (value) != CONST_DOUBLE)
3531 {
3532 if (WORDS_BIG_ENDIAN)
3533 {
3534 *first = const0_rtx;
3535 *second = value;
3536 }
3537 else
3538 {
3539 *first = value;
3540 *second = const0_rtx;
3541 }
3542 }
3543 else if (GET_MODE (value) == VOIDmode
3544 /* This is the old way we did CONST_DOUBLE integers. */
3545 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
3546 {
3547 /* In an integer, the words are defined as most and least significant.
3548 So order them by the target's convention. */
3549 if (WORDS_BIG_ENDIAN)
3550 {
3551 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
3552 *second = GEN_INT (CONST_DOUBLE_LOW (value));
3553 }
3554 else
3555 {
3556 *first = GEN_INT (CONST_DOUBLE_LOW (value));
3557 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
3558 }
3559 }
3560 else
3561 {
3562 REAL_VALUE_TYPE r;
3563 long l[2];
3564 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
3565
3566 /* Note, this converts the REAL_VALUE_TYPE to the target's
3567 format, splits up the floating point double and outputs
3568 exactly 32 bits of it into each of l[0] and l[1] --
3569 not necessarily BITS_PER_WORD bits. */
3570 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
3571
3572 /* If 32 bits is an entire word for the target, but not for the host,
3573 then sign-extend on the host so that the number will look the same
3574 way on the host that it would on the target. See for instance
3575 simplify_unary_operation. The #if is needed to avoid compiler
3576 warnings. */
3577
3578 #if HOST_BITS_PER_LONG > 32
3579 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
3580 {
3581 if (l[0] & ((long) 1 << 31))
3582 l[0] |= ((long) (-1) << 32);
3583 if (l[1] & ((long) 1 << 31))
3584 l[1] |= ((long) (-1) << 32);
3585 }
3586 #endif
3587
3588 *first = GEN_INT ((HOST_WIDE_INT) l[0]);
3589 *second = GEN_INT ((HOST_WIDE_INT) l[1]);
3590 }
3591 }
3592 \f
3593 /* Return nonzero if this function has no function calls. */
3594
3595 int
3596 leaf_function_p (void)
3597 {
3598 rtx insn;
3599 rtx link;
3600
3601 if (current_function_profile || profile_arc_flag)
3602 return 0;
3603
3604 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3605 {
3606 if (GET_CODE (insn) == CALL_INSN
3607 && ! SIBLING_CALL_P (insn))
3608 return 0;
3609 if (GET_CODE (insn) == INSN
3610 && GET_CODE (PATTERN (insn)) == SEQUENCE
3611 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3612 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3613 return 0;
3614 }
3615 for (link = current_function_epilogue_delay_list;
3616 link;
3617 link = XEXP (link, 1))
3618 {
3619 insn = XEXP (link, 0);
3620
3621 if (GET_CODE (insn) == CALL_INSN
3622 && ! SIBLING_CALL_P (insn))
3623 return 0;
3624 if (GET_CODE (insn) == INSN
3625 && GET_CODE (PATTERN (insn)) == SEQUENCE
3626 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == CALL_INSN
3627 && ! SIBLING_CALL_P (XVECEXP (PATTERN (insn), 0, 0)))
3628 return 0;
3629 }
3630
3631 return 1;
3632 }
3633
3634 /* Return 1 if the branch is a forward branch.
3635 Uses the insn_shuid array, so it works only in the final pass. May be used by
3636 output templates to add branch prediction hints.
3637 */
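/* For example, a jump whose INSN_SHUID is 10 and whose JUMP_LABEL's
   INSN_SHUID is 42 is a forward branch, so nonzero is returned.  */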
3638 int
3639 final_forward_branch_p (rtx insn)
3640 {
3641 int insn_id, label_id;
3642 if (!uid_shuid)
3643 abort ();
3644 insn_id = INSN_SHUID (insn);
3645 label_id = INSN_SHUID (JUMP_LABEL (insn));
3646 /* We've hit some insns that do not have id information available. */
3647 if (!insn_id || !label_id)
3648 abort ();
3649 return insn_id < label_id;
3650 }
3651
3652 /* On some machines, a function with no call insns
3653 can run faster if it doesn't create its own register window.
3654 When output, the leaf function should use only the "output"
3655 registers. Ordinarily, the function would be compiled to use
3656 the "input" registers to find its arguments; it is a candidate
3657 for leaf treatment if it uses only the "input" registers.
3658 Leaf function treatment means renumbering so the function
3659 uses the "output" registers instead. */
3660
3661 #ifdef LEAF_REGISTERS
3662
3663 /* Return 1 if this function uses only the registers that can be
3664 safely renumbered. */
3665
3666 int
3667 only_leaf_regs_used (void)
3668 {
3669 int i;
3670 const char *const permitted_reg_in_leaf_functions = LEAF_REGISTERS;
3671
3672 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3673 if ((regs_ever_live[i] || global_regs[i])
3674 && ! permitted_reg_in_leaf_functions[i])
3675 return 0;
3676
3677 if (current_function_uses_pic_offset_table
3678 && pic_offset_table_rtx != 0
3679 && GET_CODE (pic_offset_table_rtx) == REG
3680 && ! permitted_reg_in_leaf_functions[REGNO (pic_offset_table_rtx)])
3681 return 0;
3682
3683 return 1;
3684 }
3685
3686 /* Scan all instructions and renumber all registers into those
3687 available in leaf functions. */
3688
3689 static void
3690 leaf_renumber_regs (rtx first)
3691 {
3692 rtx insn;
3693
3694 /* Renumber only the actual patterns.
3695 The reg-notes can contain frame pointer refs,
3696 and renumbering them could crash; it should not be needed anyway. */
3697 for (insn = first; insn; insn = NEXT_INSN (insn))
3698 if (INSN_P (insn))
3699 leaf_renumber_regs_insn (PATTERN (insn));
3700 for (insn = current_function_epilogue_delay_list;
3701 insn;
3702 insn = XEXP (insn, 1))
3703 if (INSN_P (XEXP (insn, 0)))
3704 leaf_renumber_regs_insn (PATTERN (XEXP (insn, 0)));
3705 }
3706
3707 /* Scan IN_RTX and its subexpressions, and renumber all regs into those
3708 available in leaf functions. */
3709
3710 void
3711 leaf_renumber_regs_insn (rtx in_rtx)
3712 {
3713 int i, j;
3714 const char *format_ptr;
3715
3716 if (in_rtx == 0)
3717 return;
3718
3719 /* Renumber all input-registers into output-registers.
3720 renumbered_regs would be 1 for an output-register. */
3722
3723 if (GET_CODE (in_rtx) == REG)
3724 {
3725 int newreg;
3726
3727 /* Don't renumber the same reg twice. */
3728 if (in_rtx->used)
3729 return;
3730
3731 newreg = REGNO (in_rtx);
3732 /* Don't try to renumber pseudo regs. It is possible for a pseudo reg
3733 to reach here as part of a REG_NOTE. */
3734 if (newreg >= FIRST_PSEUDO_REGISTER)
3735 {
3736 in_rtx->used = 1;
3737 return;
3738 }
3739 newreg = LEAF_REG_REMAP (newreg);
3740 if (newreg < 0)
3741 abort ();
3742 regs_ever_live[REGNO (in_rtx)] = 0;
3743 regs_ever_live[newreg] = 1;
3744 REGNO (in_rtx) = newreg;
3745 in_rtx->used = 1;
3746 }
3747
3748 if (INSN_P (in_rtx))
3749 {
3750 /* Inside a SEQUENCE, we find insns.
3751 Renumber just the patterns of these insns,
3752 just as we do for the top-level insns. */
3753 leaf_renumber_regs_insn (PATTERN (in_rtx));
3754 return;
3755 }
3756
3757 format_ptr = GET_RTX_FORMAT (GET_CODE (in_rtx));
3758
3759 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (in_rtx)); i++)
3760 switch (*format_ptr++)
3761 {
3762 case 'e':
3763 leaf_renumber_regs_insn (XEXP (in_rtx, i));
3764 break;
3765
3766 case 'E':
3767 if (NULL != XVEC (in_rtx, i))
3768 {
3769 for (j = 0; j < XVECLEN (in_rtx, i); j++)
3770 leaf_renumber_regs_insn (XVECEXP (in_rtx, i, j));
3771 }
3772 break;
3773
3774 case 'S':
3775 case 's':
3776 case '0':
3777 case 'i':
3778 case 'w':
3779 case 'n':
3780 case 'u':
3781 break;
3782
3783 default:
3784 abort ();
3785 }
3786 }
3787 #endif
3788
3789
3790 /* When -gused is used, emit debug info only for used symbols. But in
3791 addition to the standard intercepted debug_hooks there are some direct
3792 calls into this file, i.e., dbxout_symbol, dbxout_parms, and dbxout_reg_params.
3793 Those routines may also be called from a higher level intercepted routine. So
3794 to prevent recording data for an inner call to one of these for an intercept,
3795 we maintain an intercept nesting counter (debug_nesting). We only save the
3796 intercepted arguments if the nesting is 1. */
3797 int debug_nesting = 0;
3798
3799 static tree *symbol_queue;
3800 int symbol_queue_index = 0;
3801 static int symbol_queue_size = 0;
3802
3803 /* Generate the symbols for any queued up type symbols we encountered
3804 while generating the type info for some originally used symbol.
3805 This might generate additional entries in the queue. Only when
3806 the nesting depth goes to 0 is this routine called. */
3807
3808 void
3809 debug_flush_symbol_queue (void)
3810 {
3811 int i;
3812
3813 /* Make sure that additionally queued items are not flushed
3814 prematurely. */
3815
3816 ++debug_nesting;
3817
3818 for (i = 0; i < symbol_queue_index; ++i)
3819 {
3820 /* If we pushed queued symbols then such symbols must be
3821 output no matter what anyone else says. Specifically,
3822 we need to make sure dbxout_symbol() thinks the symbol was
3823 used and also we need to override TYPE_DECL_SUPPRESS_DEBUG
3824 which may be set for outside reasons. */
3825 int saved_tree_used = TREE_USED (symbol_queue[i]);
3826 int saved_suppress_debug = TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]);
3827 TREE_USED (symbol_queue[i]) = 1;
3828 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = 0;
3829
3830 #ifdef DBX_DEBUGGING_INFO
3831 dbxout_symbol (symbol_queue[i], 0);
3832 #endif
3833
3834 TREE_USED (symbol_queue[i]) = saved_tree_used;
3835 TYPE_DECL_SUPPRESS_DEBUG (symbol_queue[i]) = saved_suppress_debug;
3836 }
3837
3838 symbol_queue_index = 0;
3839 --debug_nesting;
3840 }
3841
3842 /* Queue a type symbol needed as part of the definition of a decl
3843 symbol. These symbols are generated when debug_flush_symbol_queue()
3844 is called. */
3845
3846 void
3847 debug_queue_symbol (tree decl)
3848 {
3849 if (symbol_queue_index >= symbol_queue_size)
3850 {
3851 symbol_queue_size += 10;
3852 symbol_queue = xrealloc (symbol_queue,
3853 symbol_queue_size * sizeof (tree));
3854 }
3855
3856 symbol_queue[symbol_queue_index++] = decl;
3857 }
3858
3859 /* Free symbol queue. */
3860 void
3861 debug_free_queue (void)
3862 {
3863 if (symbol_queue)
3864 {
3865 free (symbol_queue);
3866 symbol_queue = NULL;
3867 symbol_queue_size = 0;
3868 }
3869 }