996415cff30c130b8b3358c109720b62d80099e1
[gcc.git] / gcc / config / arm / arm.c
1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
6
7 This file is part of GNU CC.
8
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
12 any later version.
13
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
22
23 #include <stdio.h>
24 #include <string.h>
25 #include "assert.h"
26 #include "config.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "reload.h"
38
/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 5

/* Some function declarations.  */
extern FILE *asm_out_file;
extern char *output_multi_immediate ();
extern void arm_increase_location ();

HOST_WIDE_INT int_log2 PROTO ((HOST_WIDE_INT));
static int get_prologue_size PROTO ((void));

/* Define the information needed to generate branch insns.  This is
   stored from the compare operation.  */

rtx arm_compare_op0, arm_compare_op1;	/* Operands of the last compare.  */
int arm_compare_fp;		/* NOTE(review): presumably nonzero when the
				   last compare was floating point -- confirm
				   against the md file.  */
56
/* What type of cpu are we compiling for?  */

enum processor_type arm_cpu;

/* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
   must report the mode of the memory reference from PRINT_OPERAND to
   PRINT_OPERAND_ADDRESS.  */
enum machine_mode output_memory_reference_mode;

/* Nonzero if the prologue must setup `fp'.
   NOTE(review): judging by its use in use_return_insn, this appears to be
   set for functions taking anonymous (variadic) arguments -- confirm.  */
int current_function_anonymous_args;

/* Location counter of .text segment.  */
int arm_text_location = 0;

/* Set to one if we think that lr is only saved because of subroutine calls,
   but all of these can be `put after' return insns.  */
int lr_save_eliminated;

/* A hash table is used to store text segment labels and their associated
   offset from the start of the text segment.  */
struct label_offset
{
  char *name;			/* Label name; the hash key.  */
  int offset;			/* Offset from the start of .text.  */
  struct label_offset *cdr;	/* Next entry in the same hash bucket.  */
};

/* 257 is prime, which helps spread keys across the buckets.  */
#define LABEL_HASH_SIZE 257

static struct label_offset *offset_table[LABEL_HASH_SIZE];

/* Set to 1 when a return insn is output, this means that the epilogue
   is not needed.  */

static int return_used_this_function;

/* For an explanation of these variables, see final_prescan_insn below.  */
int arm_ccfsm_state;
int arm_current_cc;
rtx arm_target_insn;
int arm_target_label;

/* The condition codes of the ARM, and the inverse function.  */
char *arm_condition_codes[] =
{
  "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
  "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
};

/* The table above is ordered in complementary pairs, so flipping the
   low bit of an index yields the logically inverse condition.  */
#define ARM_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
108 \f
109 /* Return 1 if it is possible to return using a single instruction */
110
111 int
112 use_return_insn ()
113 {
114 int regno;
115
116 if (!reload_completed ||current_function_pretend_args_size
117 || current_function_anonymous_args
118 || (get_frame_size () && !(TARGET_APCS || frame_pointer_needed)))
119 return 0;
120
121 /* Can't be done if any of the FPU regs are pushed, since this also
122 requires an insn */
123 for (regno = 20; regno < 24; regno++)
124 if (regs_ever_live[regno])
125 return 0;
126
127 return 1;
128 }
129
130 /* Return TRUE if int I is a valid immediate ARM constant. */
131
132 int
133 const_ok_for_arm (i)
134 HOST_WIDE_INT i;
135 {
136 unsigned HOST_WIDE_INT mask = ~0xFF;
137
138 do
139 {
140 if ((i & mask & (unsigned HOST_WIDE_INT) 0xffffffff) == 0)
141 return TRUE;
142 mask =
143 (mask << 2) | ((mask & (unsigned HOST_WIDE_INT) 0xffffffff)
144 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT) 0xffffffff);
145 } while (mask != ~0xFF);
146
147 return FALSE;
148 }
149
/* This code has been fixed for cross compilation.  */

/* Nonzero once values_fpa[] below has been filled in by init_fpa_table.  */
static int fpa_consts_inited = 0;

/* Decimal spellings of the eight constants the FPU accepts as immediates;
   indexed identically to values_fpa.  */
char *strings_fpa[8] = {
  "0.0",
  "1.0",
  "2.0",
  "3.0",
  "4.0",
  "5.0",
  "0.5",
  "10.0"
};

/* The same eight constants, parsed into REAL_VALUE_TYPE form.  */
static REAL_VALUE_TYPE values_fpa[8];
166
167 static void
168 init_fpa_table ()
169 {
170 int i;
171 REAL_VALUE_TYPE r;
172
173 for (i = 0; i < 8; i++)
174 {
175 r = REAL_VALUE_ATOF (strings_fpa[i], DFmode);
176 values_fpa[i] = r;
177 }
178
179 fpa_consts_inited = 1;
180 }
181
182 /* Return TRUE if rtx X is a valid immediate FPU constant. */
183
184 int
185 const_double_rtx_ok_for_fpu (x)
186 rtx x;
187 {
188 REAL_VALUE_TYPE r;
189 int i;
190
191 if (!fpa_consts_inited)
192 init_fpa_table ();
193
194 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
195 if (REAL_VALUE_MINUS_ZERO (r))
196 return 0;
197
198 for (i = 0; i < 8; i++)
199 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
200 return 1;
201
202 return 0;
203 }
204
205 /* Return TRUE if rtx X is a valid immediate FPU constant. */
206
207 int
208 neg_const_double_rtx_ok_for_fpu (x)
209 rtx x;
210 {
211 REAL_VALUE_TYPE r;
212 int i;
213
214 if (!fpa_consts_inited)
215 init_fpa_table ();
216
217 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
218 r = REAL_VALUE_NEGATE (r);
219 if (REAL_VALUE_MINUS_ZERO (r))
220 return 0;
221
222 for (i = 0; i < 8; i++)
223 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
224 return 1;
225
226 return 0;
227 }
228 \f
229 /* Predicates for `match_operand' and `match_operator'. */
230
231 /* s_register_operand is the same as register_operand, but it doesn't accept
232 (SUBREG (MEM)...). */
233
234 int
235 s_register_operand (op, mode)
236 register rtx op;
237 enum machine_mode mode;
238 {
239 if (GET_MODE (op) != mode && mode != VOIDmode)
240 return 0;
241
242 if (GET_CODE (op) == SUBREG)
243 op = SUBREG_REG (op);
244
245 /* We don't consider registers whose class is NO_REGS
246 to be a register operand. */
247 return (GET_CODE (op) == REG
248 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
249 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
250 }
251
252 /* Return 1 if OP is an item in memory, given that we are in reload. */
253
254 int
255 reload_memory_operand (op, mode)
256 rtx op;
257 enum machine_mode mode;
258 {
259 int regno = true_regnum (op);
260
261 return (! CONSTANT_P (op)
262 && (regno == -1
263 || (GET_CODE (op) == REG
264 && REGNO (op) >= FIRST_PSEUDO_REGISTER)));
265 }
266
267 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
268
269 int
270 arm_rhs_operand (op, mode)
271 rtx op;
272 enum machine_mode mode;
273 {
274 return (s_register_operand (op, mode)
275 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op))));
276 }
277
278 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
279 */
280
281 int
282 arm_rhsm_operand (op, mode)
283 rtx op;
284 enum machine_mode mode;
285 {
286 return (s_register_operand (op, mode)
287 || (GET_CODE (op) == CONST_INT && const_ok_for_arm (INTVAL (op)))
288 || memory_operand (op, mode));
289 }
290
291 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
292 constant that is valid when negated. */
293
294 int
295 arm_add_operand (op, mode)
296 rtx op;
297 enum machine_mode mode;
298 {
299 return (s_register_operand (op, mode)
300 || (GET_CODE (op) == CONST_INT
301 && (const_ok_for_arm (INTVAL (op))
302 || const_ok_for_arm (-INTVAL (op)))));
303 }
304
305 int
306 arm_not_operand (op, mode)
307 rtx op;
308 enum machine_mode mode;
309 {
310 return (s_register_operand (op, mode)
311 || (GET_CODE (op) == CONST_INT
312 && (const_ok_for_arm (INTVAL (op))
313 || const_ok_for_arm (~INTVAL (op)))));
314 }
315
316 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
317
318 int
319 fpu_rhs_operand (op, mode)
320 rtx op;
321 enum machine_mode mode;
322 {
323 if (s_register_operand (op, mode))
324 return TRUE;
325 else if (GET_CODE (op) == CONST_DOUBLE)
326 return (const_double_rtx_ok_for_fpu (op));
327
328 return FALSE;
329 }
330
331 int
332 fpu_add_operand (op, mode)
333 rtx op;
334 enum machine_mode mode;
335 {
336 if (s_register_operand (op, mode))
337 return TRUE;
338 else if (GET_CODE (op) == CONST_DOUBLE)
339 return (const_double_rtx_ok_for_fpu (op)
340 || neg_const_double_rtx_ok_for_fpu (op));
341
342 return FALSE;
343 }
344
345 /* Return nonzero if OP is a constant power of two. */
346
347 int
348 power_of_two_operand (op, mode)
349 rtx op;
350 enum machine_mode mode;
351 {
352 if (GET_CODE (op) == CONST_INT)
353 {
354 HOST_WIDE_INT value = INTVAL(op);
355 return value != 0 && (value & (value - 1)) == 0;
356 }
357 return FALSE;
358 }
359
360 /* Return TRUE for a valid operand of a DImode operation.
361 Either: REG, CONST_DOUBLE or MEM(DImode_address).
362 Note that this disallows MEM(REG+REG), but allows
363 MEM(PRE/POST_INC/DEC(REG)). */
364
365 int
366 di_operand (op, mode)
367 rtx op;
368 enum machine_mode mode;
369 {
370 if (s_register_operand (op, mode))
371 return TRUE;
372
373 switch (GET_CODE (op))
374 {
375 case CONST_DOUBLE:
376 case CONST_INT:
377 return TRUE;
378
379 case MEM:
380 return memory_address_p (DImode, XEXP (op, 0));
381
382 default:
383 return FALSE;
384 }
385 }
386
387 /* Return TRUE for valid index operands. */
388
389 int
390 index_operand (op, mode)
391 rtx op;
392 enum machine_mode mode;
393 {
394 return (s_register_operand(op, mode)
395 || (immediate_operand (op, mode)
396 && INTVAL (op) < 4096 && INTVAL (op) > -4096));
397 }
398
399 /* Return TRUE for valid shifts by a constant. This also accepts any
400 power of two on the (somewhat overly relaxed) assumption that the
401 shift operator in this case was a mult. */
402
403 int
404 const_shift_operand (op, mode)
405 rtx op;
406 enum machine_mode mode;
407 {
408 return (power_of_two_operand (op, mode)
409 || (immediate_operand (op, mode)
410 && (INTVAL (op) < 32 && INTVAL (op) > 0)));
411 }
412
413 /* Return TRUE for arithmetic operators which can be combined with a multiply
414 (shift). */
415
416 int
417 shiftable_operator (x, mode)
418 rtx x;
419 enum machine_mode mode;
420 {
421 if (GET_MODE (x) != mode)
422 return FALSE;
423 else
424 {
425 enum rtx_code code = GET_CODE (x);
426
427 return (code == PLUS || code == MINUS
428 || code == IOR || code == XOR || code == AND);
429 }
430 }
431
432 /* Return TRUE for shift operators. */
433
434 int
435 shift_operator (x, mode)
436 rtx x;
437 enum machine_mode mode;
438 {
439 if (GET_MODE (x) != mode)
440 return FALSE;
441 else
442 {
443 enum rtx_code code = GET_CODE (x);
444
445 if (code == MULT)
446 return power_of_two_operand (XEXP (x, 1));
447
448 return (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
449 }
450 }
451
452 int equality_operator (x, mode)
453 rtx x;
454 enum machine_mode mode;
455 {
456 return GET_CODE (x) == EQ || GET_CODE (x) == NE;
457 }
458
459 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
460
461 int
462 minmax_operator (x, mode)
463 rtx x;
464 enum machine_mode mode;
465 {
466 enum rtx_code code = GET_CODE (x);
467
468 if (GET_MODE (x) != mode)
469 return FALSE;
470
471 return code == SMIN || code == SMAX || code == UMIN || code == UMAX;
472 }
473
474 /* return TRUE if x is EQ or NE */
475
476 /* Return TRUE if this is the condition code register, if we aren't given
477 a mode, accept any class CCmode register */
478
479 int
480 cc_register (x, mode)
481 rtx x;
482 enum machine_mode mode;
483 {
484 if (mode == VOIDmode)
485 {
486 mode = GET_MODE (x);
487 if (GET_MODE_CLASS (mode) != MODE_CC)
488 return FALSE;
489 }
490
491 if (mode == GET_MODE (x) && GET_CODE (x) == REG && REGNO (x) == 24)
492 return TRUE;
493
494 return FALSE;
495 }
496
497 enum rtx_code
498 minmax_code (x)
499 rtx x;
500 {
501 enum rtx_code code = GET_CODE (x);
502
503 if (code == SMAX)
504 return GE;
505 else if (code == SMIN)
506 return LE;
507 else if (code == UMIN)
508 return LEU;
509 else if (code == UMAX)
510 return GEU;
511
512 abort ();
513 }
514
515 /* Return 1 if memory locations are adjacent */
516
517 int
518 adjacent_mem_locations (a, b)
519 rtx a, b;
520 {
521 int val0 = 0, val1 = 0;
522 int reg0, reg1;
523
524 if ((GET_CODE (XEXP (a, 0)) == REG
525 || (GET_CODE (XEXP (a, 0)) == PLUS
526 && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
527 && (GET_CODE (XEXP (b, 0)) == REG
528 || (GET_CODE (XEXP (b, 0)) == PLUS
529 && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
530 {
531 if (GET_CODE (XEXP (a, 0)) == PLUS)
532 {
533 reg0 = REGNO (XEXP (XEXP (a, 0), 0));
534 val0 = INTVAL (XEXP (XEXP (a, 0), 1));
535 }
536 else
537 reg0 = REGNO (XEXP (a, 0));
538 if (GET_CODE (XEXP (b, 0)) == PLUS)
539 {
540 reg1 = REGNO (XEXP (XEXP (b, 0), 0));
541 val1 = INTVAL (XEXP (XEXP (b, 0), 1));
542 }
543 else
544 reg1 = REGNO (XEXP (b, 0));
545 return (reg0 == reg1) && ((val1 - val0) == 4 || (val0 - val1) == 4);
546 }
547 return 0;
548 }
549
/* Return 1 if OP is a load multiple operation.  It is known to be
   parallel and the first section will be tested.  The expected shape is

	(parallel [(set (reg b) (plus (reg b) (const_int)))   ; if write-back
		   (set (reg r)   (mem (plus (reg a) (const_int 0))))
		   (set (reg r+1) (mem (plus (reg a) (const_int 4))))
		   ...
		   (clobber (reg b))])			      ; if write-back

   MODE is unused; it exists only to match the predicate interface.  */

int
load_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int dest_regno;		/* Register loaded by the first element.  */
  rtx src_addr;			/* Base address of the first load.  */
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* A load multiple needs at least two elements, the first a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back: then the first SET
     advances the base register rather than loading a value.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the update must add one word per
	 actual load ((count - 2) * 4) to the register it sets, and the
	 last element must clobber that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      /* Ignore the trailing CLOBBER from here on.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below: the first load
     really must be REG <- MEM.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
    return 0;

  /* The first load fixes the register sequence and base address that
     every later element must continue.  */
  dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      rtx elt = XVECEXP (op, 0, i);

      /* Each element must load the next consecutive register from the
	 next consecutive word.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != REG
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || REGNO (SET_DEST (elt)) != dest_regno + i - base
	  || GET_CODE (SET_SRC (elt)) != MEM
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
	  || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
618
/* Return 1 if OP is a store multiple operation.  It is known to be
   parallel and the first section will be tested.  This is the mirror
   image of load_multiple_operation above: consecutive registers stored
   to consecutive words, with an optional leading base-register update
   and trailing clobber for the write-back form.  MODE is unused.  */

int
store_multiple_operation (op, mode)
     rtx op;
     enum machine_mode mode;
{
  HOST_WIDE_INT count = XVECLEN (op, 0);
  int src_regno;		/* Register stored by the first element.  */
  rtx dest_addr;		/* Base address of the first store.  */
  HOST_WIDE_INT i = 1, base = 0;
  rtx elt;

  /* A store multiple needs at least two elements, the first a SET.  */
  if (count <= 1
      || GET_CODE (XVECEXP (op, 0, 0)) != SET)
    return 0;

  /* Check to see if this might be a write-back: then the first SET
     advances the base register rather than storing a value.  */
  if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
    {
      i++;
      base = 1;

      /* Now check it more carefully: the update must add one word per
	 actual store ((count - 2) * 4) to the register it sets, and the
	 last element must clobber that same register.  */
      if (GET_CODE (SET_DEST (elt)) != REG
	  || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
	  || REGNO (XEXP (SET_SRC (elt), 0)) != REGNO (SET_DEST (elt))
	  || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
	  || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 2) * 4
	  || GET_CODE (XVECEXP (op, 0, count - 1)) != CLOBBER
	  || GET_CODE (XEXP (XVECEXP (op, 0, count - 1), 0)) != REG
	  || REGNO (XEXP (XVECEXP (op, 0, count - 1), 0))
	      != REGNO (SET_DEST (elt)))
	return 0;

      /* Ignore the trailing CLOBBER from here on.  */
      count--;
    }

  /* Perform a quick check so we don't blow up below: the first store
     really must be MEM <- REG.  */
  if (count <= i
      || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
      || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
      || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
    return 0;

  /* The first store fixes the register sequence and base address that
     every later element must continue.  */
  src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);

  for (; i < count; i++)
    {
      elt = XVECEXP (op, 0, i);

      /* Each element must store the next consecutive register to the
	 next consecutive word.  */
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_SRC (elt)) != REG
	  || GET_MODE (SET_SRC (elt)) != SImode
	  || REGNO (SET_SRC (elt)) != src_regno + i - base
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_MODE (SET_DEST (elt)) != SImode
	  || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
	  || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
	  || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
	  || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
	return 0;
    }

  return 1;
}
687 \f
688 /* Routines for use with attributes */
689
690 int
691 const_pool_offset (symbol)
692 rtx symbol;
693 {
694 return get_pool_offset (symbol) - get_pool_size () - get_prologue_size ();
695 }
696 \f
697 /* Routines for use in generating RTL */
698
699 rtx
700 arm_gen_load_multiple (base_regno, count, from, up, write_back)
701 int base_regno;
702 int count;
703 rtx from;
704 int up;
705 int write_back;
706 {
707 int i = 0, j;
708 rtx result;
709 int sign = up ? 1 : -1;
710
711 result = gen_rtx (PARALLEL, VOIDmode,
712 rtvec_alloc (count + (write_back ? 2 : 0)));
713 if (write_back)
714 {
715 XVECEXP (result, 0, 0)
716 = gen_rtx (SET, GET_MODE (from), from,
717 plus_constant (from, count * 4 * sign));
718 i = 1;
719 count++;
720 }
721
722 for (j = 0; i < count; i++, j++)
723 {
724 XVECEXP (result, 0, i)
725 = gen_rtx (SET, VOIDmode, gen_rtx (REG, SImode, base_regno + j),
726 gen_rtx (MEM, SImode,
727 plus_constant (from, j * 4 * sign)));
728 }
729
730 if (write_back)
731 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, from);
732
733 return result;
734 }
735
736 rtx
737 arm_gen_store_multiple (base_regno, count, to, up, write_back)
738 int base_regno;
739 int count;
740 rtx to;
741 int up;
742 int write_back;
743 {
744 int i = 0, j;
745 rtx result;
746 int sign = up ? 1 : -1;
747
748 result = gen_rtx (PARALLEL, VOIDmode,
749 rtvec_alloc (count + (write_back ? 2 : 0)));
750 if (write_back)
751 {
752 XVECEXP (result, 0, 0)
753 = gen_rtx (SET, GET_MODE (to), to,
754 plus_constant (to, count * 4 * sign));
755 i = 1;
756 count++;
757 }
758
759 for (j = 0; i < count; i++, j++)
760 {
761 XVECEXP (result, 0, i)
762 = gen_rtx (SET, VOIDmode,
763 gen_rtx (MEM, SImode, plus_constant (to, j * 4 * sign)),
764 gen_rtx (REG, SImode, base_regno + j));
765 }
766
767 if (write_back)
768 XVECEXP (result, 0, i) = gen_rtx (CLOBBER, SImode, to);
769
770 return result;
771 }
772
773 /* X and Y are two things to compare using CODE. Emit the compare insn and
774 return the rtx for register 0 in the proper mode. FP means this is a
775 floating point compare: I don't think that it is needed on the arm. */
776
777 rtx
778 gen_compare_reg (code, x, y, fp)
779 enum rtx_code code;
780 rtx x, y;
781 {
782 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
783 rtx cc_reg = gen_rtx (REG, mode, 24);
784
785 emit_insn (gen_rtx (SET, VOIDmode, cc_reg,
786 gen_rtx (COMPARE, mode, x, y)));
787
788 return cc_reg;
789 }
790
/* Emit RTL to store the HImode value OPERANDS[1] to the memory location
   OPERANDS[0] as two separate byte stores, using the SImode scratch
   register OPERANDS[2] to hold the shifted high byte.  NOTE(review):
   presumably used from a reload pattern, as the name suggests.  */

void
arm_reload_out_hi (operands)
     rtx *operands;
{
  /* The address may mention an eliminated register; substitute its
     replacement before reusing it below.  */
  rtx base = find_replacement (&XEXP (operands[0], 0));

  /* Store the low byte at the base address.  */
  emit_insn (gen_rtx (SET, VOIDmode,
		      gen_rtx (MEM, QImode, base),
		      gen_rtx (SUBREG, QImode, operands[1], 0)));
  /* Shift the high byte down into the scratch register...  */
  emit_insn (gen_rtx (SET, VOIDmode, operands[2],
		      gen_rtx (LSHIFTRT, SImode,
			       gen_rtx (SUBREG, SImode, operands[1], 0),
			       GEN_INT (8))));
  /* ...and store it at the following byte address.  */
  emit_insn (gen_rtx (SET, VOIDmode,
		      gen_rtx (MEM, QImode,
			       plus_constant (base, 1)),
		      gen_rtx (SUBREG, QImode, operands[2], 0)));
}
809 \f
810 /* Check to see if a branch is forwards or backwards. Return TRUE if it
811 is backwards. */
812
813 int
814 arm_backwards_branch (from, to)
815 int from, to;
816 {
817 return insn_addresses[to] <= insn_addresses[from];
818 }
819
820 /* Check to see if a branch is within the distance that can be done using
821 an arithmetic expression. */
822 int
823 short_branch (from, to)
824 int from, to;
825 {
826 int delta = insn_addresses[from] + 8 - insn_addresses[to];
827
828 return abs (delta) < 980; /* A small margin for safety */
829 }
830
831 /* Check to see that the insn isn't the target of the conditionalizing
832 code */
833 int
834 arm_insn_not_targeted (insn)
835 rtx insn;
836 {
837 return insn != arm_target_insn;
838 }
839
840 \f
841 /* Routines to output assembly language. */
842
843 /* If the rtx is the correct value then return the string of the number.
844 In this way we can ensure that valid double constants are generated even
845 when cross compiling. */
846 char *
847 fp_immediate_constant (x)
848 rtx (x);
849 {
850 REAL_VALUE_TYPE r;
851 int i;
852
853 if (!fpa_consts_inited)
854 init_fpa_table ();
855
856 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
857 for (i = 0; i < 8; i++)
858 if (REAL_VALUES_EQUAL (r, values_fpa[i]))
859 return strings_fpa[i];
860
861 abort ();
862 }
863
864 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
865 static char *
866 fp_const_from_val (r)
867 REAL_VALUE_TYPE *r;
868 {
869 int i;
870
871 if (! fpa_consts_inited)
872 init_fpa_table ();
873
874 for (i = 0; i < 8; i++)
875 if (REAL_VALUES_EQUAL (*r, values_fpa[i]))
876 return strings_fpa[i];
877
878 abort ();
879 }
880
881 /* Output the operands of a LDM/STM instruction to STREAM.
882 MASK is the ARM register set mask of which only bits 0-15 are important.
883 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
884 must follow the register list. */
885
886 void
887 print_multi_reg (stream, instr, mask, hat)
888 FILE *stream;
889 char *instr;
890 int mask, hat;
891 {
892 int i;
893 int not_first = FALSE;
894
895 fputc ('\t', stream);
896 fprintf (stream, instr, ARM_REG_PREFIX);
897 fputs (", {", stream);
898 for (i = 0; i < 16; i++)
899 if (mask & (1 << i))
900 {
901 if (not_first)
902 fprintf (stream, ", ");
903 fprintf (stream, "%s%s", ARM_REG_PREFIX, reg_names[i]);
904 not_first = TRUE;
905 }
906
907 fprintf (stream, "}%s\n", hat ? "^" : "");
908 }
909
910 /* Output a 'call' insn. */
911
912 char *
913 output_call (operands)
914 rtx *operands;
915 {
916 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
917
918 if (REGNO (operands[0]) == 14)
919 {
920 operands[0] = gen_rtx (REG, SImode, 12);
921 output_asm_insn ("mov%?\t%0, %|lr", operands);
922 }
923 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
924 output_asm_insn ("mov%?\t%|pc, %0", operands);
925 return "";
926 }
927
/* Recursively rewrite the rtl at *X in place so that references to lr
   (hard reg 14) become ip (hard reg 12).  Returns nonzero if anything
   was changed.  */

static int
eliminate_lr2ip (x)
     rtx *x;
{
  int something_changed = 0;
  rtx x0 = *x;
  int code = GET_CODE (x0);
  register int i, j;
  register char *fmt;

  switch (code)
    {
    case REG:
      /* Replace lr with ip through the caller's pointer.  */
      if (REGNO (x0) == 14)
	{
	  *x = gen_rtx (REG, SImode, 12);
	  return 1;
	}
      return 0;
    default:
      /* Scan through the sub-elements and change any references there.  */
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (fmt[i] == 'e')
	  something_changed |= eliminate_lr2ip (&XEXP (x0, i));
	else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x0, i); j++)
	    something_changed |= eliminate_lr2ip (&XVECEXP (x0, i, j));
      return something_changed;
    }
}
959
960 /* Output a 'call' insn that is a reference in memory. */
961
962 char *
963 output_call_mem (operands)
964 rtx *operands;
965 {
966 operands[0] = copy_rtx (operands[0]); /* Be ultra careful */
967 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
968 */
969 if (eliminate_lr2ip (&operands[0]))
970 output_asm_insn ("mov%?\t%|ip, %|lr", operands);
971
972 output_asm_insn ("mov%?\t%|lr, %|pc", operands);
973 output_asm_insn ("ldr%?\t%|pc, %0", operands);
974 return "";
975 }
976
977
978 /* Output a move from arm registers to an fpu registers.
979 OPERANDS[0] is an fpu register.
980 OPERANDS[1] is the first registers of an arm register pair. */
981
982 char *
983 output_mov_long_double_fpu_from_arm (operands)
984 rtx *operands;
985 {
986 int arm_reg0 = REGNO (operands[1]);
987 rtx ops[3];
988
989 if (arm_reg0 == 12)
990 abort();
991
992 ops[0] = gen_rtx (REG, SImode, arm_reg0);
993 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
994 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
995
996 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops);
997 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands);
998 return "";
999 }
1000
1001 /* Output a move from an fpu register to arm registers.
1002 OPERANDS[0] is the first registers of an arm register pair.
1003 OPERANDS[1] is an fpu register. */
1004
1005 char *
1006 output_mov_long_double_arm_from_fpu (operands)
1007 rtx *operands;
1008 {
1009 int arm_reg0 = REGNO (operands[0]);
1010 rtx ops[3];
1011
1012 if (arm_reg0 == 12)
1013 abort();
1014
1015 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1016 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1017 ops[2] = gen_rtx (REG, SImode, 2 + arm_reg0);
1018
1019 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands);
1020 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops);
1021 return "";
1022 }
1023
1024 /* Output a move from arm registers to arm registers of a long double
1025 OPERANDS[0] is the destination.
1026 OPERANDS[1] is the source. */
1027 char *
1028 output_mov_long_double_arm_from_arm (operands)
1029 rtx *operands;
1030 {
1031 /* We have to be careful here because the two might overlap */
1032 int dest_start = REGNO (operands[0]);
1033 int src_start = REGNO (operands[1]);
1034 rtx ops[2];
1035 int i;
1036
1037 if (dest_start < src_start)
1038 {
1039 for (i = 0; i < 3; i++)
1040 {
1041 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1042 ops[1] = gen_rtx (REG, SImode, src_start + i);
1043 output_asm_insn ("mov%?\t%0, %1", ops);
1044 }
1045 }
1046 else
1047 {
1048 for (i = 2; i >= 0; i--)
1049 {
1050 ops[0] = gen_rtx (REG, SImode, dest_start + i);
1051 ops[1] = gen_rtx (REG, SImode, src_start + i);
1052 output_asm_insn ("mov%?\t%0, %1", ops);
1053 }
1054 }
1055
1056 return "";
1057 }
1058
1059
1060 /* Output a move from arm registers to an fpu registers.
1061 OPERANDS[0] is an fpu register.
1062 OPERANDS[1] is the first registers of an arm register pair. */
1063
1064 char *
1065 output_mov_double_fpu_from_arm (operands)
1066 rtx *operands;
1067 {
1068 int arm_reg0 = REGNO (operands[1]);
1069 rtx ops[2];
1070
1071 if (arm_reg0 == 12)
1072 abort();
1073 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1074 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1075 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops);
1076 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands);
1077 return "";
1078 }
1079
1080 /* Output a move from an fpu register to arm registers.
1081 OPERANDS[0] is the first registers of an arm register pair.
1082 OPERANDS[1] is an fpu register. */
1083
1084 char *
1085 output_mov_double_arm_from_fpu (operands)
1086 rtx *operands;
1087 {
1088 int arm_reg0 = REGNO (operands[0]);
1089 rtx ops[2];
1090
1091 if (arm_reg0 == 12)
1092 abort();
1093
1094 ops[0] = gen_rtx (REG, SImode, arm_reg0);
1095 ops[1] = gen_rtx (REG, SImode, 1 + arm_reg0);
1096 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands);
1097 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops);
1098 return "";
1099 }
1100
/* Output a move between double words.
   It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
   or MEM<-REG and all MEMs must be offsettable addresses.  */

char *
output_move_double (operands)
     rtx *operands;
{
  enum rtx_code code0 = GET_CODE (operands[0]);
  enum rtx_code code1 = GET_CODE (operands[1]);
  rtx otherops[2];

  if (code0 == REG)
    {
      int reg0 = REGNO (operands[0]);

      /* otherops[0] is the second (high-numbered) destination register.  */
      otherops[0] = gen_rtx (REG, SImode, 1 + reg0);
      if (code1 == REG)
	{
	  int reg1 = REGNO (operands[1]);
	  /* The source pair must not start at ip.  */
	  if (reg1 == 12)
	    abort();

	  otherops[1] = gen_rtx (REG, SImode, 1 + reg1);

	  /* Ensure the second source is not overwritten */
	  if (reg0 == 1 + reg1)
	    {
	      output_asm_insn("mov%?\t%0, %1", otherops);
	      output_asm_insn("mov%?\t%0, %1", operands);
	    }
	  else
	    {
	      output_asm_insn("mov%?\t%0, %1", operands);
	      output_asm_insn("mov%?\t%0, %1", otherops);
	    }
	}
      else if (code1 == CONST_DOUBLE)
	{
	  /* Move each half of the constant as a separate immediate.
	     NOTE(review): output_mov_immediate is defined with a single
	     parameter; the extra arguments here are ignored under the
	     K&R calling convention -- confirm intent.  */
	  otherops[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_HIGH (operands[1]));
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_LOW (operands[1]));
	  output_mov_immediate (operands, FALSE, "");
	  output_mov_immediate (otherops, FALSE, "");
	}
      else if (code1 == CONST_INT)
	{
	  otherops[1] = const0_rtx;
	  /* sign extend the intval into the high-order word */
	  /* Note: output_mov_immediate may clobber operands[1], so we
	     put this out first */
	  if (INTVAL (operands[1]) < 0)
	    output_asm_insn ("mvn%?\t%0, %1", otherops);
	  else
	    output_asm_insn ("mov%?\t%0, %1", otherops);
	  output_mov_immediate (operands, FALSE, "");
	}
      else if (code1 == MEM)
	{
	  switch (GET_CODE (XEXP (operands[1], 0)))
	    {
	    case REG:
	      /* Handle the simple case where address is [r, #0] more
		 efficient.  */
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;
	    case PRE_INC:
	      /* No pre-increment ldm form: update the base first.  */
	      output_asm_insn ("add%?\t%m1, %m1, #8", operands);
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;
	    case PRE_DEC:
	      output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      break;
	    case POST_INC:
	      /* ldm with write-back performs the increment itself.  */
	      output_asm_insn ("ldm%?ia\t%m1!, %M0", operands);
	      break;
	    case POST_DEC:
	      output_asm_insn ("ldm%?ia\t%m1, %M0", operands);
	      output_asm_insn ("sub%?\t%m1, %m1, #8", operands);
	      break;
	    default:
	      /* Offsettable address: use two single-word loads.  */
	      otherops[1] = adj_offsettable_operand (operands[1], 4);
	      /* Take care of overlapping base/data reg: load the word
		 that clobbers the base register last.  */
	      if (reg_mentioned_p (operands[0], operands[1]))
		{
		  output_asm_insn ("ldr%?\t%0, %1", otherops);
		  output_asm_insn ("ldr%?\t%0, %1", operands);
		}
	      else
		{
		  output_asm_insn ("ldr%?\t%0, %1", operands);
		  output_asm_insn ("ldr%?\t%0, %1", otherops);
		}
	    }
	}
      else abort(); /* Constraints should prevent this */
    }
  else if (code0 == MEM && code1 == REG)
    {
      /* The source pair must not start at ip.  */
      if (REGNO (operands[1]) == 12)
	abort();
      switch (GET_CODE (XEXP (operands[0], 0)))
	{
	case REG:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;
	case PRE_INC:
	  /* No pre-increment stm form: update the base first.  */
	  output_asm_insn ("add%?\t%m0, %m0, #8", operands);
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;
	case PRE_DEC:
	  output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  break;
	case POST_INC:
	  /* stm with write-back performs the increment itself.  */
	  output_asm_insn ("stm%?ia\t%m0!, %M1", operands);
	  break;
	case POST_DEC:
	  output_asm_insn ("stm%?ia\t%m0, %M1", operands);
	  output_asm_insn ("sub%?\t%m0, %m0, #8", operands);
	  break;
	default:
	  /* Offsettable address: two single-word stores.  Stores do not
	     clobber the data registers, so ordering is unconstrained.  */
	  otherops[0] = adj_offsettable_operand (operands[0], 4);
	  otherops[1] = gen_rtx (REG, SImode, 1 + REGNO (operands[1]));
	  output_asm_insn ("str%?\t%1, %0", operands);
	  output_asm_insn ("str%?\t%1, %0", otherops);
	}
    }
  else abort(); /* Constraints should prevent this */

  return "";
}
1235
1236
1237 /* Output an arbitrary MOV reg, #n.
1238 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1239
char *
output_mov_immediate (operands)
rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[1]);
  int n_ones = 0;
  int i;

  /* NOTE(review): some callers pass extra arguments (see the call in
     output_move_double); they are ignored under K&R linkage -- confirm
     this is intended.  */

  /* Try to use one MOV: possible when N itself is a valid ARM
     immediate.  */
  if (const_ok_for_arm (n))
    {
      output_asm_insn ("mov%?\t%0, %1", operands);
      return "";
    }

  /* Try to use one MVN: possible when the bitwise complement of N is a
     valid ARM immediate.  */
  if (const_ok_for_arm (~n))
    {
      operands[1] = GEN_INT (~n);
      output_asm_insn ("mvn%?\t%0, %1", operands);
      return "";
    }

  /* If all else fails, make it out of ORRs or BICs as appropriate. */

  /* Count the set bits to decide which form needs fewer instructions.  */
  for (i=0; i < 32; i++)
    if (n & 1 << i)
      n_ones++;

  if (n_ones > 16)  /* Shorter to use MVN with BIC in this case. */
    output_multi_immediate(operands, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
			   ~n);
  else
    output_multi_immediate(operands, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
			   n);

  return "";
}
1278
1279
1280 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1281 adding zero to one register, output nothing. */
1282
char *
output_add_immediate (operands)
rtx *operands;
{
  HOST_WIDE_INT n = INTVAL (operands[2]);

  /* Emit nothing for "add rX, rX, #0".  */
  if (n != 0 || REGNO (operands[0]) != REGNO (operands[1]))
    {
      /* A negative constant is added by subtracting its absolute value;
	 output_multi_immediate splits the value into valid ARM
	 immediates when one instruction is not enough.  */
      if (n < 0)
	output_multi_immediate (operands,
				"sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
				-n);
      else
	output_multi_immediate (operands,
				"add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
				n);
    }

  return "";
}
1303
1304 /* Output a multiple immediate operation.
1305 OPERANDS is the vector of operands referred to in the output patterns.
1306 INSTR1 is the output pattern to use for the first constant.
1307 INSTR2 is the output pattern to use for subsequent constants.
1308 IMMED_OP is the index of the constant slot in OPERANDS.
1309 N is the constant value. */
1310
char *
output_multi_immediate (operands, instr1, instr2, immed_op, n)
rtx *operands;
char *instr1, *instr2;
int immed_op;
HOST_WIDE_INT n;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* Only the low 32 bits are meaningful on ARM.  */
  n &= 0xffffffff;
#endif

  if (n == 0)
    {
      operands[immed_op] = const0_rtx;
      output_asm_insn (instr1, operands); /* Quick and easy output */
    }
  else
    {
      int i;
      char *instr = instr1;

      /* Note that n is never zero here (which would give no output) */
      /* Scan the constant two bits at a time; when a nonzero pair is
	 found, emit one instruction carrying the 8-bit chunk that
	 starts there (an ARM immediate is an 8-bit value rotated by an
	 even amount).  The "i += 6" below, combined with the loop's
	 "i += 2", skips past the chunk just emitted.  */
      for (i = 0; i < 32; i += 2)
	{
	  if (n & (3 << i))
	    {
	      operands[immed_op] = GEN_INT (n & (255 << i));
	      output_asm_insn (instr, operands);
	      instr = instr2;	/* All later insns use the follow-up form.  */
	      i += 6;
	    }
	}
    }
  return "";
}
1346
1347
1348 /* Return the appropriate ARM instruction for the operation code.
1349 The returned result should not be overwritten. OP is the rtx of the
1350 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1351 was shifted. */
1352
1353 char *
1354 arithmetic_instr (op, shift_first_arg)
1355 rtx op;
1356 int shift_first_arg;
1357 {
1358 switch (GET_CODE (op))
1359 {
1360 case PLUS:
1361 return "add";
1362
1363 case MINUS:
1364 return shift_first_arg ? "rsb" : "sub";
1365
1366 case IOR:
1367 return "orr";
1368
1369 case XOR:
1370 return "eor";
1371
1372 case AND:
1373 return "and";
1374
1375 default:
1376 abort ();
1377 }
1378 }
1379
1380
1381 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1382 for the operation code. The returned result should not be overwritten.
1383 OP is the rtx code of the shift.
1384 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
1385 shift. */
1386
static char *
shift_op (op, amountp)
rtx op;
HOST_WIDE_INT *amountp;
{
  int min_shift = 0;
  int max_shift = 31;
  char *mnem;

  /* A register (or subreg) shift count is flagged as -1; a constant
     count is passed back directly.  Anything else is malformed.  */
  if (GET_CODE (XEXP (op, 1)) == REG || GET_CODE (XEXP (op, 1)) == SUBREG)
    *amountp = -1;
  else if (GET_CODE (XEXP (op, 1)) == CONST_INT)
    *amountp = INTVAL (XEXP (op, 1));
  else
    abort ();

  switch (GET_CODE (op))
    {
    case ASHIFT:
      mnem = "asl";
      break;

    case ASHIFTRT:
      mnem = "asr";
      max_shift = 32;		/* Right shifts can encode a count of 32.  */
      break;

    case LSHIFTRT:
      mnem = "lsr";
      max_shift = 32;
      break;

    case ROTATERT:
      mnem = "ror";
      max_shift = 31;
      break;

    case MULT:
      /* A multiply by a power of two is emitted as a left shift; the
	 count must therefore be a constant power of two.  */
      if (*amountp != -1)
	*amountp = int_log2 (*amountp);
      else
	abort ();
      return "asl";

    default:
      abort ();
    }

  /* Reject constant shift counts outside the encodable range.  */
  if (*amountp != -1
      && (*amountp < min_shift || *amountp > max_shift))
    abort ();
  return mnem;
}
1440
1441
1442 /* Obtain the shift from the POWER of two. */
1443
1444 HOST_WIDE_INT
1445 int_log2 (power)
1446 HOST_WIDE_INT power;
1447 {
1448 HOST_WIDE_INT shift = 0;
1449
1450 while (((1 << shift) & power) == 0)
1451 {
1452 if (shift > 31)
1453 abort ();
1454 shift++;
1455 }
1456
1457 return shift;
1458 }
1459
1460 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
1461 /bin/as is horribly restrictive. */
1462
void
output_ascii_pseudo_op (stream, p, len)
FILE *stream;
unsigned char *p;
int len;
{
  int i;
  /* Start above the 50-character threshold so the first iteration
     always opens a fresh ".ascii" directive.  */
  int len_so_far = 1000;
  int chars_so_far = 0;

  for (i = 0; i < len; i++)
    {
      register int c = p[i];

      /* Split the string across directives of at most ~50 emitted
	 characters, since /bin/as cannot cope with long lines.  Each
	 time a directive is closed, credit the characters it covered
	 to the text-location counter.  */
      if (len_so_far > 50)
	{
	  if (chars_so_far)
	    fputs ("\"\n", stream);
	  fputs ("\t.ascii\t\"", stream);
	  len_so_far = 0;
	  arm_increase_location (chars_so_far);
	  chars_so_far = 0;
	}

      /* Double-quote and backslash must be escaped for the assembler.  */
      if (c == '\"' || c == '\\')
	{
	  putc('\\', stream);
	  len_so_far++;
	}

      if (c >= ' ' && c < 0177)
	{
	  putc (c, stream);
	  len_so_far++;
	}
      else
	{
	  /* Non-printing characters become 4-character octal escapes.  */
	  fprintf (stream, "\\%03o", c);
	  len_so_far +=4;
	}

      chars_so_far++;
    }

  fputs ("\"\n", stream);
  arm_increase_location (chars_so_far);
}
1510 \f
1511
1512 /* Try to determine whether a pattern really clobbers the link register.
1513 This information is useful when peepholing, so that lr need not be pushed
1514 if we combine a call followed by a return.
1515 NOTE: This code does not check for side-effect expressions in a SET_SRC:
1516 such a check should not be needed because these only update an existing
1517 value within a register; the register must still be set elsewhere within
1518 the function. */
1519
static int
pattern_really_clobbers_lr (x)
rtx x;
{
  int i;

  switch (GET_CODE (x))
    {
    case SET:
      switch (GET_CODE (SET_DEST (x)))
	{
	case REG:
	  /* A direct assignment to r14 (lr).  */
	  return REGNO (SET_DEST (x)) == 14;

	case SUBREG:
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == REG)
	    return REGNO (XEXP (SET_DEST (x), 0)) == 14;

	  /* A subreg of a memory reference cannot touch lr.  */
	  if (GET_CODE (XEXP (SET_DEST (x), 0)) == MEM)
	    return 0;
	  abort ();

	default:
	  return 0;
	}

    case PARALLEL:
      /* lr is clobbered if any element of the parallel clobbers it.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (pattern_really_clobbers_lr (XVECEXP (x, 0, i)))
	  return 1;
      return 0;

    case CLOBBER:
      switch (GET_CODE (XEXP (x, 0)))
	{
	case REG:
	  return REGNO (XEXP (x, 0)) == 14;

	case SUBREG:
	  if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
	    return REGNO (XEXP (XEXP (x, 0), 0)) == 14;
	  abort ();

	default:
	  return 0;
	}

    case UNSPEC:
      /* Assume the worst for unspecified operations.  */
      return 1;

    default:
      return 0;
    }
}
1574
/* Walk the insn chain starting at FIRST and return nonzero if lr (r14)
   really needs to be saved: i.e. some insn clobbers it, or a call is
   not in the call-followed-by-return shape the peepholes can turn into
   a tail call.  */

static int
function_really_clobbers_lr (first)
rtx first;
{
  rtx insn, next;

  for (insn = first; insn; insn = next_nonnote_insn (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	case NOTE:
	case CODE_LABEL:
	case JUMP_INSN:		/* Jump insns only change the PC (and conds) */
	case INLINE_HEADER:
	  break;

	case INSN:
	  if (pattern_really_clobbers_lr (PATTERN (insn)))
	    return 1;
	  break;

	case CALL_INSN:
	  /* Don't yet know how to handle those calls that are not to a
	     SYMBOL_REF */
	  if (GET_CODE (PATTERN (insn)) != PARALLEL)
	    abort ();

	  /* The call may appear bare (CALL) or with its result stored
	     (SET); in both cases the callee must be a SYMBOL_REF or we
	     conservatively report a clobber.  */
	  switch (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)))
	    {
	    case CALL:
	      if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn), 0, 0), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    case SET:
	      if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn),
							  0, 0)), 0), 0))
		  != SYMBOL_REF)
		return 1;
	      break;

	    default:	/* Don't recognize it, be safe */
	      return 1;
	    }

	  /* A call can be made (by peepholing) not to clobber lr iff it is
	     followed by a return.  There may, however, be a use insn iff
	     we are returning the result of the call.
	     If we run off the end of the insn chain, then that means the
	     call was at the end of the function.  Unfortunately we don't
	     have a return insn for the peephole to recognize, so we
	     must reject this.  (Can this be fixed by adding our own insn?) */
	  if ((next = next_nonnote_insn (insn)) == NULL)
	    return 1;

	  /* Skip over a USE of the call's result register, if present.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == USE
	      && (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	      && (REGNO (SET_DEST (XVECEXP (PATTERN (insn), 0, 0)))
		  == REGNO (XEXP (PATTERN (next), 0))))
	    if ((next = next_nonnote_insn (next)) == NULL)
	      return 1;

	  /* Only a call immediately followed by a return is safe.  */
	  if (GET_CODE (next) == JUMP_INSN
	      && GET_CODE (PATTERN (next)) == RETURN)
	    break;
	  return 1;

	default:
	  abort ();
	}
    }

  /* We have reached the end of the chain so lr was _not_ clobbered */
  return 0;
}
1652
/* Emit the text of a return sequence.  OPERAND is the condition operand
   referenced as %d0 in the templates.  REALLY_RETURN is zero when the
   restore should load lr rather than pc (no actual return).  Sets
   return_used_this_function so the epilogue knows a return was emitted.
   Always returns "".  */

char *
output_return_instruction (operand, really_return)
rtx operand;
int really_return;
{
  char instr[100];
  int reg, live_regs = 0;

  /* A function calling alloca must restore sp from fp, so a plain
     register-restore sequence that is not a real return is invalid.  */
  if (current_function_calls_alloca && ! really_return)
    abort();

  /* Count the live call-saved integer registers r0..r10.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs++;

  /* Account for the saved lr slot.  */
  if (live_regs || (regs_ever_live[14] && ! lr_save_eliminated))
    live_regs++;

  /* With a frame pointer, fp/ip/lr/pc were also pushed.  */
  if (frame_pointer_needed)
    live_regs += 4;

  if (live_regs)
    {
      /* Ensure lr (or pc) is counted even when its save was eliminated;
	 it always appears in the register list built below.  */
      if (lr_save_eliminated || ! regs_ever_live[14])
	live_regs++;

      if (frame_pointer_needed)
	strcpy (instr, "ldm%?%d0ea\t%|fp, {");
      else
	strcpy (instr, "ldm%?%d0fd\t%|sp!, {");

      /* Append the live r0..r10 names; LIVE_REGS now only tracks
	 whether another name (hence a comma) follows.  */
      for (reg = 0; reg <= 10; reg++)
	if (regs_ever_live[reg] && ! call_used_regs[reg])
	  {
	    strcat (instr, "%|");
	    strcat (instr, reg_names[reg]);
	    if (--live_regs)
	      strcat (instr, ", ");
	  }

      if (frame_pointer_needed)
	{
	  /* Restore fp, sp, then pc (or lr when not really returning).  */
	  strcat (instr, "%|");
	  strcat (instr, reg_names[11]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, reg_names[13]);
	  strcat (instr, ", ");
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      else
	{
	  strcat (instr, "%|");
	  strcat (instr, really_return ? reg_names[15] : reg_names[14]);
	}
      /* "}^" is used for a real return on non-ARM6 targets (it also
	 restores the processor flags); otherwise a plain "}".  */
      strcat (instr, (TARGET_6 || !really_return) ? "}" : "}^");
      output_asm_insn (instr, &operand);
    }
  else if (really_return)
    {
      /* Nothing to pop: return with a single mov into pc ("movs" on
	 non-ARM6 targets to restore the flags as well).  */
      strcpy (instr,
	      TARGET_6 ? "mov%?%d0\t%|pc, lr" : "mov%?%d0s\t%|pc, %|lr");
      output_asm_insn (instr, &operand);
    }

  return_used_this_function = 1;
  return "";
}
1722
1723 /* Return the size of the prologue. It's not too bad if we slightly
1724 over-estimate. */
1725
1726 static int
1727 get_prologue_size ()
1728 {
1729 int amount = 0;
1730 int regno;
1731
1732 /* Until we know which registers are really used return the maximum. */
1733 if (! reload_completed)
1734 return 24;
1735
1736 /* Look for integer regs that have to be saved. */
1737 for (regno = 0; regno < 15; regno++)
1738 if (regs_ever_live[regno] && ! call_used_regs[regno])
1739 {
1740 amount = 4;
1741 break;
1742 }
1743
1744 /* Clobbering lr when none of the other regs have been saved also requires
1745 a save. */
1746 if (regs_ever_live[14])
1747 amount = 4;
1748
1749 /* If we need to push a stack frame then there is an extra instruction to
1750 preserve the current value of the stack pointer. */
1751 if (frame_pointer_needed)
1752 amount = 8;
1753
1754 /* Now look for floating-point regs that need saving. We need an
1755 instruction per register. */
1756 for (regno = 16; regno < 24; regno++)
1757 if (regs_ever_live[regno] && ! call_used_regs[regno])
1758 amount += 4;
1759
1760 if (current_function_anonymous_args && current_function_pretend_args_size)
1761 amount += 4;
1762
1763 return amount;
1764 }
1765
1766 /* The amount of stack adjustment that happens here, in output_return and in
1767 output_epilogue must be exactly the same as was calculated during reload,
1768 or things will point to the wrong place. The only time we can safely
1769 ignore this constraint is when a function has no arguments on the stack,
no stack frame requirement and no live registers except for `lr'.  If we
1771 can guarantee that by making all function calls into tail calls and that
1772 lr is not clobbered in any other way, then there is no need to push lr
1773 onto the stack. */
1774
/* Emit the function prologue to F.  FRAME_SIZE is the size in bytes of
   the local frame.  Decides whether the lr save can be eliminated and
   records that in lr_save_eliminated for the epilogue.  */

void
output_func_prologue (f, frame_size)
FILE *f;
int frame_size;
{
  int reg, live_regs_mask = 0;
  rtx operands[3];

  /* Nonzero if we must stuff some register arguments onto the stack as if
     they were passed there.  */
  int store_arg_regs = 0;

  if (arm_ccfsm_state || arm_target_insn)
    abort ();					/* Sanity check */

  return_used_this_function = 0;
  lr_save_eliminated = 0;

  fprintf (f, "\t%c args = %d, pretend = %d, frame = %d\n",
	   ARM_COMMENT_CHAR, current_function_args_size,
	   current_function_pretend_args_size, frame_size);
  fprintf (f, "\t%c frame_needed = %d, current_function_anonymous_args = %d\n",
	   ARM_COMMENT_CHAR, frame_pointer_needed,
	   current_function_anonymous_args);

  if (current_function_anonymous_args && current_function_pretend_args_size)
    store_arg_regs = 1;

  /* Collect the live call-saved integer registers r0..r10.  */
  for (reg = 0; reg <= 10; reg++)
    if (regs_ever_live[reg] && ! call_used_regs[reg])
      live_regs_mask |= (1 << reg);

  if (frame_pointer_needed)
    {
      /* 0xD800 = bits 11, 12, 14, 15: fp, ip, lr and pc.  */
      live_regs_mask |= 0xD800;
      fprintf (f, "\tmov\t%sip, %ssp\n", ARM_REG_PREFIX, ARM_REG_PREFIX);
    }
  else if (regs_ever_live[14])
    {
      /* lr is live, but its save can be skipped if no stack arguments
	 exist and no insn really clobbers it (tail-callable calls).  */
      if (! current_function_args_size
	  && ! function_really_clobbers_lr (get_insns ()))
	{
	  fprintf (f,"\t%c I don't think this function clobbers lr\n",
		   ARM_COMMENT_CHAR);
	  lr_save_eliminated = 1;
	}
      else
	live_regs_mask |= 0x4000;	/* Bit 14: lr.  */
    }

  /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
     room.  If also STORE_ARG_REGS store the argument registers involved in
     the created slot (this is for stdarg and varargs).  */
  if (current_function_pretend_args_size)
    {
      if (store_arg_regs)
	{
	  int arg_size, mask = 0;

	  assert (current_function_pretend_args_size <= 16);
	  /* Push the tail of r0..r3 that holds the anonymous args.  */
	  for (reg = 3, arg_size = current_function_pretend_args_size;
	       arg_size > 0; reg--, arg_size -= 4)
	    mask |= (1 << reg);
	  print_multi_reg (f, "stmfd\t%ssp!", mask, FALSE);
	}
      else
	{
	  operands[0] = operands[1] = stack_pointer_rtx;
	  operands[2] = gen_rtx (CONST_INT, VOIDmode,
				 -current_function_pretend_args_size);
	  output_add_immediate (operands);
	}
    }

  if (live_regs_mask)
    {
      /* If a DImode load/store multiple is used, and the base register
	 is r3, then r4 can become an ever-live register without lr
	 doing so; in this case we need to push lr as well, or we
	 will fail to get a proper return.  */

      live_regs_mask |= 0x4000;
      lr_save_eliminated = 0;

      /* Now push all the call-saved regs onto the stack */
      print_multi_reg (f, "stmfd\t%ssp!", live_regs_mask, FALSE);
    }

  /* Save the live call-saved floating-point registers.  */
  for (reg = 23; reg > 15; reg--)
    if (regs_ever_live[reg] && !call_used_regs[reg])
      fprintf (f, "\tstfe\t%s%s, [%ssp, #-12]!\n", ARM_REG_PREFIX,
	       reg_names[reg], ARM_REG_PREFIX);

  if (frame_pointer_needed)
    {
      /* Make `fp' point to saved value of `pc'. */

      operands[0] = gen_rtx (REG, SImode, HARD_FRAME_POINTER_REGNUM);
      operands[1] = gen_rtx (REG, SImode, 12);
      operands[2] = GEN_INT ( - (4 + current_function_pretend_args_size));
      output_add_immediate (operands);
    }

  /* Allocate the local frame.  */
  if (frame_size)
    {
      operands[0] = operands[1] = stack_pointer_rtx;
      operands[2] = GEN_INT (-frame_size);
      output_add_immediate (operands);
    }
}
1885
1886
1887 void
1888 output_func_epilogue (f, frame_size)
1889 FILE *f;
1890 int frame_size;
1891 {
1892 int reg, live_regs_mask = 0, code_size = 0;
1893 /* If we need this then it will always be at lesat this much */
1894 int floats_offset = 24;
1895 rtx operands[3];
1896
1897 if (use_return_insn() && return_used_this_function)
1898 {
1899 if (frame_size && !(frame_pointer_needed || TARGET_APCS))
1900 {
1901 abort ();
1902 }
1903 goto epilogue_done;
1904 }
1905
1906 for (reg = 0; reg <= 10; reg++)
1907 if (regs_ever_live[reg] && ! call_used_regs[reg])
1908 {
1909 live_regs_mask |= (1 << reg);
1910 floats_offset += 4;
1911 }
1912
1913 if (frame_pointer_needed)
1914 {
1915 for (reg = 23; reg > 15; reg--)
1916 if (regs_ever_live[reg] && ! call_used_regs[reg])
1917 {
1918 fprintf (f, "\tldfe\t%s%s, [%sfp, #-%d]\n", ARM_REG_PREFIX,
1919 reg_names[reg], ARM_REG_PREFIX, floats_offset);
1920 floats_offset += 12;
1921 code_size += 4;
1922 }
1923
1924 live_regs_mask |= 0xA800;
1925 print_multi_reg (f, "ldmea\t%sfp", live_regs_mask,
1926 TARGET_6 ? FALSE : TRUE);
1927 code_size += 4;
1928 }
1929 else
1930 {
1931 /* Restore stack pointer if necessary. */
1932 if (frame_size)
1933 {
1934 operands[0] = operands[1] = stack_pointer_rtx;
1935 operands[2] = gen_rtx (CONST_INT, VOIDmode, frame_size);
1936 output_add_immediate (operands);
1937 }
1938
1939 for (reg = 16; reg < 24; reg++)
1940 if (regs_ever_live[reg] && ! call_used_regs[reg])
1941 {
1942 fprintf (f, "\tldfe\t%s%s, [%ssp], #12\n", ARM_REG_PREFIX,
1943 reg_names[reg], ARM_REG_PREFIX);
1944 code_size += 4;
1945 }
1946 if (current_function_pretend_args_size == 0 && regs_ever_live[14])
1947 {
1948 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask | 0x8000,
1949 TARGET_6 ? FALSE : TRUE);
1950 code_size += 4;
1951 }
1952 else
1953 {
1954 if (live_regs_mask || regs_ever_live[14])
1955 {
1956 live_regs_mask |= 0x4000;
1957 print_multi_reg (f, "ldmfd\t%ssp!", live_regs_mask, FALSE);
1958 code_size += 4;
1959 }
1960 if (current_function_pretend_args_size)
1961 {
1962 operands[0] = operands[1] = stack_pointer_rtx;
1963 operands[2] = gen_rtx (CONST_INT, VOIDmode,
1964 current_function_pretend_args_size);
1965 output_add_immediate (operands);
1966 }
1967 fprintf (f,
1968 TARGET_6 ? "\tmov\t%spc, %slr\n" : "\tmovs\t%spc, %slr\n",
1969 ARM_REG_PREFIX, ARM_REG_PREFIX, f);
1970 code_size += 4;
1971 }
1972 }
1973
1974 epilogue_done:
1975
1976 /* insn_addresses isn't allocated when not optimizing */
1977
1978 if (optimize > 0)
1979 arm_increase_location (code_size
1980 + insn_addresses[INSN_UID (get_last_insn ())]
1981 + get_prologue_size ());
1982
1983 current_function_anonymous_args = 0;
1984 }
1985 \f
1986 /* If CODE is 'd', then the X is a condition operand and the instruction
1987 should only be executed if the condition is true.
if CODE is 'D', then the X is a condition operand and the instruction
1989 should only be executed if the condition is false: however, if the mode
1990 of the comparison is CCFPEmode, then always execute the instruction -- we
1991 do this because in these circumstances !GE does not necessarily imply LT;
1992 in these cases the instruction pattern will take care to make sure that
1993 an instruction containing %d will follow, thereby undoing the effects of
doing this instruction unconditionally.
1995 If CODE is 'N' then X is a floating point operand that must be negated
1996 before output.
1997 If CODE is 'B' then output a bitwise inverted value of X (a const int).
1998 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
1999
void
arm_print_operand (stream, x, code)
FILE *stream;
rtx x;
int code;
{
  switch (code)
    {
    case '@':			/* Assembler comment character.  */
      fputc (ARM_COMMENT_CHAR, stream);
      return;

    case '|':			/* Register-name prefix, if any.  */
      fputs (ARM_REG_PREFIX, stream);
      return;

    case '?':			/* Condition suffix when the ccfsm is
				   conditionalising instructions.  */
      if (arm_ccfsm_state == 3 || arm_ccfsm_state == 4)
	fputs (arm_condition_codes[arm_current_cc], stream);
      return;

    case 'N':			/* Negated floating-point constant.  */
      {
	REAL_VALUE_TYPE r;
	REAL_VALUE_FROM_CONST_DOUBLE (r, x);
	r = REAL_VALUE_NEGATE (r);
	fprintf (stream, "%s", fp_const_from_val (&r));
      }
      return;

    case 'B':			/* Bitwise inverse of a constant.  */
      if (GET_CODE (x) == CONST_INT)
	fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		 "%d",
#else
		 "%ld",
#endif
		 ARM_SIGN_EXTEND (~ INTVAL (x)));
      else
	{
	  putc ('~', stream);
	  output_addr_const (stream, x);
	}
      return;

    case 'i':			/* Arithmetic mnemonic, first arg shifted.  */
      fprintf (stream, "%s", arithmetic_instr (x, 1));
      return;

    case 'I':			/* Arithmetic mnemonic, no shifted arg.  */
      fprintf (stream, "%s", arithmetic_instr (x, 0));
      return;

    case 'S':			/* Shift mnemonic plus its count (register
				   operand when shift_op flags -1).  */
      {
	HOST_WIDE_INT val;

	fprintf (stream, "%s ", shift_op (x, &val));
	if (val == -1)
	  arm_print_operand (stream, XEXP (x, 1), 0);
	else
	  fprintf (stream,
#if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
		   "#%d",
#else
		   "#%ld",
#endif
		   val);
      }
      return;

    case 'R':			/* The register following X's register.  */
      if (REGNO (x) > 15)
	abort ();
      fputs (ARM_REG_PREFIX, stream);
      fputs (reg_names[REGNO (x) + 1], stream);
      return;

    case 'm':			/* Base register of the memory operand X.  */
      fputs (ARM_REG_PREFIX, stream);
      if (GET_CODE (XEXP (x, 0)) == REG)
	fputs (reg_names[REGNO (XEXP (x, 0))], stream);
      else
	fputs (reg_names[REGNO (XEXP (XEXP (x, 0), 0))], stream);
      return;

    case 'M':			/* ldm/stm-style register range covering the
				   words of X's mode, rounded up.  */
      fprintf (stream, "{%s%s-%s%s}", ARM_REG_PREFIX, reg_names[REGNO (x)],
	       ARM_REG_PREFIX, reg_names[REGNO (x) - 1
					 + ((GET_MODE_SIZE (GET_MODE (x))
					     + GET_MODE_SIZE (SImode) - 1)
					    / GET_MODE_SIZE (SImode))]);
      return;

    case 'd':			/* Condition code of comparison X.  */
      if (x)
	fputs (arm_condition_codes[get_arm_condition_code (x)],
	       stream);
      return;

    case 'D':			/* Inverse condition code; suppressed for
				   CCFPEmode / FP comparisons where !GE does
				   not imply LT (see comment above).  */
      if (x && (flag_fast_math
		|| GET_CODE (x) == EQ || GET_CODE (x) == NE
		|| (GET_MODE (XEXP (x, 0)) != CCFPEmode
		    && (GET_MODE_CLASS (GET_MODE (XEXP (x, 0)))
			!= MODE_FLOAT))))
	fputs (arm_condition_codes[ARM_INVERSE_CONDITION_CODE
				   (get_arm_condition_code (x))],
	       stream);
      return;

    default:			/* No code: print the operand itself.  */
      if (x == 0)
	abort ();

      if (GET_CODE (x) == REG)
	{
	  fputs (ARM_REG_PREFIX, stream);
	  fputs (reg_names[REGNO (x)], stream);
	}
      else if (GET_CODE (x) == MEM)
	{
	  output_memory_reference_mode = GET_MODE (x);
	  output_address (XEXP (x, 0));
	}
      else if (GET_CODE (x) == CONST_DOUBLE)
	fprintf (stream, "#%s", fp_immediate_constant (x));
      else if (GET_CODE (x) == NEG)
	abort ();		/* This should never happen now. */
      else
	{
	  fputc ('#', stream);
	  output_addr_const (stream, x);
	}
    }
}
2137
2138 /* Increase the `arm_text_location' by AMOUNT if we're in the text
2139 segment. */
2140
2141 void
2142 arm_increase_location (amount)
2143 int amount;
2144 {
2145 if (in_text_section ())
2146 arm_text_location += amount;
2147 }
2148
2149
2150 /* Output a label definition. If this label is within the .text segment, it
2151 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
2152 Maybe GCC remembers names not starting with a `*' for a long time, but this
2153 is a minority anyway, so we just make a copy. Do not store the leading `*'
2154 if the name starts with one. */
2155
void
arm_asm_output_label (stream, name)
FILE *stream;
char *name;
{
  char *real_name, *s;
  struct label_offset *cur;
  int hash = 0;

  assemble_name (stream, name);
  fputs (":\n", stream);
  /* Only text-segment labels are recorded for later `llc' lookup.  */
  if (! in_text_section ())
    return;

  /* Copy the name: strip a leading `*' (verbatim marker), otherwise
     prepend the `_' the assembler-level name carries.  */
  if (name[0] == '*')
    {
      real_name = xmalloc (1 + strlen (&name[1]));
      strcpy (real_name, &name[1]);
    }
  else
    {
      real_name = xmalloc (2 + strlen (name));
      strcpy (real_name, "_");
      strcat (real_name, name);
    }
  /* Simple additive hash; must stay identical to the hash computed in
     output_load_symbol.  NOTE(review): with a signed `char', non-ASCII
     bytes would make HASH negative before the `%' -- presumably label
     names are plain ASCII; confirm.  */
  for (s = real_name; *s; s++)
    hash += *s;

  hash = hash % LABEL_HASH_SIZE;
  cur = (struct label_offset *) xmalloc (sizeof (struct label_offset));
  cur->name = real_name;
  cur->offset = arm_text_location;
  cur->cdr = offset_table[hash];
  offset_table[hash] = cur;
}
2191
2192 /* Load a symbol that is known to be in the text segment into a register.
2193 This should never be called when not optimizing. */
2194
char *
output_load_symbol (insn, operands)
rtx insn;
rtx *operands;
{
  char *s;
  char *name = XSTR (operands[1], 0);
  struct label_offset *he;
  int hash = 0;
  int offset;
  unsigned int mask, never_mask = 0xffffffff;
  int shift, inst;
  char buffer[100];

  /* The offset table is only maintained when optimizing, and only
     verbatim (`*'-prefixed) names are entered into it.  */
  if (optimize == 0 || *name != '*')
    abort ();

  /* Same additive hash as arm_asm_output_label (skip the `*').  */
  for (s = &name[1]; *s; s++)
    hash += *s;

  hash = hash % LABEL_HASH_SIZE;
  he = offset_table[hash];
  while (he && strcmp (he->name, &name[1]))
    he = he->cdr;

  if (!he)
    abort ();

  /* Backward distance from this insn's pc (which reads 8 bytes ahead)
     to the label.  */
  offset = (arm_text_location + insn_addresses[INSN_UID (insn)]
	    + get_prologue_size () + 8 - he->offset);
  if (offset < 0)
    abort ();

  /* When generating the instructions, we never mask out the bits that we
     think will be always zero, then if a mistake has occured somewhere, the
     assembler will spot it and generate an error.  */

  /* If the symbol is word aligned then we might be able to reduce the
     number of loads.  */
  shift = ((offset & 3) == 0) ? 2 : 0;

  /* Clear the bits from NEVER_MASK that will be orred in with the individual
     instructions.  */
  for (; shift < 32; shift += 8)
    {
      mask = 0xff << shift;
      if ((offset & mask) || ((unsigned) offset) > mask)
	never_mask &= ~mask;
    }

  inst = 8;
  mask = 0xff << (shift - 32);

  /* Emit one SUB per needed 8-bit chunk of the offset.  */
  while (mask && (never_mask & mask) == 0)
    {
      if (inst == 8)
	{
	  /* First instruction subtracts from pc.  */
	  strcpy (buffer, "sub%?\t%0, %|pc, #(8 + . -%a1)");
	  if ((never_mask | mask) != 0xffffffff)
	    sprintf (buffer + strlen (buffer), " & 0x%x", mask | never_mask);
	}
      else
	sprintf (buffer, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
		 inst, mask | never_mask);

      output_asm_insn (buffer, operands);
      mask <<= 8;
      inst -= 4;
    }

  return "";
}
2267
2268 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
2269 directive hence this hack, which works by reserving some `.space' in the
2270 bss segment directly.
2271
2272 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
2273 define STATIC COMMON space but merely STATIC BSS space. */
2274
2275 void
2276 output_lcomm_directive (stream, name, size, rounded)
2277 FILE *stream;
2278 char *name;
2279 int size, rounded;
2280 {
2281 fprintf (stream, "\n\t.bss\t%c .lcomm\n", ARM_COMMENT_CHAR);
2282 assemble_name (stream, name);
2283 fprintf (stream, ":\t.space\t%d\n", rounded);
2284 if (in_text_section ())
2285 fputs ("\n\t.text\n", stream);
2286 else
2287 fputs ("\n\t.data\n", stream);
2288 }
2289 \f
2290 /* A finite state machine takes care of noticing whether or not instructions
2291 can be conditionally executed, and thus decrease execution time and code
2292 size by deleting branch instructions. The fsm is controlled by
2293 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2294
2295 /* The state of the fsm controlling condition codes are:
2296 0: normal, do nothing special
2297 1: make ASM_OUTPUT_OPCODE not output this instruction
2298 2: make ASM_OUTPUT_OPCODE not output this instruction
2299 3: make instructions conditional
2300 4: make instructions conditional
2301
2302 State transitions (state->state by whom under condition):
2303 0 -> 1 final_prescan_insn if the `target' is a label
2304 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2305 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2306 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2307 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2308 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2309 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2310 (the target insn is arm_target_insn).
2311
2312 If the jump clobbers the conditions then we use states 2 and 4.
2313
2314 A similar thing can be done with conditional return insns.
2315
2316 XXX In case the `target' is an unconditional branch, this conditionalising
2317 of the instructions always reduces code size, but not always execution
2318 time. But then, I want to reduce the code size to somewhere near what
2319 /bin/cc produces. */
2320
2321 /* Returns the index of the ARM condition code string in
2322 `arm_condition_codes'. COMPARISON should be an rtx like
2323 `(eq (...) (...))'. */
2324
2325 int
2326 get_arm_condition_code (comparison)
2327 rtx comparison;
2328 {
2329 switch (GET_CODE (comparison))
2330 {
2331 case NE: return (1);
2332 case EQ: return (0);
2333 case GE: return (10);
2334 case GT: return (12);
2335 case LE: return (13);
2336 case LT: return (11);
2337 case GEU: return (2);
2338 case GTU: return (8);
2339 case LEU: return (9);
2340 case LTU: return (3);
2341 default: abort ();
2342 }
2343 /*NOTREACHED*/
2344 return (42);
2345 }
2346
2347
/* Machine-dependent hook (FINAL_PRESCAN_INSN) called by final before each
   insn is output.

   This drives the ARM "ccfsm" state machine that replaces a short forward
   conditional branch (skipping at most MAX_INSNS_SKIPPED insns) with
   conditional execution of the insns it would skip.  As used in this
   function, ARM_CCFSM_STATE is:

     0 - not conditionalizing anything;
     1 - a suitable conditional branch to a nearby label was found; the
	 condition for the following insns is left in ARM_CURRENT_CC and
	 the stopping label number in ARM_TARGET_LABEL;
     2 - as 1, but the stop point is an insn (ARM_TARGET_INSN), either
	 because the target was a RETURN (or reached via a barrier) or
	 because the branch clobbers the condition codes when taken;
     3 - set elsewhere (presumably by the output routines) after a
	 successful run; here it means the trick may be repeated when this
	 insn is an unconditional branch/return immediately before the
	 previous target label;
     4 - scanning forward for ARM_TARGET_INSN; reaching it resets to 0.

   NOTE(review): how states 1-3 affect the actual assembler output is
   handled outside this function -- confirm against the output patterns.

   INSN is the insn about to be output.  OPVEC and NOPERANDS are the
   operand vector extracted by final; they are unused here, but querying
   insn attributes below trashes recog's operand array, so recog () is
   re-run on INSN before returning (see the comment at the end).  */
2348 void
2349 final_prescan_insn (insn, opvec, noperands)
2350      rtx insn;
2351      rtx *opvec;
2352      int noperands;
2353 {
2354   /* BODY will hold the body of INSN. */
2355   register rtx body = PATTERN (insn);
2356 
2357   /* This will be 1 if trying to repeat the trick, and things need to be
2358      reversed if it appears to fail. */
2359   int reverse = 0;
2360 
2361   /* JUMP_CLOBBERS being one implies that the conditions if a branch is
2362      taken are clobbered, even if the rtl suggests otherwise.  It also
2363      means that we have to grub around within the jump expression to find
2364      out what the conditions are when the jump isn't taken. */
2365   int jump_clobbers = 0;
2366 
2367   /* If we start with a return insn, we only succeed if we find another one. */
2368   int seeking_return = 0;
2369 
2370   /* START_INSN will hold the insn from where we start looking.  This is the
2371      first insn after the following code_label if REVERSE is true. */
2372   rtx start_insn = insn;
2373 
2374   /* If in state 4, check if the target branch is reached, in order to
2375      change back to state 0. */
2376   if (arm_ccfsm_state == 4)
2377     {
2378       if (insn == arm_target_insn)
2379       {
2380 	arm_target_insn = NULL;
2381 	arm_ccfsm_state = 0;
2382       }
2383       return;
2384     }
2385 
2386   /* If in state 3, it is possible to repeat the trick, if this insn is an
2387      unconditional branch to a label, and immediately following this branch
2388      is the previous target label which is only used once, and the label this
2389      branch jumps to is not too far off. */
2390   if (arm_ccfsm_state == 3)
2391     {
2392       if (simplejump_p (insn))
2393 	{
2394 	  start_insn = next_nonnote_insn (start_insn);
2395 	  if (GET_CODE (start_insn) == BARRIER)
2396 	    {
2397 	      /* XXX Isn't this always a barrier? */
2398 	      start_insn = next_nonnote_insn (start_insn);
2399 	    }
2400 	  if (GET_CODE (start_insn) == CODE_LABEL
2401 	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2402 	      && LABEL_NUSES (start_insn) == 1)
2403 	    reverse = TRUE;
2404 	  else
2405 	    return;
2406 	}
2407       else if (GET_CODE (body) == RETURN)
2408         {
	  /* Same test as above, but the retry must also end in a RETURN.  */
2409 	  start_insn = next_nonnote_insn (start_insn);
2410 	  if (GET_CODE (start_insn) == BARRIER)
2411 	    start_insn = next_nonnote_insn (start_insn);
2412 	  if (GET_CODE (start_insn) == CODE_LABEL
2413 	      && CODE_LABEL_NUMBER (start_insn) == arm_target_label
2414 	      && LABEL_NUSES (start_insn) == 1)
2415 	    {
2416 	      reverse = TRUE;
2417 	      seeking_return = 1;
2418 	    }
2419 	  else
2420 	    return;
2421         }
2422       else
2423 	return;
2424     }
2425 
  /* Only state 0, or a retry from state 3 with REVERSE set, may proceed.  */
2426   if (arm_ccfsm_state != 0 && !reverse)
2427     abort ();
2428   if (GET_CODE (insn) != JUMP_INSN)
2429     return;
2430 
2431   /* This jump might appear in a PARALLEL with a clobber of the condition
2432      codes; the jump should always come first.  */
2433   if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2434     body = XVECEXP (body, 0, 0);
2435 
2436 #if 0
2437   /* If this is a conditional return then we don't want to know */
2438   if (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2439       && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE
2440       && (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN
2441 	  || GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN))
2442     return;
2443 #endif
2444 
  /* Either a forced retry (REVERSE), or a conditional jump of the form
     (set (pc) (if_then_else ...)).  */
2445   if (reverse
2446       || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2447 	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2448     {
2449       int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2450       /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2451       int then_not_else = TRUE;
2452       rtx this_insn = start_insn, label = 0;
2453 
2454       if (get_attr_conds (insn) == CONDS_JUMP_CLOB)
2455 	jump_clobbers = 1;
2456 
2457       /* Register the insn jumped to. */
2458       if (reverse)
2459 	{
2460 	  if (!seeking_return)
2461 	    label = XEXP (SET_SRC (body), 0);
2462 	}
2463       else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2464 	label = XEXP (XEXP (SET_SRC (body), 1), 0);
2465       else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2466 	{
2467 	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
2468 	  then_not_else = FALSE;
2469 	}
2470       else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2471 	seeking_return = 1;
2472       else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2473         {
2474 	  seeking_return = 1;
2475 	  then_not_else = FALSE;
2476         }
2477       else
2478 	abort ();
2479 
2480       /* See how many insns this branch skips, and what kind of insns.  If all
2481 	 insns are okay, and the label or unconditional branch to the same
2482 	 label is not too far away, succeed. */
2483       for (insns_skipped = 0;
2484 	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2485 	   insns_skipped++)
2486 	{
2487 	  rtx scanbody;
2488 
2489 	  this_insn = next_nonnote_insn (this_insn);
2490 	  if (!this_insn)
2491 	    break;
2492 
2493 	  scanbody = PATTERN (this_insn);
2494 
2495 	  switch (GET_CODE (this_insn))
2496 	    {
2497 	    case CODE_LABEL:
2498 	      /* Succeed if it is the target label, otherwise fail since
2499 		 control falls in from somewhere else. */
2500 	      if (this_insn == label)
2501 		{
2502 		  if (jump_clobbers)
2503 		    {
2504 		      arm_ccfsm_state = 2;
2505 		      this_insn = next_nonnote_insn (this_insn);
2506 		    }
2507 		  else
2508 		    arm_ccfsm_state = 1;
2509 		  succeed = TRUE;
2510 		}
2511 	      else
2512 		fail = TRUE;
2513 	      break;
2514 
2515 	    case BARRIER:
2516 	      /* Succeed if the following insn is the target label.
2517 		 Otherwise fail.  
2518 		 If return insns are used then the last insn in a function 
2519 		 will be a barrier. */
2520 	      this_insn = next_nonnote_insn (this_insn);
2521 	      if (this_insn && this_insn == label)
2522 		{
2523 		  if (jump_clobbers)
2524 		    {
2525 		      arm_ccfsm_state = 2;
2526 		      this_insn = next_nonnote_insn (this_insn);
2527 		    }
2528 		  else
2529 		    arm_ccfsm_state = 1;
2530 		  succeed = TRUE;
2531 		}
2532 	      else
2533 		fail = TRUE;
2534 	      break;
2535 
2536 	    case CALL_INSN:
2537 	      /* The arm 6xx uses full 32 bit addresses so the cc is not 
2538 		 preserved over calls */
2539 	      if (TARGET_6)
2540 		fail = TRUE;
2541 	      break;
2542 	    case JUMP_INSN:
2543       	      /* If this is an unconditional branch to the same label, succeed.
2544 		 If it is to another label, do nothing.  If it is conditional,
2545 		 fail. */
2546 	      /* XXX Probably, the test for the SET and the PC are unnecessary. */
2547 
2548 	      if (GET_CODE (scanbody) == SET
2549 		  && GET_CODE (SET_DEST (scanbody)) == PC)
2550 		{
2551 		  if (GET_CODE (scanbody) == SET
		      /* NOTE(review): the line above is the outer SET test;
			 the interesting cases are on SET_SRC below.  */
		      && 0) ;
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2552 		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2553 		    {
2554 		      arm_ccfsm_state = 2;
2555 		      succeed = TRUE;
2556 		    }
2557 		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2558 		    fail = TRUE;
2559 		}
2560 	      else if (GET_CODE (scanbody) == RETURN
2561 		       && seeking_return)
2562 	        {
2563 		  arm_ccfsm_state = 2;
2564 		  succeed = TRUE;
2565 	        }
2566 	      else if (GET_CODE (scanbody) == PARALLEL)
2567 	        {
		  /* E.g. a casesi dispatch; only safe if it leaves the
		     condition codes alone.  */
2568 		  switch (get_attr_conds (this_insn))
2569 		    {
2570 		    case CONDS_NOCOND:
2571 		      break;
2572 		    default:
2573 		      fail = TRUE;
2574 		      break;
2575 		    }
2576 		}
2577 	      break;
2578 
2579 	    case INSN:
2580 	      /* Instructions using or affecting the condition codes make it
2581 		 fail. */
2582 	      if ((GET_CODE (scanbody) == SET
2583 		   || GET_CODE (scanbody) == PARALLEL)
2584 		  && get_attr_conds (this_insn) != CONDS_NOCOND)
2585 		fail = TRUE;
2586 	      break;
2587 
2588 	    default:
2589 	      break;
2590 	    }
2591 	}
2592       if (succeed)
2593 	{
	  /* Record where conditional execution must stop: either a label
	     number (state 1) or the target insn itself (state 2 and the
	     RETURN-seeking case).  */
2594 	  if ((!seeking_return) && (arm_ccfsm_state == 1 || reverse))
2595 	    arm_target_label = CODE_LABEL_NUMBER (label);
2596 	  else if (seeking_return || arm_ccfsm_state == 2)
2597 	    {
	      /* Skip any USE insns (e.g. before a return) to find the real
		 stopping insn; a BARRIER or CODE_LABEL here is impossible.  */
2598 	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2599 	        {
2600 		  this_insn = next_nonnote_insn (this_insn);
2601 		  if (this_insn && (GET_CODE (this_insn) == BARRIER
2602 				    || GET_CODE (this_insn) == CODE_LABEL))
2603 		    abort ();
2604 	        }
2605 	      if (!this_insn)
2606 	        {
2607 		  /* Oh, dear! we ran off the end.. give up.  Re-recognize
		     INSN first so recog_operand is valid for final.c.  */
2608 		  recog (PATTERN (insn), insn, NULL_PTR);
2609 		  arm_ccfsm_state = 0;
2610 		  arm_target_insn = NULL;
2611 		  return;
2612 	        }
2613 	      arm_target_insn = this_insn;
2614 	    }
2615 	  else
2616 	    abort ();
2617 	  if (jump_clobbers)
2618 	    {
2619 	      if (reverse)
2620 		abort ();
	      /* Grub inside the IF_THEN_ELSE (see JUMP_CLOBBERS above): the
		 real comparison is nested one level deeper, and an AND or
		 NE wrapper means the condition sense must be inverted.  */
2621 	      arm_current_cc = 
2622 		  get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body),
2623 							    0), 0), 1));
2624 	      if (GET_CODE (XEXP (XEXP (SET_SRC (body), 0), 0)) == AND)
2625 		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2626 	      if (GET_CODE (XEXP (SET_SRC (body), 0)) == NE)
2627 		arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2628 	    }
2629 	  else
2630 	    {
2631 	      /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2632 		 what it was. */
2633 	      if (!reverse)
2634 		arm_current_cc = get_arm_condition_code (XEXP (SET_SRC (body),
2635 							       0));
2636 	    }
2637 	  
	  /* The skipped insns execute when the branch is NOT taken, so
	     invert the condition unless the label was in the ELSE arm.  */
2638 	  if (reverse || then_not_else)
2639 	    arm_current_cc = ARM_INVERSE_CONDITION_CODE (arm_current_cc);
2640 	}
2641       /* restore recog_operand (getting the attributes of other insns can
2642 	 destroy this array, but final.c assumes that it remains intact
2643 	 across this call; since the insn has been recognized already we
2644 	 call recog direct). */
2645       recog (PATTERN (insn), insn, NULL_PTR);
2646     }
2647 }
2648
2649 /* EOF */