bfin.c (legitimize_pic_address): Lose dead code that tests for CONSTANT_POOL_ADDRESS_P.
[gcc.git] / gcc / config / bfin / bfin.c
1 /* The Blackfin code generation auxiliary output file.
2 Copyright (C) 2005, 2006 Free Software Foundation, Inc.
3 Contributed by Analog Devices.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 2, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to
19 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
20 Boston, MA 02110-1301, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "insn-codes.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "tree.h"
37 #include "flags.h"
38 #include "except.h"
39 #include "function.h"
40 #include "input.h"
41 #include "target.h"
42 #include "target-def.h"
43 #include "expr.h"
44 #include "toplev.h"
45 #include "recog.h"
46 #include "optabs.h"
47 #include "ggc.h"
48 #include "integrate.h"
49 #include "cgraph.h"
50 #include "langhooks.h"
51 #include "bfin-protos.h"
52 #include "tm-preds.h"
53 #include "gt-bfin.h"
54 #include "basic-block.h"
55
/* A C structure for machine-specific, per-function data.
   This is added to the cfun structure.  */
struct machine_function GTY(())
{
  /* Nonzero if this function contains hardware loops; presumably set
     when LSETUP-based loop insns are generated — TODO confirm at the
     point of use (not visible in this file chunk).  */
  int has_hardware_loops;
};

/* Test and compare insns in bfin.md store the information needed to
   generate branch and scc insns here.  */
rtx bfin_compare_op0, bfin_compare_op1;

/* RTX for condition code flag register and RETS register.  */
extern GTY(()) rtx bfin_cc_rtx;
extern GTY(()) rtx bfin_rets_rtx;
rtx bfin_cc_rtx, bfin_rets_rtx;

/* Number of argument-passing registers; computed once in
   output_file_start by scanning ARG_REGS below.  */
int max_arg_registers = 0;

/* Arrays used when emitting register names.  */
const char *short_reg_names[] = SHORT_REGISTER_NAMES;
const char *high_reg_names[] = HIGH_REGISTER_NAMES;
const char *dregs_pair_names[] = DREGS_PAIR_NAMES;
const char *byte_reg_names[] = BYTE_REGISTER_NAMES;

/* Hard register numbers usable for argument passing; the table is
   terminated by a negative entry (see the scan in output_file_start).  */
static int arg_regs[] = FUNCTION_ARG_REGISTERS;

/* Nonzero if -mshared-library-id was given.  */
static int bfin_lib_id_given;
84
/* Emit an assembler directive that makes NAME globally visible.  The
   Blackfin assembler syntax terminates directives with a semicolon.  */

static void
bfin_globalize_label (FILE *stream, const char *name)
{
  fputs (".global ", stream);
  assemble_name (stream, name);
  fputs (";\n", stream);
}
93
94 static void
95 output_file_start (void)
96 {
97 FILE *file = asm_out_file;
98 int i;
99
100 fprintf (file, ".file \"%s\";\n", input_filename);
101
102 for (i = 0; arg_regs[i] >= 0; i++)
103 ;
104 max_arg_registers = i; /* how many arg reg used */
105 }
106
107 /* Called early in the compilation to conditionally modify
108 fixed_regs/call_used_regs. */
109
110 void
111 conditional_register_usage (void)
112 {
113 /* initialize condition code flag register rtx */
114 bfin_cc_rtx = gen_rtx_REG (BImode, REG_CC);
115 bfin_rets_rtx = gen_rtx_REG (Pmode, REG_RETS);
116 }
117
118 /* Examine machine-dependent attributes of function type FUNTYPE and return its
119 type. See the definition of E_FUNKIND. */
120
121 static e_funkind funkind (tree funtype)
122 {
123 tree attrs = TYPE_ATTRIBUTES (funtype);
124 if (lookup_attribute ("interrupt_handler", attrs))
125 return INTERRUPT_HANDLER;
126 else if (lookup_attribute ("exception_handler", attrs))
127 return EXCPT_HANDLER;
128 else if (lookup_attribute ("nmi_handler", attrs))
129 return NMI_HANDLER;
130 else
131 return SUBROUTINE;
132 }
133 \f
/* Legitimize PIC addresses.  If the address is already position-independent,
   we return ORIG.  Newly generated position-independent addresses go into a
   reg.  This is REG if nonzero, otherwise we allocate register(s) as
   necessary.  PICREG is the register holding the pointer to the PIC offset
   table.  */

static rtx
legitimize_pic_address (rtx orig, rtx reg, rtx picreg)
{
  rtx addr = orig;
  rtx new = orig;

  if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
    {
      int unspec;
      rtx tmp;

      /* Select the flavor of GOT access: ID-shared-library builds use
	 the generic PIC unspec; otherwise (FDPIC) function symbols go
	 through a function-descriptor GOT entry and data symbols through
	 the plain FDPIC unspec.  */
      if (TARGET_ID_SHARED_LIBRARY)
	unspec = UNSPEC_MOVE_PIC;
      else if (GET_CODE (addr) == SYMBOL_REF
	       && SYMBOL_REF_FUNCTION_P (addr))
	unspec = UNSPEC_FUNCDESC_GOT17M4;
      else
	unspec = UNSPEC_MOVE_FDPIC;

      if (reg == 0)
	{
	  /* New pseudos may only be created before reload.  */
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Load the address out of the GOT slot: REG = [PICREG + unspec].  */
      tmp = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, addr), unspec);
      new = gen_const_mem (Pmode, gen_rtx_PLUS (Pmode, picreg, tmp));

      emit_move_insn (reg, new);
      if (picreg == pic_offset_table_rtx)
	current_function_uses_pic_offset_table = 1;
      return reg;
    }

  else if (GET_CODE (addr) == CONST || GET_CODE (addr) == PLUS)
    {
      rtx base;

      /* Strip a CONST wrapper; what remains must be a PLUS.  */
      if (GET_CODE (addr) == CONST)
	{
	  addr = XEXP (addr, 0);
	  gcc_assert (GET_CODE (addr) == PLUS);
	}

      /* Already expressed relative to the PIC register: nothing to do.  */
      if (XEXP (addr, 0) == picreg)
	return orig;

      if (reg == 0)
	{
	  gcc_assert (!no_new_pseudos);
	  reg = gen_reg_rtx (Pmode);
	}

      /* Recursively legitimize both operands of the PLUS; REG may be
	 reused for the second operand only if the first did not claim
	 it.  */
      base = legitimize_pic_address (XEXP (addr, 0), reg, picreg);
      addr = legitimize_pic_address (XEXP (addr, 1),
				     base == reg ? NULL_RTX : reg,
				     picreg);

      if (GET_CODE (addr) == CONST_INT)
	{
	  gcc_assert (! reload_in_progress && ! reload_completed);
	  addr = force_reg (Pmode, addr);
	}

      /* Re-associate (base + (x + const)) as ((base + x) + const).  */
      if (GET_CODE (addr) == PLUS && CONSTANT_P (XEXP (addr, 1)))
	{
	  base = gen_rtx_PLUS (Pmode, base, XEXP (addr, 0));
	  addr = XEXP (addr, 1);
	}

      return gen_rtx_PLUS (Pmode, base, addr);
    }

  return new;
}
215 \f
216 /* Stack frame layout. */
217
/* Compute the number of DREGS to save with a push_multiple operation.
   This could include registers that aren't modified in the function,
   since push_multiple only takes a range of registers.
   If IS_INTHANDLER, then everything that is live must be saved, even
   if normally call-clobbered.  */

static int
n_dregs_to_save (bool is_inthandler)
{
  unsigned i;

  /* Scan upward from R0; the first register that must be preserved
     determines the contiguous range [i, R7] that push_multiple saves.  */
  for (i = REG_R0; i <= REG_R7; i++)
    {
      if (regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
	return REG_R7 - i + 1;

      /* Registers used to pass EH data (EH_RETURN_DATA_REGNO) must also
	 be treated as needing a save when eh_return is used.  */
      if (current_function_calls_eh_return)
	{
	  unsigned j;
	  for (j = 0; ; j++)
	    {
	      unsigned test = EH_RETURN_DATA_REGNO (j);
	      if (test == INVALID_REGNUM)
		break;
	      if (test == i)
		return REG_R7 - i + 1;
	    }
	}

    }
  return 0;
}
250
251 /* Like n_dregs_to_save, but compute number of PREGS to save. */
252
253 static int
254 n_pregs_to_save (bool is_inthandler)
255 {
256 unsigned i;
257
258 for (i = REG_P0; i <= REG_P5; i++)
259 if ((regs_ever_live[i] && (is_inthandler || ! call_used_regs[i]))
260 || (!TARGET_FDPIC
261 && i == PIC_OFFSET_TABLE_REGNUM
262 && (current_function_uses_pic_offset_table
263 || (TARGET_ID_SHARED_LIBRARY && ! current_function_is_leaf))))
264 return REG_P5 - i + 1;
265 return 0;
266 }
267
268 /* Determine if we are going to save the frame pointer in the prologue. */
269
270 static bool
271 must_save_fp_p (void)
272 {
273 return frame_pointer_needed || regs_ever_live[REG_FP];
274 }
275
276 static bool
277 stack_frame_needed_p (void)
278 {
279 /* EH return puts a new return address into the frame using an
280 address relative to the frame pointer. */
281 if (current_function_calls_eh_return)
282 return true;
283 return frame_pointer_needed;
284 }
285
286 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
287 must save all registers; this is used for interrupt handlers.
288 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
289 this for an interrupt (or exception) handler. */
290
291 static void
292 expand_prologue_reg_save (rtx spreg, int saveall, bool is_inthandler)
293 {
294 int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
295 int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
296 int dregno = REG_R7 + 1 - ndregs;
297 int pregno = REG_P5 + 1 - npregs;
298 int total = ndregs + npregs;
299 int i;
300 rtx pat, insn, val;
301
302 if (total == 0)
303 return;
304
305 val = GEN_INT (-total * 4);
306 pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 2));
307 XVECEXP (pat, 0, 0) = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, val),
308 UNSPEC_PUSH_MULTIPLE);
309 XVECEXP (pat, 0, total + 1) = gen_rtx_SET (VOIDmode, spreg,
310 gen_rtx_PLUS (Pmode, spreg,
311 val));
312 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, total + 1)) = 1;
313 for (i = 0; i < total; i++)
314 {
315 rtx memref = gen_rtx_MEM (word_mode,
316 gen_rtx_PLUS (Pmode, spreg,
317 GEN_INT (- i * 4 - 4)));
318 rtx subpat;
319 if (ndregs > 0)
320 {
321 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
322 dregno++));
323 ndregs--;
324 }
325 else
326 {
327 subpat = gen_rtx_SET (VOIDmode, memref, gen_rtx_REG (word_mode,
328 pregno++));
329 npregs++;
330 }
331 XVECEXP (pat, 0, i + 1) = subpat;
332 RTX_FRAME_RELATED_P (subpat) = 1;
333 }
334 insn = emit_insn (pat);
335 RTX_FRAME_RELATED_P (insn) = 1;
336 }
337
/* Emit code to restore registers in the epilogue.  SAVEALL is nonzero if we
   must save all registers; this is used for interrupt handlers.
   SPREG contains (reg:SI REG_SP).  IS_INTHANDLER is true if we're doing
   this for an interrupt (or exception) handler.  */

static void
expand_epilogue_reg_restore (rtx spreg, bool saveall, bool is_inthandler)
{
  int ndregs = saveall ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = saveall ? 6 : n_pregs_to_save (is_inthandler);
  int total = ndregs + npregs;
  int i, regno;
  rtx pat, insn;

  if (total == 0)
    return;

  /* Build a pop-multiple PARALLEL: element 0 readjusts SP upward, and
     elements 1..TOTAL reload the registers from the stack.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total + 1));
  XVECEXP (pat, 0, 0) = gen_rtx_SET (VOIDmode, spreg,
				     gen_rtx_PLUS (Pmode, spreg,
						   GEN_INT (total * 4)));

  /* The P registers were pushed last, so they come off the stack first.
     REGNO counts down from one past the top of the current class.  */
  if (npregs > 0)
    regno = REG_P5 + 1;
  else
    regno = REG_R7 + 1;

  for (i = 0; i < total; i++)
    {
      /* The first slot is at [SP] itself; later ones at [SP + 4*i].  */
      rtx addr = (i > 0
		  ? gen_rtx_PLUS (Pmode, spreg, GEN_INT (i * 4))
		  : spreg);
      rtx memref = gen_rtx_MEM (word_mode, addr);

      regno--;
      XVECEXP (pat, 0, i + 1)
	= gen_rtx_SET (VOIDmode, gen_rtx_REG (word_mode, regno), memref);

      /* After the last P register, continue with the D registers.  */
      if (npregs > 0)
	{
	  if (--npregs == 0)
	    regno = REG_R7 + 1;
	}
    }

  insn = emit_insn (pat);
  RTX_FRAME_RELATED_P (insn) = 1;
}
386
/* Perform any needed actions needed for a function that is receiving a
   variable number of arguments.

   CUM is as above.

   MODE and TYPE are the mode and type of the current parameter.

   PRETEND_SIZE is a variable that should be set to the amount of stack
   that must be pushed by the prolog to pretend that our caller pushed
   it.

   Normally, this macro will push all remaining incoming registers on the
   stack and set PRETEND_SIZE to the length of the registers pushed.

   Blackfin specific :
   - VDSP C compiler manual (our ABI) says that a variable args function
     should save the R0, R1 and R2 registers in the stack.
   - The caller will always leave space on the stack for the
     arguments that are passed in registers, so we dont have
     to leave any extra space.
   - now, the vastart pointer can access all arguments from the stack.  */

static void
setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type ATTRIBUTE_UNUSED, int *pretend_size,
			int no_rtl)
{
  rtx mem;
  int i;

  if (no_rtl)
    return;

  /* The move for named arguments will be generated automatically by the
     compiler.  We need to generate the move rtx for the unnamed arguments
     if they are in the first 3 words.  We assume at least 1 named argument
     exists, so we never generate [ARGP] = R0 here.  */

  for (i = cum->words + 1; i < max_arg_registers; i++)
    {
      /* Store argument register I into its caller-allocated slot,
	 UNITS_PER_WORD bytes per register above the arg pointer.  */
      mem = gen_rtx_MEM (Pmode,
			 plus_constant (arg_pointer_rtx, (i * UNITS_PER_WORD)));
      emit_move_insn (mem, gen_rtx_REG (Pmode, i));
    }

  /* The caller already reserved the space, so nothing is pretended.  */
  *pretend_size = 0;
}
435
436 /* Value should be nonzero if functions must have frame pointers.
437 Zero means the frame pointer need not be set up (and parms may
438 be accessed via the stack pointer) in functions that seem suitable. */
439
440 int
441 bfin_frame_pointer_required (void)
442 {
443 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
444
445 if (fkind != SUBROUTINE)
446 return 1;
447
448 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
449 so we have to override it for non-leaf functions. */
450 if (TARGET_OMIT_LEAF_FRAME_POINTER && ! current_function_is_leaf)
451 return 1;
452
453 return 0;
454 }
455
/* Return the number of registers pushed during the prologue.  */

static int
n_regs_saved_by_prologue (void)
{
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  bool is_inthandler = fkind != SUBROUTINE;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  /* "saveall" handlers and non-leaf interrupt handlers save everything.  */
  bool all = (lookup_attribute ("saveall", attrs) != NULL_TREE
	      || (is_inthandler && !current_function_is_leaf));
  int ndregs = all ? 8 : n_dregs_to_save (is_inthandler);
  int npregs = all ? 6 : n_pregs_to_save (is_inthandler);
  int n = ndregs + npregs;

  if (all || stack_frame_needed_p ())
    /* We use a LINK instruction in this case; it accounts for two
       extra words (cf. the -8 in emit_link_insn).  */
    n += 2;
  else
    {
      if (must_save_fp_p ())
	n++;
      if (! current_function_is_leaf)
	/* Non-leaf functions must save the return address (RETS).  */
	n++;
    }

  if (fkind != SUBROUTINE)
    {
      int i;

      /* Increment once for ASTAT.  */
      n++;

      /* RETE/X/N.  */
      if (lookup_attribute ("nesting", attrs))
	n++;

      /* Count the remaining saved registers; the accumulators A0/A1
	 occupy two words each.  */
      for (i = REG_P7 + 1; i < REG_CC; i++)
	if (all
	    || regs_ever_live[i]
	    || (!leaf_function_p () && call_used_regs[i]))
	  n += i == REG_A0 || i == REG_A1 ? 2 : 1;
    }
  return n;
}
500
501 /* Return the offset between two registers, one to be eliminated, and the other
502 its replacement, at the start of a routine. */
503
504 HOST_WIDE_INT
505 bfin_initial_elimination_offset (int from, int to)
506 {
507 HOST_WIDE_INT offset = 0;
508
509 if (from == ARG_POINTER_REGNUM)
510 offset = n_regs_saved_by_prologue () * 4;
511
512 if (to == STACK_POINTER_REGNUM)
513 {
514 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
515 offset += current_function_outgoing_args_size;
516 else if (current_function_outgoing_args_size)
517 offset += FIXED_STACK_AREA;
518
519 offset += get_frame_size ();
520 }
521
522 return offset;
523 }
524
/* Emit code to load a constant CONSTANT into register REG; setting
   RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
   Make sure that the insns we generate need not be split.  */

static void
frame_related_constant_load (rtx reg, HOST_WIDE_INT constant, bool related)
{
  rtx insn;
  rtx cst = GEN_INT (constant);

  if (constant >= -32768 && constant < 65536)
    /* The constant fits a single move-immediate.  */
    insn = emit_move_insn (reg, cst);
  else
    {
      /* We don't call split_load_immediate here, since dwarf2out.c can get
	 confused about some of the more clever sequences it can generate.
	 Emit an explicit high/low half pair instead.  */
      insn = emit_insn (gen_movsi_high (reg, cst));
      if (related)
	RTX_FRAME_RELATED_P (insn) = 1;
      insn = emit_insn (gen_movsi_low (reg, reg, cst));
    }
  /* Only the final insn of the sequence needs the flag here; the
     high-half insn was flagged above.  */
  if (related)
    RTX_FRAME_RELATED_P (insn) = 1;
}
549
550 /* Generate efficient code to add a value to the frame pointer. We
551 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
552 generated insns if FRAME is nonzero. */
553
554 static void
555 add_to_sp (rtx spreg, HOST_WIDE_INT value, int frame)
556 {
557 if (value == 0)
558 return;
559
560 /* Choose whether to use a sequence using a temporary register, or
561 a sequence with multiple adds. We can add a signed 7 bit value
562 in one instruction. */
563 if (value > 120 || value < -120)
564 {
565 rtx tmpreg = gen_rtx_REG (SImode, REG_P1);
566 rtx insn;
567
568 if (frame)
569 frame_related_constant_load (tmpreg, value, TRUE);
570 else
571 {
572 insn = emit_move_insn (tmpreg, GEN_INT (value));
573 if (frame)
574 RTX_FRAME_RELATED_P (insn) = 1;
575 }
576
577 insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
578 if (frame)
579 RTX_FRAME_RELATED_P (insn) = 1;
580 }
581 else
582 do
583 {
584 int size = value;
585 rtx insn;
586
587 if (size > 60)
588 size = 60;
589 else if (size < -60)
590 /* We could use -62, but that would leave the stack unaligned, so
591 it's no good. */
592 size = -60;
593
594 insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (size)));
595 if (frame)
596 RTX_FRAME_RELATED_P (insn) = 1;
597 value -= size;
598 }
599 while (value != 0);
600 }
601
/* Generate a LINK insn for a frame sized FRAME_SIZE.  If this constant
   is too large, generate a sequence of insns that has the same effect.
   SPREG contains (reg:SI REG_SP).  */

static void
emit_link_insn (rtx spreg, HOST_WIDE_INT frame_size)
{
  HOST_WIDE_INT link_size = frame_size;
  rtx insn;
  int i;

  /* The LINK instruction's immediate field is limited; clamp to the
     largest supported frame.  */
  if (link_size > 262140)
    link_size = 262140;

  /* Use a LINK insn with as big a constant as possible, then subtract
     any remaining size from the SP.  The additional 8 bytes account for
     the two words LINK itself pushes (cf. the "+ 2" in
     n_regs_saved_by_prologue).  */
  insn = emit_insn (gen_link (GEN_INT (-8 - link_size)));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* Mark every SET inside the link PARALLEL frame-related, so the
     register saves are recorded for unwind info.  */
  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
    {
      rtx set = XVECEXP (PATTERN (insn), 0, i);
      gcc_assert (GET_CODE (set) == SET);
      RTX_FRAME_RELATED_P (set) = 1;
    }

  frame_size -= link_size;

  if (frame_size > 0)
    {
      /* Must use a call-clobbered PREG that isn't the static chain.  */
      rtx tmpreg = gen_rtx_REG (Pmode, REG_P1);

      frame_related_constant_load (tmpreg, -frame_size, TRUE);
      insn = emit_insn (gen_addsi3 (spreg, spreg, tmpreg));
      RTX_FRAME_RELATED_P (insn) = 1;
    }
}
640
641 /* Return the number of bytes we must reserve for outgoing arguments
642 in the current function's stack frame. */
643
644 static HOST_WIDE_INT
645 arg_area_size (void)
646 {
647 if (current_function_outgoing_args_size)
648 {
649 if (current_function_outgoing_args_size >= FIXED_STACK_AREA)
650 return current_function_outgoing_args_size;
651 else
652 return FIXED_STACK_AREA;
653 }
654 return 0;
655 }
656
/* Save RETS and FP, and allocate a stack frame.  ALL is true if the
   function must save all its registers (true only for certain interrupt
   handlers).  */

static void
do_link (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  /* Emit a real LINK insn when a full frame is needed; otherwise push
     RETS and/or FP individually and bump SP directly.  */
  if (all || stack_frame_needed_p ()
      || (must_save_fp_p () && ! current_function_is_leaf))
    emit_link_insn (spreg, frame_size);
  else
    {
      if (! current_function_is_leaf)
	{
	  /* Push the return address (RETS).  */
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       bfin_rets_rtx);
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      if (must_save_fp_p ())
	{
	  /* Push the frame pointer.  */
	  rtx pat = gen_movsi (gen_rtx_MEM (Pmode,
					    gen_rtx_PRE_DEC (Pmode, spreg)),
			       gen_rtx_REG (Pmode, REG_FP));
	  rtx insn = emit_insn (pat);
	  RTX_FRAME_RELATED_P (insn) = 1;
	}
      /* Allocate the locals and outgoing-args area.  */
      add_to_sp (spreg, -frame_size, 1);
    }
}
690
/* Like do_link, but used for epilogues to deallocate the stack frame.  */

static void
do_unlink (rtx spreg, HOST_WIDE_INT frame_size, bool all)
{
  frame_size += arg_area_size ();

  if (all || stack_frame_needed_p ())
    /* The frame was made with LINK; UNLINK undoes it in one insn.  */
    emit_insn (gen_unlink ());
  else
    {
      rtx postinc = gen_rtx_MEM (Pmode, gen_rtx_POST_INC (Pmode, spreg));

      /* Deallocate the locals, then pop FP and RETS in the reverse of
	 the order do_link pushed them.  */
      add_to_sp (spreg, frame_size, 0);
      if (must_save_fp_p ())
	{
	  rtx fpreg = gen_rtx_REG (Pmode, REG_FP);
	  emit_move_insn (fpreg, postinc);
	  /* Emit a USE so the restore isn't removed as dead.  */
	  emit_insn (gen_rtx_USE (VOIDmode, fpreg));
	}
      if (! current_function_is_leaf)
	{
	  emit_move_insn (bfin_rets_rtx, postinc);
	  emit_insn (gen_rtx_USE (VOIDmode, bfin_rets_rtx));
	}
    }
}
718
/* Generate a prologue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler prologues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_prologue (rtx spreg, e_funkind fkind)
{
  int i;
  HOST_WIDE_INT frame_size = get_frame_size ();
  rtx predec1 = gen_rtx_PRE_DEC (SImode, spreg);
  rtx predec = gen_rtx_MEM (SImode, predec1);
  rtx insn;
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;
  tree kspisusp = lookup_attribute ("kspisusp", attrs);

  /* The "kspisusp" attribute: on entry, load the stack pointer from
     the USP register.  */
  if (kspisusp)
    {
      insn = emit_move_insn (spreg, gen_rtx_REG (Pmode, REG_USP));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* We need space on the stack in case we need to save the argument
     registers.  */
  if (fkind == EXCPT_HANDLER)
    {
      insn = emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (-12)));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* ASTAT is pushed first (and therefore popped last in the epilogue).  */
  insn = emit_move_insn (predec, gen_rtx_REG (SImode, REG_ASTAT));
  RTX_FRAME_RELATED_P (insn) = 1;

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save everything here.  */
  if (!current_function_is_leaf)
    all = true;
  expand_prologue_reg_save (spreg, all, true);

  /* Push the live registers above P7 individually; the accumulators
     A0/A1 are saved in PDImode (they are wider than a word).  */
  for (i = REG_P7 + 1; i < REG_CC; i++)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  insn = emit_move_insn (gen_rtx_MEM (PDImode, predec1),
				 gen_rtx_REG (PDImode, i));
	else
	  insn = emit_move_insn (predec, gen_rtx_REG (SImode, i));
	RTX_FRAME_RELATED_P (insn) = 1;
      }

  /* Nested handlers also save the appropriate return register:
     RETX for exceptions, RETN for NMIs, RETI for interrupts.  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      insn = emit_move_insn (predec, srcreg);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  do_link (spreg, frame_size, all);

  /* For exception handlers, set up R0 = masked SEQSTAT, R1 = SP,
     R2 = FP + 8.  Each insn carries REG_MAYBE_DEAD so flow may delete
     it if the handler body ignores the value.  */
  if (fkind == EXCPT_HANDLER)
    {
      rtx r0reg = gen_rtx_REG (SImode, REG_R0);
      rtx r1reg = gen_rtx_REG (SImode, REG_R1);
      rtx r2reg = gen_rtx_REG (SImode, REG_R2);
      rtx insn;

      insn = emit_move_insn (r0reg, gen_rtx_REG (SImode, REG_SEQSTAT));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      /* Arithmetic-shift right then left by 26 clears the low 26 bits
	 of SEQSTAT, leaving only the (sign-extended) top bits in R0.  */
      insn = emit_insn (gen_ashrsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_ashlsi3 (r0reg, r0reg, GEN_INT (26)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r1reg, spreg);
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_move_insn (r2reg, gen_rtx_REG (Pmode, REG_FP));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
      insn = emit_insn (gen_addsi3 (r2reg, r2reg, GEN_INT (8)));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx,
					    NULL_RTX);
    }
}
809
/* Generate an epilogue suitable for a function of kind FKIND.  This is
   called for interrupt and exception handler epilogues.
   SPREG contains (reg:SI REG_SP).  */

static void
expand_interrupt_handler_epilogue (rtx spreg, e_funkind fkind)
{
  int i;
  rtx postinc1 = gen_rtx_POST_INC (SImode, spreg);
  rtx postinc = gen_rtx_MEM (SImode, postinc1);
  tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
  bool all = lookup_attribute ("saveall", attrs) != NULL_TREE;

  /* A slightly crude technique to stop flow from trying to delete "dead"
     insns.  */
  MEM_VOLATILE_P (postinc) = 1;

  do_unlink (spreg, get_frame_size (), all);

  /* Restore the return register saved by the "nesting" prologue code
     (RETX/RETN/RETI depending on the handler kind).  */
  if (lookup_attribute ("nesting", attrs))
    {
      rtx srcreg = gen_rtx_REG (Pmode, (fkind == EXCPT_HANDLER ? REG_RETX
					: fkind == NMI_HANDLER ? REG_RETN
					: REG_RETI));
      emit_move_insn (srcreg, postinc);
    }

  /* If we're calling other functions, they won't save their call-clobbered
     registers, so we must save (and restore) everything here.  */
  if (!current_function_is_leaf)
    all = true;

  /* Pop the registers above P7 in reverse order of the prologue pushes;
     the A0/A1 accumulators are reloaded in PDImode.  */
  for (i = REG_CC - 1; i > REG_P7; i--)
    if (all
	|| regs_ever_live[i]
	|| (!leaf_function_p () && call_used_regs[i]))
      {
	if (i == REG_A0 || i == REG_A1)
	  {
	    rtx mem = gen_rtx_MEM (PDImode, postinc1);
	    MEM_VOLATILE_P (mem) = 1;
	    emit_move_insn (gen_rtx_REG (PDImode, i), mem);
	  }
	else
	  emit_move_insn (gen_rtx_REG (SImode, i), postinc);
      }

  expand_epilogue_reg_restore (spreg, all, true);

  /* ASTAT was pushed first, so it is popped last.  */
  emit_move_insn (gen_rtx_REG (SImode, REG_ASTAT), postinc);

  /* Deallocate any space we left on the stack in case we needed to save the
     argument registers.  */
  if (fkind == EXCPT_HANDLER)
    emit_insn (gen_addsi3 (spreg, spreg, GEN_INT (12)));

  emit_jump_insn (gen_return_internal (GEN_INT (fkind)));
}
868
/* Used while emitting the prologue to generate code to load the correct value
   into the PIC register, which is passed in DEST.  Returns the register that
   ends up holding the PIC pointer: pic_offset_table_rtx when no reload is
   needed, DEST otherwise.  */

static rtx
bfin_load_pic_reg (rtx dest)
{
  struct cgraph_local_info *i = NULL;
  rtx addr, insn;

  /* cgraph local-function information is only available in
     unit-at-a-time mode.  */
  if (flag_unit_at_a_time)
    i = cgraph_local_info (current_function_decl);

  /* Functions local to the translation unit don't need to reload the
     pic reg, since the caller always passes a usable one.  */
  if (i && i->local)
    return pic_offset_table_rtx;

  /* Compute the address to load from: a fixed GOT slot when
     -mshared-library-id was given, otherwise a link-time
     UNSPEC_LIBRARY_OFFSET from the PIC base.  */
  if (bfin_lib_id_given)
    addr = plus_constant (pic_offset_table_rtx, -4 - bfin_library_id * 4);
  else
    addr = gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
			 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					 UNSPEC_LIBRARY_OFFSET));
  insn = emit_insn (gen_movsi (dest, gen_rtx_MEM (Pmode, addr)));
  /* Mark the load REG_MAYBE_DEAD so flow may delete it if unused.  */
  REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
  return dest;
}
896
897 /* Generate RTL for the prologue of the current function. */
898
899 void
900 bfin_expand_prologue (void)
901 {
902 rtx insn;
903 HOST_WIDE_INT frame_size = get_frame_size ();
904 rtx spreg = gen_rtx_REG (Pmode, REG_SP);
905 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
906 rtx pic_reg_loaded = NULL_RTX;
907
908 if (fkind != SUBROUTINE)
909 {
910 expand_interrupt_handler_prologue (spreg, fkind);
911 return;
912 }
913
914 if (current_function_limit_stack)
915 {
916 HOST_WIDE_INT offset
917 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM,
918 STACK_POINTER_REGNUM);
919 rtx lim = stack_limit_rtx;
920
921 if (GET_CODE (lim) == SYMBOL_REF)
922 {
923 rtx p2reg = gen_rtx_REG (Pmode, REG_P2);
924 if (TARGET_ID_SHARED_LIBRARY)
925 {
926 rtx p1reg = gen_rtx_REG (Pmode, REG_P1);
927 rtx val;
928 pic_reg_loaded = bfin_load_pic_reg (p2reg);
929 val = legitimize_pic_address (stack_limit_rtx, p1reg,
930 pic_reg_loaded);
931 emit_move_insn (p1reg, val);
932 frame_related_constant_load (p2reg, offset, FALSE);
933 emit_insn (gen_addsi3 (p2reg, p2reg, p1reg));
934 lim = p2reg;
935 }
936 else
937 {
938 rtx limit = plus_constant (stack_limit_rtx, offset);
939 emit_move_insn (p2reg, limit);
940 lim = p2reg;
941 }
942 }
943 emit_insn (gen_compare_lt (bfin_cc_rtx, spreg, lim));
944 emit_insn (gen_trapifcc ());
945 }
946 expand_prologue_reg_save (spreg, 0, false);
947
948 do_link (spreg, frame_size, false);
949
950 if (TARGET_ID_SHARED_LIBRARY
951 && (current_function_uses_pic_offset_table
952 || !current_function_is_leaf))
953 bfin_load_pic_reg (pic_offset_table_rtx);
954 }
955
/* Generate RTL for the epilogue of the current function.  NEED_RETURN is zero
   if this is for a sibcall.  EH_RETURN is nonzero if we're expanding an
   eh_return pattern.  */

void
bfin_expand_epilogue (int need_return, int eh_return)
{
  rtx spreg = gen_rtx_REG (Pmode, REG_SP);
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));

  /* Interrupt, exception and NMI handlers use a dedicated epilogue.  */
  if (fkind != SUBROUTINE)
    {
      expand_interrupt_handler_epilogue (spreg, fkind);
      return;
    }

  do_unlink (spreg, get_frame_size (), false);

  expand_epilogue_reg_restore (spreg, false, false);

  /* Omit the return insn if this is for a sibcall.  */
  if (! need_return)
    return;

  /* NOTE(review): assumes P2 holds the eh_return stack adjustment
     (EH_RETURN_STACKADJ) — confirm against bfin.h.  */
  if (eh_return)
    emit_insn (gen_addsi3 (spreg, spreg, gen_rtx_REG (Pmode, REG_P2)));

  emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE)));
}
985 \f
986 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
987
988 int
989 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
990 unsigned int new_reg)
991 {
992 /* Interrupt functions can only use registers that have already been
993 saved by the prologue, even if they would normally be
994 call-clobbered. */
995
996 if (funkind (TREE_TYPE (current_function_decl)) != SUBROUTINE
997 && !regs_ever_live[new_reg])
998 return 0;
999
1000 return 1;
1001 }
1002
1003 /* Return the value of the return address for the frame COUNT steps up
1004 from the current frame, after the prologue.
1005 We punt for everything but the current frame by returning const0_rtx. */
1006
1007 rtx
1008 bfin_return_addr_rtx (int count)
1009 {
1010 if (count != 0)
1011 return const0_rtx;
1012
1013 return get_hard_reg_initial_val (Pmode, REG_RETS);
1014 }
1015
/* Try machine-dependent ways of modifying an illegitimate address X
   to be legitimate.  If we find one, return the new, valid address,
   otherwise return NULL_RTX.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE is the mode of the memory reference.

   The Blackfin port performs no machine-specific legitimization, so
   this always punts.  */

rtx
legitimize_address (rtx x ATTRIBUTE_UNUSED, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
1031
1032 static rtx
1033 bfin_delegitimize_address (rtx orig_x)
1034 {
1035 rtx x = orig_x, y;
1036
1037 if (GET_CODE (x) != MEM)
1038 return orig_x;
1039
1040 x = XEXP (x, 0);
1041 if (GET_CODE (x) == PLUS
1042 && GET_CODE (XEXP (x, 1)) == UNSPEC
1043 && XINT (XEXP (x, 1), 1) == UNSPEC_MOVE_PIC
1044 && GET_CODE (XEXP (x, 0)) == REG
1045 && REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
1046 return XVECEXP (XEXP (x, 1), 0, 0);
1047
1048 return orig_x;
1049 }
1050
/* This predicate is used to compute the length of a load/store insn.
   OP is a MEM rtx, we return nonzero if its addressing mode requires a
   32 bit instruction.  */

int
effective_address_32bit_p (rtx op, enum machine_mode mode)
{
  HOST_WIDE_INT offset;

  /* NOTE(review): the incoming MODE argument is ignored — it is
     overwritten with the mode of the MEM itself.  */
  mode = GET_MODE (op);
  op = XEXP (op, 0);

  /* Non-PLUS addresses (plain register or auto inc/dec) always fit in
     a 16-bit instruction.  */
  if (GET_CODE (op) != PLUS)
    {
      gcc_assert (REG_P (op) || GET_CODE (op) == POST_INC
		  || GET_CODE (op) == PRE_DEC || GET_CODE (op) == POST_DEC);
      return 0;
    }

  offset = INTVAL (XEXP (op, 1));

  /* All byte loads use a 16 bit offset.  */
  if (GET_MODE_SIZE (mode) == 1)
    return 1;

  if (GET_MODE_SIZE (mode) == 4)
    {
      /* Frame pointer relative loads can use a negative offset, all others
	 are restricted to a small positive one.  */
      if (XEXP (op, 0) == frame_pointer_rtx)
	return offset < -128 || offset > 60;
      return offset < 0 || offset > 60;
    }

  /* Must be HImode now.  */
  return offset < 0 || offset > 30;
}
1088
1089 /* Returns true if X is a memory reference using an I register. */
1090 bool
1091 bfin_dsp_memref_p (rtx x)
1092 {
1093 if (! MEM_P (x))
1094 return false;
1095 x = XEXP (x, 0);
1096 if (GET_CODE (x) == POST_INC || GET_CODE (x) == PRE_INC
1097 || GET_CODE (x) == POST_DEC || GET_CODE (x) == PRE_DEC)
1098 x = XEXP (x, 0);
1099 return IREG_P (x);
1100 }
1101
1102 /* Return cost of the memory address ADDR.
1103 All addressing modes are equally cheap on the Blackfin. */
1104
static int
bfin_address_cost (rtx addr ATTRIBUTE_UNUSED)
{
  /* Flat cost: every addressing mode is equally cheap on this target.  */
  return 1;
}
1110
1111 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1112
void
print_address_operand (FILE *file, rtx x)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      /* Base-plus-offset prints as "base+offset".  */
      output_address (XEXP (x, 0));
      fprintf (file, "+");
      output_address (XEXP (x, 1));
      break;

    case PRE_DEC:
      /* Pre-decrement: "--reg".  */
      fprintf (file, "--");
      output_address (XEXP (x, 0));
      break;
    case POST_INC:
      /* Post-increment: "reg++".  */
      output_address (XEXP (x, 0));
      fprintf (file, "++");
      break;
    case POST_DEC:
      /* Post-decrement: "reg--".  */
      output_address (XEXP (x, 0));
      fprintf (file, "--");
      break;

    default:
      /* A bare MEM should never reach this point; everything else is
	 printed as an ordinary operand.  */
      gcc_assert (GET_CODE (x) != MEM);
      print_operand (file, x, 0);
      break;
    }
}
1143
1144 /* Adding intp DImode support by Tony
1145 * -- Q: (low word)
1146 * -- R: (high word)
1147 */
1148
/* Print operand X to FILE, modified by the letter CODE.  'j'/'J' print a
   condition suffix (direct/reversed); the other codes select register
   halves, accumulator parts, MAC flags, or transformed constants.  */

void
print_operand (FILE *file, rtx x, char code)
{
  enum machine_mode mode = GET_MODE (x);

  switch (code)
    {
    case 'j':
      /* Condition suffix for a comparison rtx.  NOTE(review): the
	 unsigned codes (GTU/LTU/GEU/LEU) print the same suffixes as the
	 signed ones — presumably correct for the CC-based compare scheme
	 used by this port; verify against bfin_gen_compare.  */
      switch (GET_CODE (x))
	{
	case EQ:
	  fprintf (file, "e");
	  break;
	case NE:
	  fprintf (file, "ne");
	  break;
	case GT:
	  fprintf (file, "g");
	  break;
	case LT:
	  fprintf (file, "l");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case GTU:
	  fprintf (file, "g");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	case GEU:
	  fprintf (file, "ge");
	  break;
	case LEU:
	  fprintf (file, "le");
	  break;
	default:
	  output_operand_lossage ("invalid %%j value");
	}
      break;

    case 'J':					 /* reverse logic */
      switch (GET_CODE(x))
	{
	case EQ:
	  fprintf (file, "ne");
	  break;
	case NE:
	  fprintf (file, "e");
	  break;
	case GT:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "ge");
	  break;
	case GE:
	  fprintf (file, "l");
	  break;
	case LE:
	  fprintf (file, "g");
	  break;
	case GTU:
	  fprintf (file, "le");
	  break;
	case LTU:
	  fprintf (file, "ge");
	  break;
	case GEU:
	  fprintf (file, "l");
	  break;
	case LEU:
	  fprintf (file, "g");
	  break;
	default:
	  output_operand_lossage ("invalid %%J value");
	}
      break;

    default:
      switch (GET_CODE (x))
	{
	case REG:
	  /* 'h'/'d' select the low/high 16-bit half of a register;
	     'w'/'x' the .w/.x parts of an accumulator; 'D' a register
	     pair; 'H' the second register of a DImode/DFmode pair;
	     'T' the byte register name.  */
	  if (code == 'h')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", short_reg_names[REGNO (x)]);
	      /*fprintf (file, "\n%d\n ", REGNO (x));*/
	      break;
	    }
	  else if (code == 'd')
	    {
	      gcc_assert (REGNO (x) < 32);
	      fprintf (file, "%s", high_reg_names[REGNO (x)]);
	      break;
	    }
	  else if (code == 'w')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.w", reg_names[REGNO (x)]);
	    }
	  else if (code == 'x')
	    {
	      gcc_assert (REGNO (x) == REG_A0 || REGNO (x) == REG_A1);
	      fprintf (file, "%s.x", reg_names[REGNO (x)]);
	    }
	  else if (code == 'D')
	    {
	      fprintf (file, "%s", dregs_pair_names[REGNO (x)]);
	    }
	  else if (code == 'H')
	    {
	      gcc_assert (mode == DImode || mode == DFmode);
	      gcc_assert (REG_P (x));
	      fprintf (file, "%s", reg_names[REGNO (x) + 1]);
	    }
	  else if (code == 'T')
	    {
	      gcc_assert (D_REGNO_P (REGNO (x)));
	      fprintf (file, "%s", byte_reg_names[REGNO (x)]);
	    }
	  else
	    fprintf (file, "%s", reg_names[REGNO (x)]);
	  break;

	case MEM:
	  /* Memory operands are printed in brackets.  */
	  fputc ('[', file);
	  x = XEXP (x,0);
	  print_address_operand (file, x);
	  fputc (']', file);
	  break;

	case CONST_INT:
	  if (code == 'M')
	    {
	      /* 'M' prints a multiply-accumulate flag annotation.  */
	      switch (INTVAL (x))
		{
		case MACFLAG_NONE:
		  break;
		case MACFLAG_FU:
		  fputs ("(FU)", file);
		  break;
		case MACFLAG_T:
		  fputs ("(T)", file);
		  break;
		case MACFLAG_TFU:
		  fputs ("(TFU)", file);
		  break;
		case MACFLAG_W32:
		  fputs ("(W32)", file);
		  break;
		case MACFLAG_IS:
		  fputs ("(IS)", file);
		  break;
		case MACFLAG_IU:
		  fputs ("(IU)", file);
		  break;
		case MACFLAG_IH:
		  fputs ("(IH)", file);
		  break;
		case MACFLAG_M:
		  fputs ("(M)", file);
		  break;
		case MACFLAG_ISS2:
		  fputs ("(ISS2)", file);
		  break;
		case MACFLAG_S2RND:
		  fputs ("(S2RND)", file);
		  break;
		default:
		  gcc_unreachable ();
		}
	      break;
	    }
	  else if (code == 'b')
	    {
	      /* 'b' selects a += / -= operator from a 0/1 constant.  */
	      if (INTVAL (x) == 0)
		fputs ("+=", file);
	      else if (INTVAL (x) == 1)
		fputs ("-=", file);
	      else
		gcc_unreachable ();
	      break;
	    }
	  /* Moves to half registers with d or h modifiers always use unsigned
	     constants.  */
	  else if (code == 'd')
	    x = GEN_INT ((INTVAL (x) >> 16) & 0xffff);
	  else if (code == 'h')
	    x = GEN_INT (INTVAL (x) & 0xffff);
	  else if (code == 'X')
	    /* 'X'/'Y': bit number of the single set/clear bit.  */
	    x = GEN_INT (exact_log2 (0xffffffff & INTVAL (x)));
	  else if (code == 'Y')
	    x = GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x)));
	  else if (code == 'Z')
	    /* Used for LINK insns.  */
	    x = GEN_INT (-8 - INTVAL (x));

	  /* fall through */

	case SYMBOL_REF:
	  output_addr_const (file, x);
	  break;

	case CONST_DOUBLE:
	  output_operand_lossage ("invalid const_double operand");
	  break;

	case UNSPEC:
	  /* PIC/FD-PIC references print with relocation suffixes.  */
	  switch (XINT (x, 1))
	    {
	    case UNSPEC_MOVE_PIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT");
	      break;

	    case UNSPEC_MOVE_FDPIC:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@GOT17M4");
	      break;

	    case UNSPEC_FUNCDESC_GOT17M4:
	      output_addr_const (file, XVECEXP (x, 0, 0));
	      fprintf (file, "@FUNCDESC_GOT17M4");
	      break;

	    case UNSPEC_LIBRARY_OFFSET:
	      fprintf (file, "_current_shared_library_p5_offset_");
	      break;

	    default:
	      gcc_unreachable ();
	    }
	  break;

	default:
	  output_addr_const (file, x);
	}
    }
}
1393 \f
1394 /* Argument support functions. */
1395
1396 /* Initialize a variable CUM of type CUMULATIVE_ARGS
1397 for a call to a function whose data type is FNTYPE.
1398 For a library call, FNTYPE is 0.
1399 VDSP C Compiler manual, our ABI says that
1400 first 3 words of arguments will use R0, R1 and R2.
1401 */
1402
1403 void
1404 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
1405 rtx libname ATTRIBUTE_UNUSED)
1406 {
1407 static CUMULATIVE_ARGS zero_cum;
1408
1409 *cum = zero_cum;
1410
1411 /* Set up the number of registers to use for passing arguments. */
1412
1413 cum->nregs = max_arg_registers;
1414 cum->arg_regs = arg_regs;
1415
1416 cum->call_cookie = CALL_NORMAL;
1417 /* Check for a longcall attribute. */
1418 if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
1419 cum->call_cookie |= CALL_SHORT;
1420 else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
1421 cum->call_cookie |= CALL_LONG;
1422
1423 return;
1424 }
1425
1426 /* Update the data in CUM to advance over an argument
1427 of mode MODE and data type TYPE.
1428 (TYPE is null for libcalls where that information may not be available.) */
1429
1430 void
1431 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1432 int named ATTRIBUTE_UNUSED)
1433 {
1434 int count, bytes, words;
1435
1436 bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1437 words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1438
1439 cum->words += words;
1440 cum->nregs -= words;
1441
1442 if (cum->nregs <= 0)
1443 {
1444 cum->nregs = 0;
1445 cum->arg_regs = NULL;
1446 }
1447 else
1448 {
1449 for (count = 1; count <= words; count++)
1450 cum->arg_regs++;
1451 }
1452
1453 return;
1454 }
1455
1456 /* Define where to put the arguments to a function.
1457 Value is zero to push the argument on the stack,
1458 or a hard register in which to store the argument.
1459
1460 MODE is the argument's machine mode.
1461 TYPE is the data type of the argument (as a tree).
1462 This is null for libcalls where that information may
1463 not be available.
1464 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1465 the preceding args and about the function being called.
1466 NAMED is nonzero if this argument is a named parameter
1467 (otherwise it is an extra parameter matching an ellipsis). */
1468
1469 struct rtx_def *
1470 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1471 int named ATTRIBUTE_UNUSED)
1472 {
1473 int bytes
1474 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1475
1476 if (mode == VOIDmode)
1477 /* Compute operand 2 of the call insn. */
1478 return GEN_INT (cum->call_cookie);
1479
1480 if (bytes == -1)
1481 return NULL_RTX;
1482
1483 if (cum->nregs)
1484 return gen_rtx_REG (mode, *(cum->arg_regs));
1485
1486 return NULL_RTX;
1487 }
1488
1489 /* For an arg passed partly in registers and partly in memory,
1490 this is the number of bytes passed in registers.
1491 For args passed entirely in registers or entirely in memory, zero.
1492
1493 Refer VDSP C Compiler manual, our ABI.
1494 First 3 words are in registers. So, if a an argument is larger
1495 than the registers available, it will span the register and
1496 stack. */
1497
1498 static int
1499 bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1500 tree type ATTRIBUTE_UNUSED,
1501 bool named ATTRIBUTE_UNUSED)
1502 {
1503 int bytes
1504 = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
1505 int bytes_left = cum->nregs * UNITS_PER_WORD;
1506
1507 if (bytes == -1)
1508 return 0;
1509
1510 if (bytes_left == 0)
1511 return 0;
1512 if (bytes > bytes_left)
1513 return bytes_left;
1514 return 0;
1515 }
1516
1517 /* Variable sized types are passed by reference. */
1518
1519 static bool
1520 bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1521 enum machine_mode mode ATTRIBUTE_UNUSED,
1522 tree type, bool named ATTRIBUTE_UNUSED)
1523 {
1524 return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
1525 }
1526
1527 /* Decide whether a type should be returned in memory (true)
1528 or in a register (false). This is called by the macro
1529 RETURN_IN_MEMORY. */
1530
1531 int
1532 bfin_return_in_memory (tree type)
1533 {
1534 int size = int_size_in_bytes (type);
1535 return size > 2 * UNITS_PER_WORD || size == -1;
1536 }
1537
1538 /* Register in which address to store a structure value
1539 is passed to a function. */
static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  /* The structure-value address always travels in P0, for both the
     caller and the callee.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
1546
1547 /* Return true when register may be used to pass function parameters. */
1548
1549 bool
1550 function_arg_regno_p (int n)
1551 {
1552 int i;
1553 for (i = 0; arg_regs[i] != -1; i++)
1554 if (n == arg_regs[i])
1555 return true;
1556 return false;
1557 }
1558
1559 /* Returns 1 if OP contains a symbol reference */
1560
1561 int
1562 symbolic_reference_mentioned_p (rtx op)
1563 {
1564 register const char *fmt;
1565 register int i;
1566
1567 if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
1568 return 1;
1569
1570 fmt = GET_RTX_FORMAT (GET_CODE (op));
1571 for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
1572 {
1573 if (fmt[i] == 'E')
1574 {
1575 register int j;
1576
1577 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
1578 if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
1579 return 1;
1580 }
1581
1582 else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
1583 return 1;
1584 }
1585
1586 return 0;
1587 }
1588
1589 /* Decide whether we can make a sibling call to a function. DECL is the
1590 declaration of the function being targeted by the call and EXP is the
1591 CALL_EXPR representing the call. */
1592
1593 static bool
1594 bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
1595 tree exp ATTRIBUTE_UNUSED)
1596 {
1597 e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
1598 return fkind == SUBROUTINE;
1599 }
1600 \f
1601 /* Emit RTL insns to initialize the variable parts of a trampoline at
1602 TRAMP. FNADDR is an RTX for the address of the function's pure
1603 code. CXT is an RTX for the static chain value for the function. */
1604
1605 void
1606 initialize_trampoline (tramp, fnaddr, cxt)
1607 rtx tramp, fnaddr, cxt;
1608 {
1609 rtx t1 = copy_to_reg (fnaddr);
1610 rtx t2 = copy_to_reg (cxt);
1611 rtx addr;
1612 int i = 0;
1613
1614 if (TARGET_FDPIC)
1615 {
1616 rtx a = memory_address (Pmode, plus_constant (tramp, 8));
1617 addr = memory_address (Pmode, tramp);
1618 emit_move_insn (gen_rtx_MEM (SImode, addr), a);
1619 i = 8;
1620 }
1621
1622 addr = memory_address (Pmode, plus_constant (tramp, i + 2));
1623 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1624 emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
1625 addr = memory_address (Pmode, plus_constant (tramp, i + 6));
1626 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
1627
1628 addr = memory_address (Pmode, plus_constant (tramp, i + 10));
1629 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1630 emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
1631 addr = memory_address (Pmode, plus_constant (tramp, i + 14));
1632 emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
1633 }
1634
1635 /* Emit insns to move operands[1] into operands[0]. */
1636
void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we may not create new pseudos, so reuse the
     destination as the scratch register.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  /* FD-PIC moves must have been expanded before reload starts.  */
  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    /* A symbolic constant can't be stored to memory directly; load it
       into a register first.  */
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
1650
1651 /* Expand a move operation in mode MODE. The operands are in OPERANDS. */
1652
1653 void
1654 expand_move (rtx *operands, enum machine_mode mode)
1655 {
1656 rtx op = operands[1];
1657 if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
1658 && SYMBOLIC_CONST (op))
1659 emit_pic_move (operands, mode);
1660 /* Don't generate memory->memory or constant->memory moves, go through a
1661 register */
1662 else if ((reload_in_progress | reload_completed) == 0
1663 && GET_CODE (operands[0]) == MEM
1664 && GET_CODE (operands[1]) != REG)
1665 operands[1] = force_reg (mode, operands[1]);
1666 }
1667 \f
1668 /* Split one or more DImode RTL references into pairs of SImode
1669 references. The RTL can be REG, offsettable MEM, integer constant, or
1670 CONST_DOUBLE. "operands" is a pointer to an array of DImode RTL to
1671 split and "num" is its length. lo_half and hi_half are output arrays
1672 that parallel "operands". */
1673
1674 void
1675 split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
1676 {
1677 while (num--)
1678 {
1679 rtx op = operands[num];
1680
1681 /* simplify_subreg refuse to split volatile memory addresses,
1682 but we still have to handle it. */
1683 if (GET_CODE (op) == MEM)
1684 {
1685 lo_half[num] = adjust_address (op, SImode, 0);
1686 hi_half[num] = adjust_address (op, SImode, 4);
1687 }
1688 else
1689 {
1690 lo_half[num] = simplify_gen_subreg (SImode, op,
1691 GET_MODE (op) == VOIDmode
1692 ? DImode : GET_MODE (op), 0);
1693 hi_half[num] = simplify_gen_subreg (SImode, op,
1694 GET_MODE (op) == VOIDmode
1695 ? DImode : GET_MODE (op), 4);
1696 }
1697 }
1698 }
1699 \f
1700 bool
1701 bfin_longcall_p (rtx op, int call_cookie)
1702 {
1703 gcc_assert (GET_CODE (op) == SYMBOL_REF);
1704 if (call_cookie & CALL_SHORT)
1705 return 0;
1706 if (call_cookie & CALL_LONG)
1707 return 1;
1708 if (TARGET_LONG_CALLS)
1709 return 1;
1710 return 0;
1711 }
1712
1713 /* Expand a call instruction. FNADDR is the call target, RETVAL the return value.
1714 COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
1715 SIBCALL is nonzero if this is a sibling call. */
1716
void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* Base PARALLEL size: the call itself plus the cookie USE, plus a
     RETURN for sibcalls; FD-PIC adds one more below.  */
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie)))
	{
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  /* Load the function address from the first word of the
	     function descriptor...  */
	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  /* ...and the callee's GOT pointer from the second word.  */
	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && (flag_pic
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register and call indirectly.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  /* Assemble the PARALLEL: call, optional picreg USE, cookie USE, and
     an optional RETURN for sibcalls.  */
  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
1783 \f
1784 /* Return 1 if hard register REGNO can hold a value of machine-mode MODE. */
1785
1786 int
1787 hard_regno_mode_ok (int regno, enum machine_mode mode)
1788 {
1789 /* Allow only dregs to store value of mode HI or QI */
1790 enum reg_class class = REGNO_REG_CLASS (regno);
1791
1792 if (mode == CCmode)
1793 return 0;
1794
1795 if (mode == V2HImode)
1796 return D_REGNO_P (regno);
1797 if (class == CCREGS)
1798 return mode == BImode;
1799 if (mode == PDImode || mode == V2PDImode)
1800 return regno == REG_A0 || regno == REG_A1;
1801 if (mode == SImode
1802 && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
1803 return 1;
1804
1805 return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
1806 }
1807
1808 /* Implements target hook vector_mode_supported_p. */
1809
static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  /* V2HImode is the only vector mode this port supports.  */
  return mode == V2HImode;
}
1815
1816 /* Return the cost of moving data from a register in class CLASS1 to
1817 one in class CLASS2. A cost of 2 is the default. */
1818
1819 int
1820 bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1821 enum reg_class class1, enum reg_class class2)
1822 {
1823 /* These need secondary reloads, so they're more expensive. */
1824 if ((class1 == CCREGS && class2 != DREGS)
1825 || (class1 != DREGS && class2 == CCREGS))
1826 return 4;
1827
1828 /* If optimizing for size, always prefer reg-reg over reg-memory moves. */
1829 if (optimize_size)
1830 return 2;
1831
1832 /* There are some stalls involved when moving from a DREG to a different
1833 class reg, and using the value in one of the following instructions.
1834 Attempt to model this by slightly discouraging such moves. */
1835 if (class1 == DREGS && class2 != DREGS)
1836 return 2 * 2;
1837
1838 return 2;
1839 }
1840
1841 /* Return the cost of moving data of mode M between a
1842 register and memory. A value of 2 is the default; this cost is
1843 relative to those in `REGISTER_MOVE_COST'.
1844
1845 ??? In theory L1 memory has single-cycle latency. We should add a switch
1846 that tells the compiler whether we expect to use only L1 memory for the
1847 program; it'll make the costs more accurate. */
1848
1849 int
1850 bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1851 enum reg_class class,
1852 int in ATTRIBUTE_UNUSED)
1853 {
1854 /* Make memory accesses slightly more expensive than any register-register
1855 move. Also, penalize non-DP registers, since they need secondary
1856 reloads to load and store. */
1857 if (! reg_class_subset_p (class, DPREGS))
1858 return 10;
1859
1860 return 8;
1861 }
1862
1863 /* Inform reload about cases where moving X with a mode MODE to a register in
1864 CLASS requires an extra scratch register. Return the class needed for the
1865 scratch register. */
1866
static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  /* Look through SUBREGs to classify the underlying object.  */
  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (REG_P (x))
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo will live in memory; treat it as MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
1939 \f
1940 /* Implement TARGET_HANDLE_OPTION. */
1941
1942 static bool
1943 bfin_handle_option (size_t code, const char *arg, int value)
1944 {
1945 switch (code)
1946 {
1947 case OPT_mshared_library_id_:
1948 if (value > MAX_LIBRARY_ID)
1949 error ("-mshared-library-id=%s is not between 0 and %d",
1950 arg, MAX_LIBRARY_ID);
1951 bfin_lib_id_given = 1;
1952 return true;
1953
1954 default:
1955 return true;
1956 }
1957 }
1958
1959 static struct machine_function *
1960 bfin_init_machine_status (void)
1961 {
1962 struct machine_function *f;
1963
1964 f = ggc_alloc_cleared (sizeof (struct machine_function));
1965
1966 return f;
1967 }
1968
1969 /* Implement the macro OVERRIDE_OPTIONS. */
1970
void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* ID shared libraries imply PIC.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  /* NOTE(review): insn scheduling is unconditionally disabled here —
     presumably it isn't beneficial (or validated) on this port.  */
  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
2002
2003 /* Return the destination address of BRANCH.
2004 We need to use this instead of get_attr_length, because the
2005 cbranch_with_nops pattern conservatively sets its length to 6, and
2006 we still prefer to use shorter sequences. */
2007
2008 static int
2009 branch_dest (rtx branch)
2010 {
2011 rtx dest;
2012 int dest_uid;
2013 rtx pat = PATTERN (branch);
2014 if (GET_CODE (pat) == PARALLEL)
2015 pat = XVECEXP (pat, 0, 0);
2016 dest = SET_SRC (pat);
2017 if (GET_CODE (dest) == IF_THEN_ELSE)
2018 dest = XEXP (dest, 1);
2019 dest = XEXP (dest, 0);
2020 dest_uid = INSN_UID (dest);
2021 return INSN_ADDRESSES (dest_uid);
2022 }
2023
2024 /* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
2025 it's a branch that's predicted taken. */
2026
2027 static int
2028 cbranch_predicted_taken_p (rtx insn)
2029 {
2030 rtx x = find_reg_note (insn, REG_BR_PROB, 0);
2031
2032 if (x)
2033 {
2034 int pred_val = INTVAL (XEXP (x, 0));
2035
2036 return pred_val >= REG_BR_PROB_BASE / 2;
2037 }
2038
2039 return 0;
2040 }
2041
2042 /* Templates for use by asm_conditional_branch. */
2043
/* Rows are indexed by (bp << 1) | (EQ ? BRF : BRT); columns by branch
   distance computed in asm_conditional_branch: 0 = short conditional
   branch, 1 = jump.s sequence, 2 = jump.l sequence.  */
static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
2050
2051 /* Output INSN, which is a conditional branch instruction with operands
2052 OPERANDS.
2053
2054 We deal with the various forms of conditional branches that can be generated
2055 by bfin_reorg to prevent the hardware from doing speculative loads, by
2056 - emitting a sufficient number of nops, if N_NOPS is nonzero, or
2057 - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
2058 Either of these is only necessary if the branch is short, otherwise the
2059 template we use ends in an unconditional jump which flushes the pipeline
2060 anyway. */
2061
void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
     is to be taken from start of if cc rather than jump.
     Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  /* len selects the template column: 0 short, 1 jump.s, 2 jump.l.  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* Forced (bp) only matters for the short form; otherwise honor the
     REG_BR_PROB-based prediction.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  /* Nops and a (bp) prediction are mutually exclusive ways of dealing
     with speculation (see the function comment).  */
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
2081
2082 /* Emit rtl for a comparison operation CMP in mode MODE. Operands have been
2083 stored in bfin_compare_op0 and bfin_compare_op1 already. */
2084
rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
      /* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Other conditions are handled by computing the reversed
	   condition into CC and testing it for equality with zero.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      /* Emit CC = op0 <code1> op1; the returned rtx then tests CC.  */
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
2123 \f
2124 /* Return nonzero iff C has exactly one bit set if it is interpreted
2125 as a 32 bit constant. */
2126
2127 int
2128 log2constp (unsigned HOST_WIDE_INT c)
2129 {
2130 c &= 0xFFFFFFFF;
2131 return c != 0 && (c & (c-1)) == 0;
2132 }
2133
2134 /* Returns the number of consecutive least significant zeros in the binary
2135 representation of *V.
2136 We modify *V to contain the original value arithmetically shifted right by
2137 the number of zeroes. */
2138
static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Capture the sign bit so that the manual shift below behaves like an
     arithmetic (sign-preserving) right shift.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  /* Shift out trailing zeros one at a time, re-inserting the sign bit;
     the bound of 32 iterations caps the count at a 32-bit width.  */
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
2158
2159 /* After reload, split the load of an immediate constant. OPERANDS are the
2160 operands of the movsi_insn pattern which we are splitting. We return
2161 nonzero if we emitted a sequence to load the constant, zero if we emitted
2162 nothing because we want to use the splitter's default sequence. */
2163
2164 int
2165 split_load_immediate (rtx operands[])
2166 {
2167 HOST_WIDE_INT val = INTVAL (operands[1]);
2168 HOST_WIDE_INT tmp;
2169 HOST_WIDE_INT shifted = val;
2170 HOST_WIDE_INT shifted_compl = ~val;
2171 int num_zero = shiftr_zero (&shifted);
2172 int num_compl_zero = shiftr_zero (&shifted_compl);
2173 unsigned int regno = REGNO (operands[0]);
2174 enum reg_class class1 = REGNO_REG_CLASS (regno);
2175
2176 /* This case takes care of single-bit set/clear constants, which we could
2177 also implement with BITSET/BITCLR. */
2178 if (num_zero
2179 && shifted >= -32768 && shifted < 65536
2180 && (D_REGNO_P (regno)
2181 || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
2182 {
2183 emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
2184 emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
2185 return 1;
2186 }
2187
2188 tmp = val & 0xFFFF;
2189 tmp |= -(tmp & 0x8000);
2190
2191 /* If high word has one bit set or clear, try to use a bit operation. */
2192 if (D_REGNO_P (regno))
2193 {
2194 if (log2constp (val & 0xFFFF0000))
2195 {
2196 emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
2197 emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
2198 return 1;
2199 }
2200 else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
2201 {
2202 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2203 emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
2204 }
2205 }
2206
2207 if (D_REGNO_P (regno))
2208 {
2209 if (CONST_7BIT_IMM_P (tmp))
2210 {
2211 emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
2212 emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
2213 return 1;
2214 }
2215
2216 if ((val & 0xFFFF0000) == 0)
2217 {
2218 emit_insn (gen_movsi (operands[0], const0_rtx));
2219 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2220 return 1;
2221 }
2222
2223 if ((val & 0xFFFF0000) == 0xFFFF0000)
2224 {
2225 emit_insn (gen_movsi (operands[0], constm1_rtx));
2226 emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
2227 return 1;
2228 }
2229 }
2230
2231 /* Need DREGs for the remaining case. */
2232 if (regno > REG_R7)
2233 return 0;
2234
2235 if (optimize_size
2236 && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
2237 {
2238 /* If optimizing for size, generate a sequence that has more instructions
2239 but is shorter. */
2240 emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
2241 emit_insn (gen_ashlsi3 (operands[0], operands[0],
2242 GEN_INT (num_compl_zero)));
2243 emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
2244 return 1;
2245 }
2246 return 0;
2247 }
2248 \f
/* Return true if VALUE is a legal constant offset from a base register for
   a memory operand of mode MODE.  Return false if not.  */
2251
2252 static bool
2253 bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
2254 {
2255 unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
2256 int sz = GET_MODE_SIZE (mode);
2257 int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
2258 /* The usual offsettable_memref machinery doesn't work so well for this
2259 port, so we deal with the problem here. */
2260 unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
2261 return (v & ~(mask << shift)) == 0;
2262 }
2263
2264 static bool
2265 bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
2266 enum rtx_code outer_code)
2267 {
2268 if (strict)
2269 return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
2270 else
2271 return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
2272 }
2273
/* Return true if X is a valid memory address of mode MODE.  STRICT is
   nonzero when only hard registers may serve as base registers.  */
bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either a (PIC-related) UNSPEC in SImode or a
       constant offset that fits the scaled displacement field.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    /* NOTE(review): there is no break here, so a POST_INC/POST_DEC that
       fails the test above falls through to the PRE_DEC checks below.
       Looks suspicious — confirm the fall-through is intended.  */
  case PRE_DEC:
    /* PRE_DEC is only allowed on the stack pointer (a push).  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
2308
/* Compute an approximate cost for rtx X, whose rtx code is CODE and which
   appears inside an expression with code OUTER_CODE.  Store the cost in
   *TOTAL; return true if that is the final answer, false to let the caller
   also recurse into X's operands.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* Constants that fit the immediate field of the surrounding operation
	 are free; anything else needs an extra insn to load.  */
      if (outer_code == SET || outer_code == PLUS)
        *total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
        *total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
        *total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
        *total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
        *total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
        *total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
        *total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
	{
	  /* reg + reg * {2,4} matches a scaled-index address computation
	     and costs a single insn plus the cost of the operands.  */
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode operations are expanded into multiple insns; leave *TOTAL
	 untouched for narrower modes and let the caller recurse.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    case UDIV:
    case UMOD:
      /* Division is very expensive on this target.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
2399
2400 static void
2401 bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
2402 {
2403 fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
2404 }
2405 \f
2406 /* Used for communication between {push,pop}_multiple_operation (which
2407 we use not only as a predicate) and the corresponding output functions. */
2408 static int first_preg_to_save, first_dreg_to_save;
2409
/* Recognize OP as a multi-register push: a PARALLEL whose inner elements
   store registers to successive stack slots below SP — a run of consecutive
   D registers first (if any), then a run of consecutive P registers.  As a
   side effect, record the first register of each class that is saved in
   first_dreg_to_save and first_preg_to_save.  */
int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  /* GROUP tracks which register class we are currently matching:
     0 = nothing seen yet, 1 = D registers, 2 = P registers.  */
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Every element must store a register into memory.  */
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* The I'th element must store to SP - I*4.  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* The first register pushed decides which class we start in.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch over to the P register group, or continue the
	     run of consecutive D registers.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  /* Only the next consecutive P register may follow.  */
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
2479
/* Recognize OP as a multi-register pop: a PARALLEL whose inner elements
   load registers from successive stack slots — P registers counting down
   from P5 first, then D registers counting down from R7.  As a side effect,
   record the lowest register of each class that was restored in
   first_dreg_to_save and first_preg_to_save.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  /* GROUP is 0 while matching descending P registers, 1 once we have
     switched to descending D registers (starting at R7).  */
  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      /* Every element must load a register from memory.  */
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* The first load comes directly from SP ...  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      /* ... and the I'th load from SP + (I-1)*4.  */
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  if (regno == REG_R7)
	    {
	      /* Switch to the D register group.  */
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
2538
2539 /* Emit assembly code for one multi-register push described by INSN, with
2540 operands in OPERANDS. */
2541
2542 void
2543 output_push_multiple (rtx insn, rtx *operands)
2544 {
2545 char buf[80];
2546 int ok;
2547
2548 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2549 ok = push_multiple_operation (PATTERN (insn), VOIDmode);
2550 gcc_assert (ok);
2551
2552 if (first_dreg_to_save == 8)
2553 sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
2554 else if (first_preg_to_save == 6)
2555 sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
2556 else
2557 sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
2558 first_dreg_to_save, first_preg_to_save);
2559
2560 output_asm_insn (buf, operands);
2561 }
2562
2563 /* Emit assembly code for one multi-register pop described by INSN, with
2564 operands in OPERANDS. */
2565
2566 void
2567 output_pop_multiple (rtx insn, rtx *operands)
2568 {
2569 char buf[80];
2570 int ok;
2571
2572 /* Validate the insn again, and compute first_[dp]reg_to_save. */
2573 ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
2574 gcc_assert (ok);
2575
2576 if (first_dreg_to_save == 8)
2577 sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
2578 else if (first_preg_to_save == 6)
2579 sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
2580 else
2581 sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
2582 first_dreg_to_save, first_preg_to_save);
2583
2584 output_asm_insn (buf, operands);
2585 }
2586
2587 /* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE. */
2588
2589 static void
2590 single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode, HOST_WIDE_INT offset)
2591 {
2592 rtx scratch = gen_reg_rtx (mode);
2593 rtx srcmem, dstmem;
2594
2595 srcmem = adjust_address_nv (src, mode, offset);
2596 dstmem = adjust_address_nv (dst, mode, offset);
2597 emit_move_insn (scratch, srcmem);
2598 emit_move_insn (dstmem, scratch);
2599 }
2600
2601 /* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
2602 alignment ALIGN_EXP. Return true if successful, false if we should fall
2603 back on a different method. */
2604
2605 bool
2606 bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
2607 {
2608 rtx srcreg, destreg, countreg;
2609 HOST_WIDE_INT align = 0;
2610 unsigned HOST_WIDE_INT count = 0;
2611
2612 if (GET_CODE (align_exp) == CONST_INT)
2613 align = INTVAL (align_exp);
2614 if (GET_CODE (count_exp) == CONST_INT)
2615 {
2616 count = INTVAL (count_exp);
2617 #if 0
2618 if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
2619 return false;
2620 #endif
2621 }
2622
2623 /* If optimizing for size, only do single copies inline. */
2624 if (optimize_size)
2625 {
2626 if (count == 2 && align < 2)
2627 return false;
2628 if (count == 4 && align < 4)
2629 return false;
2630 if (count != 1 && count != 2 && count != 4)
2631 return false;
2632 }
2633 if (align < 2 && count != 1)
2634 return false;
2635
2636 destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
2637 if (destreg != XEXP (dst, 0))
2638 dst = replace_equiv_address_nv (dst, destreg);
2639 srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
2640 if (srcreg != XEXP (src, 0))
2641 src = replace_equiv_address_nv (src, srcreg);
2642
2643 if (count != 0 && align >= 2)
2644 {
2645 unsigned HOST_WIDE_INT offset = 0;
2646
2647 if (align >= 4)
2648 {
2649 if ((count & ~3) == 4)
2650 {
2651 single_move_for_movmem (dst, src, SImode, offset);
2652 offset = 4;
2653 }
2654 else if (count & ~3)
2655 {
2656 HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
2657 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2658
2659 emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
2660 }
2661 if (count & 2)
2662 {
2663 single_move_for_movmem (dst, src, HImode, offset);
2664 offset += 2;
2665 }
2666 }
2667 else
2668 {
2669 if ((count & ~1) == 2)
2670 {
2671 single_move_for_movmem (dst, src, HImode, offset);
2672 offset = 2;
2673 }
2674 else if (count & ~1)
2675 {
2676 HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
2677 countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));
2678
2679 emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
2680 }
2681 }
2682 if (count & 1)
2683 {
2684 single_move_for_movmem (dst, src, QImode, offset);
2685 }
2686 return true;
2687 }
2688 return false;
2689 }
2690
2691 \f
/* Adjust the scheduling cost COST of the dependence LINK between INSN and
   DEP_INSN.  Return the new cost.  */
static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      /* NOTE(review): this assumes PATTERN (dep_insn) is a plain SET with
	 a REG destination; confirm TYPE_MOVE/TYPE_MCLD insns can never be
	 PARALLELs.  */
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      /* Penalize a move or load from a D register into an address
	 register.  */
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2723
2724 \f
2725 /* Increment the counter for the number of loop instructions in the
2726 current function. */
2727
2728 void
2729 bfin_hardware_loop (void)
2730 {
2731 cfun->machine->has_hardware_loops++;
2732 }
2733
2734 /* Maximum loop nesting depth. */
2735 #define MAX_LOOP_DEPTH 2
2736
2737 /* Maximum size of a loop. */
2738 #define MAX_LOOP_LENGTH 2042
2739
2740 /* We need to keep a vector of loops */
2741 typedef struct loop_info *loop_info;
2742 DEF_VEC_P (loop_info);
2743 DEF_VEC_ALLOC_P (loop_info,heap);
2744
/* Information about a loop we have found (or are in the process of
   finding).  */
struct loop_info GTY (())
{
  /* loop number, for dumps */
  int loop_no;

  /* Predecessor block of the loop.  This is the one that falls into
     the loop and contains the initialization instruction.  */
  basic_block predecessor;

  /* First block in the loop.  This is the one branched to by the loop_end
     insn.  */
  basic_block head;

  /* Last block in the loop (the one with the loop_end insn).  */
  basic_block tail;

  /* The successor block of the loop.  This is the one the loop_end insn
     falls into.  */
  basic_block successor;

  /* The last instruction in the tail.  */
  rtx last_insn;

  /* The loop_end insn.  */
  rtx loop_end;

  /* The iteration register.  */
  rtx iter_reg;

  /* The new insn that loads the iteration count into the LC register, or
     NULL_RTX when the LSETUP instruction initializes it itself.  */
  rtx init;

  /* The new LSETUP loop-setup instruction that replaces the loop_end
     jump.  */
  rtx loop_init;

  /* The new label placed at the beginning of the loop. */
  rtx start_label;

  /* The new label placed at the end of the loop. */
  rtx end_label;

  /* The length of the loop. */
  int length;

  /* The nesting depth of the loop. */
  int depth;

  /* Nonzero if we can't optimize this loop. */
  int bad;

  /* True if we have visited this loop. */
  int visited;

  /* True if this loop body clobbers any of LC0, LT0, or LB0. */
  int clobber_loop0;

  /* True if this loop body clobbers any of LC1, LT1, or LB1. */
  int clobber_loop1;

  /* Next loop in the graph. */
  struct loop_info *next;

  /* Immediate outer loop of this loop. */
  struct loop_info *outer;

  /* Vector of blocks only within the loop, including those within
     inner loops.  */
  VEC (basic_block,heap) *blocks;

  /* Same information in a bitmap.  */
  bitmap block_bitmap;

  /* Vector of inner loops within this loop  */
  VEC (loop_info,heap) *loops;
};
2822
2823 static void
2824 bfin_dump_loops (loop_info loops)
2825 {
2826 loop_info loop;
2827
2828 for (loop = loops; loop; loop = loop->next)
2829 {
2830 loop_info i;
2831 basic_block b;
2832 unsigned ix;
2833
2834 fprintf (dump_file, ";; loop %d: ", loop->loop_no);
2835 if (loop->bad)
2836 fprintf (dump_file, "(bad) ");
2837 fprintf (dump_file, "{head:%d, depth:%d}", loop->head->index, loop->depth);
2838
2839 fprintf (dump_file, " blocks: [ ");
2840 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
2841 fprintf (dump_file, "%d ", b->index);
2842 fprintf (dump_file, "] ");
2843
2844 fprintf (dump_file, " inner loops: [ ");
2845 for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, i); ix++)
2846 fprintf (dump_file, "%d ", i->loop_no);
2847 fprintf (dump_file, "]\n");
2848 }
2849 fprintf (dump_file, "\n");
2850 }
2851
2852 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
2853 BB. Return true, if we find it. */
2854
2855 static bool
2856 bfin_bb_in_loop (loop_info loop, basic_block bb)
2857 {
2858 return bitmap_bit_p (loop->block_bitmap, bb->index);
2859 }
2860
2861 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
2862 REG. Return true, if we find any. Don't count the loop's loop_end
2863 insn if it matches LOOP_END. */
2864
2865 static bool
2866 bfin_scan_loop (loop_info loop, rtx reg, rtx loop_end)
2867 {
2868 unsigned ix;
2869 basic_block bb;
2870
2871 for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
2872 {
2873 rtx insn;
2874
2875 for (insn = BB_HEAD (bb);
2876 insn != NEXT_INSN (BB_END (bb));
2877 insn = NEXT_INSN (insn))
2878 {
2879 if (!INSN_P (insn))
2880 continue;
2881 if (insn == loop_end)
2882 continue;
2883 if (reg_mentioned_p (reg, PATTERN (insn)))
2884 return true;
2885 }
2886 }
2887 return false;
2888 }
2889
/* Try to convert LOOP into a Blackfin hardware loop: verify its shape and
   size, pick a free LC/LT/LB register set, and replace its loop_end insn
   with an LSETUP emitted in the predecessor block.  If the loop cannot be
   converted, mark it bad and expand its loop_end insn into an explicit
   decrement / compare / conditional-branch sequence.  */

static void
bfin_optimize_loop (loop_info loop)
{
  basic_block bb;
  loop_info inner;
  rtx insn, init_insn, last_insn, nop_insn;
  rtx loop_init, start_label, end_label;
  rtx reg_lc0, reg_lc1, reg_lt0, reg_lt1, reg_lb0, reg_lb1;
  rtx iter_reg;
  rtx lc_reg, lt_reg, lb_reg;
  rtx seq;
  int length;
  unsigned ix;
  int inner_depth = 0;

  if (loop->visited)
    return;

  loop->visited = 1;

  if (loop->bad)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d bad when found\n", loop->loop_no);
      goto bad_loop;
    }

  /* Every loop contains in its list of inner loops every loop nested inside
     it, even if there are intermediate loops.  This works because we're doing
     a depth-first search here and never visit a loop more than once.  */
  for (ix = 0; VEC_iterate (loop_info, loop->loops, ix, inner); ix++)
    {
      bfin_optimize_loop (inner);

      if (!inner->bad && inner_depth < inner->depth)
	{
	  inner_depth = inner->depth;

	  loop->clobber_loop0 |= inner->clobber_loop0;
	  loop->clobber_loop1 |= inner->clobber_loop1;
	}
    }

  loop->depth = inner_depth + 1;
  if (loop->depth > MAX_LOOP_DEPTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too deep\n", loop->loop_no);
      goto bad_loop;
    }

  /* Get the loop iteration register.  */
  iter_reg = loop->iter_reg;

  if (!DPREG_P (iter_reg))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d iteration count NOT in PREG or DREG\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* Check if start_label appears before loop_end and calculate the
     offset between them.  We calculate the length of instructions
     conservatively.  */
  length = 0;
  for (insn = loop->start_label;
       insn && insn != loop->loop_end;
       insn = NEXT_INSN (insn))
    {
      /* Account for extra space that anomaly workarounds may add around
	 conditional jumps and labels.  */
      if (JUMP_P (insn) && any_condjump_p (insn) && !optimize_size)
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 8;
	  else if (TARGET_SPECLD_ANOMALY)
	    length += 6;
	}
      else if (LABEL_P (insn))
	{
	  if (TARGET_CSYNC_ANOMALY)
	    length += 4;
	}

      if (INSN_P (insn))
	length += get_attr_length (insn);
    }

  /* INSN is NULL here if we ran off the insn chain without finding
     loop_end, i.e. start_label does not precede it.  */
  if (!insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
		 loop->loop_no);
      goto bad_loop;
    }

  loop->length = length;
  if (loop->length > MAX_LOOP_LENGTH)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d too long\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the blocks to make sure they don't use iter_reg.  */
  if (bfin_scan_loop (loop, iter_reg, loop->loop_end))
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d uses iterator\n", loop->loop_no);
      goto bad_loop;
    }

  /* Scan all the insns to see if the loop body clobber
     any hardware loop registers. */

  reg_lc0 = gen_rtx_REG (SImode, REG_LC0);
  reg_lc1 = gen_rtx_REG (SImode, REG_LC1);
  reg_lt0 = gen_rtx_REG (SImode, REG_LT0);
  reg_lt1 = gen_rtx_REG (SImode, REG_LT1);
  reg_lb0 = gen_rtx_REG (SImode, REG_LB0);
  reg_lb1 = gen_rtx_REG (SImode, REG_LB1);

  for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
    {
      rtx insn;

      for (insn = BB_HEAD (bb);
	   insn != NEXT_INSN (BB_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (!INSN_P (insn))
	    continue;

	  if (reg_set_p (reg_lc0, insn)
	      || reg_set_p (reg_lt0, insn)
	      || reg_set_p (reg_lb0, insn))
	    loop->clobber_loop0 = 1;

	  if (reg_set_p (reg_lc1, insn)
	      || reg_set_p (reg_lt1, insn)
	      || reg_set_p (reg_lb1, insn))
	    loop->clobber_loop1 |= 1;
	}
    }

  /* Give up if neither hardware loop register set is usable.  */
  if ((loop->clobber_loop0 && loop->clobber_loop1)
      || (loop->depth == MAX_LOOP_DEPTH && loop->clobber_loop0))
    {
      loop->depth = MAX_LOOP_DEPTH + 1;
      if (dump_file)
	fprintf (dump_file, ";; loop %d no loop reg available\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* There should be an instruction before the loop_end instruction
     in the same basic block. And the instruction must not be
     - JUMP
     - CONDITIONAL BRANCH
     - CALL
     - CSYNC
     - SSYNC
     - Returns (RTS, RTN, etc.)  */

  bb = loop->tail;
  last_insn = PREV_INSN (loop->loop_end);

  /* Walk backwards (following single predecessors if necessary) until we
     find a real insn to serve as the last insn of the hardware loop.  */
  while (1)
    {
      for (; last_insn != PREV_INSN (BB_HEAD (bb));
	   last_insn = PREV_INSN (last_insn))
	if (INSN_P (last_insn))
	  break;

      if (last_insn != PREV_INSN (BB_HEAD (bb)))
	break;

      if (single_pred_p (bb)
	  && single_pred (bb) != ENTRY_BLOCK_PTR)
	{
	  bb = single_pred (bb);
	  last_insn = BB_END (bb);
	  continue;
	}
      else
	{
	  last_insn = NULL_RTX;
	  break;
	}
    }

  if (!last_insn)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has no last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  if (JUMP_P (last_insn))
    {
      loop_info inner = bb->aux;
      if (inner
	  && inner->outer == loop
	  && inner->loop_end == last_insn
	  && inner->depth == 1)
	/* This jump_insn is the exact loop_end of an inner loop
	   and to be optimized away. So use the inner's last_insn.  */
	last_insn = inner->last_insn;
      else
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop %d has bad last instruction\n",
		     loop->loop_no);
	  goto bad_loop;
	}
    }
  else if (CALL_P (last_insn)
	   || get_attr_type (last_insn) == TYPE_SYNC
	   || recog_memoized (last_insn) == CODE_FOR_return_internal)
    {
      if (dump_file)
	fprintf (dump_file, ";; loop %d has bad last instruction\n",
		 loop->loop_no);
      goto bad_loop;
    }

  /* If the last insn is an asm or a multi-insn sequence, append a NOP so
     that a well-defined single insn ends the loop.  */
  if (GET_CODE (PATTERN (last_insn)) == ASM_INPUT
      || asm_noperands (PATTERN (last_insn)) >= 0
      || get_attr_seq_insns (last_insn) == SEQ_INSNS_MULTI)
    {
      nop_insn = emit_insn_after (gen_nop (), last_insn);
      last_insn = nop_insn;
    }

  loop->last_insn = last_insn;

  /* The loop is good for replacement.  */
  start_label = loop->start_label;
  end_label = gen_label_rtx ();
  iter_reg = loop->iter_reg;

  /* Use the LC1/LT1/LB1 set for innermost loops when it is free, else
     fall back to LC0/LT0/LB0.  */
  if (loop->depth == 1 && !loop->clobber_loop1)
    {
      lc_reg = reg_lc1;
      lt_reg = reg_lt1;
      lb_reg = reg_lb1;
      loop->clobber_loop1 = 1;
    }
  else
    {
      lc_reg = reg_lc0;
      lt_reg = reg_lt0;
      lb_reg = reg_lb0;
      loop->clobber_loop0 = 1;
    }

  /* If iter_reg is a DREG, we need generate an instruction to load
     the loop count into LC register. */
  if (D_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = gen_movsi (lc_reg, iter_reg);
      loop_init = gen_lsetup_without_autoinit (lt_reg, start_label,
					       lb_reg, end_label,
					       lc_reg);
    }
  else if (P_REGNO_P (REGNO (iter_reg)))
    {
      init_insn = NULL_RTX;
      loop_init = gen_lsetup_with_autoinit (lt_reg, start_label,
					    lb_reg, end_label,
					    lc_reg, iter_reg);
    }
  else
    gcc_unreachable ();

  loop->init = init_insn;
  loop->end_label = end_label;
  loop->loop_init = loop_init;

  if (dump_file)
    {
      fprintf (dump_file, ";; replacing loop %d initializer with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_init);
      fprintf (dump_file, ";; replacing loop %d terminator with\n",
	       loop->loop_no);
      print_rtl_single (dump_file, loop->loop_end);
    }

  /* Emit the init (if any), the LSETUP, and the start label at the end of
     the predecessor block, then delete the old loop_end jump.  */
  start_sequence ();

  if (loop->init != NULL_RTX)
    emit_insn (loop->init);
  emit_insn(loop->loop_init);
  emit_label (loop->start_label);

  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, BB_END (loop->predecessor));
  delete_insn (loop->loop_end);

  /* Insert the loop end label before the last instruction of the loop.  */
  emit_label_before (loop->end_label, loop->last_insn);

  return;

 bad_loop:

  if (dump_file)
    fprintf (dump_file, ";; loop %d is bad\n", loop->loop_no);

  loop->bad = 1;

  if (DPREG_P (loop->iter_reg))
    {
      /* If loop->iter_reg is a DREG or PREG, we can split it here
	 without scratch register.  */
      rtx insn;

      emit_insn_before (gen_addsi3 (loop->iter_reg,
				    loop->iter_reg,
				    constm1_rtx),
			loop->loop_end);

      emit_insn_before (gen_cmpsi (loop->iter_reg, const0_rtx),
			loop->loop_end);

      insn = emit_jump_insn_before (gen_bne (loop->start_label),
				    loop->loop_end);

      JUMP_LABEL (insn) = loop->start_label;
      LABEL_NUSES (loop->start_label)++;
      delete_insn (loop->loop_end);
    }
}
3228
3229 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
3230 a newly set up structure describing the loop, it is this function's
3231 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
3232 loop_end insn and its enclosing basic block. */
3233
static void
bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
{
  unsigned dwork = 0;
  basic_block bb;
  VEC (basic_block,heap) *works = VEC_alloc (basic_block,heap,20);

  loop->tail = tail_bb;
  loop->head = BRANCH_EDGE (tail_bb)->dest;
  loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
  loop->predecessor = NULL;
  loop->loop_end = tail_insn;
  loop->last_insn = NULL_RTX;
  /* NOTE(review): these two extractions rely on the exact layout of the
     loop_end pattern — element 1 sets the iteration register, element 0
     contains the branch whose label_ref is the loop start.  Confirm
     against the loop_end pattern in bfin.md.  */
  loop->iter_reg = SET_DEST (XVECEXP (PATTERN (tail_insn), 0, 1));
  loop->depth = loop->length = 0;
  loop->visited = 0;
  loop->clobber_loop0 = loop->clobber_loop1 = 0;
  loop->outer = NULL;
  loop->loops = NULL;

  loop->init = loop->loop_init = NULL_RTX;
  loop->start_label = XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn), 0, 0)), 1), 0);
  loop->end_label = NULL_RTX;
  loop->bad = 0;

  /* Breadth-first scan from the loop head, collecting all blocks that can
     reach the tail (pruned by liveness of the iteration register).  */
  VEC_safe_push (basic_block, heap, works, loop->head);

  while (VEC_iterate (basic_block, works, dwork++, bb))
    {
      edge e;
      edge_iterator ei;
      if (bb == EXIT_BLOCK_PTR)
	{
	  /* We've reached the exit block.  The loop must be bad. */
	  if (dump_file)
	    fprintf (dump_file,
		     ";; Loop is bad - reached exit block while scanning\n");
	  loop->bad = 1;
	  break;
	}

      if (bitmap_bit_p (loop->block_bitmap, bb->index))
	continue;

      /* We've not seen this block before.  Add it to the loop's
	 list and then add each successor to the work list.  */

      VEC_safe_push (basic_block, heap, loop->blocks, bb);
      bitmap_set_bit (loop->block_bitmap, bb->index);

      if (bb != tail_bb)
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
	      /* Only follow successors where the iteration register is
		 still live.  */
	      if (!REGNO_REG_SET_P (succ->il.rtl->global_live_at_start,
				    REGNO (loop->iter_reg)))
		continue;
	      /* Make room in the work list, compacting away the already
		 processed prefix first if possible.  */
	      if (!VEC_space (basic_block, works, 1))
		{
		  if (dwork)
		    {
		      VEC_block_remove (basic_block, works, 0, dwork);
		      dwork = 0;
		    }
		  else
		    VEC_reserve (basic_block, heap, works, 1);
		}
	      VEC_quick_push (basic_block, works, succ);
	    }
	}
    }

  if (!loop->bad)
    {
      /* Make sure we only have one entry point.  */
      if (EDGE_COUNT (loop->head->preds) == 2)
	{
	  loop->predecessor = EDGE_PRED (loop->head, 0)->src;
	  if (loop->predecessor == loop->tail)
	    /* We wanted the other predecessor.  */
	    loop->predecessor = EDGE_PRED (loop->head, 1)->src;

	  /* We can only place a loop insn on a fall through edge of a
	     single exit block.  */
	  if (EDGE_COUNT (loop->predecessor->succs) != 1
	      || !(EDGE_SUCC (loop->predecessor, 0)->flags & EDGE_FALLTHRU)
	      /* If loop->predecessor is in loop, loop->head is not really
		 the head of the loop.  */
	      || bfin_bb_in_loop (loop, loop->predecessor))
	    loop->predecessor = NULL;
	}

      if (loop->predecessor == NULL)
	{
	  if (dump_file)
	    fprintf (dump_file, ";; loop has bad predecessor\n");
	  loop->bad = 1;
	}
    }

#ifdef ENABLE_CHECKING
  /* Make sure nothing jumps into this loop.  This shouldn't happen as we
     wouldn't have generated the counted loop patterns in such a case.
     However, this test must be done after the test above to detect loops
     with invalid headers.  */
  if (!loop->bad)
    for (dwork = 0; VEC_iterate (basic_block, loop->blocks, dwork, bb); dwork++)
      {
	edge e;
	edge_iterator ei;
	if (bb == loop->head)
	  continue;
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    basic_block pred = EDGE_PRED (bb, ei.index)->src;
	    if (!bfin_bb_in_loop (loop, pred))
	      abort ();
	  }
      }
#endif
  VEC_free (basic_block, heap, works);
}
3357
/* Run as part of machine-dependent reorg.  Discover every potential
   hardware loop in the current function by scanning for loop_end insns,
   compute the nesting relationship between the loops found, try to
   optimize each one (bfin_optimize_loop), and release all per-loop data.
   DUMP_FILE, if non-NULL, receives a trace of the decisions made.  */

static void
bfin_reorg_loops (FILE *dump_file)
{
  bitmap_obstack stack;
  bitmap tmp_bitmap;
  basic_block bb;
  loop_info loops = NULL;
  loop_info loop;
  int nloops = 0;

  bitmap_obstack_initialize (&stack);

  /* Find all the possible loop tails.  This means searching for every
     loop_end instruction.  For each one found, create a loop_info
     structure and add the head block to the work list. */
  FOR_EACH_BB (bb)
    {
      rtx tail = BB_END (bb);

      /* Skip trailing notes to reach the last real insn of the block.  */
      while (GET_CODE (tail) == NOTE)
	tail = PREV_INSN (tail);

      bb->aux = NULL;

      if (INSN_P (tail) && recog_memoized (tail) == CODE_FOR_loop_end)
	{
	  /* A possible loop end */

	  loop = XNEW (struct loop_info);
	  loop->next = loops;
	  loops = loop;
	  loop->loop_no = nloops++;
	  loop->blocks = VEC_alloc (basic_block, heap, 20);
	  loop->block_bitmap = BITMAP_ALLOC (&stack);
	  /* Remember which loop this block ends; bb->aux was cleared for
	     all other blocks above.  */
	  bb->aux = loop;

	  if (dump_file)
	    {
	      fprintf (dump_file, ";; potential loop %d ending at\n",
		       loop->loop_no);
	      print_rtl_single (dump_file, tail);
	    }

	  bfin_discover_loop (loop, bb, tail);
	}
    }

  tmp_bitmap = BITMAP_ALLOC (&stack);
  /* Compute loop nestings.  Two good loops either nest (one block set
     contains the other) or are disjoint; partial overlap marks both bad.  */
  for (loop = loops; loop; loop = loop->next)
    {
      loop_info other;
      if (loop->bad)
	continue;

      for (other = loop->next; other; other = other->next)
	{
	  if (other->bad)
	    continue;

	  bitmap_and (tmp_bitmap, other->block_bitmap, loop->block_bitmap);
	  if (bitmap_empty_p (tmp_bitmap))
	    continue;
	  if (bitmap_equal_p (tmp_bitmap, other->block_bitmap))
	    {
	      /* OTHER's blocks are a subset of LOOP's: OTHER nests inside.  */
	      other->outer = loop;
	      VEC_safe_push (loop_info, heap, loop->loops, other);
	    }
	  else if (bitmap_equal_p (tmp_bitmap, loop->block_bitmap))
	    {
	      /* LOOP's blocks are a subset of OTHER's: LOOP nests inside.  */
	      loop->outer = other;
	      VEC_safe_push (loop_info, heap, other->loops, loop);
	    }
	  else
	    {
	      /* Partial overlap: neither contains the other.  */
	      loop->bad = other->bad = 1;
	    }
	}
    }
  BITMAP_FREE (tmp_bitmap);

  if (dump_file)
    {
      fprintf (dump_file, ";; All loops found:\n\n");
      bfin_dump_loops (loops);
    }

  /* Now apply the optimizations.  */
  for (loop = loops; loop; loop = loop->next)
    bfin_optimize_loop (loop);

  if (dump_file)
    {
      fprintf (dump_file, ";; After hardware loops optimization:\n\n");
      bfin_dump_loops (loops);
    }

  /* Free up the loop structures */
  while (loops)
    {
      loop = loops;
      loops = loop->next;
      VEC_free (loop_info, heap, loop->loops);
      VEC_free (basic_block, heap, loop->blocks);
      BITMAP_FREE (loop->block_bitmap);
      XDELETE (loop);
    }

  if (dump_file)
    print_rtl (dump_file, get_insns ());
}
3469
3470 \f
3471 /* We use the machine specific reorg pass for emitting CSYNC instructions
3472 after conditional branches as needed.
3473
3474 The Blackfin is unusual in that a code sequence like
3475 if cc jump label
3476 r0 = (p0)
3477 may speculatively perform the load even if the condition isn't true. This
3478 happens for a branch that is predicted not taken, because the pipeline
3479 isn't flushed or stalled, so the early stages of the following instructions,
3480 which perform the memory reference, are allowed to execute before the
3481 jump condition is evaluated.
3482 Therefore, we must insert additional instructions in all places where this
3483 could lead to incorrect behavior. The manual recommends CSYNC, while
3484 VDSP seems to use NOPs (even though its corresponding compiler option is
3485 named CSYNC).
3486
3487 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3488 When optimizing for size, we turn the branch into a predicted taken one.
3489 This may be slower due to mispredicts, but saves code size. */
3490
static void
bfin_reorg (void)
{
  rtx insn, last_condjump = NULL_RTX;
  /* Number of insns seen since the last tracked conditional branch;
     INT_MAX means "no branch in range, nothing to protect".  */
  int cycles_since_jump = INT_MAX;

  /* Doloop optimization */
  if (cfun->machine->has_hardware_loops)
    bfin_reorg_loops (dump_file);

  if (! TARGET_SPECLD_ANOMALY && ! TARGET_CSYNC_ANOMALY)
    return;

  /* First pass: find predicted-false branches; if something after them
     needs nops, insert them or change the branch to predict true.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      rtx pat;

      if (NOTE_P (insn) || BARRIER_P (insn) || LABEL_P (insn))
	continue;

      /* Skip pseudo-insns that emit no machine code.  */
      pat = PATTERN (insn);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
	  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
	  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
	continue;

      if (JUMP_P (insn))
	{
	  /* Only predicted-not-taken conditional branches need
	     protection; any other jump resets the tracking.  */
	  if (any_condjump_p (insn)
	      && ! cbranch_predicted_taken_p (insn))
	    {
	      last_condjump = insn;
	      cycles_since_jump = 0;
	    }
	  else
	    cycles_since_jump = INT_MAX;
	}
      else if (INSN_P (insn))
	{
	  enum attr_type type = get_attr_type (insn);
	  int delay_needed = 0;
	  if (cycles_since_jump < INT_MAX)
	    cycles_since_jump++;

	  /* Speculative loads that may trap need 3 cycles of cover;
	     sync insns need 4.  */
	  if (type == TYPE_MCLD && TARGET_SPECLD_ANOMALY)
	    {
	      rtx pat = single_set (insn);
	      if (may_trap_p (SET_SRC (pat)))
		delay_needed = 3;
	    }
	  else if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
	    delay_needed = 4;

	  if (delay_needed > cycles_since_jump)
	    {
	      rtx pat;
	      int num_clobbers;
	      rtx *op = recog_data.operand;

	      delay_needed -= cycles_since_jump;

	      /* Rewrite the branch: when optimizing for size, flip it to
		 predicted-taken; otherwise pad it with the needed NOPs.  */
	      extract_insn (last_condjump);
	      if (optimize_size)
		{
		  pat = gen_cbranch_predicted_taken (op[0], op[1], op[2],
						     op[3]);
		  cycles_since_jump = INT_MAX;
		}
	      else
		/* Do not adjust cycles_since_jump in this case, so that
		   we'll increase the number of NOPs for a subsequent insn
		   if necessary.  */
		pat = gen_cbranch_with_nops (op[0], op[1], op[2], op[3],
					     GEN_INT (delay_needed));
	      PATTERN (last_condjump) = pat;
	      INSN_CODE (last_condjump) = recog (pat, insn, &num_clobbers);
	    }
	}
    }
  /* Second pass: for predicted-true branches, see if anything at the
     branch destination needs extra nops.  */
  if (! TARGET_CSYNC_ANOMALY)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (JUMP_P (insn)
	  && any_condjump_p (insn)
	  && (INSN_CODE (insn) == CODE_FOR_cbranch_predicted_taken
	      || cbranch_predicted_taken_p (insn)))
	{
	  rtx target = JUMP_LABEL (insn);
	  rtx label = target;
	  cycles_since_jump = 0;
	  /* Walk up to 3 insns past the branch target looking for a sync
	     insn that needs cover.  */
	  for (; target && cycles_since_jump < 3; target = NEXT_INSN (target))
	    {
	      rtx pat;

	      if (NOTE_P (target) || BARRIER_P (target) || LABEL_P (target))
		continue;

	      pat = PATTERN (target);
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		  || GET_CODE (pat) == ASM_INPUT || GET_CODE (pat) == ADDR_VEC
		  || GET_CODE (pat) == ADDR_DIFF_VEC || asm_noperands (pat) >= 0)
		continue;

	      if (INSN_P (target))
		{
		  enum attr_type type = get_attr_type (target);
		  int delay_needed = 0;
		  if (cycles_since_jump < INT_MAX)
		    cycles_since_jump++;

		  if (type == TYPE_SYNC && TARGET_CSYNC_ANOMALY)
		    delay_needed = 2;

		  if (delay_needed > cycles_since_jump)
		    {
		      rtx prev = prev_real_insn (label);
		      delay_needed -= cycles_since_jump;
		      if (dump_file)
			fprintf (dump_file, "Adding %d nops after %d\n",
				 delay_needed, INSN_UID (label));
		      /* If the insn before the label is a padded branch from
			 pass one, its NOPs already cover part of the need;
			 shrink its count accordingly.  */
		      if (JUMP_P (prev)
			  && INSN_CODE (prev) == CODE_FOR_cbranch_with_nops)
			{
			  rtx x;
			  HOST_WIDE_INT v;

			  if (dump_file)
			    fprintf (dump_file,
				     "Reducing nops on insn %d.\n",
				     INSN_UID (prev));
			  x = PATTERN (prev);
			  x = XVECEXP (x, 0, 1);
			  v = INTVAL (XVECEXP (x, 0, 0)) - delay_needed;
			  XVECEXP (x, 0, 0) = GEN_INT (v);
			}
		      while (delay_needed-- > 0)
			emit_insn_after (gen_nop (), label);
		      break;
		    }
		}
	    }
	}
    }
}
3641 \f
3642 /* Handle interrupt_handler, exception_handler and nmi_handler function
3643 attributes; arguments as in struct attribute_spec.handler. */
3644
3645 static tree
3646 handle_int_attribute (tree *node, tree name,
3647 tree args ATTRIBUTE_UNUSED,
3648 int flags ATTRIBUTE_UNUSED,
3649 bool *no_add_attrs)
3650 {
3651 tree x = *node;
3652 if (TREE_CODE (x) == FUNCTION_DECL)
3653 x = TREE_TYPE (x);
3654
3655 if (TREE_CODE (x) != FUNCTION_TYPE)
3656 {
3657 warning (OPT_Wattributes, "%qs attribute only applies to functions",
3658 IDENTIFIER_POINTER (name));
3659 *no_add_attrs = true;
3660 }
3661 else if (funkind (x) != SUBROUTINE)
3662 error ("multiple function type attributes specified");
3663
3664 return NULL_TREE;
3665 }
3666
3667 /* Return 0 if the attributes for two types are incompatible, 1 if they
3668 are compatible, and 2 if they are nearly compatible (which causes a
3669 warning to be generated). */
3670
3671 static int
3672 bfin_comp_type_attributes (tree type1, tree type2)
3673 {
3674 e_funkind kind1, kind2;
3675
3676 if (TREE_CODE (type1) != FUNCTION_TYPE)
3677 return 1;
3678
3679 kind1 = funkind (type1);
3680 kind2 = funkind (type2);
3681
3682 if (kind1 != kind2)
3683 return 0;
3684
3685 /* Check for mismatched modifiers */
3686 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1))
3687 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2)))
3688 return 0;
3689
3690 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1))
3691 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2)))
3692 return 0;
3693
3694 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1))
3695 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2)))
3696 return 0;
3697
3698 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1))
3699 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2)))
3700 return 0;
3701
3702 return 1;
3703 }
3704
3705 /* Handle a "longcall" or "shortcall" attribute; arguments as in
3706 struct attribute_spec.handler. */
3707
3708 static tree
3709 bfin_handle_longcall_attribute (tree *node, tree name,
3710 tree args ATTRIBUTE_UNUSED,
3711 int flags ATTRIBUTE_UNUSED,
3712 bool *no_add_attrs)
3713 {
3714 if (TREE_CODE (*node) != FUNCTION_TYPE
3715 && TREE_CODE (*node) != FIELD_DECL
3716 && TREE_CODE (*node) != TYPE_DECL)
3717 {
3718 warning (OPT_Wattributes, "`%s' attribute only applies to functions",
3719 IDENTIFIER_POINTER (name));
3720 *no_add_attrs = true;
3721 }
3722
3723 if ((strcmp (IDENTIFIER_POINTER (name), "longcall") == 0
3724 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node)))
3725 || (strcmp (IDENTIFIER_POINTER (name), "shortcall") == 0
3726 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node))))
3727 {
3728 warning (OPT_Wattributes,
3729 "can't apply both longcall and shortcall attributes to the same function");
3730 *no_add_attrs = true;
3731 }
3732
3733 return NULL_TREE;
3734 }
3735
/* Table of valid machine attributes.  Entries with a NULL handler are
   merely recorded on the type; the interrupt-kind and call-kind
   attributes are validated by their handlers above.  */
const struct attribute_spec bfin_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "exception_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nmi_handler", 0, 0, false, true,  true, handle_int_attribute },
  { "nesting", 0, 0, false, true,  true, NULL },
  { "kspisusp", 0, 0, false, true,  true, NULL },
  { "saveall", 0, 0, false, true,  true, NULL },
  { "longcall",  0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { "shortcall", 0, 0, false, true,  true,  bfin_handle_longcall_attribute },
  { NULL, 0, 0, false, false, false, NULL }
};
3750 \f
3751 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
3752 tell the assembler to generate pointers to function descriptors in
3753 some cases. */
3754
3755 static bool
3756 bfin_assemble_integer (rtx value, unsigned int size, int aligned_p)
3757 {
3758 if (TARGET_FDPIC && size == UNITS_PER_WORD)
3759 {
3760 if (GET_CODE (value) == SYMBOL_REF
3761 && SYMBOL_REF_FUNCTION_P (value))
3762 {
3763 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
3764 output_addr_const (asm_out_file, value);
3765 fputs (")\n", asm_out_file);
3766 return true;
3767 }
3768 if (!aligned_p)
3769 {
3770 /* We've set the unaligned SI op to NULL, so we always have to
3771 handle the unaligned case here. */
3772 assemble_integer_with_op ("\t.4byte\t", value);
3773 return true;
3774 }
3775 }
3776 return default_assemble_integer (value, size, aligned_p);
3777 }
3778 \f
/* Output the assembler code for a thunk function.  THUNK_DECL is the
   declaration for the thunk function itself, FUNCTION is the decl for
   the target function.  DELTA is an immediate constant offset to be
   added to THIS.  If VCALL_OFFSET is nonzero, the word at
   *(*this + vcall_offset) should be added to THIS.  */

static void
bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED,
		      tree thunk ATTRIBUTE_UNUSED, HOST_WIDE_INT delta,
		      HOST_WIDE_INT vcall_offset, tree function)
{
  rtx xops[3];
  /* The this parameter is passed as the first argument.  */
  rtx this = gen_rtx_REG (Pmode, REG_R0);

  /* Adjust the this parameter by a fixed constant.  */
  if (delta)
    {
      xops[1] = this;
      if (delta >= -64 && delta <= 63)
	{
	  /* Fits a single add-immediate.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("%1 += %0;", xops);
	}
      else if (delta >= -128 && delta < -64)
	{
	  /* Split into two adds, each with an in-range immediate.  */
	  xops[0] = GEN_INT (delta + 64);
	  output_asm_insn ("%1 += -64; %1 += %0;", xops);
	}
      else if (delta > 63 && delta <= 126)
	{
	  xops[0] = GEN_INT (delta - 63);
	  output_asm_insn ("%1 += 63; %1 += %0;", xops);
	}
      else
	{
	  /* Too large for immediates: build the constant in R3 and add.  */
	  xops[0] = GEN_INT (delta);
	  output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops);
	}
    }

  /* Adjust the this parameter by a value stored in the vtable.  */
  if (vcall_offset)
    {
      rtx p2tmp = gen_rtx_REG (Pmode, REG_P2);
      rtx tmp = gen_rtx_REG (Pmode, REG_R2);

      /* Load the vtable pointer (*this) into P2.  */
      xops[1] = tmp;
      xops[2] = p2tmp;
      output_asm_insn ("%2 = r0; %2 = [%2];", xops);

      /* Adjust the this parameter.  */
      xops[0] = gen_rtx_MEM (Pmode, plus_constant (p2tmp, vcall_offset));
      if (!memory_operand (xops[0], Pmode))
	{
	  /* Offset not directly addressable: compute the address in P2
	     via P1 first.  */
	  rtx tmp2 = gen_rtx_REG (Pmode, REG_P1);
	  xops[0] = GEN_INT (vcall_offset);
	  xops[1] = tmp2;
	  output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops);
	  xops[0] = gen_rtx_MEM (Pmode, p2tmp);
	}
      xops[2] = this;
      output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops);
    }

  xops[0] = XEXP (DECL_RTL (function), 0);
  /* NOTE(review): the "1 ||" makes the condition always true, so a direct
     jump is emitted even for PIC/non-local functions -- presumably a
     deliberate placeholder until a PIC thunk sequence exists; confirm
     before relying on this under -fpic.  */
  if (1 || !flag_pic || (*targetm.binds_local_p) (function))
    output_asm_insn ("jump.l\t%P0", xops);
}
3848 \f
/* Codes for all the Blackfin builtins.  The suffixes encode the operand
   shape: 2X16 = a V2HI pair of 16-bit values, 1X16 = a single 16-bit
   value, 1X32 = a single 32-bit value.  */
enum bfin_builtins
{
  BFIN_BUILTIN_CSYNC,
  BFIN_BUILTIN_SSYNC,
  BFIN_BUILTIN_COMPOSE_2X16,
  BFIN_BUILTIN_EXTRACTLO,
  BFIN_BUILTIN_EXTRACTHI,

  BFIN_BUILTIN_SSADD_2X16,
  BFIN_BUILTIN_SSSUB_2X16,
  BFIN_BUILTIN_SSADDSUB_2X16,
  BFIN_BUILTIN_SSSUBADD_2X16,
  BFIN_BUILTIN_MULT_2X16,
  BFIN_BUILTIN_MULTR_2X16,
  BFIN_BUILTIN_NEG_2X16,
  BFIN_BUILTIN_ABS_2X16,
  BFIN_BUILTIN_MIN_2X16,
  BFIN_BUILTIN_MAX_2X16,

  BFIN_BUILTIN_SSADD_1X16,
  BFIN_BUILTIN_SSSUB_1X16,
  BFIN_BUILTIN_MULT_1X16,
  BFIN_BUILTIN_MULTR_1X16,
  BFIN_BUILTIN_NORM_1X16,
  BFIN_BUILTIN_NEG_1X16,
  BFIN_BUILTIN_ABS_1X16,
  BFIN_BUILTIN_MIN_1X16,
  BFIN_BUILTIN_MAX_1X16,

  BFIN_BUILTIN_DIFFHL_2X16,
  BFIN_BUILTIN_DIFFLH_2X16,

  BFIN_BUILTIN_SSADD_1X32,
  BFIN_BUILTIN_SSSUB_1X32,
  BFIN_BUILTIN_NORM_1X32,
  BFIN_BUILTIN_NEG_1X32,
  BFIN_BUILTIN_MIN_1X32,
  BFIN_BUILTIN_MAX_1X32,
  BFIN_BUILTIN_MULT_1X32,

  BFIN_BUILTIN_MULHISILL,
  BFIN_BUILTIN_MULHISILH,
  BFIN_BUILTIN_MULHISIHL,
  BFIN_BUILTIN_MULHISIHH,

  BFIN_BUILTIN_LSHIFT_1X16,
  BFIN_BUILTIN_LSHIFT_2X16,
  BFIN_BUILTIN_SSASHIFT_1X16,
  BFIN_BUILTIN_SSASHIFT_2X16,

  BFIN_BUILTIN_CPLX_MUL_16,
  BFIN_BUILTIN_CPLX_MAC_16,
  BFIN_BUILTIN_CPLX_MSU_16,

  /* Sentinel: number of builtin codes, not a real builtin.  */
  BFIN_BUILTIN_MAX
};
3906
/* Convenience wrapper: register builtin NAME with function type TYPE and
   bfin_builtins code CODE as a machine-specific builtin.  */
#define def_builtin(NAME, TYPE, CODE)					\
do {									\
  add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,		\
			NULL, NULL_TREE);				\
} while (0)
3912
/* Set up all builtin functions for this target.  Builds the function
   type nodes needed and registers every __builtin_bfin_* entry point.  */
static void
bfin_init_builtins (void)
{
  tree V2HI_type_node = build_vector_type_for_mode (intHI_type_node, V2HImode);
  tree void_ftype_void
    = build_function_type (void_type_node, void_list_node);
  tree short_ftype_short
    = build_function_type_list (short_integer_type_node, short_integer_type_node,
				NULL_TREE);
  tree short_ftype_int_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int_int
    = build_function_type_list (integer_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_int
    = build_function_type_list (integer_type_node, integer_type_node,
				NULL_TREE);
  tree short_ftype_int
    = build_function_type_list (short_integer_type_node, integer_type_node,
				NULL_TREE);
  tree int_ftype_v2hi_v2hi
    = build_function_type_list (integer_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_v2hi_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree v2hi_ftype_int_int
    = build_function_type_list (V2HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi_int
    = build_function_type_list (V2HI_type_node, V2HI_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_short_short
    = build_function_type_list (integer_type_node, short_integer_type_node,
				short_integer_type_node, NULL_TREE);
  tree v2hi_ftype_v2hi
    = build_function_type_list (V2HI_type_node, V2HI_type_node, NULL_TREE);
  tree short_ftype_v2hi
    = build_function_type_list (short_integer_type_node, V2HI_type_node,
				NULL_TREE);

  /* Synchronization builtins.  */
  def_builtin ("__builtin_bfin_csync", void_ftype_void, BFIN_BUILTIN_CSYNC);
  def_builtin ("__builtin_bfin_ssync", void_ftype_void, BFIN_BUILTIN_SSYNC);

  /* V2HI compose/extract.  */
  def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int,
	       BFIN_BUILTIN_COMPOSE_2X16);
  def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTHI);
  def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi,
	       BFIN_BUILTIN_EXTRACTLO);

  def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MIN_2X16);
  def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MAX_2X16);

  def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADD_2X16);
  def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUB_2X16);
  def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSADDSUB_2X16);
  def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_SSSUBADD_2X16);
  def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULT_2X16);
  def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULTR_2X16);
  def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_NEG_2X16);
  def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi,
	       BFIN_BUILTIN_ABS_2X16);

  def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X16);
  def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X16);
  def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULT_1X16);
  def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_MULTR_1X16);
  def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_NEG_1X16);
  def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short,
	       BFIN_BUILTIN_ABS_1X16);
  def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X16);

  def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFHL_2X16);
  def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi,
	       BFIN_BUILTIN_DIFFLH_2X16);

  def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILL);
  def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHL);
  def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISILH);
  def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_MULHISIHH);

  def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSADD_1X32);
  def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int,
	       BFIN_BUILTIN_SSSUB_1X32);
  def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int,
	       BFIN_BUILTIN_NEG_1X32);
  def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int,
	       BFIN_BUILTIN_NORM_1X32);
  def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short,
	       BFIN_BUILTIN_MULT_1X32);

  /* Shifts. */
  def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_SSASHIFT_1X16);
  def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_SSASHIFT_2X16);
  def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int,
	       BFIN_BUILTIN_LSHIFT_1X16);
  def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int,
	       BFIN_BUILTIN_LSHIFT_2X16);

  /* Complex numbers. */
  def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MUL_16);
  def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MAC_16);
  def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi,
	       BFIN_BUILTIN_CPLX_MSU_16);
}
4050
4051
/* One table entry per builtin that can be expanded generically by
   bfin_expand_binop_builtin / bfin_expand_unop_builtin.  */
struct builtin_description
{
  const enum insn_code icode;		/* Insn pattern used for expansion.  */
  const char *const name;		/* User-visible builtin name.  */
  const enum bfin_builtins code;	/* Matching bfin_builtins code.  */
  int macflag;				/* -1 for plain binops, else a
					   MACFLAG_* operand value.  */
};
4059
/* Two-operand builtins, expanded through bfin_expand_binop_builtin.
   The macflag field is -1 for a normal binary op, or a MACFLAG_xxx
   value passed as an extra operand to the insn pattern.  */
static const struct builtin_description bdesc_2arg[] =
{
  { CODE_FOR_composev2hi, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16, -1 },

  { CODE_FOR_ssashiftv2hi3, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16, -1 },
  { CODE_FOR_ssashifthi3, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16, -1 },
  { CODE_FOR_lshiftv2hi3, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16, -1 },
  { CODE_FOR_lshifthi3, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16, -1 },

  { CODE_FOR_sminhi3, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16, -1 },
  { CODE_FOR_smaxhi3, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16, -1 },
  { CODE_FOR_ssaddhi3, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16, -1 },
  { CODE_FOR_sssubhi3, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16, -1 },

  { CODE_FOR_sminsi3, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32, -1 },
  { CODE_FOR_smaxsi3, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32, -1 },
  { CODE_FOR_ssaddsi3, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32, -1 },
  { CODE_FOR_sssubsi3, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32, -1 },

  { CODE_FOR_sminv2hi3, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16, -1 },
  { CODE_FOR_smaxv2hi3, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16, -1 },
  { CODE_FOR_ssaddv2hi3, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16, -1 },
  { CODE_FOR_sssubv2hi3, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16, -1 },
  { CODE_FOR_ssaddsubv2hi3, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16, -1 },
  { CODE_FOR_sssubaddv2hi3, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16, -1 },

  { CODE_FOR_flag_mulhisi, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32, MACFLAG_NONE },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16, MACFLAG_T },
  { CODE_FOR_flag_mulhi, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16, MACFLAG_NONE },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16, MACFLAG_T },
  { CODE_FOR_flag_mulv2hi, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16, MACFLAG_NONE }
};
4092
/* One-operand builtins, expanded through bfin_expand_unop_builtin.
   The macflag field is unused for these entries.  */
static const struct builtin_description bdesc_1arg[] =
{
  { CODE_FOR_signbitshi2, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16, 0 },
  { CODE_FOR_ssneghi2, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16, 0 },
  { CODE_FOR_abshi2, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16, 0 },

  { CODE_FOR_signbitssi2, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32, 0 },
  { CODE_FOR_ssnegsi2, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32, 0 },

  { CODE_FOR_movv2hi_hi_low, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO, 0 },
  { CODE_FOR_movv2hi_hi_high, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI, 0 },
  { CODE_FOR_ssnegv2hi2, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16, 0 },
  { CODE_FOR_absv2hi2, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16, 0 }
};
4107
4108 /* Errors in the source file can cause expand_expr to return const0_rtx
4109 where we expect a vector. To avoid crashing, use one of the vector
4110 clear instructions. */
4111 static rtx
4112 safe_vector_operand (rtx x, enum machine_mode mode)
4113 {
4114 if (x != const0_rtx)
4115 return x;
4116 x = gen_reg_rtx (SImode);
4117
4118 emit_insn (gen_movsi (x, CONST0_RTX (SImode)));
4119 return gen_lowpart (mode, x);
4120 }
4121
/* Subroutine of bfin_expand_builtin to take care of binop insns.  MACFLAG is -1
   if this is a normal binary op, or one of the MACFLAG_xxx constants, in
   which case it is passed as an extra operand to the insn pattern.
   ARGLIST holds the two argument trees; TARGET is a suggested result rtx
   or NULL.  Returns the rtx holding the result, or 0 on failure.  */

static rtx
bfin_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target,
			   int macflag)
{
  rtx pat;
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against error-recovery const0_rtx standing in for a vector.  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* Narrow int arguments to the HImode the pattern expects.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }
  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }
  /* In case the insn wants input operands in modes different from
     the result, abort.  */
  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  if (macflag == -1)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0, op1, GEN_INT (macflag));
  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
4180
4181 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
4182
4183 static rtx
4184 bfin_expand_unop_builtin (enum insn_code icode, tree arglist,
4185 rtx target)
4186 {
4187 rtx pat;
4188 tree arg0 = TREE_VALUE (arglist);
4189 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4190 enum machine_mode op0mode = GET_MODE (op0);
4191 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4192 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4193
4194 if (! target
4195 || GET_MODE (target) != tmode
4196 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4197 target = gen_reg_rtx (tmode);
4198
4199 if (VECTOR_MODE_P (mode0))
4200 op0 = safe_vector_operand (op0, mode0);
4201
4202 if (op0mode == SImode && mode0 == HImode)
4203 {
4204 op0mode = HImode;
4205 op0 = gen_lowpart (HImode, op0);
4206 }
4207 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
4208
4209 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4210 op0 = copy_to_mode_reg (mode0, op0);
4211
4212 pat = GEN_FCN (icode) (target, op0);
4213 if (! pat)
4214 return 0;
4215 emit_insn (pat);
4216 return target;
4217 }
4218
4219 /* Expand an expression EXP that calls a built-in function,
4220 with result going to TARGET if that's convenient
4221 (and in mode MODE if that's convenient).
4222 SUBTARGET may be used as the target for computing one of EXP's operands.
4223 IGNORE is nonzero if the value is to be ignored. */
4224
4225 static rtx
4226 bfin_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
4227 rtx subtarget ATTRIBUTE_UNUSED,
4228 enum machine_mode mode ATTRIBUTE_UNUSED,
4229 int ignore ATTRIBUTE_UNUSED)
4230 {
4231 size_t i;
4232 enum insn_code icode;
4233 const struct builtin_description *d;
4234 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4235 tree arglist = TREE_OPERAND (exp, 1);
4236 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
4237 tree arg0, arg1, arg2;
4238 rtx op0, op1, op2, accvec, pat, tmp1, tmp2;
4239 enum machine_mode tmode, mode0;
4240
4241 switch (fcode)
4242 {
4243 case BFIN_BUILTIN_CSYNC:
4244 emit_insn (gen_csync ());
4245 return 0;
4246 case BFIN_BUILTIN_SSYNC:
4247 emit_insn (gen_ssync ());
4248 return 0;
4249
4250 case BFIN_BUILTIN_DIFFHL_2X16:
4251 case BFIN_BUILTIN_DIFFLH_2X16:
4252 arg0 = TREE_VALUE (arglist);
4253 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4254 icode = (fcode == BFIN_BUILTIN_DIFFHL_2X16
4255 ? CODE_FOR_subhilov2hi3 : CODE_FOR_sublohiv2hi3);
4256 tmode = insn_data[icode].operand[0].mode;
4257 mode0 = insn_data[icode].operand[1].mode;
4258
4259 if (! target
4260 || GET_MODE (target) != tmode
4261 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4262 target = gen_reg_rtx (tmode);
4263
4264 if (VECTOR_MODE_P (mode0))
4265 op0 = safe_vector_operand (op0, mode0);
4266
4267 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4268 op0 = copy_to_mode_reg (mode0, op0);
4269
4270 pat = GEN_FCN (icode) (target, op0, op0);
4271 if (! pat)
4272 return 0;
4273 emit_insn (pat);
4274 return target;
4275
4276 case BFIN_BUILTIN_CPLX_MUL_16:
4277 arg0 = TREE_VALUE (arglist);
4278 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4279 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4280 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4281 accvec = gen_reg_rtx (V2PDImode);
4282
4283 if (! target
4284 || GET_MODE (target) != V2HImode
4285 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4286 target = gen_reg_rtx (tmode);
4287 if (! register_operand (op0, GET_MODE (op0)))
4288 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4289 if (! register_operand (op1, GET_MODE (op1)))
4290 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4291
4292 emit_insn (gen_flag_macinit1v2hi_parts (accvec, op0, op1, const0_rtx,
4293 const0_rtx, const0_rtx,
4294 const1_rtx, GEN_INT (MACFLAG_NONE)));
4295 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4296 const1_rtx, const1_rtx,
4297 const0_rtx, accvec, const1_rtx, const0_rtx,
4298 GEN_INT (MACFLAG_NONE), accvec));
4299
4300 return target;
4301
4302 case BFIN_BUILTIN_CPLX_MAC_16:
4303 case BFIN_BUILTIN_CPLX_MSU_16:
4304 arg0 = TREE_VALUE (arglist);
4305 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4306 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
4307 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4308 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4309 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
4310 accvec = gen_reg_rtx (V2PDImode);
4311
4312 if (! target
4313 || GET_MODE (target) != V2HImode
4314 || ! (*insn_data[icode].operand[0].predicate) (target, V2HImode))
4315 target = gen_reg_rtx (tmode);
4316 if (! register_operand (op0, GET_MODE (op0)))
4317 op0 = copy_to_mode_reg (GET_MODE (op0), op0);
4318 if (! register_operand (op1, GET_MODE (op1)))
4319 op1 = copy_to_mode_reg (GET_MODE (op1), op1);
4320
4321 tmp1 = gen_reg_rtx (SImode);
4322 tmp2 = gen_reg_rtx (SImode);
4323 emit_insn (gen_ashlsi3 (tmp1, gen_lowpart (SImode, op2), GEN_INT (16)));
4324 emit_move_insn (tmp2, gen_lowpart (SImode, op2));
4325 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode, tmp2), const0_rtx));
4326 emit_insn (gen_load_accumulator_pair (accvec, tmp1, tmp2));
4327 emit_insn (gen_flag_macv2hi_parts_acconly (accvec, op0, op1, const0_rtx,
4328 const0_rtx, const0_rtx,
4329 const1_rtx, accvec, const0_rtx,
4330 const0_rtx,
4331 GEN_INT (MACFLAG_W32)));
4332 tmp1 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const1_rtx : const0_rtx);
4333 tmp2 = (fcode == BFIN_BUILTIN_CPLX_MAC_16 ? const0_rtx : const1_rtx);
4334 emit_insn (gen_flag_macv2hi_parts (target, op0, op1, const1_rtx,
4335 const1_rtx, const1_rtx,
4336 const0_rtx, accvec, tmp1, tmp2,
4337 GEN_INT (MACFLAG_NONE), accvec));
4338
4339 return target;
4340
4341 default:
4342 break;
4343 }
4344
4345 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
4346 if (d->code == fcode)
4347 return bfin_expand_binop_builtin (d->icode, arglist, target,
4348 d->macflag);
4349
4350 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
4351 if (d->code == fcode)
4352 return bfin_expand_unop_builtin (d->icode, arglist, target);
4353
4354 gcc_unreachable ();
4355 }
4356 \f
/* Initializers for the target hook vector (targetm).  Each #undef/#define
   pair overrides the default from target-def.h with the Blackfin-specific
   implementation; TARGET_INITIALIZER below collects them all.  */

/* Machine-specific builtins (__builtin_bfin_*): registration and RTL
   expansion.  */
#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

/* Assembly output hooks.  */
#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label 

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

/* Blackfin-specific attributes (e.g. interrupt handler attributes) and
   their compatibility checking.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

/* Cost model used by the RTL optimizers.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef  TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

/* Machine-dependent reorg pass; on Blackfin this handles hardware loop
   generation and workarounds (see bfin_reorg earlier in this file).  */
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

/* Thunk output for C++ multiple inheritance.  */
#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

/* Promote small integer arguments/returns to word mode.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

/* Argument-passing conventions.  */
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

/* V2HI etc. vector support for the builtins above.  */
#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

/* Command-line option handling.  */
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

/* Undo PIC address legitimization for debug/dump output.  */
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address

/* The single target hook vector instance for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;