gcc/targhooks.c
1 /* Default target hook functions.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* The migration of target macros to target hooks works as follows:
22
23 1. Create a target hook that uses the existing target macros to
24 implement the same functionality.
25
26 2. Convert all the MI files to use the hook instead of the macro.
27
28 3. Repeat for a majority of the remaining target macros. This will
29 take some time.
30
31 4. Tell target maintainers to start migrating.
32
33 5. Eventually convert the backends to override the hook instead of
34 defining the macros. This will take some time too.
35
36      6. At some future point (TBD), poison the macros.  Unmigrated targets
37         will break at this point.
38
39 Note that we expect steps 1-3 to be done by the people that
40 understand what the MI does with each macro, and step 5 to be done
41 by the target maintainers for their respective targets.
42
43 Note that steps 1 and 2 don't have to be done together, but no
44 target can override the new hook until step 2 is complete for it.
45
46 Once the macros are poisoned, we will revert to the old migration
47 rules - migrate the macro, callers, and targets all at once. This
48 comment can thus be removed at that point. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "machmode.h"
55 #include "rtl.h"
56 #include "tree.h"
57 #include "expr.h"
58 #include "output.h"
59 #include "diagnostic-core.h"
60 #include "toplev.h"
61 #include "function.h"
62 #include "target.h"
63 #include "tm_p.h"
64 #include "target-def.h"
65 #include "ggc.h"
66 #include "hard-reg-set.h"
67 #include "regs.h"
68 #include "reload.h"
69 #include "optabs.h"
70 #include "recog.h"
71
72
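/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P: defer to the
   old GO_IF_LEGITIMATE_ADDRESS macro; targets that do not define it must
   override this hook.  */
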
73 bool
74 default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
75 rtx addr ATTRIBUTE_UNUSED,
76 bool strict ATTRIBUTE_UNUSED)
77 {
78 #ifdef GO_IF_LEGITIMATE_ADDRESS
79 /* Defer to the old implementation using a goto. */
80 if (strict)
81 return strict_memory_address_p (mode, addr);
82 else
83 return memory_address_p (mode, addr);
84 #else
85 gcc_unreachable ();
86 #endif
87 }
88
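/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL: use
   ASM_OUTPUT_EXTERNAL_LIBCALL if the target defines it, otherwise do
   nothing.  */
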
89 void
90 default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
91 {
92 #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
93   ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
94 #endif
95 }
96
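/* Return nonzero if the unspec expression X may trap: X is volatile, X does
   floating-point arithmetic while -ftrapping-math is in effect, or one of
   X's operands may trap.  */
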
97 int
98 default_unspec_may_trap_p (const_rtx x, unsigned flags)
99 {
100 int i;
101
102 if (GET_CODE (x) == UNSPEC_VOLATILE
103 /* Any floating arithmetic may trap. */
104 || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
105 && flag_trapping_math))
106 return 1;
107
108 for (i = 0; i < XVECLEN (x, 0); ++i)
109 {
110 if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
111 return 1;
112 }
113
114 return 0;
115 }
116
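/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE: leave MODE
   untouched except when FOR_RETURN is 2, in which case defer to
   promote_mode.  */
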
117 enum machine_mode
118 default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
119 enum machine_mode mode,
120 int *punsignedp ATTRIBUTE_UNUSED,
121 const_tree funtype ATTRIBUTE_UNUSED,
122 int for_return ATTRIBUTE_UNUSED)
123 {
124 if (for_return == 2)
125 return promote_mode (type, mode, punsignedp);
126 return mode;
127 }
128
129 enum machine_mode
130 default_promote_function_mode_always_promote (const_tree type,
131 enum machine_mode mode,
132 int *punsignedp,
133 const_tree funtype ATTRIBUTE_UNUSED,
134 int for_return ATTRIBUTE_UNUSED)
135 {
136 return promote_mode (type, mode, punsignedp);
137 }
138
139
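/* The default implementation of TARGET_CC_MODES_COMPATIBLE: two
   condition-code modes are compatible only if they are identical.  */
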
140 enum machine_mode
141 default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
142 {
143 if (m1 == m2)
144 return m1;
145 return VOIDmode;
146 }
147
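/* The default implementation of TARGET_RETURN_IN_MEMORY: return values in
   memory exactly when their mode is BLKmode.  */
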
148 bool
149 default_return_in_memory (const_tree type,
150 const_tree fntype ATTRIBUTE_UNUSED)
151 {
152 return (TYPE_MODE (type) == BLKmode);
153 }
154
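/* The default implementation of TARGET_LEGITIMIZE_ADDRESS: return the
   address unchanged.  */
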
155 rtx
156 default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
157 enum machine_mode mode ATTRIBUTE_UNUSED)
158 {
159 return x;
160 }
161
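/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS: report that
   __builtin_saveregs is not supported.  */
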
162 rtx
163 default_expand_builtin_saveregs (void)
164 {
165 error ("__builtin_saveregs not supported by this target");
166 return const0_rtx;
167 }
168
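/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do
   nothing.  */
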
169 void
170 default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
171 enum machine_mode mode ATTRIBUTE_UNUSED,
172 tree type ATTRIBUTE_UNUSED,
173 int *pretend_arg_size ATTRIBUTE_UNUSED,
174 int second_time ATTRIBUTE_UNUSED)
175 {
176 }
177
178 /* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
179
180 rtx
181 default_builtin_setjmp_frame_value (void)
182 {
183 return virtual_stack_vars_rtx;
184 }
185
186 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
187
188 bool
189 hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
190 {
191 return false;
192 }
193
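/* The default for TARGET_PRETEND_OUTGOING_VARARGS_NAMED: pretend that
   outgoing arguments are named whenever the target provides its own
   setup_incoming_varargs hook.  */
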
194 bool
195 default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
196 {
197 return (targetm.calls.setup_incoming_varargs
198 != default_setup_incoming_varargs);
199 }
200
201 enum machine_mode
202 default_eh_return_filter_mode (void)
203 {
204 return targetm.unwind_word_mode ();
205 }
206
207 enum machine_mode
208 default_libgcc_cmp_return_mode (void)
209 {
210 return word_mode;
211 }
212
213 enum machine_mode
214 default_libgcc_shift_count_mode (void)
215 {
216 return word_mode;
217 }
218
219 enum machine_mode
220 default_unwind_word_mode (void)
221 {
222 return word_mode;
223 }
224
225 /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
226
227 unsigned HOST_WIDE_INT
228 default_shift_truncation_mask (enum machine_mode mode)
229 {
230 return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
231 }
232
233 /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
234
235 unsigned int
236 default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
237 {
238 return have_insn_for (DIV, mode) ? 3 : 2;
239 }
240
241 /* The default implementation of TARGET_MODE_REP_EXTENDED. */
242
243 int
244 default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
245 enum machine_mode mode_rep ATTRIBUTE_UNUSED)
246 {
247 return UNKNOWN;
248 }
249
250 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
251
252 bool
253 hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
254 {
255 return true;
256 }
257
258 /* Return machine mode for non-standard suffix
259 or VOIDmode if non-standard suffixes are unsupported. */
260 enum machine_mode
261 default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
262 {
263 return VOIDmode;
264 }
265
266 /* The generic C++ ABI specifies this is a 64-bit value. */
267 tree
268 default_cxx_guard_type (void)
269 {
270 return long_long_integer_type_node;
271 }
272
273
274 /* Returns the size of the cookie to use when allocating an array
275 whose elements have the indicated TYPE. Assumes that it is already
276 known that a cookie is needed. */
277
278 tree
279 default_cxx_get_cookie_size (tree type)
280 {
281 tree cookie_size;
282
283   /* We need to allocate an additional max (sizeof (size_t), alignof (TYPE))
284      bytes.  */
285 tree sizetype_size;
286 tree type_align;
287
288 sizetype_size = size_in_bytes (sizetype);
289 type_align = size_int (TYPE_ALIGN_UNIT (type));
290 if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
291 cookie_size = sizetype_size;
292 else
293 cookie_size = type_align;
294
295 return cookie_size;
296 }
297
298 /* Return true if a parameter must be passed by reference. This version
299 of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
300
301 bool
302 hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
303 enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
304 bool named_arg ATTRIBUTE_UNUSED)
305 {
306 return targetm.calls.must_pass_in_stack (mode, type);
307 }
308
309 /* Return true if a parameter follows callee copies conventions. This
310 version of the hook is true for all named arguments. */
311
312 bool
313 hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
314 enum machine_mode mode ATTRIBUTE_UNUSED,
315 const_tree type ATTRIBUTE_UNUSED, bool named)
316 {
317 return named;
318 }
319
320 /* Emit to STREAM the assembler syntax for insn operand X. */
321
322 void
323 default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
324 int code ATTRIBUTE_UNUSED)
325 {
326 #ifdef PRINT_OPERAND
327 PRINT_OPERAND (stream, x, code);
328 #else
329 gcc_unreachable ();
330 #endif
331 }
332
333 /* Emit to STREAM the assembler syntax for an insn operand whose memory
334 address is X. */
335
336 void
337 default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
338 rtx x ATTRIBUTE_UNUSED)
339 {
340 #ifdef PRINT_OPERAND_ADDRESS
341 PRINT_OPERAND_ADDRESS (stream, x);
342 #else
343 gcc_unreachable ();
344 #endif
345 }
346
347 /* Return true if CODE is a valid punctuation character for the
348 `print_operand' hook. */
349
350 bool
351 default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
352 {
353 #ifdef PRINT_OPERAND_PUNCT_VALID_P
354 return PRINT_OPERAND_PUNCT_VALID_P (code);
355 #else
356 return false;
357 #endif
358 }
359
360 /* The default implementation of TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
361
362 bool
363 default_asm_output_addr_const_extra (FILE *file ATTRIBUTE_UNUSED,
364 rtx x ATTRIBUTE_UNUSED)
365 {
366 #ifdef OUTPUT_ADDR_CONST_EXTRA
367 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
368 return true;
369
370 fail:
371 #endif
372 return false;
373 }
374
375 /* True if MODE is valid for the target. By "valid", we mean able to
376 be manipulated in non-trivial ways. In particular, this means all
377 the arithmetic is supported.
378
379 By default we guess this means that any C type is supported. If
380 we can't map the mode back to a type that would be available in C,
381 then reject it. Special case, here, is the double-word arithmetic
382 supported by optabs.c. */
383
384 bool
385 default_scalar_mode_supported_p (enum machine_mode mode)
386 {
387 int precision = GET_MODE_PRECISION (mode);
388
389 switch (GET_MODE_CLASS (mode))
390 {
391 case MODE_PARTIAL_INT:
392 case MODE_INT:
393 if (precision == CHAR_TYPE_SIZE)
394 return true;
395 if (precision == SHORT_TYPE_SIZE)
396 return true;
397 if (precision == INT_TYPE_SIZE)
398 return true;
399 if (precision == LONG_TYPE_SIZE)
400 return true;
401 if (precision == LONG_LONG_TYPE_SIZE)
402 return true;
403 if (precision == 2 * BITS_PER_WORD)
404 return true;
405 return false;
406
407 case MODE_FLOAT:
408 if (precision == FLOAT_TYPE_SIZE)
409 return true;
410 if (precision == DOUBLE_TYPE_SIZE)
411 return true;
412 if (precision == LONG_DOUBLE_TYPE_SIZE)
413 return true;
414 return false;
415
416 case MODE_DECIMAL_FLOAT:
417 case MODE_FRACT:
418 case MODE_UFRACT:
419 case MODE_ACCUM:
420 case MODE_UACCUM:
421 return false;
422
423 default:
424 gcc_unreachable ();
425 }
426 }
427
428 /* Make some target macros usable by target-independent code. */
429 bool
430 targhook_words_big_endian (void)
431 {
432 return !!WORDS_BIG_ENDIAN;
433 }
434
435 bool
436 targhook_float_words_big_endian (void)
437 {
438 return !!FLOAT_WORDS_BIG_ENDIAN;
439 }
440
441 /* True if the target supports decimal floating point. */
442
443 bool
444 default_decimal_float_supported_p (void)
445 {
446 return ENABLE_DECIMAL_FLOAT;
447 }
448
449 /* True if the target supports fixed-point arithmetic. */
450
451 bool
452 default_fixed_point_supported_p (void)
453 {
454 return ENABLE_FIXED_POINT;
455 }
456
457 /* NULL if INSN is valid within a low-overhead loop, otherwise returns
458 an error message.
459
460 This function checks whether a given INSN is valid within a low-overhead
461 loop. If INSN is invalid it returns the reason for that, otherwise it
462 returns NULL. A called function may clobber any special registers required
463 for low-overhead looping. Additionally, some targets (e.g., PPC) use the count
464 register for branch on table instructions. We reject the doloop pattern in
465 these cases. */
466
467 const char *
468 default_invalid_within_doloop (const_rtx insn)
469 {
470 if (CALL_P (insn))
471 return "Function call in loop.";
472
473 if (JUMP_TABLE_DATA_P (insn))
474 return "Computed branch in the loop.";
475
476 return NULL;
477 }
478
479 /* Mapping of builtin functions to vectorized variants. */
480
481 tree
482 default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
483 tree type_out ATTRIBUTE_UNUSED,
484 tree type_in ATTRIBUTE_UNUSED)
485 {
486 return NULL_TREE;
487 }
488
489 /* Vectorized conversion. */
490
491 tree
492 default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
493 tree dest_type ATTRIBUTE_UNUSED,
494 tree src_type ATTRIBUTE_UNUSED)
495 {
496 return NULL_TREE;
497 }
498
499 /* Default vectorizer cost model values. */
500
501 int
502 default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
503 tree vectype ATTRIBUTE_UNUSED,
504 int misalign ATTRIBUTE_UNUSED)
505 {
506 switch (type_of_cost)
507 {
508 case scalar_stmt:
509 case scalar_load:
510 case scalar_store:
511 case vector_stmt:
512 case vector_load:
513 case vector_store:
514 case vec_to_scalar:
515 case scalar_to_vec:
516 case cond_branch_not_taken:
517 case vec_perm:
518 return 1;
519
520 case unaligned_load:
521 case unaligned_store:
522 return 2;
523
524 case cond_branch_taken:
525 return 3;
526
527 default:
528 gcc_unreachable ();
529 }
530 }
531
532 /* Reciprocal. */
533
534 tree
535 default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
536 bool md_fn ATTRIBUTE_UNUSED,
537 bool sqrt ATTRIBUTE_UNUSED)
538 {
539 return NULL_TREE;
540 }
541
542 bool
543 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
544 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
545 enum machine_mode mode ATTRIBUTE_UNUSED,
546 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
547 {
548 return false;
549 }
550
551 bool
552 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
553 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
554 enum machine_mode mode ATTRIBUTE_UNUSED,
555 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
556 {
557 return true;
558 }
559
560 int
561 hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
562 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
563 enum machine_mode mode ATTRIBUTE_UNUSED,
564 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
565 {
566 return 0;
567 }
568
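/* The default implementation of TARGET_FUNCTION_ARG_ADVANCE: defer to the
   old FUNCTION_ARG_ADVANCE macro; targets that do not define it must
   override this hook.  */
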
569 void
570 default_function_arg_advance (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
571 enum machine_mode mode ATTRIBUTE_UNUSED,
572 const_tree type ATTRIBUTE_UNUSED,
573 bool named ATTRIBUTE_UNUSED)
574 {
575 #ifdef FUNCTION_ARG_ADVANCE
576 CUMULATIVE_ARGS args = *ca;
577 FUNCTION_ARG_ADVANCE (args, mode, CONST_CAST_TREE (type), named);
578 *ca = args;
579 #else
580 gcc_unreachable ();
581 #endif
582 }
583
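/* The default implementation of TARGET_FUNCTION_ARG: defer to the old
   FUNCTION_ARG macro.  */
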
584 rtx
585 default_function_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
586 enum machine_mode mode ATTRIBUTE_UNUSED,
587 const_tree type ATTRIBUTE_UNUSED,
588 bool named ATTRIBUTE_UNUSED)
589 {
590 #ifdef FUNCTION_ARG
591 return FUNCTION_ARG (*ca, mode, CONST_CAST_TREE (type), named);
592 #else
593 gcc_unreachable ();
594 #endif
595 }
596
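/* The default implementation of TARGET_FUNCTION_INCOMING_ARG: defer to the
   old FUNCTION_INCOMING_ARG macro.  */
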
597 rtx
598 default_function_incoming_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
599 enum machine_mode mode ATTRIBUTE_UNUSED,
600 const_tree type ATTRIBUTE_UNUSED,
601 bool named ATTRIBUTE_UNUSED)
602 {
603 #ifdef FUNCTION_INCOMING_ARG
604 return FUNCTION_INCOMING_ARG (*ca, mode, CONST_CAST_TREE (type), named);
605 #else
606 gcc_unreachable ();
607 #endif
608 }
609
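/* The default implementation of TARGET_FUNCTION_ARG_BOUNDARY: align all
   arguments to PARM_BOUNDARY.  */
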
610 unsigned int
611 default_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
612 const_tree type ATTRIBUTE_UNUSED)
613 {
614 return PARM_BOUNDARY;
615 }
616
617 void
618 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
619 {
620 }
621
622 const char *
623 hook_invalid_arg_for_unprototyped_fn (
624 const_tree typelist ATTRIBUTE_UNUSED,
625 const_tree funcdecl ATTRIBUTE_UNUSED,
626 const_tree val ATTRIBUTE_UNUSED)
627 {
628 return NULL;
629 }
630
631 /* Initialize the stack protection decls. */
632
633 /* Stack protection related decls living in libgcc. */
634 static GTY(()) tree stack_chk_guard_decl;
635
636 tree
637 default_stack_protect_guard (void)
638 {
639 tree t = stack_chk_guard_decl;
640
641 if (t == NULL)
642 {
643 rtx x;
644
645 t = build_decl (UNKNOWN_LOCATION,
646 VAR_DECL, get_identifier ("__stack_chk_guard"),
647 ptr_type_node);
648 TREE_STATIC (t) = 1;
649 TREE_PUBLIC (t) = 1;
650 DECL_EXTERNAL (t) = 1;
651 TREE_USED (t) = 1;
652 TREE_THIS_VOLATILE (t) = 1;
653 DECL_ARTIFICIAL (t) = 1;
654 DECL_IGNORED_P (t) = 1;
655
656 /* Do not share RTL as the declaration is visible outside of
657 current function. */
658 x = DECL_RTL (t);
659 RTX_FLAG (x, used) = 1;
660
661 stack_chk_guard_decl = t;
662 }
663
664 return t;
665 }
666
667 static GTY(()) tree stack_chk_fail_decl;
668
669 tree
670 default_external_stack_protect_fail (void)
671 {
672 tree t = stack_chk_fail_decl;
673
674 if (t == NULL_TREE)
675 {
676 t = build_function_type_list (void_type_node, NULL_TREE);
677 t = build_decl (UNKNOWN_LOCATION,
678 FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
679 TREE_STATIC (t) = 1;
680 TREE_PUBLIC (t) = 1;
681 DECL_EXTERNAL (t) = 1;
682 TREE_USED (t) = 1;
683 TREE_THIS_VOLATILE (t) = 1;
684 TREE_NOTHROW (t) = 1;
685 DECL_ARTIFICIAL (t) = 1;
686 DECL_IGNORED_P (t) = 1;
687 DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
688 DECL_VISIBILITY_SPECIFIED (t) = 1;
689
690 stack_chk_fail_decl = t;
691 }
692
693 return build_call_expr (t, 0);
694 }
695
696 tree
697 default_hidden_stack_protect_fail (void)
698 {
699 #ifndef HAVE_GAS_HIDDEN
700 return default_external_stack_protect_fail ();
701 #else
702 tree t = stack_chk_fail_decl;
703
704 if (!flag_pic)
705 return default_external_stack_protect_fail ();
706
707 if (t == NULL_TREE)
708 {
709 t = build_function_type_list (void_type_node, NULL_TREE);
710 t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
711 get_identifier ("__stack_chk_fail_local"), t);
712 TREE_STATIC (t) = 1;
713 TREE_PUBLIC (t) = 1;
714 DECL_EXTERNAL (t) = 1;
715 TREE_USED (t) = 1;
716 TREE_THIS_VOLATILE (t) = 1;
717 TREE_NOTHROW (t) = 1;
718 DECL_ARTIFICIAL (t) = 1;
719 DECL_IGNORED_P (t) = 1;
720 DECL_VISIBILITY_SPECIFIED (t) = 1;
721 DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
722
723 stack_chk_fail_decl = t;
724 }
725
726 return build_call_expr (t, 0);
727 #endif
728 }
729
730 bool
731 hook_bool_const_rtx_commutative_p (const_rtx x,
732 int outer_code ATTRIBUTE_UNUSED)
733 {
734 return COMMUTATIVE_P (x);
735 }
736
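/* The default implementation of TARGET_FUNCTION_VALUE: defer to the old
   FUNCTION_VALUE macro, which cannot accept a function type in place of a
   declaration.  */
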
737 rtx
738 default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
739 const_tree fn_decl_or_type,
740 bool outgoing ATTRIBUTE_UNUSED)
741 {
742 /* The old interface doesn't handle receiving the function type. */
743 if (fn_decl_or_type
744 && !DECL_P (fn_decl_or_type))
745 fn_decl_or_type = NULL;
746
747 #ifdef FUNCTION_VALUE
748 return FUNCTION_VALUE (ret_type, fn_decl_or_type);
749 #else
750 gcc_unreachable ();
751 #endif
752 }
753
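/* The default implementation of TARGET_LIBCALL_VALUE: defer to the old
   LIBCALL_VALUE macro, which only looks at the mode.  */
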
754 rtx
755 default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
756 const_rtx fun ATTRIBUTE_UNUSED)
757 {
758 #ifdef LIBCALL_VALUE
759 return LIBCALL_VALUE (mode);
760 #else
761 gcc_unreachable ();
762 #endif
763 }
764
765 /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
766
767 bool
768 default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
769 {
770 #ifdef FUNCTION_VALUE_REGNO_P
771 return FUNCTION_VALUE_REGNO_P (regno);
772 #else
773 gcc_unreachable ();
774 #endif
775 }
776
777 rtx
778 default_internal_arg_pointer (void)
779 {
780 /* If the reg that the virtual arg pointer will be translated into is
781 not a fixed reg or is the stack pointer, make a copy of the virtual
782 arg pointer, and address parms via the copy. The frame pointer is
783 considered fixed even though it is not marked as such. */
784 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
785 || ! (fixed_regs[ARG_POINTER_REGNUM]
786 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
787 return copy_to_reg (virtual_incoming_args_rtx);
788 else
789 return virtual_incoming_args_rtx;
790 }
791
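/* The default implementation of TARGET_STATIC_CHAIN: use the
   STATIC_CHAIN_REGNUM and STATIC_CHAIN_INCOMING_REGNUM macros if the target
   defines them; otherwise nested functions are not supported.  */
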
792 rtx
793 default_static_chain (const_tree fndecl, bool incoming_p)
794 {
795 if (!DECL_STATIC_CHAIN (fndecl))
796 return NULL;
797
798 if (incoming_p)
799 {
800 #ifdef STATIC_CHAIN_INCOMING_REGNUM
801 return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
802 #endif
803 }
804
805 #ifdef STATIC_CHAIN_REGNUM
806 return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
807 #endif
808
809 {
810 static bool issued_error;
811 if (!issued_error)
812 {
813 issued_error = true;
814 sorry ("nested functions not supported on this target");
815 }
816
817     /* It really doesn't matter what we return here, so long as it
818 doesn't cause the rest of the compiler to crash. */
819 return gen_rtx_MEM (Pmode, stack_pointer_rtx);
820 }
821 }
822
823 void
824 default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
825 rtx ARG_UNUSED (r_chain))
826 {
827 sorry ("nested function trampolines not supported on this target");
828 }
829
830 int
831 default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
832 tree funtype ATTRIBUTE_UNUSED,
833 int size ATTRIBUTE_UNUSED)
834 {
835 return 0;
836 }
837
838 reg_class_t
839 default_branch_target_register_class (void)
840 {
841 return NO_REGS;
842 }
843
844 #ifdef IRA_COVER_CLASSES
845 const reg_class_t *
846 default_ira_cover_classes (void)
847 {
848 static reg_class_t classes[] = IRA_COVER_CLASSES;
849 return classes;
850 }
851 #endif
852
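/* The default implementation of TARGET_SECONDARY_RELOAD, built on the old
   SECONDARY_INPUT_RELOAD_CLASS and SECONDARY_OUTPUT_RELOAD_CLASS macros and
   the reload_in/reload_out optabs.  */
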
853 reg_class_t
854 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
855 reg_class_t reload_class_i ATTRIBUTE_UNUSED,
856 enum machine_mode reload_mode ATTRIBUTE_UNUSED,
857 secondary_reload_info *sri)
858 {
859 enum reg_class rclass = NO_REGS;
860 enum reg_class reload_class = (enum reg_class) reload_class_i;
861
862 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
863 {
864 sri->icode = sri->prev_sri->t_icode;
865 return NO_REGS;
866 }
867 #ifdef SECONDARY_INPUT_RELOAD_CLASS
868 if (in_p)
869 rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
870 #endif
871 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
872 if (! in_p)
873 rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
874 #endif
875 if (rclass != NO_REGS)
876 {
877 enum insn_code icode
878 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
879 reload_mode);
880
881 if (icode != CODE_FOR_nothing
882 && insn_data[(int) icode].operand[in_p].predicate
883 && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
884 icode = CODE_FOR_nothing;
885 else if (icode != CODE_FOR_nothing)
886 {
887 const char *insn_constraint, *scratch_constraint;
888 char insn_letter, scratch_letter;
889 enum reg_class insn_class, scratch_class;
890
891 gcc_assert (insn_data[(int) icode].n_operands == 3);
892 insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
893 if (!*insn_constraint)
894 insn_class = ALL_REGS;
895 else
896 {
897 if (in_p)
898 {
899 gcc_assert (*insn_constraint == '=');
900 insn_constraint++;
901 }
902 insn_letter = *insn_constraint;
903 insn_class
904 = (insn_letter == 'r' ? GENERAL_REGS
905 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
906 insn_constraint));
907 gcc_assert (insn_class != NO_REGS);
908 }
909
910 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
911 /* The scratch register's constraint must start with "=&",
912 except for an input reload, where only "=" is necessary,
913 and where it might be beneficial to re-use registers from
914 the input. */
915 gcc_assert (scratch_constraint[0] == '='
916 && (in_p || scratch_constraint[1] == '&'));
917 scratch_constraint++;
918 if (*scratch_constraint == '&')
919 scratch_constraint++;
920 scratch_letter = *scratch_constraint;
921 scratch_class
922 = (scratch_letter == 'r' ? GENERAL_REGS
923 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
924 scratch_constraint));
925
926 if (reg_class_subset_p (reload_class, insn_class))
927 {
928 gcc_assert (scratch_class == rclass);
929 rclass = NO_REGS;
930 }
931 else
932 rclass = insn_class;
933
934 }
935 if (rclass == NO_REGS)
936 sri->icode = icode;
937 else
938 sri->t_icode = icode;
939 }
940 return rclass;
941 }
942
943 bool
944 default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
945 const char *arg ATTRIBUTE_UNUSED,
946 int value ATTRIBUTE_UNUSED)
947 {
948 return false;
949 }
950
951 /* By default, if flag_pic is true, then neither local nor global relocs
952 should be placed in readonly memory. */
953
954 int
955 default_reloc_rw_mask (void)
956 {
957 return flag_pic ? 3 : 0;
958 }
959
960 /* By default, do no modification. */
961 tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
962 tree id)
963 {
964 return id;
965 }
966
967 bool
968 default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
969 {
970 if (is_packed)
971 return false;
972
973   /* Assume that types whose size is greater than the pointer size are not
974      guaranteed to be naturally aligned.  */
975 if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
976 return false;
977
978   /* Assume that types whose size is less than or equal to the pointer size
979      are naturally aligned.  */
980 return true;
981 }
982
983 /* By default, assume that a target supports misaligned memory accesses of
984    any factor if it provides a movmisalign pattern.
985 is_packed is true if the memory access is defined in a packed struct. */
986 bool
987 default_builtin_support_vector_misalignment (enum machine_mode mode,
988 const_tree type
989 ATTRIBUTE_UNUSED,
990 int misalignment
991 ATTRIBUTE_UNUSED,
992 bool is_packed
993 ATTRIBUTE_UNUSED)
994 {
995 if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
996 return true;
997 return false;
998 }
999
1000 /* By default, only attempt to parallelize bitwise operations, and
1001 possibly adds/subtracts using bit-twiddling. */
1002
1003 enum machine_mode
1004 default_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
1005 {
1006 return word_mode;
1007 }
1008
1009 /* By default only the size derived from the preferred vector mode
1010 is tried. */
1011
1012 unsigned int
1013 default_autovectorize_vector_sizes (void)
1014 {
1015 return 0;
1016 }
1017
1018 /* Determine whether or not a pointer mode is valid. Assume defaults
1019 of ptr_mode or Pmode - can be overridden. */
1020 bool
1021 default_valid_pointer_mode (enum machine_mode mode)
1022 {
1023 return (mode == ptr_mode || mode == Pmode);
1024 }
1025
1026 /* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
1027 for the generic address space only. */
1028
1029 enum machine_mode
1030 default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1031 {
1032 gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
1033 return ptr_mode;
1034 }
1035
1036 /* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
1037 for the generic address space only. */
1038
1039 enum machine_mode
1040 default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1041 {
1042 gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
1043 return Pmode;
1044 }
1045
1046 /* Named address space version of valid_pointer_mode. */
1047
1048 bool
1049 default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
1050 {
1051 if (!ADDR_SPACE_GENERIC_P (as))
1052 return (mode == targetm.addr_space.pointer_mode (as)
1053 || mode == targetm.addr_space.address_mode (as));
1054
1055 return targetm.valid_pointer_mode (mode);
1056 }
1057
1058 /* Some places still assume that all pointer or address modes are the
1059 standard Pmode and ptr_mode. These optimizations become invalid if
1060 the target actually supports multiple different modes. For now,
1061 we disable such optimizations on such targets, using this function. */
1062
1063 bool
1064 target_default_pointer_address_modes_p (void)
1065 {
1066 if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1067 return false;
1068 if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1069 return false;
1070
1071 return true;
1072 }
1073
1074 /* Named address space version of legitimate_address_p. */
1075
1076 bool
1077 default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
1078 bool strict, addr_space_t as)
1079 {
1080 if (!ADDR_SPACE_GENERIC_P (as))
1081 gcc_unreachable ();
1082
1083 return targetm.legitimate_address_p (mode, mem, strict);
1084 }
1085
1086 /* Named address space version of LEGITIMIZE_ADDRESS. */
1087
1088 rtx
1089 default_addr_space_legitimize_address (rtx x, rtx oldx,
1090 enum machine_mode mode, addr_space_t as)
1091 {
1092 if (!ADDR_SPACE_GENERIC_P (as))
1093 return x;
1094
1095 return targetm.legitimize_address (x, oldx, mode);
1096 }
1097
1098 /* The default hook for determining if one named address space is a subset of
1099 another and to return which address space to use as the common address
1100 space. */
1101
1102 bool
1103 default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1104 {
1105 return (subset == superset);
1106 }
1107
1108 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1109 called for targets with only a generic address space. */
1110
1111 rtx
1112 default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
1113 tree from_type ATTRIBUTE_UNUSED,
1114 tree to_type ATTRIBUTE_UNUSED)
1115 {
1116 gcc_unreachable ();
1117 }
1118
1119 bool
1120 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
1121 {
1122 return true;
1123 }
1124
1125 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1126
1127 bool
1128 default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED)
1129 {
1130 #ifdef GO_IF_MODE_DEPENDENT_ADDRESS
1131
1132 GO_IF_MODE_DEPENDENT_ADDRESS (CONST_CAST_RTX (addr), win);
1133 return false;
1134 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1135 win: ATTRIBUTE_UNUSED_LABEL
1136 return true;
1137
1138 #else
1139
1140 return false;
1141
1142 #endif
1143 }
1144
1145 bool
1146 default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
1147 tree ARG_UNUSED (name),
1148 tree ARG_UNUSED (args),
1149 int ARG_UNUSED (flags))
1150 {
1151 warning (OPT_Wattributes,
1152 "target attribute is not supported on this machine");
1153
1154 return false;
1155 }
1156
1157 bool
1158 default_target_option_pragma_parse (tree ARG_UNUSED (args),
1159 tree ARG_UNUSED (pop_target))
1160 {
1161 warning (OPT_Wpragmas,
1162 "#pragma GCC target is not supported for this machine");
1163
1164 return false;
1165 }
1166
1167 bool
1168 default_target_can_inline_p (tree caller, tree callee)
1169 {
1170 bool ret = false;
1171 tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1172 tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1173
1174   /* If callee has no option attributes, then it is ok to inline.  */
1175 if (!callee_opts)
1176 ret = true;
1177
1178   /* If caller has no option attributes, but callee does, then it is not ok
1179      to inline.  */
1180 else if (!caller_opts)
1181 ret = false;
1182
1183   /* If both caller and callee have attributes, assume that if the pointers are
1184      different, the two functions have different target options since
1185      build_target_option_node uses a hash table for the options.  */
1186 else
1187 ret = (callee_opts == caller_opts);
1188
1189 return ret;
1190 }
1191
1192 #ifndef HAVE_casesi
1193 # define HAVE_casesi 0
1194 #endif
1195
1196 /* If the machine does not have a case insn that compares the bounds,
1197 this means extra overhead for dispatch tables, which raises the
1198 threshold for using them. */
1199
1200 unsigned int default_case_values_threshold (void)
1201 {
1202 return (HAVE_casesi ? 4 : 5);
1203 }
1204
1205 bool
1206 default_have_conditional_execution (void)
1207 {
1208 #ifdef HAVE_conditional_execution
1209 return HAVE_conditional_execution;
1210 #else
1211 return false;
1212 #endif
1213 }
1214
1215 /* Compute cost of moving registers to/from memory. */
1216
1217 int
1218 default_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1219 reg_class_t rclass ATTRIBUTE_UNUSED,
1220 bool in ATTRIBUTE_UNUSED)
1221 {
1222 #ifndef MEMORY_MOVE_COST
1223 return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
1224 #else
1225 return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
1226 #endif
1227 }
1228
1229 /* Compute cost of moving data from a register of class FROM to one of
1230 TO, using MODE. */
1231
1232 int
1233 default_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1234 reg_class_t from ATTRIBUTE_UNUSED,
1235 reg_class_t to ATTRIBUTE_UNUSED)
1236 {
1237 #ifndef REGISTER_MOVE_COST
1238 return 2;
1239 #else
1240 return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
1241 #endif
1242 }
1243
1244 bool
1245 default_profile_before_prologue (void)
1246 {
1247 #ifdef PROFILE_BEFORE_PROLOGUE
1248 return true;
1249 #else
1250 return false;
1251 #endif
1252 }
1253
1254 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1255
1256 reg_class_t
1257 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
1258 reg_class_t rclass)
1259 {
1260 #ifdef PREFERRED_RELOAD_CLASS
1261 return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
1262 #else
1263 return rclass;
1264 #endif
1265 }
1266
1267 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1268
1269 reg_class_t
1270 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
1271 reg_class_t rclass)
1272 {
1273 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1274 return PREFERRED_OUTPUT_RELOAD_CLASS (x, (enum reg_class) rclass);
1275 #else
1276 return rclass;
1277 #endif
1278 }
1279
1280 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1281
1282 bool
1283 default_class_likely_spilled_p (reg_class_t rclass)
1284 {
1285 return (reg_class_size[(int) rclass] == 1);
1286 }
1287
1288 /* Determine the debugging unwind mechanism for the target. */
1289
1290 enum unwind_info_type
1291 default_debug_unwind_info (void)
1292 {
1293 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1294 /* ??? Change all users to the hook, then poison this. */
1295 #ifdef DWARF2_FRAME_INFO
1296 if (DWARF2_FRAME_INFO)
1297 return UI_DWARF2;
1298 #endif
1299
1300 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1301 #ifdef DWARF2_DEBUGGING_INFO
1302 if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
1303 return UI_DWARF2;
1304 #endif
1305
1306 return UI_NONE;
1307 }
1308
1309 /* Determine the exception handling mechanism for the target. */
1310
1311 enum unwind_info_type
1312 default_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1313 {
1314 /* Obey the configure switch to turn on sjlj exceptions. */
1315 #ifdef CONFIG_SJLJ_EXCEPTIONS
1316 if (CONFIG_SJLJ_EXCEPTIONS)
1317 return UI_SJLJ;
1318 #endif
1319
1320 /* ??? Change all users to the hook, then poison this. */
1321 #ifdef DWARF2_UNWIND_INFO
1322 if (DWARF2_UNWIND_INFO)
1323 return UI_DWARF2;
1324 #endif
1325
1326 return UI_SJLJ;
1327 }
1328
1329 /* To be used by targets that force dwarf2 unwind enabled. */
1330
1331 enum unwind_info_type
1332 dwarf2_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1333 {
1334 /* Obey the configure switch to turn on sjlj exceptions. */
1335 #ifdef CONFIG_SJLJ_EXCEPTIONS
1336 if (CONFIG_SJLJ_EXCEPTIONS)
1337 return UI_SJLJ;
1338 #endif
1339
1340 return UI_DWARF2;
1341 }
1342
1343 /* To be used by targets that force sjlj unwind enabled. */
1344
1345 enum unwind_info_type
1346 sjlj_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1347 {
1348 return UI_SJLJ;
1349 }
1350
1351 /* To be used by targets where reg_raw_mode doesn't return the right
1352 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1353
1354 enum machine_mode
1355 default_get_reg_raw_mode (int regno)
1356 {
1357 return reg_raw_mode[regno];
1358 }
1359
1360 const struct default_options empty_optimization_table[] =
1361 {
1362 { OPT_LEVELS_NONE, 0, NULL, 0 }
1363 };
1364
1365 #include "gt-targhooks.h"