re PR bootstrap/44335 (gcc-4.6-20100529 java bootstrap failure on arm-linux-gnueabi)
[gcc.git] / gcc / targhooks.c
1 /* Default target hook functions.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* The migration of target macros to target hooks works as follows:
22
23 1. Create a target hook that uses the existing target macros to
24 implement the same functionality.
25
26 2. Convert all the MI files to use the hook instead of the macro.
27
28 3. Repeat for a majority of the remaining target macros. This will
29 take some time.
30
31 4. Tell target maintainers to start migrating.
32
33 5. Eventually convert the backends to override the hook instead of
34 defining the macros. This will take some time too.
35
36 6. TBD when, poison the macros. Unmigrated targets will break at
37 this point.
38
39 Note that we expect steps 1-3 to be done by the people that
40 understand what the MI does with each macro, and step 5 to be done
41 by the target maintainers for their respective targets.
42
43 Note that steps 1 and 2 don't have to be done together, but no
44 target can override the new hook until step 2 is complete for it.
45
46 Once the macros are poisoned, we will revert to the old migration
47 rules - migrate the macro, callers, and targets all at once. This
48 comment can thus be removed at that point. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "machmode.h"
55 #include "rtl.h"
56 #include "tree.h"
57 #include "expr.h"
58 #include "output.h"
59 #include "diagnostic-core.h"
60 #include "toplev.h"
61 #include "function.h"
62 #include "target.h"
63 #include "tm_p.h"
64 #include "target-def.h"
65 #include "ggc.h"
66 #include "hard-reg-set.h"
67 #include "regs.h"
68 #include "reload.h"
69 #include "optabs.h"
70 #include "recog.h"
71
72
/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P.  Defers to
   the legacy GO_IF_LEGITIMATE_ADDRESS target macro (via the strict and
   non-strict wrappers); a target defining neither must override the
   hook, hence the gcc_unreachable in the fallback arm.  */
bool
default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                              rtx addr ATTRIBUTE_UNUSED,
                              bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}
88
/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL: emit an
   assembler declaration for the external libcall symbol FUN when the
   target provides ASM_OUTPUT_EXTERNAL_LIBCALL; otherwise do nothing.  */
void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL(asm_out_file, fun);
#endif
}
96
/* The default implementation of TARGET_UNSPEC_MAY_TRAP_P.  Return
   nonzero if the UNSPEC or UNSPEC_VOLATILE rtx X may trap: volatile
   unspecs and floating-point unspecs (under -ftrapping-math) are
   conservatively assumed trapping, and otherwise each element of the
   unspec's operand vector is checked recursively.  */
int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  if (GET_CODE (x) == UNSPEC_VOLATILE
      /* Any floating arithmetic may trap.  */
      || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math))
    return 1;

  /* Recurse into the operands of the unspec.  */
  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
        return 1;
    }

  return 0;
}
116
/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE.  Only
   promote when FOR_RETURN == 2, i.e. for a libcall return value;
   ordinary arguments and function return values keep MODE.  */
enum machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
                               enum machine_mode mode,
                               int *punsignedp ATTRIBUTE_UNUSED,
                               const_tree funtype ATTRIBUTE_UNUSED,
                               int for_return ATTRIBUTE_UNUSED)
{
  /* FOR_RETURN == 2 marks a libcall value, which is always promoted
     the same way promote_mode promotes an ordinary variable.  */
  if (for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

/* Variant of the above for targets that promote function arguments and
   return values in all situations, exactly as promote_mode would
   promote a variable of the given TYPE and MODE.  */
enum machine_mode
default_promote_function_mode_always_promote (const_tree type,
                                              enum machine_mode mode,
                                              int *punsignedp,
                                              const_tree funtype ATTRIBUTE_UNUSED,
                                              int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
138
139
140 enum machine_mode
141 default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
142 {
143 if (m1 == m2)
144 return m1;
145 return VOIDmode;
146 }
147
/* The default implementation of TARGET_RETURN_IN_MEMORY: a value is
   returned in memory exactly when its type has no scalar/vector mode
   (BLKmode).  */
bool
default_return_in_memory (const_tree type,
                          const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

/* The default implementation of TARGET_LEGITIMIZE_ADDRESS: perform no
   transformation and return the address unchanged.  */
rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
                            enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS: issue
   an error, since the target provides no register-save mechanism, and
   return a harmless constant so expansion can continue.  */
rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}
168
/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do
   nothing.  Targets that pass anonymous arguments in registers
   override this to spill them to the stack.  */
void
default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                                enum machine_mode mode ATTRIBUTE_UNUSED,
                                tree type ATTRIBUTE_UNUSED,
                                int *pretend_arg_size ATTRIBUTE_UNUSED,
                                int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  /* Save the virtual frame base; it is what the generic builtin
     setjmp/longjmp machinery expects.  */
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default for TARGET_PRETEND_OUTGOING_VARARGS_NAMED: treat the
   anonymous arguments as named only when the target actually
   implements setup_incoming_varargs (i.e. overrode the default).  */
bool
default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
          != default_setup_incoming_varargs);
}
200
/* The default mode for the EH filter value: the same mode the target
   uses for unwinding words.  */
enum machine_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

/* The default return mode for libgcc comparison helpers.  */
enum machine_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

/* The default mode for shift counts passed to libgcc shift helpers.  */
enum machine_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

/* The default mode of a word as used by the EH unwinder.  */
enum machine_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (enum machine_mode mode)
{
  /* A zero mask means shift counts are not truncated.  */
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}
232
/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL:
   how many divisions by the same divisor make a reciprocal-multiply
   transformation profitable.  */

unsigned int
default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Hardware division makes the reciprocal less attractive, so
     require one more use before converting.  */
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED: the target
   makes no promise about how values are extended in wider registers.  */

int
default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
                           enum machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard constant literal suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
enum machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
265
/* The default type of the C++ one-time-construction guard variable.
   The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}


/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  /* Pick whichever of the two byte counts is larger (unsigned
     comparison of the INTEGER_CST nodes).  */
  if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
297
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
        bool named_arg ATTRIBUTE_UNUSED)
{
  /* Anything that must live on the stack is passed by reference.  */
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                          enum machine_mode mode ATTRIBUTE_UNUSED,
                          const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}
319
/* Emit to STREAM the assembler syntax for insn operand X.  Defers to
   the legacy PRINT_OPERAND macro; a target providing neither must
   override the hook.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  Defers to the legacy PRINT_OPERAND_ADDRESS macro.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
                               rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  Defers to the legacy macro; without it, no
   punctuation characters are accepted.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
359
/* The default implementation of TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA.
   The legacy OUTPUT_ADDR_CONST_EXTRA macro follows a goto protocol:
   it jumps to the label named by its third argument (`fail' here)
   when it cannot handle X, and falls through after emitting X
   otherwise.  */

bool
default_asm_output_addr_const_extra (FILE *file ATTRIBUTE_UNUSED,
                                     rtx x ATTRIBUTE_UNUSED)
{
#ifdef OUTPUT_ADDR_CONST_EXTRA
  OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
  return true;

 fail:
#endif
  return false;
}
374
375 /* True if MODE is valid for the target. By "valid", we mean able to
376 be manipulated in non-trivial ways. In particular, this means all
377 the arithmetic is supported.
378
379 By default we guess this means that any C type is supported. If
380 we can't map the mode back to a type that would be available in C,
381 then reject it. Special case, here, is the double-word arithmetic
382 supported by optabs.c. */
383
384 bool
385 default_scalar_mode_supported_p (enum machine_mode mode)
386 {
387 int precision = GET_MODE_PRECISION (mode);
388
389 switch (GET_MODE_CLASS (mode))
390 {
391 case MODE_PARTIAL_INT:
392 case MODE_INT:
393 if (precision == CHAR_TYPE_SIZE)
394 return true;
395 if (precision == SHORT_TYPE_SIZE)
396 return true;
397 if (precision == INT_TYPE_SIZE)
398 return true;
399 if (precision == LONG_TYPE_SIZE)
400 return true;
401 if (precision == LONG_LONG_TYPE_SIZE)
402 return true;
403 if (precision == 2 * BITS_PER_WORD)
404 return true;
405 return false;
406
407 case MODE_FLOAT:
408 if (precision == FLOAT_TYPE_SIZE)
409 return true;
410 if (precision == DOUBLE_TYPE_SIZE)
411 return true;
412 if (precision == LONG_DOUBLE_TYPE_SIZE)
413 return true;
414 return false;
415
416 case MODE_DECIMAL_FLOAT:
417 case MODE_FRACT:
418 case MODE_UFRACT:
419 case MODE_ACCUM:
420 case MODE_UACCUM:
421 return false;
422
423 default:
424 gcc_unreachable ();
425 }
426 }
427
/* Make some target macros useable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

/* Word-ordering of multi-word floating-point values, as a hook.  */
bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}
456
457 /* NULL if INSN insn is valid within a low-overhead loop, otherwise returns
458 an error message.
459
460 This function checks whether a given INSN is valid within a low-overhead
461 loop. If INSN is invalid it returns the reason for that, otherwise it
462 returns NULL. A called function may clobber any special registers required
463 for low-overhead looping. Additionally, some targets (eg, PPC) use the count
464 register for branch on table instructions. We reject the doloop pattern in
465 these cases. */
466
467 const char *
468 default_invalid_within_doloop (const_rtx insn)
469 {
470 if (CALL_P (insn))
471 return "Function call in loop.";
472
473 if (JUMP_TABLE_DATA_P (insn))
474 return "Computed branch in the loop.";
475
476 return NULL;
477 }
478
/* Mapping of builtin functions to vectorized variants: by default
   there is none, so return NULL_TREE.  */

tree
default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
                                     tree type_out ATTRIBUTE_UNUSED,
                                     tree type_in ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Vectorized conversion: no target builtin available by default.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
                                       tree dest_type ATTRIBUTE_UNUSED,
                                       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
498
/* Default vectorizer cost model values: most statements cost 1,
   misaligned accesses cost 2, and a taken conditional branch costs 3.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
                                    tree vectype ATTRIBUTE_UNUSED,
                                    int misalign ATTRIBUTE_UNUSED)
{
  switch (type_of_cost)
    {
    case scalar_stmt:
    case scalar_load:
    case scalar_store:
    case vector_stmt:
    case vector_load:
    case vector_store:
    case vec_to_scalar:
    case scalar_to_vec:
    case cond_branch_not_taken:
    case vec_perm:
      return 1;

    case unaligned_load:
    case unaligned_store:
      return 2;

    case cond_branch_taken:
      return 3;

    default:
      gcc_unreachable ();
    }
}
531
/* Reciprocal: no target builtin computes a reciprocal of FN by
   default.  */

tree
default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
                            bool md_fn ATTRIBUTE_UNUSED,
                            bool sqrt ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Generic (CUMULATIVE_ARGS *, mode, tree, bool) hook returning false.  */
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

/* Generic (CUMULATIVE_ARGS *, mode, tree, bool) hook returning true.  */
bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

/* Generic (CUMULATIVE_ARGS *, mode, tree, bool) hook returning 0.  */
int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
        CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
        enum machine_mode mode ATTRIBUTE_UNUSED,
        tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}
568
/* The default implementation of TARGET_FUNCTION_ARG_ADVANCE, built on
   the legacy FUNCTION_ARG_ADVANCE macro.  The macro may expect a
   modifiable lvalue of CUMULATIVE_ARGS (not a pointer), so work on a
   local copy and write the result back.  */
void
default_function_arg_advance (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                              enum machine_mode mode ATTRIBUTE_UNUSED,
                              const_tree type ATTRIBUTE_UNUSED,
                              bool named ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_ARG_ADVANCE
  CUMULATIVE_ARGS args = *ca;
  FUNCTION_ARG_ADVANCE (args, mode, CONST_CAST_TREE (type), named);
  *ca = args;
#else
  gcc_unreachable ();
#endif
}

/* The default implementation of TARGET_FUNCTION_ARG, built on the
   legacy FUNCTION_ARG macro.  */
rtx
default_function_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                      enum machine_mode mode ATTRIBUTE_UNUSED,
                      const_tree type ATTRIBUTE_UNUSED,
                      bool named ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_ARG
  return FUNCTION_ARG (*ca, mode, CONST_CAST_TREE (type), named);
#else
  gcc_unreachable ();
#endif
}

/* The default implementation of TARGET_FUNCTION_INCOMING_ARG, built on
   the legacy FUNCTION_INCOMING_ARG macro.  */
rtx
default_function_incoming_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
                               enum machine_mode mode ATTRIBUTE_UNUSED,
                               const_tree type ATTRIBUTE_UNUSED,
                               bool named ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_INCOMING_ARG
  return FUNCTION_INCOMING_ARG (*ca, mode, CONST_CAST_TREE (type), named);
#else
  gcc_unreachable ();
#endif
}
609
/* Generic hook taking a bitmap and doing nothing with it.  */
void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

/* Default TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN hook: any argument is
   acceptable for a call through an unprototyped function, so return
   NULL (no error message).  */
const char *
hook_invalid_arg_for_unprototyped_fn (
        const_tree typelist ATTRIBUTE_UNUSED,
        const_tree funcdecl ATTRIBUTE_UNUSED,
        const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
623
/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

/* The default implementation of TARGET_STACK_PROTECT_GUARD: build (and
   cache) a VAR_DECL for the external libgcc variable
   __stack_chk_guard that holds the canary value.  */
tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
                      VAR_DECL, get_identifier ("__stack_chk_guard"),
                      ptr_type_node);
      /* Mark it as an externally defined, always-live, compiler
         generated variable whose reads must not be optimized away.  */
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
         current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}
659
/* Cached FUNCTION_DECL for the stack-smashing failure handler; shared
   with default_hidden_stack_protect_fail below.  */
static GTY(()) tree stack_chk_fail_decl;

/* The default implementation of TARGET_STACK_PROTECT_FAIL: build (and
   cache) a decl for libgcc's __stack_chk_fail and return a call to
   it.  */
tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail (void).  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
                      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
688
/* Variant of the above used when the failure handler should have
   hidden visibility (__stack_chk_fail_local), which avoids a PLT call
   in PIC code.  Falls back to the external handler when the assembler
   cannot emit .hidden or when not compiling PIC.  */
tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      /* void __stack_chk_fail_local (void), hidden.  */
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
722
/* Hook answering "is X a commutative operation?", ignoring the outer
   rtx code, by checking the rtx code's commutativity flag.  */
bool
hook_bool_const_rtx_commutative_p (const_rtx x,
                                   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}
729
/* The default implementation of TARGET_FUNCTION_VALUE, built on the
   legacy FUNCTION_VALUE macro.  */
rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
                        const_tree fn_decl_or_type,
                        bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

/* The default implementation of TARGET_LIBCALL_VALUE, built on the
   legacy LIBCALL_VALUE macro (which ignores the called function).  */
rtx
default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
                       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (mode);
#else
  gcc_unreachable ();
#endif
}
757
/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P, built on the
   legacy FUNCTION_VALUE_REGNO_P macro.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
769
/* The default implementation of TARGET_INTERNAL_ARG_POINTER: the rtx
   used to access incoming arguments inside the function body.  */
rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
             || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}
784
/* The default implementation of TARGET_STATIC_CHAIN: the location of
   the static chain of FNDECL, on the caller side (INCOMING_P false) or
   the callee side (INCOMING_P true).  Built on the legacy
   STATIC_CHAIN_REGNUM / STATIC_CHAIN_INCOMING_REGNUM macros; note the
   #ifdef blocks return early, so the trailing sorry () code is only
   reachable on targets defining neither macro.  */
rtx
default_static_chain (const_tree fndecl, bool incoming_p)
{
  /* Functions that do not use a static chain have no chain slot.  */
  if (!DECL_STATIC_CHAIN (fndecl))
    return NULL;

  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    /* Diagnose lack of nested-function support only once.  */
    static bool issued_error;
    if (!issued_error)
      {
        issued_error = true;
        sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long at it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}
815
/* The default implementation of TARGET_TRAMPOLINE_INIT: no trampoline
   support, so emit a "sorry" diagnostic.  */
void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
                         rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

/* The default implementation of TARGET_RETURN_POPS_ARGS: the callee
   pops no bytes of its arguments.  */
int
default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
                          tree funtype ATTRIBUTE_UNUSED,
                          int size ATTRIBUTE_UNUSED)
{
  return 0;
}
830
/* The default implementation of TARGET_BRANCH_TARGET_REGISTER_CLASS:
   no dedicated branch-target registers.  */
reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}

/* The default implementation of TARGET_IRA_COVER_CLASSES, available
   only when the target defines the IRA_COVER_CLASSES macro.  */
#ifdef IRA_COVER_CLASSES
const reg_class_t *
default_ira_cover_classes (void)
{
  static reg_class_t classes[] = IRA_COVER_CLASSES;
  return classes;
}
#endif
845
/* The default implementation of TARGET_SECONDARY_RELOAD.  Decide
   whether reloading X into a register of class RELOAD_CLASS_I in
   RELOAD_MODE needs an intermediate register, using the legacy
   SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS macros, and whether a target
   reload_in/reload_out pattern can perform the move; the chosen insn
   is communicated back through SRI.  */
reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
                          reg_class_t reload_class_i ATTRIBUTE_UNUSED,
                          enum machine_mode reload_mode ATTRIBUTE_UNUSED,
                          secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  /* A previous reload round already picked a tertiary insn; reuse it
     and report that no further secondary register is needed.  */
  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (rclass != NO_REGS)
    {
      /* Look for a reload_in/reload_out pattern for this mode.  */
      enum insn_code icode
        = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
                                reload_mode);

      /* Discard the pattern if X fails its operand predicate.  */
      if (icode != CODE_FOR_nothing
          && insn_data[(int) icode].operand[in_p].predicate
          && ! insn_data[(int) icode].operand[in_p].predicate (x, reload_mode))
        icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
        {
          const char *insn_constraint, *scratch_constraint;
          char insn_letter, scratch_letter;
          enum reg_class insn_class, scratch_class;

          /* The pattern has exactly three operands: destination/source,
             reloaded value, and a scratch register.  Parse the register
             classes out of the operand constraints.  */
          gcc_assert (insn_data[(int) icode].n_operands == 3);
          insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
          if (!*insn_constraint)
            insn_class = ALL_REGS;
          else
            {
              if (in_p)
                {
                  gcc_assert (*insn_constraint == '=');
                  insn_constraint++;
                }
              insn_letter = *insn_constraint;
              insn_class
                = (insn_letter == 'r' ? GENERAL_REGS
                   : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
                                                insn_constraint));
              gcc_assert (insn_class != NO_REGS);
            }

          scratch_constraint = insn_data[(int) icode].operand[2].constraint;
          /* The scratch register's constraint must start with "=&",
             except for an input reload, where only "=" is necessary,
             and where it might be beneficial to re-use registers from
             the input.  */
          gcc_assert (scratch_constraint[0] == '='
                      && (in_p || scratch_constraint[1] == '&'));
          scratch_constraint++;
          if (*scratch_constraint == '&')
            scratch_constraint++;
          scratch_letter = *scratch_constraint;
          scratch_class
            = (scratch_letter == 'r' ? GENERAL_REGS
               : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
                                            scratch_constraint));

          /* If the reload value already fits the insn's operand class,
             the pattern's scratch operand covers the secondary register
             and no extra class needs to be reported.  Otherwise the
             insn's operand class itself becomes the secondary class.  */
          if (reg_class_subset_p (reload_class, insn_class))
            {
              gcc_assert (scratch_class == rclass);
              rclass = NO_REGS;
            }
          else
            rclass = insn_class;

        }
      /* Report the insn: directly usable when no secondary register is
         needed, as a tertiary step otherwise.  */
      if (rclass == NO_REGS)
        sri->icode = icode;
      else
        sri->t_icode = icode;
    }
  return rclass;
}
935
/* The default implementation of TARGET_HANDLE_C_OPTION: the target
   recognizes no C-family-specific options.  */
bool
default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
                         const char *arg ATTRIBUTE_UNUSED,
                         int value ATTRIBUTE_UNUSED)
{
  return false;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  /* Bit 0 = local relocs, bit 1 = global relocs need writable
     sections.  */
  return flag_pic ? 3 : 0;
}

/* The default implementation of TARGET_MANGLE_DECL_ASSEMBLER_NAME:
   by default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
                                         tree id)
{
  return id;
}
959
960 bool
961 default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
962 {
963 if (is_packed)
964 return false;
965
966 /* Assuming that types whose size is > pointer-size are not guaranteed to be
967 naturally aligned. */
968 if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
969 return false;
970
971 /* Assuming that types whose size is <= pointer-size
972 are naturally aligned. */
973 return true;
974 }
975
976 /* By default, assume that a target supports any factor of misalignment
977 memory access if it supports movmisalign patten.
978 is_packed is true if the memory access is defined in a packed struct. */
979 bool
980 default_builtin_support_vector_misalignment (enum machine_mode mode,
981 const_tree type
982 ATTRIBUTE_UNUSED,
983 int misalignment
984 ATTRIBUTE_UNUSED,
985 bool is_packed
986 ATTRIBUTE_UNUSED)
987 {
988 if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
989 return true;
990 return false;
991 }
992
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

enum machine_mode
default_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* word_mode means "no real vector unit": the vectorizer falls back
     to operating on integer words.  */
  return word_mode;
}

/* By default only the size derived from the preferred vector mode
   is tried.  */

unsigned int
default_autovectorize_vector_sizes (void)
{
  /* A zero bitmask means "no additional vector sizes".  */
  return 0;
}
1010
/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (enum machine_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
   for the generic address space only.  */

enum machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Targets with extra address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
   for the generic address space only.  */

enum machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Targets with extra address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return Pmode;
}
1038
/* Named address space version of valid_pointer_mode.  For a non-generic
   space, accept exactly the space's own pointer and address modes;
   otherwise defer to the target's generic predicate.  */

bool
default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return (mode == targetm.addr_space.pointer_mode (as)
            || mode == targetm.addr_space.address_mode (as));

  return targetm.valid_pointer_mode (mode);
}
1050
1051 /* Some places still assume that all pointer or address modes are the
1052 standard Pmode and ptr_mode. These optimizations become invalid if
1053 the target actually supports multiple different modes. For now,
1054 we disable such optimizations on such targets, using this function. */
1055
1056 bool
1057 target_default_pointer_address_modes_p (void)
1058 {
1059 if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1060 return false;
1061 if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1062 return false;
1063
1064 return true;
1065 }
1066
/* Named address space version of legitimate_address_p.  Only the
   generic space is supported by default; defer to the target's
   generic-address hook.  */

bool
default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
                                         bool strict, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    gcc_unreachable ();

  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.  Non-generic
   addresses are returned unchanged; generic ones go through the
   target's generic legitimizer.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx,
                                       enum machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return x;

  return targetm.legitimize_address (x, oldx, mode);
}
1090
/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  By default, address spaces are disjoint: each is only a subset
   of itself.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
                            tree from_type ATTRIBUTE_UNUSED,
                            tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1111
/* The default for TARGET_HARD_REGNO_SCRATCH_OK: every hard register
   may be used as a scratch during peephole2.  */
bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.
   The legacy GO_IF_MODE_DEPENDENT_ADDRESS macro uses a goto protocol:
   it jumps to the named label (`win') when the address is
   mode-dependent and falls through otherwise.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_MODE_DEPENDENT_ADDRESS

  GO_IF_MODE_DEPENDENT_ADDRESS (CONST_CAST_RTX (addr), win);
  return false;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return true;

#else

  return false;

#endif
}
1137
/* The default for TARGET_OPTION_VALID_ATTRIBUTE_P: the target does not
   support the "target" function attribute; warn and reject it.  */
bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
                                         tree ARG_UNUSED (name),
                                         tree ARG_UNUSED (args),
                                         int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
           "target attribute is not supported on this machine");

  return false;
}

/* The default for TARGET_OPTION_PRAGMA_PARSE: #pragma GCC target is
   unsupported; warn and reject it.  */
bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
                                    tree ARG_UNUSED (pop_target))
{
  warning (OPT_Wpragmas,
           "#pragma GCC target is not supported for this machine");

  return false;
}
1159
/* The default for TARGET_CAN_INLINE_P: decide whether CALLEE may be
   inlined into CALLER based on their "target" option attributes.  */
bool
default_target_can_inline_p (tree caller, tree callee)
{
  bool ret = false;
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);

  /* If callee has no option attributes, then it is ok to inline */
  if (!callee_opts)
    ret = true;

  /* If caller has no option attributes, but callee does then it is not ok to
     inline */
  else if (!caller_opts)
    ret = false;

  /* If both caller and callee have attributes, assume that if the pointer is
     different, the two functions have different target options since
     build_target_option_node uses a hash table for the options.  */
  else
    ret = (callee_opts == caller_opts);

  return ret;
}
1184
/* Treat a missing casesi pattern as "not available".  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int default_case_values_threshold (void)
{
  return (HAVE_casesi ? 4 : 5);
}
1197
/* The default implementation of TARGET_HAVE_CONDITIONAL_EXECUTION:
   report whether the target machine description provides conditional
   execution of instructions.  */

bool
default_have_conditional_execution (void)
{
#ifndef HAVE_conditional_execution
  return false;
#else
  return HAVE_conditional_execution;
#endif
}
1207
/* Compute cost of moving registers to/from memory.
   The default implementation of TARGET_MEMORY_MOVE_COST: cost of a
   move in MODE between a register of class RCLASS and memory.
   NOTE(review): IN presumably distinguishes loads from stores --
   confirm against the hook documentation.  Without a target
   MEMORY_MOVE_COST macro, charge a base cost of 4 plus any
   secondary-reload cost.  */

int
default_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
#endif
}
1221
/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.
   The default implementation of TARGET_REGISTER_MOVE_COST: defer to a
   target REGISTER_MOVE_COST macro when one exists, otherwise use the
   baseline cost of 2 (a simple register-register move).  */

int
default_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
#endif
}
1236
/* The default implementation of TARGET_PROFILE_BEFORE_PROLOGUE:
   true when the target defines PROFILE_BEFORE_PROLOGUE, i.e. the
   profiling code is emitted before the function prologue.  */

bool
default_profile_before_prologue (void)
{
#ifndef PROFILE_BEFORE_PROLOGUE
  return false;
#else
  return true;
#endif
}
1246
/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.
   Return the register class to use when reloading value X into a
   register of class RCLASS: defer to a target PREFERRED_RELOAD_CLASS
   macro when one exists, otherwise keep RCLASS unchanged.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
			        reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}
1259
/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.
   Like default_preferred_reload_class, but for output reloads: defer
   to a target PREFERRED_OUTPUT_RELOAD_CLASS macro when one exists,
   otherwise keep RCLASS unchanged.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
#ifdef PREFERRED_OUTPUT_RELOAD_CLASS
  return PREFERRED_OUTPUT_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}
1272
1273 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1274
1275 bool
1276 default_class_likely_spilled_p (reg_class_t rclass)
1277 {
1278 return (reg_class_size[(int) rclass] == 1);
1279 }
1280
/* Determine the debugging unwind mechanism for the target.
   The default implementation of TARGET_DEBUG_UNWIND_INFO: emit dwarf2
   unwind info when the target forces it or when dwarf2 debugging is
   selected, otherwise emit none.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  /* No dwarf2 support is configured in: emit no debug unwind info.  */
  return UI_NONE;
}
1301
/* Determine the exception handling mechanism for the target.
   The default implementation of TARGET_EXCEPT_UNWIND_INFO.  Precedence:
   a target that must use setjmp/longjmp wins, then the configure-time
   sjlj switch, then dwarf2 unwind info when available; setjmp/longjmp
   is the final fallback.  */

enum unwind_info_type
default_except_unwind_info (void)
{
  /* ??? Change the one user to the hook, then poison this.  */
#ifdef MUST_USE_SJLJ_EXCEPTIONS
  if (MUST_USE_SJLJ_EXCEPTIONS)
    return UI_SJLJ;
#endif

  /* Obey the configure switch to turn on sjlj exceptions.  */
#ifdef CONFIG_SJLJ_EXCEPTIONS
  if (CONFIG_SJLJ_EXCEPTIONS)
    return UI_SJLJ;
#endif

  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_UNWIND_INFO
  if (DWARF2_UNWIND_INFO)
    return UI_DWARF2;
#endif

  /* Nothing better available: fall back to setjmp/longjmp.  */
  return UI_SJLJ;
}
1327
/* To be used by targets that force dwarf2 unwind enabled.
   The configure-time switch selecting sjlj exceptions still takes
   precedence over the target's preference.  */

enum unwind_info_type
dwarf2_except_unwind_info (void)
{
  /* Obey the configure switch to turn on sjlj exceptions.  */
#ifdef CONFIG_SJLJ_EXCEPTIONS
  if (CONFIG_SJLJ_EXCEPTIONS)
    return UI_SJLJ;
#endif

  return UI_DWARF2;
}
1341
/* To be used by targets that force sjlj unwind enabled.
   Unconditionally selects setjmp/longjmp-based exception unwinding.  */

enum unwind_info_type
sjlj_except_unwind_info (void)
{
  return UI_SJLJ;
}
1349
1350 /* To be used by targets where reg_raw_mode doesn't return the right
1351 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1352
1353 enum machine_mode
1354 default_get_reg_raw_mode(int regno)
1355 {
1356 return reg_raw_mode[regno];
1357 }
1358
/* An empty optimization-options table, for targets that set no
   per-optimization-level defaults.  The OPT_LEVELS_NONE entry
   terminates the table.  */
const struct default_options empty_optimization_table[] =
  {
    { OPT_LEVELS_NONE, 0, NULL, 0 }
  };
1363
1364 #include "gt-targhooks.h"