coretypes.h: Include machmode.h...
[gcc.git] / gcc / targhooks.c
1 /* Default target hook functions.
2 Copyright (C) 2003-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* The migration of target macros to target hooks works as follows:
21
22 1. Create a target hook that uses the existing target macros to
23 implement the same functionality.
24
25 2. Convert all the MI files to use the hook instead of the macro.
26
27 3. Repeat for a majority of the remaining target macros. This will
28 take some time.
29
30 4. Tell target maintainers to start migrating.
31
32 5. Eventually convert the backends to override the hook instead of
33 defining the macros. This will take some time too.
34
35 6. TBD when, poison the macros. Unmigrated targets will break at
36 this point.
37
38 Note that we expect steps 1-3 to be done by the people that
39 understand what the MI does with each macro, and step 5 to be done
40 by the target maintainers for their respective targets.
41
42 Note that steps 1 and 2 don't have to be done together, but no
43 target can override the new hook until step 2 is complete for it.
44
45 Once the macros are poisoned, we will revert to the old migration
46 rules - migrate the macro, callers, and targets all at once. This
47 comment can thus be removed at that point. */
48
49 #include "config.h"
50 #include "system.h"
51 #include "coretypes.h"
52 #include "tm.h"
53 #include "rtl.h"
54 #include "hash-set.h"
55 #include "vec.h"
56 #include "input.h"
57 #include "alias.h"
58 #include "symtab.h"
59 #include "inchash.h"
60 #include "tree.h"
61 #include "fold-const.h"
62 #include "stor-layout.h"
63 #include "varasm.h"
64 #include "hashtab.h"
65 #include "hard-reg-set.h"
66 #include "function.h"
67 #include "flags.h"
68 #include "statistics.h"
69 #include "insn-config.h"
70 #include "expmed.h"
71 #include "dojump.h"
72 #include "explow.h"
73 #include "calls.h"
74 #include "emit-rtl.h"
75 #include "stmt.h"
76 #include "expr.h"
77 #include "output.h"
78 #include "diagnostic-core.h"
79 #include "target.h"
80 #include "tm_p.h"
81 #include "target-def.h"
82 #include "regs.h"
83 #include "reload.h"
84 #include "insn-codes.h"
85 #include "optabs.h"
86 #include "recog.h"
87 #include "intl.h"
88 #include "opts.h"
89 #include "tree-ssa-alias.h"
90 #include "gimple-expr.h"
91 #include "gimplify.h"
92 #include "stringpool.h"
93 #include "tree-ssanames.h"
94
95
/* The default implementation of TARGET_LEGITIMATE_ADDRESS_P.  If the
   target still defines the old GO_IF_LEGITIMATE_ADDRESS macro, defer to
   it through strict/non-strict wrappers; otherwise the target must
   override the hook, so reaching here is a bug.  */

bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

/* The default implementation of TARGET_ASM_EXTERNAL_LIBCALL.  Emits an
   assembler declaration for libcall symbol FUN when the target defines
   ASM_OUTPUT_EXTERNAL_LIBCALL; otherwise emits nothing.  */

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}
119
120 int
121 default_unspec_may_trap_p (const_rtx x, unsigned flags)
122 {
123 int i;
124
125 /* Any floating arithmetic may trap. */
126 if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
127 return 1;
128
129 for (i = 0; i < XVECLEN (x, 0); ++i)
130 {
131 if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
132 return 1;
133 }
134
135 return 0;
136 }
137
/* The default implementation of TARGET_PROMOTE_FUNCTION_MODE.  Promote
   MODE (per promote_mode, updating *PUNSIGNEDP) only for libcall return
   values (FOR_RETURN == 2); otherwise leave MODE unchanged.  */

machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

/* Variant of the above for targets that promote function arguments and
   return values unconditionally, not just for libcalls.  */

machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}
159
/* The default implementation of TARGET_CC_MODES_COMPATIBLE.  Two CC
   modes are compatible only when identical; otherwise VOIDmode signals
   incompatibility.  */

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

/* The default implementation of TARGET_RETURN_IN_MEMORY.  Return TYPE
   in memory exactly when it has no scalar/vector machine mode.  */

bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

/* The default implementation of TARGET_LEGITIMIZE_ADDRESS: perform no
   transformation, returning the address unchanged.  */

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

/* The default implementation of TARGET_LEGITIMIZE_ADDRESS_DISPLACEMENT:
   report that no base/displacement split is available.  */

bool
default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
					 rtx *offset ATTRIBUTE_UNUSED,
					 machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}
189
/* The default implementation of TARGET_EXPAND_BUILTIN_SAVEREGS.  The
   generic code cannot save registers; diagnose and return a harmless
   constant so compilation can continue.  */

rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}

/* The default implementation of TARGET_SETUP_INCOMING_VARARGS: do
   nothing.  Targets with register-passed varargs must override this.  */

void
default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
				machine_mode mode ATTRIBUTE_UNUSED,
				tree type ATTRIBUTE_UNUSED,
				int *pretend_arg_size ATTRIBUTE_UNUSED,
				int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}
213
/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

/* The default implementation of TARGET_PRETEND_OUTGOING_VARARGS_NAMED.
   Pretend named only when the target actually set up incoming varargs,
   i.e. it overrode the (empty) default hook.  */

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

/* The default implementation of TARGET_EH_RETURN_FILTER_MODE: reuse the
   unwinder's word mode.  */

machine_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

/* The default implementation of TARGET_LIBGCC_CMP_RETURN_MODE.  */

machine_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_LIBGCC_SHIFT_COUNT_MODE.  */

machine_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_UNWIND_WORD_MODE.  */

machine_mode
default_unwind_word_mode (void)
{
  return word_mode;
}
252
/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  When the
   target truncates shift counts, the mask is the mode's bit width minus
   one; a zero mask means no truncation may be assumed.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.
   Require fewer divisions before converting to reciprocal multiply
   when no hardware division instruction exists.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED: make no
   promise about how values are represented in wider registers.  */

int
default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
			   machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */

machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}
293
/* The default implementation of TARGET_CXX_GUARD_TYPE.  The generic
   C++ ABI specifies the one-time-construction guard is a 64-bit
   value.  */

tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}
300
301 /* Returns the size of the cookie to use when allocating an array
302 whose elements have the indicated TYPE. Assumes that it is already
303 known that a cookie is needed. */
304
305 tree
306 default_cxx_get_cookie_size (tree type)
307 {
308 tree cookie_size;
309
310 /* We need to allocate an additional max (sizeof (size_t), alignof
311 (true_type)) bytes. */
312 tree sizetype_size;
313 tree type_align;
314
315 sizetype_size = size_in_bytes (sizetype);
316 type_align = size_int (TYPE_ALIGN_UNIT (type));
317 if (tree_int_cst_lt (type_align, sizetype_size))
318 cookie_size = sizetype_size;
319 else
320 cookie_size = type_align;
321
322 return cookie_size;
323 }
324
/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
	bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}
346
/* Emit to STREAM the assembler syntax for insn operand X.  Defers to
   the old PRINT_OPERAND macro; targets without it must override the
   hook.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  Defers to the old PRINT_OPERAND_ADDRESS macro.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  Defers to the old macro when defined; the
   fallback accepts no punctuation characters.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}
386
/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  Strip
   any target encoding from NAME and, unless NAME began with '*' (which
   suppresses prefixing), prepend the user label prefix.  Returns the
   result as an identifier node.  */

tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  /* A leading '*' means "emit verbatim" — skip it before stripping.  */
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
397
/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      /* Accept any precision matching one of the standard C integer
	 types, or double-word (handled by optabs.c).  */
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      /* Accept any precision matching one of the standard C
	 floating-point types.  */
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      /* These have no standard C equivalent; targets that support them
	 must override this hook.  */
      return false;

    default:
      gcc_unreachable ();
    }
}
450
/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  The default assumes libgcc handles
   exactly the float modes the target machine description provides.  */

bool
default_libgcc_floating_mode_supported_p (machine_mode mode)
{
  switch (mode)
    {
      /* Each case is compiled in only when the target defines the
	 corresponding mode.  */
#ifdef HAVE_SFmode
    case SFmode:
#endif
#ifdef HAVE_DFmode
    case DFmode:
#endif
#ifdef HAVE_XFmode
    case XFmode:
#endif
#ifdef HAVE_TFmode
    case TFmode:
#endif
      return true;

    default:
      return false;
    }
}
477
/* Make some target macros useable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

/* Likewise for FLOAT_WORDS_BIG_ENDIAN.  */
bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  The default guesses from whether DFmode addition exists.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}
527
/* NULL if INSN is valid within a low-overhead loop, otherwise returns
   an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (eg, PPC) use the count
   register for branch on table instructions.  We reject the doloop pattern in
   these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}
549
/* Mapping of builtin functions to vectorized variants.  The default
   knows no vectorized builtins.  */

tree
default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
				     tree type_out ATTRIBUTE_UNUSED,
				     tree type_in ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Vectorized conversion.  The default knows no vectorized conversion
   builtins.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
				       tree dest_type ATTRIBUTE_UNUSED,
				       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
569
/* Default vectorizer cost model values.  Most operations cost 1 unit;
   misaligned accesses cost 2, taken branches 3, and building a vector
   from scalars scales with the element count.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  unsigned elements;

  switch (type_of_cost)
    {
      case scalar_stmt:
      case scalar_load:
      case scalar_store:
      case vector_stmt:
      case vector_load:
      case vector_store:
      case vec_to_scalar:
      case scalar_to_vec:
      case cond_branch_not_taken:
      case vec_perm:
      case vec_promote_demote:
        return 1;

      case unaligned_load:
      case unaligned_store:
        return 2;

      case cond_branch_taken:
        return 3;

      case vec_construct:
	/* Building an N-element vector is modeled as roughly one insn
	   per element pair, plus one.  */
	elements = TYPE_VECTOR_SUBPARTS (vectype);
	return elements / 2 + 1;

      default:
        gcc_unreachable ();
    }
}
609
/* Reciprocal.  The default knows no builtin for computing a
   reciprocal, so the caller falls back to a division.  */

tree
default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
			    bool md_fn ATTRIBUTE_UNUSED,
			    bool sqrt ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Generic hook: CUMULATIVE_ARGS, mode, type, named -> false.  */

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

/* Generic hook: CUMULATIVE_ARGS, mode, type, named -> true.  */

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

/* Generic hook: CUMULATIVE_ARGS, mode, type, named -> 0.  */

int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}
646
/* The default implementation of TARGET_FUNCTION_ARG_ADVANCE.  Every
   target must override this; reaching it is a bug.  */

void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
			      machine_mode mode ATTRIBUTE_UNUSED,
			      const_tree type ATTRIBUTE_UNUSED,
			      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_FUNCTION_ARG.  Every target
   must override this; reaching it is a bug.  */

rtx
default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      const_tree type ATTRIBUTE_UNUSED,
		      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_FUNCTION_INCOMING_ARG.  Every
   target must override this; reaching it is a bug.  */

rtx
default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
			       machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED,
			       bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* The default implementation of TARGET_FUNCTION_ARG_BOUNDARY: align
   every argument to PARM_BOUNDARY.  */

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

/* The default implementation of TARGET_FUNCTION_ARG_ROUND_BOUNDARY:
   round argument sizes to PARM_BOUNDARY.  */

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}
687
/* Generic hook that takes a bitmap and does nothing.  */

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

/* Generic hook for TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN: accept any
   argument (return no error message).  */

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}
701
/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

/* The default implementation of TARGET_STACK_PROTECT_GUARD.  Lazily
   build (and cache) an external VAR_DECL for libgcc's
   __stack_chk_guard variable.  */

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      /* Volatile so reads of the guard are never cached or elided.  */
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}
737
/* Cached FUNCTION_DECL for the stack-smashing failure routine.  */
static GTY(()) tree stack_chk_fail_decl;

/* The default implementation of TARGET_STACK_PROTECT_FAIL.  Lazily
   build a declaration of libgcc's __stack_chk_fail and return a call
   expression invoking it.  */

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}
766
/* Variant of TARGET_STACK_PROTECT_FAIL that, for PIC code with hidden
   symbol support, calls the hidden local alias
   __stack_chk_fail_local instead of the external routine.
   NOTE(review): shares the stack_chk_fail_decl cache with
   default_external_stack_protect_fail; only one of the two variants is
   expected to be used in a given compilation.  */

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}
800
/* Generic hook returning whether rtx X has a commutative top-level
   operation, ignoring the outer code.  */

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

/* The default implementation of TARGET_FUNCTION_VALUE.  Defers to the
   old FUNCTION_VALUE macro, which only accepts a decl (or NULL).  */

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

/* The default implementation of TARGET_LIBCALL_VALUE.  Defers to the
   old LIBCALL_VALUE macro.  */

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (mode);
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}
847
/* The default implementation of TARGET_INTERNAL_ARG_POINTER.  */

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}
862
/* The default implementation of TARGET_STATIC_CHAIN.  Return the rtx
   holding the static chain (incoming vs. outgoing per INCOMING_P),
   using the target's STATIC_CHAIN_*_REGNUM macros.  Targets defining
   neither macro do not support nested functions.  */

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    /* Diagnose only once per compilation.  */
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

/* The default implementation of TARGET_TRAMPOLINE_INIT: nested
   function trampolines are unsupported.  */

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}
897
/* The default implementation of TARGET_RETURN_POPS_ARGS: the callee
   pops nothing.  */

int
default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
			  tree funtype ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  return 0;
}

/* The default implementation of TARGET_BRANCH_TARGET_REGISTER_CLASS.  */

reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}

/* The default implementation of TARGET_IRA_CHANGE_PSEUDO_ALLOCNO_CLASS:
   keep the class IRA chose.  */

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl)
{
  return cl;
}

/* The default implementation of TARGET_LRA_P: use reload, not LRA.  */

extern bool
default_lra_p (void)
{
  return false;
}

/* The default implementation of TARGET_REGISTER_PRIORITY: all hard
   registers have equal priority.  */

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

/* The default implementation of TARGET_REGISTER_USAGE_LEVELING_P.  */

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

/* The default implementation of TARGET_DIFFERENT_ADDR_DISPLACEMENT_P.  */

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}
942
/* The default implementation of TARGET_SECONDARY_RELOAD.  Bridges the
   old SECONDARY_{INPUT,OUTPUT}_RELOAD_CLASS macros and reload_in/out
   patterns to the hook interface.  Returns the register class needed
   for an intermediate register, or NO_REGS if none (possibly setting
   SRI->icode / SRI->t_icode to a reload pattern instead).  */

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  /* If a previous round already picked a tertiary reload pattern,
     promote it to the insn for this round.  */
  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (rclass != NO_REGS)
    {
      /* See whether a dedicated reload_in/reload_out pattern can do the
	 move instead of (or in addition to) the intermediate class.  */
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

        }
      /* If the pattern alone suffices, report it via icode; otherwise
	 record it as a tertiary reload for the next round.  */
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}
1024
/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* The default implementation of TARGET_MANGLE_DECL_ASSEMBLER_NAME:
   do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
  return id;
}

/* The default implementation of TARGET_VECTOR_ALIGNMENT: natural
   alignment, i.e. the vector type's full size in bits.  */
HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  return tree_to_shwi (TYPE_SIZE (type));
}
1047
/* The default implementation of
   TARGET_VECTORIZE_VECTOR_ALIGNMENT_REACHABLE: assume vector alignment
   is reachable for non-packed types no larger than a pointer.  */

bool
default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
{
  if (is_packed)
    return false;

  /* Assuming that types whose size is > pointer-size are not guaranteed to be
     naturally aligned.  */
  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
    return false;

  /* Assuming that types whose size is <= pointer-size
     are naturally aligned.  */
  return true;
}

/* By default, assume that a target supports any factor of misalignment
   memory access if it supports movmisalign pattern.
   is_packed is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
					     const_tree type
					     ATTRIBUTE_UNUSED,
					     int misalignment
					     ATTRIBUTE_UNUSED,
					     bool is_packed
					     ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}
1080
/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  Word mode means
   "no real vector mode available".  */

machine_mode
default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
  return word_mode;
}

/* By default only the size derived from the preferred vector mode
   is tried (a zero bitmask means no extra sizes).  */

unsigned int
default_autovectorize_vector_sizes (void)
{
  return 0;
}
1098
/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.
   The caller owns the array and frees it via default_destroy_cost_data.  */

void *
default_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}

/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
		       struct _stmt_vec_info *stmt_info, int misalign,
		       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
								misalign);
  /* Statements in an inner loop relative to the loop being
     vectorized are weighted more heavily.  The value here is
     arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}

/* By default, the cost model just returns the accumulated costs.  */

void
default_finish_cost (void *data, unsigned *prologue_cost,
		     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}

/* Free the cost data.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}
1157
/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (machine_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C libraries errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      /* An indirect int access aliases errno unless points-to analysis
	 proves the pointer targets only local memory.  */
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
1192
1193 /* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
1194 for the generic address space only. */
1195
machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Targets with nongeneric address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return ptr_mode;
}
1202
1203 /* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
1204 for the generic address space only. */
1205
machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* Targets with nongeneric address spaces must override this hook.  */
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return Pmode;
}
1212
1213 /* Named address space version of valid_pointer_mode. */
1214
1215 bool
1216 default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
1217 {
1218 if (!ADDR_SPACE_GENERIC_P (as))
1219 return (mode == targetm.addr_space.pointer_mode (as)
1220 || mode == targetm.addr_space.address_mode (as));
1221
1222 return targetm.valid_pointer_mode (mode);
1223 }
1224
1225 /* Some places still assume that all pointer or address modes are the
1226 standard Pmode and ptr_mode. These optimizations become invalid if
1227 the target actually supports multiple different modes. For now,
1228 we disable such optimizations on such targets, using this function. */
1229
1230 bool
1231 target_default_pointer_address_modes_p (void)
1232 {
1233 if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1234 return false;
1235 if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1236 return false;
1237
1238 return true;
1239 }
1240
1241 /* Named address space version of legitimate_address_p. */
1242
1243 bool
1244 default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
1245 bool strict, addr_space_t as)
1246 {
1247 if (!ADDR_SPACE_GENERIC_P (as))
1248 gcc_unreachable ();
1249
1250 return targetm.legitimate_address_p (mode, mem, strict);
1251 }
1252
1253 /* Named address space version of LEGITIMIZE_ADDRESS. */
1254
1255 rtx
1256 default_addr_space_legitimize_address (rtx x, rtx oldx,
1257 machine_mode mode, addr_space_t as)
1258 {
1259 if (!ADDR_SPACE_GENERIC_P (as))
1260 return x;
1261
1262 return targetm.legitimize_address (x, oldx, mode);
1263 }
1264
1265 /* The default hook for determining if one named address space is a subset of
1266 another and to return which address space to use as the common address
1267 space. */
1268
bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  /* By default an address space is only a subset of itself.  */
  return (subset == superset);
}
1274
1275 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1276 called for targets with only a generic address space. */
1277
rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  /* With only a generic address space there is nothing to convert
     between; reaching here indicates a target configuration bug.  */
  gcc_unreachable ();
}
1285
/* Default target hook: accept any hard register REGNO as a scratch.  */

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}
1291
1292 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1293
bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  /* By default no address is mode-dependent.  */
  return false;
}
1300
/* Default for TARGET_OPTION_VALID_ATTRIBUTE_P: reject every use of the
   "target" attribute with a warning, since the machine has no support.  */

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}
1312
/* Default for TARGET_OPTION_PRAGMA_PARSE: warn that #pragma GCC target
   is unsupported and report failure to the pragma machinery.  */

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  warning (OPT_Wpragmas,
	   "#pragma GCC target is not supported for this machine");

  return false;
}
1322
1323 bool
1324 default_target_can_inline_p (tree caller, tree callee)
1325 {
1326 bool ret = false;
1327 tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1328 tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1329
1330 /* If callee has no option attributes, then it is ok to inline */
1331 if (!callee_opts)
1332 ret = true;
1333
1334 /* If caller has no option attributes, but callee does then it is not ok to
1335 inline */
1336 else if (!caller_opts)
1337 ret = false;
1338
1339 /* If both caller and callee have attributes, assume that if the
1340 pointer is different, the two functions have different target
1341 options since build_target_option_node uses a hash table for the
1342 options. */
1343 else
1344 ret = (callee_opts == caller_opts);
1345
1346 return ret;
1347 }
1348
#ifndef HAVE_casesi
# define HAVE_casesi 0
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  /* Minimum number of case labels before a dispatch table is worth it.  */
  return (HAVE_casesi ? 4 : 5);
}
1362
/* Default for TARGET_HAVE_CONDITIONAL_EXECUTION: derived from the
   target's HAVE_conditional_execution macro when it defines one.  */

bool
default_have_conditional_execution (void)
{
#ifdef HAVE_conditional_execution
  return HAVE_conditional_execution;
#else
  return false;
#endif
}
1372
1373 /* By default we assume that c99 functions are present at the runtime,
1374 but sincos is not. */
1375 bool
1376 default_libc_has_function (enum function_class fn_class)
1377 {
1378 if (fn_class == function_c94
1379 || fn_class == function_c99_misc
1380 || fn_class == function_c99_math_complex)
1381 return true;
1382
1383 return false;
1384 }
1385
/* GNU libc provides every function class GCC asks about.  */

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}
1391
/* For libcs without C99 support: no queried function class is present.  */

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}
1397
/* Default for TARGET_VECTORIZE_BUILTIN_TM_LOAD/STORE: no transactional
   load/store builtin is available for TYPE.  */

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}
1403
1404 /* Compute cost of moving registers to/from memory. */
1405
int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
  /* Legacy targets may still define MEMORY_MOVE_COST; otherwise use a
     base cost of 4 plus any secondary-reload memory cost.  */
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
#endif
}
1417
1418 /* Compute cost of moving data from a register of class FROM to one of
1419 TO, using MODE. */
1420
int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
  /* Legacy targets may still define REGISTER_MOVE_COST; otherwise a
     register-register move costs 2 by convention.  */
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
#endif
}
1432
1433 /* For hooks which use the MOVE_RATIO macro, this gives the legacy default
1434 behaviour. SPEED_P is true if we are compiling for speed. */
1435
unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
  /* Prefer the target's MOVE_RATIO when defined.  */
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
  /* A movmem pattern exists, so by-pieces moves need only beat it for
     very small blocks.  */
  move_ratio = 2;
#else /* No movmem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}
1451
1452 /* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
1453 used; return FALSE if the movmem/setmem optab should be expanded, or
1454 a call to memcpy emitted. */
1455
1456 bool
1457 default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
1458 unsigned int alignment,
1459 enum by_pieces_operation op,
1460 bool speed_p)
1461 {
1462 unsigned int max_size = 0;
1463 unsigned int ratio = 0;
1464
1465 switch (op)
1466 {
1467 case CLEAR_BY_PIECES:
1468 max_size = STORE_MAX_PIECES;
1469 ratio = CLEAR_RATIO (speed_p);
1470 break;
1471 case MOVE_BY_PIECES:
1472 max_size = MOVE_MAX_PIECES;
1473 ratio = get_move_ratio (speed_p);
1474 break;
1475 case SET_BY_PIECES:
1476 max_size = STORE_MAX_PIECES;
1477 ratio = SET_RATIO (speed_p);
1478 break;
1479 case STORE_BY_PIECES:
1480 max_size = STORE_MAX_PIECES;
1481 ratio = get_move_ratio (speed_p);
1482 break;
1483 }
1484
1485 return move_by_pieces_ninsns (size, alignment, max_size + 1) < ratio;
1486 }
1487
/* Default for TARGET_PROFILE_BEFORE_PROLOGUE: mirror the target's
   PROFILE_BEFORE_PROLOGUE macro.  */

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}
1497
1498 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1499
reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
  /* Honor a legacy PREFERRED_RELOAD_CLASS macro if the target still
     defines one; otherwise RCLASS is already the preference.  */
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}
1510
1511 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1512
reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  /* By default the requested class is already the preferred one.  */
  return rclass;
}
1519
1520 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  /* NO_REGS expresses "no preference" to the register renamer.  */
  return NO_REGS;
}
1526
1527 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1528
bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  /* A class with a single register is trivially likely to be spilled.  */
  return (reg_class_size[(int) rclass] == 1);
}
1534
1535 /* The default implementation of TARGET_CLASS_MAX_NREGS. */
1536
unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Honor a legacy CLASS_MAX_NREGS macro; otherwise assume one word
     per UNITS_PER_WORD-sized chunk of MODE.  */
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
#else
  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
#endif
}
1547
1548 /* Determine the debugging unwind mechanism for the target. */
1549
enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  /* No unwind info for debugging purposes.  */
  return UI_NONE;
}
1568
1569 /* Determine the correct mode for a Dwarf frame register that represents
1570 register REGNO. */
1571
machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  /* If calls only partially clobber the register in its raw mode, save
     the single-register mode chosen by choose_hard_reg_mode instead.  */
  if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, true);
  return save_mode;
}
1581
1582 /* To be used by targets where reg_raw_mode doesn't return the right
1583 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1584
machine_mode
default_get_reg_raw_mode (int regno)
{
  /* The raw mode table is the right answer for most targets.  */
  return reg_raw_mode[regno];
}
1590
1591 /* Return true if a leaf function should stay leaf even with profiling
1592 enabled. */
1593
bool
default_keep_leaf_when_profiled ()
{
  /* By default, profiling code may make a leaf function non-leaf.  */
  return false;
}
1599
1600 /* Return true if the state of option OPTION should be stored in PCH files
1601 and checked by default_pch_valid_p. Store the option's current state
1602 in STATE if so. */
1603
1604 static inline bool
1605 option_affects_pch_p (int option, struct cl_option_state *state)
1606 {
1607 if ((cl_options[option].flags & CL_TARGET) == 0)
1608 return false;
1609 if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
1610 return false;
1611 if (option_flag_var (option, &global_options) == &target_flags)
1612 if (targetm.check_pch_target_flags)
1613 return false;
1614 return get_option_state (&global_options, option, state);
1615 }
1616
1617 /* Default version of get_pch_validity.
1618 By default, every flag difference is fatal; that will be mostly right for
1619 most targets, but completely right for very few. */
1620
void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  /* Two leading bytes record flag_pic and flag_pie.  */
  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  /* Serialize each PCH-relevant option's state in the same order
     default_pch_valid_p will read it back.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
1654
1655 /* Return a message which says that a PCH file was created with a different
1656 setting of OPTION. */
1657
static const char *
pch_option_mismatch (const char *option)
{
  /* xasprintf allocates; callers treat the result as a persistent
     diagnostic string.  */
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}
1664
1665 /* Default version of pch_valid_p. */
1666
const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of -fpic");
  if (data[1] != flag_pie)
    return _("created and used with different settings of -fpie");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  /* NOTE(review): LEN is decremented alongside DATA but never range-
     checked; the layout is trusted to match default_get_pch_validity.  */
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
	len -= state.size;
      }

  /* NULL means the PCH is valid.  */
  return NULL;
}
1706
1707 /* Default version of cstore_mode. */
1708
machine_mode
default_cstore_mode (enum insn_code icode)
{
  /* Use the mode of the cstore pattern's output operand.  */
  return insn_data[(int) icode].operand[0].mode;
}
1714
1715 /* Default version of member_type_forces_blk. */
1716
bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  /* By default no member type forces its aggregate into BLKmode.  */
  return false;
}
1722
/* Pointer-bounds (chkp) hook stub: must be overridden by targets that
   support bounds checking; the default can never be reached.  */

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
			     rtx ptr ATTRIBUTE_UNUSED,
			     rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1730
/* Pointer-bounds (chkp) hook stub: must be overridden by targets that
   support bounds checking; the default can never be reached.  */

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      rtx bounds ATTRIBUTE_UNUSED,
			      rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1739
/* Pointer-bounds (chkp) hook stub; never reached by default.  */

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1745
/* Pointer-bounds (chkp) hook stub; never reached by default.  */

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
			       rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1752
1753 /* Default version of canonicalize_comparison. */
1754
void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
  /* By default comparisons are left exactly as the caller built them.  */
}
1759
1760 /* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV. */
1761
void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
  /* No floating-point environment handling by default.  */
}
1766
1767 #ifndef PAD_VARARGS_DOWN
1768 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
1769 #endif
1770
1771 /* Build an indirect-ref expression over the given TREE, which represents a
1772 piece of a va_arg() expansion. */
1773 tree
1774 build_va_arg_indirect_ref (tree addr)
1775 {
1776 addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
1777 return addr;
1778 }
1779
1780 /* The "standard" implementation of va_arg: read the value from the
1781 current (padded) address and increment by the (padded) size. */
1782
tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  /* Arguments passed by reference are fetched as a pointer and then
     dereferenced once more at the end.  */
  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      /* valist_tmp = (valist_tmp + boundary - 1) & -boundary.  */
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  /* Pass-by-reference args need one extra dereference.  */
  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
1875
/* Default for TARGET_CHKP_BOUND_TYPE: build the type used to represent
   pointer bounds, sized as two size_type_node values.  */

tree
default_chkp_bound_type (void)
{
  tree res = make_node (POINTER_BOUNDS_TYPE);
  TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
  TYPE_NAME (res) = get_identifier ("__bounds_type");
  SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
  layout_type (res);
  return res;
}
1886
/* Default for TARGET_CHKP_BOUND_MODE: no bounds mode is available.  */

enum machine_mode
default_chkp_bound_mode (void)
{
  return VOIDmode;
}
1892
/* Default chkp hook: no builtin exists for bounds-check code FCODE.  */

tree
default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
1898
/* Pointer-bounds (chkp) hook stub; never reached by default.  */

rtx
default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
				    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
				    bool outgoing ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}
1906
/* Default chkp hook: no constant bounds representation exists.  */

tree
default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}
1913
/* Default chkp hook: emit no bounds-initialization statements.  */

int
default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
				tree lb ATTRIBUTE_UNUSED,
				tree ub ATTRIBUTE_UNUSED,
				tree *stmts ATTRIBUTE_UNUSED)
{
  return 0;
}
1922
/* Default chkp hook: nothing to set up for incoming vararg bounds.  */

void
default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
				      enum machine_mode mode ATTRIBUTE_UNUSED,
				      tree type ATTRIBUTE_UNUSED,
				      int *pretend_arg_size ATTRIBUTE_UNUSED,
				      int second_time ATTRIBUTE_UNUSED)
{
}
1931
1932 /* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
1933 not support nested low-overhead loops. */
1934
bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  /* Allow doloop only for the innermost loop (depth 1).  */
  return loop_depth == 1;
}
1941
1942 #include "gt-targhooks.h"