/* Default target hook functions.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* The migration of target macros to target hooks works as follows:

   1. Create a target hook that uses the existing target macros to
      implement the same functionality.

   2. Convert all the MI files to use the hook instead of the macro.

   3. Repeat for a majority of the remaining target macros.  This will
      take some time.

   4. Tell target maintainers to start migrating.

   5. Eventually convert the backends to override the hook instead of
      defining the macros.  This will take some time too.

   6. TBD when, poison the macros.  Unmigrated targets will break at
      this point.

   Note that we expect steps 1-3 to be done by the people that
   understand what the MI does with each macro, and step 5 to be done
   by the target maintainers for their respective targets.

   Note that steps 1 and 2 don't have to be done together, but no
   target can override the new hook until step 2 is complete for it.

   Once the macros are poisoned, we will revert to the old migration
   rules - migrate the macro, callers, and targets all at once.  This
   comment can thus be removed at that point.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "varasm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "output.h"
#include "diagnostic-core.h"
#include "target.h"
#include "tm_p.h"
#include "regs.h"
#include "reload.h"
#include "insn-codes.h"
#include "optabs.h"
#include "recog.h"
#include "intl.h"
#include "opts.h"
#include "tree-ssa-alias.h"
#include "gimple-expr.h"
#include "gimplify.h"
#include "stringpool.h"
#include "tree-ssanames.h"

bool
default_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      bool strict ATTRIBUTE_UNUSED)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  /* Defer to the old implementation using a goto.  */
  if (strict)
    return strict_memory_address_p (mode, addr);
  else
    return memory_address_p (mode, addr);
#else
  gcc_unreachable ();
#endif
}

void
default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
  ASM_OUTPUT_EXTERNAL_LIBCALL (asm_out_file, fun);
#endif
}

int
default_unspec_may_trap_p (const_rtx x, unsigned flags)
{
  int i;

  /* Any floating arithmetic may trap.  */
  if ((SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math))
    return 1;

  for (i = 0; i < XVECLEN (x, 0); ++i)
    {
      if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
	return 1;
    }

  return 0;
}

machine_mode
default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
			       machine_mode mode,
			       int *punsignedp ATTRIBUTE_UNUSED,
			       const_tree funtype ATTRIBUTE_UNUSED,
			       int for_return ATTRIBUTE_UNUSED)
{
  if (type != NULL_TREE && for_return == 2)
    return promote_mode (type, mode, punsignedp);
  return mode;
}

machine_mode
default_promote_function_mode_always_promote (const_tree type,
					      machine_mode mode,
					      int *punsignedp,
					      const_tree funtype ATTRIBUTE_UNUSED,
					      int for_return ATTRIBUTE_UNUSED)
{
  return promote_mode (type, mode, punsignedp);
}

machine_mode
default_cc_modes_compatible (machine_mode m1, machine_mode m2)
{
  if (m1 == m2)
    return m1;
  return VOIDmode;
}

bool
default_return_in_memory (const_tree type,
			  const_tree fntype ATTRIBUTE_UNUSED)
{
  return (TYPE_MODE (type) == BLKmode);
}

rtx
default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
			    machine_mode mode ATTRIBUTE_UNUSED)
{
  return x;
}

bool
default_legitimize_address_displacement (rtx *disp ATTRIBUTE_UNUSED,
					 rtx *offset ATTRIBUTE_UNUSED,
					 machine_mode mode ATTRIBUTE_UNUSED)
{
  return false;
}

rtx
default_expand_builtin_saveregs (void)
{
  error ("__builtin_saveregs not supported by this target");
  return const0_rtx;
}

void
default_setup_incoming_varargs (cumulative_args_t ca ATTRIBUTE_UNUSED,
				machine_mode mode ATTRIBUTE_UNUSED,
				tree type ATTRIBUTE_UNUSED,
				int *pretend_arg_size ATTRIBUTE_UNUSED,
				int second_time ATTRIBUTE_UNUSED)
{
}

/* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE.  */

rtx
default_builtin_setjmp_frame_value (void)
{
  return virtual_stack_vars_rtx;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false.  */

bool
hook_bool_CUMULATIVE_ARGS_false (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_pretend_outgoing_varargs_named (cumulative_args_t ca ATTRIBUTE_UNUSED)
{
  return (targetm.calls.setup_incoming_varargs
	  != default_setup_incoming_varargs);
}

machine_mode
default_eh_return_filter_mode (void)
{
  return targetm.unwind_word_mode ();
}

machine_mode
default_libgcc_cmp_return_mode (void)
{
  return word_mode;
}

machine_mode
default_libgcc_shift_count_mode (void)
{
  return word_mode;
}

machine_mode
default_unwind_word_mode (void)
{
  return word_mode;
}

/* The default implementation of TARGET_SHIFT_TRUNCATION_MASK.  */

unsigned HOST_WIDE_INT
default_shift_truncation_mask (machine_mode mode)
{
  return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
}
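
/* For illustration (not part of the hook contract): on a target that
   defines SHIFT_COUNT_TRUNCATED to 1, a 32-bit mode such as SImode
   yields the mask 31 (0x1f), i.e. shift counts are interpreted modulo
   the bit width:

     default_shift_truncation_mask (SImode) == 31
     default_shift_truncation_mask (DImode) == 63

   With SHIFT_COUNT_TRUNCATED defined to 0 the hook returns 0, which
   tells the middle end it may not assume any truncation.  */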

/* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL.  */

unsigned int
default_min_divisions_for_recip_mul (machine_mode mode ATTRIBUTE_UNUSED)
{
  return have_insn_for (DIV, mode) ? 3 : 2;
}

/* The default implementation of TARGET_MODE_REP_EXTENDED.  */

int
default_mode_rep_extended (machine_mode mode ATTRIBUTE_UNUSED,
			   machine_mode mode_rep ATTRIBUTE_UNUSED)
{
  return UNKNOWN;
}

/* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true.  */

bool
hook_bool_CUMULATIVE_ARGS_true (cumulative_args_t a ATTRIBUTE_UNUSED)
{
  return true;
}

/* Return machine mode for non-standard suffix
   or VOIDmode if non-standard suffixes are unsupported.  */
machine_mode
default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
{
  return VOIDmode;
}

/* The generic C++ ABI specifies this is a 64-bit value.  */
tree
default_cxx_guard_type (void)
{
  return long_long_integer_type_node;
}

/* Returns the size of the cookie to use when allocating an array
   whose elements have the indicated TYPE.  Assumes that it is already
   known that a cookie is needed.  */

tree
default_cxx_get_cookie_size (tree type)
{
  tree cookie_size;

  /* We need to allocate an additional max (sizeof (size_t), alignof
     (true_type)) bytes.  */
  tree sizetype_size;
  tree type_align;

  sizetype_size = size_in_bytes (sizetype);
  type_align = size_int (TYPE_ALIGN_UNIT (type));
  if (tree_int_cst_lt (type_align, sizetype_size))
    cookie_size = sizetype_size;
  else
    cookie_size = type_align;

  return cookie_size;
}
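
/* A worked example (illustrative only): on a typical 64-bit target
   where sizeof (size_t) is 8, allocating an array of a type aligned
   to 16 bytes reserves a 16-byte cookie, while an array of plain int
   (4-byte alignment) gets the minimum 8-byte cookie:

     max (8, 16) == 16  bytes of cookie
     max (8, 4)  == 8   bytes of cookie

   The cookie stores the element count so that operator delete[] can
   destroy the right number of elements.  */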

/* Return true if a parameter must be passed by reference.  This version
   of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK.  */

bool
hook_pass_by_reference_must_pass_in_stack (cumulative_args_t c ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
	bool named_arg ATTRIBUTE_UNUSED)
{
  return targetm.calls.must_pass_in_stack (mode, type);
}

/* Return true if a parameter follows callee copies conventions.  This
   version of the hook is true for all named arguments.  */

bool
hook_callee_copies_named (cumulative_args_t ca ATTRIBUTE_UNUSED,
			  machine_mode mode ATTRIBUTE_UNUSED,
			  const_tree type ATTRIBUTE_UNUSED, bool named)
{
  return named;
}

/* Emit to STREAM the assembler syntax for insn operand X.  */

void
default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
		       int code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND
  PRINT_OPERAND (stream, x, code);
#else
  gcc_unreachable ();
#endif
}

/* Emit to STREAM the assembler syntax for an insn operand whose memory
   address is X.  */

void
default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
			       rtx x ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_ADDRESS
  PRINT_OPERAND_ADDRESS (stream, x);
#else
  gcc_unreachable ();
#endif
}

/* Return true if CODE is a valid punctuation character for the
   `print_operand' hook.  */

bool
default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
{
#ifdef PRINT_OPERAND_PUNCT_VALID_P
  return PRINT_OPERAND_PUNCT_VALID_P (code);
#else
  return false;
#endif
}

/* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME.  */
tree
default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
{
  const char *skipped = name + (*name == '*' ? 1 : 0);
  const char *stripped = targetm.strip_name_encoding (skipped);
  if (*name != '*' && user_label_prefix[0])
    stripped = ACONCAT ((user_label_prefix, stripped, NULL));
  return get_identifier (stripped);
}
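
/* For example (illustrative, assuming a target whose user_label_prefix
   is "_"):

     "foo"  -> "_foo"   the prefix is prepended
     "*foo" -> "foo"    a leading '*' suppresses the prefix

   The '*' convention matches how the rest of the compiler marks
   assembler names that must be emitted verbatim.  */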

/* True if MODE is valid for the target.  By "valid", we mean able to
   be manipulated in non-trivial ways.  In particular, this means all
   the arithmetic is supported.

   By default we guess this means that any C type is supported.  If
   we can't map the mode back to a type that would be available in C,
   then reject it.  Special case, here, is the double-word arithmetic
   supported by optabs.c.  */

bool
default_scalar_mode_supported_p (machine_mode mode)
{
  int precision = GET_MODE_PRECISION (mode);

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_PARTIAL_INT:
    case MODE_INT:
      if (precision == CHAR_TYPE_SIZE)
	return true;
      if (precision == SHORT_TYPE_SIZE)
	return true;
      if (precision == INT_TYPE_SIZE)
	return true;
      if (precision == LONG_TYPE_SIZE)
	return true;
      if (precision == LONG_LONG_TYPE_SIZE)
	return true;
      if (precision == 2 * BITS_PER_WORD)
	return true;
      return false;

    case MODE_FLOAT:
      if (precision == FLOAT_TYPE_SIZE)
	return true;
      if (precision == DOUBLE_TYPE_SIZE)
	return true;
      if (precision == LONG_DOUBLE_TYPE_SIZE)
	return true;
      return false;

    case MODE_DECIMAL_FLOAT:
    case MODE_FRACT:
    case MODE_UFRACT:
    case MODE_ACCUM:
    case MODE_UACCUM:
      return false;

    default:
      gcc_unreachable ();
    }
}
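
/* For instance (illustrative only): on a typical 32-bit target where
   BITS_PER_WORD is 32 and long long is 64 bits, DImode (64 bits) is
   accepted both as LONG_LONG_TYPE_SIZE and as the double-word case
   2 * BITS_PER_WORD, while TImode (128 bits) matches none of the C
   type sizes and is rejected.  A port that supports 128-bit
   arithmetic would override the hook to accept TImode as well.  */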

/* Return true if libgcc supports floating-point mode MODE (known to
   be supported as a scalar mode).  */

bool
default_libgcc_floating_mode_supported_p (machine_mode mode)
{
  switch (mode)
    {
#ifdef HAVE_SFmode
    case SFmode:
#endif
#ifdef HAVE_DFmode
    case DFmode:
#endif
#ifdef HAVE_XFmode
    case XFmode:
#endif
#ifdef HAVE_TFmode
    case TFmode:
#endif
      return true;

    default:
      return false;
    }
}

/* Make some target macros usable by target-independent code.  */
bool
targhook_words_big_endian (void)
{
  return !!WORDS_BIG_ENDIAN;
}

bool
targhook_float_words_big_endian (void)
{
  return !!FLOAT_WORDS_BIG_ENDIAN;
}

/* True if the target supports floating-point exceptions and rounding
   modes.  */

bool
default_float_exceptions_rounding_supported_p (void)
{
#ifdef HAVE_adddf3
  return HAVE_adddf3;
#else
  return false;
#endif
}

/* True if the target supports decimal floating point.  */

bool
default_decimal_float_supported_p (void)
{
  return ENABLE_DECIMAL_FLOAT;
}

/* True if the target supports fixed-point arithmetic.  */

bool
default_fixed_point_supported_p (void)
{
  return ENABLE_FIXED_POINT;
}

/* True if the target supports GNU indirect functions.  */

bool
default_has_ifunc_p (void)
{
  return HAVE_GNU_INDIRECT_FUNCTION;
}

/* Return NULL if INSN is valid within a low-overhead loop, otherwise
   return an error message.

   This function checks whether a given INSN is valid within a low-overhead
   loop.  If INSN is invalid it returns the reason for that, otherwise it
   returns NULL.  A called function may clobber any special registers required
   for low-overhead looping.  Additionally, some targets (e.g., PPC) use the
   count register for branch on table instructions.  We reject the doloop
   pattern in these cases.  */

const char *
default_invalid_within_doloop (const rtx_insn *insn)
{
  if (CALL_P (insn))
    return "Function call in loop.";

  if (tablejump_p (insn, NULL, NULL) || computed_jump_p (insn))
    return "Computed branch in the loop.";

  return NULL;
}

/* Mapping of builtin functions to vectorized variants.  */

tree
default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
				     tree type_out ATTRIBUTE_UNUSED,
				     tree type_in ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Vectorized conversion.  */

tree
default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
				       tree dest_type ATTRIBUTE_UNUSED,
				       tree src_type ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

/* Default vectorizer cost model values.  */

int
default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
				    tree vectype,
				    int misalign ATTRIBUTE_UNUSED)
{
  unsigned elements;

  switch (type_of_cost)
    {
      case scalar_stmt:
      case scalar_load:
      case scalar_store:
      case vector_stmt:
      case vector_load:
      case vector_store:
      case vec_to_scalar:
      case scalar_to_vec:
      case cond_branch_not_taken:
      case vec_perm:
      case vec_promote_demote:
	return 1;

      case unaligned_load:
      case unaligned_store:
	return 2;

      case cond_branch_taken:
	return 3;

      case vec_construct:
	elements = TYPE_VECTOR_SUBPARTS (vectype);
	return elements / 2 + 1;

      default:
	gcc_unreachable ();
    }
}
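
/* Illustrative costs under this default model: constructing a V8HI
   vector from scalars is charged 8 / 2 + 1 = 5 units, a misaligned
   load twice the cost of an aligned one, and a taken conditional
   branch three times the cost of an ordinary statement.  Ports with
   real cost data override TARGET_VECTORIZE_BUILTIN_VECTORIZATION_COST
   to replace these guesses.  */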

/* Reciprocal.  */

tree
default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
			    bool md_fn ATTRIBUTE_UNUSED,
			    bool sqrt ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return false;
}

bool
hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return true;
}

int
hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
	cumulative_args_t ca ATTRIBUTE_UNUSED,
	machine_mode mode ATTRIBUTE_UNUSED,
	tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
{
  return 0;
}

void
default_function_arg_advance (cumulative_args_t ca ATTRIBUTE_UNUSED,
			      machine_mode mode ATTRIBUTE_UNUSED,
			      const_tree type ATTRIBUTE_UNUSED,
			      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_function_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
		      machine_mode mode ATTRIBUTE_UNUSED,
		      const_tree type ATTRIBUTE_UNUSED,
		      bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_function_incoming_arg (cumulative_args_t ca ATTRIBUTE_UNUSED,
			       machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED,
			       bool named ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

unsigned int
default_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
			       const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

unsigned int
default_function_arg_round_boundary (machine_mode mode ATTRIBUTE_UNUSED,
				     const_tree type ATTRIBUTE_UNUSED)
{
  return PARM_BOUNDARY;
}

void
hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
{
}

const char *
hook_invalid_arg_for_unprototyped_fn (
	const_tree typelist ATTRIBUTE_UNUSED,
	const_tree funcdecl ATTRIBUTE_UNUSED,
	const_tree val ATTRIBUTE_UNUSED)
{
  return NULL;
}

/* Initialize the stack protection decls.  */

/* Stack protection related decls living in libgcc.  */
static GTY(()) tree stack_chk_guard_decl;

tree
default_stack_protect_guard (void)
{
  tree t = stack_chk_guard_decl;

  if (t == NULL)
    {
      rtx x;

      t = build_decl (UNKNOWN_LOCATION,
		      VAR_DECL, get_identifier ("__stack_chk_guard"),
		      ptr_type_node);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;

      /* Do not share RTL as the declaration is visible outside of
	 current function.  */
      x = DECL_RTL (t);
      RTX_FLAG (x, used) = 1;

      stack_chk_guard_decl = t;
    }

  return t;
}

static GTY(()) tree stack_chk_fail_decl;

tree
default_external_stack_protect_fail (void)
{
  tree t = stack_chk_fail_decl;

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION,
		      FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (t) = 1;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
}

tree
default_hidden_stack_protect_fail (void)
{
#ifndef HAVE_GAS_HIDDEN
  return default_external_stack_protect_fail ();
#else
  tree t = stack_chk_fail_decl;

  if (!flag_pic)
    return default_external_stack_protect_fail ();

  if (t == NULL_TREE)
    {
      t = build_function_type_list (void_type_node, NULL_TREE);
      t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
		      get_identifier ("__stack_chk_fail_local"), t);
      TREE_STATIC (t) = 1;
      TREE_PUBLIC (t) = 1;
      DECL_EXTERNAL (t) = 1;
      TREE_USED (t) = 1;
      TREE_THIS_VOLATILE (t) = 1;
      TREE_NOTHROW (t) = 1;
      DECL_ARTIFICIAL (t) = 1;
      DECL_IGNORED_P (t) = 1;
      DECL_VISIBILITY_SPECIFIED (t) = 1;
      DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;

      stack_chk_fail_decl = t;
    }

  return build_call_expr (t, 0);
#endif
}

bool
hook_bool_const_rtx_commutative_p (const_rtx x,
				   int outer_code ATTRIBUTE_UNUSED)
{
  return COMMUTATIVE_P (x);
}

rtx
default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
			const_tree fn_decl_or_type,
			bool outgoing ATTRIBUTE_UNUSED)
{
  /* The old interface doesn't handle receiving the function type.  */
  if (fn_decl_or_type
      && !DECL_P (fn_decl_or_type))
    fn_decl_or_type = NULL;

#ifdef FUNCTION_VALUE
  return FUNCTION_VALUE (ret_type, fn_decl_or_type);
#else
  gcc_unreachable ();
#endif
}

rtx
default_libcall_value (machine_mode mode ATTRIBUTE_UNUSED,
		       const_rtx fun ATTRIBUTE_UNUSED)
{
#ifdef LIBCALL_VALUE
  return LIBCALL_VALUE (mode);
#else
  gcc_unreachable ();
#endif
}

/* The default hook for TARGET_FUNCTION_VALUE_REGNO_P.  */

bool
default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
{
#ifdef FUNCTION_VALUE_REGNO_P
  return FUNCTION_VALUE_REGNO_P (regno);
#else
  gcc_unreachable ();
#endif
}

rtx
default_internal_arg_pointer (void)
{
  /* If the reg that the virtual arg pointer will be translated into is
     not a fixed reg or is the stack pointer, make a copy of the virtual
     arg pointer, and address parms via the copy.  The frame pointer is
     considered fixed even though it is not marked as such.  */
  if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
       || ! (fixed_regs[ARG_POINTER_REGNUM]
	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
    return copy_to_reg (virtual_incoming_args_rtx);
  else
    return virtual_incoming_args_rtx;
}

rtx
default_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
{
  if (incoming_p)
    {
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
#endif
    }

#ifdef STATIC_CHAIN_REGNUM
  return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
#endif

  {
    static bool issued_error;
    if (!issued_error)
      {
	issued_error = true;
	sorry ("nested functions not supported on this target");
      }

    /* It really doesn't matter what we return here, so long as it
       doesn't cause the rest of the compiler to crash.  */
    return gen_rtx_MEM (Pmode, stack_pointer_rtx);
  }
}

void
default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
			 rtx ARG_UNUSED (r_chain))
{
  sorry ("nested function trampolines not supported on this target");
}

int
default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
			  tree funtype ATTRIBUTE_UNUSED,
			  int size ATTRIBUTE_UNUSED)
{
  return 0;
}

reg_class_t
default_branch_target_register_class (void)
{
  return NO_REGS;
}

reg_class_t
default_ira_change_pseudo_allocno_class (int regno ATTRIBUTE_UNUSED,
					 reg_class_t cl)
{
  return cl;
}

extern bool
default_lra_p (void)
{
  return false;
}

int
default_register_priority (int hard_regno ATTRIBUTE_UNUSED)
{
  return 0;
}

extern bool
default_register_usage_leveling_p (void)
{
  return false;
}

extern bool
default_different_addr_displacement_p (void)
{
  return false;
}

reg_class_t
default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
			  reg_class_t reload_class_i ATTRIBUTE_UNUSED,
			  machine_mode reload_mode ATTRIBUTE_UNUSED,
			  secondary_reload_info *sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class reload_class = (enum reg_class) reload_class_i;

  if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
    {
      sri->icode = sri->prev_sri->t_icode;
      return NO_REGS;
    }
#ifdef SECONDARY_INPUT_RELOAD_CLASS
  if (in_p)
    rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
  if (! in_p)
    rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
#endif
  if (rclass != NO_REGS)
    {
      enum insn_code icode
	= direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
				reload_mode);

      if (icode != CODE_FOR_nothing
	  && !insn_operand_matches (icode, in_p, x))
	icode = CODE_FOR_nothing;
      else if (icode != CODE_FOR_nothing)
	{
	  const char *insn_constraint, *scratch_constraint;
	  enum reg_class insn_class, scratch_class;

	  gcc_assert (insn_data[(int) icode].n_operands == 3);
	  insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
	  if (!*insn_constraint)
	    insn_class = ALL_REGS;
	  else
	    {
	      if (in_p)
		{
		  gcc_assert (*insn_constraint == '=');
		  insn_constraint++;
		}
	      insn_class = (reg_class_for_constraint
			    (lookup_constraint (insn_constraint)));
	      gcc_assert (insn_class != NO_REGS);
	    }

	  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
	  /* The scratch register's constraint must start with "=&",
	     except for an input reload, where only "=" is necessary,
	     and where it might be beneficial to re-use registers from
	     the input.  */
	  gcc_assert (scratch_constraint[0] == '='
		      && (in_p || scratch_constraint[1] == '&'));
	  scratch_constraint++;
	  if (*scratch_constraint == '&')
	    scratch_constraint++;
	  scratch_class = (reg_class_for_constraint
			   (lookup_constraint (scratch_constraint)));

	  if (reg_class_subset_p (reload_class, insn_class))
	    {
	      gcc_assert (scratch_class == rclass);
	      rclass = NO_REGS;
	    }
	  else
	    rclass = insn_class;

	}
      if (rclass == NO_REGS)
	sri->icode = icode;
      else
	sri->t_icode = icode;
    }
  return rclass;
}

/* By default, if flag_pic is true, then neither local nor global relocs
   should be placed in readonly memory.  */

int
default_reloc_rw_mask (void)
{
  return flag_pic ? 3 : 0;
}

/* By default, do no modification.  */
tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
					 tree id)
{
   return id;
}

/* Default to natural alignment for vector types.  */
HOST_WIDE_INT
default_vector_alignment (const_tree type)
{
  return tree_to_shwi (TYPE_SIZE (type));
}

bool
default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
{
  if (is_packed)
    return false;

  /* Assume that types whose size is greater than pointer-size are not
     guaranteed to be naturally aligned.  */
  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
    return false;

  /* Assume that types whose size is less than or equal to pointer-size
     are naturally aligned.  */
  return true;
}
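
/* For example (illustrative): on a 64-bit target (POINTER_SIZE == 64),
   a V2SI vector (64 bits) is assumed reachable at its natural
   alignment, a V4SI vector (128 bits) is not, and a vector field in a
   packed struct never is.  */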

/* By default, assume that a target supports misaligned memory accesses
   of any factor if it provides a movmisalign pattern.
   IS_PACKED is true if the memory access is defined in a packed struct.  */
bool
default_builtin_support_vector_misalignment (machine_mode mode,
					     const_tree type
					     ATTRIBUTE_UNUSED,
					     int misalignment
					     ATTRIBUTE_UNUSED,
					     bool is_packed
					     ATTRIBUTE_UNUSED)
{
  if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
    return true;
  return false;
}

/* By default, only attempt to parallelize bitwise operations, and
   possibly adds/subtracts using bit-twiddling.  */

machine_mode
default_preferred_simd_mode (machine_mode mode ATTRIBUTE_UNUSED)
{
  return word_mode;
}

/* By default only the size derived from the preferred vector mode
   is tried.  */

unsigned int
default_autovectorize_vector_sizes (void)
{
  return 0;
}

/* By default, the cost model accumulates three separate costs (prologue,
   loop body, and epilogue) for a vectorized loop or block.  So allocate an
   array of three unsigned ints, set it to zero, and return its address.  */

void *
default_init_cost (struct loop *loop_info ATTRIBUTE_UNUSED)
{
  unsigned *cost = XNEWVEC (unsigned, 3);
  cost[vect_prologue] = cost[vect_body] = cost[vect_epilogue] = 0;
  return cost;
}

/* By default, the cost model looks up the cost of the given statement
   kind and mode, multiplies it by the occurrence count, accumulates
   it into the cost specified by WHERE, and returns the cost added.  */

unsigned
default_add_stmt_cost (void *data, int count, enum vect_cost_for_stmt kind,
		       struct _stmt_vec_info *stmt_info, int misalign,
		       enum vect_cost_model_location where)
{
  unsigned *cost = (unsigned *) data;
  unsigned retval = 0;

  tree vectype = stmt_info ? stmt_vectype (stmt_info) : NULL_TREE;
  int stmt_cost = targetm.vectorize.builtin_vectorization_cost (kind, vectype,
								misalign);
  /* Statements in an inner loop relative to the loop being
     vectorized are weighted more heavily.  The value here is
     arbitrary and could potentially be improved with analysis.  */
  if (where == vect_body && stmt_info && stmt_in_inner_loop_p (stmt_info))
    count *= 50;  /* FIXME.  */

  retval = (unsigned) (count * stmt_cost);
  cost[where] += retval;

  return retval;
}
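
/* A quick illustration of the arithmetic above: two unaligned_load
   statements (default cost 2 each) in the body of the loop being
   vectorized cost 2 * 2 = 4 units; the same two statements in an
   inner loop are weighted by 50 and cost 2 * 50 * 2 = 200 units, all
   accumulated into cost[vect_body].  */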

/* By default, the cost model just returns the accumulated costs.  */

void
default_finish_cost (void *data, unsigned *prologue_cost,
		     unsigned *body_cost, unsigned *epilogue_cost)
{
  unsigned *cost = (unsigned *) data;
  *prologue_cost = cost[vect_prologue];
  *body_cost = cost[vect_body];
  *epilogue_cost = cost[vect_epilogue];
}

/* Free the cost data.  */

void
default_destroy_cost_data (void *data)
{
  free (data);
}

/* Determine whether or not a pointer mode is valid.  Assume defaults
   of ptr_mode or Pmode - can be overridden.  */
bool
default_valid_pointer_mode (machine_mode mode)
{
  return (mode == ptr_mode || mode == Pmode);
}

/* Determine whether the memory reference specified by REF may alias
   the C library's errno location.  */
bool
default_ref_may_alias_errno (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  /* The default implementation assumes the errno location is
     a declaration of type int or is always accessed via a
     pointer to int.  We assume that accesses to errno are
     not deliberately obfuscated (even in conforming ways).  */
  if (TYPE_UNSIGNED (TREE_TYPE (base))
      || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
    return false;
  /* The default implementation assumes an errno location
     declaration is never defined in the current compilation unit.  */
  if (DECL_P (base)
      && !TREE_STATIC (base))
    return true;
  else if (TREE_CODE (base) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      return !pi || pi->pt.anything || pi->pt.nonlocal;
    }
  return false;
}
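
/* Two illustrative cases: a reference through the external declaration
   "extern int errno;" satisfies the DECL_P test (extern, so not
   TREE_STATIC) and is assumed to alias errno, whereas a dereference
   *p where points-to analysis has shown P points only to local memory
   (neither pt.anything nor pt.nonlocal) does not.  */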

/* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
   for the generic address space only.  */

machine_mode
default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return ptr_mode;
}

/* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
   for the generic address space only.  */

machine_mode
default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
  return Pmode;
}

/* Named address space version of valid_pointer_mode.  */

bool
default_addr_space_valid_pointer_mode (machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return (mode == targetm.addr_space.pointer_mode (as)
	    || mode == targetm.addr_space.address_mode (as));

  return targetm.valid_pointer_mode (mode);
}

/* Some places still assume that all pointer or address modes are the
   standard Pmode and ptr_mode.  These optimizations become invalid if
   the target actually supports multiple different modes.  For now,
   we disable such optimizations on such targets, using this function.  */

bool
target_default_pointer_address_modes_p (void)
{
  if (targetm.addr_space.address_mode != default_addr_space_address_mode)
    return false;
  if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
    return false;

  return true;
}

/* Named address space version of legitimate_address_p.  */

bool
default_addr_space_legitimate_address_p (machine_mode mode, rtx mem,
					 bool strict, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    gcc_unreachable ();

  return targetm.legitimate_address_p (mode, mem, strict);
}

/* Named address space version of LEGITIMIZE_ADDRESS.  */

rtx
default_addr_space_legitimize_address (rtx x, rtx oldx,
				       machine_mode mode, addr_space_t as)
{
  if (!ADDR_SPACE_GENERIC_P (as))
    return x;

  return targetm.legitimize_address (x, oldx, mode);
}

/* The default hook for determining if one named address space is a subset of
   another and to return which address space to use as the common address
   space.  */

bool
default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
{
  return (subset == superset);
}

/* The default hook for TARGET_ADDR_SPACE_CONVERT.  This hook should never be
   called for targets with only a generic address space.  */

rtx
default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
			    tree from_type ATTRIBUTE_UNUSED,
			    tree to_type ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

bool
default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
{
  return true;
}

/* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P.  */

bool
default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED,
				  addr_space_t addrspace ATTRIBUTE_UNUSED)
{
  return false;
}

bool
default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
					 tree ARG_UNUSED (name),
					 tree ARG_UNUSED (args),
					 int ARG_UNUSED (flags))
{
  warning (OPT_Wattributes,
	   "target attribute is not supported on this machine");

  return false;
}

bool
default_target_option_pragma_parse (tree ARG_UNUSED (args),
				    tree ARG_UNUSED (pop_target))
{
  warning (OPT_Wpragmas,
	   "#pragma GCC target is not supported for this machine");

  return false;
}

bool
default_target_can_inline_p (tree caller, tree callee)
{
  bool ret = false;
  tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
  tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);

  /* If callee has no option attributes, then it is ok to inline.  */
  if (!callee_opts)
    ret = true;

  /* If caller has no option attributes, but callee does then it is not ok to
     inline.  */
  else if (!caller_opts)
    ret = false;

  /* If both caller and callee have attributes, assume that if the
     pointer is different, the two functions have different target
     options since build_target_option_node uses a hash table for the
     options.  */
  else
    ret = (callee_opts == caller_opts);

  return ret;
}

#ifndef HAVE_casesi
# define HAVE_casesi 0
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */

unsigned int
default_case_values_threshold (void)
{
  return (HAVE_casesi ? 4 : 5);
}

bool
default_have_conditional_execution (void)
{
#ifdef HAVE_conditional_execution
  return HAVE_conditional_execution;
#else
  return false;
#endif
}

/* By default we assume that C99 functions are present in the runtime,
   but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

tree
default_builtin_tm_load_store (tree ARG_UNUSED (type))
{
  return NULL_TREE;
}

/* Compute cost of moving registers to/from memory.  */

int
default_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			  reg_class_t rclass ATTRIBUTE_UNUSED,
			  bool in ATTRIBUTE_UNUSED)
{
#ifndef MEMORY_MOVE_COST
  return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
#else
  return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
#endif
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
default_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			    reg_class_t from ATTRIBUTE_UNUSED,
			    reg_class_t to ATTRIBUTE_UNUSED)
{
#ifndef REGISTER_MOVE_COST
  return 2;
#else
  return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
#endif
}

/* For hooks which use the MOVE_RATIO macro, this gives the legacy default
   behaviour.  SPEED_P is true if we are compiling for speed.  */

unsigned int
get_move_ratio (bool speed_p ATTRIBUTE_UNUSED)
{
  unsigned int move_ratio;
#ifdef MOVE_RATIO
  move_ratio = (unsigned int) MOVE_RATIO (speed_p);
#else
#if defined (HAVE_movmemqi) || defined (HAVE_movmemhi) || defined (HAVE_movmemsi) || defined (HAVE_movmemdi) || defined (HAVE_movmemti)
  move_ratio = 2;
#else /* No movmem patterns, pick a default.  */
  move_ratio = ((speed_p) ? 15 : 3);
#endif
#endif
  return move_ratio;
}

/* Return TRUE if the move_by_pieces/set_by_pieces infrastructure should be
   used; return FALSE if the movmem/setmem optab should be expanded, or
   a call to memcpy emitted.  */

bool
default_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
					unsigned int alignment,
					enum by_pieces_operation op,
					bool speed_p)
{
  unsigned int max_size = 0;
  unsigned int ratio = 0;

  switch (op)
    {
      case CLEAR_BY_PIECES:
	max_size = STORE_MAX_PIECES;
	ratio = CLEAR_RATIO (speed_p);
	break;
      case MOVE_BY_PIECES:
	max_size = MOVE_MAX_PIECES;
	ratio = get_move_ratio (speed_p);
	break;
      case SET_BY_PIECES:
	max_size = STORE_MAX_PIECES;
	ratio = SET_RATIO (speed_p);
	break;
      case STORE_BY_PIECES:
	max_size = STORE_MAX_PIECES;
	ratio = get_move_ratio (speed_p);
	break;
    }

  return move_by_pieces_ninsns (size, alignment, max_size + 1) < ratio;
}
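
/* A sketch of the decision (insn counts are illustrative; the real
   numbers come from move_by_pieces_ninsns and are target-dependent):
   a 16-byte, word-aligned copy that can be done in 4 word-sized moves,
   against a speed-tuned move ratio of 15, satisfies 4 < 15 and is
   expanded inline by move_by_pieces; a 4096-byte copy needing on the
   order of 1024 moves fails the test and falls back to the movmem
   optab or a memcpy call.  */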

bool
default_profile_before_prologue (void)
{
#ifdef PROFILE_BEFORE_PROLOGUE
  return true;
#else
  return false;
#endif
}

/* The default implementation of TARGET_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
				reg_class_t rclass)
{
#ifdef PREFERRED_RELOAD_CLASS
  return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
#else
  return rclass;
#endif
}

/* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS.  */

reg_class_t
default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
				       reg_class_t rclass)
{
  return rclass;
}

/* The default implementation of TARGET_PREFERRED_RENAME_CLASS.  */
reg_class_t
default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
{
  return NO_REGS;
}

/* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P.  */

bool
default_class_likely_spilled_p (reg_class_t rclass)
{
  return (reg_class_size[(int) rclass] == 1);
}

/* The default implementation of TARGET_CLASS_MAX_NREGS.  */

unsigned char
default_class_max_nregs (reg_class_t rclass ATTRIBUTE_UNUSED,
			 machine_mode mode ATTRIBUTE_UNUSED)
{
#ifdef CLASS_MAX_NREGS
  return (unsigned char) CLASS_MAX_NREGS ((enum reg_class) rclass, mode);
#else
  return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
#endif
}

/* Determine the debugging unwind mechanism for the target.  */

enum unwind_info_type
default_debug_unwind_info (void)
{
  /* If the target wants to force the use of dwarf2 unwind info, let it.  */
  /* ??? Change all users to the hook, then poison this.  */
#ifdef DWARF2_FRAME_INFO
  if (DWARF2_FRAME_INFO)
    return UI_DWARF2;
#endif

  /* Otherwise, only turn it on if dwarf2 debugging is enabled.  */
#ifdef DWARF2_DEBUGGING_INFO
  if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
    return UI_DWARF2;
#endif

  return UI_NONE;
}

/* Determine the correct mode for a Dwarf frame register that represents
   register REGNO.  */

machine_mode
default_dwarf_frame_reg_mode (int regno)
{
  machine_mode save_mode = reg_raw_mode[regno];

  if (HARD_REGNO_CALL_PART_CLOBBERED (regno, save_mode))
    save_mode = choose_hard_reg_mode (regno, 1, true);
  return save_mode;
}

/* To be used by targets where reg_raw_mode doesn't return the right
   mode for registers used in apply_builtin_return and apply_builtin_arg.  */

machine_mode
default_get_reg_raw_mode (int regno)
{
  return reg_raw_mode[regno];
}

/* Return true if a leaf function should stay leaf even with profiling
   enabled.  */

bool
default_keep_leaf_when_profiled ()
{
  return false;
}

/* Return true if the state of option OPTION should be stored in PCH files
   and checked by default_pch_valid_p.  Store the option's current state
   in STATE if so.  */

static inline bool
option_affects_pch_p (int option, struct cl_option_state *state)
{
  if ((cl_options[option].flags & CL_TARGET) == 0)
    return false;
  if ((cl_options[option].flags & CL_PCH_IGNORE) != 0)
    return false;
  if (option_flag_var (option, &global_options) == &target_flags)
    if (targetm.check_pch_target_flags)
      return false;
  return get_option_state (&global_options, option, state);
}

/* Default version of get_pch_validity.
   By default, every flag difference is fatal; that will be mostly right for
   most targets, but completely right for very few.  */

void *
default_get_pch_validity (size_t *sz)
{
  struct cl_option_state state;
  size_t i;
  char *result, *r;

  *sz = 2;
  if (targetm.check_pch_target_flags)
    *sz += sizeof (target_flags);
  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      *sz += state.size;

  result = r = XNEWVEC (char, *sz);
  r[0] = flag_pic;
  r[1] = flag_pie;
  r += 2;
  if (targetm.check_pch_target_flags)
    {
      memcpy (r, &target_flags, sizeof (target_flags));
      r += sizeof (target_flags);
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	memcpy (r, state.data, state.size);
	r += state.size;
      }

  return result;
}
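
/* Layout of the validity blob built above (matching what
   default_pch_valid_p consumes below):

     byte 0                  flag_pic
     byte 1                  flag_pie
     [sizeof target_flags]   target_flags, present only if the target
			     defines check_pch_target_flags
     ...                     the raw cl_option_state data of every
			     option for which option_affects_pch_p is
			     true, in cl_options order

   Producer and consumer iterate the options in the same order, so a
   simple memcmp per option suffices for validation.  */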

/* Return a message which says that a PCH file was created with a different
   setting of OPTION.  */

static const char *
pch_option_mismatch (const char *option)
{
  return xasprintf (_("created and used with differing settings of '%s'"),
		    option);
}

/* Default version of pch_valid_p.  */

const char *
default_pch_valid_p (const void *data_p, size_t len)
{
  struct cl_option_state state;
  const char *data = (const char *)data_p;
  size_t i;

  /* -fpic and -fpie also usually make a PCH invalid.  */
  if (data[0] != flag_pic)
    return _("created and used with different settings of -fpic");
  if (data[1] != flag_pie)
    return _("created and used with different settings of -fpie");
  data += 2;

  /* Check target_flags.  */
  if (targetm.check_pch_target_flags)
    {
      int tf;
      const char *r;

      memcpy (&tf, data, sizeof (target_flags));
      data += sizeof (target_flags);
      len -= sizeof (target_flags);
      r = targetm.check_pch_target_flags (tf);
      if (r != NULL)
	return r;
    }

  for (i = 0; i < cl_options_count; i++)
    if (option_affects_pch_p (i, &state))
      {
	if (memcmp (data, state.data, state.size) != 0)
	  return pch_option_mismatch (cl_options[i].opt_text);
	data += state.size;
	len -= state.size;
      }

  return NULL;
}

/* Default version of cstore_mode.  */

machine_mode
default_cstore_mode (enum insn_code icode)
{
  return insn_data[(int) icode].operand[0].mode;
}

/* Default version of member_type_forces_blk.  */

bool
default_member_type_forces_blk (const_tree, machine_mode)
{
  return false;
}

rtx
default_load_bounds_for_arg (rtx addr ATTRIBUTE_UNUSED,
			     rtx ptr ATTRIBUTE_UNUSED,
			     rtx bnd ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_bounds_for_arg (rtx val ATTRIBUTE_UNUSED,
			      rtx addr ATTRIBUTE_UNUSED,
			      rtx bounds ATTRIBUTE_UNUSED,
			      rtx to ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

rtx
default_load_returned_bounds (rtx slot ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

void
default_store_returned_bounds (rtx slot ATTRIBUTE_UNUSED,
			       rtx bounds ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

/* Default version of canonicalize_comparison.  */

void
default_canonicalize_comparison (int *, rtx *, rtx *, bool)
{
}

/* Default implementation of TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */

void
default_atomic_assign_expand_fenv (tree *, tree *, tree *)
{
}

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif

/* Build an indirect-ref expression over the given TREE, which represents a
   piece of a va_arg() expansion.  */
tree
build_va_arg_indirect_ref (tree addr)
{
  addr = build_simple_mem_ref_loc (EXPR_LOCATION (addr), addr);
  return addr;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */

tree
std_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
			  gimple_seq *post_p)
{
  tree addr, t, type_size, rounded_size, valist_tmp;
  unsigned HOST_WIDE_INT align, boundary;
  bool indirect;

  /* All of the alignment and movement below is for args-grow-up machines.
     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
     implement their own specialized gimplify_va_arg_expr routines.  */
  if (ARGS_GROW_DOWNWARD)
    gcc_unreachable ();

  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
  if (indirect)
    type = build_pointer_type (type);

  align = PARM_BOUNDARY / BITS_PER_UNIT;
  boundary = targetm.calls.function_arg_boundary (TYPE_MODE (type), type);

  /* When we align parameter on stack for caller, if the parameter
     alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
     aligned at MAX_SUPPORTED_STACK_ALIGNMENT.  We will match callee
     here with caller.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  boundary /= BITS_PER_UNIT;

  /* Hoist the valist value into a temporary for the moment.  */
  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
     requires greater alignment, we must perform dynamic alignment.  */
  if (boundary > align
      && !integer_zerop (TYPE_SIZE (type)))
    {
      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build_pointer_plus_hwi (valist_tmp, boundary - 1));
      gimplify_and_add (t, pre_p);

      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
		  fold_build2 (BIT_AND_EXPR, TREE_TYPE (valist),
			       valist_tmp,
			       build_int_cst (TREE_TYPE (valist), -boundary)));
      gimplify_and_add (t, pre_p);
    }
  else
    boundary = align;

  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
  if (boundary < TYPE_ALIGN (type))
    {
      type = build_variant_type_copy (type);
      TYPE_ALIGN (type) = boundary;
    }

  /* Compute the rounded size of the type.  */
  type_size = size_in_bytes (type);
  rounded_size = round_up (type_size, align);

  /* Reduce rounded_size so it's sharable with the postqueue.  */
  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

  /* Get AP.  */
  addr = valist_tmp;
  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
    {
      /* Small args are padded downward.  */
      t = fold_build2_loc (input_location, GT_EXPR, sizetype,
			   rounded_size, size_int (align));
      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
		       size_binop (MINUS_EXPR, rounded_size, type_size));
      addr = fold_build_pointer_plus (addr, t);
    }

  /* Compute new value for AP.  */
  t = fold_build_pointer_plus (valist_tmp, rounded_size);
  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
  gimplify_and_add (t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);

  if (indirect)
    addr = build_va_arg_indirect_ref (addr);

  return build_va_arg_indirect_ref (addr);
}
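
/* A worked example of the dynamic alignment above (values purely
   illustrative): with PARM_BOUNDARY of 64 bits, align is 8 bytes.
   Fetching a 16-byte-aligned argument (boundary == 16) from a va_list
   currently at address 0x1008 first rounds the pointer up,

     (0x1008 + 16 - 1) & -16 == 0x1010

   reads the value there, and then advances the va_list by the rounded
   size of the type.  */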

tree
default_chkp_bound_type (void)
{
  tree res = make_node (POINTER_BOUNDS_TYPE);
  TYPE_PRECISION (res) = TYPE_PRECISION (size_type_node) * 2;
  TYPE_NAME (res) = get_identifier ("__bounds_type");
  SET_TYPE_MODE (res, targetm.chkp_bound_mode ());
  layout_type (res);
  return res;
}

enum machine_mode
default_chkp_bound_mode (void)
{
  return VOIDmode;
}

tree
default_builtin_chkp_function (unsigned int fcode ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

rtx
default_chkp_function_value_bounds (const_tree ret_type ATTRIBUTE_UNUSED,
				    const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
				    bool outgoing ATTRIBUTE_UNUSED)
{
  gcc_unreachable ();
}

tree
default_chkp_make_bounds_constant (HOST_WIDE_INT lb ATTRIBUTE_UNUSED,
				   HOST_WIDE_INT ub ATTRIBUTE_UNUSED)
{
  return NULL_TREE;
}

int
default_chkp_initialize_bounds (tree var ATTRIBUTE_UNUSED,
				tree lb ATTRIBUTE_UNUSED,
				tree ub ATTRIBUTE_UNUSED,
				tree *stmts ATTRIBUTE_UNUSED)
{
  return 0;
}

void
default_setup_incoming_vararg_bounds (cumulative_args_t ca ATTRIBUTE_UNUSED,
				      enum machine_mode mode ATTRIBUTE_UNUSED,
				      tree type ATTRIBUTE_UNUSED,
				      int *pretend_arg_size ATTRIBUTE_UNUSED,
				      int second_time ATTRIBUTE_UNUSED)
{
}

/* An implementation of TARGET_CAN_USE_DOLOOP_P for targets that do
   not support nested low-overhead loops.  */

bool
can_use_doloop_if_innermost (const widest_int &, const widest_int &,
			     unsigned int loop_depth, bool)
{
  return loop_depth == 1;
}

#include "gt-targhooks.h"