1 /* Default target hook functions.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* The migration of target macros to target hooks works as follows:
22
23 1. Create a target hook that uses the existing target macros to
24 implement the same functionality.
25
26 2. Convert all the MI files to use the hook instead of the macro.
27
28 3. Repeat for a majority of the remaining target macros. This will
29 take some time.
30
31 4. Tell target maintainers to start migrating.
32
33 5. Eventually convert the backends to override the hook instead of
34 defining the macros. This will take some time too.
35
36 6. At a point TBD, poison the macros. Unmigrated targets will break at
37 this point.
38
39 Note that we expect steps 1-3 to be done by the people that
40 understand what the MI does with each macro, and step 5 to be done
41 by the target maintainers for their respective targets.
42
43 Note that steps 1 and 2 don't have to be done together, but no
44 target can override the new hook until step 2 is complete for it.
45
46 Once the macros are poisoned, we will revert to the old migration
47 rules - migrate the macro, callers, and targets all at once. This
48 comment can thus be removed at that point. */
49
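/* As an illustration of step 5 above (a hedged sketch, not part of this
   file): a backend typically overrides a hook from its own machine.c by
   redefining the corresponding TARGET_* macro before instantiating the
   target vector, e.g.

       #undef  TARGET_LEGITIMATE_ADDRESS_P
       #define TARGET_LEGITIMATE_ADDRESS_P machine_legitimate_address_p

       struct gcc_target targetm = TARGET_INITIALIZER;

   The file name and the function name machine_legitimate_address_p are
   illustrative only.  Until such an override exists, the default_* and
   hook_* implementations below are used, many of which simply defer to
   the old target macros.  */
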
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "machmode.h"
55 #include "rtl.h"
56 #include "tree.h"
57 #include "expr.h"
58 #include "output.h"
59 #include "diagnostic-core.h"
60 #include "function.h"
61 #include "target.h"
62 #include "tm_p.h"
63 #include "target-def.h"
64 #include "ggc.h"
65 #include "hard-reg-set.h"
66 #include "regs.h"
67 #include "reload.h"
68 #include "optabs.h"
69 #include "recog.h"
70 #include "intl.h"
71 #include "opts.h"
72 #include "tree-flow.h"
73 #include "tree-ssa-alias.h"
74
75
76 bool
77 default_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
78 rtx addr ATTRIBUTE_UNUSED,
79 bool strict ATTRIBUTE_UNUSED)
80 {
81 #ifdef GO_IF_LEGITIMATE_ADDRESS
82 /* Defer to the old implementation using a goto. */
83 if (strict)
84 return strict_memory_address_p (mode, addr);
85 else
86 return memory_address_p (mode, addr);
87 #else
88 gcc_unreachable ();
89 #endif
90 }
91
92 void
93 default_external_libcall (rtx fun ATTRIBUTE_UNUSED)
94 {
95 #ifdef ASM_OUTPUT_EXTERNAL_LIBCALL
96 ASM_OUTPUT_EXTERNAL_LIBCALL(asm_out_file, fun);
97 #endif
98 }
99
100 int
101 default_unspec_may_trap_p (const_rtx x, unsigned flags)
102 {
103 int i;
104
105 if (GET_CODE (x) == UNSPEC_VOLATILE
106 /* Any floating arithmetic may trap. */
107 || (SCALAR_FLOAT_MODE_P (GET_MODE (x))
108 && flag_trapping_math))
109 return 1;
110
111 for (i = 0; i < XVECLEN (x, 0); ++i)
112 {
113 if (may_trap_p_1 (XVECEXP (x, 0, i), flags))
114 return 1;
115 }
116
117 return 0;
118 }
119
120 enum machine_mode
121 default_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
122 enum machine_mode mode,
123 int *punsignedp ATTRIBUTE_UNUSED,
124 const_tree funtype ATTRIBUTE_UNUSED,
125 int for_return ATTRIBUTE_UNUSED)
126 {
127 if (for_return == 2)
128 return promote_mode (type, mode, punsignedp);
129 return mode;
130 }
131
132 enum machine_mode
133 default_promote_function_mode_always_promote (const_tree type,
134 enum machine_mode mode,
135 int *punsignedp,
136 const_tree funtype ATTRIBUTE_UNUSED,
137 int for_return ATTRIBUTE_UNUSED)
138 {
139 return promote_mode (type, mode, punsignedp);
140 }
141
142
143 enum machine_mode
144 default_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2)
145 {
146 if (m1 == m2)
147 return m1;
148 return VOIDmode;
149 }
150
151 bool
152 default_return_in_memory (const_tree type,
153 const_tree fntype ATTRIBUTE_UNUSED)
154 {
155 return (TYPE_MODE (type) == BLKmode);
156 }
157
158 rtx
159 default_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
160 enum machine_mode mode ATTRIBUTE_UNUSED)
161 {
162 return x;
163 }
164
165 rtx
166 default_expand_builtin_saveregs (void)
167 {
168 error ("__builtin_saveregs not supported by this target");
169 return const0_rtx;
170 }
171
172 void
173 default_setup_incoming_varargs (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
174 enum machine_mode mode ATTRIBUTE_UNUSED,
175 tree type ATTRIBUTE_UNUSED,
176 int *pretend_arg_size ATTRIBUTE_UNUSED,
177 int second_time ATTRIBUTE_UNUSED)
178 {
179 }
180
181 /* The default implementation of TARGET_BUILTIN_SETJMP_FRAME_VALUE. */
182
183 rtx
184 default_builtin_setjmp_frame_value (void)
185 {
186 return virtual_stack_vars_rtx;
187 }
188
189 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns false. */
190
191 bool
192 hook_bool_CUMULATIVE_ARGS_false (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
193 {
194 return false;
195 }
196
197 bool
198 default_pretend_outgoing_varargs_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED)
199 {
200 return (targetm.calls.setup_incoming_varargs
201 != default_setup_incoming_varargs);
202 }
203
204 enum machine_mode
205 default_eh_return_filter_mode (void)
206 {
207 return targetm.unwind_word_mode ();
208 }
209
210 enum machine_mode
211 default_libgcc_cmp_return_mode (void)
212 {
213 return word_mode;
214 }
215
216 enum machine_mode
217 default_libgcc_shift_count_mode (void)
218 {
219 return word_mode;
220 }
221
222 enum machine_mode
223 default_unwind_word_mode (void)
224 {
225 return word_mode;
226 }
227
228 /* The default implementation of TARGET_SHIFT_TRUNCATION_MASK. */
229
230 unsigned HOST_WIDE_INT
231 default_shift_truncation_mask (enum machine_mode mode)
232 {
233 return SHIFT_COUNT_TRUNCATED ? GET_MODE_BITSIZE (mode) - 1 : 0;
234 }
235
236 /* The default implementation of TARGET_MIN_DIVISIONS_FOR_RECIP_MUL. */
237
238 unsigned int
239 default_min_divisions_for_recip_mul (enum machine_mode mode ATTRIBUTE_UNUSED)
240 {
241 return have_insn_for (DIV, mode) ? 3 : 2;
242 }
243
244 /* The default implementation of TARGET_MODE_REP_EXTENDED. */
245
246 int
247 default_mode_rep_extended (enum machine_mode mode ATTRIBUTE_UNUSED,
248 enum machine_mode mode_rep ATTRIBUTE_UNUSED)
249 {
250 return UNKNOWN;
251 }
252
253 /* Generic hook that takes a CUMULATIVE_ARGS pointer and returns true. */
254
255 bool
256 hook_bool_CUMULATIVE_ARGS_true (CUMULATIVE_ARGS * a ATTRIBUTE_UNUSED)
257 {
258 return true;
259 }
260
261 /* Return machine mode for non-standard suffix
262 or VOIDmode if non-standard suffixes are unsupported. */
263 enum machine_mode
264 default_mode_for_suffix (char suffix ATTRIBUTE_UNUSED)
265 {
266 return VOIDmode;
267 }
268
269 /* The generic C++ ABI specifies this is a 64-bit value. */
270 tree
271 default_cxx_guard_type (void)
272 {
273 return long_long_integer_type_node;
274 }
275
276
277 /* Returns the size of the cookie to use when allocating an array
278 whose elements have the indicated TYPE. Assumes that it is already
279 known that a cookie is needed. */
280
281 tree
282 default_cxx_get_cookie_size (tree type)
283 {
284 tree cookie_size;
285
286 /* We need to allocate an additional max (sizeof (size_t), alignof
287 (TYPE)) bytes. */
288 tree sizetype_size;
289 tree type_align;
290
291 sizetype_size = size_in_bytes (sizetype);
292 type_align = size_int (TYPE_ALIGN_UNIT (type));
293 if (INT_CST_LT_UNSIGNED (type_align, sizetype_size))
294 cookie_size = sizetype_size;
295 else
296 cookie_size = type_align;
297
298 return cookie_size;
299 }
300
301 /* Return true if a parameter must be passed by reference. This version
302 of the TARGET_PASS_BY_REFERENCE hook uses just MUST_PASS_IN_STACK. */
303
304 bool
305 hook_pass_by_reference_must_pass_in_stack (CUMULATIVE_ARGS *c ATTRIBUTE_UNUSED,
306 enum machine_mode mode ATTRIBUTE_UNUSED, const_tree type ATTRIBUTE_UNUSED,
307 bool named_arg ATTRIBUTE_UNUSED)
308 {
309 return targetm.calls.must_pass_in_stack (mode, type);
310 }
311
312 /* Return true if a parameter follows callee copies conventions. This
313 version of the hook is true for all named arguments. */
314
315 bool
316 hook_callee_copies_named (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
317 enum machine_mode mode ATTRIBUTE_UNUSED,
318 const_tree type ATTRIBUTE_UNUSED, bool named)
319 {
320 return named;
321 }
322
323 /* Emit to STREAM the assembler syntax for insn operand X. */
324
325 void
326 default_print_operand (FILE *stream ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
327 int code ATTRIBUTE_UNUSED)
328 {
329 #ifdef PRINT_OPERAND
330 PRINT_OPERAND (stream, x, code);
331 #else
332 gcc_unreachable ();
333 #endif
334 }
335
336 /* Emit to STREAM the assembler syntax for an insn operand whose memory
337 address is X. */
338
339 void
340 default_print_operand_address (FILE *stream ATTRIBUTE_UNUSED,
341 rtx x ATTRIBUTE_UNUSED)
342 {
343 #ifdef PRINT_OPERAND_ADDRESS
344 PRINT_OPERAND_ADDRESS (stream, x);
345 #else
346 gcc_unreachable ();
347 #endif
348 }
349
350 /* Return true if CODE is a valid punctuation character for the
351 `print_operand' hook. */
352
353 bool
354 default_print_operand_punct_valid_p (unsigned char code ATTRIBUTE_UNUSED)
355 {
356 #ifdef PRINT_OPERAND_PUNCT_VALID_P
357 return PRINT_OPERAND_PUNCT_VALID_P (code);
358 #else
359 return false;
360 #endif
361 }
362
363 /* The default implementation of TARGET_MANGLE_ASSEMBLER_NAME. */
364 tree
365 default_mangle_assembler_name (const char *name ATTRIBUTE_UNUSED)
366 {
367 const char *skipped = name + (*name == '*' ? 1 : 0);
368 const char *stripped = targetm.strip_name_encoding (skipped);
369 if (*name != '*' && user_label_prefix[0])
370 stripped = ACONCAT ((user_label_prefix, stripped, NULL));
371 return get_identifier (stripped);
372 }
373
374 /* The default implementation of TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA. */
375
376 bool
377 default_asm_output_addr_const_extra (FILE *file ATTRIBUTE_UNUSED,
378 rtx x ATTRIBUTE_UNUSED)
379 {
380 #ifdef OUTPUT_ADDR_CONST_EXTRA
381 OUTPUT_ADDR_CONST_EXTRA (file, x, fail);
382 return true;
383
384 fail:
385 #endif
386 return false;
387 }
388
389 /* True if MODE is valid for the target. By "valid", we mean able to
390 be manipulated in non-trivial ways. In particular, this means all
391 the arithmetic is supported.
392
393 By default we guess this means that any C type is supported. If
394 we can't map the mode back to a type that would be available in C,
395 then reject it. A special case here is the double-word arithmetic
396 supported by optabs.c. */
397
398 bool
399 default_scalar_mode_supported_p (enum machine_mode mode)
400 {
401 int precision = GET_MODE_PRECISION (mode);
402
403 switch (GET_MODE_CLASS (mode))
404 {
405 case MODE_PARTIAL_INT:
406 case MODE_INT:
407 if (precision == CHAR_TYPE_SIZE)
408 return true;
409 if (precision == SHORT_TYPE_SIZE)
410 return true;
411 if (precision == INT_TYPE_SIZE)
412 return true;
413 if (precision == LONG_TYPE_SIZE)
414 return true;
415 if (precision == LONG_LONG_TYPE_SIZE)
416 return true;
417 if (precision == 2 * BITS_PER_WORD)
418 return true;
419 return false;
420
421 case MODE_FLOAT:
422 if (precision == FLOAT_TYPE_SIZE)
423 return true;
424 if (precision == DOUBLE_TYPE_SIZE)
425 return true;
426 if (precision == LONG_DOUBLE_TYPE_SIZE)
427 return true;
428 return false;
429
430 case MODE_DECIMAL_FLOAT:
431 case MODE_FRACT:
432 case MODE_UFRACT:
433 case MODE_ACCUM:
434 case MODE_UACCUM:
435 return false;
436
437 default:
438 gcc_unreachable ();
439 }
440 }
441
442 /* Make some target macros usable by target-independent code. */
443 bool
444 targhook_words_big_endian (void)
445 {
446 return !!WORDS_BIG_ENDIAN;
447 }
448
449 bool
450 targhook_float_words_big_endian (void)
451 {
452 return !!FLOAT_WORDS_BIG_ENDIAN;
453 }
454
455 /* True if the target supports decimal floating point. */
456
457 bool
458 default_decimal_float_supported_p (void)
459 {
460 return ENABLE_DECIMAL_FLOAT;
461 }
462
463 /* True if the target supports fixed-point arithmetic. */
464
465 bool
466 default_fixed_point_supported_p (void)
467 {
468 return ENABLE_FIXED_POINT;
469 }
470
471 /* Return NULL if INSN is valid within a low-overhead loop; otherwise return
472 an error message.
473
474 This function checks whether a given INSN is valid within a low-overhead
475 loop. If INSN is invalid it returns the reason for that, otherwise it
476 returns NULL. A called function may clobber any special registers required
477 for low-overhead looping. Additionally, some targets (e.g. PPC) use the
478 count register for branch-on-table instructions. We reject the doloop
479 pattern in these cases. */
480
481 const char *
482 default_invalid_within_doloop (const_rtx insn)
483 {
484 if (CALL_P (insn))
485 return "Function call in loop.";
486
487 if (JUMP_TABLE_DATA_P (insn))
488 return "Computed branch in the loop.";
489
490 return NULL;
491 }
492
493 /* Mapping of builtin functions to vectorized variants. */
494
495 tree
496 default_builtin_vectorized_function (tree fndecl ATTRIBUTE_UNUSED,
497 tree type_out ATTRIBUTE_UNUSED,
498 tree type_in ATTRIBUTE_UNUSED)
499 {
500 return NULL_TREE;
501 }
502
503 /* Vectorized conversion. */
504
505 tree
506 default_builtin_vectorized_conversion (unsigned int code ATTRIBUTE_UNUSED,
507 tree dest_type ATTRIBUTE_UNUSED,
508 tree src_type ATTRIBUTE_UNUSED)
509 {
510 return NULL_TREE;
511 }
512
513 /* Default vectorizer cost model values. */
514
515 int
516 default_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
517 tree vectype ATTRIBUTE_UNUSED,
518 int misalign ATTRIBUTE_UNUSED)
519 {
520 switch (type_of_cost)
521 {
522 case scalar_stmt:
523 case scalar_load:
524 case scalar_store:
525 case vector_stmt:
526 case vector_load:
527 case vector_store:
528 case vec_to_scalar:
529 case scalar_to_vec:
530 case cond_branch_not_taken:
531 case vec_perm:
532 return 1;
533
534 case unaligned_load:
535 case unaligned_store:
536 return 2;
537
538 case cond_branch_taken:
539 return 3;
540
541 default:
542 gcc_unreachable ();
543 }
544 }
545
546 /* Reciprocal. */
547
548 tree
549 default_builtin_reciprocal (unsigned int fn ATTRIBUTE_UNUSED,
550 bool md_fn ATTRIBUTE_UNUSED,
551 bool sqrt ATTRIBUTE_UNUSED)
552 {
553 return NULL_TREE;
554 }
555
556 bool
557 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_false (
558 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
559 enum machine_mode mode ATTRIBUTE_UNUSED,
560 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
561 {
562 return false;
563 }
564
565 bool
566 hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true (
567 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
568 enum machine_mode mode ATTRIBUTE_UNUSED,
569 const_tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
570 {
571 return true;
572 }
573
574 int
575 hook_int_CUMULATIVE_ARGS_mode_tree_bool_0 (
576 CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
577 enum machine_mode mode ATTRIBUTE_UNUSED,
578 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
579 {
580 return 0;
581 }
582
583 void
584 default_function_arg_advance (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
585 enum machine_mode mode ATTRIBUTE_UNUSED,
586 const_tree type ATTRIBUTE_UNUSED,
587 bool named ATTRIBUTE_UNUSED)
588 {
589 #ifdef FUNCTION_ARG_ADVANCE
590 CUMULATIVE_ARGS args = *ca;
591 FUNCTION_ARG_ADVANCE (args, mode, CONST_CAST_TREE (type), named);
592 *ca = args;
593 #else
594 gcc_unreachable ();
595 #endif
596 }
597
598 rtx
599 default_function_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
600 enum machine_mode mode ATTRIBUTE_UNUSED,
601 const_tree type ATTRIBUTE_UNUSED,
602 bool named ATTRIBUTE_UNUSED)
603 {
604 #ifdef FUNCTION_ARG
605 return FUNCTION_ARG (*ca, mode, CONST_CAST_TREE (type), named);
606 #else
607 gcc_unreachable ();
608 #endif
609 }
610
611 rtx
612 default_function_incoming_arg (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
613 enum machine_mode mode ATTRIBUTE_UNUSED,
614 const_tree type ATTRIBUTE_UNUSED,
615 bool named ATTRIBUTE_UNUSED)
616 {
617 #ifdef FUNCTION_INCOMING_ARG
618 return FUNCTION_INCOMING_ARG (*ca, mode, CONST_CAST_TREE (type), named);
619 #else
620 gcc_unreachable ();
621 #endif
622 }
623
624 unsigned int
625 default_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
626 const_tree type ATTRIBUTE_UNUSED)
627 {
628 return PARM_BOUNDARY;
629 }
630
631 void
632 hook_void_bitmap (bitmap regs ATTRIBUTE_UNUSED)
633 {
634 }
635
636 const char *
637 hook_invalid_arg_for_unprototyped_fn (
638 const_tree typelist ATTRIBUTE_UNUSED,
639 const_tree funcdecl ATTRIBUTE_UNUSED,
640 const_tree val ATTRIBUTE_UNUSED)
641 {
642 return NULL;
643 }
644
645 /* Initialize the stack protection decls. */
646
647 /* Stack protection related decls living in libgcc. */
648 static GTY(()) tree stack_chk_guard_decl;
649
650 tree
651 default_stack_protect_guard (void)
652 {
653 tree t = stack_chk_guard_decl;
654
655 if (t == NULL)
656 {
657 rtx x;
658
659 t = build_decl (UNKNOWN_LOCATION,
660 VAR_DECL, get_identifier ("__stack_chk_guard"),
661 ptr_type_node);
662 TREE_STATIC (t) = 1;
663 TREE_PUBLIC (t) = 1;
664 DECL_EXTERNAL (t) = 1;
665 TREE_USED (t) = 1;
666 TREE_THIS_VOLATILE (t) = 1;
667 DECL_ARTIFICIAL (t) = 1;
668 DECL_IGNORED_P (t) = 1;
669
670 /* Do not share RTL as the declaration is visible outside of
671 the current function. */
672 x = DECL_RTL (t);
673 RTX_FLAG (x, used) = 1;
674
675 stack_chk_guard_decl = t;
676 }
677
678 return t;
679 }
680
681 static GTY(()) tree stack_chk_fail_decl;
682
683 tree
684 default_external_stack_protect_fail (void)
685 {
686 tree t = stack_chk_fail_decl;
687
688 if (t == NULL_TREE)
689 {
690 t = build_function_type_list (void_type_node, NULL_TREE);
691 t = build_decl (UNKNOWN_LOCATION,
692 FUNCTION_DECL, get_identifier ("__stack_chk_fail"), t);
693 TREE_STATIC (t) = 1;
694 TREE_PUBLIC (t) = 1;
695 DECL_EXTERNAL (t) = 1;
696 TREE_USED (t) = 1;
697 TREE_THIS_VOLATILE (t) = 1;
698 TREE_NOTHROW (t) = 1;
699 DECL_ARTIFICIAL (t) = 1;
700 DECL_IGNORED_P (t) = 1;
701 DECL_VISIBILITY (t) = VISIBILITY_DEFAULT;
702 DECL_VISIBILITY_SPECIFIED (t) = 1;
703
704 stack_chk_fail_decl = t;
705 }
706
707 return build_call_expr (t, 0);
708 }
709
710 tree
711 default_hidden_stack_protect_fail (void)
712 {
713 #ifndef HAVE_GAS_HIDDEN
714 return default_external_stack_protect_fail ();
715 #else
716 tree t = stack_chk_fail_decl;
717
718 if (!flag_pic)
719 return default_external_stack_protect_fail ();
720
721 if (t == NULL_TREE)
722 {
723 t = build_function_type_list (void_type_node, NULL_TREE);
724 t = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
725 get_identifier ("__stack_chk_fail_local"), t);
726 TREE_STATIC (t) = 1;
727 TREE_PUBLIC (t) = 1;
728 DECL_EXTERNAL (t) = 1;
729 TREE_USED (t) = 1;
730 TREE_THIS_VOLATILE (t) = 1;
731 TREE_NOTHROW (t) = 1;
732 DECL_ARTIFICIAL (t) = 1;
733 DECL_IGNORED_P (t) = 1;
734 DECL_VISIBILITY_SPECIFIED (t) = 1;
735 DECL_VISIBILITY (t) = VISIBILITY_HIDDEN;
736
737 stack_chk_fail_decl = t;
738 }
739
740 return build_call_expr (t, 0);
741 #endif
742 }
743
744 bool
745 hook_bool_const_rtx_commutative_p (const_rtx x,
746 int outer_code ATTRIBUTE_UNUSED)
747 {
748 return COMMUTATIVE_P (x);
749 }
750
751 rtx
752 default_function_value (const_tree ret_type ATTRIBUTE_UNUSED,
753 const_tree fn_decl_or_type,
754 bool outgoing ATTRIBUTE_UNUSED)
755 {
756 /* The old interface doesn't handle receiving the function type. */
757 if (fn_decl_or_type
758 && !DECL_P (fn_decl_or_type))
759 fn_decl_or_type = NULL;
760
761 #ifdef FUNCTION_VALUE
762 return FUNCTION_VALUE (ret_type, fn_decl_or_type);
763 #else
764 gcc_unreachable ();
765 #endif
766 }
767
768 rtx
769 default_libcall_value (enum machine_mode mode ATTRIBUTE_UNUSED,
770 const_rtx fun ATTRIBUTE_UNUSED)
771 {
772 #ifdef LIBCALL_VALUE
773 return LIBCALL_VALUE (mode);
774 #else
775 gcc_unreachable ();
776 #endif
777 }
778
779 /* The default hook for TARGET_FUNCTION_VALUE_REGNO_P. */
780
781 bool
782 default_function_value_regno_p (const unsigned int regno ATTRIBUTE_UNUSED)
783 {
784 #ifdef FUNCTION_VALUE_REGNO_P
785 return FUNCTION_VALUE_REGNO_P (regno);
786 #else
787 gcc_unreachable ();
788 #endif
789 }
790
791 rtx
792 default_internal_arg_pointer (void)
793 {
794 /* If the reg that the virtual arg pointer will be translated into is
795 not a fixed reg or is the stack pointer, make a copy of the virtual
796 arg pointer, and address parms via the copy. The frame pointer is
797 considered fixed even though it is not marked as such. */
798 if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
799 || ! (fixed_regs[ARG_POINTER_REGNUM]
800 || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
801 return copy_to_reg (virtual_incoming_args_rtx);
802 else
803 return virtual_incoming_args_rtx;
804 }
805
806 rtx
807 default_static_chain (const_tree fndecl, bool incoming_p)
808 {
809 if (!DECL_STATIC_CHAIN (fndecl))
810 return NULL;
811
812 if (incoming_p)
813 {
814 #ifdef STATIC_CHAIN_INCOMING_REGNUM
815 return gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
816 #endif
817 }
818
819 #ifdef STATIC_CHAIN_REGNUM
820 return gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
821 #endif
822
823 {
824 static bool issued_error;
825 if (!issued_error)
826 {
827 issued_error = true;
828 sorry ("nested functions not supported on this target");
829 }
830
831 /* It really doesn't matter what we return here, so long as it
832 doesn't cause the rest of the compiler to crash. */
833 return gen_rtx_MEM (Pmode, stack_pointer_rtx);
834 }
835 }
836
837 void
838 default_trampoline_init (rtx ARG_UNUSED (m_tramp), tree ARG_UNUSED (t_func),
839 rtx ARG_UNUSED (r_chain))
840 {
841 sorry ("nested function trampolines not supported on this target");
842 }
843
844 int
845 default_return_pops_args (tree fundecl ATTRIBUTE_UNUSED,
846 tree funtype ATTRIBUTE_UNUSED,
847 int size ATTRIBUTE_UNUSED)
848 {
849 return 0;
850 }
851
852 reg_class_t
853 default_branch_target_register_class (void)
854 {
855 return NO_REGS;
856 }
857
858 #ifdef IRA_COVER_CLASSES
859 const reg_class_t *
860 default_ira_cover_classes (void)
861 {
862 static reg_class_t classes[] = IRA_COVER_CLASSES;
863 return classes;
864 }
865 #endif
866
867 reg_class_t
868 default_secondary_reload (bool in_p ATTRIBUTE_UNUSED, rtx x ATTRIBUTE_UNUSED,
869 reg_class_t reload_class_i ATTRIBUTE_UNUSED,
870 enum machine_mode reload_mode ATTRIBUTE_UNUSED,
871 secondary_reload_info *sri)
872 {
873 enum reg_class rclass = NO_REGS;
874 enum reg_class reload_class = (enum reg_class) reload_class_i;
875
876 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
877 {
878 sri->icode = sri->prev_sri->t_icode;
879 return NO_REGS;
880 }
881 #ifdef SECONDARY_INPUT_RELOAD_CLASS
882 if (in_p)
883 rclass = SECONDARY_INPUT_RELOAD_CLASS (reload_class, reload_mode, x);
884 #endif
885 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
886 if (! in_p)
887 rclass = SECONDARY_OUTPUT_RELOAD_CLASS (reload_class, reload_mode, x);
888 #endif
889 if (rclass != NO_REGS)
890 {
891 enum insn_code icode
892 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
893 reload_mode);
894
895 if (icode != CODE_FOR_nothing
896 && !insn_operand_matches (icode, in_p, x))
897 icode = CODE_FOR_nothing;
898 else if (icode != CODE_FOR_nothing)
899 {
900 const char *insn_constraint, *scratch_constraint;
901 char insn_letter, scratch_letter;
902 enum reg_class insn_class, scratch_class;
903
904 gcc_assert (insn_data[(int) icode].n_operands == 3);
905 insn_constraint = insn_data[(int) icode].operand[!in_p].constraint;
906 if (!*insn_constraint)
907 insn_class = ALL_REGS;
908 else
909 {
910 if (in_p)
911 {
912 gcc_assert (*insn_constraint == '=');
913 insn_constraint++;
914 }
915 insn_letter = *insn_constraint;
916 insn_class
917 = (insn_letter == 'r' ? GENERAL_REGS
918 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) insn_letter,
919 insn_constraint));
920 gcc_assert (insn_class != NO_REGS);
921 }
922
923 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
924 /* The scratch register's constraint must start with "=&",
925 except for an input reload, where only "=" is necessary,
926 and where it might be beneficial to re-use registers from
927 the input. */
928 gcc_assert (scratch_constraint[0] == '='
929 && (in_p || scratch_constraint[1] == '&'));
930 scratch_constraint++;
931 if (*scratch_constraint == '&')
932 scratch_constraint++;
933 scratch_letter = *scratch_constraint;
934 scratch_class
935 = (scratch_letter == 'r' ? GENERAL_REGS
936 : REG_CLASS_FROM_CONSTRAINT ((unsigned char) scratch_letter,
937 scratch_constraint));
938
939 if (reg_class_subset_p (reload_class, insn_class))
940 {
941 gcc_assert (scratch_class == rclass);
942 rclass = NO_REGS;
943 }
944 else
945 rclass = insn_class;
946
947 }
948 if (rclass == NO_REGS)
949 sri->icode = icode;
950 else
951 sri->t_icode = icode;
952 }
953 return rclass;
954 }
955
956 bool
957 default_handle_c_option (size_t code ATTRIBUTE_UNUSED,
958 const char *arg ATTRIBUTE_UNUSED,
959 int value ATTRIBUTE_UNUSED)
960 {
961 return false;
962 }
963
964 /* By default, if flag_pic is true, then neither local nor global relocs
965 should be placed in readonly memory. */
966
967 int
968 default_reloc_rw_mask (void)
969 {
970 return flag_pic ? 3 : 0;
971 }
972
973 /* By default, do no modification. */
974 tree default_mangle_decl_assembler_name (tree decl ATTRIBUTE_UNUSED,
975 tree id)
976 {
977 return id;
978 }
979
980 bool
981 default_builtin_vector_alignment_reachable (const_tree type, bool is_packed)
982 {
983 if (is_packed)
984 return false;
985
986 /* Assume that types whose size is > pointer-size are not guaranteed to be
987 naturally aligned. */
988 if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
989 return false;
990
991 /* Assume that types whose size is <= pointer-size
992 are naturally aligned. */
993 return true;
994 }
995
996 /* By default, assume that a target supports misaligned memory accesses of
997 any factor if it provides the movmisalign pattern.
998 is_packed is true if the memory access is defined in a packed struct. */
999 bool
1000 default_builtin_support_vector_misalignment (enum machine_mode mode,
1001 const_tree type
1002 ATTRIBUTE_UNUSED,
1003 int misalignment
1004 ATTRIBUTE_UNUSED,
1005 bool is_packed
1006 ATTRIBUTE_UNUSED)
1007 {
1008 if (optab_handler (movmisalign_optab, mode) != CODE_FOR_nothing)
1009 return true;
1010 return false;
1011 }
1012
1013 /* By default, only attempt to parallelize bitwise operations, and
1014 possibly adds/subtracts using bit-twiddling. */
1015
1016 enum machine_mode
1017 default_preferred_simd_mode (enum machine_mode mode ATTRIBUTE_UNUSED)
1018 {
1019 return word_mode;
1020 }
1021
1022 /* By default only the size derived from the preferred vector mode
1023 is tried. */
1024
1025 unsigned int
1026 default_autovectorize_vector_sizes (void)
1027 {
1028 return 0;
1029 }
1030
1031 /* Determine whether or not a pointer mode is valid. Assume defaults
1032 of ptr_mode or Pmode - can be overridden. */
1033 bool
1034 default_valid_pointer_mode (enum machine_mode mode)
1035 {
1036 return (mode == ptr_mode || mode == Pmode);
1037 }
1038
1039 /* Determine whether the memory reference specified by REF may alias
1040 the C library's errno location. */
1041 bool
1042 default_ref_may_alias_errno (ao_ref *ref)
1043 {
1044 tree base = ao_ref_base (ref);
1045 /* The default implementation assumes the errno location is
1046 a declaration of type int or is always accessed via a
1047 pointer to int. We assume that accesses to errno are
1048 not deliberately obfuscated (even in conforming ways). */
1049 if (TYPE_UNSIGNED (TREE_TYPE (base))
1050 || TYPE_MODE (TREE_TYPE (base)) != TYPE_MODE (integer_type_node))
1051 return false;
1052 /* The default implementation assumes an errno location
1053 declaration is never defined in the current compilation unit. */
1054 if (DECL_P (base)
1055 && !TREE_STATIC (base))
1056 return true;
1057 else if (TREE_CODE (base) == MEM_REF
1058 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1059 {
1060 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1061 return !pi || pi->pt.anything || pi->pt.nonlocal;
1062 }
1063 return false;
1064 }
1065
1066 /* Return the mode for a pointer to a given ADDRSPACE, defaulting to ptr_mode
1067 for the generic address space only. */
1068
1069 enum machine_mode
1070 default_addr_space_pointer_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1071 {
1072 gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
1073 return ptr_mode;
1074 }
1075
1076 /* Return the mode for an address in a given ADDRSPACE, defaulting to Pmode
1077 for the generic address space only. */
1078
1079 enum machine_mode
1080 default_addr_space_address_mode (addr_space_t addrspace ATTRIBUTE_UNUSED)
1081 {
1082 gcc_assert (ADDR_SPACE_GENERIC_P (addrspace));
1083 return Pmode;
1084 }
1085
1086 /* Named address space version of valid_pointer_mode. */
1087
1088 bool
1089 default_addr_space_valid_pointer_mode (enum machine_mode mode, addr_space_t as)
1090 {
1091 if (!ADDR_SPACE_GENERIC_P (as))
1092 return (mode == targetm.addr_space.pointer_mode (as)
1093 || mode == targetm.addr_space.address_mode (as));
1094
1095 return targetm.valid_pointer_mode (mode);
1096 }
1097
1098 /* Some places still assume that all pointer or address modes are the
1099 standard Pmode and ptr_mode. These optimizations become invalid if
1100 the target actually supports multiple different modes. For now,
1101 we disable such optimizations on such targets, using this function. */
1102
1103 bool
1104 target_default_pointer_address_modes_p (void)
1105 {
1106 if (targetm.addr_space.address_mode != default_addr_space_address_mode)
1107 return false;
1108 if (targetm.addr_space.pointer_mode != default_addr_space_pointer_mode)
1109 return false;
1110
1111 return true;
1112 }
1113
1114 /* Named address space version of legitimate_address_p. */
1115
1116 bool
1117 default_addr_space_legitimate_address_p (enum machine_mode mode, rtx mem,
1118 bool strict, addr_space_t as)
1119 {
1120 if (!ADDR_SPACE_GENERIC_P (as))
1121 gcc_unreachable ();
1122
1123 return targetm.legitimate_address_p (mode, mem, strict);
1124 }
1125
1126 /* Named address space version of LEGITIMIZE_ADDRESS. */
1127
1128 rtx
1129 default_addr_space_legitimize_address (rtx x, rtx oldx,
1130 enum machine_mode mode, addr_space_t as)
1131 {
1132 if (!ADDR_SPACE_GENERIC_P (as))
1133 return x;
1134
1135 return targetm.legitimize_address (x, oldx, mode);
1136 }
1137
1138 /* The default hook for determining whether one named address space is a
1139 subset of another and for returning which address space to use as the
1140 common address space. */
1141
1142 bool
1143 default_addr_space_subset_p (addr_space_t subset, addr_space_t superset)
1144 {
1145 return (subset == superset);
1146 }
1147
1148 /* The default hook for TARGET_ADDR_SPACE_CONVERT. This hook should never be
1149 called for targets with only a generic address space. */
1150
1151 rtx
1152 default_addr_space_convert (rtx op ATTRIBUTE_UNUSED,
1153 tree from_type ATTRIBUTE_UNUSED,
1154 tree to_type ATTRIBUTE_UNUSED)
1155 {
1156 gcc_unreachable ();
1157 }
1158
1159 bool
1160 default_hard_regno_scratch_ok (unsigned int regno ATTRIBUTE_UNUSED)
1161 {
1162 return true;
1163 }
1164
1165 /* The default implementation of TARGET_MODE_DEPENDENT_ADDRESS_P. */
1166
1167 bool
1168 default_mode_dependent_address_p (const_rtx addr ATTRIBUTE_UNUSED)
1169 {
1170 #ifdef GO_IF_MODE_DEPENDENT_ADDRESS
1171
1172 GO_IF_MODE_DEPENDENT_ADDRESS (CONST_CAST_RTX (addr), win);
1173 return false;
1174 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1175 win: ATTRIBUTE_UNUSED_LABEL
1176 return true;
1177
1178 #else
1179
1180 return false;
1181
1182 #endif
1183 }
1184
1185 bool
1186 default_target_option_valid_attribute_p (tree ARG_UNUSED (fndecl),
1187 tree ARG_UNUSED (name),
1188 tree ARG_UNUSED (args),
1189 int ARG_UNUSED (flags))
1190 {
1191 warning (OPT_Wattributes,
1192 "target attribute is not supported on this machine");
1193
1194 return false;
1195 }
1196
1197 bool
1198 default_target_option_pragma_parse (tree ARG_UNUSED (args),
1199 tree ARG_UNUSED (pop_target))
1200 {
1201 warning (OPT_Wpragmas,
1202 "#pragma GCC target is not supported for this machine");
1203
1204 return false;
1205 }
1206
1207 bool
1208 default_target_can_inline_p (tree caller, tree callee)
1209 {
1210 bool ret = false;
1211 tree callee_opts = DECL_FUNCTION_SPECIFIC_TARGET (callee);
1212 tree caller_opts = DECL_FUNCTION_SPECIFIC_TARGET (caller);
1213
1214 /* If callee has no option attributes, then it is ok to inline. */
1215 if (!callee_opts)
1216 ret = true;
1217
1218 /* If caller has no option attributes, but callee does, then it is not ok
1219 to inline. */
1220 else if (!caller_opts)
1221 ret = false;
1222
1223 /* If both caller and callee have attributes, assume that if the pointer is
1224 different, the two functions have different target options since
1225 build_target_option_node uses a hash table for the options. */
1226 else
1227 ret = (callee_opts == caller_opts);
1228
1229 return ret;
1230 }
1231
1232 #ifndef HAVE_casesi
1233 # define HAVE_casesi 0
1234 #endif
1235
1236 /* If the machine does not have a case insn that compares the bounds,
1237 this means extra overhead for dispatch tables, which raises the
1238 threshold for using them. */
1239
1240 unsigned int default_case_values_threshold (void)
1241 {
1242 return (HAVE_casesi ? 4 : 5);
1243 }
1244
1245 bool
1246 default_have_conditional_execution (void)
1247 {
1248 #ifdef HAVE_conditional_execution
1249 return HAVE_conditional_execution;
1250 #else
1251 return false;
1252 #endif
1253 }
1254
1255 /* Compute cost of moving registers to/from memory. */
1256
1257 int
1258 default_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1259 reg_class_t rclass ATTRIBUTE_UNUSED,
1260 bool in ATTRIBUTE_UNUSED)
1261 {
1262 #ifndef MEMORY_MOVE_COST
1263 return (4 + memory_move_secondary_cost (mode, (enum reg_class) rclass, in));
1264 #else
1265 return MEMORY_MOVE_COST (mode, (enum reg_class) rclass, in);
1266 #endif
1267 }
1268
1269 /* Compute cost of moving data from a register of class FROM to one of
1270 TO, using MODE. */
1271
1272 int
1273 default_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1274 reg_class_t from ATTRIBUTE_UNUSED,
1275 reg_class_t to ATTRIBUTE_UNUSED)
1276 {
1277 #ifndef REGISTER_MOVE_COST
1278 return 2;
1279 #else
1280 return REGISTER_MOVE_COST (mode, (enum reg_class) from, (enum reg_class) to);
1281 #endif
1282 }
1283
1284 bool
1285 default_profile_before_prologue (void)
1286 {
1287 #ifdef PROFILE_BEFORE_PROLOGUE
1288 return true;
1289 #else
1290 return false;
1291 #endif
1292 }
1293
1294 /* The default implementation of TARGET_PREFERRED_RELOAD_CLASS. */
1295
1296 reg_class_t
1297 default_preferred_reload_class (rtx x ATTRIBUTE_UNUSED,
1298 reg_class_t rclass)
1299 {
1300 #ifdef PREFERRED_RELOAD_CLASS
1301 return (reg_class_t) PREFERRED_RELOAD_CLASS (x, (enum reg_class) rclass);
1302 #else
1303 return rclass;
1304 #endif
1305 }
1306
1307 /* The default implementation of TARGET_OUTPUT_PREFERRED_RELOAD_CLASS. */
1308
1309 reg_class_t
1310 default_preferred_output_reload_class (rtx x ATTRIBUTE_UNUSED,
1311 reg_class_t rclass)
1312 {
1313 #ifdef PREFERRED_OUTPUT_RELOAD_CLASS
1314 return PREFERRED_OUTPUT_RELOAD_CLASS (x, (enum reg_class) rclass);
1315 #else
1316 return rclass;
1317 #endif
1318 }
1319
1320 /* The default implementation of TARGET_PREFERRED_RENAME_CLASS. */
1321 reg_class_t
1322 default_preferred_rename_class (reg_class_t rclass ATTRIBUTE_UNUSED)
1323 {
1324 return NO_REGS;
1325 }
1326
1327 /* The default implementation of TARGET_CLASS_LIKELY_SPILLED_P. */
1328
1329 bool
1330 default_class_likely_spilled_p (reg_class_t rclass)
1331 {
1332 return (reg_class_size[(int) rclass] == 1);
1333 }
1334
1335 /* Determine the debugging unwind mechanism for the target. */
1336
1337 enum unwind_info_type
1338 default_debug_unwind_info (void)
1339 {
1340 /* If the target wants to force the use of dwarf2 unwind info, let it. */
1341 /* ??? Change all users to the hook, then poison this. */
1342 #ifdef DWARF2_FRAME_INFO
1343 if (DWARF2_FRAME_INFO)
1344 return UI_DWARF2;
1345 #endif
1346
1347 /* Otherwise, only turn it on if dwarf2 debugging is enabled. */
1348 #ifdef DWARF2_DEBUGGING_INFO
1349 if (write_symbols == DWARF2_DEBUG || write_symbols == VMS_AND_DWARF2_DEBUG)
1350 return UI_DWARF2;
1351 #endif
1352
1353 return UI_NONE;
1354 }
1355
1356 /* Determine the exception handling mechanism for the target. */
1357
1358 enum unwind_info_type
1359 default_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1360 {
1361 /* Obey the configure switch to turn on sjlj exceptions. */
1362 #ifdef CONFIG_SJLJ_EXCEPTIONS
1363 if (CONFIG_SJLJ_EXCEPTIONS)
1364 return UI_SJLJ;
1365 #endif
1366
1367 /* ??? Change all users to the hook, then poison this. */
1368 #ifdef DWARF2_UNWIND_INFO
1369 if (DWARF2_UNWIND_INFO)
1370 return UI_DWARF2;
1371 #endif
1372
1373 return UI_SJLJ;
1374 }
1375
1376 /* To be used by targets that force dwarf2 unwind enabled. */
1377
1378 enum unwind_info_type
1379 dwarf2_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1380 {
1381 /* Obey the configure switch to turn on sjlj exceptions. */
1382 #ifdef CONFIG_SJLJ_EXCEPTIONS
1383 if (CONFIG_SJLJ_EXCEPTIONS)
1384 return UI_SJLJ;
1385 #endif
1386
1387 return UI_DWARF2;
1388 }
1389
1390 /* To be used by targets that force sjlj unwind enabled. */
1391
1392 enum unwind_info_type
1393 sjlj_except_unwind_info (struct gcc_options *opts ATTRIBUTE_UNUSED)
1394 {
1395 return UI_SJLJ;
1396 }
1397
1398 /* To be used by targets where reg_raw_mode doesn't return the right
1399 mode for registers used in apply_builtin_return and apply_builtin_arg. */
1400
1401 enum machine_mode
1402 default_get_reg_raw_mode (int regno)
1403 {
1404 return reg_raw_mode[regno];
1405 }
1406
1407 /* Return true if the state of option OPTION should be stored in PCH files
1408 and checked by default_pch_valid_p. Store the option's current state
1409 in STATE if so. */
1410
1411 static inline bool
1412 option_affects_pch_p (int option, struct cl_option_state *state)
1413 {
1414 if ((cl_options[option].flags & CL_TARGET) == 0)
1415 return false;
1416 if (option_flag_var (option, &global_options) == &target_flags)
1417 if (targetm.check_pch_target_flags)
1418 return false;
1419 return get_option_state (&global_options, option, state);
1420 }
1421
1422 /* Default version of get_pch_validity.
1423 By default, every flag difference is fatal; that will be mostly right for
1424 most targets, but completely right for very few. */
1425
1426 void *
1427 default_get_pch_validity (size_t *sz)
1428 {
1429 struct cl_option_state state;
1430 size_t i;
1431 char *result, *r;
1432
1433 *sz = 2;
1434 if (targetm.check_pch_target_flags)
1435 *sz += sizeof (target_flags);
1436 for (i = 0; i < cl_options_count; i++)
1437 if (option_affects_pch_p (i, &state))
1438 *sz += state.size;
1439
1440 result = r = XNEWVEC (char, *sz);
1441 r[0] = flag_pic;
1442 r[1] = flag_pie;
1443 r += 2;
1444 if (targetm.check_pch_target_flags)
1445 {
1446 memcpy (r, &target_flags, sizeof (target_flags));
1447 r += sizeof (target_flags);
1448 }
1449
1450 for (i = 0; i < cl_options_count; i++)
1451 if (option_affects_pch_p (i, &state))
1452 {
1453 memcpy (r, state.data, state.size);
1454 r += state.size;
1455 }
1456
1457 return result;
1458 }
1459
1460 /* Return a message which says that a PCH file was created with a different
1461 setting of OPTION. */
1462
1463 static const char *
1464 pch_option_mismatch (const char *option)
1465 {
1466 char *r;
1467
1468 asprintf (&r, _("created and used with differing settings of '%s'"), option);
1469 if (r == NULL)
1470 return _("out of memory");
1471 return r;
1472 }
1473
1474 /* Default version of pch_valid_p. */
1475
1476 const char *
1477 default_pch_valid_p (const void *data_p, size_t len)
1478 {
1479 struct cl_option_state state;
1480 const char *data = (const char *)data_p;
1481 size_t i;
1482
1483 /* -fpic and -fpie also usually make a PCH invalid. */
1484 if (data[0] != flag_pic)
1485 return _("created and used with different settings of -fpic");
1486 if (data[1] != flag_pie)
1487 return _("created and used with different settings of -fpie");
1488 data += 2;
1489
1490 /* Check target_flags. */
1491 if (targetm.check_pch_target_flags)
1492 {
1493 int tf;
1494 const char *r;
1495
1496 memcpy (&tf, data, sizeof (target_flags));
1497 data += sizeof (target_flags);
1498 len -= sizeof (target_flags);
1499 r = targetm.check_pch_target_flags (tf);
1500 if (r != NULL)
1501 return r;
1502 }
1503
1504 for (i = 0; i < cl_options_count; i++)
1505 if (option_affects_pch_p (i, &state))
1506 {
1507 if (memcmp (data, state.data, state.size) != 0)
1508 return pch_option_mismatch (cl_options[i].opt_text);
1509 data += state.size;
1510 len -= state.size;
1511 }
1512
1513 return NULL;
1514 }
1515
1516 /* Default version of TARGET_HANDLE_OPTION. */
1517
1518 bool
1519 default_target_handle_option (struct gcc_options *opts ATTRIBUTE_UNUSED,
1520 struct gcc_options *opts_set ATTRIBUTE_UNUSED,
1521 const struct cl_decoded_option *decoded ATTRIBUTE_UNUSED,
1522 location_t loc ATTRIBUTE_UNUSED)
1523 {
1524 return true;
1525 }
1526
1527 const struct default_options empty_optimization_table[] =
1528 {
1529 { OPT_LEVELS_NONE, 0, NULL, 0 }
1530 };
1531
1532 #include "gt-targhooks.h"