1 /* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007,
2 2008, 2009, 2010 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "expr.h"
38 #include "obstack.h"
39 #include "except.h"
40 #include "function.h"
41 #include "optabs.h"
42 #include "diagnostic-core.h"
43 #include "toplev.h"
44 #include "basic-block.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include <ctype.h>
48 #include "target.h"
49 #include "target-def.h"
50 #include "targhooks.h"
51 #include "integrate.h"
52 #include "langhooks.h"
53 #include "df.h"
54
55 #ifndef FRV_INLINE
56 #define FRV_INLINE inline
57 #endif
58
59 /* The maximum number of distinct NOP patterns. There are three:
60 nop, fnop and mnop. */
61 #define NUM_NOP_PATTERNS 3
62
63 /* Classification of instructions and units: integer, floating-point/media,
64 branch and control. */
65 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
66
67 /* The DFA names of the units, in packet order. */
68 static const char *const frv_unit_names[] =
69 {
70 "c",
71 "i0", "f0",
72 "i1", "f1",
73 "i2", "f2",
74 "i3", "f3",
75 "b0", "b1"
76 };
77
78 /* The classification of each unit in frv_unit_names[]. */
79 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
80 {
81 GROUP_C,
82 GROUP_I, GROUP_FM,
83 GROUP_I, GROUP_FM,
84 GROUP_I, GROUP_FM,
85 GROUP_I, GROUP_FM,
86 GROUP_B, GROUP_B
87 };
88
89 /* Return the DFA unit code associated with the Nth unit of integer
90    or floating-point group GROUP.  */
91 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
92
93 /* Return the number of integer or floating-point unit UNIT
94 (1 for I1, 2 for F2, etc.). */
95 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
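
/* Illustrative example: with the frv_unit_names[] ordering above,
   NTH_UNIT (GROUP_I, 1) selects frv_unit_codes[3], the DFA code for unit
   "i1", and NTH_UNIT (GROUP_FM, 2) selects frv_unit_codes[6], the code for
   "f2".  Applying UNIT_NUMBER to the corresponding positions in
   frv_unit_names[] (3 and 6) yields 1 and 2.  */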
96
97 /* The DFA unit number for each unit in frv_unit_names[]. */
98 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
99
100 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
101 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
102 no instruction of type T has been seen. */
103 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
104
105 /* An array of dummy nop INSNs, one for each type of nop that the
106 target supports. */
107 static GTY(()) rtx frv_nops[NUM_NOP_PATTERNS];
108
109 /* The number of nop instructions in frv_nops[]. */
110 static unsigned int frv_num_nops;
111
112 /* Information about one __builtin_read or __builtin_write access, or
113 the combination of several such accesses. The most general value
114 is all-zeros (an unknown access to an unknown address). */
115 struct frv_io {
116 /* The type of access. FRV_IO_UNKNOWN means the access can be either
117 a read or a write. */
118 enum { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE } type;
119
120 /* The constant address being accessed, or zero if not known. */
121 HOST_WIDE_INT const_address;
122
123 /* The run-time address, as used in operand 0 of the membar pattern. */
124 rtx var_address;
125 };
126
127 /* Return true if instruction INSN should be packed with the following
128 instruction. */
129 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
130
131 /* Set the value of PACKING_FLAG_P(INSN). */
132 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
133 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
134
135 /* Loop with REG set to each hard register in rtx X. */
136 #define FOR_EACH_REGNO(REG, X) \
137 for (REG = REGNO (X); \
138 REG < REGNO (X) + HARD_REGNO_NREGS (REGNO (X), GET_MODE (X)); \
139 REG++)
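
/* Hypothetical usage sketch of FOR_EACH_REGNO (not a real caller): given a
   hard REG rtx x that may span several words,

     int regno;
     FOR_EACH_REGNO (regno, x)
       fixed_regs[regno] = 1;

   visits every hard register from REGNO (x) through
   REGNO (x) + HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)) - 1.  */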
140
141 /* This structure contains machine specific function data. */
142 struct GTY(()) machine_function
143 {
144 /* True if we have created an rtx that relies on the stack frame. */
145 int frame_needed;
146
147 /* True if this function contains at least one __builtin_{read,write}*. */
148 bool has_membar_p;
149 };
150
151 /* Temporary register allocation support structure. */
152 typedef struct frv_tmp_reg_struct
153 {
154 HARD_REG_SET regs; /* possible registers to allocate */
155 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
156 }
157 frv_tmp_reg_t;
158
159 /* Register state information for VLIW re-packing phase. */
160 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
161 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
162 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
163 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
164
165 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
166
167 typedef unsigned char regstate_t;
168
169 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
170 memory move. */
171 enum frv_stack_op
172 {
173 FRV_LOAD,
174 FRV_STORE
175 };
176
177 /* Information required by frv_frame_access. */
178 typedef struct
179 {
180 /* This field is FRV_LOAD if registers are to be loaded from the stack and
181 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
182 the move is being done by the prologue code while FRV_LOAD implies it
183 is being done by the epilogue. */
184 enum frv_stack_op op;
185
186 /* The base register to use when accessing the stack. This may be the
187 frame pointer, stack pointer, or a temporary. The choice of register
188 depends on which part of the frame is being accessed and how big the
189 frame is. */
190 rtx base;
191
192 /* The offset of BASE from the bottom of the current frame, in bytes. */
193 int base_offset;
194 } frv_frame_accessor_t;
195
196 /* Conditional execution support gathered together in one structure. */
197 typedef struct
198 {
199 /* Linked list of insns to add if the conditional execution conversion was
200 successful. Each link points to an EXPR_LIST which points to the pattern
201 of the insn to add, and the insn to be inserted before. */
202 rtx added_insns_list;
203
204 /* Identify which registers are safe to allocate for if conversions to
205 conditional execution. We keep the last allocated register in the
206 register classes between COND_EXEC statements. This will mean we allocate
207 different registers for each different COND_EXEC group if we can. This
208 might allow the scheduler to intermix two different COND_EXEC sections. */
209 frv_tmp_reg_t tmp_reg;
210
211   /* For nested IFs, identify which CC registers are used outside of being
212      set via a compare insn and used via a check insn.  This will allow us to
213 know if we can rewrite the register to use a different register that will
214 be paired with the CR register controlling the nested IF-THEN blocks. */
215 HARD_REG_SET nested_cc_ok_rewrite;
216
217 /* Temporary registers allocated to hold constants during conditional
218 execution. */
219 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
220
221 /* Current number of temp registers available. */
222 int cur_scratch_regs;
223
224 /* Number of nested conditional execution blocks. */
225 int num_nested_cond_exec;
226
227 /* Map of insns that set up constants in scratch registers. */
228 bitmap scratch_insns_bitmap;
229
230 /* Conditional execution test register (CC0..CC7). */
231 rtx cr_reg;
232
233 /* Conditional execution compare register that is paired with cr_reg, so that
234 nested compares can be done. The csubcc and caddcc instructions don't
235 have enough bits to specify both a CC register to be set and a CR register
236 to do the test on, so the same bit number is used for both. Needless to
237 say, this is rather inconvenient for GCC. */
238 rtx nested_cc_reg;
239
240 /* Extra CR registers used for &&, ||. */
241 rtx extra_int_cr;
242 rtx extra_fp_cr;
243
244 /* Previous CR used in nested if, to make sure we are dealing with the same
245 nested if as the previous statement. */
246 rtx last_nested_if_cr;
247 }
248 frv_ifcvt_t;
249
250 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
251
252 /* Map register number to smallest register class. */
253 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
254
255 /* Map class letter into register class. */
256 enum reg_class reg_class_from_letter[256];
257
258 /* Cached value of frv_stack_info. */
259 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
260
261 /* -mcpu= support */
262 frv_cpu_t frv_cpu_type = CPU_TYPE; /* value of -mcpu= */
263
264 /* Forward references */
265
266 static bool frv_handle_option (size_t, const char *, int);
267 static void frv_option_override (void);
268 static bool frv_legitimate_address_p (enum machine_mode, rtx, bool);
269 static int frv_default_flags_for_cpu (void);
270 static int frv_string_begins_with (const_tree, const char *);
271 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
272 static void frv_print_operand (FILE *, rtx, int);
273 static void frv_print_operand_address (FILE *, rtx);
274 static bool frv_print_operand_punct_valid_p (unsigned char code);
275 static void frv_print_operand_memory_reference_reg
276 (FILE *, rtx);
277 static void frv_print_operand_memory_reference (FILE *, rtx, int);
278 static int frv_print_operand_jump_hint (rtx);
279 static const char *comparison_string (enum rtx_code, rtx);
280 static rtx frv_function_value (const_tree, const_tree,
281 bool);
282 static rtx frv_libcall_value (enum machine_mode,
283 const_rtx);
284 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
285 static rtx single_set_pattern (rtx);
286 static int frv_function_contains_far_jump (void);
287 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
288 enum reg_class,
289 enum machine_mode,
290 int, int);
291 static rtx frv_frame_offset_rtx (int);
292 static rtx frv_frame_mem (enum machine_mode, rtx, int);
293 static rtx frv_dwarf_store (rtx, int);
294 static void frv_frame_insn (rtx, rtx);
295 static void frv_frame_access (frv_frame_accessor_t*,
296 rtx, int);
297 static void frv_frame_access_multi (frv_frame_accessor_t*,
298 frv_stack_t *, int);
299 static void frv_frame_access_standard_regs (enum frv_stack_op,
300 frv_stack_t *);
301 static struct machine_function *frv_init_machine_status (void);
302 static rtx frv_int_to_acc (enum insn_code, int, rtx);
303 static enum machine_mode frv_matching_accg_mode (enum machine_mode);
304 static rtx frv_read_argument (tree, unsigned int);
305 static rtx frv_read_iacc_argument (enum machine_mode, tree, unsigned int);
306 static int frv_check_constant_argument (enum insn_code, int, rtx);
307 static rtx frv_legitimize_target (enum insn_code, rtx);
308 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
309 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
310 static rtx frv_legitimize_address (rtx, rtx, enum machine_mode);
311 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
312 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
313 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
314 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
315 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
316 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
317 static rtx frv_expand_int_void2arg (enum insn_code, tree);
318 static rtx frv_expand_prefetches (enum insn_code, tree);
319 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
320 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
321 static rtx frv_expand_mclracc_builtin (tree);
322 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
323 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
324 static rtx frv_expand_noargs_builtin (enum insn_code);
325 static void frv_split_iacc_move (rtx, rtx);
326 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
327 static int frv_clear_registers_used (rtx *, void *);
328 static void frv_ifcvt_add_insn (rtx, rtx, int);
329 static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
330 static rtx frv_ifcvt_load_value (rtx, rtx);
331 static int frv_acc_group_1 (rtx *, void *);
332 static unsigned int frv_insn_unit (rtx);
333 static bool frv_issues_to_branch_unit_p (rtx);
334 static int frv_cond_flags (rtx);
335 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
336 static int frv_registers_conflict_p_1 (rtx *, void *);
337 static bool frv_registers_conflict_p (rtx);
338 static void frv_registers_update_1 (rtx, const_rtx, void *);
339 static void frv_registers_update (rtx);
340 static void frv_start_packet (void);
341 static void frv_start_packet_block (void);
342 static void frv_finish_packet (void (*) (void));
343 static bool frv_pack_insn_p (rtx);
344 static void frv_add_insn_to_packet (rtx);
345 static void frv_insert_nop_in_packet (rtx);
346 static bool frv_for_each_packet (void (*) (void));
347 static bool frv_sort_insn_group_1 (enum frv_insn_group,
348 unsigned int, unsigned int,
349 unsigned int, unsigned int,
350 state_t);
351 static int frv_compare_insns (const void *, const void *);
352 static void frv_sort_insn_group (enum frv_insn_group);
353 static void frv_reorder_packet (void);
354 static void frv_fill_unused_units (enum frv_insn_group);
355 static void frv_align_label (void);
356 static void frv_reorg_packet (void);
357 static void frv_register_nop (rtx);
358 static void frv_reorg (void);
359 static void frv_pack_insns (void);
360 static void frv_function_prologue (FILE *, HOST_WIDE_INT);
361 static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
362 static bool frv_assemble_integer (rtx, unsigned, int);
363 static void frv_init_builtins (void);
364 static rtx frv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
365 static void frv_init_libfuncs (void);
366 static bool frv_in_small_data_p (const_tree);
367 static void frv_asm_output_mi_thunk
368 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
369 static void frv_setup_incoming_varargs (CUMULATIVE_ARGS *,
370 enum machine_mode,
371 tree, int *, int);
372 static rtx frv_expand_builtin_saveregs (void);
373 static void frv_expand_builtin_va_start (tree, rtx);
374 static bool frv_rtx_costs (rtx, int, int, int*, bool);
375 static int frv_register_move_cost (enum machine_mode,
376 reg_class_t, reg_class_t);
377 static int frv_memory_move_cost (enum machine_mode,
378 reg_class_t, bool);
379 static void frv_asm_out_constructor (rtx, int);
380 static void frv_asm_out_destructor (rtx, int);
381 static bool frv_function_symbol_referenced_p (rtx);
382 static bool frv_cannot_force_const_mem (rtx);
383 static const char *unspec_got_name (int);
384 static void frv_output_const_unspec (FILE *,
385 const struct frv_unspec *);
386 static bool frv_function_ok_for_sibcall (tree, tree);
387 static rtx frv_struct_value_rtx (tree, int);
388 static bool frv_must_pass_in_stack (enum machine_mode mode, const_tree type);
389 static int frv_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
390 tree, bool);
391 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
392 ATTRIBUTE_UNUSED;
393 static reg_class_t frv_secondary_reload (bool, rtx, reg_class_t,
394 enum machine_mode,
395 secondary_reload_info *);
396 static bool frv_frame_pointer_required (void);
397 static bool frv_can_eliminate (const int, const int);
398 static void frv_trampoline_init (rtx, tree, rtx);
399 static bool frv_class_likely_spilled_p (reg_class_t);
400 \f
401 /* Allow us to easily change the default for -malloc-cc. */
402 #ifndef DEFAULT_NO_ALLOC_CC
403 #define MASK_DEFAULT_ALLOC_CC MASK_ALLOC_CC
404 #else
405 #define MASK_DEFAULT_ALLOC_CC 0
406 #endif
407 \f
408 /* Initialize the GCC target structure. */
409 #undef TARGET_PRINT_OPERAND
410 #define TARGET_PRINT_OPERAND frv_print_operand
411 #undef TARGET_PRINT_OPERAND_ADDRESS
412 #define TARGET_PRINT_OPERAND_ADDRESS frv_print_operand_address
413 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
414 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P frv_print_operand_punct_valid_p
415 #undef TARGET_ASM_FUNCTION_PROLOGUE
416 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
417 #undef TARGET_ASM_FUNCTION_EPILOGUE
418 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
419 #undef TARGET_ASM_INTEGER
420 #define TARGET_ASM_INTEGER frv_assemble_integer
421 #undef TARGET_DEFAULT_TARGET_FLAGS
422 #define TARGET_DEFAULT_TARGET_FLAGS \
423 (MASK_DEFAULT_ALLOC_CC \
424 | MASK_COND_MOVE \
425 | MASK_SCC \
426 | MASK_COND_EXEC \
427 | MASK_VLIW_BRANCH \
428 | MASK_MULTI_CE \
429 | MASK_NESTED_CE)
430 #undef TARGET_HANDLE_OPTION
431 #define TARGET_HANDLE_OPTION frv_handle_option
432 #undef TARGET_OPTION_OVERRIDE
433 #define TARGET_OPTION_OVERRIDE frv_option_override
434 #undef TARGET_INIT_BUILTINS
435 #define TARGET_INIT_BUILTINS frv_init_builtins
436 #undef TARGET_EXPAND_BUILTIN
437 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
438 #undef TARGET_INIT_LIBFUNCS
439 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
440 #undef TARGET_IN_SMALL_DATA_P
441 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
442 #undef TARGET_REGISTER_MOVE_COST
443 #define TARGET_REGISTER_MOVE_COST frv_register_move_cost
444 #undef TARGET_MEMORY_MOVE_COST
445 #define TARGET_MEMORY_MOVE_COST frv_memory_move_cost
446 #undef TARGET_RTX_COSTS
447 #define TARGET_RTX_COSTS frv_rtx_costs
448 #undef TARGET_ASM_CONSTRUCTOR
449 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
450 #undef TARGET_ASM_DESTRUCTOR
451 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
452
453 #undef TARGET_ASM_OUTPUT_MI_THUNK
454 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
455 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
456 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
457
458 #undef TARGET_SCHED_ISSUE_RATE
459 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
460
461 #undef TARGET_LEGITIMIZE_ADDRESS
462 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
463
464 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
465 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
466 #undef TARGET_CANNOT_FORCE_CONST_MEM
467 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
468
469 #undef TARGET_HAVE_TLS
470 #define TARGET_HAVE_TLS HAVE_AS_TLS
471
472 #undef TARGET_STRUCT_VALUE_RTX
473 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
474 #undef TARGET_MUST_PASS_IN_STACK
475 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
476 #undef TARGET_PASS_BY_REFERENCE
477 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
478 #undef TARGET_ARG_PARTIAL_BYTES
479 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
480
481 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
482 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
483 #undef TARGET_SETUP_INCOMING_VARARGS
484 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
485 #undef TARGET_MACHINE_DEPENDENT_REORG
486 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
487
488 #undef TARGET_EXPAND_BUILTIN_VA_START
489 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
490
491 #if HAVE_AS_TLS
492 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
493 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
494 #endif
495
496 #undef TARGET_CLASS_LIKELY_SPILLED_P
497 #define TARGET_CLASS_LIKELY_SPILLED_P frv_class_likely_spilled_p
498
499 #undef TARGET_SECONDARY_RELOAD
500 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
501
502 #undef TARGET_LEGITIMATE_ADDRESS_P
503 #define TARGET_LEGITIMATE_ADDRESS_P frv_legitimate_address_p
504
505 #undef TARGET_FRAME_POINTER_REQUIRED
506 #define TARGET_FRAME_POINTER_REQUIRED frv_frame_pointer_required
507
508 #undef TARGET_CAN_ELIMINATE
509 #define TARGET_CAN_ELIMINATE frv_can_eliminate
510
511 #undef TARGET_TRAMPOLINE_INIT
512 #define TARGET_TRAMPOLINE_INIT frv_trampoline_init
513
514 #undef TARGET_FUNCTION_VALUE
515 #define TARGET_FUNCTION_VALUE frv_function_value
516 #undef TARGET_LIBCALL_VALUE
517 #define TARGET_LIBCALL_VALUE frv_libcall_value
518
519 struct gcc_target targetm = TARGET_INITIALIZER;
520
521 #define FRV_SYMBOL_REF_TLS_P(RTX) \
522 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
523
524 \f
525 /* Any function call that satisfies the machine-independent
526 requirements is eligible on FR-V. */
527
528 static bool
529 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
530 tree exp ATTRIBUTE_UNUSED)
531 {
532 return true;
533 }
534
535 /* Return true if SYMBOL is a small data symbol and relocation RELOC
536 can be used to access it directly in a load or store. */
537
538 static FRV_INLINE bool
539 frv_small_data_reloc_p (rtx symbol, int reloc)
540 {
541 return (GET_CODE (symbol) == SYMBOL_REF
542 && SYMBOL_REF_SMALL_P (symbol)
543 && (!TARGET_FDPIC || flag_pic == 1)
544 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
545 }
546
547 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
548 appropriately. */
549
550 bool
551 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
552 {
553 if (GET_CODE (x) == CONST)
554 {
555 unspec->offset = 0;
556 x = XEXP (x, 0);
557 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
558 {
559 unspec->offset += INTVAL (XEXP (x, 1));
560 x = XEXP (x, 0);
561 }
562 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
563 {
564 unspec->symbol = XVECEXP (x, 0, 0);
565 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
566
567 if (unspec->offset == 0)
568 return true;
569
570 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
571 && unspec->offset > 0
572 && (unsigned HOST_WIDE_INT) unspec->offset < g_switch_value)
573 return true;
574 }
575 }
576 return false;
577 }
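
/* For illustration, the constants accepted above have the shape

     (const (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT))

   or, with an addend,

     (const (plus (unspec [SYMBOL (const_int RELOC)] UNSPEC_GOT)
                  (const_int OFFSET)))

   where a nonzero OFFSET is only accepted for small-data relocations, and
   only if it is positive and smaller than g_switch_value.  */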
578
579 /* Decide whether we can force certain constants to memory. If we
580 decide we can't, the caller should be able to cope with it in
581 another way.
582
583 We never allow constants to be forced into memory for TARGET_FDPIC.
584 This is necessary for several reasons:
585
586 1. Since LEGITIMATE_CONSTANT_P rejects constant pool addresses, the
587 target-independent code will try to force them into the constant
588 pool, thus leading to infinite recursion.
589
590 2. We can never introduce new constant pool references during reload.
591 Any such reference would require use of the pseudo FDPIC register.
592
593 3. We can't represent a constant added to a function pointer (which is
594 not the same as a pointer to a function+constant).
595
596 4. In many cases, it's more efficient to calculate the constant in-line. */
597
598 static bool
599 frv_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
600 {
601 return TARGET_FDPIC;
602 }
603 \f
604 /* Implement TARGET_HANDLE_OPTION. */
605
606 static bool
607 frv_handle_option (size_t code, const char *arg, int value)
608 {
609 switch (code)
610 {
611 case OPT_G:
612 g_switch_value = value;
613 g_switch_set = true;
614 return true;
615
616 case OPT_mcpu_:
617 if (strcmp (arg, "simple") == 0)
618 frv_cpu_type = FRV_CPU_SIMPLE;
619 else if (strcmp (arg, "tomcat") == 0)
620 frv_cpu_type = FRV_CPU_TOMCAT;
621 else if (strcmp (arg, "fr550") == 0)
622 frv_cpu_type = FRV_CPU_FR550;
623 else if (strcmp (arg, "fr500") == 0)
624 frv_cpu_type = FRV_CPU_FR500;
625 else if (strcmp (arg, "fr450") == 0)
626 frv_cpu_type = FRV_CPU_FR450;
627 else if (strcmp (arg, "fr405") == 0)
628 frv_cpu_type = FRV_CPU_FR405;
629 else if (strcmp (arg, "fr400") == 0)
630 frv_cpu_type = FRV_CPU_FR400;
631 else if (strcmp (arg, "fr300") == 0)
632 frv_cpu_type = FRV_CPU_FR300;
633 else if (strcmp (arg, "frv") == 0)
634 frv_cpu_type = FRV_CPU_GENERIC;
635 else
636 return false;
637 return true;
638
639 default:
640 return true;
641 }
642 }
643
644 static int
645 frv_default_flags_for_cpu (void)
646 {
647 switch (frv_cpu_type)
648 {
649 case FRV_CPU_GENERIC:
650 return MASK_DEFAULT_FRV;
651
652 case FRV_CPU_FR550:
653 return MASK_DEFAULT_FR550;
654
655 case FRV_CPU_FR500:
656 case FRV_CPU_TOMCAT:
657 return MASK_DEFAULT_FR500;
658
659 case FRV_CPU_FR450:
660 return MASK_DEFAULT_FR450;
661
662 case FRV_CPU_FR405:
663 case FRV_CPU_FR400:
664 return MASK_DEFAULT_FR400;
665
666 case FRV_CPU_FR300:
667 case FRV_CPU_SIMPLE:
668 return MASK_DEFAULT_SIMPLE;
669
670 default:
671 gcc_unreachable ();
672 }
673 }
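
/* Illustrative example of how the two functions above combine: -mcpu=fr450
   makes frv_handle_option set frv_cpu_type to FRV_CPU_FR450, and
   frv_default_flags_for_cpu then returns MASK_DEFAULT_FR450, which
   frv_option_override below ORs into target_flags for any bits the user did
   not set explicitly.  */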
674
675 /* Implement TARGET_OPTION_OVERRIDE. */
676
677 static void
678 frv_option_override (void)
679 {
680 int regno;
681 unsigned int i;
682
683 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
684
685 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
686 linker about linking pic and non-pic code. */
687 if (TARGET_LIBPIC)
688 {
689 if (!flag_pic) /* -fPIC */
690 flag_pic = 2;
691
692 if (! g_switch_set) /* -G0 */
693 {
694 g_switch_set = 1;
695 g_switch_value = 0;
696 }
697 }
698
699 /* A C expression whose value is a register class containing hard
700 register REGNO. In general there is more than one such class;
701 choose a class which is "minimal", meaning that no smaller class
702 also contains the register. */
703
704 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
705 {
706 enum reg_class rclass;
707
708 if (GPR_P (regno))
709 {
710 int gpr_reg = regno - GPR_FIRST;
711
712 if (gpr_reg == GR8_REG)
713 rclass = GR8_REGS;
714
715 else if (gpr_reg == GR9_REG)
716 rclass = GR9_REGS;
717
718 else if (gpr_reg == GR14_REG)
719 rclass = FDPIC_FPTR_REGS;
720
721 else if (gpr_reg == FDPIC_REGNO)
722 rclass = FDPIC_REGS;
723
724 else if ((gpr_reg & 3) == 0)
725 rclass = QUAD_REGS;
726
727 else if ((gpr_reg & 1) == 0)
728 rclass = EVEN_REGS;
729
730 else
731 rclass = GPR_REGS;
732 }
733
734 else if (FPR_P (regno))
735 {
736 int fpr_reg = regno - GPR_FIRST;
737 if ((fpr_reg & 3) == 0)
738 rclass = QUAD_FPR_REGS;
739
740 else if ((fpr_reg & 1) == 0)
741 rclass = FEVEN_REGS;
742
743 else
744 rclass = FPR_REGS;
745 }
746
747 else if (regno == LR_REGNO)
748 rclass = LR_REG;
749
750 else if (regno == LCR_REGNO)
751 rclass = LCR_REG;
752
753 else if (ICC_P (regno))
754 rclass = ICC_REGS;
755
756 else if (FCC_P (regno))
757 rclass = FCC_REGS;
758
759 else if (ICR_P (regno))
760 rclass = ICR_REGS;
761
762 else if (FCR_P (regno))
763 rclass = FCR_REGS;
764
765 else if (ACC_P (regno))
766 {
767 int r = regno - ACC_FIRST;
768 if ((r & 3) == 0)
769 rclass = QUAD_ACC_REGS;
770 else if ((r & 1) == 0)
771 rclass = EVEN_ACC_REGS;
772 else
773 rclass = ACC_REGS;
774 }
775
776 else if (ACCG_P (regno))
777 rclass = ACCG_REGS;
778
779 else
780 rclass = NO_REGS;
781
782 regno_reg_class[regno] = rclass;
783 }
784
785 /* Check for small data option */
786 if (!g_switch_set)
787 g_switch_value = SDATA_DEFAULT_SIZE;
788
789 /* A C expression which defines the machine-dependent operand
790 constraint letters for register classes. If CHAR is such a
791 letter, the value should be the register class corresponding to
792 it. Otherwise, the value should be `NO_REGS'. The register
793 letter `r', corresponding to class `GENERAL_REGS', will not be
794 passed to this macro; you do not need to handle it.
795
796 The following letters are unavailable, due to being used as
797 constraints:
798 '0'..'9'
799 '<', '>'
800 'E', 'F', 'G', 'H'
801 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P'
802 'Q', 'R', 'S', 'T', 'U'
803 'V', 'X'
804 'g', 'i', 'm', 'n', 'o', 'p', 'r', 's' */
805
806 for (i = 0; i < 256; i++)
807 reg_class_from_letter[i] = NO_REGS;
808
809 reg_class_from_letter['a'] = ACC_REGS;
810 reg_class_from_letter['b'] = EVEN_ACC_REGS;
811 reg_class_from_letter['c'] = CC_REGS;
812 reg_class_from_letter['d'] = GPR_REGS;
813 reg_class_from_letter['e'] = EVEN_REGS;
814 reg_class_from_letter['f'] = FPR_REGS;
815 reg_class_from_letter['h'] = FEVEN_REGS;
816 reg_class_from_letter['l'] = LR_REG;
817 reg_class_from_letter['q'] = QUAD_REGS;
818 reg_class_from_letter['t'] = ICC_REGS;
819 reg_class_from_letter['u'] = FCC_REGS;
820 reg_class_from_letter['v'] = ICR_REGS;
821 reg_class_from_letter['w'] = FCR_REGS;
822 reg_class_from_letter['x'] = QUAD_FPR_REGS;
823 reg_class_from_letter['y'] = LCR_REG;
824 reg_class_from_letter['z'] = SPR_REGS;
825 reg_class_from_letter['A'] = QUAD_ACC_REGS;
826 reg_class_from_letter['B'] = ACCG_REGS;
827 reg_class_from_letter['C'] = CR_REGS;
828 reg_class_from_letter['W'] = FDPIC_CALL_REGS; /* gp14+15 */
829 reg_class_from_letter['Z'] = FDPIC_REGS; /* gp15 */
830
831 /* There is no single unaligned SI op for PIC code. Sometimes we
832 need to use ".4byte" and sometimes we need to use ".picptr".
833 See frv_assemble_integer for details. */
834 if (flag_pic || TARGET_FDPIC)
835 targetm.asm_out.unaligned_op.si = 0;
836
837 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
838 target_flags |= MASK_LINKED_FP;
839
840 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
841 target_flags |= MASK_OPTIMIZE_MEMBAR;
842
843 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
844 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
845
846 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
847 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
848
849 init_machine_status = frv_init_machine_status;
850 }
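
/* Worked example of the classification loop above (assuming the GPR in
   question is none of the special cases GR8, GR9, GR14 or FDPIC_REGNO): a
   GPR whose number within the register file is a multiple of 4 lands in
   QUAD_REGS, a merely even one in EVEN_REGS, and any other in GPR_REGS, so
   e.g. GR4 -> QUAD_REGS, GR6 -> EVEN_REGS, GR7 -> GPR_REGS.  */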
851
852 \f
853 /* Some machines may desire to change what optimizations are performed for
854 various optimization levels. This macro, if defined, is executed once just
855 after the optimization level is determined and before the remainder of the
856 command options have been parsed. Values set in this macro are used as the
857 default values for the other command line options.
858
859 LEVEL is the optimization level specified; 2 if `-O2' is specified, 1 if
860 `-O' is specified, and 0 if neither is specified.
861
862 SIZE is nonzero if `-Os' is specified, 0 otherwise.
863
864 You should not use this macro to change options that are not
865    machine-specific.  These should be uniformly selected by the same optimization
866 level on all supported machines. Use this macro to enable machine-specific
867 optimizations.
868
869 *Do not examine `write_symbols' in this macro!* The debugging options are
870    not supposed to alter the generated code.  */
871
872 /* On the FRV, possibly disable VLIW packing which is done by the 2nd
873 scheduling pass at the current time. */
874 void
875 frv_optimization_options (int level, int size ATTRIBUTE_UNUSED)
876 {
877 if (level >= 2)
878 {
879 #ifdef DISABLE_SCHED2
880 flag_schedule_insns_after_reload = 0;
881 #endif
882 #ifdef ENABLE_RCSP
883 flag_rcsp = 1;
884 #endif
885 }
886 }
887
888 \f
889 /* Return true if NAME (a STRING_CST node) begins with PREFIX. */
890
891 static int
892 frv_string_begins_with (const_tree name, const char *prefix)
893 {
894 const int prefix_len = strlen (prefix);
895
896 /* Remember: NAME's length includes the null terminator. */
897 return (TREE_STRING_LENGTH (name) > prefix_len
898 && strncmp (TREE_STRING_POINTER (name), prefix, prefix_len) == 0);
899 }
900 \f
901 /* Zero or more C statements that may conditionally modify two variables
902 `fixed_regs' and `call_used_regs' (both of type `char []') after they have
903 been initialized from the two preceding macros.
904
905 This is necessary in case the fixed or call-clobbered registers depend on
906 target flags.
907
908 You need not define this macro if it has no work to do.
909
910 If the usage of an entire class of registers depends on the target flags,
911 you may indicate this to GCC by using this macro to modify `fixed_regs' and
912 `call_used_regs' to 1 for each of the registers in the classes which should
913 not be used by GCC. Also define the macro `REG_CLASS_FROM_LETTER' to return
914 `NO_REGS' if it is called with a letter for a class that shouldn't be used.
915
916 (However, if this class is not included in `GENERAL_REGS' and all of the
917 insn patterns whose constraints permit this class are controlled by target
918 switches, then GCC will automatically avoid using these registers when the
919 target switches are opposed to them.) */
920
921 void
922 frv_conditional_register_usage (void)
923 {
924 int i;
925
926 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
927 fixed_regs[i] = call_used_regs[i] = 1;
928
929 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
930 fixed_regs[i] = call_used_regs[i] = 1;
931
932 /* Reserve the registers used for conditional execution. At present, we need
933 1 ICC and 1 ICR register. */
934 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
935 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
936
937 if (TARGET_FIXED_CC)
938 {
939 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
940 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
941 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
942 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
943 }
944
945 if (TARGET_FDPIC)
946 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
947 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
948
949 #if 0
950 /* If -fpic, SDA_BASE_REG is the PIC register. */
951 if (g_switch_value == 0 && !flag_pic)
952 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
953
954 if (!flag_pic)
955 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
956 #endif
957 }
958
959 \f
960 /*
961 * Compute the stack frame layout
962 *
963 * Register setup:
964 * +---------------+-----------------------+-----------------------+
965 * |Register |type |caller-save/callee-save|
966 * +---------------+-----------------------+-----------------------+
967 * |GR0 |Zero register | - |
968 * |GR1 |Stack pointer(SP) | - |
969 * |GR2 |Frame pointer(FP) | - |
970 * |GR3 |Hidden parameter | caller save |
971 * |GR4-GR7 | - | caller save |
972 * |GR8-GR13 |Argument register | caller save |
973 * |GR14-GR15 | - | caller save |
974 * |GR16-GR31 | - | callee save |
975 * |GR32-GR47 | - | caller save |
976 * |GR48-GR63 | - | callee save |
977 * |FR0-FR15 | - | caller save |
978 * |FR16-FR31 | - | callee save |
979 * |FR32-FR47 | - | caller save |
980 * |FR48-FR63 | - | callee save |
981 * +---------------+-----------------------+-----------------------+
982 *
983 * Stack frame setup:
984 * Low
985 * SP-> |-----------------------------------|
986 * | Argument area |
987 * |-----------------------------------|
988 * | Register save area |
989 * |-----------------------------------|
990 * | Local variable save area |
991 * FP-> |-----------------------------------|
992 * | Old FP |
993 * |-----------------------------------|
994 * | Hidden parameter save area |
995 * |-----------------------------------|
996 * | Return address(LR) storage area |
997 * |-----------------------------------|
998 * | Padding for alignment |
999 * |-----------------------------------|
1000 * | Register argument area |
1001 * OLD SP-> |-----------------------------------|
1002 * | Parameter area |
1003 * |-----------------------------------|
1004 * High
1005 *
1006 * Argument area/Parameter area:
1007 *
1008 * When a function is called, this area is used for argument transfer. When
1009 * the argument is set up by the caller function, this area is referred to as
1010 * the argument area. When the argument is referenced by the callee function,
1011  * this area is referred to as the parameter area.  The area is allocated when
1012  * not all of the arguments can be placed in the argument registers at the
1013  * time of argument transfer.
1014 *
1015 * Register save area:
1016 *
1017 * This is a register save area that must be guaranteed for the caller
1018 * function. This area is not secured when the register save operation is not
1019 * needed.
1020 *
1021 * Local variable save area:
1022 *
1023 * This is the area for local variables and temporary variables.
1024 *
1025 * Old FP:
1026 *
1027 * This area stores the FP value of the caller function.
1028 *
1029 * Hidden parameter save area:
1030 *
1031 * This area stores the start address of the return value storage
1032 * area for a struct/union return function.
1033 * When a struct/union is used as the return value, the caller
1034 * function stores the return value storage area start address in
1035  * register GR3 and passes it to the callee function.
1036  * The callee function interprets the address stored in GR3
1037 * as the return value storage area start address.
1038 * When register GR3 needs to be saved into memory, the callee
1039 * function saves it in the hidden parameter save area. This
1040 * area is not secured when the save operation is not needed.
1041 *
1042 * Return address(LR) storage area:
1043 *
1044 * This area saves the LR. The LR stores the address of a return to the caller
1045 * function for the purpose of function calling.
1046 *
1047 * Argument register area:
1048 *
1049 * This area saves the argument register. This area is not secured when the
1050 * save operation is not needed.
1051 *
1052 * Argument:
1053 *
1054 * Arguments, the count of which equals the count of argument registers (6
1055 * words), are positioned in registers GR8 to GR13 and delivered to the callee
1056 * function. When a struct/union return function is called, the return value
1057 * area address is stored in register GR3. Arguments not placed in the
1058 * argument registers will be stored in the stack argument area for transfer
1059 * purposes. When an 8-byte type argument is to be delivered using registers,
1060 * it is divided into two and placed in two registers for transfer. When
1061 * argument registers must be saved to memory, the callee function secures an
1062 * argument register save area in the stack. In this case, a continuous
1063 * argument register save area must be established in the parameter area. The
1064 * argument register save area must be allocated as needed to cover the size of
1065 * the argument register to be saved. If the function has a variable count of
1066 * arguments, it saves all argument registers in the argument register save
1067 * area.
1068 *
1069 * Argument Extension Format:
1070 *
1071 * When an argument is to be stored in the stack, its type is converted to an
1072 * extended type in accordance with the individual argument type. The argument
1073 * is freed by the caller function after the return from the callee function is
1074 * made.
1075 *
1076 * +-----------------------+---------------+------------------------+
1077 * | Argument Type |Extended Type |Stack Storage Size(byte)|
1078 * +-----------------------+---------------+------------------------+
1079 * |char |int | 4 |
1080 * |signed char |int | 4 |
1081 * |unsigned char |int | 4 |
1082 * |[signed] short int |int | 4 |
1083 * |unsigned short int |int | 4 |
1084 * |[signed] int |No extension | 4 |
1085 * |unsigned int |No extension | 4 |
1086 * |[signed] long int |No extension | 4 |
1087 * |unsigned long int |No extension | 4 |
1088 * |[signed] long long int |No extension | 8 |
1089 * |unsigned long long int |No extension | 8 |
1090 * |float |double | 8 |
1091 * |double |No extension | 8 |
1092 * |long double |No extension | 8 |
1093 * |pointer |No extension | 4 |
1094 * |struct/union |- | 4 (*1) |
1095 * +-----------------------+---------------+------------------------+
1096 *
1097 * When a struct/union is to be delivered as an argument, the caller copies it
1098 * to the local variable area and delivers the address of that area.
1099 *
1100 * Return Value:
1101 *
1102 * +-------------------------------+----------------------+
1103 * |Return Value Type |Return Value Interface|
1104 * +-------------------------------+----------------------+
1105 * |void |None |
1106 * |[signed|unsigned] char |GR8 |
1107 * |[signed|unsigned] short int |GR8 |
1108 * |[signed|unsigned] int |GR8 |
1109 * |[signed|unsigned] long int |GR8 |
1110 * |pointer |GR8 |
1111 * |[signed|unsigned] long long int|GR8 & GR9 |
1112 * |float |GR8 |
1113 * |double |GR8 & GR9 |
1114 * |long double |GR8 & GR9 |
1115 * |struct/union |(*1) |
1116 * +-------------------------------+----------------------+
1117 *
1118 * When a struct/union is used as the return value, the caller function stores
1119 * the start address of the return value storage area into GR3 and then passes
1120 * it to the callee function. The callee function interprets GR3 as the start
1121 * address of the return value storage area. When this address needs to be
1122 * saved in memory, the callee function secures the hidden parameter save area
1123 * and saves the address in that area.
1124 */
1125
1126 frv_stack_t *
1127 frv_stack_info (void)
1128 {
1129 static frv_stack_t info, zero_info;
1130 frv_stack_t *info_ptr = &info;
1131 tree fndecl = current_function_decl;
1132 int varargs_p = 0;
1133 tree cur_arg;
1134 tree next_arg;
1135 int range;
1136 int alignment;
1137 int offset;
1138
1139 /* If we've already calculated the values and reload is complete,
1140 just return now. */
1141 if (frv_stack_cache)
1142 return frv_stack_cache;
1143
1144 /* Zero all fields. */
1145 info = zero_info;
1146
1147 /* Set up the register range information. */
1148 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1149 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1150 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1151 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1152
1153 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1154 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1155 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1156 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1157
1158 info_ptr->regs[STACK_REGS_LR].name = "lr";
1159 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1160 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1161 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1162
1163 info_ptr->regs[STACK_REGS_CC].name = "cc";
1164 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1165 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1166 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1167
1168 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1169 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1170 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1171
1172 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1173 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1174 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1175 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1176 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1177
1178 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1179 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1180 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1181 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1182
1183 info_ptr->regs[STACK_REGS_FP].name = "fp";
1184 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1185 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1186 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1187
1188 /* Determine if this is a stdarg function. If so, allocate space to store
1189 the 6 arguments. */
1190 if (cfun->stdarg)
1191 varargs_p = 1;
1192
1193 else
1194 {
1195 /* Find the last argument, and see if it is __builtin_va_alist. */
1196 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1197 {
1198 next_arg = DECL_CHAIN (cur_arg);
1199 if (next_arg == (tree)0)
1200 {
1201 if (DECL_NAME (cur_arg)
1202 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1203 varargs_p = 1;
1204
1205 break;
1206 }
1207 }
1208 }
1209
1210 /* Iterate over all of the register ranges. */
1211 for (range = 0; range < STACK_REGS_MAX; range++)
1212 {
1213 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1214 int first = reg_ptr->first;
1215 int last = reg_ptr->last;
1216 int size_1word = 0;
1217 int size_2words = 0;
1218 int regno;
1219
1220 /* Calculate which registers need to be saved & save area size. */
1221 switch (range)
1222 {
1223 default:
1224 for (regno = first; regno <= last; regno++)
1225 {
1226 if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
1227 || (crtl->calls_eh_return
1228 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1229 || (!TARGET_FDPIC && flag_pic
1230 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1231 {
1232 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1233 size_1word += UNITS_PER_WORD;
1234 }
1235 }
1236 break;
1237
1238 /* Calculate whether we need to create a frame after everything else
1239 has been processed. */
1240 case STACK_REGS_FP:
1241 break;
1242
1243 case STACK_REGS_LR:
1244 if (df_regs_ever_live_p (LR_REGNO)
1245 || profile_flag
1246 /* This is set for __builtin_return_address, etc. */
1247 || cfun->machine->frame_needed
1248 || (TARGET_LINKED_FP && frame_pointer_needed)
1249 || (!TARGET_FDPIC && flag_pic
1250 && crtl->uses_pic_offset_table))
1251 {
1252 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1253 size_1word += UNITS_PER_WORD;
1254 }
1255 break;
1256
1257 case STACK_REGS_STDARG:
1258 if (varargs_p)
1259 {
1260           /* If this is a stdarg function with a non-variadic
1261 argument split between registers and the stack,
1262 adjust the saved registers downward. */
1263 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1264 / UNITS_PER_WORD);
1265
1266 for (regno = first; regno <= last; regno++)
1267 {
1268 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1269 size_1word += UNITS_PER_WORD;
1270 }
1271
1272 info_ptr->stdarg_size = size_1word;
1273 }
1274 break;
1275
1276 case STACK_REGS_STRUCT:
1277 if (cfun->returns_struct)
1278 {
1279 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1280 size_1word += UNITS_PER_WORD;
1281 }
1282 break;
1283 }
1284
1285
1286 if (size_1word)
1287 {
1288 /* If this is a field, it only takes one word. */
1289 if (reg_ptr->field_p)
1290 size_1word = UNITS_PER_WORD;
1291
1292 /* Determine which register pairs can be saved together. */
1293 else if (reg_ptr->dword_p && TARGET_DWORD)
1294 {
1295 for (regno = first; regno < last; regno += 2)
1296 {
1297 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1298 {
1299 size_2words += 2 * UNITS_PER_WORD;
1300 size_1word -= 2 * UNITS_PER_WORD;
1301 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1302 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1303 }
1304 }
1305 }
1306
1307 reg_ptr->size_1word = size_1word;
1308 reg_ptr->size_2words = size_2words;
1309
1310 if (! reg_ptr->special_p)
1311 {
1312 info_ptr->regs_size_1word += size_1word;
1313 info_ptr->regs_size_2words += size_2words;
1314 }
1315 }
1316 }
1317
1318   /* Set up the sizes of each field in the frame body, making the sizes
1319 of each be divisible by the size of a dword if dword operations might
1320 be used, or the size of a word otherwise. */
1321 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1322
1323 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1324 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1325 + info_ptr->regs_size_1word,
1326 alignment);
1327 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1328
1329 info_ptr->pretend_size = crtl->args.pretend_args_size;
1330
1331 /* Work out the size of the frame, excluding the header. Both the frame
1332 body and register parameter area will be dword-aligned. */
1333 info_ptr->total_size
1334 = (ADDR_ALIGN (info_ptr->parameter_size
1335 + info_ptr->regs_size
1336 + info_ptr->vars_size,
1337 2 * UNITS_PER_WORD)
1338 + ADDR_ALIGN (info_ptr->pretend_size
1339 + info_ptr->stdarg_size,
1340 2 * UNITS_PER_WORD));
1341
1342 /* See if we need to create a frame at all, if so add header area. */
1343 if (info_ptr->total_size > 0
1344 || frame_pointer_needed
1345 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1346 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1347 {
1348 offset = info_ptr->parameter_size;
1349 info_ptr->header_size = 4 * UNITS_PER_WORD;
1350 info_ptr->total_size += 4 * UNITS_PER_WORD;
1351
1352 /* Calculate the offsets to save normal register pairs. */
1353 for (range = 0; range < STACK_REGS_MAX; range++)
1354 {
1355 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1356 if (! reg_ptr->special_p)
1357 {
1358 int first = reg_ptr->first;
1359 int last = reg_ptr->last;
1360 int regno;
1361
1362 for (regno = first; regno <= last; regno++)
1363 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1364 && regno != FRAME_POINTER_REGNUM
1365 && (regno < FIRST_ARG_REGNUM
1366 || regno > LAST_ARG_REGNUM))
1367 {
1368 info_ptr->reg_offset[regno] = offset;
1369 offset += 2 * UNITS_PER_WORD;
1370 }
1371 }
1372 }
1373
1374 /* Calculate the offsets to save normal single registers. */
1375 for (range = 0; range < STACK_REGS_MAX; range++)
1376 {
1377 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1378 if (! reg_ptr->special_p)
1379 {
1380 int first = reg_ptr->first;
1381 int last = reg_ptr->last;
1382 int regno;
1383
1384 for (regno = first; regno <= last; regno++)
1385 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1386 && regno != FRAME_POINTER_REGNUM
1387 && (regno < FIRST_ARG_REGNUM
1388 || regno > LAST_ARG_REGNUM))
1389 {
1390 info_ptr->reg_offset[regno] = offset;
1391 offset += UNITS_PER_WORD;
1392 }
1393 }
1394 }
1395
1396 /* Calculate the offset to save the local variables at. */
1397 offset = ADDR_ALIGN (offset, alignment);
1398 if (info_ptr->vars_size)
1399 {
1400 info_ptr->vars_offset = offset;
1401 offset += info_ptr->vars_size;
1402 }
1403
1404 /* Align header to a dword-boundary. */
1405 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1406
1407 /* Calculate the offsets in the fixed frame. */
1408 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1409 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1410 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1411
1412 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1413 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1414 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1415
1416 if (cfun->returns_struct)
1417 {
1418 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1419 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1420 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1421 }
1422
1423 /* Calculate the offsets to store the arguments passed in registers
1424 for stdarg functions. The register pairs are first and the single
1425 register if any is last. The register save area starts on a
1426 dword-boundary. */
1427 if (info_ptr->stdarg_size)
1428 {
1429 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1430 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1431 int regno;
1432
1433 /* Skip the header. */
1434 offset += 4 * UNITS_PER_WORD;
1435 for (regno = first; regno <= last; regno++)
1436 {
1437 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1438 {
1439 info_ptr->reg_offset[regno] = offset;
1440 offset += 2 * UNITS_PER_WORD;
1441 }
1442 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1443 {
1444 info_ptr->reg_offset[regno] = offset;
1445 offset += UNITS_PER_WORD;
1446 }
1447 }
1448 }
1449 }
1450
1451 if (reload_completed)
1452 frv_stack_cache = info_ptr;
1453
1454 return info_ptr;
1455 }
1456
1457 \f
1458 /* Print the information about the frv stack offsets, etc. when debugging. */
1459
1460 void
1461 frv_debug_stack (frv_stack_t *info)
1462 {
1463 int range;
1464
1465 if (!info)
1466 info = frv_stack_info ();
1467
1468 fprintf (stderr, "\nStack information for function %s:\n",
1469 ((current_function_decl && DECL_NAME (current_function_decl))
1470 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1471 : "<unknown>"));
1472
1473 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1474 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1475 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1476 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1477 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1478
1479 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1480 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1481 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1482 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1483
1484 for (range = 0; range < STACK_REGS_MAX; range++)
1485 {
1486 frv_stack_regs_t *regs = &(info->regs[range]);
1487 if ((regs->size_1word + regs->size_2words) > 0)
1488 {
1489 int first = regs->first;
1490 int last = regs->last;
1491 int regno;
1492
1493 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1494 regs->name, regs->size_1word + regs->size_2words,
1495 regs->size_1word, regs->size_2words);
1496
1497 for (regno = first; regno <= last; regno++)
1498 {
1499 if (info->save_p[regno] == REG_SAVE_1WORD)
1500 fprintf (stderr, " %s (%d)", reg_names[regno],
1501 info->reg_offset[regno]);
1502
1503 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1504 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1505 reg_names[regno+1], info->reg_offset[regno]);
1506 }
1507
1508 fputc ('\n', stderr);
1509 }
1510 }
1511
1512 fflush (stderr);
1513 }
1514
1515
1516 \f
1517
1518 /* Used during final to control the packing of insns. The value is
1519 1 if the current instruction should be packed with the next one,
1520 0 if it shouldn't or -1 if packing is disabled altogether. */
1521
1522 static int frv_insn_packing_flag;
1523
1524 /* True if the current function contains a far jump. */
1525
1526 static int
1527 frv_function_contains_far_jump (void)
1528 {
1529 rtx insn = get_insns ();
1530 while (insn != NULL
1531 && !(GET_CODE (insn) == JUMP_INSN
1532 /* Ignore tablejump patterns. */
1533 && GET_CODE (PATTERN (insn)) != ADDR_VEC
1534 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
1535 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1536 insn = NEXT_INSN (insn);
1537 return (insn != NULL);
1538 }
1539
1540 /* For the FRV, this function makes sure that a function with far jumps
1541 will return correctly. It also does the VLIW packing. */
1542
1543 static void
1544 frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1545 {
1546 /* If no frame was created, check whether the function uses a call
1547 instruction to implement a far jump. If so, save the link in gr3 and
1548 replace all returns to LR with returns to GR3. GR3 is used because it
1549      is call-clobbered, because it is not available to the register allocator,
1550 and because all functions that take a hidden argument pointer will have
1551 a stack frame. */
1552 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1553 {
1554 rtx insn;
1555
1556 /* Just to check that the above comment is true. */
1557 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1558
1559 /* Generate the instruction that saves the link register. */
1560 fprintf (file, "\tmovsg lr,gr3\n");
1561
1562 /* Replace the LR with GR3 in *return_internal patterns. The insn
1563 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1564 simply emit a different assembly directive because bralr and jmpl
1565 execute in different units. */
1566 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1567 if (GET_CODE (insn) == JUMP_INSN)
1568 {
1569 rtx pattern = PATTERN (insn);
1570 if (GET_CODE (pattern) == PARALLEL
1571 && XVECLEN (pattern, 0) >= 2
1572 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1573 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1574 {
1575 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1576 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1577 SET_REGNO (address, GPR_FIRST + 3);
1578 }
1579 }
1580 }
1581
1582 frv_pack_insns ();
1583
1584 /* Allow the garbage collector to free the nops created by frv_reorg. */
1585 memset (frv_nops, 0, sizeof (frv_nops));
1586 }
1587
1588 \f
1589 /* Return the next available temporary register in a given class. */
1590
1591 static rtx
1592 frv_alloc_temp_reg (
1593 frv_tmp_reg_t *info, /* which registers are available */
1594 enum reg_class rclass, /* register class desired */
1595 enum machine_mode mode, /* mode to allocate register with */
1596 int mark_as_used, /* register not available after allocation */
1597 int no_abort) /* return NULL instead of aborting */
1598 {
1599 int regno = info->next_reg[ (int)rclass ];
1600 int orig_regno = regno;
1601 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1602 int i, nr;
1603
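  /* Search RCLASS, starting with the register after the one handed out
     last time and wrapping around, for a register that is still marked
     as available in INFO->regs.  If we come back to where we started,
     there is no such register.  */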
1604 for (;;)
1605 {
1606 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1607 && TEST_HARD_REG_BIT (info->regs, regno))
1608 break;
1609
1610 if (++regno >= FIRST_PSEUDO_REGISTER)
1611 regno = 0;
1612 if (regno == orig_regno)
1613 {
1614 gcc_assert (no_abort);
1615 return NULL_RTX;
1616 }
1617 }
1618
1619 nr = HARD_REGNO_NREGS (regno, mode);
1620 info->next_reg[ (int)rclass ] = regno + nr;
1621
1622 if (mark_as_used)
1623 for (i = 0; i < nr; i++)
1624 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1625
1626 return gen_rtx_REG (mode, regno);
1627 }
1628
1629 \f
1630 /* Return an rtx with the value OFFSET, which will either be a register or a
1631 signed 12-bit integer. It can be used as the second operand in an "add"
1632 instruction, or as the index in a load or store.
1633
1634 The function returns a constant rtx if OFFSET is small enough, otherwise
1635 it loads the constant into register OFFSET_REGNO and returns that. */
1636 static rtx
1637 frv_frame_offset_rtx (int offset)
1638 {
1639 rtx offset_rtx = GEN_INT (offset);
1640 if (IN_RANGE_P (offset, -2048, 2047))
1641 return offset_rtx;
1642 else
1643 {
1644 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1645 if (IN_RANGE_P (offset, -32768, 32767))
1646 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1647 else
1648 {
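	  /* The offset does not fit in 16 bits, so build it in two halves
	     with a movsi_high/movsi_lo_sum pair.  */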
1649 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1650 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1651 }
1652 return reg_rtx;
1653 }
1654 }
1655
1656  /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
1657     prologue and epilogue use such expressions to access the stack.  */
1658 static rtx
1659 frv_frame_mem (enum machine_mode mode, rtx base, int offset)
1660 {
1661 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1662 base,
1663 frv_frame_offset_rtx (offset)));
1664 }
1665
1666 /* Generate a frame-related expression:
1667
1668        (set (mem (plus (sp) (const_int OFFSET))) REG).
1669
1670     Such expressions are used in REG_FRAME_RELATED_EXPR notes for more complex
1671 instructions. Marking the expressions as frame-related is superfluous if
1672 the note contains just a single set. But if the note contains a PARALLEL
1673 or SEQUENCE that has several sets, each set must be individually marked
1674 as frame-related. */
1675 static rtx
1676 frv_dwarf_store (rtx reg, int offset)
1677 {
1678 rtx set = gen_rtx_SET (VOIDmode,
1679 gen_rtx_MEM (GET_MODE (reg),
1680 plus_constant (stack_pointer_rtx,
1681 offset)),
1682 reg);
1683 RTX_FRAME_RELATED_P (set) = 1;
1684 return set;
1685 }
1686
1687 /* Emit a frame-related instruction whose pattern is PATTERN. The
1688 instruction is the last in a sequence that cumulatively performs the
1689 operation described by DWARF_PATTERN. The instruction is marked as
1690 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1691 DWARF_PATTERN. */
1692 static void
1693 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1694 {
1695 rtx insn = emit_insn (pattern);
1696 RTX_FRAME_RELATED_P (insn) = 1;
1697 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1698 dwarf_pattern,
1699 REG_NOTES (insn));
1700 }
1701
1702 /* Emit instructions that transfer REG to or from the memory location (sp +
1703 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1704 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1705 function to store registers and only the epilogue uses it to load them.
1706
1707 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1708 The generated instruction will use BASE as its base register. BASE may
1709 simply be the stack pointer, but if several accesses are being made to a
1710 region far away from the stack pointer, it may be more efficient to set
1711 up a temporary instead.
1712
1713 Store instructions will be frame-related and will be annotated with the
1714 overall effect of the store. Load instructions will be followed by a
1715 (use) to prevent later optimizations from zapping them.
1716
1717 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1718 as a temporary in such cases. */
1719 static void
1720 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1721 {
1722 enum machine_mode mode = GET_MODE (reg);
1723 rtx mem = frv_frame_mem (mode,
1724 accessor->base,
1725 stack_offset - accessor->base_offset);
1726
1727 if (accessor->op == FRV_LOAD)
1728 {
1729 if (SPR_P (REGNO (reg)))
1730 {
1731 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1732 emit_insn (gen_rtx_SET (VOIDmode, temp, mem));
1733 emit_insn (gen_rtx_SET (VOIDmode, reg, temp));
1734 }
1735 else
1736 {
1737 /* We cannot use reg+reg addressing for DImode access. */
1738 if (mode == DImode
1739 && GET_CODE (XEXP (mem, 0)) == PLUS
1740 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1741 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1742 {
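		  /* Add the two index registers into TEMP_REGNO and use
		     that as a single base register for the DImode load.  */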
1743 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1744 rtx insn = emit_move_insn (temp,
1745 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1746 XEXP (XEXP (mem, 0), 1)));
1747 mem = gen_rtx_MEM (DImode, temp);
1748 }
1749 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1750 }
1751 emit_use (reg);
1752 }
1753 else
1754 {
1755 if (SPR_P (REGNO (reg)))
1756 {
1757 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1758 emit_insn (gen_rtx_SET (VOIDmode, temp, reg));
1759 frv_frame_insn (gen_rtx_SET (Pmode, mem, temp),
1760 frv_dwarf_store (reg, stack_offset));
1761 }
1762 else if (mode == DImode)
1763 {
1764 /* For DImode saves, the dwarf2 version needs to be a SEQUENCE
1765 with a separate save for each register. */
1766 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1767 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1768 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1769 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1770
1771 /* Also we cannot use reg+reg addressing. */
1772 if (GET_CODE (XEXP (mem, 0)) == PLUS
1773 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1774 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1775 {
1776 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1777 rtx insn = emit_move_insn (temp,
1778 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1779 XEXP (XEXP (mem, 0), 1)));
1780 mem = gen_rtx_MEM (DImode, temp);
1781 }
1782
1783 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1784 gen_rtx_PARALLEL (VOIDmode,
1785 gen_rtvec (2, set1, set2)));
1786 }
1787 else
1788 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1789 frv_dwarf_store (reg, stack_offset));
1790 }
1791 }
1792
1793 /* A function that uses frv_frame_access to transfer a group of registers to
1794 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1795 is the stack information generated by frv_stack_info, and REG_SET is the
1796 number of the register set to transfer. */
1797 static void
1798 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1799 frv_stack_t *info,
1800 int reg_set)
1801 {
1802 frv_stack_regs_t *regs_info;
1803 int regno;
1804
1805 regs_info = &info->regs[reg_set];
1806 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1807 if (info->save_p[regno])
1808 frv_frame_access (accessor,
1809 info->save_p[regno] == REG_SAVE_2WORDS
1810 ? gen_rtx_REG (DImode, regno)
1811 : gen_rtx_REG (SImode, regno),
1812 info->reg_offset[regno]);
1813 }
1814
1815 /* Save or restore callee-saved registers that are kept outside the frame
1816 header. The function saves the registers if OP is FRV_STORE and restores
1817 them if OP is FRV_LOAD. INFO is the stack information generated by
1818 frv_stack_info. */
1819 static void
1820 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1821 {
1822 frv_frame_accessor_t accessor;
1823
1824 accessor.op = op;
1825 accessor.base = stack_pointer_rtx;
1826 accessor.base_offset = 0;
1827 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1828 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1829 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1830 }
1831
1832
1833 /* Called after register allocation to add any instructions needed for the
1834 prologue. Using a prologue insn is favored compared to putting all of the
1835 instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1836 it allows the scheduler to intermix instructions with the saves of
1837     the call-saved registers.  In some cases, it might be necessary
1838 to emit a barrier instruction as the last insn to prevent such
1839 scheduling.
1840
1841 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1842 so that the debug info generation code can handle them properly. */
1843 void
1844 frv_expand_prologue (void)
1845 {
1846 frv_stack_t *info = frv_stack_info ();
1847 rtx sp = stack_pointer_rtx;
1848 rtx fp = frame_pointer_rtx;
1849 frv_frame_accessor_t accessor;
1850
1851 if (TARGET_DEBUG_STACK)
1852 frv_debug_stack (info);
1853
1854 if (info->total_size == 0)
1855 return;
1856
1857 /* We're interested in three areas of the frame here:
1858
1859 A: the register save area
1860 B: the old FP
1861 C: the header after B
1862
1863 If the frame pointer isn't used, we'll have to set up A, B and C
1864 using the stack pointer. If the frame pointer is used, we'll access
1865 them as follows:
1866
1867 A: set up using sp
1868 B: set up using sp or a temporary (see below)
1869 C: set up using fp
1870
1871 We set up B using the stack pointer if the frame is small enough.
1872 Otherwise, it's more efficient to copy the old stack pointer into a
1873 temporary and use that.
1874
1875 Note that it's important to make sure the prologue and epilogue use the
1876 same registers to access A and C, since doing otherwise will confuse
1877 the aliasing code. */
1878
1879 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1880 isn't used, the same method will serve for C. */
1881 accessor.op = FRV_STORE;
1882 if (frame_pointer_needed && info->total_size > 2048)
1883 {
1884 rtx insn;
1885
1886 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1887 accessor.base_offset = info->total_size;
1888 insn = emit_insn (gen_movsi (accessor.base, sp));
1889 }
1890 else
1891 {
1892 accessor.base = stack_pointer_rtx;
1893 accessor.base_offset = 0;
1894 }
1895
1896 /* Allocate the stack space. */
1897 {
1898 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1899 rtx dwarf_offset = GEN_INT (-info->total_size);
1900
1901 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1902 gen_rtx_SET (Pmode,
1903 sp,
1904 gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1905 }
1906
1907 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1908 and point the new one to that location. */
1909 if (frame_pointer_needed)
1910 {
1911 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1912
1913 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1914 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1915 pointer. */
1916 rtx asm_src = plus_constant (accessor.base,
1917 fp_offset - accessor.base_offset);
1918 rtx dwarf_src = plus_constant (sp, fp_offset);
1919
1920 /* Store the old frame pointer at (sp + FP_OFFSET). */
1921 frv_frame_access (&accessor, fp, fp_offset);
1922
1923 /* Set up the new frame pointer. */
1924 frv_frame_insn (gen_rtx_SET (VOIDmode, fp, asm_src),
1925 gen_rtx_SET (VOIDmode, fp, dwarf_src));
1926
1927 /* Access region C from the frame pointer. */
1928 accessor.base = fp;
1929 accessor.base_offset = fp_offset;
1930 }
1931
1932 /* Set up region C. */
1933 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1934 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1935 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1936
1937 /* Set up region A. */
1938 frv_frame_access_standard_regs (FRV_STORE, info);
1939
1940 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1941 scheduler from moving loads before the stores saving the registers. */
1942 if (info->stdarg_size > 0)
1943 emit_insn (gen_blockage ());
1944
1945 /* Set up pic register/small data register for this function. */
1946 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1947 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1948 gen_rtx_REG (Pmode, LR_REGNO),
1949 gen_rtx_REG (SImode, OFFSET_REGNO)));
1950 }
1951
1952 \f
1953 /* Under frv, all of the work is done via frv_expand_epilogue, but
1954 this function provides a convenient place to do cleanup. */
1955
1956 static void
1957 frv_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
1958 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1959 {
1960 frv_stack_cache = (frv_stack_t *)0;
1961
1962 /* Zap last used registers for conditional execution. */
1963 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1964
1965 /* Release the bitmap of created insns. */
1966 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1967 }
1968
1969 \f
1970 /* Called after register allocation to add any instructions needed for the
1971 epilogue. Using an epilogue insn is favored compared to putting all of the
1972     instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1973     it allows the scheduler to intermix instructions with the restores of
1974     the call-saved registers.  In some cases, it might be necessary
1975 to emit a barrier instruction as the last insn to prevent such
1976 scheduling. */
1977
1978 void
1979 frv_expand_epilogue (bool emit_return)
1980 {
1981 frv_stack_t *info = frv_stack_info ();
1982 rtx fp = frame_pointer_rtx;
1983 rtx sp = stack_pointer_rtx;
1984 rtx return_addr;
1985 int fp_offset;
1986
1987 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1988
1989 /* Restore the stack pointer to its original value if alloca or the like
1990 is used. */
1991 if (! current_function_sp_is_unchanging)
1992 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1993
1994 /* Restore the callee-saved registers that were used in this function. */
1995 frv_frame_access_standard_regs (FRV_LOAD, info);
1996
1997 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1998 no return instruction should be emitted. */
1999 if (info->save_p[LR_REGNO])
2000 {
2001 int lr_offset;
2002 rtx mem;
2003
2004 /* Use the same method to access the link register's slot as we did in
2005 the prologue. In other words, use the frame pointer if available,
2006 otherwise use the stack pointer.
2007
2008 LR_OFFSET is the offset of the link register's slot from the start
2009 of the frame and MEM is a memory rtx for it. */
2010 lr_offset = info->reg_offset[LR_REGNO];
2011 if (frame_pointer_needed)
2012 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
2013 else
2014 mem = frv_frame_mem (Pmode, sp, lr_offset);
2015
2016 /* Load the old link register into a GPR. */
2017 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
2018 emit_insn (gen_rtx_SET (VOIDmode, return_addr, mem));
2019 }
2020 else
2021 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
2022
2023 /* Restore the old frame pointer. Emit a USE afterwards to make sure
2024 the load is preserved. */
2025 if (frame_pointer_needed)
2026 {
2027 emit_insn (gen_rtx_SET (VOIDmode, fp, gen_rtx_MEM (Pmode, fp)));
2028 emit_use (fp);
2029 }
2030
2031 /* Deallocate the stack frame. */
2032 if (info->total_size != 0)
2033 {
2034 rtx offset = frv_frame_offset_rtx (info->total_size);
2035 emit_insn (gen_stack_adjust (sp, sp, offset));
2036 }
2037
2038 /* If this function uses eh_return, add the final stack adjustment now. */
2039 if (crtl->calls_eh_return)
2040 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
2041
2042 if (emit_return)
2043 emit_jump_insn (gen_epilogue_return (return_addr));
2044 else
2045 {
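      /* No return instruction is emitted here, so make sure the return
	 address ends up in LR and keep that register live.  */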
2046 rtx lr = return_addr;
2047
2048 if (REGNO (return_addr) != LR_REGNO)
2049 {
2050 lr = gen_rtx_REG (Pmode, LR_REGNO);
2051 emit_move_insn (lr, return_addr);
2052 }
2053
2054 emit_use (lr);
2055 }
2056 }
2057
2058 \f
2059 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
2060
2061 static void
2062 frv_asm_output_mi_thunk (FILE *file,
2063 tree thunk_fndecl ATTRIBUTE_UNUSED,
2064 HOST_WIDE_INT delta,
2065 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
2066 tree function)
2067 {
2068 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
2069 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
2070 const char *name_jmp = reg_names[JUMP_REGNO];
2071 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
2072
2073 /* Do the add using an addi if possible. */
2074 if (IN_RANGE_P (delta, -2048, 2047))
2075 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
2076 else
2077 {
2078 const char *const name_add = reg_names[TEMP_REGNO];
2079 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
2080 parallel, delta, name_add);
2081 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
2082 delta, name_add);
2083 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
2084 }
2085
2086 if (TARGET_FDPIC)
2087 {
2088 const char *name_pic = reg_names[FDPIC_REGNO];
2089 name_jmp = reg_names[FDPIC_FPTR_REGNO];
2090
2091 if (flag_pic != 1)
2092 {
2093 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
2094 assemble_name (file, name_func);
2095 fprintf (file, "),%s\n", name_jmp);
2096
2097 fprintf (file, "\tsetlo #gotofffuncdesclo(");
2098 assemble_name (file, name_func);
2099 fprintf (file, "),%s\n", name_jmp);
2100
2101 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
2102 }
2103 else
2104 {
2105 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
2106 assemble_name (file, name_func);
2107 fprintf (file, "\t)), %s\n", name_jmp);
2108 }
2109 }
2110 else if (!flag_pic)
2111 {
2112 fprintf (file, "\tsethi%s #hi(", parallel);
2113 assemble_name (file, name_func);
2114 fprintf (file, "),%s\n", name_jmp);
2115
2116 fprintf (file, "\tsetlo #lo(");
2117 assemble_name (file, name_func);
2118 fprintf (file, "),%s\n", name_jmp);
2119 }
2120 else
2121 {
2122 /* Use JUMP_REGNO as a temporary PIC register. */
2123 const char *name_lr = reg_names[LR_REGNO];
2124 const char *name_gppic = name_jmp;
2125 const char *name_tmp = reg_names[TEMP_REGNO];
2126
2127 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2128 fprintf (file, "\tcall 1f\n");
2129 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2130 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2131 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2132 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2133 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2134
2135 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2136 assemble_name (file, name_func);
2137 fprintf (file, "),%s\n", name_tmp);
2138
2139 fprintf (file, "\tsetlo #gprello(");
2140 assemble_name (file, name_func);
2141 fprintf (file, "),%s\n", name_tmp);
2142
2143 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2144 }
2145
2146 /* Jump to the function address. */
2147 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2148 }
2149
2150 \f
2151
2152  /* On frv, require a frame pointer whenever we need to create a stack frame.  */
2153
2154 static bool
2155 frv_frame_pointer_required (void)
2156 {
2157    /* If we are forgoing the usual linkage requirements, we only need
2158 a frame pointer if the stack pointer might change. */
2159 if (!TARGET_LINKED_FP)
2160 return !current_function_sp_is_unchanging;
2161
2162 if (! current_function_is_leaf)
2163 return true;
2164
2165 if (get_frame_size () != 0)
2166 return true;
2167
2168 if (cfun->stdarg)
2169 return true;
2170
2171 if (!current_function_sp_is_unchanging)
2172 return true;
2173
2174 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2175 return true;
2176
2177 if (profile_flag)
2178 return true;
2179
2180 if (cfun->machine->frame_needed)
2181 return true;
2182
2183 return false;
2184 }
2185
2186 \f
2187 /* Worker function for TARGET_CAN_ELIMINATE. */
2188
2189 bool
2190 frv_can_eliminate (const int from, const int to)
2191 {
2192 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
2193 ? ! frame_pointer_needed
2194 : true);
2195 }
2196
2197 /* This macro is similar to `INITIAL_FRAME_POINTER_OFFSET'. It specifies the
2198 initial difference between the specified pair of registers. This macro must
2199 be defined if `ELIMINABLE_REGS' is defined. */
2200
2201 /* See frv_stack_info for more details on the frv stack frame. */
2202
2203 int
2204 frv_initial_elimination_offset (int from, int to)
2205 {
2206 frv_stack_t *info = frv_stack_info ();
2207 int ret = 0;
2208
2209 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2210 ret = info->total_size - info->pretend_size;
2211
2212 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2213 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2214
2215 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2216 ret = (info->total_size
2217 - info->reg_offset[FRAME_POINTER_REGNUM]
2218 - info->pretend_size);
2219
2220 else
2221 gcc_unreachable ();
2222
2223 if (TARGET_DEBUG_STACK)
2224 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2225 reg_names [from], reg_names[to], ret);
2226
2227 return ret;
2228 }
2229
2230 \f
2231 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2232
2233 static void
2234 frv_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
2235 enum machine_mode mode,
2236 tree type ATTRIBUTE_UNUSED,
2237 int *pretend_size,
2238 int second_time)
2239 {
2240 if (TARGET_DEBUG_ARG)
2241 fprintf (stderr,
2242 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2243 *cum, GET_MODE_NAME (mode), *pretend_size, second_time);
2244 }
2245
2246 \f
2247 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2248
2249 static rtx
2250 frv_expand_builtin_saveregs (void)
2251 {
2252 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2253
2254 if (TARGET_DEBUG_ARG)
2255 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2256 offset);
2257
2258 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2259 }
2260
2261 \f
2262 /* Expand __builtin_va_start to do the va_start macro. */
2263
2264 static void
2265 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2266 {
2267 tree t;
2268 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2269
2270 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2271 GEN_INT (UNITS_PER_WORD * num));
2272
2273 if (TARGET_DEBUG_ARG)
2274 {
2275 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2276 crtl->args.info, num);
2277
2278 debug_rtx (nextarg);
2279 }
2280
2281 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2282 fold_convert (TREE_TYPE (valist),
2283 make_tree (sizetype, nextarg)));
2284 TREE_SIDE_EFFECTS (t) = 1;
2285
2286 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2287 }
2288
2289 \f
2290 /* Expand a block move operation, and return 1 if successful. Return 0
2291 if we should let the compiler generate normal code.
2292
2293 operands[0] is the destination
2294 operands[1] is the source
2295 operands[2] is the length
2296 operands[3] is the alignment */
2297
2298 /* Maximum number of loads to do before doing the stores */
2299 #ifndef MAX_MOVE_REG
2300 #define MAX_MOVE_REG 4
2301 #endif
2302
2303 /* Maximum number of total loads to do. */
2304 #ifndef TOTAL_MOVE_REG
2305 #define TOTAL_MOVE_REG 8
2306 #endif
2307
2308 int
2309 frv_expand_block_move (rtx operands[])
2310 {
2311 rtx orig_dest = operands[0];
2312 rtx orig_src = operands[1];
2313 rtx bytes_rtx = operands[2];
2314 rtx align_rtx = operands[3];
2315 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2316 int align;
2317 int bytes;
2318 int offset;
2319 int num_reg;
2320 int i;
2321 rtx src_reg;
2322 rtx dest_reg;
2323 rtx src_addr;
2324 rtx dest_addr;
2325 rtx src_mem;
2326 rtx dest_mem;
2327 rtx tmp_reg;
2328 rtx stores[MAX_MOVE_REG];
2329 int move_bytes;
2330 enum machine_mode mode;
2331
2332 /* If this is not a fixed size move, just call memcpy. */
2333 if (! constp)
2334 return FALSE;
2335
2336 /* This should be a fixed size alignment. */
2337 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2338
2339 align = INTVAL (align_rtx);
2340
2341 /* Anything to move? */
2342 bytes = INTVAL (bytes_rtx);
2343 if (bytes <= 0)
2344 return TRUE;
2345
2346 /* Don't support real large moves. */
2347 if (bytes > TOTAL_MOVE_REG*align)
2348 return FALSE;
2349
2350 /* Move the address into scratch registers. */
2351 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2352 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2353
2354 num_reg = offset = 0;
2355 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2356 {
2357 /* Calculate the correct offset for src/dest. */
2358 if (offset == 0)
2359 {
2360 src_addr = src_reg;
2361 dest_addr = dest_reg;
2362 }
2363 else
2364 {
2365 src_addr = plus_constant (src_reg, offset);
2366 dest_addr = plus_constant (dest_reg, offset);
2367 }
2368
2369 /* Generate the appropriate load and store, saving the stores
2370 for later. */
2371 if (bytes >= 4 && align >= 4)
2372 mode = SImode;
2373 else if (bytes >= 2 && align >= 2)
2374 mode = HImode;
2375 else
2376 mode = QImode;
2377
2378 move_bytes = GET_MODE_SIZE (mode);
2379 tmp_reg = gen_reg_rtx (mode);
2380 src_mem = change_address (orig_src, mode, src_addr);
2381 dest_mem = change_address (orig_dest, mode, dest_addr);
2382 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg, src_mem));
2383 stores[num_reg++] = gen_rtx_SET (VOIDmode, dest_mem, tmp_reg);
2384
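      /* Once MAX_MOVE_REG loads have been queued, emit their stores before
	 starting another batch, so only a few temporaries are live at once.  */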
2385 if (num_reg >= MAX_MOVE_REG)
2386 {
2387 for (i = 0; i < num_reg; i++)
2388 emit_insn (stores[i]);
2389 num_reg = 0;
2390 }
2391 }
2392
2393 for (i = 0; i < num_reg; i++)
2394 emit_insn (stores[i]);
2395
2396 return TRUE;
2397 }
2398
2399 \f
2400 /* Expand a block clear operation, and return 1 if successful. Return 0
2401 if we should let the compiler generate normal code.
2402
2403 operands[0] is the destination
2404 operands[1] is the length
2405 operands[3] is the alignment */
2406
2407 int
2408 frv_expand_block_clear (rtx operands[])
2409 {
2410 rtx orig_dest = operands[0];
2411 rtx bytes_rtx = operands[1];
2412 rtx align_rtx = operands[3];
2413 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2414 int align;
2415 int bytes;
2416 int offset;
2417 int num_reg;
2418 rtx dest_reg;
2419 rtx dest_addr;
2420 rtx dest_mem;
2421 int clear_bytes;
2422 enum machine_mode mode;
2423
2424    /* If this is not a fixed size clear, just call memset.  */
2425 if (! constp)
2426 return FALSE;
2427
2428 /* This should be a fixed size alignment. */
2429 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2430
2431 align = INTVAL (align_rtx);
2432
2433 /* Anything to move? */
2434 bytes = INTVAL (bytes_rtx);
2435 if (bytes <= 0)
2436 return TRUE;
2437
2438 /* Don't support real large clears. */
2439 if (bytes > TOTAL_MOVE_REG*align)
2440 return FALSE;
2441
2442 /* Move the address into a scratch register. */
2443 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2444
2445 num_reg = offset = 0;
2446 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2447 {
2448        /* Calculate the correct offset for the destination.  */
2449 dest_addr = ((offset == 0)
2450 ? dest_reg
2451 : plus_constant (dest_reg, offset));
2452
2453 /* Generate the appropriate store of gr0. */
2454 if (bytes >= 4 && align >= 4)
2455 mode = SImode;
2456 else if (bytes >= 2 && align >= 2)
2457 mode = HImode;
2458 else
2459 mode = QImode;
2460
2461 clear_bytes = GET_MODE_SIZE (mode);
2462 dest_mem = change_address (orig_dest, mode, dest_addr);
2463 emit_insn (gen_rtx_SET (VOIDmode, dest_mem, const0_rtx));
2464 }
2465
2466 return TRUE;
2467 }
2468
2469 \f
2470  /* The following variable holds the operands of the insn currently being
2471     output; it is used when emitting assembler modifiers for that insn.  */
2472
2473 static rtx *frv_insn_operands;
2474
2475  /* The following function adds the assembler code suffix .p to the
2476     current insn's mnemonic when necessary.  */
2477
2478 const char *
2479 frv_asm_output_opcode (FILE *f, const char *ptr)
2480 {
2481 int c;
2482
2483 if (frv_insn_packing_flag <= 0)
2484 return ptr;
2485
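  /* Copy the mnemonic up to the first whitespace, expanding any
     %<letter><digit> operand references along the way, and then append
     the packing suffix ".p".  */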
2486 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2487 {
2488 c = *ptr++;
2489 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2490 || (*ptr >= 'A' && *ptr <= 'Z')))
2491 {
2492 int letter = *ptr++;
2493
2494 c = atoi (ptr);
2495 frv_print_operand (f, frv_insn_operands [c], letter);
2496 while ((c = *ptr) >= '0' && c <= '9')
2497 ptr++;
2498 }
2499 else
2500 fputc (c, f);
2501 }
2502
2503 fprintf (f, ".p");
2504
2505 return ptr;
2506 }
2507
2508 /* Set up the packing bit for the current output insn. Note that this
2509 function is not called for asm insns. */
2510
2511 void
2512 frv_final_prescan_insn (rtx insn, rtx *opvec,
2513 int noperands ATTRIBUTE_UNUSED)
2514 {
2515 if (INSN_P (insn))
2516 {
2517 if (frv_insn_packing_flag >= 0)
2518 {
2519 frv_insn_operands = opvec;
2520 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2521 }
2522 else if (recog_memoized (insn) >= 0
2523 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2524 /* Packing optimizations have been disabled, but INSN can only
2525 be issued in M1. Insert an mnop in M0. */
2526 fprintf (asm_out_file, "\tmnop.p\n");
2527 }
2528 }
2529
2530
2531 \f
2532 /* A C expression whose value is RTL representing the address in a stack frame
2533 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2534 an RTL expression for the address of the stack frame itself.
2535
2536 If you don't define this macro, the default is to return the value of
2537 FRAMEADDR--that is, the stack frame address is also the address of the stack
2538 word that points to the previous frame. */
2539
2540 /* The default is correct, but we need to make sure the frame gets created. */
2541 rtx
2542 frv_dynamic_chain_address (rtx frame)
2543 {
2544 cfun->machine->frame_needed = 1;
2545 return frame;
2546 }
2547
2548
2549 /* A C expression whose value is RTL representing the value of the return
2550 address for the frame COUNT steps up from the current frame, after the
2551 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2552 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2553 defined.
2554
2555 The value of the expression must always be the correct address when COUNT is
2556     zero, but may be `NULL_RTX' if there is no way to determine the return
2557 address of other frames. */
2558
2559 rtx
2560 frv_return_addr_rtx (int count, rtx frame)
2561 {
2562 if (count != 0)
2563 return const0_rtx;
2564 cfun->machine->frame_needed = 1;
2565 return gen_rtx_MEM (Pmode, plus_constant (frame, 8));
2566 }
2567
2568 /* Given a memory reference MEMREF, interpret the referenced memory as
2569 an array of MODE values, and return a reference to the element
2570 specified by INDEX. Assume that any pre-modification implicit in
2571 MEMREF has already happened.
2572
2573 MEMREF must be a legitimate operand for modes larger than SImode.
2574 frv_legitimate_address_p forbids register+register addresses, which
2575 this function cannot handle. */
2576 rtx
2577 frv_index_memory (rtx memref, enum machine_mode mode, int index)
2578 {
2579 rtx base = XEXP (memref, 0);
2580 if (GET_CODE (base) == PRE_MODIFY)
2581 base = XEXP (base, 0);
2582 return change_address (memref, mode,
2583 plus_constant (base, index * GET_MODE_SIZE (mode)));
2584 }
2585
2586 \f
2587 /* Print a memory address as an operand to reference that memory location. */
2588 static void
2589 frv_print_operand_address (FILE * stream, rtx x)
2590 {
2591 if (GET_CODE (x) == MEM)
2592 x = XEXP (x, 0);
2593
2594 switch (GET_CODE (x))
2595 {
2596 case REG:
2597 fputs (reg_names [ REGNO (x)], stream);
2598 return;
2599
2600 case CONST_INT:
2601 fprintf (stream, "%ld", (long) INTVAL (x));
2602 return;
2603
2604 case SYMBOL_REF:
2605 assemble_name (stream, XSTR (x, 0));
2606 return;
2607
2608 case LABEL_REF:
2609 case CONST:
2610 output_addr_const (stream, x);
2611 return;
2612
2613 case PLUS:
2614 /* Poorly constructed asm statements can trigger this alternative.
2615 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2616 frv_print_operand_memory_reference (stream, x, 0);
2617 return;
2618
2619 default:
2620 break;
2621 }
2622
2623 fatal_insn ("bad insn to frv_print_operand_address:", x);
2624 }
2625
2626 \f
2627 static void
2628 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2629 {
2630 int regno = true_regnum (x);
2631 if (GPR_P (regno))
2632 fputs (reg_names[regno], stream);
2633 else
2634 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2635 }
2636
2637 /* Print a memory reference suitable for the ld/st instructions. */
2638
2639 static void
2640 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2641 {
2642 struct frv_unspec unspec;
2643 rtx x0 = NULL_RTX;
2644 rtx x1 = NULL_RTX;
2645
2646 switch (GET_CODE (x))
2647 {
2648 case SUBREG:
2649 case REG:
2650 x0 = x;
2651 break;
2652
2653 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2654 x0 = XEXP (x, 0);
2655 x1 = XEXP (XEXP (x, 1), 1);
2656 break;
2657
2658 case CONST_INT:
2659 x1 = x;
2660 break;
2661
2662 case PLUS:
2663 x0 = XEXP (x, 0);
2664 x1 = XEXP (x, 1);
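      /* Canonicalize: if the constant comes first, swap the operands so
	 that x0 is the base and x1 the offset.  */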
2665 if (GET_CODE (x0) == CONST_INT)
2666 {
2667 x0 = XEXP (x, 1);
2668 x1 = XEXP (x, 0);
2669 }
2670 break;
2671
2672 default:
2673 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2674 break;
2675
2676 }
2677
2678 if (addr_offset)
2679 {
2680 if (!x1)
2681 x1 = const0_rtx;
2682 else if (GET_CODE (x1) != CONST_INT)
2683 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2684 }
2685
2686 fputs ("@(", stream);
2687 if (!x0)
2688 fputs (reg_names[GPR_R0], stream);
2689 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2690 frv_print_operand_memory_reference_reg (stream, x0);
2691 else
2692 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2693
2694 fputs (",", stream);
2695 if (!x1)
2696 fputs (reg_names [GPR_R0], stream);
2697
2698 else
2699 {
2700 switch (GET_CODE (x1))
2701 {
2702 case SUBREG:
2703 case REG:
2704 frv_print_operand_memory_reference_reg (stream, x1);
2705 break;
2706
2707 case CONST_INT:
2708 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2709 break;
2710
2711 case CONST:
2712 if (!frv_const_unspec_p (x1, &unspec))
2713 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2714 frv_output_const_unspec (stream, &unspec);
2715 break;
2716
2717 default:
2718 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2719 }
2720 }
2721
2722 fputs (")", stream);
2723 }
2724
2725 \f
2726 /* Return 2 for likely branches and 0 for non-likely branches */
2727
2728 #define FRV_JUMP_LIKELY 2
2729 #define FRV_JUMP_NOT_LIKELY 0
2730
2731 static int
2732 frv_print_operand_jump_hint (rtx insn)
2733 {
2734 rtx note;
2735 rtx labelref;
2736 int ret;
2737 HOST_WIDE_INT prob = -1;
2738 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2739
2740 gcc_assert (GET_CODE (insn) == JUMP_INSN);
2741
2742 /* Assume any non-conditional jump is likely. */
2743 if (! any_condjump_p (insn))
2744 ret = FRV_JUMP_LIKELY;
2745
2746 else
2747 {
2748 labelref = condjump_label (insn);
2749 if (labelref)
2750 {
2751 rtx label = XEXP (labelref, 0);
2752 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2753 ? BACKWARD
2754 : FORWARD);
2755 }
2756
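      /* Without a branch probability note, predict backward branches
	 (typically loop branches) as likely and forward branches as
	 not likely.  */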
2757 note = find_reg_note (insn, REG_BR_PROB, 0);
2758 if (!note)
2759 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2760
2761 else
2762 {
2763 prob = INTVAL (XEXP (note, 0));
2764 ret = ((prob >= (REG_BR_PROB_BASE / 2))
2765 ? FRV_JUMP_LIKELY
2766 : FRV_JUMP_NOT_LIKELY);
2767 }
2768 }
2769
2770 #if 0
2771 if (TARGET_DEBUG)
2772 {
2773 char *direction;
2774
2775 switch (jump_type)
2776 {
2777 default:
2778 case UNKNOWN: direction = "unknown jump direction"; break;
2779 case BACKWARD: direction = "jump backward"; break;
2780 case FORWARD: direction = "jump forward"; break;
2781 }
2782
2783 fprintf (stderr,
2784 "%s: uid %ld, %s, probability = %ld, max prob. = %ld, hint = %d\n",
2785 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2786 (long)INSN_UID (insn), direction, (long)prob,
2787 (long)REG_BR_PROB_BASE, ret);
2788 }
2789 #endif
2790
2791 return ret;
2792 }
2793
2794 \f
2795 /* Return the comparison operator to use for CODE given that the ICC
2796 register is OP0. */
2797
2798 static const char *
2799 comparison_string (enum rtx_code code, rtx op0)
2800 {
2801 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
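  /* When the comparison was done in CC_NZmode, only the negative and zero
     flags are valid, so several conditions need different mnemonics from
     the full ICC forms.  */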
2802 switch (code)
2803 {
2804 default: output_operand_lossage ("bad condition code");
2805 case EQ: return "eq";
2806 case NE: return "ne";
2807 case LT: return is_nz_p ? "n" : "lt";
2808 case LE: return "le";
2809 case GT: return "gt";
2810 case GE: return is_nz_p ? "p" : "ge";
2811 case LTU: return is_nz_p ? "no" : "c";
2812 case LEU: return is_nz_p ? "eq" : "ls";
2813 case GTU: return is_nz_p ? "ne" : "hi";
2814 case GEU: return is_nz_p ? "ra" : "nc";
2815 }
2816 }
2817
2818 /* Print an operand to an assembler instruction.
2819
2820 `%' followed by a letter and a digit says to output an operand in an
2821 alternate fashion. Four letters have standard, built-in meanings
2822 described below. The hook `TARGET_PRINT_OPERAND' can define
2823 additional letters with nonstandard meanings.
2824
2825 `%cDIGIT' can be used to substitute an operand that is a constant value
2826 without the syntax that normally indicates an immediate operand.
2827
2828 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2829 before printing.
2830
2831 `%aDIGIT' can be used to substitute an operand as if it were a memory
2832 reference, with the actual operand treated as the address. This may be
2833 useful when outputting a "load address" instruction, because often the
2834 assembler syntax for such an instruction requires you to write the operand
2835 as if it were a memory reference.
2836
2837 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2838
2839 `%=' outputs a number which is unique to each instruction in the entire
2840 compilation. This is useful for making local labels to be referred to more
2841 than once in a single template that generates multiple assembler
2842 instructions.
2843
2844 `%' followed by a punctuation character specifies a substitution that
2845 does not use an operand. Only one case is standard: `%%' outputs a
2846 `%' into the assembler code. Other nonstandard cases can be defined
2847 in the `TARGET_PRINT_OPERAND' hook. You must also define which
2848 punctuation characters are valid with the
2849 `TARGET_PRINT_OPERAND_PUNCT_VALID_P' hook. */
2850
2851 static void
2852 frv_print_operand (FILE * file, rtx x, int code)
2853 {
2854 struct frv_unspec unspec;
2855 HOST_WIDE_INT value;
2856 int offset;
2857
2858 if (code != 0 && !ISALPHA (code))
2859 value = 0;
2860
2861 else if (GET_CODE (x) == CONST_INT)
2862 value = INTVAL (x);
2863
2864 else if (GET_CODE (x) == CONST_DOUBLE)
2865 {
2866 if (GET_MODE (x) == SFmode)
2867 {
2868 REAL_VALUE_TYPE rv;
2869 long l;
2870
2871 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2872 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2873 value = l;
2874 }
2875
2876 else if (GET_MODE (x) == VOIDmode)
2877 value = CONST_DOUBLE_LOW (x);
2878
2879 else
2880 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2881 }
2882
2883 else
2884 value = 0;
2885
2886 switch (code)
2887 {
2888
2889 case '.':
2890 /* Output r0. */
2891 fputs (reg_names[GPR_R0], file);
2892 break;
2893
2894 case '#':
2895 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2896 break;
2897
2898 case '@':
2899 /* Output small data area base register (gr16). */
2900 fputs (reg_names[SDA_BASE_REG], file);
2901 break;
2902
2903 case '~':
2904 /* Output pic register (gr17). */
2905 fputs (reg_names[PIC_REGNO], file);
2906 break;
2907
2908 case '*':
2909 /* Output the temporary integer CCR register. */
2910 fputs (reg_names[ICR_TEMP], file);
2911 break;
2912
2913 case '&':
2914 /* Output the temporary integer CC register. */
2915 fputs (reg_names[ICC_TEMP], file);
2916 break;
2917
2918 /* case 'a': print an address. */
2919
2920 case 'C':
2921 /* Print appropriate test for integer branch false operation. */
2922 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2923 XEXP (x, 0)), file);
2924 break;
2925
2926 case 'c':
2927 /* Print appropriate test for integer branch true operation. */
2928 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2929 break;
2930
2931 case 'e':
2932 /* Print 1 for a NE and 0 for an EQ to give the final argument
2933 for a conditional instruction. */
2934 if (GET_CODE (x) == NE)
2935 fputs ("1", file);
2936
2937 else if (GET_CODE (x) == EQ)
2938 fputs ("0", file);
2939
2940 else
2941 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2942 break;
2943
2944 case 'F':
2945 /* Print appropriate test for floating point branch false operation. */
2946 switch (GET_CODE (x))
2947 {
2948 default:
2949 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2950
2951 case EQ: fputs ("ne", file); break;
2952 case NE: fputs ("eq", file); break;
2953 case LT: fputs ("uge", file); break;
2954 case LE: fputs ("ug", file); break;
2955 case GT: fputs ("ule", file); break;
2956 case GE: fputs ("ul", file); break;
2957 }
2958 break;
2959
2960 case 'f':
2961 /* Print appropriate test for floating point branch true operation. */
2962 switch (GET_CODE (x))
2963 {
2964 default:
2965 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2966
2967 case EQ: fputs ("eq", file); break;
2968 case NE: fputs ("ne", file); break;
2969 case LT: fputs ("lt", file); break;
2970 case LE: fputs ("le", file); break;
2971 case GT: fputs ("gt", file); break;
2972 case GE: fputs ("ge", file); break;
2973 }
2974 break;
2975
2976 case 'g':
2977 /* Print appropriate GOT function. */
2978 if (GET_CODE (x) != CONST_INT)
2979 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2980 fputs (unspec_got_name (INTVAL (x)), file);
2981 break;
2982
2983 case 'I':
2984 /* Print 'i' if the operand is a constant, or is a memory reference that
2985 adds a constant. */
2986 if (GET_CODE (x) == MEM)
2987 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2988 ? XEXP (XEXP (x, 0), 1)
2989 : XEXP (x, 0));
2990 else if (GET_CODE (x) == PLUS)
2991 x = XEXP (x, 1);
2992
2993 switch (GET_CODE (x))
2994 {
2995 default:
2996 break;
2997
2998 case CONST_INT:
2999 case SYMBOL_REF:
3000 case CONST:
3001 fputs ("i", file);
3002 break;
3003 }
3004 break;
3005
3006 case 'i':
3007 /* For jump instructions, print 'i' if the operand is a constant or
3008 is an expression that adds a constant. */
3009 if (GET_CODE (x) == CONST_INT)
3010 fputs ("i", file);
3011
3012 else
3013 {
3014 if (GET_CODE (x) == CONST_INT
3015 || (GET_CODE (x) == PLUS
3016 && (GET_CODE (XEXP (x, 1)) == CONST_INT
3017 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
3018 fputs ("i", file);
3019 }
3020 break;
3021
3022 case 'L':
3023 /* Print the lower register of a double word register pair */
3024 if (GET_CODE (x) == REG)
3025 fputs (reg_names[ REGNO (x)+1 ], file);
3026 else
3027 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
3028 break;
3029
3030 /* case 'l': print a LABEL_REF. */
3031
3032 case 'M':
3033 case 'N':
3034      /* Print a memory reference for ld/st/jmp; %N prints a memory reference
3035         for the second word of double-word memory operations.  */
3036 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
3037 switch (GET_CODE (x))
3038 {
3039 default:
3040 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
3041
3042 case MEM:
3043 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
3044 break;
3045
3046 case REG:
3047 case SUBREG:
3048 case CONST_INT:
3049 case PLUS:
3050 case SYMBOL_REF:
3051 frv_print_operand_memory_reference (file, x, offset);
3052 break;
3053 }
3054 break;
3055
3056 case 'O':
3057      /* Print the opcode for the operation.  */
3058 switch (GET_CODE (x))
3059 {
3060 default:
3061 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
3062
3063 case PLUS: fputs ("add", file); break;
3064 case MINUS: fputs ("sub", file); break;
3065 case AND: fputs ("and", file); break;
3066 case IOR: fputs ("or", file); break;
3067 case XOR: fputs ("xor", file); break;
3068 case ASHIFT: fputs ("sll", file); break;
3069 case ASHIFTRT: fputs ("sra", file); break;
3070 case LSHIFTRT: fputs ("srl", file); break;
3071 }
3072 break;
3073
3074 /* case 'n': negate and print a constant int. */
3075
3076 case 'P':
3077 /* Print PIC label using operand as the number. */
3078 if (GET_CODE (x) != CONST_INT)
3079 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
3080
3081 fprintf (file, ".LCF%ld", (long)INTVAL (x));
3082 break;
3083
3084 case 'U':
3085      /* Print 'u' if the operand is an update load/store.  */
3086 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
3087 fputs ("u", file);
3088 break;
3089
3090 case 'z':
3091 /* If value is 0, print gr0, otherwise it must be a register. */
3092 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
3093 fputs (reg_names[GPR_R0], file);
3094
3095 else if (GET_CODE (x) == REG)
3096 fputs (reg_names [REGNO (x)], file);
3097
3098 else
3099 fatal_insn ("bad insn in frv_print_operand, z case", x);
3100 break;
3101
3102 case 'x':
3103 /* Print constant in hex. */
3104 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3105 {
3106 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
3107 break;
3108 }
3109
3110 /* Fall through. */
3111
3112 case '\0':
3113 if (GET_CODE (x) == REG)
3114 fputs (reg_names [REGNO (x)], file);
3115
3116 else if (GET_CODE (x) == CONST_INT
3117 || GET_CODE (x) == CONST_DOUBLE)
3118 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
3119
3120 else if (frv_const_unspec_p (x, &unspec))
3121 frv_output_const_unspec (file, &unspec);
3122
3123 else if (GET_CODE (x) == MEM)
3124 frv_print_operand_address (file, XEXP (x, 0));
3125
3126 else if (CONSTANT_ADDRESS_P (x))
3127 frv_print_operand_address (file, x);
3128
3129 else
3130 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3131
3132 break;
3133
3134 default:
3135 fatal_insn ("frv_print_operand: unknown code", x);
3136 break;
3137 }
3138
3139 return;
3140 }
3141
3142 static bool
3143 frv_print_operand_punct_valid_p (unsigned char code)
3144 {
3145 return (code == '.' || code == '#' || code == '@' || code == '~'
3146 || code == '*' || code == '&');
3147 }
3148
3149 \f
3150 /* A C statement (sans semicolon) for initializing the variable CUM for the
3151 state at the beginning of the argument list. The variable has type
3152 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3153 of the function which will receive the args, or 0 if the args are to a
3154 compiler support library function. The value of INDIRECT is nonzero when
3155 processing an indirect call, for example a call through a function pointer.
3156 The value of INDIRECT is zero for a call to an explicitly named function, a
3157 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3158 arguments for the function being compiled.
3159
3160 When processing a call to a compiler support library function, LIBNAME
3161 identifies which one. It is a `symbol_ref' rtx which contains the name of
3162 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3163 being processed. Thus, each time this macro is called, either LIBNAME or
3164 FNTYPE is nonzero, but never both of them at once. */
3165
3166 void
3167 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3168 tree fntype,
3169 rtx libname,
3170 tree fndecl,
3171 int incoming)
3172 {
3173 *cum = FIRST_ARG_REGNUM;
3174
3175 if (TARGET_DEBUG_ARG)
3176 {
3177 fprintf (stderr, "\ninit_cumulative_args:");
3178 if (!fndecl && fntype)
3179 fputs (" indirect", stderr);
3180
3181 if (incoming)
3182 fputs (" incoming", stderr);
3183
3184 if (fntype)
3185 {
3186 tree ret_type = TREE_TYPE (fntype);
3187 fprintf (stderr, " return=%s,",
3188 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3189 }
3190
3191 if (libname && GET_CODE (libname) == SYMBOL_REF)
3192 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3193
3194 if (cfun->returns_struct)
3195 fprintf (stderr, " return-struct");
3196
3197 putc ('\n', stderr);
3198 }
3199 }
3200
3201 \f
3202 /* Return true if we should pass an argument on the stack rather than
3203 in registers. */
3204
3205 static bool
3206 frv_must_pass_in_stack (enum machine_mode mode, const_tree type)
3207 {
3208 if (mode == BLKmode)
3209 return true;
3210 if (type == NULL)
3211 return false;
3212 return AGGREGATE_TYPE_P (type);
3213 }
3214
3215 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3216 argument with the specified mode and type. If it is not defined,
3217 `PARM_BOUNDARY' is used for all arguments. */
3218
3219 int
3220 frv_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
3221 tree type ATTRIBUTE_UNUSED)
3222 {
3223 return BITS_PER_WORD;
3224 }
3225
3226 rtx
3227 frv_function_arg (CUMULATIVE_ARGS *cum,
3228 enum machine_mode mode,
3229 tree type ATTRIBUTE_UNUSED,
3230 int named,
3231 int incoming ATTRIBUTE_UNUSED)
3232 {
3233 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3234 int arg_num = *cum;
3235 rtx ret;
3236 const char *debstr;
3237
3238 /* Return a marker for use in the call instruction. */
3239 if (xmode == VOIDmode)
3240 {
3241 ret = const0_rtx;
3242 debstr = "<0>";
3243 }
3244
3245 else if (arg_num <= LAST_ARG_REGNUM)
3246 {
3247 ret = gen_rtx_REG (xmode, arg_num);
3248 debstr = reg_names[arg_num];
3249 }
3250
3251 else
3252 {
3253 ret = NULL_RTX;
3254 debstr = "memory";
3255 }
3256
3257 if (TARGET_DEBUG_ARG)
3258 fprintf (stderr,
3259 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3260 arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr);
3261
3262 return ret;
3263 }
3264
3265 \f
3266 /* A C statement (sans semicolon) to update the summarizer variable CUM to
3267 advance past an argument in the argument list. The values MODE, TYPE and
3268 NAMED describe that argument. Once this is done, the variable CUM is
3269 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
3270
3271 This macro need not do anything if the argument in question was passed on
3272 the stack. The compiler knows how to track the amount of stack space used
3273 for arguments without any special help. */
3274
3275 void
3276 frv_function_arg_advance (CUMULATIVE_ARGS *cum,
3277 enum machine_mode mode,
3278 tree type ATTRIBUTE_UNUSED,
3279 int named)
3280 {
3281 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3282 int bytes = GET_MODE_SIZE (xmode);
3283 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3284 int arg_num = *cum;
3285
3286 *cum = arg_num + words;
3287
3288 if (TARGET_DEBUG_ARG)
3289 fprintf (stderr,
3290 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3291 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
3292 }
3293
3294 \f
3295 /* A C expression for the number of words, at the beginning of an argument,
3296     that must be put in registers.  The value must be zero for arguments that are
3297 passed entirely in registers or that are entirely pushed on the stack.
3298
3299 On some machines, certain arguments must be passed partially in registers
3300 and partially in memory. On these machines, typically the first N words of
3301 arguments are passed in registers, and the rest on the stack. If a
3302 multi-word argument (a `double' or a structure) crosses that boundary, its
3303 first few words must be passed in registers and the rest must be pushed.
3304 This macro tells the compiler when this occurs, and how many of the words
3305 should go in registers.
3306
3307 `FUNCTION_ARG' for these arguments should return the first register to be
3308 used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for
3309 the called function. */
3310
3311 static int
3312 frv_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3313 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
3314 {
3315 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3316 int bytes = GET_MODE_SIZE (xmode);
3317 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3318 int arg_num = *cum;
3319 int ret;
3320
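  /* If the argument starts in a register but would extend past the last
     argument register, only the words that fit in registers are passed
     there; the rest goes on the stack.  */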
3321 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3322 ? LAST_ARG_REGNUM - arg_num + 1
3323 : 0);
3324 ret *= UNITS_PER_WORD;
3325
3326 if (TARGET_DEBUG_ARG && ret)
3327 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3328
3329 return ret;
3330 }
3331
3332 \f
3333 /* Implements TARGET_FUNCTION_VALUE. */
3334
3335 static rtx
3336 frv_function_value (const_tree valtype,
3337 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3338 bool outgoing ATTRIBUTE_UNUSED)
3339 {
3340 return gen_rtx_REG (TYPE_MODE (valtype), RETURN_VALUE_REGNUM);
3341 }
3342
3343 \f
3344 /* Implements TARGET_LIBCALL_VALUE. */
3345
3346 static rtx
3347 frv_libcall_value (enum machine_mode mode,
3348 const_rtx fun ATTRIBUTE_UNUSED)
3349 {
3350 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3351 }
3352
3353 \f
3354 /* Implements FUNCTION_VALUE_REGNO_P. */
3355
3356 bool
3357 frv_function_value_regno_p (const unsigned int regno)
3358 {
3359 return (regno == RETURN_VALUE_REGNUM);
3360 }
3361 \f
3362 /* Return true if a register is ok to use as a base or index register. */
3363
3364 static FRV_INLINE int
3365 frv_regno_ok_for_base_p (int regno, int strict_p)
3366 {
3367 if (GPR_P (regno))
3368 return TRUE;
3369
3370 if (strict_p)
3371 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3372
3373 if (regno == ARG_POINTER_REGNUM)
3374 return TRUE;
3375
3376 return (regno >= FIRST_PSEUDO_REGISTER);
3377 }
3378
3379 \f
3380 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3381 RTX) is a legitimate memory address on the target machine for a memory
3382 operand of mode MODE.
3383
3384 It usually pays to define several simpler macros to serve as subroutines for
3385 this one. Otherwise it may be too complicated to understand.
3386
3387 This macro must exist in two variants: a strict variant and a non-strict
3388 one. The strict variant is used in the reload pass. It must be defined so
3389 that any pseudo-register that has not been allocated a hard register is
3390 considered a memory reference. In contexts where some kind of register is
3391 required, a pseudo-register with no hard register must be rejected.
3392
3393 The non-strict variant is used in other passes. It must be defined to
3394 accept all pseudo-registers in every context where some kind of register is
3395 required.
3396
3397 Compiler source files that want to use the strict variant of this macro
3398 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3399 conditional to define the strict variant in that case and the non-strict
3400 variant otherwise.
3401
3402 Normally, constant addresses which are the sum of a `symbol_ref' and an
3403 integer are stored inside a `const' RTX to mark them as constant.
3404 Therefore, there is no need to recognize such sums specifically as
3405 legitimate addresses. Normally you would simply recognize any `const' as
3406 legitimate.
3407
3408 Usually `TARGET_PRINT_OPERAND_ADDRESS' is not prepared to handle
3409 constant sums that are not marked with `const'. It assumes that a
3410 naked `plus' indicates indexing. If so, then you *must* reject such
3411 naked constant sums as illegitimate addresses, so that none of them
3412 will be given to `TARGET_PRINT_OPERAND_ADDRESS'. */
3413
3414 int
3415 frv_legitimate_address_p_1 (enum machine_mode mode,
3416 rtx x,
3417 int strict_p,
3418 int condexec_p,
3419 int allow_double_reg_p)
3420 {
3421 rtx x0, x1;
3422 int ret = 0;
3423 HOST_WIDE_INT value;
3424 unsigned regno0;
3425
3426 if (FRV_SYMBOL_REF_TLS_P (x))
3427 return 0;
3428
3429 switch (GET_CODE (x))
3430 {
3431 default:
3432 break;
3433
3434 case SUBREG:
3435 x = SUBREG_REG (x);
3436 if (GET_CODE (x) != REG)
3437 break;
3438
3439 /* Fall through. */
3440
3441 case REG:
3442 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3443 break;
3444
3445 case PRE_MODIFY:
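      /* The only form accepted here is (pre_modify reg (plus reg reg)),
	 where the first operand of the PLUS is the register being
	 modified.  */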
3446 x0 = XEXP (x, 0);
3447 x1 = XEXP (x, 1);
3448 if (GET_CODE (x0) != REG
3449 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3450 || GET_CODE (x1) != PLUS
3451 || ! rtx_equal_p (x0, XEXP (x1, 0))
3452 || GET_CODE (XEXP (x1, 1)) != REG
3453 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3454 break;
3455
3456 ret = 1;
3457 break;
3458
3459 case CONST_INT:
3460 /* 12-bit immediate */
3461 if (condexec_p)
3462 ret = FALSE;
3463 else
3464 {
3465 ret = IN_RANGE_P (INTVAL (x), -2048, 2047);
3466
3467 /* If we can't use load/store double operations, make sure we can
3468 address the second word. */
3469 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3470 ret = IN_RANGE_P (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3471 -2048, 2047);
3472 }
3473 break;
3474
3475 case PLUS:
3476 x0 = XEXP (x, 0);
3477 x1 = XEXP (x, 1);
3478
3479 if (GET_CODE (x0) == SUBREG)
3480 x0 = SUBREG_REG (x0);
3481
3482 if (GET_CODE (x0) != REG)
3483 break;
3484
3485 regno0 = REGNO (x0);
3486 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3487 break;
3488
3489 switch (GET_CODE (x1))
3490 {
3491 default:
3492 break;
3493
3494 case SUBREG:
3495 x1 = SUBREG_REG (x1);
3496 if (GET_CODE (x1) != REG)
3497 break;
3498
3499 /* Fall through. */
3500
3501 case REG:
3502 /* Do not allow reg+reg addressing for modes > 1 word if we
3503 can't depend on having move double instructions. */
3504 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3505 ret = FALSE;
3506 else
3507 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3508 break;
3509
3510 case CONST_INT:
3511 /* 12-bit immediate */
3512 if (condexec_p)
3513 ret = FALSE;
3514 else
3515 {
3516 value = INTVAL (x1);
3517 ret = IN_RANGE_P (value, -2048, 2047);
3518
3519 /* If we can't use load/store double operations, make sure we can
3520 address the second word. */
3521 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3522 ret = IN_RANGE_P (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3523 }
3524 break;
3525
3526 case CONST:
3527 if (!condexec_p && got12_operand (x1, VOIDmode))
3528 ret = TRUE;
3529 break;
3530
3531 }
3532 break;
3533 }
3534
3535 if (TARGET_DEBUG_ADDR)
3536 {
3537 fprintf (stderr, "\n========== legitimate_address_p, mode = %s, result = %d, addresses are %sstrict%s\n",
3538 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3539 (condexec_p) ? ", inside conditional code" : "");
3540 debug_rtx (x);
3541 }
3542
3543 return ret;
3544 }
3545
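/* Wrapper around frv_legitimate_address_p_1 for the generic legitimate-address
check: conditional execution and double-register addressing are disallowed
through this entry point. */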
3546 bool
3547 frv_legitimate_address_p (enum machine_mode mode, rtx x, bool strict_p)
3548 {
3549 return frv_legitimate_address_p_1 (mode, x, strict_p, FALSE, FALSE);
3550 }
3551
3552 /* Given ADDR, a TLS symbol, generate code for an inlined TLS PLT call (a call through #gettlsoff) and return the register holding the result. */
3553 static rtx
3554 gen_inlined_tls_plt (rtx addr)
3555 {
3556 rtx retval, dest;
3557 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3558
3559
3560 dest = gen_reg_rtx (DImode);
3561
3562 if (flag_pic == 1)
3563 {
3564 /*
3565 -fpic version:
3566
3567 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3568 calll #gettlsoff(ADDR)@(gr8, gr0)
3569 */
3570 emit_insn (gen_tls_lddi (dest, addr, picreg));
3571 }
3572 else
3573 {
3574 /*
3575 -fPIC version:
3576
3577 sethi.p #gottlsdeschi(ADDR), gr8
3578 setlo #gottlsdesclo(ADDR), gr8
3579 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3580 calll #gettlsoff(ADDR)@(gr8, gr0)
3581 */
3582 rtx reguse = gen_reg_rtx (Pmode);
3583 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3584 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3585 }
3586
3587 retval = gen_reg_rtx (Pmode);
3588 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3589 return retval;
3590 }
3591
3592 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3593 the destination address. */
3594 static rtx
3595 gen_tlsmoff (rtx addr, rtx reg)
3596 {
3597 rtx dest = gen_reg_rtx (Pmode);
3598
3599 if (TARGET_BIG_TLS)
3600 {
3601 /* sethi.p #tlsmoffhi(x), grA
3602 setlo #tlsmofflo(x), grA
3603 */
3604 dest = gen_reg_rtx (Pmode);
3605 emit_insn (gen_tlsoff_hilo (dest, addr,
3606 GEN_INT (R_FRV_TLSMOFFHI)));
3607 dest = gen_rtx_PLUS (Pmode, dest, reg);
3608 }
3609 else
3610 {
3611 /* addi grB, #tlsmoff12(x), grC
3612 -or-
3613 ld/st @(grB, #tlsmoff12(x)), grC
3614 */
3615 dest = gen_reg_rtx (Pmode);
3616 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3617 GEN_INT (R_FRV_TLSMOFF12)));
3618 }
3619 return dest;
3620 }
3621
3622 /* Generate code for a TLS address. */
3623 static rtx
3624 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3625 {
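/* GR29 holds the thread pointer (the TLS offsets computed below are added to
it) and GR15 is the FDPIC/GOT register (FDPIC_REG); the hard-coded numbers
match the gr15-based sequences sketched in the comments below. */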
3626 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3627 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3628
3629 switch (model)
3630 {
3631 case TLS_MODEL_INITIAL_EXEC:
3632 if (flag_pic == 1)
3633 {
3634 /* -fpic version.
3635 ldi @(gr15, #gottlsoff12(x)), gr5
3636 */
3637 dest = gen_reg_rtx (Pmode);
3638 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3639 dest = gen_rtx_PLUS (Pmode, tp, dest);
3640 }
3641 else
3642 {
3643 /* -fPIC or anything else.
3644
3645 sethi.p #gottlsoffhi(x), gr14
3646 setlo #gottlsofflo(x), gr14
3647 ld #tlsoff(x)@(gr15, gr14), gr9
3648 */
3649 rtx tmp = gen_reg_rtx (Pmode);
3650 dest = gen_reg_rtx (Pmode);
3651 emit_insn (gen_tlsoff_hilo (tmp, addr,
3652 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3653
3654 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3655 dest = gen_rtx_PLUS (Pmode, tp, dest);
3656 }
3657 break;
3658 case TLS_MODEL_LOCAL_DYNAMIC:
3659 {
3660 rtx reg, retval;
3661
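/* For local dynamic TLS, resolve offset 0 to obtain the module's TLS base;
per-symbol offsets are then applied with TLSMOFF relocations via
gen_tlsmoff below. */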
3662 if (TARGET_INLINE_PLT)
3663 retval = gen_inlined_tls_plt (GEN_INT (0));
3664 else
3665 {
3666 /* call #gettlsoff(0) */
3667 retval = gen_reg_rtx (Pmode);
3668 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3669 }
3670
3671 reg = gen_reg_rtx (Pmode);
3672 emit_insn (gen_rtx_SET (VOIDmode, reg,
3673 gen_rtx_PLUS (Pmode,
3674 retval, tp)));
3675
3676 dest = gen_tlsmoff (addr, reg);
3677
3678 /*
3679 dest = gen_reg_rtx (Pmode);
3680 emit_insn (gen_tlsoff_hilo (dest, addr,
3681 GEN_INT (R_FRV_TLSMOFFHI)));
3682 dest = gen_rtx_PLUS (Pmode, dest, reg);
3683 */
3684 break;
3685 }
3686 case TLS_MODEL_LOCAL_EXEC:
3687 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3688 break;
3689 case TLS_MODEL_GLOBAL_DYNAMIC:
3690 {
3691 rtx retval;
3692
3693 if (TARGET_INLINE_PLT)
3694 retval = gen_inlined_tls_plt (addr);
3695 else
3696 {
3697 /* call #gettlsoff(x) */
3698 retval = gen_reg_rtx (Pmode);
3699 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3700 }
3701 dest = gen_rtx_PLUS (Pmode, retval, tp);
3702 break;
3703 }
3704 default:
3705 gcc_unreachable ();
3706 }
3707
3708 return dest;
3709 }
3710
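/* Legitimize address X: TLS SYMBOL_REFs need the special sequences above;
every other address is returned unchanged. */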
3711 rtx
3712 frv_legitimize_address (rtx x,
3713 rtx oldx ATTRIBUTE_UNUSED,
3714 enum machine_mode mode ATTRIBUTE_UNUSED)
3715 {
3716 if (GET_CODE (x) == SYMBOL_REF)
3717 {
3718 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3719 if (model != 0)
3720 return frv_legitimize_tls_address (x, model);
3721 }
3722
3723 return x;
3724 }
3725 \f
3726 /* Test whether a local function descriptor is canonical, i.e.,
3727 whether we can use FUNCDESC_GOTOFF to compute the address of the
3728 function. */
3729
3730 static bool
3731 frv_local_funcdesc_p (rtx fnx)
3732 {
3733 tree fn;
3734 enum symbol_visibility vis;
3735 bool ret;
3736
3737 if (! SYMBOL_REF_LOCAL_P (fnx))
3738 return FALSE;
3739
3740 fn = SYMBOL_REF_DECL (fnx);
3741
3742 if (! fn)
3743 return FALSE;
3744
3745 vis = DECL_VISIBILITY (fn);
3746
3747 if (vis == VISIBILITY_PROTECTED)
3748 /* Private function descriptors for protected functions are not
3749 canonical. Temporarily change the visibility to global. */
3750 vis = VISIBILITY_DEFAULT;
3751 else if (flag_shlib)
3752 /* If we're already compiling for a shared library (that, unlike
3753 executables, can't assume that the existence of a definition
3754 implies local binding), we can skip the re-testing. */
3755 return TRUE;
3756
3757 ret = default_binds_local_p_1 (fn, flag_pic);
3758
3759 DECL_VISIBILITY (fn) = vis;
3760
3761 return ret;
3762 }
3763
3764 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3765 register. */
3766
3767 rtx
3768 frv_gen_GPsym2reg (rtx dest, rtx src)
3769 {
3770 tree gp = get_identifier ("_gp");
3771 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3772
3773 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3774 }
3775
3776 static const char *
3777 unspec_got_name (int i)
3778 {
3779 switch (i)
3780 {
3781 case R_FRV_GOT12: return "got12";
3782 case R_FRV_GOTHI: return "gothi";
3783 case R_FRV_GOTLO: return "gotlo";
3784 case R_FRV_FUNCDESC: return "funcdesc";
3785 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3786 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3787 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3788 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3789 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3790 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3791 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3792 case R_FRV_GOTOFF12: return "gotoff12";
3793 case R_FRV_GOTOFFHI: return "gotoffhi";
3794 case R_FRV_GOTOFFLO: return "gotofflo";
3795 case R_FRV_GPREL12: return "gprel12";
3796 case R_FRV_GPRELHI: return "gprelhi";
3797 case R_FRV_GPRELLO: return "gprello";
3798 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3799 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3800 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3801 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3802 case R_FRV_TLSMOFF12: return "tlsmoff12";
3803 case R_FRV_TLSDESCHI: return "tlsdeschi";
3804 case R_FRV_TLSDESCLO: return "tlsdesclo";
3805 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3806 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3807 default: gcc_unreachable ();
3808 }
3809 }
3810
3811 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3812 is added inside the relocation operator. */
3813
3814 static void
3815 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3816 {
3817 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3818 output_addr_const (stream, plus_constant (unspec->symbol, unspec->offset));
3819 fputs (")", stream);
3820 }
3821
3822 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3823 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3824 otherwise return ORIG_X. */
3825
3826 rtx
3827 frv_find_base_term (rtx x)
3828 {
3829 struct frv_unspec unspec;
3830
3831 if (frv_const_unspec_p (x, &unspec)
3832 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3833 return plus_constant (unspec.symbol, unspec.offset);
3834
3835 return x;
3836 }
3837
3838 /* Return 1 if operand is a valid FRV address. CONDEXEC_P is true if
3839 the operand is used by a predicated instruction. */
3840
3841 int
3842 frv_legitimate_memory_operand (rtx op, enum machine_mode mode, int condexec_p)
3843 {
3844 return ((GET_MODE (op) == mode || mode == VOIDmode)
3845 && GET_CODE (op) == MEM
3846 && frv_legitimate_address_p_1 (mode, XEXP (op, 0),
3847 reload_completed, condexec_p, FALSE));
3848 }
3849
3850 void
3851 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3852 {
3853 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3854 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3855 rtx c, rvrtx = 0;
3856 rtx addr;
3857
3858 if (ret_value)
3859 {
3860 rvrtx = operands[0];
3861 operands++;
3862 }
3863
3864 addr = XEXP (operands[0], 0);
3865
3866 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3867 any calls that would involve a PLT, but can't tell, since we
3868 don't know whether an extern function is going to be provided by
3869 a separate translation unit or imported from a separate module.
3870 When compiling for shared libraries, if the function has default
3871 visibility, we assume it's overridable, so we inline the PLT, but
3872 for executables, we don't really have a way to make a good
3873 decision: a function is as likely to be imported from a shared
3874 library as it is to be defined in the executable itself. We
3875 assume executables will get global functions defined locally,
3876 whereas shared libraries will have them potentially overridden,
3877 so we only inline PLTs when compiling for shared libraries.
3878
3879 In order to mark a function as local to a shared library, any
3880 non-default visibility attribute suffices. Unfortunately,
3881 there's no simple way to tag a function declaration as ``in a
3882 different module'', which we could then use to trigger PLT
3883 inlining on executables. There's -minline-plt, but it affects
3884 all external functions, so one would have to also mark function
3885 declarations available in the same module with non-default
3886 visibility, which is advantageous in itself. */
3887 if (GET_CODE (addr) == SYMBOL_REF
3888 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3889 || sibcall))
3890 {
3891 rtx x, dest;
3892 dest = gen_reg_rtx (SImode);
3893 if (flag_pic != 1)
3894 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3895 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3896 else
3897 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3898 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3899 emit_insn (x);
3900 crtl->uses_pic_offset_table = TRUE;
3901 addr = dest;
3902 }
3903 else if (GET_CODE (addr) == SYMBOL_REF)
3904 {
3905 /* These are always either local, or handled through a local
3906 PLT. */
3907 if (ret_value)
3908 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3909 operands[2], picreg, lr);
3910 else
3911 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3912 emit_call_insn (c);
3913 return;
3914 }
3915 else if (! ldd_address_operand (addr, Pmode))
3916 addr = force_reg (Pmode, addr);
3917
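/* ADDR now points at a function descriptor, either computed above or given
as an FDPIC function pointer; load its two words (the entry point and the
callee's FDPIC value) with a single ldd into a DImode register pair. */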
3918 picreg = gen_reg_rtx (DImode);
3919 emit_insn (gen_movdi_ldd (picreg, addr));
3920
3921 if (sibcall && ret_value)
3922 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3923 else if (sibcall)
3924 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3925 else if (ret_value)
3926 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3927 else
3928 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3929 emit_call_insn (c);
3930 }
3931 \f
3932 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3933 process these separately from any offsets, such that we add any
3934 offsets to the function descriptor (the actual pointer), not to the
3935 function address. */
3936
3937 static bool
3938 frv_function_symbol_referenced_p (rtx x)
3939 {
3940 const char *format;
3941 int length;
3942 int j;
3943
3944 if (GET_CODE (x) == SYMBOL_REF)
3945 return SYMBOL_REF_FUNCTION_P (x);
3946
3947 length = GET_RTX_LENGTH (GET_CODE (x));
3948 format = GET_RTX_FORMAT (GET_CODE (x));
3949
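/* Walk X's operands according to its RTL format string: 'e' is a single
subexpression, 'E' and 'V' are vectors of subexpressions; nothing else
needs to be scanned here. */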
3950 for (j = 0; j < length; ++j)
3951 {
3952 switch (format[j])
3953 {
3954 case 'e':
3955 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3956 return TRUE;
3957 break;
3958
3959 case 'V':
3960 case 'E':
3961 if (XVEC (x, j) != 0)
3962 {
3963 int k;
3964 for (k = 0; k < XVECLEN (x, j); ++k)
3965 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3966 return TRUE;
3967 }
3968 break;
3969
3970 default:
3971 /* Nothing to do. */
3972 break;
3973 }
3974 }
3975
3976 return FALSE;
3977 }
3978
3979 /* Return true if the memory operand is one that can be conditionally
3980 executed. */
3981
3982 int
3983 condexec_memory_operand (rtx op, enum machine_mode mode)
3984 {
3985 enum machine_mode op_mode = GET_MODE (op);
3986 rtx addr;
3987
3988 if (mode != VOIDmode && op_mode != mode)
3989 return FALSE;
3990
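/* Only single-word (or narrower) accesses have conditional load/store
forms, so wider modes are rejected outright. */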
3991 switch (op_mode)
3992 {
3993 default:
3994 return FALSE;
3995
3996 case QImode:
3997 case HImode:
3998 case SImode:
3999 case SFmode:
4000 break;
4001 }
4002
4003 if (GET_CODE (op) != MEM)
4004 return FALSE;
4005
4006 addr = XEXP (op, 0);
4007 return frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE);
4008 }
4009 \f
4010 /* Return true if the bare return instruction can be used outside of the
4011 epilogue code. On FRV, we only do it if there was no stack allocation. */
4012
4013 int
4014 direct_return_p (void)
4015 {
4016 frv_stack_t *info;
4017
4018 if (!reload_completed)
4019 return FALSE;
4020
4021 info = frv_stack_info ();
4022 return (info->total_size == 0);
4023 }
4024
4025 \f
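/* Emit a move of SRC to DEST in MODE, first legitimizing TLS symbols and,
for the non-SImode cases, forcing awkward source operands into registers. */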
4026 void
4027 frv_emit_move (enum machine_mode mode, rtx dest, rtx src)
4028 {
4029 if (GET_CODE (src) == SYMBOL_REF)
4030 {
4031 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
4032 if (model != 0)
4033 src = frv_legitimize_tls_address (src, model);
4034 }
4035
4036 switch (mode)
4037 {
4038 case SImode:
4039 if (frv_emit_movsi (dest, src))
4040 return;
4041 break;
4042
4043 case QImode:
4044 case HImode:
4045 case DImode:
4046 case SFmode:
4047 case DFmode:
4048 if (!reload_in_progress
4049 && !reload_completed
4050 && !register_operand (dest, mode)
4051 && !reg_or_0_operand (src, mode))
4052 src = copy_to_mode_reg (mode, src);
4053 break;
4054
4055 default:
4056 gcc_unreachable ();
4057 }
4058
4059 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
4060 }
4061
4062 /* Emit code to handle a MOVSI, adding in the small data register or PIC
4063 register if needed to load up addresses. Return TRUE if the appropriate
4064 instructions are emitted. */
4065
4066 int
4067 frv_emit_movsi (rtx dest, rtx src)
4068 {
4069 int base_regno = -1;
4070 int unspec = 0;
4071 rtx sym = src;
4072 struct frv_unspec old_unspec;
4073
4074 if (!reload_in_progress
4075 && !reload_completed
4076 && !register_operand (dest, SImode)
4077 && (!reg_or_0_operand (src, SImode)
4078 /* Virtual registers will almost always be replaced by an
4079 add instruction, so expose this to CSE by copying to
4080 an intermediate register. */
4081 || (GET_CODE (src) == REG
4082 && IN_RANGE_P (REGNO (src),
4083 FIRST_VIRTUAL_REGISTER,
4084 LAST_VIRTUAL_REGISTER))))
4085 {
4086 emit_insn (gen_rtx_SET (VOIDmode, dest, copy_to_mode_reg (SImode, src)));
4087 return TRUE;
4088 }
4089
4090 /* Explicitly add in the PIC or small data register if needed. */
4091 switch (GET_CODE (src))
4092 {
4093 default:
4094 break;
4095
4096 case LABEL_REF:
4097 handle_label:
4098 if (TARGET_FDPIC)
4099 {
4100 /* Using GPREL12, we use a single GOT entry for all symbols
4101 in read-only sections, but trade sequences such as:
4102
4103 sethi #gothi(label), gr#
4104 setlo #gotlo(label), gr#
4105 ld @(gr15,gr#), gr#
4106
4107 for
4108
4109 ld @(gr15,#got12(_gp)), gr#
4110 sethi #gprelhi(label), gr##
4111 setlo #gprello(label), gr##
4112 add gr#, gr##, gr##
4113
4114 We may often be able to share gr# for multiple
4115 computations of GPREL addresses, and we may often fold
4116 the final add into the pair of registers of a load or
4117 store instruction, so it's often profitable. Even when
4118 optimizing for size, we're trading a GOT entry for an
4119 additional instruction, which trades GOT space
4120 (read-write) for code size (read-only, shareable), as
4121 long as the symbol is not used in more than two different
4122 locations.
4123
4124 With -fpie/-fpic, we'd be trading a single load for a
4125 sequence of 4 instructions, because the offset of the
4126 label can't be assumed to be addressable with 12 bits, so
4127 we don't do this. */
4128 if (TARGET_GPREL_RO)
4129 unspec = R_FRV_GPREL12;
4130 else
4131 unspec = R_FRV_GOT12;
4132 }
4133 else if (flag_pic)
4134 base_regno = PIC_REGNO;
4135
4136 break;
4137
4138 case CONST:
4139 if (frv_const_unspec_p (src, &old_unspec))
4140 break;
4141
4142 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4143 {
4144 handle_whatever:
4145 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4146 emit_move_insn (dest, src);
4147 return TRUE;
4148 }
4149 else
4150 {
4151 sym = XEXP (sym, 0);
4152 if (GET_CODE (sym) == PLUS
4153 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4154 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4155 sym = XEXP (sym, 0);
4156 if (GET_CODE (sym) == SYMBOL_REF)
4157 goto handle_sym;
4158 else if (GET_CODE (sym) == LABEL_REF)
4159 goto handle_label;
4160 else
4161 goto handle_whatever;
4162 }
4163 break;
4164
4165 case SYMBOL_REF:
4166 handle_sym:
4167 if (TARGET_FDPIC)
4168 {
4169 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4170
4171 if (model != 0)
4172 {
4173 src = frv_legitimize_tls_address (src, model);
4174 emit_move_insn (dest, src);
4175 return TRUE;
4176 }
4177
4178 if (SYMBOL_REF_FUNCTION_P (sym))
4179 {
4180 if (frv_local_funcdesc_p (sym))
4181 unspec = R_FRV_FUNCDESC_GOTOFF12;
4182 else
4183 unspec = R_FRV_FUNCDESC_GOT12;
4184 }
4185 else
4186 {
4187 if (CONSTANT_POOL_ADDRESS_P (sym))
4188 switch (GET_CODE (get_pool_constant (sym)))
4189 {
4190 case CONST:
4191 case SYMBOL_REF:
4192 case LABEL_REF:
4193 if (flag_pic)
4194 {
4195 unspec = R_FRV_GOTOFF12;
4196 break;
4197 }
4198 /* Fall through. */
4199 default:
4200 if (TARGET_GPREL_RO)
4201 unspec = R_FRV_GPREL12;
4202 else
4203 unspec = R_FRV_GOT12;
4204 break;
4205 }
4206 else if (SYMBOL_REF_LOCAL_P (sym)
4207 && !SYMBOL_REF_EXTERNAL_P (sym)
4208 && SYMBOL_REF_DECL (sym)
4209 && (!DECL_P (SYMBOL_REF_DECL (sym))
4210 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4211 {
4212 tree decl = SYMBOL_REF_DECL (sym);
4213 tree init = TREE_CODE (decl) == VAR_DECL
4214 ? DECL_INITIAL (decl)
4215 : TREE_CODE (decl) == CONSTRUCTOR
4216 ? decl : 0;
4217 int reloc = 0;
4218 bool named_section, readonly;
4219
4220 if (init && init != error_mark_node)
4221 reloc = compute_reloc_for_constant (init);
4222
4223 named_section = TREE_CODE (decl) == VAR_DECL
4224 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4225 readonly = decl_readonly_section (decl, reloc);
4226
4227 if (named_section)
4228 unspec = R_FRV_GOT12;
4229 else if (!readonly)
4230 unspec = R_FRV_GOTOFF12;
4231 else if (readonly && TARGET_GPREL_RO)
4232 unspec = R_FRV_GPREL12;
4233 else
4234 unspec = R_FRV_GOT12;
4235 }
4236 else
4237 unspec = R_FRV_GOT12;
4238 }
4239 }
4240
4241 else if (SYMBOL_REF_SMALL_P (sym))
4242 base_regno = SDA_BASE_REG;
4243
4244 else if (flag_pic)
4245 base_regno = PIC_REGNO;
4246
4247 break;
4248 }
4249
4250 if (base_regno >= 0)
4251 {
4252 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4253 emit_insn (gen_symGOTOFF2reg (dest, src,
4254 gen_rtx_REG (Pmode, base_regno),
4255 GEN_INT (R_FRV_GPREL12)));
4256 else
4257 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4258 gen_rtx_REG (Pmode, base_regno),
4259 GEN_INT (R_FRV_GPREL12)));
4260 if (base_regno == PIC_REGNO)
4261 crtl->uses_pic_offset_table = TRUE;
4262 return TRUE;
4263 }
4264
4265 if (unspec)
4266 {
4267 rtx x;
4268
4269 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4270 new uses of it once reload has begun. */
4271 gcc_assert (!reload_in_progress && !reload_completed);
4272
4273 switch (unspec)
4274 {
4275 case R_FRV_GOTOFF12:
4276 if (!frv_small_data_reloc_p (sym, unspec))
4277 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4278 GEN_INT (unspec));
4279 else
4280 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4281 break;
4282 case R_FRV_GPREL12:
4283 if (!frv_small_data_reloc_p (sym, unspec))
4284 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4285 GEN_INT (unspec));
4286 else
4287 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4288 break;
4289 case R_FRV_FUNCDESC_GOTOFF12:
4290 if (flag_pic != 1)
4291 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4292 GEN_INT (unspec));
4293 else
4294 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4295 break;
4296 default:
4297 if (flag_pic != 1)
4298 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4299 GEN_INT (unspec));
4300 else
4301 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4302 break;
4303 }
4304 emit_insn (x);
4305 crtl->uses_pic_offset_table = TRUE;
4306 return TRUE;
4307 }
4308
4309
4310 return FALSE;
4311 }
4312
4313 \f
4314 /* Return a string to output a single word move. */
4315
4316 const char *
4317 output_move_single (rtx operands[], rtx insn)
4318 {
4319 rtx dest = operands[0];
4320 rtx src = operands[1];
4321
4322 if (GET_CODE (dest) == REG)
4323 {
4324 int dest_regno = REGNO (dest);
4325 enum machine_mode mode = GET_MODE (dest);
4326
4327 if (GPR_P (dest_regno))
4328 {
4329 if (GET_CODE (src) == REG)
4330 {
4331 /* gpr <- some sort of register */
4332 int src_regno = REGNO (src);
4333
4334 if (GPR_P (src_regno))
4335 return "mov %1, %0";
4336
4337 else if (FPR_P (src_regno))
4338 return "movfg %1, %0";
4339
4340 else if (SPR_P (src_regno))
4341 return "movsg %1, %0";
4342 }
4343
4344 else if (GET_CODE (src) == MEM)
4345 {
4346 /* gpr <- memory */
4347 switch (mode)
4348 {
4349 default:
4350 break;
4351
4352 case QImode:
4353 return "ldsb%I1%U1 %M1,%0";
4354
4355 case HImode:
4356 return "ldsh%I1%U1 %M1,%0";
4357
4358 case SImode:
4359 case SFmode:
4360 return "ld%I1%U1 %M1, %0";
4361 }
4362 }
4363
4364 else if (GET_CODE (src) == CONST_INT
4365 || GET_CODE (src) == CONST_DOUBLE)
4366 {
4367 /* gpr <- integer/floating constant */
4368 HOST_WIDE_INT value;
4369
4370 if (GET_CODE (src) == CONST_INT)
4371 value = INTVAL (src);
4372
4373 else if (mode == SFmode)
4374 {
4375 REAL_VALUE_TYPE rv;
4376 long l;
4377
4378 REAL_VALUE_FROM_CONST_DOUBLE (rv, src);
4379 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
4380 value = l;
4381 }
4382
4383 else
4384 value = CONST_DOUBLE_LOW (src);
4385
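/* setlos handles a 16-bit signed immediate directly; wider constants return
"#" so that the insn is split (into a sethi/setlo pair) instead. */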
4386 if (IN_RANGE_P (value, -32768, 32767))
4387 return "setlos %1, %0";
4388
4389 return "#";
4390 }
4391
4392 else if (GET_CODE (src) == SYMBOL_REF
4393 || GET_CODE (src) == LABEL_REF
4394 || GET_CODE (src) == CONST)
4395 {
4396 return "#";
4397 }
4398 }
4399
4400 else if (FPR_P (dest_regno))
4401 {
4402 if (GET_CODE (src) == REG)
4403 {
4404 /* fpr <- some sort of register */
4405 int src_regno = REGNO (src);
4406
4407 if (GPR_P (src_regno))
4408 return "movgf %1, %0";
4409
4410 else if (FPR_P (src_regno))
4411 {
4412 if (TARGET_HARD_FLOAT)
4413 return "fmovs %1, %0";
4414 else
4415 return "mor %1, %1, %0";
4416 }
4417 }
4418
4419 else if (GET_CODE (src) == MEM)
4420 {
4421 /* fpr <- memory */
4422 switch (mode)
4423 {
4424 default:
4425 break;
4426
4427 case QImode:
4428 return "ldbf%I1%U1 %M1,%0";
4429
4430 case HImode:
4431 return "ldhf%I1%U1 %M1,%0";
4432
4433 case SImode:
4434 case SFmode:
4435 return "ldf%I1%U1 %M1, %0";
4436 }
4437 }
4438
4439 else if (ZERO_P (src))
4440 return "movgf %., %0";
4441 }
4442
4443 else if (SPR_P (dest_regno))
4444 {
4445 if (GET_CODE (src) == REG)
4446 {
4447 /* spr <- some sort of register */
4448 int src_regno = REGNO (src);
4449
4450 if (GPR_P (src_regno))
4451 return "movgs %1, %0";
4452 }
4453 else if (ZERO_P (src))
4454 return "movgs %., %0";
4455 }
4456 }
4457
4458 else if (GET_CODE (dest) == MEM)
4459 {
4460 if (GET_CODE (src) == REG)
4461 {
4462 int src_regno = REGNO (src);
4463 enum machine_mode mode = GET_MODE (dest);
4464
4465 if (GPR_P (src_regno))
4466 {
4467 switch (mode)
4468 {
4469 default:
4470 break;
4471
4472 case QImode:
4473 return "stb%I0%U0 %1, %M0";
4474
4475 case HImode:
4476 return "sth%I0%U0 %1, %M0";
4477
4478 case SImode:
4479 case SFmode:
4480 return "st%I0%U0 %1, %M0";
4481 }
4482 }
4483
4484 else if (FPR_P (src_regno))
4485 {
4486 switch (mode)
4487 {
4488 default:
4489 break;
4490
4491 case QImode:
4492 return "stbf%I0%U0 %1, %M0";
4493
4494 case HImode:
4495 return "sthf%I0%U0 %1, %M0";
4496
4497 case SImode:
4498 case SFmode:
4499 return "stf%I0%U0 %1, %M0";
4500 }
4501 }
4502 }
4503
4504 else if (ZERO_P (src))
4505 {
4506 switch (GET_MODE (dest))
4507 {
4508 default:
4509 break;
4510
4511 case QImode:
4512 return "stb%I0%U0 %., %M0";
4513
4514 case HImode:
4515 return "sth%I0%U0 %., %M0";
4516
4517 case SImode:
4518 case SFmode:
4519 return "st%I0%U0 %., %M0";
4520 }
4521 }
4522 }
4523
4524 fatal_insn ("bad output_move_single operand", insn);
4525 return "";
4526 }
4527
4528 \f
4529 /* Return a string to output a double word move. */
4530
4531 const char *
4532 output_move_double (rtx operands[], rtx insn)
4533 {
4534 rtx dest = operands[0];
4535 rtx src = operands[1];
4536 enum machine_mode mode = GET_MODE (dest);
4537
4538 if (GET_CODE (dest) == REG)
4539 {
4540 int dest_regno = REGNO (dest);
4541
4542 if (GPR_P (dest_regno))
4543 {
4544 if (GET_CODE (src) == REG)
4545 {
4546 /* gpr <- some sort of register */
4547 int src_regno = REGNO (src);
4548
4549 if (GPR_P (src_regno))
4550 return "#";
4551
4552 else if (FPR_P (src_regno))
4553 {
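/* movfgd moves an even/odd FPR pair into an even/odd GPR pair in one insn;
if either operand is odd-aligned the move must be split ("#"). */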
4554 if (((dest_regno - GPR_FIRST) & 1) == 0
4555 && ((src_regno - FPR_FIRST) & 1) == 0)
4556 return "movfgd %1, %0";
4557
4558 return "#";
4559 }
4560 }
4561
4562 else if (GET_CODE (src) == MEM)
4563 {
4564 /* gpr <- memory */
4565 if (dbl_memory_one_insn_operand (src, mode))
4566 return "ldd%I1%U1 %M1, %0";
4567
4568 return "#";
4569 }
4570
4571 else if (GET_CODE (src) == CONST_INT
4572 || GET_CODE (src) == CONST_DOUBLE)
4573 return "#";
4574 }
4575
4576 else if (FPR_P (dest_regno))
4577 {
4578 if (GET_CODE (src) == REG)
4579 {
4580 /* fpr <- some sort of register */
4581 int src_regno = REGNO (src);
4582
4583 if (GPR_P (src_regno))
4584 {
4585 if (((dest_regno - FPR_FIRST) & 1) == 0
4586 && ((src_regno - GPR_FIRST) & 1) == 0)
4587 return "movgfd %1, %0";
4588
4589 return "#";
4590 }
4591
4592 else if (FPR_P (src_regno))
4593 {
4594 if (TARGET_DOUBLE
4595 && ((dest_regno - FPR_FIRST) & 1) == 0
4596 && ((src_regno - FPR_FIRST) & 1) == 0)
4597 return "fmovd %1, %0";
4598
4599 return "#";
4600 }
4601 }
4602
4603 else if (GET_CODE (src) == MEM)
4604 {
4605 /* fpr <- memory */
4606 if (dbl_memory_one_insn_operand (src, mode))
4607 return "lddf%I1%U1 %M1, %0";
4608
4609 return "#";
4610 }
4611
4612 else if (ZERO_P (src))
4613 return "#";
4614 }
4615 }
4616
4617 else if (GET_CODE (dest) == MEM)
4618 {
4619 if (GET_CODE (src) == REG)
4620 {
4621 int src_regno = REGNO (src);
4622
4623 if (GPR_P (src_regno))
4624 {
4625 if (((src_regno - GPR_FIRST) & 1) == 0
4626 && dbl_memory_one_insn_operand (dest, mode))
4627 return "std%I0%U0 %1, %M0";
4628
4629 return "#";
4630 }
4631
4632 if (FPR_P (src_regno))
4633 {
4634 if (((src_regno - FPR_FIRST) & 1) == 0
4635 && dbl_memory_one_insn_operand (dest, mode))
4636 return "stdf%I0%U0 %1, %M0";
4637
4638 return "#";
4639 }
4640 }
4641
4642 else if (ZERO_P (src))
4643 {
4644 if (dbl_memory_one_insn_operand (dest, mode))
4645 return "std%I0%U0 %., %M0";
4646
4647 return "#";
4648 }
4649 }
4650
4651 fatal_insn ("bad output_move_double operand", insn);
4652 return "";
4653 }
4654
4655 \f
4656 /* Return a string to output a single word conditional move.
4657 Operand0 -- EQ/NE of ccr register and 0
4658 Operand1 -- CCR register
4659 Operand2 -- destination
4660 Operand3 -- source */
4661
4662 const char *
4663 output_condmove_single (rtx operands[], rtx insn)
4664 {
4665 rtx dest = operands[2];
4666 rtx src = operands[3];
4667
4668 if (GET_CODE (dest) == REG)
4669 {
4670 int dest_regno = REGNO (dest);
4671 enum machine_mode mode = GET_MODE (dest);
4672
4673 if (GPR_P (dest_regno))
4674 {
4675 if (GET_CODE (src) == REG)
4676 {
4677 /* gpr <- some sort of register */
4678 int src_regno = REGNO (src);
4679
4680 if (GPR_P (src_regno))
4681 return "cmov %z3, %2, %1, %e0";
4682
4683 else if (FPR_P (src_regno))
4684 return "cmovfg %3, %2, %1, %e0";
4685 }
4686
4687 else if (GET_CODE (src) == MEM)
4688 {
4689 /* gpr <- memory */
4690 switch (mode)
4691 {
4692 default:
4693 break;
4694
4695 case QImode:
4696 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4697
4698 case HImode:
4699 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4700
4701 case SImode:
4702 case SFmode:
4703 return "cld%I3%U3 %M3, %2, %1, %e0";
4704 }
4705 }
4706
4707 else if (ZERO_P (src))
4708 return "cmov %., %2, %1, %e0";
4709 }
4710
4711 else if (FPR_P (dest_regno))
4712 {
4713 if (GET_CODE (src) == REG)
4714 {
4715 /* fpr <- some sort of register */
4716 int src_regno = REGNO (src);
4717
4718 if (GPR_P (src_regno))
4719 return "cmovgf %3, %2, %1, %e0";
4720
4721 else if (FPR_P (src_regno))
4722 {
4723 if (TARGET_HARD_FLOAT)
4724 return "cfmovs %3,%2,%1,%e0";
4725 else
4726 return "cmor %3, %3, %2, %1, %e0";
4727 }
4728 }
4729
4730 else if (GET_CODE (src) == MEM)
4731 {
4732 /* fpr <- memory */
4733 if (mode == SImode || mode == SFmode)
4734 return "cldf%I3%U3 %M3, %2, %1, %e0";
4735 }
4736
4737 else if (ZERO_P (src))
4738 return "cmovgf %., %2, %1, %e0";
4739 }
4740 }
4741
4742 else if (GET_CODE (dest) == MEM)
4743 {
4744 if (GET_CODE (src) == REG)
4745 {
4746 int src_regno = REGNO (src);
4747 enum machine_mode mode = GET_MODE (dest);
4748
4749 if (GPR_P (src_regno))
4750 {
4751 switch (mode)
4752 {
4753 default:
4754 break;
4755
4756 case QImode:
4757 return "cstb%I2%U2 %3, %M2, %1, %e0";
4758
4759 case HImode:
4760 return "csth%I2%U2 %3, %M2, %1, %e0";
4761
4762 case SImode:
4763 case SFmode:
4764 return "cst%I2%U2 %3, %M2, %1, %e0";
4765 }
4766 }
4767
4768 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4769 return "cstf%I2%U2 %3, %M2, %1, %e0";
4770 }
4771
4772 else if (ZERO_P (src))
4773 {
4774 enum machine_mode mode = GET_MODE (dest);
4775 switch (mode)
4776 {
4777 default:
4778 break;
4779
4780 case QImode:
4781 return "cstb%I2%U2 %., %M2, %1, %e0";
4782
4783 case HImode:
4784 return "csth%I2%U2 %., %M2, %1, %e0";
4785
4786 case SImode:
4787 case SFmode:
4788 return "cst%I2%U2 %., %M2, %1, %e0";
4789 }
4790 }
4791 }
4792
4793 fatal_insn ("bad output_condmove_single operand", insn);
4794 return "";
4795 }
4796
4797 \f
4798 /* Emit the appropriate code to do a comparison, returning the register the
4799 comparison was done in. */
4800
4801 static rtx
4802 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4803 {
4804 enum machine_mode cc_mode;
4805 rtx cc_reg;
4806
4807 /* Floating point doesn't have comparison against a constant. */
4808 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4809 op1 = force_reg (GET_MODE (op0), op1);
4810
4811 /* Possibly disable using anything but a fixed register in order to work
4812 around cse moving comparisons past function calls. */
4813 cc_mode = SELECT_CC_MODE (test, op0, op1);
4814 cc_reg = ((TARGET_ALLOC_CC)
4815 ? gen_reg_rtx (cc_mode)
4816 : gen_rtx_REG (cc_mode,
4817 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4818
4819 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4820 gen_rtx_COMPARE (cc_mode, op0, op1)));
4821
4822 return cc_reg;
4823 }
4824
4825 \f
4826 /* Emit code for a conditional branch.
4827 XXX: I originally wanted to add a clobber of a CCR register to use in
4828 conditional execution, but that confuses the rest of the compiler. */
4829
4830 int
4831 frv_emit_cond_branch (rtx operands[])
4832 {
4833 rtx test_rtx;
4834 rtx label_ref;
4835 rtx if_else;
4836 enum rtx_code test = GET_CODE (operands[0]);
4837 rtx cc_reg = frv_emit_comparison (test, operands[1], operands[2]);
4838 enum machine_mode cc_mode = GET_MODE (cc_reg);
4839
4840 /* Branches generate:
4841 (set (pc)
4842 (if_then_else (<test>, <cc_reg>, (const_int 0))
4843 (label_ref <branch_label>)
4844 (pc))) */
4845 label_ref = gen_rtx_LABEL_REF (VOIDmode, operands[3]);
4846 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4847 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4848 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, if_else));
4849 return TRUE;
4850 }
4851
4852 \f
4853 /* Emit code to set a gpr to 1/0 based on a comparison. */
4854
4855 int
4856 frv_emit_scc (rtx operands[])
4857 {
4858 rtx set;
4859 rtx test_rtx;
4860 rtx clobber;
4861 rtx cr_reg;
4862 enum rtx_code test = GET_CODE (operands[1]);
4863 rtx cc_reg = frv_emit_comparison (test, operands[2], operands[3]);
4864
4865 /* SCC instructions generate:
4866 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0)))
4867 (clobber (<ccr_reg>))]) */
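/* The CCR clobber reserves a condition register that frv_split_scc later
uses to materialize the 0/1 result with conditional execution. */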
4868 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4869 set = gen_rtx_SET (VOIDmode, operands[0], test_rtx);
4870
4871 cr_reg = ((TARGET_ALLOC_CC)
4872 ? gen_reg_rtx (CC_CCRmode)
4873 : gen_rtx_REG (CC_CCRmode,
4874 ((GET_MODE (cc_reg) == CC_FPmode)
4875 ? FCR_FIRST
4876 : ICR_FIRST)));
4877
4878 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4879 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4880 return TRUE;
4881 }
4882
4883 \f
4884 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4885 the separate insns. */
4886
4887 rtx
4888 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4889 {
4890 rtx ret;
4891
4892 start_sequence ();
4893
4894 /* Set the appropriate CCR bit. */
4895 emit_insn (gen_rtx_SET (VOIDmode,
4896 cr_reg,
4897 gen_rtx_fmt_ee (GET_CODE (test),
4898 GET_MODE (cr_reg),
4899 cc_reg,
4900 const0_rtx)));
4901
4902 /* Move the value into the destination. */
4903 emit_move_insn (dest, GEN_INT (value));
4904
4905 /* Move 0 into the destination if the test failed. */
4906 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4907 gen_rtx_EQ (GET_MODE (cr_reg),
4908 cr_reg,
4909 const0_rtx),
4910 gen_rtx_SET (VOIDmode, dest, const0_rtx)));
4911
4912 /* Finish up, return sequence. */
4913 ret = get_insns ();
4914 end_sequence ();
4915 return ret;
4916 }
4917
4918 \f
4919 /* Emit the code for a conditional move, returning TRUE if we could do the
4920 move. */
4921
4922 int
4923 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4924 {
4925 rtx set;
4926 rtx clobber_cc;
4927 rtx test2;
4928 rtx cr_reg;
4929 rtx if_rtx;
4930 enum rtx_code test = GET_CODE (test_rtx);
4931 rtx cc_reg = frv_emit_comparison (test,
4932 XEXP (test_rtx, 0), XEXP (test_rtx, 1));
4933 enum machine_mode cc_mode = GET_MODE (cc_reg);
4934
4935 /* Conditional move instructions generate:
4936 (parallel [(set <target>
4937 (if_then_else (<test> <cc_reg> (const_int 0))
4938 <src1>
4939 <src2>))
4940 (clobber (<ccr_reg>))]) */
4941
4942 /* Handle various cases of conditional move involving two constants. */
4943 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4944 {
4945 HOST_WIDE_INT value1 = INTVAL (src1);
4946 HOST_WIDE_INT value2 = INTVAL (src2);
4947
4948 /* Having 0 as one of the constants can be done by loading the other
4949 constant, and optionally moving in gr0. */
4950 if (value1 == 0 || value2 == 0)
4951 ;
4952
4953 /* If the first value is within an addi range and also the difference
4954 between the two fits in an addi's range, load up the difference, then
4955 conditionally move in 0, and then unconditionally add the first
4956 value. */
4957 else if (IN_RANGE_P (value1, -2048, 2047)
4958 && IN_RANGE_P (value2 - value1, -2048, 2047))
4959 ;
4960
4961 /* If neither condition holds, just force the constant into a
4962 register. */
4963 else
4964 {
4965 src1 = force_reg (GET_MODE (dest), src1);
4966 src2 = force_reg (GET_MODE (dest), src2);
4967 }
4968 }
4969
4970 /* If one value is a register, ensure the other value is either 0 or a
4971 register. */
4972 else
4973 {
4974 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4975 src1 = force_reg (GET_MODE (dest), src1);
4976
4977 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4978 src2 = force_reg (GET_MODE (dest), src2);
4979 }
4980
4981 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4982 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4983
4984 set = gen_rtx_SET (VOIDmode, dest, if_rtx);
4985
4986 cr_reg = ((TARGET_ALLOC_CC)
4987 ? gen_reg_rtx (CC_CCRmode)
4988 : gen_rtx_REG (CC_CCRmode,
4989 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4990
4991 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4992 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4993 return TRUE;
4994 }
4995
4996 \f
4997 /* Split a conditional move into constituent parts, returning a SEQUENCE
4998 containing all of the insns. */
4999
5000 rtx
5001 frv_split_cond_move (rtx operands[])
5002 {
5003 rtx dest = operands[0];
5004 rtx test = operands[1];
5005 rtx cc_reg = operands[2];
5006 rtx src1 = operands[3];
5007 rtx src2 = operands[4];
5008 rtx cr_reg = operands[5];
5009 rtx ret;
5010 enum machine_mode cr_mode = GET_MODE (cr_reg);
5011
5012 start_sequence ();
5013
5014 /* Set the appropriate CCR bit. */
5015 emit_insn (gen_rtx_SET (VOIDmode,
5016 cr_reg,
5017 gen_rtx_fmt_ee (GET_CODE (test),
5018 GET_MODE (cr_reg),
5019 cc_reg,
5020 const0_rtx)));
5021
5022 /* Handle various cases of conditional move involving two constants. */
5023 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
5024 {
5025 HOST_WIDE_INT value1 = INTVAL (src1);
5026 HOST_WIDE_INT value2 = INTVAL (src2);
5027
5028 /* Having 0 as one of the constants can be done by loading the other
5029 constant, and optionally moving in gr0. */
5030 if (value1 == 0)
5031 {
5032 emit_move_insn (dest, src2);
5033 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5034 gen_rtx_NE (cr_mode, cr_reg,
5035 const0_rtx),
5036 gen_rtx_SET (VOIDmode, dest, src1)));
5037 }
5038
5039 else if (value2 == 0)
5040 {
5041 emit_move_insn (dest, src1);
5042 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5043 gen_rtx_EQ (cr_mode, cr_reg,
5044 const0_rtx),
5045 gen_rtx_SET (VOIDmode, dest, src2)));
5046 }
5047
5048 /* If the first value is within an addi range and also the difference
5049 between the two fits in an addi's range, load up the difference, then
5050 conditionally move in 0, and then unconditionally add the first
5051 value. */
5052 else if (IN_RANGE_P (value1, -2048, 2047)
5053 && IN_RANGE_P (value2 - value1, -2048, 2047))
5054 {
5055 rtx dest_si = ((GET_MODE (dest) == SImode)
5056 ? dest
5057 : gen_rtx_SUBREG (SImode, dest, 0));
5058
5059 emit_move_insn (dest_si, GEN_INT (value2 - value1));
5060 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5061 gen_rtx_NE (cr_mode, cr_reg,
5062 const0_rtx),
5063 gen_rtx_SET (VOIDmode, dest_si,
5064 const0_rtx)));
5065 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
5066 }
5067
5068 else
5069 gcc_unreachable ();
5070 }
5071 else
5072 {
5073 /* Emit the conditional move for the test being true if needed. */
5074 if (! rtx_equal_p (dest, src1))
5075 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5076 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5077 gen_rtx_SET (VOIDmode, dest, src1)));
5078
5079 /* Emit the conditional move for the test being false if needed. */
5080 if (! rtx_equal_p (dest, src2))
5081 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5082 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5083 gen_rtx_SET (VOIDmode, dest, src2)));
5084 }
5085
5086 /* Finish up, return sequence. */
5087 ret = get_insns ();
5088 end_sequence ();
5089 return ret;
5090 }
5091
5092 \f
5093 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
5094 memory location that is not known to be dword-aligned. */
5095 void
5096 frv_split_double_load (rtx dest, rtx source)
5097 {
5098 int regno = REGNO (dest);
5099 rtx dest1 = gen_highpart (SImode, dest);
5100 rtx dest2 = gen_lowpart (SImode, dest);
5101 rtx address = XEXP (source, 0);
5102
5103 /* If the address is pre-modified, load the lower-numbered register
5104 first, then load the other register using an integer offset from
5105 the modified base register. This order should always be safe,
5106 since the pre-modification cannot affect the same registers as the
5107 load does.
5108
5109 The situation for other loads is more complicated. Loading one
5110 of the registers could affect the value of ADDRESS, so we must
5111 be careful which order we do them in. */
5112 if (GET_CODE (address) == PRE_MODIFY
5113 || ! refers_to_regno_p (regno, regno + 1, address, NULL))
5114 {
5115 /* It is safe to load the lower-numbered register first. */
5116 emit_move_insn (dest1, change_address (source, SImode, NULL));
5117 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5118 }
5119 else
5120 {
5121 /* ADDRESS is not pre-modified and the address depends on the
5122 lower-numbered register. Load the higher-numbered register
5123 first. */
5124 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5125 emit_move_insn (dest1, change_address (source, SImode, NULL));
5126 }
5127 }
5128
5129 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
5130 and SOURCE is either a double register or the constant zero. */
5131 void
5132 frv_split_double_store (rtx dest, rtx source)
5133 {
5134 rtx dest1 = change_address (dest, SImode, NULL);
5135 rtx dest2 = frv_index_memory (dest, SImode, 1);
5136 if (ZERO_P (source))
5137 {
5138 emit_move_insn (dest1, CONST0_RTX (SImode));
5139 emit_move_insn (dest2, CONST0_RTX (SImode));
5140 }
5141 else
5142 {
5143 emit_move_insn (dest1, gen_highpart (SImode, source));
5144 emit_move_insn (dest2, gen_lowpart (SImode, source));
5145 }
5146 }
5147
5148 \f
5149 /* Split a min/max operation returning a SEQUENCE containing all of the
5150 insns. */
5151
5152 rtx
5153 frv_split_minmax (rtx operands[])
5154 {
5155 rtx dest = operands[0];
5156 rtx minmax = operands[1];
5157 rtx src1 = operands[2];
5158 rtx src2 = operands[3];
5159 rtx cc_reg = operands[4];
5160 rtx cr_reg = operands[5];
5161 rtx ret;
5162 enum rtx_code test_code;
5163 enum machine_mode cr_mode = GET_MODE (cr_reg);
5164
5165 start_sequence ();
5166
5167 /* Figure out which test to use. */
5168 switch (GET_CODE (minmax))
5169 {
5170 default:
5171 gcc_unreachable ();
5172
5173 case SMIN: test_code = LT; break;
5174 case SMAX: test_code = GT; break;
5175 case UMIN: test_code = LTU; break;
5176 case UMAX: test_code = GTU; break;
5177 }
5178
5179 /* Issue the compare instruction. */
5180 emit_insn (gen_rtx_SET (VOIDmode,
5181 cc_reg,
5182 gen_rtx_COMPARE (GET_MODE (cc_reg),
5183 src1, src2)));
5184
5185 /* Set the appropriate CCR bit. */
5186 emit_insn (gen_rtx_SET (VOIDmode,
5187 cr_reg,
5188 gen_rtx_fmt_ee (test_code,
5189 GET_MODE (cr_reg),
5190 cc_reg,
5191 const0_rtx)));
5192
5193 /* If we are taking the min/max of a nonzero constant, load that first, and
5194 then do a conditional move of the other value. */
5195 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5196 {
5197 gcc_assert (!rtx_equal_p (dest, src1));
5198
5199 emit_move_insn (dest, src2);
5200 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5201 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5202 gen_rtx_SET (VOIDmode, dest, src1)));
5203 }
5204
5205 /* Otherwise, do each half of the move. */
5206 else
5207 {
5208 /* Emit the conditional move for the test being true if needed. */
5209 if (! rtx_equal_p (dest, src1))
5210 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5211 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5212 gen_rtx_SET (VOIDmode, dest, src1)));
5213
5214 /* Emit the conditional move for the test being false if needed. */
5215 if (! rtx_equal_p (dest, src2))
5216 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5217 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5218 gen_rtx_SET (VOIDmode, dest, src2)));
5219 }
5220
5221 /* Finish up, return sequence. */
5222 ret = get_insns ();
5223 end_sequence ();
5224 return ret;
5225 }
5226
5227 \f
5228 /* Split an integer abs operation returning a SEQUENCE containing all of the
5229 insns. */
5230
5231 rtx
5232 frv_split_abs (rtx operands[])
5233 {
5234 rtx dest = operands[0];
5235 rtx src = operands[1];
5236 rtx cc_reg = operands[2];
5237 rtx cr_reg = operands[3];
5238 rtx ret;
5239
5240 start_sequence ();
5241
5242 /* Issue the compare < 0 instruction. */
5243 emit_insn (gen_rtx_SET (VOIDmode,
5244 cc_reg,
5245 gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5246
5247 /* Set the appropriate CCR bit. */
5248 emit_insn (gen_rtx_SET (VOIDmode,
5249 cr_reg,
5250 gen_rtx_fmt_ee (LT, CC_CCRmode, cc_reg, const0_rtx)));
5251
5252 /* Emit the conditional negate if the value is negative. */
5253 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5254 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5255 gen_negsi2 (dest, src)));
5256
5257 /* Emit the conditional move for the test being false if needed. */
5258 if (! rtx_equal_p (dest, src))
5259 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5260 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5261 gen_rtx_SET (VOIDmode, dest, src)));
5262
5263 /* Finish up, return sequence. */
5264 ret = get_insns ();
5265 end_sequence ();
5266 return ret;
5267 }
5268
5269 \f
5270 /* An internal function called by for_each_rtx to clear, in a HARD_REG_SET,
5271 each register that is used in an insn. */
5272
5273 static int
5274 frv_clear_registers_used (rtx *ptr, void *data)
5275 {
5276 if (GET_CODE (*ptr) == REG)
5277 {
5278 int regno = REGNO (*ptr);
5279 HARD_REG_SET *p_regs = (HARD_REG_SET *)data;
5280
5281 if (regno < FIRST_PSEUDO_REGISTER)
5282 {
5283 int reg_max = regno + HARD_REGNO_NREGS (regno, GET_MODE (*ptr));
5284
5285 while (regno < reg_max)
5286 {
5287 CLEAR_HARD_REG_BIT (*p_regs, regno);
5288 regno++;
5289 }
5290 }
5291 }
5292
5293 return 0;
5294 }
5295
5296 \f
5297 /* Initialize the extra fields provided by IFCVT_EXTRA_FIELDS. */
5298
5299 /* On the FR-V, we don't have any extra fields per se, but this is a useful
5300 hook for initializing the static storage. */
5301 void
5302 frv_ifcvt_init_extra_fields (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
5303 {
5304 frv_ifcvt.added_insns_list = NULL_RTX;
5305 frv_ifcvt.cur_scratch_regs = 0;
5306 frv_ifcvt.num_nested_cond_exec = 0;
5307 frv_ifcvt.cr_reg = NULL_RTX;
5308 frv_ifcvt.nested_cc_reg = NULL_RTX;
5309 frv_ifcvt.extra_int_cr = NULL_RTX;
5310 frv_ifcvt.extra_fp_cr = NULL_RTX;
5311 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5312 }
5313
5314 \f
5315 /* Internal function to add a potential insn to the list of insns to be inserted
5316 if the conditional execution conversion is successful. */
5317
5318 static void
5319 frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p)
5320 {
5321 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5322
5323 link->jump = before_p; /* Mark to add this before or after insn. */
5324 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5325 frv_ifcvt.added_insns_list);
5326
5327 if (TARGET_DEBUG_COND_EXEC)
5328 {
5329 fprintf (stderr,
5330 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5331 (before_p) ? "before" : "after",
5332 (int)INSN_UID (insn));
5333
5334 debug_rtx (pattern);
5335 }
5336 }
5337
5338 \f
5339 /* A C expression to modify the code described by the conditional if
5340 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5341 FALSE_EXPR for converting if-then and if-then-else code to conditional
5342 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5343 tests cannot be converted. */
5344
5345 void
5346 frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
5347 {
5348 basic_block test_bb = ce_info->test_bb; /* test basic block */
5349 basic_block then_bb = ce_info->then_bb; /* THEN */
5350 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5351 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5352 rtx true_expr = *p_true;
5353 rtx cr;
5354 rtx cc;
5355 rtx nested_cc;
5356 enum machine_mode mode = GET_MODE (true_expr);
5357 int j;
5358 basic_block *bb;
5359 int num_bb;
5360 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5361 rtx check_insn;
5362 rtx sub_cond_exec_reg;
5363 enum rtx_code code;
5364 enum rtx_code code_true;
5365 enum rtx_code code_false;
5366 enum reg_class cc_class;
5367 enum reg_class cr_class;
5368 int cc_first;
5369 int cc_last;
5370 reg_set_iterator rsi;
5371
5372 /* Make sure we are only dealing with hard registers. Also honor the
5373 -mno-cond-exec and -mno-nested-cond-exec switches where applicable. */
5375 if (!reload_completed || !TARGET_COND_EXEC
5376 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5377 goto fail;
5378
5379 /* Figure out which registers we can allocate for our own purposes. Only
5380 consider registers that are not preserved across function calls and are
5381 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5382 if we did not need to use them in reloading other registers. */
5383 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5384 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
5385 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
5386 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5387 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5388
5389 /* If this is a nested IF, we need to discover whether the CC registers that
5390 are set/used inside of the block are used anywhere else. If not, we can
5391 change them to be the CC register that is paired with the CR register that
5392 controls the outermost IF block. */
5393 if (ce_info->pass > 1)
5394 {
5395 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5396 for (j = CC_FIRST; j <= CC_LAST; j++)
5397 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5398 {
5399 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5400 continue;
5401
5402 if (else_bb
5403 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5404 continue;
5405
5406 if (join_bb
5407 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5408 continue;
5409
5410 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5411 }
5412 }
5413
5414 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5415 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5416
5417 frv_ifcvt.added_insns_list = NULL_RTX;
5418 frv_ifcvt.cur_scratch_regs = 0;
5419
5420 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5421 * sizeof (basic_block));
5422
5423 if (join_bb)
5424 {
5425 unsigned int regno;
5426
5427 /* Remove anything live at the beginning of the join block from being
5428 available for allocation. */
5429 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5430 {
5431 if (regno < FIRST_PSEUDO_REGISTER)
5432 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5433 }
5434 }
5435
5436 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5437 num_bb = 0;
5438 if (ce_info->num_multiple_test_blocks)
5439 {
5440 basic_block multiple_test_bb = ce_info->last_test_bb;
5441
5442 while (multiple_test_bb != test_bb)
5443 {
5444 bb[num_bb++] = multiple_test_bb;
5445 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5446 }
5447 }
5448
5449 /* Add in the THEN and ELSE blocks to be scanned. */
5450 bb[num_bb++] = then_bb;
5451 if (else_bb)
5452 bb[num_bb++] = else_bb;
5453
5454 sub_cond_exec_reg = NULL_RTX;
5455 frv_ifcvt.num_nested_cond_exec = 0;
5456
5457 /* Scan all of the blocks for registers that must not be allocated. */
5458 for (j = 0; j < num_bb; j++)
5459 {
5460 rtx last_insn = BB_END (bb[j]);
5461 rtx insn = BB_HEAD (bb[j]);
5462 unsigned int regno;
5463
5464 if (dump_file)
5465 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5466 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5467 (int) bb[j]->index,
5468 (int) INSN_UID (BB_HEAD (bb[j])),
5469 (int) INSN_UID (BB_END (bb[j])));
5470
5471 /* Anything live at the beginning of the block is obviously unavailable
5472 for allocation. */
5473 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5474 {
5475 if (regno < FIRST_PSEUDO_REGISTER)
5476 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5477 }
5478
5479 /* Loop through the insns in the block. */
5480 for (;;)
5481 {
5482 /* Mark any new registers that are created as being unavailable for
5483 allocation. Also see if the CC register used in nested IFs can be
5484 reallocated. */
5485 if (INSN_P (insn))
5486 {
5487 rtx pattern;
5488 rtx set;
5489 int skip_nested_if = FALSE;
5490
5491 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5492 (void *)&tmp_reg->regs);
5493
5494 pattern = PATTERN (insn);
5495 if (GET_CODE (pattern) == COND_EXEC)
5496 {
5497 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5498
5499 if (reg != sub_cond_exec_reg)
5500 {
5501 sub_cond_exec_reg = reg;
5502 frv_ifcvt.num_nested_cond_exec++;
5503 }
5504 }
5505
5506 set = single_set_pattern (pattern);
5507 if (set)
5508 {
5509 rtx dest = SET_DEST (set);
5510 rtx src = SET_SRC (set);
5511
5512 if (GET_CODE (dest) == REG)
5513 {
5514 int regno = REGNO (dest);
5515 enum rtx_code src_code = GET_CODE (src);
5516
5517 if (CC_P (regno) && src_code == COMPARE)
5518 skip_nested_if = TRUE;
5519
5520 else if (CR_P (regno)
5521 && (src_code == IF_THEN_ELSE
5522 || COMPARISON_P (src)))
5523 skip_nested_if = TRUE;
5524 }
5525 }
5526
5527 if (! skip_nested_if)
5528 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5529 (void *)&frv_ifcvt.nested_cc_ok_rewrite);
5530 }
5531
5532 if (insn == last_insn)
5533 break;
5534
5535 insn = NEXT_INSN (insn);
5536 }
5537 }
5538
5539 /* If this is a nested if, rewrite the CC registers that are available to
5540 include the ones that can be rewritten, to increase the chance of being
5541 able to allocate a paired CC/CR register combination. */
5542 if (ce_info->pass > 1)
5543 {
5544 for (j = CC_FIRST; j <= CC_LAST; j++)
5545 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5546 SET_HARD_REG_BIT (tmp_reg->regs, j);
5547 else
5548 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5549 }
5550
5551 if (dump_file)
5552 {
5553 int num_gprs = 0;
5554 fprintf (dump_file, "Available GPRs: ");
5555
5556 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5557 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5558 {
5559 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5560 if (++num_gprs > GPR_TEMP_NUM+2)
5561 break;
5562 }
5563
5564 fprintf (dump_file, "%s\nAvailable CRs: ",
5565 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5566
5567 for (j = CR_FIRST; j <= CR_LAST; j++)
5568 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5569 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5570
5571 fputs ("\n", dump_file);
5572
5573 if (ce_info->pass > 1)
5574 {
5575 fprintf (dump_file, "Modifiable CCs: ");
5576 for (j = CC_FIRST; j <= CC_LAST; j++)
5577 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5578 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5579
5580 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5581 frv_ifcvt.num_nested_cond_exec);
5582 }
5583 }
5584
5585 /* Allocate the appropriate temporary condition code register. Try to
5586 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5587 that conditional cmp's can be done. */
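/* (The pairing used below is positional: CC register CC_FIRST + n is matched
with CR register CR_FIRST + n, and both must still be free for the pair to
be usable.) */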
5588 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5589 {
5590 cr_class = ICR_REGS;
5591 cc_class = ICC_REGS;
5592 cc_first = ICC_FIRST;
5593 cc_last = ICC_LAST;
5594 }
5595 else if (mode == CC_FPmode)
5596 {
5597 cr_class = FCR_REGS;
5598 cc_class = FCC_REGS;
5599 cc_first = FCC_FIRST;
5600 cc_last = FCC_LAST;
5601 }
5602 else
5603 {
5604 cc_first = cc_last = 0;
5605 cr_class = cc_class = NO_REGS;
5606 }
5607
5608 cc = XEXP (true_expr, 0);
5609 nested_cc = cr = NULL_RTX;
5610 if (cc_class != NO_REGS)
5611 {
5612 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5613 so we can execute a csubcc/caddcc/cfcmps instruction. */
5614 int cc_regno;
5615
5616 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5617 {
5618 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5619
5620 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5621 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5622 {
5623 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5624 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5625 TRUE);
5626
5627 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5628 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5629 TRUE, TRUE);
5630 break;
5631 }
5632 }
5633 }
5634
5635 if (! cr)
5636 {
5637 if (dump_file)
5638 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5639
5640 goto fail;
5641 }
5642
5643 if (dump_file)
5644 fprintf (dump_file,
5645 "Will use %s for conditional execution, %s for nested comparisons\n",
5646 reg_names[ REGNO (cr)],
5647 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5648
5649 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5650 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5651 bit being true. We don't do this for floating point, because of NaNs. */
5652 code = GET_CODE (true_expr);
5653 if (GET_MODE (cc) != CC_FPmode)
5654 {
5655 code = reverse_condition (code);
5656 code_true = EQ;
5657 code_false = NE;
5658 }
5659 else
5660 {
5661 code_true = NE;
5662 code_false = EQ;
5663 }
5664
5665 check_insn = gen_rtx_SET (VOIDmode, cr,
5666 gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
5667
5668 /* Record the check insn to be inserted later. */
5669 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5670
5671 /* Update the tests. */
5672 frv_ifcvt.cr_reg = cr;
5673 frv_ifcvt.nested_cc_reg = nested_cc;
5674 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5675 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5676 return;
5677
5678 /* Fail, don't do this conditional execution. */
5679 fail:
5680 *p_true = NULL_RTX;
5681 *p_false = NULL_RTX;
5682 if (dump_file)
5683 fprintf (dump_file, "Disabling this conditional execution.\n");
5684
5685 return;
5686 }
5687
5688 \f
5689 /* A C expression to modify the code described by the conditional if
5690 information CE_INFO, for the basic block BB, possibly updating the tests in
5691 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5692 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5693 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5694
5695 /* p_true and p_false are given expressions of the form:
5696
5697 (and (eq:CC_CCR (reg:CC_CCR)
5698 (const_int 0))
5699 (eq:CC (reg:CC)
5700 (const_int 0))) */
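/* As a rough sketch of the rewrite for "a && b" (names purely illustrative):
a second CR, new_cr, is set to the result of the second comparison only
when the first test allows it (a conditional check insn), the two CRs are
then combined with an andcr or andncr (orcr/orncr for ||), and the returned
*p_true and *p_false simply test CR against zero. */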
5701
5702 void
5703 frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
5704 basic_block bb,
5705 rtx *p_true,
5706 rtx *p_false)
5707 {
5708 rtx old_true = XEXP (*p_true, 0);
5709 rtx old_false = XEXP (*p_false, 0);
5710 rtx true_expr = XEXP (*p_true, 1);
5711 rtx false_expr = XEXP (*p_false, 1);
5712 rtx test_expr;
5713 rtx old_test;
5714 rtx cr = XEXP (old_true, 0);
5715 rtx check_insn;
5716 rtx new_cr = NULL_RTX;
5717 rtx *p_new_cr = (rtx *)0;
5718 rtx if_else;
5719 rtx compare;
5720 rtx cc;
5721 enum reg_class cr_class;
5722 enum machine_mode mode = GET_MODE (true_expr);
5723 rtx (*logical_func)(rtx, rtx, rtx);
5724
5725 if (TARGET_DEBUG_COND_EXEC)
5726 {
5727 fprintf (stderr,
5728 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5729 ce_info->and_and_p ? "&&" : "||");
5730
5731 debug_rtx (*p_true);
5732
5733 fputs ("\nfalse insn:\n", stderr);
5734 debug_rtx (*p_false);
5735 }
5736
5737 if (!TARGET_MULTI_CE)
5738 goto fail;
5739
5740 if (GET_CODE (cr) != REG)
5741 goto fail;
5742
5743 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5744 {
5745 cr_class = ICR_REGS;
5746 p_new_cr = &frv_ifcvt.extra_int_cr;
5747 }
5748 else if (mode == CC_FPmode)
5749 {
5750 cr_class = FCR_REGS;
5751 p_new_cr = &frv_ifcvt.extra_fp_cr;
5752 }
5753 else
5754 goto fail;
5755
5756 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5757 more &&/|| tests. */
5758 new_cr = *p_new_cr;
5759 if (! new_cr)
5760 {
5761 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5762 CC_CCRmode, TRUE, TRUE);
5763 if (! new_cr)
5764 goto fail;
5765 }
5766
5767 if (ce_info->and_and_p)
5768 {
5769 old_test = old_false;
5770 test_expr = true_expr;
5771 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5772 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5773 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5774 }
5775 else
5776 {
5777 old_test = old_false;
5778 test_expr = false_expr;
5779 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5780 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5781 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5782 }
5783
5784 /* Add the andcr/andncr/orcr/orncr first; since frv_ifcvt_add_insn keeps a
5785 LIFO stack, it will end up after the conditional check instruction in the
5786 final insn stream. */
5787 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5788
5789 /* Now add the conditional check insn. */
5790 cc = XEXP (test_expr, 0);
5791 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5792 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5793
5794 check_insn = gen_rtx_SET (VOIDmode, new_cr, if_else);
5795
5796 /* Add the new check insn to the list of check insns that need to be
5797 inserted. */
5798 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
5799
5800 if (TARGET_DEBUG_COND_EXEC)
5801 {
5802 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5803 stderr);
5804
5805 debug_rtx (*p_true);
5806
5807 fputs ("\nfalse insn:\n", stderr);
5808 debug_rtx (*p_false);
5809 }
5810
5811 return;
5812
5813 fail:
5814 *p_true = *p_false = NULL_RTX;
5815
5816 /* If we allocated a CR register, release it. */
5817 if (new_cr)
5818 {
5819 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5820 *p_new_cr = NULL_RTX;
5821 }
5822
5823 if (TARGET_DEBUG_COND_EXEC)
5824 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5825
5826 return;
5827 }
5828
5829 \f
5830 /* Return a register which will be loaded with a value if an IF block is
5831 converted to conditional execution. This is used to rewrite instructions
5832 that use constants to ones that just use registers. */
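/* For example, when "gr5 = gr6 + 1000" is to be executed conditionally, the
constant is first loaded unconditionally into a scratch GPR at the start of
the converted region, and the conditional add then reads that register
instead of the immediate. (The register numbers and constant shown are only
illustrative.) */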
5833
5834 static rtx
5835 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5836 {
5837 int num_alloc = frv_ifcvt.cur_scratch_regs;
5838 int i;
5839 rtx reg;
5840
5841 /* We know gr0 == 0, so replace any errant uses. */
5842 if (value == const0_rtx)
5843 return gen_rtx_REG (SImode, GPR_FIRST);
5844
5845 /* First search all registers currently loaded to see if we have an
5846 applicable constant. */
5847 if (CONSTANT_P (value)
5848 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5849 {
5850 for (i = 0; i < num_alloc; i++)
5851 {
5852 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5853 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5854 }
5855 }
5856
5857 /* Have we exhausted the number of registers available? */
5858 if (num_alloc >= GPR_TEMP_NUM)
5859 {
5860 if (dump_file)
5861 fprintf (dump_file, "Too many temporary registers allocated\n");
5862
5863 return NULL_RTX;
5864 }
5865
5866 /* Allocate the new register. */
5867 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5868 if (! reg)
5869 {
5870 if (dump_file)
5871 fputs ("Could not find a scratch register\n", dump_file);
5872
5873 return NULL_RTX;
5874 }
5875
5876 frv_ifcvt.cur_scratch_regs++;
5877 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (VOIDmode, reg, value);
5878
5879 if (dump_file)
5880 {
5881 if (GET_CODE (value) == CONST_INT)
5882 fprintf (dump_file, "Register %s will hold %ld\n",
5883 reg_names[ REGNO (reg)], (long)INTVAL (value));
5884
5885 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5886 fprintf (dump_file, "Register %s will hold LR\n",
5887 reg_names[ REGNO (reg)]);
5888
5889 else
5890 fprintf (dump_file, "Register %s will hold a saved value\n",
5891 reg_names[ REGNO (reg)]);
5892 }
5893
5894 return reg;
5895 }
5896
5897 \f
5898 /* Update a MEM used in conditional code that might contain an offset to put
5899 the offset into a scratch register, so that the conditional load/store
5900 operations can be used. This function returns the original pointer if the
5901 MEM is valid to use in conditional code, NULL if we can't load up the offset
5902 into a temporary register, or the new MEM if we were successful. */
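/* For example, if the address is (plus (reg) (const_int 4096)) and that form
is not valid for a conditional load/store, the constant is loaded into a
scratch register R and the address is rewritten as (plus (reg) R); a bare
constant address is likewise replaced by a register holding it. (The offset
shown is only illustrative.) */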
5903
5904 static rtx
5905 frv_ifcvt_rewrite_mem (rtx mem, enum machine_mode mode, rtx insn)
5906 {
5907 rtx addr = XEXP (mem, 0);
5908
5909 if (!frv_legitimate_address_p_1 (mode, addr, reload_completed, TRUE, FALSE))
5910 {
5911 if (GET_CODE (addr) == PLUS)
5912 {
5913 rtx addr_op0 = XEXP (addr, 0);
5914 rtx addr_op1 = XEXP (addr, 1);
5915
5916 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5917 {
5918 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5919 if (!reg)
5920 return NULL_RTX;
5921
5922 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5923 }
5924
5925 else
5926 return NULL_RTX;
5927 }
5928
5929 else if (CONSTANT_P (addr))
5930 addr = frv_ifcvt_load_value (addr, insn);
5931
5932 else
5933 return NULL_RTX;
5934
5935 if (addr == NULL_RTX)
5936 return NULL_RTX;
5937
5938 else if (XEXP (mem, 0) != addr)
5939 return change_address (mem, mode, addr);
5940 }
5941
5942 return mem;
5943 }
5944
5945 \f
5946 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5947 SET, possibly conditionally executed. It may also have CLOBBERs and USEs. */
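/* For example, a bare SET, a COND_EXEC wrapping a SET, and a PARALLEL of one
SET plus CLOBBERs/USEs (possibly inside a COND_EXEC) all yield that SET,
while a PARALLEL containing two SETs yields zero. */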
5948
5949 static rtx
5950 single_set_pattern (rtx pattern)
5951 {
5952 rtx set;
5953 int i;
5954
5955 if (GET_CODE (pattern) == COND_EXEC)
5956 pattern = COND_EXEC_CODE (pattern);
5957
5958 if (GET_CODE (pattern) == SET)
5959 return pattern;
5960
5961 else if (GET_CODE (pattern) == PARALLEL)
5962 {
5963 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5964 {
5965 rtx sub = XVECEXP (pattern, 0, i);
5966
5967 switch (GET_CODE (sub))
5968 {
5969 case USE:
5970 case CLOBBER:
5971 break;
5972
5973 case SET:
5974 if (set)
5975 return 0;
5976 else
5977 set = sub;
5978 break;
5979
5980 default:
5981 return 0;
5982 }
5983 }
5984 return set;
5985 }
5986
5987 return 0;
5988 }
5989
5990 \f
5991 /* A C expression to modify the code described by the conditional if
5992 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5993 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5994 insn cannot be converted to be executed conditionally. */
5995
5996 rtx
5997 frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
5998 rtx pattern,
5999 rtx insn)
6000 {
6001 rtx orig_ce_pattern = pattern;
6002 rtx set;
6003 rtx op0;
6004 rtx op1;
6005 rtx test;
6006
6007 gcc_assert (GET_CODE (pattern) == COND_EXEC);
6008
6009 test = COND_EXEC_TEST (pattern);
6010 if (GET_CODE (test) == AND)
6011 {
6012 rtx cr = frv_ifcvt.cr_reg;
6013 rtx test_reg;
6014
6015 op0 = XEXP (test, 0);
6016 if (! rtx_equal_p (cr, XEXP (op0, 0)))
6017 goto fail;
6018
6019 op1 = XEXP (test, 1);
6020 test_reg = XEXP (op1, 0);
6021 if (GET_CODE (test_reg) != REG)
6022 goto fail;
6023
6024 /* Is this the first nested if block in this sequence? If so, generate
6025 an andcr or andncr. */
6026 if (! frv_ifcvt.last_nested_if_cr)
6027 {
6028 rtx and_op;
6029
6030 frv_ifcvt.last_nested_if_cr = test_reg;
6031 if (GET_CODE (op0) == NE)
6032 and_op = gen_andcr (test_reg, cr, test_reg);
6033 else
6034 and_op = gen_andncr (test_reg, cr, test_reg);
6035
6036 frv_ifcvt_add_insn (and_op, insn, TRUE);
6037 }
6038
6039 /* If this isn't the first statement in the nested if sequence, see if we
6040 are dealing with the same register. */
6041 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
6042 goto fail;
6043
6044 COND_EXEC_TEST (pattern) = test = op1;
6045 }
6046
6047 /* If this isn't a nested if, reset state variables. */
6048 else
6049 {
6050 frv_ifcvt.last_nested_if_cr = NULL_RTX;
6051 }
6052
6053 set = single_set_pattern (pattern);
6054 if (set)
6055 {
6056 rtx dest = SET_DEST (set);
6057 rtx src = SET_SRC (set);
6058 enum machine_mode mode = GET_MODE (dest);
6059
6060 /* Check for normal binary operators. */
6061 if (mode == SImode && ARITHMETIC_P (src))
6062 {
6063 op0 = XEXP (src, 0);
6064 op1 = XEXP (src, 1);
6065
6066 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
6067 {
6068 op1 = frv_ifcvt_load_value (op1, insn);
6069 if (op1)
6070 COND_EXEC_CODE (pattern)
6071 = gen_rtx_SET (VOIDmode, dest, gen_rtx_fmt_ee (GET_CODE (src),
6072 GET_MODE (src),
6073 op0, op1));
6074 else
6075 goto fail;
6076 }
6077 }
6078
6079 /* For multiply by a constant, we need to handle the sign extension
6080 correctly. Add a USE of the value after the multiply to prevent flow
6081 from cratering because only one register out of the two was used. */
6082 else if (mode == DImode && GET_CODE (src) == MULT)
6083 {
6084 op0 = XEXP (src, 0);
6085 op1 = XEXP (src, 1);
6086 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
6087 {
6088 op1 = frv_ifcvt_load_value (op1, insn);
6089 if (op1)
6090 {
6091 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
6092 COND_EXEC_CODE (pattern)
6093 = gen_rtx_SET (VOIDmode, dest,
6094 gen_rtx_MULT (DImode, op0, op1));
6095 }
6096 else
6097 goto fail;
6098 }
6099
6100 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
6101 }
6102
6103 /* If we are just loading a constant created for a nested conditional
6104 execution statement, just load the constant without any conditional
6105 execution, since we know that the constant will not interfere with any
6106 other registers. */
6107 else if (frv_ifcvt.scratch_insns_bitmap
6108 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
6109 INSN_UID (insn))
6110 && REG_P (SET_DEST (set))
6111 /* We must not unconditionally set a scratch reg chosen
6112 for a nested if-converted block if its incoming
6113 value from the TEST block (or the result of the THEN
6114 branch) could/should propagate to the JOIN block.
6115 It suffices to test whether the register is live at
6116 the JOIN point: if it's live there, we can infer
6117 that we set it in the former JOIN block of the
6118 nested if-converted block (otherwise it wouldn't
6119 have been available as a scratch register), and it
6120 is either propagated through or set in the other
6121 conditional block. It's probably not worth trying
6122 to catch the latter case, and it could actually
6123 limit scheduling of the combined block quite
6124 severely. */
6125 && ce_info->join_bb
6126 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
6127 REGNO (SET_DEST (set))))
6128 /* Similarly, we must not unconditionally set a reg
6129 used as scratch in the THEN branch if the same reg
6130 is live in the ELSE branch. */
6131 && (! ce_info->else_bb
6132 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
6133 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
6134 REGNO (SET_DEST (set))))))
6135 pattern = set;
6136
6137 else if (mode == QImode || mode == HImode || mode == SImode
6138 || mode == SFmode)
6139 {
6140 int changed_p = FALSE;
6141
6142 /* Check for just loading up a constant */
6143 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
6144 {
6145 src = frv_ifcvt_load_value (src, insn);
6146 if (!src)
6147 goto fail;
6148
6149 changed_p = TRUE;
6150 }
6151
6152 /* See if we need to fix up stores */
6153 if (GET_CODE (dest) == MEM)
6154 {
6155 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
6156
6157 if (!new_mem)
6158 goto fail;
6159
6160 else if (new_mem != dest)
6161 {
6162 changed_p = TRUE;
6163 dest = new_mem;
6164 }
6165 }
6166
6167 /* See if we need to fix up loads */
6168 if (GET_CODE (src) == MEM)
6169 {
6170 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
6171
6172 if (!new_mem)
6173 goto fail;
6174
6175 else if (new_mem != src)
6176 {
6177 changed_p = TRUE;
6178 src = new_mem;
6179 }
6180 }
6181
6182 /* If either src or destination changed, redo SET. */
6183 if (changed_p)
6184 COND_EXEC_CODE (pattern) = gen_rtx_SET (VOIDmode, dest, src);
6185 }
6186
6187 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
6188 rewriting the CC register to be the same as the paired CC/CR register
6189 for nested ifs. */
6190 else if (mode == CC_CCRmode && COMPARISON_P (src))
6191 {
6192 int regno = REGNO (XEXP (src, 0));
6193 rtx if_else;
6194
6195 if (ce_info->pass > 1
6196 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6197 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6198 {
6199 src = gen_rtx_fmt_ee (GET_CODE (src),
6200 CC_CCRmode,
6201 frv_ifcvt.nested_cc_reg,
6202 XEXP (src, 1));
6203 }
6204
6205 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6206 pattern = gen_rtx_SET (VOIDmode, dest, if_else);
6207 }
6208
6209 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6210 else if (ce_info->pass > 1
6211 && GET_CODE (dest) == REG
6212 && CC_P (REGNO (dest))
6213 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6214 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6215 REGNO (dest))
6216 && GET_CODE (src) == COMPARE)
6217 {
6218 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6219 COND_EXEC_CODE (pattern)
6220 = gen_rtx_SET (VOIDmode, frv_ifcvt.nested_cc_reg, copy_rtx (src));
6221 }
6222 }
6223
6224 if (TARGET_DEBUG_COND_EXEC)
6225 {
6226 rtx orig_pattern = PATTERN (insn);
6227
6228 PATTERN (insn) = pattern;
6229 fprintf (stderr,
6230 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6231 ce_info->pass);
6232
6233 debug_rtx (insn);
6234 PATTERN (insn) = orig_pattern;
6235 }
6236
6237 return pattern;
6238
6239 fail:
6240 if (TARGET_DEBUG_COND_EXEC)
6241 {
6242 rtx orig_pattern = PATTERN (insn);
6243
6244 PATTERN (insn) = orig_ce_pattern;
6245 fprintf (stderr,
6246 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6247 ce_info->pass);
6248
6249 debug_rtx (insn);
6250 PATTERN (insn) = orig_pattern;
6251 }
6252
6253 return NULL_RTX;
6254 }
6255
6256 \f
6257 /* A C expression to perform any final machine dependent modifications in
6258 converting code to conditional execution in the code described by the
6259 conditional if information CE_INFO. */
6260
6261 void
6262 frv_ifcvt_modify_final (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6263 {
6264 rtx existing_insn;
6265 rtx check_insn;
6266 rtx p = frv_ifcvt.added_insns_list;
6267 int i;
6268
6269 /* Loop inserting the check insns. The last check insn is the first test,
6270 and is the appropriate place to insert constants. */
6271 gcc_assert (p);
6272
6273 do
6274 {
6275 rtx check_and_insert_insns = XEXP (p, 0);
6276 rtx old_p = p;
6277
6278 check_insn = XEXP (check_and_insert_insns, 0);
6279 existing_insn = XEXP (check_and_insert_insns, 1);
6280 p = XEXP (p, 1);
6281
6282 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6283 the existing insn; otherwise it is to be inserted AFTER. */
6284 if (check_and_insert_insns->jump)
6285 {
6286 emit_insn_before (check_insn, existing_insn);
6287 check_and_insert_insns->jump = 0;
6288 }
6289 else
6290 emit_insn_after (check_insn, existing_insn);
6291
6292 free_EXPR_LIST_node (check_and_insert_insns);
6293 free_EXPR_LIST_node (old_p);
6294 }
6295 while (p != NULL_RTX);
6296
6297 /* Load up any constants needed into temp gprs */
6298 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6299 {
6300 rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6301 if (! frv_ifcvt.scratch_insns_bitmap)
6302 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6303 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6304 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6305 }
6306
6307 frv_ifcvt.added_insns_list = NULL_RTX;
6308 frv_ifcvt.cur_scratch_regs = 0;
6309 }
6310
6311 \f
6312 /* A C expression to cancel any machine dependent modifications in converting
6313 code to conditional execution in the code described by the conditional if
6314 information CE_INFO. */
6315
6316 void
6317 frv_ifcvt_modify_cancel (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6318 {
6319 int i;
6320 rtx p = frv_ifcvt.added_insns_list;
6321
6322 /* Loop freeing up the EXPR_LIST's allocated. */
6323 while (p != NULL_RTX)
6324 {
6325 rtx check_and_jump = XEXP (p, 0);
6326 rtx old_p = p;
6327
6328 p = XEXP (p, 1);
6329 free_EXPR_LIST_node (check_and_jump);
6330 free_EXPR_LIST_node (old_p);
6331 }
6332
6333 /* Release any temporary gprs allocated. */
6334 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6335 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6336
6337 frv_ifcvt.added_insns_list = NULL_RTX;
6338 frv_ifcvt.cur_scratch_regs = 0;
6339 return;
6340 }
6341 \f
6342 /* A C expression for the size in bytes of the trampoline, as an integer.
6343 The template is:
6344
6345 setlo #0, <jmp_reg>
6346 setlo #0, <static_chain>
6347 sethi #0, <jmp_reg>
6348 sethi #0, <static_chain>
6349 jmpl @(gr0,<jmp_reg>) */
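/* With the template above the non-FDPIC trampoline is 5 instructions of 4
bytes each, i.e. 20 bytes; for FDPIC, space is instead reserved for an
8-byte function descriptor plus six 4-byte instructions, i.e. 32 bytes. */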
6350
6351 int
6352 frv_trampoline_size (void)
6353 {
6354 if (TARGET_FDPIC)
6355 /* Allocate room for the function descriptor and the lddi
6356 instruction. */
6357 return 8 + 6 * 4;
6358 return 5 /* instructions */ * 4 /* instruction size. */;
6359 }
6360
6361 \f
6362 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6363 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6364 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6365 should be passed to the function when it is called.
6366
6367 The template is:
6368
6369 setlo #0, <jmp_reg>
6370 setlo #0, <static_chain>
6371 sethi #0, <jmp_reg>
6372 sethi #0, <static_chain>
6373 jmpl @(gr0,<jmp_reg>) */
6374
6375 static void
6376 frv_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
6377 {
6378 rtx addr = XEXP (m_tramp, 0);
6379 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
6380 rtx sc_reg = force_reg (Pmode, static_chain);
6381
6382 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6383 FALSE, VOIDmode, 4,
6384 addr, Pmode,
6385 GEN_INT (frv_trampoline_size ()), SImode,
6386 fnaddr, Pmode,
6387 sc_reg, Pmode);
6388 }
6389
6390 \f
6391 /* Many machines have some registers that cannot be copied directly to or from
6392 memory or even from other types of registers. An example is the `MQ'
6393 register, which on most machines, can only be copied to or from general
6394 registers, but not memory. Some machines allow copying all registers to and
6395 from memory, but require a scratch register for stores to some memory
6396 locations (e.g., those with symbolic address on the RT, and those with
6397 certain symbolic address on the SPARC when compiling PIC). In some cases,
6398 both an intermediate and a scratch register are required.
6399
6400 You should define these macros to indicate to the reload phase that it may
6401 need to allocate at least one register for a reload in addition to the
6402 register to contain the data. Specifically, if copying X to a register
6403 RCLASS in MODE requires an intermediate register, you should define
6404 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6405 whose registers can be used as intermediate registers or scratch registers.
6406
6407 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6408 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6409 largest register class required. If the requirements for input and output
6410 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6411 instead of defining both macros identically.
6412
6413 The values returned by these macros are often `GENERAL_REGS'. Return
6414 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6415 to or from a register of RCLASS in MODE without requiring a scratch register.
6416 Do not define this macro if it would always return `NO_REGS'.
6417
6418 If a scratch register is required (either with or without an intermediate
6419 register), you should define patterns for `reload_inM' or `reload_outM', as
6420 required. These patterns, which will normally be implemented with a
6421 `define_expand', should be similar to the `movM' patterns, except that
6422 operand 2 is the scratch register.
6423
6424 Define constraints for the reload register and scratch register that contain
6425 a single register class. If the original reload register (whose class is
6426 RCLASS) can meet the constraint given in the pattern, the value returned by
6427 these macros is used for the class of the scratch register. Otherwise, two
6428 additional reload registers are required. Their classes are obtained from
6429 the constraints in the insn pattern.
6430
6431 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6432 either be in a hard register or in memory. Use `true_regnum' to find out;
6433 it will return -1 if the pseudo is in memory and the hard register number if
6434 it is in a register.
6435
6436 These macros should not be used in the case where a particular class of
6437 registers can only be copied to memory and not to another class of
6438 registers. In that case, secondary reload registers are not needed and
6439 would not be helpful. Instead, a stack location must be used to perform the
6440 copy and the `movM' pattern should use memory as an intermediate storage.
6441 This case often occurs between floating-point and general registers. */
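/* On the FRV this shows up in frv_secondary_reload_class below: accumulator
and accumulator-guard values reach the GPRs only through an FPR, a nonzero
constant destined for an FPR must first be loaded into a GPR, and the
condition and link registers are only reachable through GPRs. */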
6442
6443 enum reg_class
6444 frv_secondary_reload_class (enum reg_class rclass,
6445 enum machine_mode mode ATTRIBUTE_UNUSED,
6446 rtx x)
6447 {
6448 enum reg_class ret;
6449
6450 switch (rclass)
6451 {
6452 default:
6453 ret = NO_REGS;
6454 break;
6455
6456 /* Accumulators/Accumulator guard registers need to go through floating
6457 point registers. */
6458 case QUAD_REGS:
6459 case EVEN_REGS:
6460 case GPR_REGS:
6461 ret = NO_REGS;
6462 if (x && GET_CODE (x) == REG)
6463 {
6464 int regno = REGNO (x);
6465
6466 if (ACC_P (regno) || ACCG_P (regno))
6467 ret = FPR_REGS;
6468 }
6469 break;
6470
6471 /* Nonzero constants should be loaded into an FPR through a GPR. */
6472 case QUAD_FPR_REGS:
6473 case FEVEN_REGS:
6474 case FPR_REGS:
6475 if (x && CONSTANT_P (x) && !ZERO_P (x))
6476 ret = GPR_REGS;
6477 else
6478 ret = NO_REGS;
6479 break;
6480
6481 /* All of these types need gpr registers. */
6482 case ICC_REGS:
6483 case FCC_REGS:
6484 case CC_REGS:
6485 case ICR_REGS:
6486 case FCR_REGS:
6487 case CR_REGS:
6488 case LCR_REG:
6489 case LR_REG:
6490 ret = GPR_REGS;
6491 break;
6492
6493 /* The accumulators need fpr registers. */
6494 case ACC_REGS:
6495 case EVEN_ACC_REGS:
6496 case QUAD_ACC_REGS:
6497 case ACCG_REGS:
6498 ret = FPR_REGS;
6499 break;
6500 }
6501
6502 return ret;
6503 }
6504
6505 /* This hook exists to catch the case where secondary_reload_class() is
6506 called from init_reg_autoinc() in regclass.c - before the reload optabs
6507 have been initialised. */
6508
6509 static reg_class_t
6510 frv_secondary_reload (bool in_p, rtx x, reg_class_t reload_class_i,
6511 enum machine_mode reload_mode,
6512 secondary_reload_info * sri)
6513 {
6514 enum reg_class rclass = NO_REGS;
6515 enum reg_class reload_class = (enum reg_class) reload_class_i;
6516
6517 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6518 {
6519 sri->icode = sri->prev_sri->t_icode;
6520 return NO_REGS;
6521 }
6522
6523 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6524
6525 if (rclass != NO_REGS)
6526 {
6527 enum insn_code icode
6528 = direct_optab_handler (in_p ? reload_in_optab : reload_out_optab,
6529 reload_mode);
6530 if (icode == 0)
6531 {
6532 /* This happens when the reload_[in|out]_optabs have
6533 not been initialised. */
6534 sri->t_icode = CODE_FOR_nothing;
6535 return rclass;
6536 }
6537 }
6538
6539 /* Fall back to the default secondary reload handler. */
6540 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6541
6542 }
6543 \f
6544 /* Worker function for TARGET_CLASS_LIKELY_SPILLED_P. */
6545
6546 static bool
6547 frv_class_likely_spilled_p (reg_class_t rclass)
6548 {
6549 switch (rclass)
6550 {
6551 default:
6552 break;
6553
6554 case GR8_REGS:
6555 case GR9_REGS:
6556 case GR89_REGS:
6557 case FDPIC_FPTR_REGS:
6558 case FDPIC_REGS:
6559 case ICC_REGS:
6560 case FCC_REGS:
6561 case CC_REGS:
6562 case ICR_REGS:
6563 case FCR_REGS:
6564 case CR_REGS:
6565 case LCR_REG:
6566 case LR_REG:
6567 case SPR_REGS:
6568 case QUAD_ACC_REGS:
6569 case EVEN_ACC_REGS:
6570 case ACC_REGS:
6571 case ACCG_REGS:
6572 return true;
6573 }
6574
6575 return false;
6576 }
6577
6578 \f
6579 /* An expression for the alignment of a structure field FIELD if the
6580 alignment computed in the usual way is COMPUTED. GCC uses this
6581 value instead of the value in `BIGGEST_ALIGNMENT' or
6582 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6583
6584 /* A bit field is declared with type char, short, long or long long. Its
6585 maximum width is the number of bits in that type.
6586
6587 A bit field is assigned to a storage unit that is large enough to hold it
6588 and that is located at the smallest available address.
6589
6590 Consecutive bit fields are packed at consecutive bits within the same
6591 storage unit, with regard to the type, beginning with the MSB and continuing
6592 toward the LSB.
6593
6594 If a field to be assigned would straddle a boundary of its bit-field type,
6595 its assignment is completed by aligning it to a boundary suitable for that
6596 type.
6597
6598 When a bit field with a width of 0 is declared, allocation is forced to
6599 the next storage unit.
6600
6601 e.g.)
6602 struct {
6603 int a:2;
6604 int b:6;
6605 char c:4;
6606 int d:10;
6607 int :0;
6608 int f:2;
6609 } x;
6610
6611 +0 +1 +2 +3
6612 &x 00000000 00000000 00000000 00000000
6613 MLM----L
6614 a b
6615 &x+4 00000000 00000000 00000000 00000000
6616 M--L
6617 c
6618 &x+8 00000000 00000000 00000000 00000000
6619 M----------L
6620 d
6621 &x+12 00000000 00000000 00000000 00000000
6622 ML
6623 f
6624 */
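/* For instance, in the layout above "c" follows "b" but has a different
bit-field type (char vs. int), so the code below raises its alignment to
the larger of the two type alignments; this is consistent with "c" starting
a new word at &x+4 and "d" starting another at &x+8. */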
6625
6626 int
6627 frv_adjust_field_align (tree field, int computed)
6628 {
6629 /* Make sure that the bitfield is not wider than the type. */
6630 if (DECL_BIT_FIELD (field)
6631 && !DECL_ARTIFICIAL (field))
6632 {
6633 tree parent = DECL_CONTEXT (field);
6634 tree prev = NULL_TREE;
6635 tree cur;
6636
6637 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = DECL_CHAIN (cur))
6638 {
6639 if (TREE_CODE (cur) != FIELD_DECL)
6640 continue;
6641
6642 prev = cur;
6643 }
6644
6645 gcc_assert (cur);
6646
6647 /* If this isn't a :0 field and the previous element is also a bitfield,
6648 see if the type is different; if so, we will need to align the
6649 bit-field to the next boundary. */
6650 if (prev
6651 && ! DECL_PACKED (field)
6652 && ! integer_zerop (DECL_SIZE (field))
6653 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6654 {
6655 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6656 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6657 computed = (prev_align > cur_align) ? prev_align : cur_align;
6658 }
6659 }
6660
6661 return computed;
6662 }
6663
6664 \f
6665 /* A C expression that is nonzero if it is permissible to store a value of mode
6666 MODE in hard register number REGNO (or in several registers starting with
6667 that one). For a machine where all registers are equivalent, a suitable
6668 definition is
6669
6670 #define HARD_REGNO_MODE_OK(REGNO, MODE) 1
6671
6672 It is not necessary for this macro to check for the numbers of fixed
6673 registers, because the allocation mechanism considers them to be always
6674 occupied.
6675
6676 On some machines, double-precision values must be kept in even/odd register
6677 pairs. The way to implement that is to define this macro to reject odd
6678 register numbers for such modes.
6679
6680 The minimum requirement for a mode to be OK in a register is that the
6681 `movMODE' instruction pattern support moves between the register and any
6682 other hard register for which the mode is OK; and that moving a value into
6683 the register and back out not alter it.
6684
6685 Since the same instruction used to move `SImode' will work for all narrower
6686 integer modes, it is not necessary on any machine for `HARD_REGNO_MODE_OK'
6687 to distinguish between these modes, provided you define patterns `movhi',
6688 etc., to take advantage of this. This is useful because of the interaction
6689 between `HARD_REGNO_MODE_OK' and `MODES_TIEABLE_P'; it is very desirable for
6690 all integer modes to be tieable.
6691
6692 Many machines have special registers for floating point arithmetic. Often
6693 people assume that floating point machine modes are allowed only in floating
6694 point registers. This is not true. Any registers that can hold integers
6695 can safely *hold* a floating point machine mode, whether or not floating
6696 arithmetic can be done on it in those registers. Integer move instructions
6697 can be used to move the values.
6698
6699 On some machines, though, the converse is true: fixed-point machine modes
6700 may not go in floating registers. This is true if the floating registers
6701 normalize any value stored in them, because storing a non-floating value
6702 there would garble it. In this case, `HARD_REGNO_MODE_OK' should reject
6703 fixed-point machine modes in floating registers. But if the floating
6704 registers do not automatically normalize, if you can store any bit pattern
6705 in one and retrieve it unchanged without a trap, then any machine mode may
6706 go in a floating register, so you can define this macro to say so.
6707
6708 The primary significance of special floating registers is rather that they
6709 are the registers acceptable in floating point arithmetic instructions.
6710 However, this is of no concern to `HARD_REGNO_MODE_OK'. You handle it by
6711 writing the proper constraints for those instructions.
6712
6713 On some machines, the floating registers are especially slow to access, so
6714 that it is better to store a value in a stack frame than in such a register
6715 if floating point arithmetic is not being done. As long as the floating
6716 registers are not in class `GENERAL_REGS', they will not be used unless some
6717 pattern's constraint asks for one. */
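/* For example, on this port a DImode value must start at an even offset
within the GPR, FPR or accumulator bank, a 4-byte value held in the
accumulator guards must start at a guard whose offset from ACCG_FIRST is a
multiple of four, and the special purpose registers accept SImode only. */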
6718
6719 int
6720 frv_hard_regno_mode_ok (int regno, enum machine_mode mode)
6721 {
6722 int base;
6723 int mask;
6724
6725 switch (mode)
6726 {
6727 case CCmode:
6728 case CC_UNSmode:
6729 case CC_NZmode:
6730 return ICC_P (regno) || GPR_P (regno);
6731
6732 case CC_CCRmode:
6733 return CR_P (regno) || GPR_P (regno);
6734
6735 case CC_FPmode:
6736 return FCC_P (regno) || GPR_P (regno);
6737
6738 default:
6739 break;
6740 }
6741
6742 /* Set BASE to the first register in REGNO's class. Set MASK to the
6743 bits that must be clear in (REGNO - BASE) for the register to be
6744 well-aligned. */
6745 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6746 {
6747 if (ACCG_P (regno))
6748 {
6749 /* ACCGs store one byte. Two-byte quantities must start in
6750 even-numbered registers, four-byte ones in registers whose
6751 numbers are divisible by four, and so on. */
6752 base = ACCG_FIRST;
6753 mask = GET_MODE_SIZE (mode) - 1;
6754 }
6755 else
6756 {
6757 /* The other registers store one word. */
6758 if (GPR_P (regno) || regno == AP_FIRST)
6759 base = GPR_FIRST;
6760
6761 else if (FPR_P (regno))
6762 base = FPR_FIRST;
6763
6764 else if (ACC_P (regno))
6765 base = ACC_FIRST;
6766
6767 else if (SPR_P (regno))
6768 return mode == SImode;
6769
6770 /* No other registers can hold values of these modes. */
6771 else
6772 return 0;
6773
6774 /* Anything smaller than an SI is OK in any word-sized register. */
6775 if (GET_MODE_SIZE (mode) < 4)
6776 return 1;
6777
6778 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6779 }
6780 return (((regno - base) & mask) == 0);
6781 }
6782
6783 return 0;
6784 }
6785
6786 \f
6787 /* A C expression for the number of consecutive hard registers, starting at
6788 register number REGNO, required to hold a value of mode MODE.
6789
6790 On a machine where all registers are exactly one word, a suitable definition
6791 of this macro is
6792
6793 #define HARD_REGNO_NREGS(REGNO, MODE) \
6794 ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
6795 / UNITS_PER_WORD)) */
6796
6797 /* On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6798 that we can build the appropriate instructions to properly reload the
6799 values. Also, make the byte-sized accumulator guards use one guard
6800 for each byte. */
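/* For example, an 8-byte (DImode) value occupies two word-sized registers
but eight accumulator guards, since each ACCG holds a single byte. */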
6801
6802 int
6803 frv_hard_regno_nregs (int regno, enum machine_mode mode)
6804 {
6805 if (ACCG_P (regno))
6806 return GET_MODE_SIZE (mode);
6807 else
6808 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6809 }
6810
6811 \f
6812 /* A C expression for the maximum number of consecutive registers of
6813 class RCLASS needed to hold a value of mode MODE.
6814
6815 This is closely related to the macro `HARD_REGNO_NREGS'. In fact, the value
6816 of the macro `CLASS_MAX_NREGS (RCLASS, MODE)' should be the maximum value of
6817 `HARD_REGNO_NREGS (REGNO, MODE)' for all REGNO values in the class RCLASS.
6818
6819 This macro helps control the handling of multiple-word values in
6820 the reload pass.
6821
6822 This declaration is required. */
6823
6824 int
6825 frv_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
6826 {
6827 if (rclass == ACCG_REGS)
6828 /* An N-byte value requires N accumulator guards. */
6829 return GET_MODE_SIZE (mode);
6830 else
6831 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6832 }
6833
6834 \f
6835 /* A C expression that is nonzero if X is a legitimate constant for an
6836 immediate operand on the target machine. You can assume that X satisfies
6837 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6838 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6839
6840 int
6841 frv_legitimate_constant_p (rtx x)
6842 {
6843 enum machine_mode mode = GET_MODE (x);
6844
6845 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6846 means that the move expanders will be expected to deal with most
6847 kinds of constant, regardless of what we return here.
6848
6849 However, among its other duties, LEGITIMATE_CONSTANT_P decides whether
6850 a constant can be entered into reg_equiv_constant[]. If we return true,
6851 reload can create new instances of the constant whenever it likes.
6852
6853 The idea is therefore to accept as many constants as possible (to give
6854 reload more freedom) while rejecting constants that can only be created
6855 at certain times. In particular, anything with a symbolic component will
6856 require use of the pseudo FDPIC register, which is only available before
6857 reload. */
6858 if (TARGET_FDPIC)
6859 return LEGITIMATE_PIC_OPERAND_P (x);
6860
6861 /* All of the integer constants are ok. */
6862 if (GET_CODE (x) != CONST_DOUBLE)
6863 return TRUE;
6864
6865 /* Double integer constants are ok. */
6866 if (mode == VOIDmode || mode == DImode)
6867 return TRUE;
6868
6869 /* 0 is always ok. */
6870 if (x == CONST0_RTX (mode))
6871 return TRUE;
6872
6873 /* If floating point is just emulated, allow any constant, since it will be
6874 constructed in the GPRs. */
6875 if (!TARGET_HAS_FPRS)
6876 return TRUE;
6877
6878 if (mode == DFmode && !TARGET_DOUBLE)
6879 return TRUE;
6880
6881 /* Otherwise store the constant away and do a load. */
6882 return FALSE;
6883 }
6884
6885 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6886 CC_NZ for comparisons against zero in which a single Z or N flag test
6887 is enough, CC_UNS for other unsigned comparisons, and CC for other
6888 signed comparisons. */
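/* For example, (lt x (const_int 0)) needs only the N flag and gets CC_NZmode,
(ltu x y) with nonzero Y gets CC_UNSmode, and a signed (gt x y) falls
through to plain CCmode. */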
6889
6890 enum machine_mode
6891 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6892 {
6893 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6894 return CC_FPmode;
6895
6896 switch (code)
6897 {
6898 case EQ:
6899 case NE:
6900 case LT:
6901 case GE:
6902 return y == const0_rtx ? CC_NZmode : CCmode;
6903
6904 case GTU:
6905 case GEU:
6906 case LTU:
6907 case LEU:
6908 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6909
6910 default:
6911 return CCmode;
6912 }
6913 }
6914 \f
6915
6916 /* Worker function for TARGET_REGISTER_MOVE_COST. */
6917
6918 #define HIGH_COST 40
6919 #define MEDIUM_COST 3
6920 #define LOW_COST 1
6921
6922 static int
6923 frv_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
6924 reg_class_t from, reg_class_t to)
6925 {
6926 switch (from)
6927 {
6928 default:
6929 break;
6930
6931 case QUAD_REGS:
6932 case EVEN_REGS:
6933 case GPR_REGS:
6934 switch (to)
6935 {
6936 default:
6937 break;
6938
6939 case QUAD_REGS:
6940 case EVEN_REGS:
6941 case GPR_REGS:
6942 return LOW_COST;
6943
6944 case FEVEN_REGS:
6945 case FPR_REGS:
6946 return LOW_COST;
6947
6948 case LCR_REG:
6949 case LR_REG:
6950 case SPR_REGS:
6951 return LOW_COST;
6952 }
6953
6954 case FEVEN_REGS:
6955 case FPR_REGS:
6956 switch (to)
6957 {
6958 default:
6959 break;
6960
6961 case QUAD_REGS:
6962 case EVEN_REGS:
6963 case GPR_REGS:
6964 case ACC_REGS:
6965 case EVEN_ACC_REGS:
6966 case QUAD_ACC_REGS:
6967 case ACCG_REGS:
6968 return MEDIUM_COST;
6969
6970 case FEVEN_REGS:
6971 case FPR_REGS:
6972 return LOW_COST;
6973 }
6974
6975 case LCR_REG:
6976 case LR_REG:
6977 case SPR_REGS:
6978 switch (to)
6979 {
6980 default:
6981 break;
6982
6983 case QUAD_REGS:
6984 case EVEN_REGS:
6985 case GPR_REGS:
6986 return MEDIUM_COST;
6987 }
6988
6989 case ACC_REGS:
6990 case EVEN_ACC_REGS:
6991 case QUAD_ACC_REGS:
6992 case ACCG_REGS:
6993 switch (to)
6994 {
6995 default:
6996 break;
6997
6998 case FEVEN_REGS:
6999 case FPR_REGS:
7000 return MEDIUM_COST;
7001
7002 }
7003 }
7004
7005 return HIGH_COST;
7006 }
7007
7008 /* Worker function for TARGET_MEMORY_MOVE_COST. */
7009
7010 static int
7011 frv_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7012 reg_class_t rclass ATTRIBUTE_UNUSED,
7013 bool in ATTRIBUTE_UNUSED)
7014 {
7015 return 4;
7016 }
7017
7018 \f
7019 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
7020 use ".picptr" to generate safe relocations for PIC code. We also
7021 need a fixup entry for aligned (non-debugging) code. */
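/* For an aligned word that refers to a symbol under -fpic (not FDPIC), the
output below is roughly

.LCP0:
<FIXUP_SECTION_ASM_OP>
.picptr .LCP0
.previous
.picptr sym

that is, a label on the word, an entry for that label in the fixup section,
and then the ".picptr" word itself. (The ".LCP0" name and the exact section
directive are only illustrative; the directive comes from
FIXUP_SECTION_ASM_OP.) */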
7022
7023 static bool
7024 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
7025 {
7026 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
7027 {
7028 if (GET_CODE (value) == CONST
7029 || GET_CODE (value) == SYMBOL_REF
7030 || GET_CODE (value) == LABEL_REF)
7031 {
7032 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
7033 && SYMBOL_REF_FUNCTION_P (value))
7034 {
7035 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
7036 output_addr_const (asm_out_file, value);
7037 fputs (")\n", asm_out_file);
7038 return true;
7039 }
7040 else if (TARGET_FDPIC && GET_CODE (value) == CONST
7041 && frv_function_symbol_referenced_p (value))
7042 return false;
7043 if (aligned_p && !TARGET_FDPIC)
7044 {
7045 static int label_num = 0;
7046 char buf[256];
7047 const char *p;
7048
7049 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
7050 p = (* targetm.strip_name_encoding) (buf);
7051
7052 fprintf (asm_out_file, "%s:\n", p);
7053 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
7054 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
7055 fprintf (asm_out_file, "\t.previous\n");
7056 }
7057 assemble_integer_with_op ("\t.picptr\t", value);
7058 return true;
7059 }
7060 if (!aligned_p)
7061 {
7062 /* We've set the unaligned SI op to NULL, so we always have to
7063 handle the unaligned case here. */
7064 assemble_integer_with_op ("\t.4byte\t", value);
7065 return true;
7066 }
7067 }
7068 return default_assemble_integer (value, size, aligned_p);
7069 }
7070
7071 /* Function to set up the backend function structure. */
7072
7073 static struct machine_function *
7074 frv_init_machine_status (void)
7075 {
7076 return ggc_alloc_cleared_machine_function ();
7077 }
7078 \f
7079 /* Implement TARGET_SCHED_ISSUE_RATE. */
7080
7081 int
7082 frv_issue_rate (void)
7083 {
7084 if (!TARGET_PACK)
7085 return 1;
7086
7087 switch (frv_cpu_type)
7088 {
7089 default:
7090 case FRV_CPU_FR300:
7091 case FRV_CPU_SIMPLE:
7092 return 1;
7093
7094 case FRV_CPU_FR400:
7095 case FRV_CPU_FR405:
7096 case FRV_CPU_FR450:
7097 return 2;
7098
7099 case FRV_CPU_GENERIC:
7100 case FRV_CPU_FR500:
7101 case FRV_CPU_TOMCAT:
7102 return 4;
7103
7104 case FRV_CPU_FR550:
7105 return 8;
7106 }
7107 }
7108 \f
7109 /* A for_each_rtx callback. If X refers to an accumulator, return
7110 ACC_GROUP_ODD if bit 2 of the register number is set and
7111 ACC_GROUP_EVEN if it is clear. Return 0 (ACC_GROUP_NONE)
7112 otherwise. */
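/* That is, an accumulator or guard whose offset from the first register of
its bank has bit 2 clear (offsets 0-3, 8-11, ...) is reported as
ACC_GROUP_EVEN, and one with bit 2 set (offsets 4-7, 12-15, ...) as
ACC_GROUP_ODD. */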
7113
7114 static int
7115 frv_acc_group_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
7116 {
7117 if (REG_P (*x))
7118 {
7119 if (ACC_P (REGNO (*x)))
7120 return (REGNO (*x) - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7121 if (ACCG_P (REGNO (*x)))
7122 return (REGNO (*x) - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7123 }
7124 return 0;
7125 }
7126
7127 /* Return the value of INSN's acc_group attribute. */
7128
7129 int
7130 frv_acc_group (rtx insn)
7131 {
7132 /* This distinction only applies to the FR550 packing constraints. */
7133 if (frv_cpu_type != FRV_CPU_FR550)
7134 return ACC_GROUP_NONE;
7135 return for_each_rtx (&PATTERN (insn), frv_acc_group_1, 0);
7136 }
7137
7138 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
7139 INSN will try to claim first. Since this value depends only on the
7140 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
7141
7142 static unsigned int
7143 frv_insn_unit (rtx insn)
7144 {
7145 enum attr_type type;
7146
7147 type = get_attr_type (insn);
7148 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
7149 {
7150 /* We haven't seen this type of instruction before. */
7151 state_t state;
7152 unsigned int unit;
7153
7154 /* Issue the instruction on its own to see which unit it prefers. */
7155 state = alloca (state_size ());
7156 state_reset (state);
7157 state_transition (state, insn);
7158
7159 /* Find out which unit was taken. */
7160 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
7161 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
7162 break;
7163
7164 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
7165
7166 frv_type_to_unit[type] = unit;
7167 }
7168 return frv_type_to_unit[type];
7169 }
7170
7171 /* Return true if INSN issues to a branch unit. */
7172
7173 static bool
7174 frv_issues_to_branch_unit_p (rtx insn)
7175 {
7176 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
7177 }
7178 \f
7179 /* The current state of the packing pass, implemented by frv_pack_insns. */
7180 static struct {
7181 /* The state of the pipeline DFA. */
7182 state_t dfa_state;
7183
7184 /* Which hardware registers are set within the current packet,
7185 and the conditions under which they are set. */
7186 regstate_t regstate[FIRST_PSEUDO_REGISTER];
7187
7188 /* The memory locations that have been modified so far in this
7189 packet. MEM is the memref and COND is the regstate_t condition
7190 under which it is set. */
7191 struct {
7192 rtx mem;
7193 regstate_t cond;
7194 } mems[2];
7195
7196 /* The number of valid entries in MEMS. The value is larger than
7197 ARRAY_SIZE (mems) if there were too many mems to record. */
7198 unsigned int num_mems;
7199
7200 /* The maximum number of instructions that can be packed together. */
7201 unsigned int issue_rate;
7202
7203 /* The instructions in the packet, partitioned into groups. */
7204 struct frv_packet_group {
7205 /* How many instructions in the packet belong to this group. */
7206 unsigned int num_insns;
7207
7208 /* A list of the instructions that belong to this group, in the order
7209 they appear in the rtl stream. */
7210 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7211
7212 /* The contents of INSNS after they have been sorted into the correct
7213 assembly-language order. Element X issues to unit X. The list may
7214 contain extra nops. */
7215 rtx sorted[ARRAY_SIZE (frv_unit_codes)];
7216
7217 /* The member of frv_nops[] to use in sorted[]. */
7218 rtx nop;
7219 } groups[NUM_GROUPS];
7220
7221 /* The instructions that make up the current packet. */
7222 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7223 unsigned int num_insns;
7224 } frv_packet;
7225
7226 /* Return the regstate_t flags for the given COND_EXEC condition.
7227 Abort if the condition isn't in the right form. */
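/* For example, a test (ne (reg:CC_CCR crN) (const_int 0)) maps to the index
of crN within the CR bank combined with REGSTATE_IF_TRUE, while the
corresponding EQ test yields REGSTATE_IF_FALSE instead. */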
7228
7229 static int
7230 frv_cond_flags (rtx cond)
7231 {
7232 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7233 && GET_CODE (XEXP (cond, 0)) == REG
7234 && CR_P (REGNO (XEXP (cond, 0)))
7235 && XEXP (cond, 1) == const0_rtx);
7236 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7237 | (GET_CODE (cond) == NE
7238 ? REGSTATE_IF_TRUE
7239 : REGSTATE_IF_FALSE));
7240 }
7241
7242
7243 /* Return true if something accessed under condition COND2 can
7244 conflict with something written under condition COND1. */
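/* The only case treated as conflict-free is two accesses that are both
conditional on the same CR but with opposite senses (one IF_TRUE, one
IF_FALSE), since at most one of them can actually execute. */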
7245
7246 static bool
7247 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7248 {
7249 /* If either reference was unconditional, we have a conflict. */
7250 if ((cond1 & REGSTATE_IF_EITHER) == 0
7251 || (cond2 & REGSTATE_IF_EITHER) == 0)
7252 return true;
7253
7254 /* The references might conflict if they were controlled by
7255 different CRs. */
7256 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7257 return true;
7258
7259 /* They definitely conflict if they are controlled by the
7260 same condition. */
7261 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7262 return true;
7263
7264 return false;
7265 }
7266
7267
7268 /* A for_each_rtx callback. Return 1 if *X depends on an instruction in
7269 the current packet. DATA points to a regstate_t that describes the
7270 condition under which *X might be set or used. */
7271
7272 static int
7273 frv_registers_conflict_p_1 (rtx *x, void *data)
7274 {
7275 unsigned int regno, i;
7276 regstate_t cond;
7277
7278 cond = *(regstate_t *) data;
7279
7280 if (GET_CODE (*x) == REG)
7281 FOR_EACH_REGNO (regno, *x)
7282 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7283 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7284 return 1;
7285
7286 if (GET_CODE (*x) == MEM)
7287 {
7288 /* If we ran out of memory slots, assume a conflict. */
7289 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7290 return 1;
7291
7292 /* Check for output or true dependencies with earlier MEMs. */
7293 for (i = 0; i < frv_packet.num_mems; i++)
7294 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7295 {
7296 if (true_dependence (frv_packet.mems[i].mem, VOIDmode,
7297 *x, rtx_varies_p))
7298 return 1;
7299
7300 if (output_dependence (frv_packet.mems[i].mem, *x))
7301 return 1;
7302 }
7303 }
7304
7305 /* The return values of calls aren't significant: they describe
7306 the effect of the call as a whole, not of the insn itself. */
7307 if (GET_CODE (*x) == SET && GET_CODE (SET_SRC (*x)) == CALL)
7308 {
7309 if (for_each_rtx (&SET_SRC (*x), frv_registers_conflict_p_1, data))
7310 return 1;
7311 return -1;
7312 }
7313
7314 /* Check subexpressions. */
7315 return 0;
7316 }
7317
7318
7319 /* Return true if something in X might depend on an instruction
7320 in the current packet. */
7321
7322 static bool
7323 frv_registers_conflict_p (rtx x)
7324 {
7325 regstate_t flags;
7326
7327 flags = 0;
7328 if (GET_CODE (x) == COND_EXEC)
7329 {
7330 if (for_each_rtx (&XEXP (x, 0), frv_registers_conflict_p_1, &flags))
7331 return true;
7332
7333 flags |= frv_cond_flags (XEXP (x, 0));
7334 x = XEXP (x, 1);
7335 }
7336 return for_each_rtx (&x, frv_registers_conflict_p_1, &flags);
7337 }
7338
7339
7340 /* A note_stores callback. DATA points to the regstate_t condition
7341 under which X is modified. Update FRV_PACKET accordingly. */
7342
7343 static void
7344 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7345 {
7346 unsigned int regno;
7347
7348 if (GET_CODE (x) == REG)
7349 FOR_EACH_REGNO (regno, x)
7350 frv_packet.regstate[regno] |= *(regstate_t *) data;
7351
7352 if (GET_CODE (x) == MEM)
7353 {
7354 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7355 {
7356 frv_packet.mems[frv_packet.num_mems].mem = x;
7357 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7358 }
7359 frv_packet.num_mems++;
7360 }
7361 }
7362
7363
7364 /* Update the register state information for an instruction whose
7365 body is X. */
7366
7367 static void
7368 frv_registers_update (rtx x)
7369 {
7370 regstate_t flags;
7371
7372 flags = REGSTATE_MODIFIED;
7373 if (GET_CODE (x) == COND_EXEC)
7374 {
7375 flags |= frv_cond_flags (XEXP (x, 0));
7376 x = XEXP (x, 1);
7377 }
7378 note_stores (x, frv_registers_update_1, &flags);
7379 }
7380
7381
7382 /* Initialize frv_packet for the start of a new packet. */
7383
7384 static void
7385 frv_start_packet (void)
7386 {
7387 enum frv_insn_group group;
7388
7389 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7390 frv_packet.num_mems = 0;
7391 frv_packet.num_insns = 0;
7392 for (group = 0; group < NUM_GROUPS; group++)
7393 frv_packet.groups[group].num_insns = 0;
7394 }
7395
7396
7397 /* Likewise for the start of a new basic block. */
7398
7399 static void
7400 frv_start_packet_block (void)
7401 {
7402 state_reset (frv_packet.dfa_state);
7403 frv_start_packet ();
7404 }
7405
7406
7407 /* Finish the current packet, if any, and start a new one. Call
7408 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7409
7410 static void
7411 frv_finish_packet (void (*handle_packet) (void))
7412 {
7413 if (frv_packet.num_insns > 0)
7414 {
7415 handle_packet ();
7416 state_transition (frv_packet.dfa_state, 0);
7417 frv_start_packet ();
7418 }
7419 }
7420
7421
7422 /* Return true if INSN can be added to the current packet. Update
7423 the DFA state on success. */
7424
7425 static bool
7426 frv_pack_insn_p (rtx insn)
7427 {
7428 /* See if the packet is already as long as it can be. */
7429 if (frv_packet.num_insns == frv_packet.issue_rate)
7430 return false;
7431
7432 /* If the scheduler thought that an instruction should start a packet,
7433 it's usually a good idea to believe it. It knows much more about
7434 the latencies than we do.
7435
7436 There are some exceptions though:
7437
7438 - Conditional instructions are scheduled on the assumption that
7439 they will be executed. This is usually a good thing, since it
7440 tends to avoid unnecessary stalls in the conditional code.
7441 But we want to pack conditional instructions as tightly as
7442 possible, in order to optimize the case where they aren't
7443 executed.
7444
7445 - The scheduler will always put branches on their own, even
7446 if there's no real dependency.
7447
7448 - There's no point putting a call in its own packet unless
7449 we have to. */
7450 if (frv_packet.num_insns > 0
7451 && GET_CODE (insn) == INSN
7452 && GET_MODE (insn) == TImode
7453 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7454 return false;
7455
7456 /* Check for register conflicts. Don't do this for setlo since any
7457 conflict will be with the partnering sethi, with which it can
7458 be packed. */
7459 if (get_attr_type (insn) != TYPE_SETLO)
7460 if (frv_registers_conflict_p (PATTERN (insn)))
7461 return false;
7462
7463 return state_transition (frv_packet.dfa_state, insn) < 0;
7464 }
7465
7466
7467 /* Add instruction INSN to the current packet. */
7468
7469 static void
7470 frv_add_insn_to_packet (rtx insn)
7471 {
7472 struct frv_packet_group *packet_group;
7473
7474 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7475 packet_group->insns[packet_group->num_insns++] = insn;
7476 frv_packet.insns[frv_packet.num_insns++] = insn;
7477
7478 frv_registers_update (PATTERN (insn));
7479 }
7480
7481
7482 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7483 packet ends in a branch or call, insert the nop before it, otherwise
7484 add to the end. */
7485
7486 static void
7487 frv_insert_nop_in_packet (rtx insn)
7488 {
7489 struct frv_packet_group *packet_group;
7490 rtx last;
7491
7492 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7493 last = frv_packet.insns[frv_packet.num_insns - 1];
7494 if (GET_CODE (last) != INSN)
7495 {
7496 insn = emit_insn_before (PATTERN (insn), last);
7497 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7498 frv_packet.insns[frv_packet.num_insns++] = last;
7499 }
7500 else
7501 {
7502 insn = emit_insn_after (PATTERN (insn), last);
7503 frv_packet.insns[frv_packet.num_insns++] = insn;
7504 }
7505 packet_group->insns[packet_group->num_insns++] = insn;
7506 }
7507
7508
7509 /* If packing is enabled, divide the instructions into packets and
7510 return true. Call HANDLE_PACKET for each complete packet. */
7511
7512 static bool
7513 frv_for_each_packet (void (*handle_packet) (void))
7514 {
7515 rtx insn, next_insn;
7516
7517 frv_packet.issue_rate = frv_issue_rate ();
7518
7519 /* Early exit if we don't want to pack insns. */
7520 if (!optimize
7521 || !flag_schedule_insns_after_reload
7522 || !TARGET_VLIW_BRANCH
7523 || frv_packet.issue_rate == 1)
7524 return false;
7525
7526 /* Set up the initial packing state. */
7527 dfa_start ();
7528 frv_packet.dfa_state = alloca (state_size ());
7529
7530 frv_start_packet_block ();
7531 for (insn = get_insns (); insn != 0; insn = next_insn)
7532 {
7533 enum rtx_code code;
7534 bool eh_insn_p;
7535
7536 code = GET_CODE (insn);
7537 next_insn = NEXT_INSN (insn);
7538
7539 if (code == CODE_LABEL)
7540 {
7541 frv_finish_packet (handle_packet);
7542 frv_start_packet_block ();
7543 }
7544
7545 if (INSN_P (insn))
7546 switch (GET_CODE (PATTERN (insn)))
7547 {
7548 case USE:
7549 case CLOBBER:
7550 case ADDR_VEC:
7551 case ADDR_DIFF_VEC:
7552 break;
7553
7554 default:
7555 /* Calls mustn't be packed on a TOMCAT. */
7556 if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
7557 frv_finish_packet (handle_packet);
7558
7559 /* Since the last instruction in a packet determines the EH
7560 region, any exception-throwing instruction must come at
7561 the end of the reordered packet. Insns that issue to a
7562 branch unit are bound to come last; for others it's
7563 too hard to predict. */
7564 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7565 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7566 frv_finish_packet (handle_packet);
7567
7568 /* Finish the current packet if we can't add INSN to it.
7569 Simulate cycles until INSN is ready to issue. */
7570 if (!frv_pack_insn_p (insn))
7571 {
7572 frv_finish_packet (handle_packet);
7573 while (!frv_pack_insn_p (insn))
7574 state_transition (frv_packet.dfa_state, 0);
7575 }
7576
7577 /* Add the instruction to the packet. */
7578 frv_add_insn_to_packet (insn);
7579
7580 /* Calls and jumps end a packet, as do insns that throw
7581 an exception. */
7582 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7583 frv_finish_packet (handle_packet);
7584 break;
7585 }
7586 }
7587 frv_finish_packet (handle_packet);
7588 dfa_finish ();
7589 return true;
7590 }
7591 \f
7592 /* Subroutine of frv_sort_insn_group. We are trying to sort
7593 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7594 language order. We have already picked a new position for
7595 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7596 These instructions will occupy elements [0, LOWER_SLOT) and
7597 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7598 the DFA state after issuing these instructions.
7599
7600 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7601 of the unused instructions. Return true if one such permutation gives
7602 a valid ordering, leaving the successful permutation in sorted[].
7603 Do not modify sorted[] until a valid permutation is found. */
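/* As an illustrative sketch (the insns A, B and C here are hypothetical,
   not taken from any real packet): with NUM_INSNS == 3, LOWER_SLOT == 0,
   UPPER_SLOT == 3 and ISSUED == 0, the loop below tries C, then B, then A
   in the highest slot, NTH_UNIT (GROUP, 2).  For each candidate that the
   DFA accepts there, it recurses with UPPER_SLOT == 2 to fill the
   remaining slots; the first recursion to reach LOWER_SLOT == UPPER_SLOT
   fixes the winning order in sorted[] on the way back out.  */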
7604
7605 static bool
7606 frv_sort_insn_group_1 (enum frv_insn_group group,
7607 unsigned int lower_slot, unsigned int upper_slot,
7608 unsigned int issued, unsigned int num_insns,
7609 state_t state)
7610 {
7611 struct frv_packet_group *packet_group;
7612 unsigned int i;
7613 state_t test_state;
7614 size_t dfa_size;
7615 rtx insn;
7616
7617 /* Early success if we've filled all the slots. */
7618 if (lower_slot == upper_slot)
7619 return true;
7620
7621 packet_group = &frv_packet.groups[group];
7622 dfa_size = state_size ();
7623 test_state = alloca (dfa_size);
7624
7625 /* Try issuing each unused instruction. */
7626 for (i = num_insns - 1; i + 1 != 0; i--)
7627 if (~issued & (1 << i))
7628 {
7629 insn = packet_group->sorted[i];
7630 memcpy (test_state, state, dfa_size);
7631 if (state_transition (test_state, insn) < 0
7632 && cpu_unit_reservation_p (test_state,
7633 NTH_UNIT (group, upper_slot - 1))
7634 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7635 issued | (1 << i), num_insns,
7636 test_state))
7637 {
7638 packet_group->sorted[upper_slot - 1] = insn;
7639 return true;
7640 }
7641 }
7642
7643 return false;
7644 }
7645
7646 /* Compare two instructions by their frv_insn_unit. */
7647
7648 static int
7649 frv_compare_insns (const void *first, const void *second)
7650 {
7651 const rtx *const insn1 = (rtx const *) first,
7652 *const insn2 = (rtx const *) second;
7653 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7654 }
7655
7656 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7657 and sort it into assembly language order. See frv.md for a description of
7658 the algorithm. */
7659
7660 static void
7661 frv_sort_insn_group (enum frv_insn_group group)
7662 {
7663 struct frv_packet_group *packet_group;
7664 unsigned int first, i, nop, max_unit, num_slots;
7665 state_t state, test_state;
7666 size_t dfa_size;
7667
7668 packet_group = &frv_packet.groups[group];
7669
7670 /* Assume no nop is needed. */
7671 packet_group->nop = 0;
7672
7673 if (packet_group->num_insns == 0)
7674 return;
7675
7676 /* Copy insns[] to sorted[]. */
7677 memcpy (packet_group->sorted, packet_group->insns,
7678 sizeof (rtx) * packet_group->num_insns);
7679
7680 /* Sort sorted[] by the unit that each insn tries to take first. */
7681 if (packet_group->num_insns > 1)
7682 qsort (packet_group->sorted, packet_group->num_insns,
7683 sizeof (rtx), frv_compare_insns);
7684
7685 /* That's always enough for branch and control insns. */
7686 if (group == GROUP_B || group == GROUP_C)
7687 return;
7688
7689 dfa_size = state_size ();
7690 state = alloca (dfa_size);
7691 test_state = alloca (dfa_size);
7692
7693 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7694 consecutively and such that the DFA takes unit X when sorted[X]
7695 is added. Set STATE to the new DFA state. */
7696 state_reset (test_state);
7697 for (first = 0; first < packet_group->num_insns; first++)
7698 {
7699 memcpy (state, test_state, dfa_size);
7700 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7701 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7702 break;
7703 }
7704
7705 /* If all the instructions issued in ascending order, we're done. */
7706 if (first == packet_group->num_insns)
7707 return;
7708
7709 /* Add nops to the end of sorted[] and try each permutation until
7710 we find one that works. */
7711 for (nop = 0; nop < frv_num_nops; nop++)
7712 {
7713 max_unit = frv_insn_unit (frv_nops[nop]);
7714 if (frv_unit_groups[max_unit] == group)
7715 {
7716 packet_group->nop = frv_nops[nop];
7717 num_slots = UNIT_NUMBER (max_unit) + 1;
7718 for (i = packet_group->num_insns; i < num_slots; i++)
7719 packet_group->sorted[i] = frv_nops[nop];
7720 if (frv_sort_insn_group_1 (group, first, num_slots,
7721 (1 << first) - 1, num_slots, state))
7722 return;
7723 }
7724 }
7725 gcc_unreachable ();
7726 }
7727 \f
7728 /* Sort the current packet into assembly-language order. Set packing
7729 flags as appropriate. */
7730
7731 static void
7732 frv_reorder_packet (void)
7733 {
7734 unsigned int cursor[NUM_GROUPS];
7735 rtx insns[ARRAY_SIZE (frv_unit_groups)];
7736 unsigned int unit, to, from;
7737 enum frv_insn_group group;
7738 struct frv_packet_group *packet_group;
7739
7740 /* First sort each group individually. */
7741 for (group = 0; group < NUM_GROUPS; group++)
7742 {
7743 cursor[group] = 0;
7744 frv_sort_insn_group (group);
7745 }
7746
7747 /* Go through the unit template and try to add an instruction from
7748 that unit's group. */
7749 to = 0;
7750 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7751 {
7752 group = frv_unit_groups[unit];
7753 packet_group = &frv_packet.groups[group];
7754 if (cursor[group] < packet_group->num_insns)
7755 {
7756 /* frv_reorg should have added nops for us. */
7757 gcc_assert (packet_group->sorted[cursor[group]]
7758 != packet_group->nop);
7759 insns[to++] = packet_group->sorted[cursor[group]++];
7760 }
7761 }
7762
7763 gcc_assert (to == frv_packet.num_insns);
7764
7765 /* Clear the last instruction's packing flag, thus marking the end of
7766 a packet. Reorder the other instructions relative to it. */
7767 CLEAR_PACKING_FLAG (insns[to - 1]);
7768 for (from = 0; from < to - 1; from++)
7769 {
7770 remove_insn (insns[from]);
7771 add_insn_before (insns[from], insns[to - 1], NULL);
7772 SET_PACKING_FLAG (insns[from]);
7773 }
7774 }
7775
7776
7777 /* Divide instructions into packets. Reorder the contents of each
7778 packet so that they are in the correct assembly-language order.
7779
7780 Since this pass can change the raw meaning of the rtl stream, it must
7781 only be called at the last minute, just before the instructions are
7782 written out. */
7783
7784 static void
7785 frv_pack_insns (void)
7786 {
7787 if (frv_for_each_packet (frv_reorder_packet))
7788 frv_insn_packing_flag = 0;
7789 else
7790 frv_insn_packing_flag = -1;
7791 }
7792 \f
7793 /* See whether we need to add nops to group GROUP in order to
7794 make a valid packet. */
7795
7796 static void
7797 frv_fill_unused_units (enum frv_insn_group group)
7798 {
7799 unsigned int non_nops, nops, i;
7800 struct frv_packet_group *packet_group;
7801
7802 packet_group = &frv_packet.groups[group];
7803
7804 /* Sort the instructions into assembly-language order.
7805 Use nops to fill slots that are otherwise unused. */
7806 frv_sort_insn_group (group);
7807
7808 /* See how many nops are needed before the final useful instruction. */
7809 i = nops = 0;
7810 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7811 while (packet_group->sorted[i++] == packet_group->nop)
7812 nops++;
7813
7814 /* Insert that many nops into the instruction stream. */
7815 while (nops-- > 0)
7816 frv_insert_nop_in_packet (packet_group->nop);
7817 }
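/* A hypothetical illustration of the counting loop above: if GROUP_I's
   sorted[] is { nop, A, nop, B } with num_insns == 2 (A and B being the
   real insns), the loop skips one nop before A and one before B, so NOPS
   ends up as 2 and two nop insns are inserted into the packet.  */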
7818
7819 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7820
7821 static bool
7822 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7823 {
7824 if (io1->const_address != 0 && io2->const_address != 0)
7825 return io1->const_address == io2->const_address;
7826
7827 if (io1->var_address != 0 && io2->var_address != 0)
7828 return rtx_equal_p (io1->var_address, io2->var_address);
7829
7830 return false;
7831 }
7832
7833 /* Return true if operations IO1 and IO2 are guaranteed to complete
7834 in order. */
7835
7836 static bool
7837 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7838 {
7839 /* The order of writes is always preserved. */
7840 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7841 return true;
7842
7843 /* The order of reads isn't preserved. */
7844 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7845 return false;
7846
7847 /* One operation is a write and the other is (or could be) a read.
7848 The order is only guaranteed if the accesses are to the same
7849 doubleword. */
7850 return frv_same_doubleword_p (io1, io2);
7851 }
7852
7853 /* Generalize I/O operation X so that it covers both X and Y. */
7854
7855 static void
7856 frv_io_union (struct frv_io *x, const struct frv_io *y)
7857 {
7858 if (x->type != y->type)
7859 x->type = FRV_IO_UNKNOWN;
7860 if (!frv_same_doubleword_p (x, y))
7861 {
7862 x->const_address = 0;
7863 x->var_address = 0;
7864 }
7865 }
7866
7867 /* Fill IO with information about the load or store associated with
7868 membar instruction INSN. */
7869
7870 static void
7871 frv_extract_membar (struct frv_io *io, rtx insn)
7872 {
7873 extract_insn (insn);
7874 io->type = INTVAL (recog_data.operand[2]);
7875 io->const_address = INTVAL (recog_data.operand[1]);
7876 io->var_address = XEXP (recog_data.operand[0], 0);
7877 }
7878
7879 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7880 if X is a register and *DATA depends on X. */
7881
7882 static void
7883 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7884 {
7885 rtx *other = (rtx *) data;
7886
7887 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7888 *other = 0;
7889 }
7890
7891 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7892 Remove every modified register from the set. */
7893
7894 static void
7895 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7896 {
7897 HARD_REG_SET *set = (HARD_REG_SET *) data;
7898 unsigned int regno;
7899
7900 if (REG_P (x))
7901 FOR_EACH_REGNO (regno, x)
7902 CLEAR_HARD_REG_BIT (*set, regno);
7903 }
7904
7905 /* A for_each_rtx callback for which DATA points to a HARD_REG_SET.
7906 Add every register in *X to the set. */
7907
7908 static int
7909 frv_io_handle_use_1 (rtx *x, void *data)
7910 {
7911 HARD_REG_SET *set = (HARD_REG_SET *) data;
7912 unsigned int regno;
7913
7914 if (REG_P (*x))
7915 FOR_EACH_REGNO (regno, *x)
7916 SET_HARD_REG_BIT (*set, regno);
7917
7918 return 0;
7919 }
7920
7921 /* A note_uses callback that applies frv_io_handle_use_1 to an
7922 entire rhs value. */
7923
7924 static void
7925 frv_io_handle_use (rtx *x, void *data)
7926 {
7927 for_each_rtx (x, frv_io_handle_use_1, data);
7928 }
7929
7930 /* Go through block BB looking for membars to remove. There are two
7931 cases where intra-block analysis is enough:
7932
7933 - a membar is redundant if it occurs between two consecutive I/O
7934 operations and if those operations are guaranteed to complete
7935 in order.
7936
7937 - a membar for a __builtin_read is redundant if the result is
7938 used before the next I/O operation is issued.
7939
7940 If the last membar in the block could not be removed, and there
7941 are guaranteed to be no I/O operations between that membar and
7942 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7943 store null.
7944
7945 Describe the block's first I/O operation in *NEXT_IO. Describe
7946 an unknown operation if the block doesn't do any I/O. */
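/* A minimal user-level illustration of the first rule above (P, X and Y
   are hypothetical):

        __builtin_write32 (p, x);        expands to a store plus membar A
        __builtin_write32 (p + 1, y);    expands to a store plus membar B

   Walking backwards through the block, membar B is recorded in NEXT_IO
   and stashed in *LAST_MEMBAR.  When membar A is reached, both
   operations are writes, so frv_io_fixed_order_p returns true and
   membar A is deleted; membar B is left for the global pass to
   examine.  */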
7947
7948 static void
7949 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7950 rtx *last_membar)
7951 {
7952 HARD_REG_SET used_regs;
7953 rtx next_membar, set, insn;
7954 bool next_is_end_p;
7955
7956 /* NEXT_IO is the next I/O operation to be performed after the current
7957 instruction. It starts off as being an unknown operation. */
7958 memset (next_io, 0, sizeof (*next_io));
7959
7960 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7961 next_is_end_p = true;
7962
7963 /* If the current instruction is a __builtin_read or __builtin_write,
7964 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7965 is null if the membar has already been deleted.
7966
7967 Note that the initialization here should only be needed to
7968 suppress warnings. */
7969 next_membar = 0;
7970
7971 /* USED_REGS is the set of registers that are used before the
7972 next I/O instruction. */
7973 CLEAR_HARD_REG_SET (used_regs);
7974
7975 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7976 if (GET_CODE (insn) == CALL_INSN)
7977 {
7978 /* We can't predict what a call will do to volatile memory. */
7979 memset (next_io, 0, sizeof (struct frv_io));
7980 next_is_end_p = false;
7981 CLEAR_HARD_REG_SET (used_regs);
7982 }
7983 else if (INSN_P (insn))
7984 switch (recog_memoized (insn))
7985 {
7986 case CODE_FOR_optional_membar_qi:
7987 case CODE_FOR_optional_membar_hi:
7988 case CODE_FOR_optional_membar_si:
7989 case CODE_FOR_optional_membar_di:
7990 next_membar = insn;
7991 if (next_is_end_p)
7992 {
7993 /* Local information isn't enough to decide whether this
7994 membar is needed. Stash it away for later. */
7995 *last_membar = insn;
7996 frv_extract_membar (next_io, insn);
7997 next_is_end_p = false;
7998 }
7999 else
8000 {
8001 /* Check whether the I/O operation before INSN could be
8002 reordered with one described by NEXT_IO. If it can't,
8003 INSN will not be needed. */
8004 struct frv_io prev_io;
8005
8006 frv_extract_membar (&prev_io, insn);
8007 if (frv_io_fixed_order_p (&prev_io, next_io))
8008 {
8009 if (dump_file)
8010 fprintf (dump_file,
8011 ";; [Local] Removing membar %d since order"
8012 " of accesses is guaranteed\n",
8013 INSN_UID (next_membar));
8014
8015 insn = NEXT_INSN (insn);
8016 delete_insn (next_membar);
8017 next_membar = 0;
8018 }
8019 *next_io = prev_io;
8020 }
8021 break;
8022
8023 default:
8024 /* Invalidate NEXT_IO's address if it depends on something that
8025 is clobbered by INSN. */
8026 if (next_io->var_address)
8027 note_stores (PATTERN (insn), frv_io_check_address,
8028 &next_io->var_address);
8029
8030 /* If the next membar is associated with a __builtin_read,
8031 see if INSN reads from that address. If it does, and if
8032 the destination register is used before the next I/O access,
8033 there is no need for the membar. */
8034 set = PATTERN (insn);
8035 if (next_io->type == FRV_IO_READ
8036 && next_io->var_address != 0
8037 && next_membar != 0
8038 && GET_CODE (set) == SET
8039 && GET_CODE (SET_DEST (set)) == REG
8040 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
8041 {
8042 rtx src;
8043
8044 src = SET_SRC (set);
8045 if (GET_CODE (src) == ZERO_EXTEND)
8046 src = XEXP (src, 0);
8047
8048 if (GET_CODE (src) == MEM
8049 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
8050 {
8051 if (dump_file)
8052 fprintf (dump_file,
8053 ";; [Local] Removing membar %d since the target"
8054 " of %d is used before the I/O operation\n",
8055 INSN_UID (next_membar), INSN_UID (insn));
8056
8057 if (next_membar == *last_membar)
8058 *last_membar = 0;
8059
8060 delete_insn (next_membar);
8061 next_membar = 0;
8062 }
8063 }
8064
8065 /* If INSN has volatile references, forget about any registers
8066 that are used after it. Otherwise forget about uses that
8067 are (or might be) defined by INSN. */
8068 if (volatile_refs_p (PATTERN (insn)))
8069 CLEAR_HARD_REG_SET (used_regs);
8070 else
8071 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
8072
8073 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
8074 break;
8075 }
8076 }
8077
8078 /* See if MEMBAR, the last membar instruction in BB, can be removed.
8079 FIRST_IO[X] describes the first operation performed by basic block X. */
8080
8081 static void
8082 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
8083 rtx membar)
8084 {
8085 struct frv_io this_io, next_io;
8086 edge succ;
8087 edge_iterator ei;
8088
8089 /* We need to keep the membar if there is an edge to the exit block. */
8090 FOR_EACH_EDGE (succ, ei, bb->succs)
8091 /* for (succ = bb->succ; succ != 0; succ = succ->succ_next) */
8092 if (succ->dest == EXIT_BLOCK_PTR)
8093 return;
8094
8095 /* Work out the union of all successor blocks. */
8096 ei = ei_start (bb->succs);
8097 ei_cond (ei, &succ);
8098 /* next_io = first_io[bb->succ->dest->index]; */
8099 next_io = first_io[succ->dest->index];
8100 ei = ei_start (bb->succs);
8101 if (ei_cond (ei, &succ))
8102 {
8103 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
8104 /*for (succ = bb->succ->succ_next; succ != 0; succ = succ->succ_next)*/
8105 frv_io_union (&next_io, &first_io[succ->dest->index]);
8106 }
8107 else
8108 gcc_unreachable ();
8109
8110 frv_extract_membar (&this_io, membar);
8111 if (frv_io_fixed_order_p (&this_io, &next_io))
8112 {
8113 if (dump_file)
8114 fprintf (dump_file,
8115 ";; [Global] Removing membar %d since order of accesses"
8116 " is guaranteed\n", INSN_UID (membar));
8117
8118 delete_insn (membar);
8119 }
8120 }
8121
8122 /* Remove redundant membars from the current function. */
8123
8124 static void
8125 frv_optimize_membar (void)
8126 {
8127 basic_block bb;
8128 struct frv_io *first_io;
8129 rtx *last_membar;
8130
8131 compute_bb_for_insn ();
8132 first_io = XCNEWVEC (struct frv_io, last_basic_block);
8133 last_membar = XCNEWVEC (rtx, last_basic_block);
8134
8135 FOR_EACH_BB (bb)
8136 frv_optimize_membar_local (bb, &first_io[bb->index],
8137 &last_membar[bb->index]);
8138
8139 FOR_EACH_BB (bb)
8140 if (last_membar[bb->index] != 0)
8141 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
8142
8143 free (first_io);
8144 free (last_membar);
8145 }
8146 \f
8147 /* Used by frv_reorg to keep track of the current packet's address. */
8148 static unsigned int frv_packet_address;
8149
8150 /* If the current packet falls through to a label, try to pad the packet
8151 with nops in order to fit the label's alignment requirements. */
8152
8153 static void
8154 frv_align_label (void)
8155 {
8156 unsigned int alignment, target, nop;
8157 rtx x, last, barrier, label;
8158
8159 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
8160 maximum alignment of that packet, LABEL to the last label between
8161 the packets, and BARRIER to the last barrier. */
8162 last = frv_packet.insns[frv_packet.num_insns - 1];
8163 label = barrier = 0;
8164 alignment = 4;
8165 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
8166 {
8167 if (LABEL_P (x))
8168 {
8169 unsigned int subalign = 1 << label_to_alignment (x);
8170 alignment = MAX (alignment, subalign);
8171 label = x;
8172 }
8173 if (BARRIER_P (x))
8174 barrier = x;
8175 }
8176
8177 /* If -malign-labels, and the packet falls through to an unaligned
8178 label, try introducing a nop to align that label to 8 bytes. */
8179 if (TARGET_ALIGN_LABELS
8180 && label != 0
8181 && barrier == 0
8182 && frv_packet.num_insns < frv_packet.issue_rate)
8183 alignment = MAX (alignment, 8);
8184
8185 /* Advance the address to the end of the current packet. */
8186 frv_packet_address += frv_packet.num_insns * 4;
8187
8188 /* Work out the target address, after alignment. */
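   /* For example, an address of 20 with an 8-byte alignment rounds up
      to 24.  */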
8189 target = (frv_packet_address + alignment - 1) & -alignment;
8190
8191 /* If the packet falls through to the label, try to find an efficient
8192 padding sequence. */
8193 if (barrier == 0)
8194 {
8195 /* First try adding nops to the current packet. */
8196 for (nop = 0; nop < frv_num_nops; nop++)
8197 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
8198 {
8199 frv_insert_nop_in_packet (frv_nops[nop]);
8200 frv_packet_address += 4;
8201 }
8202
8203 /* If we still haven't reached the target, add some new packets that
8204 contain only nops. If there are two types of nop, insert an
8205 alternating sequence of frv_nops[0] and frv_nops[1], which will
8206 lead to packets like:
8207
8208 nop.p
8209 mnop.p/fnop.p
8210 nop.p
8211 mnop/fnop
8212
8213 etc. Just emit frv_nops[0] if that's the only nop we have. */
8214 last = frv_packet.insns[frv_packet.num_insns - 1];
8215 nop = 0;
8216 while (frv_packet_address < target)
8217 {
8218 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
8219 frv_packet_address += 4;
8220 if (frv_num_nops > 1)
8221 nop ^= 1;
8222 }
8223 }
8224
8225 frv_packet_address = target;
8226 }
8227
8228 /* Subroutine of frv_reorg, called after each packet has been constructed
8229 in frv_packet. */
8230
8231 static void
8232 frv_reorg_packet (void)
8233 {
8234 frv_fill_unused_units (GROUP_I);
8235 frv_fill_unused_units (GROUP_FM);
8236 frv_align_label ();
8237 }
8238
8239 /* Add an instruction with pattern NOP to frv_nops[]. */
8240
8241 static void
8242 frv_register_nop (rtx nop)
8243 {
8244 nop = make_insn_raw (nop);
8245 NEXT_INSN (nop) = 0;
8246 PREV_INSN (nop) = 0;
8247 frv_nops[frv_num_nops++] = nop;
8248 }
8249
8250 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
8251 into packets and check whether we need to insert nops in order to
8252 fulfill the processor's issue requirements. Also, if the user has
8253 requested a certain alignment for a label, try to meet that alignment
8254 by inserting nops in the previous packet. */
8255
8256 static void
8257 frv_reorg (void)
8258 {
8259 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8260 frv_optimize_membar ();
8261
8262 frv_num_nops = 0;
8263 frv_register_nop (gen_nop ());
8264 if (TARGET_MEDIA)
8265 frv_register_nop (gen_mnop ());
8266 if (TARGET_HARD_FLOAT)
8267 frv_register_nop (gen_fnop ());
8268
8269 /* Estimate the length of each branch. Although this may change after
8270 we've inserted nops, it will only do so in big functions. */
8271 shorten_branches (get_insns ());
8272
8273 frv_packet_address = 0;
8274 frv_for_each_packet (frv_reorg_packet);
8275 }
8276 \f
8277 #define def_builtin(name, type, code) \
8278 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
8279
8280 struct builtin_description
8281 {
8282 enum insn_code icode;
8283 const char *name;
8284 enum frv_builtins code;
8285 enum rtx_code comparison;
8286 unsigned int flag;
8287 };
8288
8289 /* Media intrinsics that take a single, constant argument. */
8290
8291 static struct builtin_description bdesc_set[] =
8292 {
8293 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, 0, 0 }
8294 };
8295
8296 /* Media intrinsics that take just one argument. */
8297
8298 static struct builtin_description bdesc_1arg[] =
8299 {
8300 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, 0, 0 },
8301 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, 0, 0 },
8302 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, 0, 0 },
8303 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, 0, 0 },
8304 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, 0, 0 },
8305 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, 0, 0 }
8306 };
8307
8308 /* Media intrinsics that take two arguments. */
8309
8310 static struct builtin_description bdesc_2arg[] =
8311 {
8312 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, 0, 0 },
8313 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, 0, 0 },
8314 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, 0, 0 },
8315 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, 0, 0 },
8316 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, 0, 0 },
8317 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, 0, 0 },
8318 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, 0, 0 },
8319 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, 0, 0 },
8320 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, 0, 0 },
8321 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, 0, 0 },
8322 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, 0, 0 },
8323 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, 0, 0 },
8324 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, 0, 0 },
8325 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, 0, 0 },
8326 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, 0, 0 },
8327 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, 0, 0 },
8328 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, 0, 0 },
8329 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, 0, 0 },
8330 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, 0, 0 },
8331 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, 0, 0 },
8332 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, 0, 0 },
8333 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, 0, 0 },
8334 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, 0, 0 },
8335 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, 0, 0 },
8336 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, 0, 0 },
8337 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, 0, 0 },
8338 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, 0, 0 }
8339 };
8340
8341 /* Integer intrinsics that take two arguments and have no return value. */
8342
8343 static struct builtin_description bdesc_int_void2arg[] =
8344 {
8345 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, 0, 0 },
8346 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, 0, 0 },
8347 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, 0, 0 }
8348 };
8349
8350 static struct builtin_description bdesc_prefetches[] =
8351 {
8352 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, 0, 0 },
8353 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, 0, 0 }
8354 };
8355
8356 /* Media intrinsics that take two arguments, the first being an ACC number. */
8357
8358 static struct builtin_description bdesc_cut[] =
8359 {
8360 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, 0, 0 },
8361 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, 0, 0 },
8362 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, 0, 0 }
8363 };
8364
8365 /* Two-argument media intrinsics with an immediate second argument. */
8366
8367 static struct builtin_description bdesc_2argimm[] =
8368 {
8369 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, 0, 0 },
8370 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, 0, 0 },
8371 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, 0, 0 },
8372 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, 0, 0 },
8373 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, 0, 0 },
8374 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, 0, 0 },
8375 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, 0, 0 },
8376 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, 0, 0 },
8377 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, 0, 0 },
8378 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, 0, 0 },
8379 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, 0, 0 },
8380 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, 0, 0 },
8381 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, 0, 0 },
8382 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, 0, 0 },
8383 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, 0, 0 },
8384 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, 0, 0 },
8385 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, 0, 0 }
8386 };
8387
8388 /* Media intrinsics that take two arguments and return void, the first argument
8389 being a pointer to 4 words in memory. */
8390
8391 static struct builtin_description bdesc_void2arg[] =
8392 {
8393 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, 0, 0 },
8394 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, 0, 0 },
8395 };
8396
8397 /* Media intrinsics that take three arguments, the first being a const_int that
8398 denotes an accumulator, and that return void. */
8399
8400 static struct builtin_description bdesc_void3arg[] =
8401 {
8402 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, 0, 0 },
8403 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, 0, 0 },
8404 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, 0, 0 },
8405 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, 0, 0 },
8406 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, 0, 0 },
8407 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, 0, 0 },
8408 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, 0, 0 },
8409 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, 0, 0 },
8410 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, 0, 0 },
8411 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, 0, 0 },
8412 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, 0, 0 },
8413 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, 0, 0 },
8414 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, 0, 0 },
8415 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, 0, 0 },
8416 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, 0, 0 },
8417 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, 0, 0 },
8418 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, 0, 0 },
8419 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, 0, 0 },
8420 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, 0, 0 },
8421 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, 0, 0 },
8422 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, 0, 0 },
8423 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, 0, 0 },
8424 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, 0, 0 },
8425 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, 0, 0 },
8426 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, 0, 0 }
8427 };
8428
8429 /* Media intrinsics that take two accumulator numbers as argument and
8430 return void. */
8431
8432 static struct builtin_description bdesc_voidacc[] =
8433 {
8434 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, 0, 0 },
8435 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, 0, 0 },
8436 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, 0, 0 },
8437 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, 0, 0 },
8438 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, 0, 0 },
8439 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, 0, 0 }
8440 };
8441
8442 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8443 a normal move and the ICODE is for the membar. */
8444
8445 static struct builtin_description bdesc_loads[] =
8446 {
8447 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8448 FRV_BUILTIN_READ8, 0, 0 },
8449 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8450 FRV_BUILTIN_READ16, 0, 0 },
8451 { CODE_FOR_optional_membar_si, "__builtin_read32",
8452 FRV_BUILTIN_READ32, 0, 0 },
8453 { CODE_FOR_optional_membar_di, "__builtin_read64",
8454 FRV_BUILTIN_READ64, 0, 0 }
8455 };
8456
8457 /* Likewise stores. */
8458
8459 static struct builtin_description bdesc_stores[] =
8460 {
8461 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8462 FRV_BUILTIN_WRITE8, 0, 0 },
8463 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8464 FRV_BUILTIN_WRITE16, 0, 0 },
8465 { CODE_FOR_optional_membar_si, "__builtin_write32",
8466 FRV_BUILTIN_WRITE32, 0, 0 },
8467 { CODE_FOR_optional_membar_di, "__builtin_write64",
8468 FRV_BUILTIN_WRITE64, 0, 0 },
8469 };
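/* A hedged sketch of how these intrinsics appear in user code (the names
   below are made up; the prototypes are the ones registered in
   frv_init_builtins further down):

        volatile void *port = ...;
        unsigned long v = __builtin_read32 (port);
        __builtin_write32 (port, v + 1);

   Each call expands to an ordinary volatile memory access plus one of the
   optional_membar_* instructions named above, which frv_optimize_membar
   may later delete if the ordering is already guaranteed.  */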
8470
8471 /* Initialize media builtins. */
8472
8473 static void
8474 frv_init_builtins (void)
8475 {
8476 tree endlink = void_list_node;
8477 tree accumulator = integer_type_node;
8478 tree integer = integer_type_node;
8479 tree voidt = void_type_node;
8480 tree uhalf = short_unsigned_type_node;
8481 tree sword1 = long_integer_type_node;
8482 tree uword1 = long_unsigned_type_node;
8483 tree sword2 = long_long_integer_type_node;
8484 tree uword2 = long_long_unsigned_type_node;
8485 tree uword4 = build_pointer_type (uword1);
8486 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8487 tree ubyte = unsigned_char_type_node;
8488 tree iacc = integer_type_node;
8489
8490 #define UNARY(RET, T1) \
8491 build_function_type (RET, tree_cons (NULL_TREE, T1, endlink))
8492
8493 #define BINARY(RET, T1, T2) \
8494 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8495 tree_cons (NULL_TREE, T2, endlink)))
8496
8497 #define TRINARY(RET, T1, T2, T3) \
8498 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8499 tree_cons (NULL_TREE, T2, \
8500 tree_cons (NULL_TREE, T3, endlink))))
8501
8502 #define QUAD(RET, T1, T2, T3, T4) \
8503 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8504 tree_cons (NULL_TREE, T2, \
8505 tree_cons (NULL_TREE, T3, \
8506 tree_cons (NULL_TREE, T4, endlink)))))
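/* For example, BINARY (uword1, uword1, uword1) below builds the tree for
   "unsigned long f (unsigned long, unsigned long)", the prototype shared
   by intrinsics such as __MAND, __MOR and __MXOR.  */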
8507
8508 tree void_ftype_void = build_function_type (voidt, endlink);
8509
8510 tree void_ftype_acc = UNARY (voidt, accumulator);
8511 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8512 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8513 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8514 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8515 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8516 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8517 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8518 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8519
8520 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8521 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8522 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8523 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8524 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8525 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8526 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8527 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8528 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8529 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8530 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8531
8532 tree sw1_ftype_int = UNARY (sword1, integer);
8533 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8534 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8535
8536 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8537 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8538 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8539 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8540 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8541 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8542
8543 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8544 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8545 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8546 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8547 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8548 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8549 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8550 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8551 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8552 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8553 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8554 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8555 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8556 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8557 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8558 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8559 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8560
8561 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8562 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8563 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8564 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8565 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8566 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8567 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8568 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8569 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8570 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8571 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8572 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8573 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8574 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8575 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8576 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8577 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8578 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8579 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8580 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8581 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8582 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8583 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8584 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8585 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8586 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8587 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8588 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8589 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8590 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8591 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8592 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8593 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8594 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8595 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8596 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8597 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8598 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8599 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8600 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8601 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8602 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8603 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8604 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8605 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8606 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8607 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8608 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8609 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8610 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8611 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8612 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8613 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8614 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8615 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8616 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8617 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8618 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8619 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8620 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8621 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8622 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8623 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8624 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8625 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8626 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8627 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8628 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8629 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8630 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8631 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8632 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8633 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8634 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8635 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8636 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8637 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8638 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8639 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8640 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8641 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8642 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8643 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8644 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8645 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8646 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8647 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8648 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8649 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8650 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8651 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8652 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8653 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8654 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8655 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8656 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8657 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8658 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8659 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8660 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8661 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8662 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8663 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8664 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8665 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8666 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8667 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8668 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8669
8670 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8671 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8672 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8673 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8674
8675 #undef UNARY
8676 #undef BINARY
8677 #undef TRINARY
8678 #undef QUAD
8679 }
8680
8681 /* Set the names for various arithmetic operations according to the
8682 FRV ABI. */
8683 static void
8684 frv_init_libfuncs (void)
8685 {
8686 set_optab_libfunc (smod_optab, SImode, "__modi");
8687 set_optab_libfunc (umod_optab, SImode, "__umodi");
8688
8689 set_optab_libfunc (add_optab, DImode, "__addll");
8690 set_optab_libfunc (sub_optab, DImode, "__subll");
8691 set_optab_libfunc (smul_optab, DImode, "__mulll");
8692 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8693 set_optab_libfunc (smod_optab, DImode, "__modll");
8694 set_optab_libfunc (umod_optab, DImode, "__umodll");
8695 set_optab_libfunc (and_optab, DImode, "__andll");
8696 set_optab_libfunc (ior_optab, DImode, "__orll");
8697 set_optab_libfunc (xor_optab, DImode, "__xorll");
8698 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8699
8700 set_optab_libfunc (add_optab, SFmode, "__addf");
8701 set_optab_libfunc (sub_optab, SFmode, "__subf");
8702 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8703 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8704
8705 set_optab_libfunc (add_optab, DFmode, "__addd");
8706 set_optab_libfunc (sub_optab, DFmode, "__subd");
8707 set_optab_libfunc (smul_optab, DFmode, "__muld");
8708 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8709
8710 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8711 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8712
8713 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8714 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8715 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8716 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8717
8718 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8719 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8720 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8721 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8722
8723 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8724 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8725 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8726 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8727 }
8728
8729 /* Convert an integer constant to an accumulator register. ICODE is the
8730 code of the target instruction, OPNUM is the number of the
8731 accumulator operand and OPVAL is the constant integer. Try both
8732 ACC and ACCG registers; only report an error if neither fits the
8733 instruction. */
8734
8735 static rtx
8736 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8737 {
8738 rtx reg;
8739 int i;
8740
8741 /* ACCs and ACCGs are implicit global registers if media intrinsics
8742 are being used. We set this up lazily to avoid creating lots of
8743 unnecessary call_insn rtl in non-media code. */
8744 for (i = 0; i <= ACC_MASK; i++)
8745 if ((i & ACC_MASK) == i)
8746 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8747
8748 if (GET_CODE (opval) != CONST_INT)
8749 {
8750 error ("accumulator is not a constant integer");
8751 return NULL_RTX;
8752 }
8753 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8754 {
8755 error ("accumulator number is out of bounds");
8756 return NULL_RTX;
8757 }
8758
8759 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8760 ACC_FIRST + INTVAL (opval));
8761 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8762 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8763
8764 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8765 {
8766 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8767 return NULL_RTX;
8768 }
8769 return reg;
8770 }
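/* An illustrative case: an accumulator argument of 2 first produces
   (reg acc2) in the operand's mode.  If the instruction's predicate
   rejects that because it wants the guard register instead, the register
   number is rewritten to accg2 and the predicate is checked again.  */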
8771
8772 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8773 should have. */
8774
8775 static enum machine_mode
8776 frv_matching_accg_mode (enum machine_mode mode)
8777 {
8778 switch (mode)
8779 {
8780 case V4SImode:
8781 return V4QImode;
8782
8783 case DImode:
8784 return HImode;
8785
8786 case SImode:
8787 return QImode;
8788
8789 default:
8790 gcc_unreachable ();
8791 }
8792 }
8793
8794 /* Given that a __builtin_read or __builtin_write function is accessing
8795 address ADDRESS, return the value that should be used as operand 1
8796 of the membar. */
8797
8798 static rtx
8799 frv_io_address_cookie (rtx address)
8800 {
8801 return (GET_CODE (address) == CONST_INT
8802 ? GEN_INT (INTVAL (address) / 8 * 8)
8803 : const0_rtx);
8804 }
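/* For instance, a constant address of 0x1005 yields the cookie 0x1000,
   the start of the containing doubleword (the division truncates), while
   a non-constant address always yields the cookie 0.  */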
8805
8806 /* Return the accumulator guard that should be paired with accumulator
8807 register ACC. The mode of the returned register is in the same
8808 class as ACC, but is four times smaller. */
8809
8810 rtx
8811 frv_matching_accg_for_acc (rtx acc)
8812 {
8813 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8814 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8815 }
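/* For example, (reg:DI acc0) maps to (reg:HI accg0) and (reg:SI acc3)
   maps to (reg:QI accg3): the same position within the class, at one
   quarter of the width.  */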
8816
8817 /* Read argument number INDEX from the call EXP.
8818 Return the value as an rtx. */
8819
8820 static rtx
8821 frv_read_argument (tree exp, unsigned int index)
8822 {
8823 return expand_expr (CALL_EXPR_ARG (exp, index),
8824 NULL_RTX, VOIDmode, 0);
8825 }
8826
8827 /* Like frv_read_argument, but interpret the argument as the number
8828 of an IACC register and return a (reg:MODE ...) rtx for it. */
8829
8830 static rtx
8831 frv_read_iacc_argument (enum machine_mode mode, tree call,
8832 unsigned int index)
8833 {
8834 int i, regno;
8835 rtx op;
8836
8837 op = frv_read_argument (call, index);
8838 if (GET_CODE (op) != CONST_INT
8839 || INTVAL (op) < 0
8840 || INTVAL (op) > IACC_LAST - IACC_FIRST
8841 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8842 {
8843 error ("invalid IACC argument");
8844 op = const0_rtx;
8845 }
8846
8847 /* IACCs are implicit global registers. We set this up lazily to
8848 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8849 being used. */
8850 regno = INTVAL (op) + IACC_FIRST;
8851 for (i = 0; i < HARD_REGNO_NREGS (regno, mode); i++)
8852 global_regs[regno + i] = 1;
8853
8854 return gen_rtx_REG (mode, regno);
8855 }
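/* A worked instance of the alignment test above: IACC numbers count in
   4-byte units, so a DImode (8-byte) access only accepts even register
   numbers, whereas an SImode access accepts any number in range.  */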
8856
8857 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8858 The instruction should require a constant operand of some sort. The
8859 function prints an error if OPVAL is not valid. */
8860
8861 static int
8862 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8863 {
8864 if (GET_CODE (opval) != CONST_INT)
8865 {
8866 error ("%qs expects a constant argument", insn_data[icode].name);
8867 return FALSE;
8868 }
8869 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8870 {
8871 error ("constant argument out of range for %qs", insn_data[icode].name);
8872 return FALSE;
8873 }
8874 return TRUE;
8875 }
8876
8877 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8878 if it's not null, has the right mode, and satisfies operand 0's
8879 predicate. */
8880
8881 static rtx
8882 frv_legitimize_target (enum insn_code icode, rtx target)
8883 {
8884 enum machine_mode mode = insn_data[icode].operand[0].mode;
8885
8886 if (! target
8887 || GET_MODE (target) != mode
8888 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8889 return gen_reg_rtx (mode);
8890 else
8891 return target;
8892 }
8893
8894 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8895 check whether ARG satisfies the operand's constraints. If it doesn't,
8896 copy ARG to a temporary register and return that. Otherwise return ARG
8897 itself. */
8898
8899 static rtx
8900 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8901 {
8902 enum machine_mode mode = insn_data[icode].operand[opnum].mode;
8903
8904 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8905 return arg;
8906 else
8907 return copy_to_mode_reg (mode, arg);
8908 }
8909
8910 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8911
8912 static rtx
8913 frv_volatile_memref (enum machine_mode mode, rtx arg)
8914 {
8915 rtx mem;
8916
8917 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8918 MEM_VOLATILE_P (mem) = 1;
8919 return mem;
8920 }
8921
8922 /* Expand builtins that take a single, constant argument. At the moment,
8923 only MHDSETS falls into this category. */
8924
8925 static rtx
8926 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8927 {
8928 rtx pat;
8929 rtx op0 = frv_read_argument (call, 0);
8930
8931 if (! frv_check_constant_argument (icode, 1, op0))
8932 return NULL_RTX;
8933
8934 target = frv_legitimize_target (icode, target);
8935 pat = GEN_FCN (icode) (target, op0);
8936 if (! pat)
8937 return NULL_RTX;
8938
8939 emit_insn (pat);
8940 return target;
8941 }
8942
8943 /* Expand builtins that take one operand. */
8944
8945 static rtx
8946 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8947 {
8948 rtx pat;
8949 rtx op0 = frv_read_argument (call, 0);
8950
8951 target = frv_legitimize_target (icode, target);
8952 op0 = frv_legitimize_argument (icode, 1, op0);
8953 pat = GEN_FCN (icode) (target, op0);
8954 if (! pat)
8955 return NULL_RTX;
8956
8957 emit_insn (pat);
8958 return target;
8959 }
8960
8961 /* Expand builtins that take two operands. */
8962
8963 static rtx
8964 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8965 {
8966 rtx pat;
8967 rtx op0 = frv_read_argument (call, 0);
8968 rtx op1 = frv_read_argument (call, 1);
8969
8970 target = frv_legitimize_target (icode, target);
8971 op0 = frv_legitimize_argument (icode, 1, op0);
8972 op1 = frv_legitimize_argument (icode, 2, op1);
8973 pat = GEN_FCN (icode) (target, op0, op1);
8974 if (! pat)
8975 return NULL_RTX;
8976
8977 emit_insn (pat);
8978 return target;
8979 }
8980
8981 /* Expand cut-style builtins, which take two operands plus an implicit ACCG
8982 operand. */
8983
8984 static rtx
8985 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8986 {
8987 rtx pat;
8988 rtx op0 = frv_read_argument (call, 0);
8989 rtx op1 = frv_read_argument (call, 1);
8990 rtx op2;
8991
8992 target = frv_legitimize_target (icode, target);
8993 op0 = frv_int_to_acc (icode, 1, op0);
8994 if (! op0)
8995 return NULL_RTX;
8996
8997 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8998 {
8999 if (! frv_check_constant_argument (icode, 2, op1))
9000 return NULL_RTX;
9001 }
9002 else
9003 op1 = frv_legitimize_argument (icode, 2, op1);
9004
9005 op2 = frv_matching_accg_for_acc (op0);
9006 pat = GEN_FCN (icode) (target, op0, op1, op2);
9007 if (! pat)
9008 return NULL_RTX;
9009
9010 emit_insn (pat);
9011 return target;
9012 }
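
/* Illustrative note (an assumption, not taken from the original sources):
   for a cut-style pattern the generated insn has four operands in the
   order

       (target, accumulator, shift amount, matching ACCG)

   where the ACCG operand is synthesized by frv_matching_accg_for_acc
   rather than written by the user.  As the check above shows, the
   mdcutssi pattern additionally requires the shift amount to be a
   compile-time constant.  */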
9013
9014 /* Expand builtins that take two operands, the second being an immediate.  */
9015
9016 static rtx
9017 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
9018 {
9019 rtx pat;
9020 rtx op0 = frv_read_argument (call, 0);
9021 rtx op1 = frv_read_argument (call, 1);
9022
9023 if (! frv_check_constant_argument (icode, 2, op1))
9024 return NULL_RTX;
9025
9026 target = frv_legitimize_target (icode, target);
9027 op0 = frv_legitimize_argument (icode, 1, op0);
9028 pat = GEN_FCN (icode) (target, op0, op1);
9029 if (! pat)
9030 return NULL_RTX;
9031
9032 emit_insn (pat);
9033 return target;
9034 }
9035
9036 /* Expand builtins that take two operands, the first of which is a pointer
9037 to ints, and that return void.  */
9038
9039 static rtx
9040 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
9041 {
9042 rtx pat;
9043 rtx op0 = frv_read_argument (call, 0);
9044 rtx op1 = frv_read_argument (call, 1);
9045 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
9046 rtx addr;
9047
9048 if (GET_CODE (op0) != MEM)
9049 {
9050 rtx reg = op0;
9051
9052 if (! offsettable_address_p (0, mode0, op0))
9053 {
9054 reg = gen_reg_rtx (Pmode);
9055 emit_insn (gen_rtx_SET (VOIDmode, reg, op0));
9056 }
9057
9058 op0 = gen_rtx_MEM (SImode, reg);
9059 }
9060
9061 addr = XEXP (op0, 0);
9062 if (! offsettable_address_p (0, mode0, addr))
9063 addr = copy_to_mode_reg (Pmode, op0);
9064
9065 op0 = change_address (op0, V4SImode, addr);
9066 op1 = frv_legitimize_argument (icode, 1, op1);
9067 pat = GEN_FCN (icode) (op0, op1);
9068 if (! pat)
9069 return 0;
9070
9071 emit_insn (pat);
9072 return 0;
9073 }
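
/* Illustrative note (assumption): the bdesc_void2arg builtins such as
   MDUNPACKH store their result through the pointer passed as the first
   argument, so a use is of the form

       __MDUNPACKH (dst, src);

   where dst points to an array of ints.  The code above only has to make
   sure that pointer ends up as a legitimate, offsettable V4SImode memory
   reference before the insn is generated.  */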
9074
9075 /* Expand builtins that take two long operands and return void. */
9076
9077 static rtx
9078 frv_expand_int_void2arg (enum insn_code icode, tree call)
9079 {
9080 rtx pat;
9081 rtx op0 = frv_read_argument (call, 0);
9082 rtx op1 = frv_read_argument (call, 1);
9083
9084 op0 = frv_legitimize_argument (icode, 1, op0);
9085 op1 = frv_legitimize_argument (icode, 1, op1);
9086 pat = GEN_FCN (icode) (op0, op1);
9087 if (! pat)
9088 return NULL_RTX;
9089
9090 emit_insn (pat);
9091 return NULL_RTX;
9092 }
9093
9094 /* Expand prefetch builtins. These take a single address as argument. */
9095
9096 static rtx
9097 frv_expand_prefetches (enum insn_code icode, tree call)
9098 {
9099 rtx pat;
9100 rtx op0 = frv_read_argument (call, 0);
9101
9102 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
9103 if (! pat)
9104 return 0;
9105
9106 emit_insn (pat);
9107 return 0;
9108 }
9109
9110 /* Expand builtins that take three operands and return void.  The first
9111 argument must be a constant that selects an accumulator or an
9112 accumulator pair/quad.  A fourth operand is created internally: the
9113 accumulator guard register that corresponds to that accumulator.  */
9114
9115 static rtx
9116 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
9117 {
9118 rtx pat;
9119 rtx op0 = frv_read_argument (call, 0);
9120 rtx op1 = frv_read_argument (call, 1);
9121 rtx op2 = frv_read_argument (call, 2);
9122 rtx op3;
9123
9124 op0 = frv_int_to_acc (icode, 0, op0);
9125 if (! op0)
9126 return NULL_RTX;
9127
9128 op1 = frv_legitimize_argument (icode, 1, op1);
9129 op2 = frv_legitimize_argument (icode, 2, op2);
9130 op3 = frv_matching_accg_for_acc (op0);
9131 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9132 if (! pat)
9133 return NULL_RTX;
9134
9135 emit_insn (pat);
9136 return NULL_RTX;
9137 }
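
/* Illustrative note (assumption about the user-level name): the
   bdesc_void3arg builtins are the multiply-accumulate style operations,
   so a use such as

       __MMACHS (acc, a, b);

   supplies an accumulator number plus two data operands; the matching
   accumulator guard becomes a fourth, hidden operand of the insn.  */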
9138
9139 /* Expand builtins that perform accumulator-to-accumulator operations.
9140 These builtins take two accumulator numbers as argument and return
9141 void. */
9142
9143 static rtx
9144 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
9145 {
9146 rtx pat;
9147 rtx op0 = frv_read_argument (call, 0);
9148 rtx op1 = frv_read_argument (call, 1);
9149 rtx op2;
9150 rtx op3;
9151
9152 op0 = frv_int_to_acc (icode, 0, op0);
9153 if (! op0)
9154 return NULL_RTX;
9155
9156 op1 = frv_int_to_acc (icode, 1, op1);
9157 if (! op1)
9158 return NULL_RTX;
9159
9160 op2 = frv_matching_accg_for_acc (op0);
9161 op3 = frv_matching_accg_for_acc (op1);
9162 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9163 if (! pat)
9164 return NULL_RTX;
9165
9166 emit_insn (pat);
9167 return NULL_RTX;
9168 }
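
/* Illustrative note (assumption about the user-level name):
   accumulator-to-accumulator builtins take two accumulator numbers, e.g.

       __MADDACCS (acc0, acc1);

   and, as above, each accumulator drags its guard register along as an
   extra hidden operand of the generated insn.  */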
9169
9170 /* Expand a __builtin_read* function. ICODE is the instruction code for the
9171 membar and TARGET_MODE is the mode that the loaded value should have. */
9172
9173 static rtx
9174 frv_expand_load_builtin (enum insn_code icode, enum machine_mode target_mode,
9175 tree call, rtx target)
9176 {
9177 rtx op0 = frv_read_argument (call, 0);
9178 rtx cookie = frv_io_address_cookie (op0);
9179
9180 if (target == 0 || !REG_P (target))
9181 target = gen_reg_rtx (target_mode);
9182 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9183 convert_move (target, op0, 1);
9184 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
9185 cfun->machine->has_membar_p = 1;
9186 return target;
9187 }
9188
9189 /* Likewise __builtin_write* functions. */
9190
9191 static rtx
9192 frv_expand_store_builtin (enum insn_code icode, tree call)
9193 {
9194 rtx op0 = frv_read_argument (call, 0);
9195 rtx op1 = frv_read_argument (call, 1);
9196 rtx cookie = frv_io_address_cookie (op0);
9197
9198 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9199 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
9200 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
9201 cfun->machine->has_membar_p = 1;
9202 return NULL_RTX;
9203 }
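
/* Illustrative note (assumption about the user-level names): the
   bdesc_loads and bdesc_stores entries implement the __builtin_read* and
   __builtin_write* accessors, e.g.

       c = __builtin_read8 (port);
       __builtin_write32 (port, value);

   Each expands to a volatile memory access followed by the membar insn
   generated above, and records in cfun->machine that the function
   contains I/O membars.  */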
9204
9205 /* Expand the MDPACKH builtin.  It takes four unsigned short arguments;
9206 each argument supplies the low halfword of one word of the two
9207 double-word input registers.  CALL is the tree for the call and TARGET,
9208 if nonnull, suggests a good place to put the return value.  */
9209
9210 static rtx
9211 frv_expand_mdpackh_builtin (tree call, rtx target)
9212 {
9213 enum insn_code icode = CODE_FOR_mdpackh;
9214 rtx pat, op0, op1;
9215 rtx arg1 = frv_read_argument (call, 0);
9216 rtx arg2 = frv_read_argument (call, 1);
9217 rtx arg3 = frv_read_argument (call, 2);
9218 rtx arg4 = frv_read_argument (call, 3);
9219
9220 target = frv_legitimize_target (icode, target);
9221 op0 = gen_reg_rtx (DImode);
9222 op1 = gen_reg_rtx (DImode);
9223
9224 /* The high half of each word is not explicitly initialized, so indicate
9225 that the input operands are not live before this point. */
9226 emit_clobber (op0);
9227 emit_clobber (op1);
9228
9229 /* Move each argument into the low half of its associated input word. */
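  /* On this big-endian target, the HImode subregs at byte offsets 2 and 6
     of a DImode value select the low halfword of its first and second
     32-bit words respectively.  */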
9230 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
9231 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
9232 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
9233 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
9234
9235 pat = GEN_FCN (icode) (target, op0, op1);
9236 if (! pat)
9237 return NULL_RTX;
9238
9239 emit_insn (pat);
9240 return target;
9241 }
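
/* Illustrative note (assumption about the user-level name): __MDPACKH
   takes four halfword arguments and packs them into a double word, e.g.

       r = __MDPACKH (h0, h1, h2, h3);

   which is why the expander above builds its two DImode inputs by hand
   instead of going through frv_legitimize_argument.  */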
9242
9243 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
9244 number as argument. */
9245
9246 static rtx
9247 frv_expand_mclracc_builtin (tree call)
9248 {
9249 enum insn_code icode = CODE_FOR_mclracc;
9250 rtx pat;
9251 rtx op0 = frv_read_argument (call, 0);
9252
9253 op0 = frv_int_to_acc (icode, 0, op0);
9254 if (! op0)
9255 return NULL_RTX;
9256
9257 pat = GEN_FCN (icode) (op0);
9258 if (pat)
9259 emit_insn (pat);
9260
9261 return NULL_RTX;
9262 }
9263
9264 /* Expand builtins that take no arguments. */
9265
9266 static rtx
9267 frv_expand_noargs_builtin (enum insn_code icode)
9268 {
9269 rtx pat = GEN_FCN (icode) (const0_rtx);
9270 if (pat)
9271 emit_insn (pat);
9272
9273 return NULL_RTX;
9274 }
9275
9276 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9277 number or accumulator guard number as argument and return an SI integer. */
9278
9279 static rtx
9280 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9281 {
9282 rtx pat;
9283 rtx target = gen_reg_rtx (SImode);
9284 rtx op0 = frv_read_argument (call, 0);
9285
9286 op0 = frv_int_to_acc (icode, 1, op0);
9287 if (! op0)
9288 return NULL_RTX;
9289
9290 pat = GEN_FCN (icode) (target, op0);
9291 if (! pat)
9292 return NULL_RTX;
9293
9294 emit_insn (pat);
9295 return target;
9296 }
9297
9298 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9299 accumulator guard as their first argument and an SImode value as their
9300 second. */
9301
9302 static rtx
9303 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9304 {
9305 rtx pat;
9306 rtx op0 = frv_read_argument (call, 0);
9307 rtx op1 = frv_read_argument (call, 1);
9308
9309 op0 = frv_int_to_acc (icode, 0, op0);
9310 if (! op0)
9311 return NULL_RTX;
9312
9313 op1 = frv_legitimize_argument (icode, 1, op1);
9314 pat = GEN_FCN (icode) (op0, op1);
9315 if (pat)
9316 emit_insn (pat);
9317
9318 return NULL_RTX;
9319 }
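
/* Illustrative note (assumption about the user-level names): the pair of
   expanders above implements direct accumulator access, e.g.

       v = __MRDACC (acc);
       __MWTACC (acc, v);

   and likewise __MRDACCG/__MWTACCG for the accumulator guard registers.  */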
9320
9321 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9322 to move DImode values into and out of IACC0. */
9323
9324 static void
9325 frv_split_iacc_move (rtx dest, rtx src)
9326 {
9327 enum machine_mode inner;
9328 int i;
9329
9330 inner = GET_MODE (dest);
9331 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
9332 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
9333 simplify_gen_subreg (SImode, src, inner, i));
9334 }
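
/* Illustrative note: for a DImode value this loop emits exactly two SImode
   moves, covering byte offsets 0 and 4, which is how the IACCreadll and
   IACCsetll cases below move 64-bit values into and out of IACC0.  */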
9335
9336 /* Expand builtins. */
9337
9338 static rtx
9339 frv_expand_builtin (tree exp,
9340 rtx target,
9341 rtx subtarget ATTRIBUTE_UNUSED,
9342 enum machine_mode mode ATTRIBUTE_UNUSED,
9343 int ignore ATTRIBUTE_UNUSED)
9344 {
9345 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9346 unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl);
9347 unsigned i;
9348 struct builtin_description *d;
9349
9350 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
9351 {
9352 error ("media functions are not available unless -mmedia is used");
9353 return NULL_RTX;
9354 }
9355
9356 switch (fcode)
9357 {
9358 case FRV_BUILTIN_MCOP1:
9359 case FRV_BUILTIN_MCOP2:
9360 case FRV_BUILTIN_MDUNPACKH:
9361 case FRV_BUILTIN_MBTOHE:
9362 if (! TARGET_MEDIA_REV1)
9363 {
9364 error ("this media function is only available on the fr500");
9365 return NULL_RTX;
9366 }
9367 break;
9368
9369 case FRV_BUILTIN_MQXMACHS:
9370 case FRV_BUILTIN_MQXMACXHS:
9371 case FRV_BUILTIN_MQMACXHS:
9372 case FRV_BUILTIN_MADDACCS:
9373 case FRV_BUILTIN_MSUBACCS:
9374 case FRV_BUILTIN_MASACCS:
9375 case FRV_BUILTIN_MDADDACCS:
9376 case FRV_BUILTIN_MDSUBACCS:
9377 case FRV_BUILTIN_MDASACCS:
9378 case FRV_BUILTIN_MABSHS:
9379 case FRV_BUILTIN_MDROTLI:
9380 case FRV_BUILTIN_MCPLHI:
9381 case FRV_BUILTIN_MCPLI:
9382 case FRV_BUILTIN_MDCUTSSI:
9383 case FRV_BUILTIN_MQSATHS:
9384 case FRV_BUILTIN_MHSETLOS:
9385 case FRV_BUILTIN_MHSETLOH:
9386 case FRV_BUILTIN_MHSETHIS:
9387 case FRV_BUILTIN_MHSETHIH:
9388 case FRV_BUILTIN_MHDSETS:
9389 case FRV_BUILTIN_MHDSETH:
9390 if (! TARGET_MEDIA_REV2)
9391 {
9392 error ("this media function is only available on the fr400"
9393 " and fr550");
9394 return NULL_RTX;
9395 }
9396 break;
9397
9398 case FRV_BUILTIN_SMASS:
9399 case FRV_BUILTIN_SMSSS:
9400 case FRV_BUILTIN_SMU:
9401 case FRV_BUILTIN_ADDSS:
9402 case FRV_BUILTIN_SUBSS:
9403 case FRV_BUILTIN_SLASS:
9404 case FRV_BUILTIN_SCUTSS:
9405 case FRV_BUILTIN_IACCreadll:
9406 case FRV_BUILTIN_IACCreadl:
9407 case FRV_BUILTIN_IACCsetll:
9408 case FRV_BUILTIN_IACCsetl:
9409 if (!TARGET_FR405_BUILTINS)
9410 {
9411 error ("this builtin function is only available"
9412 " on the fr405 and fr450");
9413 return NULL_RTX;
9414 }
9415 break;
9416
9417 case FRV_BUILTIN_PREFETCH:
9418 if (!TARGET_FR500_FR550_BUILTINS)
9419 {
9420 error ("this builtin function is only available on the fr500"
9421 " and fr550");
9422 return NULL_RTX;
9423 }
9424 break;
9425
9426 case FRV_BUILTIN_MQLCLRHS:
9427 case FRV_BUILTIN_MQLMTHS:
9428 case FRV_BUILTIN_MQSLLHI:
9429 case FRV_BUILTIN_MQSRAHI:
9430 if (!TARGET_MEDIA_FR450)
9431 {
9432 error ("this builtin function is only available on the fr450");
9433 return NULL_RTX;
9434 }
9435 break;
9436
9437 default:
9438 break;
9439 }
9440
9441 /* Expand unique builtins. */
9442
9443 switch (fcode)
9444 {
9445 case FRV_BUILTIN_MTRAP:
9446 return frv_expand_noargs_builtin (CODE_FOR_mtrap);
9447
9448 case FRV_BUILTIN_MCLRACC:
9449 return frv_expand_mclracc_builtin (exp);
9450
9451 case FRV_BUILTIN_MCLRACCA:
9452 if (TARGET_ACC_8)
9453 return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
9454 else
9455 return frv_expand_noargs_builtin (CODE_FOR_mclracca4);
9456
9457 case FRV_BUILTIN_MRDACC:
9458 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);
9459
9460 case FRV_BUILTIN_MRDACCG:
9461 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);
9462
9463 case FRV_BUILTIN_MWTACC:
9464 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);
9465
9466 case FRV_BUILTIN_MWTACCG:
9467 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);
9468
9469 case FRV_BUILTIN_MDPACKH:
9470 return frv_expand_mdpackh_builtin (exp, target);
9471
9472 case FRV_BUILTIN_IACCreadll:
9473 {
9474 rtx src = frv_read_iacc_argument (DImode, exp, 0);
9475 if (target == 0 || !REG_P (target))
9476 target = gen_reg_rtx (DImode);
9477 frv_split_iacc_move (target, src);
9478 return target;
9479 }
9480
9481 case FRV_BUILTIN_IACCreadl:
9482 return frv_read_iacc_argument (SImode, exp, 0);
9483
9484 case FRV_BUILTIN_IACCsetll:
9485 {
9486 rtx dest = frv_read_iacc_argument (DImode, exp, 0);
9487 rtx src = frv_read_argument (exp, 1);
9488 frv_split_iacc_move (dest, force_reg (DImode, src));
9489 return 0;
9490 }
9491
9492 case FRV_BUILTIN_IACCsetl:
9493 {
9494 rtx dest = frv_read_iacc_argument (SImode, exp, 0);
9495 rtx src = frv_read_argument (exp, 1);
9496 emit_move_insn (dest, force_reg (SImode, src));
9497 return 0;
9498 }
9499
9500 default:
9501 break;
9502 }
9503
9504 /* Expand groups of builtins. */
9505
9506 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
9507 if (d->code == fcode)
9508 return frv_expand_set_builtin (d->icode, exp, target);
9509
9510 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9511 if (d->code == fcode)
9512 return frv_expand_unop_builtin (d->icode, exp, target);
9513
9514 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9515 if (d->code == fcode)
9516 return frv_expand_binop_builtin (d->icode, exp, target);
9517
9518 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
9519 if (d->code == fcode)
9520 return frv_expand_cut_builtin (d->icode, exp, target);
9521
9522 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
9523 if (d->code == fcode)
9524 return frv_expand_binopimm_builtin (d->icode, exp, target);
9525
9526 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
9527 if (d->code == fcode)
9528 return frv_expand_voidbinop_builtin (d->icode, exp);
9529
9530 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
9531 if (d->code == fcode)
9532 return frv_expand_voidtriop_builtin (d->icode, exp);
9533
9534 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
9535 if (d->code == fcode)
9536 return frv_expand_voidaccop_builtin (d->icode, exp);
9537
9538 for (i = 0, d = bdesc_int_void2arg;
9539 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
9540 if (d->code == fcode)
9541 return frv_expand_int_void2arg (d->icode, exp);
9542
9543 for (i = 0, d = bdesc_prefetches;
9544 i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
9545 if (d->code == fcode)
9546 return frv_expand_prefetches (d->icode, exp);
9547
9548 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
9549 if (d->code == fcode)
9550 return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
9551 exp, target);
9552
9553 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
9554 if (d->code == fcode)
9555 return frv_expand_store_builtin (d->icode, exp);
9556
9557 return 0;
9558 }
9559
9560 static bool
9561 frv_in_small_data_p (const_tree decl)
9562 {
9563 HOST_WIDE_INT size;
9564 const_tree section_name;
9565
9566 /* Don't apply the -G flag to internal compiler structures. We
9567 should leave such structures in the main data section, partly
9568 for efficiency and partly because the size of some of them
9569 (such as C++ typeinfos) is not known until later. */
9570 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
9571 return false;
9572
9573 /* If we already know which section the decl should be in, see if
9574 it's a small data section. */
9575 section_name = DECL_SECTION_NAME (decl);
9576 if (section_name)
9577 {
9578 gcc_assert (TREE_CODE (section_name) == STRING_CST);
9579 if (frv_string_begins_with (section_name, ".sdata"))
9580 return true;
9581 if (frv_string_begins_with (section_name, ".sbss"))
9582 return true;
9583 return false;
9584 }
9585
9586 size = int_size_in_bytes (TREE_TYPE (decl));
9587 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
9588 return true;
9589
9590 return false;
9591 }
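
/* Illustrative note: with -G 8, a file-scope

       int counters[2];

   (8 bytes) satisfies the size test above and is treated as small data,
   while anything explicitly placed in a section other than .sdata/.sbss,
   or any compiler-generated (DECL_ARTIFICIAL) object, is not.  */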
9592 \f
9593 static bool
9594 frv_rtx_costs (rtx x,
9595 int code ATTRIBUTE_UNUSED,
9596 int outer_code ATTRIBUTE_UNUSED,
9597 int *total,
9598 bool speed ATTRIBUTE_UNUSED)
9599 {
9600 if (outer_code == MEM)
9601 {
9602 /* Don't differentiate between memory addresses. All the ones
9603 we accept have equal cost. */
9604 *total = COSTS_N_INSNS (0);
9605 return true;
9606 }
9607
9608 switch (code)
9609 {
9610 case CONST_INT:
9611 /* Make 12-bit integers really cheap. */
9612 if (IN_RANGE_P (INTVAL (x), -2048, 2047))
9613 {
9614 *total = 0;
9615 return true;
9616 }
9617 /* Fall through. */
9618
9619 case CONST:
9620 case LABEL_REF:
9621 case SYMBOL_REF:
9622 case CONST_DOUBLE:
9623 *total = COSTS_N_INSNS (2);
9624 return true;
9625
9626 case PLUS:
9627 case MINUS:
9628 case AND:
9629 case IOR:
9630 case XOR:
9631 case ASHIFT:
9632 case ASHIFTRT:
9633 case LSHIFTRT:
9634 case NOT:
9635 case NEG:
9636 case COMPARE:
9637 if (GET_MODE (x) == SImode)
9638 *total = COSTS_N_INSNS (1);
9639 else if (GET_MODE (x) == DImode)
9640 *total = COSTS_N_INSNS (2);
9641 else
9642 *total = COSTS_N_INSNS (3);
9643 return true;
9644
9645 case MULT:
9646 if (GET_MODE (x) == SImode)
9647 *total = COSTS_N_INSNS (2);
9648 else
9649 *total = COSTS_N_INSNS (6); /* guess */
9650 return true;
9651
9652 case DIV:
9653 case UDIV:
9654 case MOD:
9655 case UMOD:
9656 *total = COSTS_N_INSNS (18);
9657 return true;
9658
9659 case MEM:
9660 *total = COSTS_N_INSNS (3);
9661 return true;
9662
9663 default:
9664 return false;
9665 }
9666 }
9667 \f
9668 static void
9669 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9670 {
9671 switch_to_section (ctors_section);
9672 assemble_align (POINTER_SIZE);
9673 if (TARGET_FDPIC)
9674 {
9675 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9676
9677 gcc_assert (ok);
9678 return;
9679 }
9680 assemble_integer_with_op ("\t.picptr\t", symbol);
9681 }
9682
9683 static void
9684 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9685 {
9686 switch_to_section (dtors_section);
9687 assemble_align (POINTER_SIZE);
9688 if (TARGET_FDPIC)
9689 {
9690 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9691
9692 gcc_assert (ok);
9693 return;
9694 }
9695 assemble_integer_with_op ("\t.picptr\t", symbol);
9696 }
9697
9698 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9699
9700 static rtx
9701 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9702 int incoming ATTRIBUTE_UNUSED)
9703 {
9704 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
9705 }
9706
9707 #define TLS_BIAS (2048 - 16)
9708
9709 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
9710 We need to emit DTP-relative relocations. */
9711
9712 static void
9713 frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
9714 {
9715 gcc_assert (size == 4);
9716 fputs ("\t.picptr\ttlsmoff(", file);
9717 /* We want the unbiased TLS offset, so add the bias to the
9718 expression, such that the implicit biasing cancels out. */
9719 output_addr_const (file, plus_constant (x, TLS_BIAS));
9720 fputs (")", file);
9721 }
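
/* Illustrative note: for a thread-local variable V, the directive emitted
   above looks like

       .picptr tlsmoff(V+2032)

   since TLS_BIAS is 2048 - 16 = 2032; the implicit biasing of tlsmoff
   then cancels the added constant, leaving the unbiased DTP-relative
   offset in the DWARF data.  */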
9722
9723 #include "gt-frv.h"