1 /* Copyright (C) 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007,
2 2008 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "expr.h"
39 #include "obstack.h"
40 #include "except.h"
41 #include "function.h"
42 #include "optabs.h"
43 #include "toplev.h"
44 #include "basic-block.h"
45 #include "tm_p.h"
46 #include "ggc.h"
47 #include <ctype.h>
48 #include "target.h"
49 #include "target-def.h"
50 #include "targhooks.h"
51 #include "integrate.h"
52 #include "langhooks.h"
53 #include "df.h"
54
55 #ifndef FRV_INLINE
56 #define FRV_INLINE inline
57 #endif
58
59 /* The maximum number of distinct NOP patterns. There are three:
60 nop, fnop and mnop. */
61 #define NUM_NOP_PATTERNS 3
62
63 /* Classification of instructions and units: integer, floating-point/media,
64 branch and control. */
65 enum frv_insn_group { GROUP_I, GROUP_FM, GROUP_B, GROUP_C, NUM_GROUPS };
66
67 /* The DFA names of the units, in packet order. */
68 static const char *const frv_unit_names[] =
69 {
70 "c",
71 "i0", "f0",
72 "i1", "f1",
73 "i2", "f2",
74 "i3", "f3",
75 "b0", "b1"
76 };
77
78 /* The classification of each unit in frv_unit_names[]. */
79 static const enum frv_insn_group frv_unit_groups[ARRAY_SIZE (frv_unit_names)] =
80 {
81 GROUP_C,
82 GROUP_I, GROUP_FM,
83 GROUP_I, GROUP_FM,
84 GROUP_I, GROUP_FM,
85 GROUP_I, GROUP_FM,
86 GROUP_B, GROUP_B
87 };
88
89 /* Return the DFA unit code associated with the Nth unit of integer
 90     or floating-point group GROUP.  */
91 #define NTH_UNIT(GROUP, N) frv_unit_codes[(GROUP) + (N) * 2 + 1]
92
93 /* Return the number of integer or floating-point unit UNIT
94 (1 for I1, 2 for F2, etc.). */
95 #define UNIT_NUMBER(UNIT) (((UNIT) - 1) / 2)
96
97 /* The DFA unit number for each unit in frv_unit_names[]. */
98 static int frv_unit_codes[ARRAY_SIZE (frv_unit_names)];
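/* Illustrative sketch, not part of the port: a hypothetical function showing
   how NTH_UNIT and UNIT_NUMBER relate to the layout of frv_unit_names[].
   The DFA codes in frv_unit_codes[] are only filled in by
   frv_override_options, so the first two assertions talk about array
   indices rather than DFA codes.  */
#if 0
static void
frv_unit_macro_example (void)
{
  /* frv_unit_names[] is { "c", "i0", "f0", "i1", "f1", ... }, so the Nth
     unit of GROUP_I or GROUP_FM sits at index GROUP + N * 2 + 1, which is
     exactly the subscript that NTH_UNIT applies to frv_unit_codes[].  */
  gcc_assert (strcmp (frv_unit_names[GROUP_I + 0 * 2 + 1], "i0") == 0);
  gcc_assert (strcmp (frv_unit_names[GROUP_FM + 2 * 2 + 1], "f2") == 0);

  /* UNIT_NUMBER recovers N from such an index.  */
  gcc_assert (UNIT_NUMBER (3) == 1);	/* "i1" is I unit number 1.  */
  gcc_assert (UNIT_NUMBER (6) == 2);	/* "f2" is FM unit number 2.  */
}
#endif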
99
100 /* FRV_TYPE_TO_UNIT[T] is the last unit in frv_unit_names[] that can issue
101 an instruction of type T. The value is ARRAY_SIZE (frv_unit_names) if
102 no instruction of type T has been seen. */
103 static unsigned int frv_type_to_unit[TYPE_UNKNOWN + 1];
104
105 /* An array of dummy nop INSNs, one for each type of nop that the
106 target supports. */
107 static GTY(()) rtx frv_nops[NUM_NOP_PATTERNS];
108
109 /* The number of nop instructions in frv_nops[]. */
110 static unsigned int frv_num_nops;
111
112 /* Information about one __builtin_read or __builtin_write access, or
113 the combination of several such accesses. The most general value
114 is all-zeros (an unknown access to an unknown address). */
115 struct frv_io {
116 /* The type of access. FRV_IO_UNKNOWN means the access can be either
117 a read or a write. */
118 enum { FRV_IO_UNKNOWN, FRV_IO_READ, FRV_IO_WRITE } type;
119
120 /* The constant address being accessed, or zero if not known. */
121 HOST_WIDE_INT const_address;
122
123 /* The run-time address, as used in operand 0 of the membar pattern. */
124 rtx var_address;
125 };
126
127 /* Return true if instruction INSN should be packed with the following
128 instruction. */
129 #define PACKING_FLAG_P(INSN) (GET_MODE (INSN) == TImode)
130
131 /* Set the value of PACKING_FLAG_P(INSN). */
132 #define SET_PACKING_FLAG(INSN) PUT_MODE (INSN, TImode)
133 #define CLEAR_PACKING_FLAG(INSN) PUT_MODE (INSN, VOIDmode)
134
135 /* Loop with REG set to each hard register in rtx X. */
136 #define FOR_EACH_REGNO(REG, X) \
137 for (REG = REGNO (X); \
138 REG < REGNO (X) + HARD_REGNO_NREGS (REGNO (X), GET_MODE (X)); \
139 REG++)
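/* Illustrative sketch, not part of the port: a hypothetical helper showing
   the intended use of FOR_EACH_REGNO, namely walking every hard register
   occupied by a (possibly multi-word) hard REG rtx.  */
#if 0
static void
frv_mark_hard_regs_example (rtx x, HARD_REG_SET *live)
{
  unsigned int regno;

  gcc_assert (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);

  /* For a DImode GPR this sets two bits, one per word.  */
  FOR_EACH_REGNO (regno, x)
    SET_HARD_REG_BIT (*live, regno);
}
#endif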
140
141 /* This structure contains machine specific function data. */
142 struct GTY(()) machine_function
143 {
144 /* True if we have created an rtx that relies on the stack frame. */
145 int frame_needed;
146
147 /* True if this function contains at least one __builtin_{read,write}*. */
148 bool has_membar_p;
149 };
150
151 /* Temporary register allocation support structure. */
152 typedef struct frv_tmp_reg_struct
153 {
154 HARD_REG_SET regs; /* possible registers to allocate */
155 int next_reg[N_REG_CLASSES]; /* next register to allocate per class */
156 }
157 frv_tmp_reg_t;
158
159 /* Register state information for VLIW re-packing phase. */
160 #define REGSTATE_CC_MASK 0x07 /* Mask to isolate CCn for cond exec */
161 #define REGSTATE_MODIFIED 0x08 /* reg modified in current VLIW insn */
162 #define REGSTATE_IF_TRUE 0x10 /* reg modified in cond exec true */
163 #define REGSTATE_IF_FALSE 0x20 /* reg modified in cond exec false */
164
165 #define REGSTATE_IF_EITHER (REGSTATE_IF_TRUE | REGSTATE_IF_FALSE)
166
167 typedef unsigned char regstate_t;
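/* Illustrative sketch, not part of the port: how the REGSTATE_* bits are
   combined into a regstate_t.  The CC number below (3) is made up for the
   example; the low bits record which CC register guards a conditional
   write and the high bits record how the register was modified.  */
#if 0
static void
frv_regstate_example (void)
{
  regstate_t state = 0;

  state |= 3 & REGSTATE_CC_MASK;	/* write guarded by CC3 */
  state |= REGSTATE_MODIFIED;		/* modified in the current VLIW insn */
  state |= REGSTATE_IF_TRUE;		/* ...but only on the "true" side */

  gcc_assert ((state & REGSTATE_IF_EITHER) == REGSTATE_IF_TRUE);
}
#endif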
168
169 /* Used in frv_frame_accessor_t to indicate the direction of a register-to-
170 memory move. */
171 enum frv_stack_op
172 {
173 FRV_LOAD,
174 FRV_STORE
175 };
176
177 /* Information required by frv_frame_access. */
178 typedef struct
179 {
180 /* This field is FRV_LOAD if registers are to be loaded from the stack and
181 FRV_STORE if they should be stored onto the stack. FRV_STORE implies
182 the move is being done by the prologue code while FRV_LOAD implies it
183 is being done by the epilogue. */
184 enum frv_stack_op op;
185
186 /* The base register to use when accessing the stack. This may be the
187 frame pointer, stack pointer, or a temporary. The choice of register
188 depends on which part of the frame is being accessed and how big the
189 frame is. */
190 rtx base;
191
192 /* The offset of BASE from the bottom of the current frame, in bytes. */
193 int base_offset;
194 } frv_frame_accessor_t;
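/* Illustrative sketch, not part of the port: how a frame accessor might be
   initialized before calling the frv_frame_access* routines.  The field
   values are assumptions for illustration only, not a transcription of the
   real prologue code.  */
#if 0
static void
frv_frame_accessor_example (void)
{
  frv_frame_accessor_t accessor;

  accessor.op = FRV_STORE;		/* prologue-style register stores */
  accessor.base = stack_pointer_rtx;	/* access memory relative to sp */
  accessor.base_offset = 0;		/* sp is the bottom of the frame */
}
#endif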
195
196 /* Define the information needed to generate branch and scc insns. This is
197 stored from the compare operation. */
198 rtx frv_compare_op0;
199 rtx frv_compare_op1;
200
201 /* Conditional execution support gathered together in one structure. */
202 typedef struct
203 {
204 /* Linked list of insns to add if the conditional execution conversion was
205 successful. Each link points to an EXPR_LIST which points to the pattern
206 of the insn to add, and the insn to be inserted before. */
207 rtx added_insns_list;
208
209 /* Identify which registers are safe to allocate for if conversions to
210 conditional execution. We keep the last allocated register in the
211 register classes between COND_EXEC statements. This will mean we allocate
212 different registers for each different COND_EXEC group if we can. This
213 might allow the scheduler to intermix two different COND_EXEC sections. */
214 frv_tmp_reg_t tmp_reg;
215
 216   /* For nested IFs, identify which CC registers are used outside of being set
 217      via a compare insn and used via a check insn.  This will allow us to
218 know if we can rewrite the register to use a different register that will
219 be paired with the CR register controlling the nested IF-THEN blocks. */
220 HARD_REG_SET nested_cc_ok_rewrite;
221
222 /* Temporary registers allocated to hold constants during conditional
223 execution. */
224 rtx scratch_regs[FIRST_PSEUDO_REGISTER];
225
226 /* Current number of temp registers available. */
227 int cur_scratch_regs;
228
229 /* Number of nested conditional execution blocks. */
230 int num_nested_cond_exec;
231
232 /* Map of insns that set up constants in scratch registers. */
233 bitmap scratch_insns_bitmap;
234
235 /* Conditional execution test register (CC0..CC7). */
236 rtx cr_reg;
237
238 /* Conditional execution compare register that is paired with cr_reg, so that
239 nested compares can be done. The csubcc and caddcc instructions don't
240 have enough bits to specify both a CC register to be set and a CR register
241 to do the test on, so the same bit number is used for both. Needless to
242 say, this is rather inconvenient for GCC. */
243 rtx nested_cc_reg;
244
245 /* Extra CR registers used for &&, ||. */
246 rtx extra_int_cr;
247 rtx extra_fp_cr;
248
249 /* Previous CR used in nested if, to make sure we are dealing with the same
250 nested if as the previous statement. */
251 rtx last_nested_if_cr;
252 }
253 frv_ifcvt_t;
254
255 static /* GTY(()) */ frv_ifcvt_t frv_ifcvt;
256
257 /* Map register number to smallest register class. */
258 enum reg_class regno_reg_class[FIRST_PSEUDO_REGISTER];
259
260 /* Map class letter into register class. */
261 enum reg_class reg_class_from_letter[256];
262
263 /* Cached value of frv_stack_info. */
264 static frv_stack_t *frv_stack_cache = (frv_stack_t *)0;
265
266 /* -mcpu= support */
267 frv_cpu_t frv_cpu_type = CPU_TYPE; /* value of -mcpu= */
268
269 /* Forward references */
270
271 static bool frv_handle_option (size_t, const char *, int);
272 static int frv_default_flags_for_cpu (void);
273 static int frv_string_begins_with (const_tree, const char *);
274 static FRV_INLINE bool frv_small_data_reloc_p (rtx, int);
275 static void frv_print_operand_memory_reference_reg
276 (FILE *, rtx);
277 static void frv_print_operand_memory_reference (FILE *, rtx, int);
278 static int frv_print_operand_jump_hint (rtx);
279 static const char *comparison_string (enum rtx_code, rtx);
280 static FRV_INLINE int frv_regno_ok_for_base_p (int, int);
281 static rtx single_set_pattern (rtx);
282 static int frv_function_contains_far_jump (void);
283 static rtx frv_alloc_temp_reg (frv_tmp_reg_t *,
284 enum reg_class,
285 enum machine_mode,
286 int, int);
287 static rtx frv_frame_offset_rtx (int);
288 static rtx frv_frame_mem (enum machine_mode, rtx, int);
289 static rtx frv_dwarf_store (rtx, int);
290 static void frv_frame_insn (rtx, rtx);
291 static void frv_frame_access (frv_frame_accessor_t*,
292 rtx, int);
293 static void frv_frame_access_multi (frv_frame_accessor_t*,
294 frv_stack_t *, int);
295 static void frv_frame_access_standard_regs (enum frv_stack_op,
296 frv_stack_t *);
297 static struct machine_function *frv_init_machine_status (void);
298 static rtx frv_int_to_acc (enum insn_code, int, rtx);
299 static enum machine_mode frv_matching_accg_mode (enum machine_mode);
300 static rtx frv_read_argument (tree, unsigned int);
301 static rtx frv_read_iacc_argument (enum machine_mode, tree, unsigned int);
302 static int frv_check_constant_argument (enum insn_code, int, rtx);
303 static rtx frv_legitimize_target (enum insn_code, rtx);
304 static rtx frv_legitimize_argument (enum insn_code, int, rtx);
305 static rtx frv_legitimize_tls_address (rtx, enum tls_model);
306 static rtx frv_legitimize_address (rtx, rtx, enum machine_mode);
307 static rtx frv_expand_set_builtin (enum insn_code, tree, rtx);
308 static rtx frv_expand_unop_builtin (enum insn_code, tree, rtx);
309 static rtx frv_expand_binop_builtin (enum insn_code, tree, rtx);
310 static rtx frv_expand_cut_builtin (enum insn_code, tree, rtx);
311 static rtx frv_expand_binopimm_builtin (enum insn_code, tree, rtx);
312 static rtx frv_expand_voidbinop_builtin (enum insn_code, tree);
313 static rtx frv_expand_int_void2arg (enum insn_code, tree);
314 static rtx frv_expand_prefetches (enum insn_code, tree);
315 static rtx frv_expand_voidtriop_builtin (enum insn_code, tree);
316 static rtx frv_expand_voidaccop_builtin (enum insn_code, tree);
317 static rtx frv_expand_mclracc_builtin (tree);
318 static rtx frv_expand_mrdacc_builtin (enum insn_code, tree);
319 static rtx frv_expand_mwtacc_builtin (enum insn_code, tree);
320 static rtx frv_expand_noargs_builtin (enum insn_code);
321 static void frv_split_iacc_move (rtx, rtx);
322 static rtx frv_emit_comparison (enum rtx_code, rtx, rtx);
323 static int frv_clear_registers_used (rtx *, void *);
324 static void frv_ifcvt_add_insn (rtx, rtx, int);
325 static rtx frv_ifcvt_rewrite_mem (rtx, enum machine_mode, rtx);
326 static rtx frv_ifcvt_load_value (rtx, rtx);
327 static int frv_acc_group_1 (rtx *, void *);
328 static unsigned int frv_insn_unit (rtx);
329 static bool frv_issues_to_branch_unit_p (rtx);
330 static int frv_cond_flags (rtx);
331 static bool frv_regstate_conflict_p (regstate_t, regstate_t);
332 static int frv_registers_conflict_p_1 (rtx *, void *);
333 static bool frv_registers_conflict_p (rtx);
334 static void frv_registers_update_1 (rtx, const_rtx, void *);
335 static void frv_registers_update (rtx);
336 static void frv_start_packet (void);
337 static void frv_start_packet_block (void);
338 static void frv_finish_packet (void (*) (void));
339 static bool frv_pack_insn_p (rtx);
340 static void frv_add_insn_to_packet (rtx);
341 static void frv_insert_nop_in_packet (rtx);
342 static bool frv_for_each_packet (void (*) (void));
343 static bool frv_sort_insn_group_1 (enum frv_insn_group,
344 unsigned int, unsigned int,
345 unsigned int, unsigned int,
346 state_t);
347 static int frv_compare_insns (const void *, const void *);
348 static void frv_sort_insn_group (enum frv_insn_group);
349 static void frv_reorder_packet (void);
350 static void frv_fill_unused_units (enum frv_insn_group);
351 static void frv_align_label (void);
352 static void frv_reorg_packet (void);
353 static void frv_register_nop (rtx);
354 static void frv_reorg (void);
355 static void frv_pack_insns (void);
356 static void frv_function_prologue (FILE *, HOST_WIDE_INT);
357 static void frv_function_epilogue (FILE *, HOST_WIDE_INT);
358 static bool frv_assemble_integer (rtx, unsigned, int);
359 static void frv_init_builtins (void);
360 static rtx frv_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
361 static void frv_init_libfuncs (void);
362 static bool frv_in_small_data_p (const_tree);
363 static void frv_asm_output_mi_thunk
364 (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, tree);
365 static void frv_setup_incoming_varargs (CUMULATIVE_ARGS *,
366 enum machine_mode,
367 tree, int *, int);
368 static rtx frv_expand_builtin_saveregs (void);
369 static void frv_expand_builtin_va_start (tree, rtx);
370 static bool frv_rtx_costs (rtx, int, int, int*, bool);
371 static void frv_asm_out_constructor (rtx, int);
372 static void frv_asm_out_destructor (rtx, int);
373 static bool frv_function_symbol_referenced_p (rtx);
374 static bool frv_cannot_force_const_mem (rtx);
375 static const char *unspec_got_name (int);
376 static void frv_output_const_unspec (FILE *,
377 const struct frv_unspec *);
378 static bool frv_function_ok_for_sibcall (tree, tree);
379 static rtx frv_struct_value_rtx (tree, int);
380 static bool frv_must_pass_in_stack (enum machine_mode mode, const_tree type);
381 static int frv_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
382 tree, bool);
383 static void frv_output_dwarf_dtprel (FILE *, int, rtx)
384 ATTRIBUTE_UNUSED;
385 static bool frv_secondary_reload (bool, rtx, enum reg_class,
386 enum machine_mode,
387 secondary_reload_info *);
388 \f
389 /* Allow us to easily change the default for -malloc-cc. */
390 #ifndef DEFAULT_NO_ALLOC_CC
391 #define MASK_DEFAULT_ALLOC_CC MASK_ALLOC_CC
392 #else
393 #define MASK_DEFAULT_ALLOC_CC 0
394 #endif
395 \f
396 /* Initialize the GCC target structure. */
397 #undef TARGET_ASM_FUNCTION_PROLOGUE
398 #define TARGET_ASM_FUNCTION_PROLOGUE frv_function_prologue
399 #undef TARGET_ASM_FUNCTION_EPILOGUE
400 #define TARGET_ASM_FUNCTION_EPILOGUE frv_function_epilogue
401 #undef TARGET_ASM_INTEGER
402 #define TARGET_ASM_INTEGER frv_assemble_integer
403 #undef TARGET_DEFAULT_TARGET_FLAGS
404 #define TARGET_DEFAULT_TARGET_FLAGS \
405 (MASK_DEFAULT_ALLOC_CC \
406 | MASK_COND_MOVE \
407 | MASK_SCC \
408 | MASK_COND_EXEC \
409 | MASK_VLIW_BRANCH \
410 | MASK_MULTI_CE \
411 | MASK_NESTED_CE)
412 #undef TARGET_HANDLE_OPTION
413 #define TARGET_HANDLE_OPTION frv_handle_option
414 #undef TARGET_INIT_BUILTINS
415 #define TARGET_INIT_BUILTINS frv_init_builtins
416 #undef TARGET_EXPAND_BUILTIN
417 #define TARGET_EXPAND_BUILTIN frv_expand_builtin
418 #undef TARGET_INIT_LIBFUNCS
419 #define TARGET_INIT_LIBFUNCS frv_init_libfuncs
420 #undef TARGET_IN_SMALL_DATA_P
421 #define TARGET_IN_SMALL_DATA_P frv_in_small_data_p
422 #undef TARGET_RTX_COSTS
423 #define TARGET_RTX_COSTS frv_rtx_costs
424 #undef TARGET_ASM_CONSTRUCTOR
425 #define TARGET_ASM_CONSTRUCTOR frv_asm_out_constructor
426 #undef TARGET_ASM_DESTRUCTOR
427 #define TARGET_ASM_DESTRUCTOR frv_asm_out_destructor
428
429 #undef TARGET_ASM_OUTPUT_MI_THUNK
430 #define TARGET_ASM_OUTPUT_MI_THUNK frv_asm_output_mi_thunk
431 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
432 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
433
434 #undef TARGET_SCHED_ISSUE_RATE
435 #define TARGET_SCHED_ISSUE_RATE frv_issue_rate
436
437 #undef TARGET_LEGITIMIZE_ADDRESS
438 #define TARGET_LEGITIMIZE_ADDRESS frv_legitimize_address
439
440 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
441 #define TARGET_FUNCTION_OK_FOR_SIBCALL frv_function_ok_for_sibcall
442 #undef TARGET_CANNOT_FORCE_CONST_MEM
443 #define TARGET_CANNOT_FORCE_CONST_MEM frv_cannot_force_const_mem
444
445 #undef TARGET_HAVE_TLS
446 #define TARGET_HAVE_TLS HAVE_AS_TLS
447
448 #undef TARGET_STRUCT_VALUE_RTX
449 #define TARGET_STRUCT_VALUE_RTX frv_struct_value_rtx
450 #undef TARGET_MUST_PASS_IN_STACK
451 #define TARGET_MUST_PASS_IN_STACK frv_must_pass_in_stack
452 #undef TARGET_PASS_BY_REFERENCE
453 #define TARGET_PASS_BY_REFERENCE hook_pass_by_reference_must_pass_in_stack
454 #undef TARGET_ARG_PARTIAL_BYTES
455 #define TARGET_ARG_PARTIAL_BYTES frv_arg_partial_bytes
456
457 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
458 #define TARGET_EXPAND_BUILTIN_SAVEREGS frv_expand_builtin_saveregs
459 #undef TARGET_SETUP_INCOMING_VARARGS
460 #define TARGET_SETUP_INCOMING_VARARGS frv_setup_incoming_varargs
461 #undef TARGET_MACHINE_DEPENDENT_REORG
462 #define TARGET_MACHINE_DEPENDENT_REORG frv_reorg
463
464 #undef TARGET_EXPAND_BUILTIN_VA_START
465 #define TARGET_EXPAND_BUILTIN_VA_START frv_expand_builtin_va_start
466
467 #if HAVE_AS_TLS
468 #undef TARGET_ASM_OUTPUT_DWARF_DTPREL
469 #define TARGET_ASM_OUTPUT_DWARF_DTPREL frv_output_dwarf_dtprel
470 #endif
471
472 #undef TARGET_SECONDARY_RELOAD
473 #define TARGET_SECONDARY_RELOAD frv_secondary_reload
474
475 struct gcc_target targetm = TARGET_INITIALIZER;
476
477 #define FRV_SYMBOL_REF_TLS_P(RTX) \
478 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
479
480 \f
481 /* Any function call that satisfies the machine-independent
482 requirements is eligible on FR-V. */
483
484 static bool
485 frv_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
486 tree exp ATTRIBUTE_UNUSED)
487 {
488 return true;
489 }
490
491 /* Return true if SYMBOL is a small data symbol and relocation RELOC
492 can be used to access it directly in a load or store. */
493
494 static FRV_INLINE bool
495 frv_small_data_reloc_p (rtx symbol, int reloc)
496 {
497 return (GET_CODE (symbol) == SYMBOL_REF
498 && SYMBOL_REF_SMALL_P (symbol)
499 && (!TARGET_FDPIC || flag_pic == 1)
500 && (reloc == R_FRV_GOTOFF12 || reloc == R_FRV_GPREL12));
501 }
502
503 /* Return true if X is a valid relocation unspec. If it is, fill in UNSPEC
504 appropriately. */
505
506 bool
507 frv_const_unspec_p (rtx x, struct frv_unspec *unspec)
508 {
509 if (GET_CODE (x) == CONST)
510 {
511 unspec->offset = 0;
512 x = XEXP (x, 0);
513 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
514 {
515 unspec->offset += INTVAL (XEXP (x, 1));
516 x = XEXP (x, 0);
517 }
518 if (GET_CODE (x) == UNSPEC && XINT (x, 1) == UNSPEC_GOT)
519 {
520 unspec->symbol = XVECEXP (x, 0, 0);
521 unspec->reloc = INTVAL (XVECEXP (x, 0, 1));
522
523 if (unspec->offset == 0)
524 return true;
525
526 if (frv_small_data_reloc_p (unspec->symbol, unspec->reloc)
527 && unspec->offset > 0
528 && (unsigned HOST_WIDE_INT) unspec->offset < g_switch_value)
529 return true;
530 }
531 }
532 return false;
533 }
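/* Illustrative sketch, not part of the port: the rtx shape that
   frv_const_unspec_p accepts.  The symbol name, relocation and offset below
   are made up; the point is the CONST (PLUS (UNSPEC ..., CONST_INT)) form
   that the function walks through.  */
#if 0
static rtx
frv_const_unspec_example (void)
{
  rtx symbol = gen_rtx_SYMBOL_REF (SImode, "example_var");
  rtx unspec = gen_rtx_UNSPEC (SImode,
			       gen_rtvec (2, symbol,
					  GEN_INT (R_FRV_GOTOFF12)),
			       UNSPEC_GOT);

  return gen_rtx_CONST (SImode,
			gen_rtx_PLUS (SImode, unspec, GEN_INT (4)));
}
#endif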
534
535 /* Decide whether we can force certain constants to memory. If we
536 decide we can't, the caller should be able to cope with it in
537 another way.
538
539 We never allow constants to be forced into memory for TARGET_FDPIC.
540 This is necessary for several reasons:
541
542 1. Since LEGITIMATE_CONSTANT_P rejects constant pool addresses, the
543 target-independent code will try to force them into the constant
544 pool, thus leading to infinite recursion.
545
546 2. We can never introduce new constant pool references during reload.
547 Any such reference would require use of the pseudo FDPIC register.
548
549 3. We can't represent a constant added to a function pointer (which is
550 not the same as a pointer to a function+constant).
551
552 4. In many cases, it's more efficient to calculate the constant in-line. */
553
554 static bool
555 frv_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
556 {
557 return TARGET_FDPIC;
558 }
559 \f
560 /* Implement TARGET_HANDLE_OPTION. */
561
562 static bool
563 frv_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
564 {
565 switch (code)
566 {
567 case OPT_mcpu_:
568 if (strcmp (arg, "simple") == 0)
569 frv_cpu_type = FRV_CPU_SIMPLE;
570 else if (strcmp (arg, "tomcat") == 0)
571 frv_cpu_type = FRV_CPU_TOMCAT;
572 else if (strcmp (arg, "fr550") == 0)
573 frv_cpu_type = FRV_CPU_FR550;
574 else if (strcmp (arg, "fr500") == 0)
575 frv_cpu_type = FRV_CPU_FR500;
576 else if (strcmp (arg, "fr450") == 0)
577 frv_cpu_type = FRV_CPU_FR450;
578 else if (strcmp (arg, "fr405") == 0)
579 frv_cpu_type = FRV_CPU_FR405;
580 else if (strcmp (arg, "fr400") == 0)
581 frv_cpu_type = FRV_CPU_FR400;
582 else if (strcmp (arg, "fr300") == 0)
583 frv_cpu_type = FRV_CPU_FR300;
584 else if (strcmp (arg, "frv") == 0)
585 frv_cpu_type = FRV_CPU_GENERIC;
586 else
587 return false;
588 return true;
589
590 default:
591 return true;
592 }
593 }
594
595 static int
596 frv_default_flags_for_cpu (void)
597 {
598 switch (frv_cpu_type)
599 {
600 case FRV_CPU_GENERIC:
601 return MASK_DEFAULT_FRV;
602
603 case FRV_CPU_FR550:
604 return MASK_DEFAULT_FR550;
605
606 case FRV_CPU_FR500:
607 case FRV_CPU_TOMCAT:
608 return MASK_DEFAULT_FR500;
609
610 case FRV_CPU_FR450:
611 return MASK_DEFAULT_FR450;
612
613 case FRV_CPU_FR405:
614 case FRV_CPU_FR400:
615 return MASK_DEFAULT_FR400;
616
617 case FRV_CPU_FR300:
618 case FRV_CPU_SIMPLE:
619 return MASK_DEFAULT_SIMPLE;
620
621 default:
622 gcc_unreachable ();
623 }
624 }
625
626 /* Sometimes certain combinations of command options do not make
627 sense on a particular target machine. You can define a macro
628 `OVERRIDE_OPTIONS' to take account of this. This macro, if
629 defined, is executed once just after all the command options have
630 been parsed.
631
632 Don't use this macro to turn on various extra optimizations for
633 `-O'. That is what `OPTIMIZATION_OPTIONS' is for. */
634
635 void
636 frv_override_options (void)
637 {
638 int regno;
639 unsigned int i;
640
641 target_flags |= (frv_default_flags_for_cpu () & ~target_flags_explicit);
642
643 /* -mlibrary-pic sets -fPIC and -G0 and also suppresses warnings from the
644 linker about linking pic and non-pic code. */
645 if (TARGET_LIBPIC)
646 {
647 if (!flag_pic) /* -fPIC */
648 flag_pic = 2;
649
650 if (! g_switch_set) /* -G0 */
651 {
652 g_switch_set = 1;
653 g_switch_value = 0;
654 }
655 }
656
657 /* A C expression whose value is a register class containing hard
658 register REGNO. In general there is more than one such class;
659 choose a class which is "minimal", meaning that no smaller class
660 also contains the register. */
661
662 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
663 {
664 enum reg_class rclass;
665
666 if (GPR_P (regno))
667 {
668 int gpr_reg = regno - GPR_FIRST;
669
670 if (gpr_reg == GR8_REG)
671 rclass = GR8_REGS;
672
673 else if (gpr_reg == GR9_REG)
674 rclass = GR9_REGS;
675
676 else if (gpr_reg == GR14_REG)
677 rclass = FDPIC_FPTR_REGS;
678
679 else if (gpr_reg == FDPIC_REGNO)
680 rclass = FDPIC_REGS;
681
682 else if ((gpr_reg & 3) == 0)
683 rclass = QUAD_REGS;
684
685 else if ((gpr_reg & 1) == 0)
686 rclass = EVEN_REGS;
687
688 else
689 rclass = GPR_REGS;
690 }
691
692 else if (FPR_P (regno))
693 {
694 int fpr_reg = regno - GPR_FIRST;
695 if ((fpr_reg & 3) == 0)
696 rclass = QUAD_FPR_REGS;
697
698 else if ((fpr_reg & 1) == 0)
699 rclass = FEVEN_REGS;
700
701 else
702 rclass = FPR_REGS;
703 }
704
705 else if (regno == LR_REGNO)
706 rclass = LR_REG;
707
708 else if (regno == LCR_REGNO)
709 rclass = LCR_REG;
710
711 else if (ICC_P (regno))
712 rclass = ICC_REGS;
713
714 else if (FCC_P (regno))
715 rclass = FCC_REGS;
716
717 else if (ICR_P (regno))
718 rclass = ICR_REGS;
719
720 else if (FCR_P (regno))
721 rclass = FCR_REGS;
722
723 else if (ACC_P (regno))
724 {
725 int r = regno - ACC_FIRST;
726 if ((r & 3) == 0)
727 rclass = QUAD_ACC_REGS;
728 else if ((r & 1) == 0)
729 rclass = EVEN_ACC_REGS;
730 else
731 rclass = ACC_REGS;
732 }
733
734 else if (ACCG_P (regno))
735 rclass = ACCG_REGS;
736
737 else
738 rclass = NO_REGS;
739
740 regno_reg_class[regno] = rclass;
741 }
742
743 /* Check for small data option */
744 if (!g_switch_set)
745 g_switch_value = SDATA_DEFAULT_SIZE;
746
747 /* A C expression which defines the machine-dependent operand
748 constraint letters for register classes. If CHAR is such a
749 letter, the value should be the register class corresponding to
750 it. Otherwise, the value should be `NO_REGS'. The register
751 letter `r', corresponding to class `GENERAL_REGS', will not be
752 passed to this macro; you do not need to handle it.
753
754 The following letters are unavailable, due to being used as
755 constraints:
756 '0'..'9'
757 '<', '>'
758 'E', 'F', 'G', 'H'
759 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P'
760 'Q', 'R', 'S', 'T', 'U'
761 'V', 'X'
762 'g', 'i', 'm', 'n', 'o', 'p', 'r', 's' */
763
764 for (i = 0; i < 256; i++)
765 reg_class_from_letter[i] = NO_REGS;
766
767 reg_class_from_letter['a'] = ACC_REGS;
768 reg_class_from_letter['b'] = EVEN_ACC_REGS;
769 reg_class_from_letter['c'] = CC_REGS;
770 reg_class_from_letter['d'] = GPR_REGS;
771 reg_class_from_letter['e'] = EVEN_REGS;
772 reg_class_from_letter['f'] = FPR_REGS;
773 reg_class_from_letter['h'] = FEVEN_REGS;
774 reg_class_from_letter['l'] = LR_REG;
775 reg_class_from_letter['q'] = QUAD_REGS;
776 reg_class_from_letter['t'] = ICC_REGS;
777 reg_class_from_letter['u'] = FCC_REGS;
778 reg_class_from_letter['v'] = ICR_REGS;
779 reg_class_from_letter['w'] = FCR_REGS;
780 reg_class_from_letter['x'] = QUAD_FPR_REGS;
781 reg_class_from_letter['y'] = LCR_REG;
782 reg_class_from_letter['z'] = SPR_REGS;
783 reg_class_from_letter['A'] = QUAD_ACC_REGS;
784 reg_class_from_letter['B'] = ACCG_REGS;
785 reg_class_from_letter['C'] = CR_REGS;
786 reg_class_from_letter['W'] = FDPIC_CALL_REGS; /* gp14+15 */
787 reg_class_from_letter['Z'] = FDPIC_REGS; /* gp15 */
788
789 /* There is no single unaligned SI op for PIC code. Sometimes we
790 need to use ".4byte" and sometimes we need to use ".picptr".
791 See frv_assemble_integer for details. */
792 if (flag_pic || TARGET_FDPIC)
793 targetm.asm_out.unaligned_op.si = 0;
794
795 if ((target_flags_explicit & MASK_LINKED_FP) == 0)
796 target_flags |= MASK_LINKED_FP;
797
798 if ((target_flags_explicit & MASK_OPTIMIZE_MEMBAR) == 0)
799 target_flags |= MASK_OPTIMIZE_MEMBAR;
800
801 for (i = 0; i < ARRAY_SIZE (frv_unit_names); i++)
802 frv_unit_codes[i] = get_cpu_unit_code (frv_unit_names[i]);
803
804 for (i = 0; i < ARRAY_SIZE (frv_type_to_unit); i++)
805 frv_type_to_unit[i] = ARRAY_SIZE (frv_unit_codes);
806
807 init_machine_status = frv_init_machine_status;
808 }
809
810 \f
811 /* Some machines may desire to change what optimizations are performed for
812 various optimization levels. This macro, if defined, is executed once just
813 after the optimization level is determined and before the remainder of the
814 command options have been parsed. Values set in this macro are used as the
815 default values for the other command line options.
816
817 LEVEL is the optimization level specified; 2 if `-O2' is specified, 1 if
818 `-O' is specified, and 0 if neither is specified.
819
820 SIZE is nonzero if `-Os' is specified, 0 otherwise.
821
822 You should not use this macro to change options that are not
 823    machine-specific.  These should be uniformly selected by the same optimization
824 level on all supported machines. Use this macro to enable machine-specific
825 optimizations.
826
827 *Do not examine `write_symbols' in this macro!* The debugging options are
828 *not supposed to alter the generated code. */
829
830 /* On the FRV, possibly disable VLIW packing which is done by the 2nd
831 scheduling pass at the current time. */
832 void
833 frv_optimization_options (int level, int size ATTRIBUTE_UNUSED)
834 {
835 if (level >= 2)
836 {
837 #ifdef DISABLE_SCHED2
838 flag_schedule_insns_after_reload = 0;
839 #endif
840 #ifdef ENABLE_RCSP
841 flag_rcsp = 1;
842 #endif
843 }
844 }
845
846 \f
847 /* Return true if NAME (a STRING_CST node) begins with PREFIX. */
848
849 static int
850 frv_string_begins_with (const_tree name, const char *prefix)
851 {
852 const int prefix_len = strlen (prefix);
853
854 /* Remember: NAME's length includes the null terminator. */
855 return (TREE_STRING_LENGTH (name) > prefix_len
856 && strncmp (TREE_STRING_POINTER (name), prefix, prefix_len) == 0);
857 }
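/* Illustrative sketch, not part of the port: frv_string_begins_with operates
   on STRING_CST nodes such as section-name attributes.  The helper and the
   ".sdata" prefix below are hypothetical.  */
#if 0
static int
frv_named_sdata_section_example (const_tree section_name)
{
  /* Nonzero if SECTION_NAME (a STRING_CST) starts with ".sdata".  */
  return frv_string_begins_with (section_name, ".sdata");
}
#endif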
858 \f
859 /* Zero or more C statements that may conditionally modify two variables
860 `fixed_regs' and `call_used_regs' (both of type `char []') after they have
861 been initialized from the two preceding macros.
862
863 This is necessary in case the fixed or call-clobbered registers depend on
864 target flags.
865
866 You need not define this macro if it has no work to do.
867
868 If the usage of an entire class of registers depends on the target flags,
869 you may indicate this to GCC by using this macro to modify `fixed_regs' and
870 `call_used_regs' to 1 for each of the registers in the classes which should
871 not be used by GCC. Also define the macro `REG_CLASS_FROM_LETTER' to return
872 `NO_REGS' if it is called with a letter for a class that shouldn't be used.
873
874 (However, if this class is not included in `GENERAL_REGS' and all of the
875 insn patterns whose constraints permit this class are controlled by target
876 switches, then GCC will automatically avoid using these registers when the
877 target switches are opposed to them.) */
878
879 void
880 frv_conditional_register_usage (void)
881 {
882 int i;
883
884 for (i = GPR_FIRST + NUM_GPRS; i <= GPR_LAST; i++)
885 fixed_regs[i] = call_used_regs[i] = 1;
886
887 for (i = FPR_FIRST + NUM_FPRS; i <= FPR_LAST; i++)
888 fixed_regs[i] = call_used_regs[i] = 1;
889
890 /* Reserve the registers used for conditional execution. At present, we need
891 1 ICC and 1 ICR register. */
892 fixed_regs[ICC_TEMP] = call_used_regs[ICC_TEMP] = 1;
893 fixed_regs[ICR_TEMP] = call_used_regs[ICR_TEMP] = 1;
894
895 if (TARGET_FIXED_CC)
896 {
897 fixed_regs[ICC_FIRST] = call_used_regs[ICC_FIRST] = 1;
898 fixed_regs[FCC_FIRST] = call_used_regs[FCC_FIRST] = 1;
899 fixed_regs[ICR_FIRST] = call_used_regs[ICR_FIRST] = 1;
900 fixed_regs[FCR_FIRST] = call_used_regs[FCR_FIRST] = 1;
901 }
902
903 if (TARGET_FDPIC)
904 fixed_regs[GPR_FIRST + 16] = fixed_regs[GPR_FIRST + 17] =
905 call_used_regs[GPR_FIRST + 16] = call_used_regs[GPR_FIRST + 17] = 0;
906
907 #if 0
908 /* If -fpic, SDA_BASE_REG is the PIC register. */
909 if (g_switch_value == 0 && !flag_pic)
910 fixed_regs[SDA_BASE_REG] = call_used_regs[SDA_BASE_REG] = 0;
911
912 if (!flag_pic)
913 fixed_regs[PIC_REGNO] = call_used_regs[PIC_REGNO] = 0;
914 #endif
915 }
916
917 \f
918 /*
919 * Compute the stack frame layout
920 *
921 * Register setup:
922 * +---------------+-----------------------+-----------------------+
923 * |Register |type |caller-save/callee-save|
924 * +---------------+-----------------------+-----------------------+
925 * |GR0 |Zero register | - |
926 * |GR1 |Stack pointer(SP) | - |
927 * |GR2 |Frame pointer(FP) | - |
928 * |GR3 |Hidden parameter | caller save |
929 * |GR4-GR7 | - | caller save |
930 * |GR8-GR13 |Argument register | caller save |
931 * |GR14-GR15 | - | caller save |
932 * |GR16-GR31 | - | callee save |
933 * |GR32-GR47 | - | caller save |
934 * |GR48-GR63 | - | callee save |
935 * |FR0-FR15 | - | caller save |
936 * |FR16-FR31 | - | callee save |
937 * |FR32-FR47 | - | caller save |
938 * |FR48-FR63 | - | callee save |
939 * +---------------+-----------------------+-----------------------+
940 *
941 * Stack frame setup:
942 * Low
943 * SP-> |-----------------------------------|
944 * | Argument area |
945 * |-----------------------------------|
946 * | Register save area |
947 * |-----------------------------------|
948 * | Local variable save area |
949 * FP-> |-----------------------------------|
950 * | Old FP |
951 * |-----------------------------------|
952 * | Hidden parameter save area |
953 * |-----------------------------------|
954 * | Return address(LR) storage area |
955 * |-----------------------------------|
956 * | Padding for alignment |
957 * |-----------------------------------|
958 * | Register argument area |
959 * OLD SP-> |-----------------------------------|
960 * | Parameter area |
961 * |-----------------------------------|
962 * High
963 *
964 * Argument area/Parameter area:
965 *
966 * When a function is called, this area is used for argument transfer. When
967 * the argument is set up by the caller function, this area is referred to as
968 * the argument area. When the argument is referenced by the callee function,
969 * this area is referred to as the parameter area. The area is allocated when
970 * all arguments cannot be placed on the argument register at the time of
971 * argument transfer.
972 *
973 * Register save area:
974 *
975 * This is a register save area that must be guaranteed for the caller
976 * function. This area is not secured when the register save operation is not
977 * needed.
978 *
979 * Local variable save area:
980 *
981 * This is the area for local variables and temporary variables.
982 *
983 * Old FP:
984 *
985 * This area stores the FP value of the caller function.
986 *
987 * Hidden parameter save area:
988 *
989 * This area stores the start address of the return value storage
990 * area for a struct/union return function.
991 * When a struct/union is used as the return value, the caller
992 * function stores the return value storage area start address in
993 * register GR3 and passes it to the caller function.
994 * The callee function interprets the address stored in the GR3
995 * as the return value storage area start address.
996 * When register GR3 needs to be saved into memory, the callee
997 * function saves it in the hidden parameter save area. This
998 * area is not secured when the save operation is not needed.
999 *
1000 * Return address(LR) storage area:
1001 *
1002 * This area saves the LR. The LR stores the address of a return to the caller
1003 * function for the purpose of function calling.
1004 *
1005 * Argument register area:
1006 *
1007 * This area saves the argument register. This area is not secured when the
1008 * save operation is not needed.
1009 *
1010 * Argument:
1011 *
1012 * Arguments, the count of which equals the count of argument registers (6
1013 * words), are positioned in registers GR8 to GR13 and delivered to the callee
1014 * function. When a struct/union return function is called, the return value
1015 * area address is stored in register GR3. Arguments not placed in the
1016 * argument registers will be stored in the stack argument area for transfer
1017 * purposes. When an 8-byte type argument is to be delivered using registers,
1018 * it is divided into two and placed in two registers for transfer. When
1019 * argument registers must be saved to memory, the callee function secures an
1020 * argument register save area in the stack. In this case, a continuous
1021 * argument register save area must be established in the parameter area. The
1022 * argument register save area must be allocated as needed to cover the size of
1023 * the argument register to be saved. If the function has a variable count of
1024 * arguments, it saves all argument registers in the argument register save
1025 * area.
1026 *
1027 * Argument Extension Format:
1028 *
1029 * When an argument is to be stored in the stack, its type is converted to an
1030 * extended type in accordance with the individual argument type. The argument
1031 * is freed by the caller function after the return from the callee function is
1032 * made.
1033 *
1034 * +-----------------------+---------------+------------------------+
1035 * | Argument Type |Extended Type |Stack Storage Size(byte)|
1036 * +-----------------------+---------------+------------------------+
1037 * |char |int | 4 |
1038 * |signed char |int | 4 |
1039 * |unsigned char |int | 4 |
1040 * |[signed] short int |int | 4 |
1041 * |unsigned short int |int | 4 |
1042 * |[signed] int |No extension | 4 |
1043 * |unsigned int |No extension | 4 |
1044 * |[signed] long int |No extension | 4 |
1045 * |unsigned long int |No extension | 4 |
1046 * |[signed] long long int |No extension | 8 |
1047 * |unsigned long long int |No extension | 8 |
1048 * |float |double | 8 |
1049 * |double |No extension | 8 |
1050 * |long double |No extension | 8 |
1051 * |pointer |No extension | 4 |
1052 * |struct/union |- | 4 (*1) |
1053 * +-----------------------+---------------+------------------------+
1054 *
1055 * When a struct/union is to be delivered as an argument, the caller copies it
1056 * to the local variable area and delivers the address of that area.
1057 *
1058 * Return Value:
1059 *
1060 * +-------------------------------+----------------------+
1061 * |Return Value Type |Return Value Interface|
1062 * +-------------------------------+----------------------+
1063 * |void |None |
1064 * |[signed|unsigned] char |GR8 |
1065 * |[signed|unsigned] short int |GR8 |
1066 * |[signed|unsigned] int |GR8 |
1067 * |[signed|unsigned] long int |GR8 |
1068 * |pointer |GR8 |
1069 * |[signed|unsigned] long long int|GR8 & GR9 |
1070 * |float |GR8 |
1071 * |double |GR8 & GR9 |
1072 * |long double |GR8 & GR9 |
1073 * |struct/union |(*1) |
1074 * +-------------------------------+----------------------+
1075 *
1076 * When a struct/union is used as the return value, the caller function stores
1077 * the start address of the return value storage area into GR3 and then passes
1078 * it to the callee function. The callee function interprets GR3 as the start
1079 * address of the return value storage area. When this address needs to be
1080 * saved in memory, the callee function secures the hidden parameter save area
1081 * and saves the address in that area.
1082 */
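/* Illustrative sketch, not part of the port: the return-value table above,
   restated as C declarations.  The comments simply transcribe that table;
   the frv_abi_example_* names are hypothetical.  */
#if 0
extern int frv_abi_example_int (void);		/* returned in GR8 */
extern long long frv_abi_example_ll (void);	/* returned in GR8 and GR9 */
extern double frv_abi_example_dbl (void);	/* returned in GR8 and GR9 */

struct frv_abi_example_big { int words[8]; };

/* For a struct/union return the caller passes the address of the return
   value storage area in GR3, and the callee writes the result through it;
   only that address ever travels in a register.  */
extern struct frv_abi_example_big frv_abi_example_struct (void);
#endif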
1083
1084 frv_stack_t *
1085 frv_stack_info (void)
1086 {
1087 static frv_stack_t info, zero_info;
1088 frv_stack_t *info_ptr = &info;
1089 tree fndecl = current_function_decl;
1090 int varargs_p = 0;
1091 tree cur_arg;
1092 tree next_arg;
1093 int range;
1094 int alignment;
1095 int offset;
1096
1097 /* If we've already calculated the values and reload is complete,
1098 just return now. */
1099 if (frv_stack_cache)
1100 return frv_stack_cache;
1101
1102 /* Zero all fields. */
1103 info = zero_info;
1104
1105 /* Set up the register range information. */
1106 info_ptr->regs[STACK_REGS_GPR].name = "gpr";
1107 info_ptr->regs[STACK_REGS_GPR].first = LAST_ARG_REGNUM + 1;
1108 info_ptr->regs[STACK_REGS_GPR].last = GPR_LAST;
1109 info_ptr->regs[STACK_REGS_GPR].dword_p = TRUE;
1110
1111 info_ptr->regs[STACK_REGS_FPR].name = "fpr";
1112 info_ptr->regs[STACK_REGS_FPR].first = FPR_FIRST;
1113 info_ptr->regs[STACK_REGS_FPR].last = FPR_LAST;
1114 info_ptr->regs[STACK_REGS_FPR].dword_p = TRUE;
1115
1116 info_ptr->regs[STACK_REGS_LR].name = "lr";
1117 info_ptr->regs[STACK_REGS_LR].first = LR_REGNO;
1118 info_ptr->regs[STACK_REGS_LR].last = LR_REGNO;
1119 info_ptr->regs[STACK_REGS_LR].special_p = 1;
1120
1121 info_ptr->regs[STACK_REGS_CC].name = "cc";
1122 info_ptr->regs[STACK_REGS_CC].first = CC_FIRST;
1123 info_ptr->regs[STACK_REGS_CC].last = CC_LAST;
1124 info_ptr->regs[STACK_REGS_CC].field_p = TRUE;
1125
1126 info_ptr->regs[STACK_REGS_LCR].name = "lcr";
1127 info_ptr->regs[STACK_REGS_LCR].first = LCR_REGNO;
1128 info_ptr->regs[STACK_REGS_LCR].last = LCR_REGNO;
1129
1130 info_ptr->regs[STACK_REGS_STDARG].name = "stdarg";
1131 info_ptr->regs[STACK_REGS_STDARG].first = FIRST_ARG_REGNUM;
1132 info_ptr->regs[STACK_REGS_STDARG].last = LAST_ARG_REGNUM;
1133 info_ptr->regs[STACK_REGS_STDARG].dword_p = 1;
1134 info_ptr->regs[STACK_REGS_STDARG].special_p = 1;
1135
1136 info_ptr->regs[STACK_REGS_STRUCT].name = "struct";
1137 info_ptr->regs[STACK_REGS_STRUCT].first = FRV_STRUCT_VALUE_REGNUM;
1138 info_ptr->regs[STACK_REGS_STRUCT].last = FRV_STRUCT_VALUE_REGNUM;
1139 info_ptr->regs[STACK_REGS_STRUCT].special_p = 1;
1140
1141 info_ptr->regs[STACK_REGS_FP].name = "fp";
1142 info_ptr->regs[STACK_REGS_FP].first = FRAME_POINTER_REGNUM;
1143 info_ptr->regs[STACK_REGS_FP].last = FRAME_POINTER_REGNUM;
1144 info_ptr->regs[STACK_REGS_FP].special_p = 1;
1145
1146 /* Determine if this is a stdarg function. If so, allocate space to store
1147 the 6 arguments. */
1148 if (cfun->stdarg)
1149 varargs_p = 1;
1150
1151 else
1152 {
1153 /* Find the last argument, and see if it is __builtin_va_alist. */
1154 for (cur_arg = DECL_ARGUMENTS (fndecl); cur_arg != (tree)0; cur_arg = next_arg)
1155 {
1156 next_arg = TREE_CHAIN (cur_arg);
1157 if (next_arg == (tree)0)
1158 {
1159 if (DECL_NAME (cur_arg)
1160 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (cur_arg)), "__builtin_va_alist"))
1161 varargs_p = 1;
1162
1163 break;
1164 }
1165 }
1166 }
1167
1168 /* Iterate over all of the register ranges. */
1169 for (range = 0; range < STACK_REGS_MAX; range++)
1170 {
1171 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1172 int first = reg_ptr->first;
1173 int last = reg_ptr->last;
1174 int size_1word = 0;
1175 int size_2words = 0;
1176 int regno;
1177
1178 /* Calculate which registers need to be saved & save area size. */
1179 switch (range)
1180 {
1181 default:
1182 for (regno = first; regno <= last; regno++)
1183 {
1184 if ((df_regs_ever_live_p (regno) && !call_used_regs[regno])
1185 || (crtl->calls_eh_return
1186 && (regno >= FIRST_EH_REGNUM && regno <= LAST_EH_REGNUM))
1187 || (!TARGET_FDPIC && flag_pic
1188 && crtl->uses_pic_offset_table && regno == PIC_REGNO))
1189 {
1190 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1191 size_1word += UNITS_PER_WORD;
1192 }
1193 }
1194 break;
1195
1196 /* Calculate whether we need to create a frame after everything else
1197 has been processed. */
1198 case STACK_REGS_FP:
1199 break;
1200
1201 case STACK_REGS_LR:
1202 if (df_regs_ever_live_p (LR_REGNO)
1203 || profile_flag
1204 /* This is set for __builtin_return_address, etc. */
1205 || cfun->machine->frame_needed
1206 || (TARGET_LINKED_FP && frame_pointer_needed)
1207 || (!TARGET_FDPIC && flag_pic
1208 && crtl->uses_pic_offset_table))
1209 {
1210 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1211 size_1word += UNITS_PER_WORD;
1212 }
1213 break;
1214
1215 case STACK_REGS_STDARG:
1216 if (varargs_p)
1217 {
1218 	      /* If this is a stdarg function with a non-variadic
1219 argument split between registers and the stack,
1220 adjust the saved registers downward. */
1221 last -= (ADDR_ALIGN (crtl->args.pretend_args_size, UNITS_PER_WORD)
1222 / UNITS_PER_WORD);
1223
1224 for (regno = first; regno <= last; regno++)
1225 {
1226 info_ptr->save_p[regno] = REG_SAVE_1WORD;
1227 size_1word += UNITS_PER_WORD;
1228 }
1229
1230 info_ptr->stdarg_size = size_1word;
1231 }
1232 break;
1233
1234 case STACK_REGS_STRUCT:
1235 if (cfun->returns_struct)
1236 {
1237 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1238 size_1word += UNITS_PER_WORD;
1239 }
1240 break;
1241 }
1242
1243
1244 if (size_1word)
1245 {
1246 /* If this is a field, it only takes one word. */
1247 if (reg_ptr->field_p)
1248 size_1word = UNITS_PER_WORD;
1249
1250 /* Determine which register pairs can be saved together. */
1251 else if (reg_ptr->dword_p && TARGET_DWORD)
1252 {
1253 for (regno = first; regno < last; regno += 2)
1254 {
1255 if (info_ptr->save_p[regno] && info_ptr->save_p[regno+1])
1256 {
1257 size_2words += 2 * UNITS_PER_WORD;
1258 size_1word -= 2 * UNITS_PER_WORD;
1259 info_ptr->save_p[regno] = REG_SAVE_2WORDS;
1260 info_ptr->save_p[regno+1] = REG_SAVE_NO_SAVE;
1261 }
1262 }
1263 }
1264
1265 reg_ptr->size_1word = size_1word;
1266 reg_ptr->size_2words = size_2words;
1267
1268 if (! reg_ptr->special_p)
1269 {
1270 info_ptr->regs_size_1word += size_1word;
1271 info_ptr->regs_size_2words += size_2words;
1272 }
1273 }
1274 }
1275
1276   /* Set up the sizes of each field in the frame body, making the sizes
1277 of each be divisible by the size of a dword if dword operations might
1278 be used, or the size of a word otherwise. */
1279 alignment = (TARGET_DWORD? 2 * UNITS_PER_WORD : UNITS_PER_WORD);
1280
1281 info_ptr->parameter_size = ADDR_ALIGN (crtl->outgoing_args_size, alignment);
1282 info_ptr->regs_size = ADDR_ALIGN (info_ptr->regs_size_2words
1283 + info_ptr->regs_size_1word,
1284 alignment);
1285 info_ptr->vars_size = ADDR_ALIGN (get_frame_size (), alignment);
1286
1287 info_ptr->pretend_size = crtl->args.pretend_args_size;
1288
1289 /* Work out the size of the frame, excluding the header. Both the frame
1290 body and register parameter area will be dword-aligned. */
1291 info_ptr->total_size
1292 = (ADDR_ALIGN (info_ptr->parameter_size
1293 + info_ptr->regs_size
1294 + info_ptr->vars_size,
1295 2 * UNITS_PER_WORD)
1296 + ADDR_ALIGN (info_ptr->pretend_size
1297 + info_ptr->stdarg_size,
1298 2 * UNITS_PER_WORD));
1299
1300   /* See if we need to create a frame at all; if so, add the header area.  */
1301 if (info_ptr->total_size > 0
1302 || frame_pointer_needed
1303 || info_ptr->regs[STACK_REGS_LR].size_1word > 0
1304 || info_ptr->regs[STACK_REGS_STRUCT].size_1word > 0)
1305 {
1306 offset = info_ptr->parameter_size;
1307 info_ptr->header_size = 4 * UNITS_PER_WORD;
1308 info_ptr->total_size += 4 * UNITS_PER_WORD;
1309
1310 /* Calculate the offsets to save normal register pairs. */
1311 for (range = 0; range < STACK_REGS_MAX; range++)
1312 {
1313 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1314 if (! reg_ptr->special_p)
1315 {
1316 int first = reg_ptr->first;
1317 int last = reg_ptr->last;
1318 int regno;
1319
1320 for (regno = first; regno <= last; regno++)
1321 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS
1322 && regno != FRAME_POINTER_REGNUM
1323 && (regno < FIRST_ARG_REGNUM
1324 || regno > LAST_ARG_REGNUM))
1325 {
1326 info_ptr->reg_offset[regno] = offset;
1327 offset += 2 * UNITS_PER_WORD;
1328 }
1329 }
1330 }
1331
1332 /* Calculate the offsets to save normal single registers. */
1333 for (range = 0; range < STACK_REGS_MAX; range++)
1334 {
1335 frv_stack_regs_t *reg_ptr = &(info_ptr->regs[range]);
1336 if (! reg_ptr->special_p)
1337 {
1338 int first = reg_ptr->first;
1339 int last = reg_ptr->last;
1340 int regno;
1341
1342 for (regno = first; regno <= last; regno++)
1343 if (info_ptr->save_p[regno] == REG_SAVE_1WORD
1344 && regno != FRAME_POINTER_REGNUM
1345 && (regno < FIRST_ARG_REGNUM
1346 || regno > LAST_ARG_REGNUM))
1347 {
1348 info_ptr->reg_offset[regno] = offset;
1349 offset += UNITS_PER_WORD;
1350 }
1351 }
1352 }
1353
1354 /* Calculate the offset to save the local variables at. */
1355 offset = ADDR_ALIGN (offset, alignment);
1356 if (info_ptr->vars_size)
1357 {
1358 info_ptr->vars_offset = offset;
1359 offset += info_ptr->vars_size;
1360 }
1361
1362 /* Align header to a dword-boundary. */
1363 offset = ADDR_ALIGN (offset, 2 * UNITS_PER_WORD);
1364
1365 /* Calculate the offsets in the fixed frame. */
1366 info_ptr->save_p[FRAME_POINTER_REGNUM] = REG_SAVE_1WORD;
1367 info_ptr->reg_offset[FRAME_POINTER_REGNUM] = offset;
1368 info_ptr->regs[STACK_REGS_FP].size_1word = UNITS_PER_WORD;
1369
1370 info_ptr->save_p[LR_REGNO] = REG_SAVE_1WORD;
1371 info_ptr->reg_offset[LR_REGNO] = offset + 2*UNITS_PER_WORD;
1372 info_ptr->regs[STACK_REGS_LR].size_1word = UNITS_PER_WORD;
1373
1374 if (cfun->returns_struct)
1375 {
1376 info_ptr->save_p[FRV_STRUCT_VALUE_REGNUM] = REG_SAVE_1WORD;
1377 info_ptr->reg_offset[FRV_STRUCT_VALUE_REGNUM] = offset + UNITS_PER_WORD;
1378 info_ptr->regs[STACK_REGS_STRUCT].size_1word = UNITS_PER_WORD;
1379 }
1380
1381 /* Calculate the offsets to store the arguments passed in registers
1382 for stdarg functions. The register pairs are first and the single
1383 register if any is last. The register save area starts on a
1384 dword-boundary. */
1385 if (info_ptr->stdarg_size)
1386 {
1387 int first = info_ptr->regs[STACK_REGS_STDARG].first;
1388 int last = info_ptr->regs[STACK_REGS_STDARG].last;
1389 int regno;
1390
1391 /* Skip the header. */
1392 offset += 4 * UNITS_PER_WORD;
1393 for (regno = first; regno <= last; regno++)
1394 {
1395 if (info_ptr->save_p[regno] == REG_SAVE_2WORDS)
1396 {
1397 info_ptr->reg_offset[regno] = offset;
1398 offset += 2 * UNITS_PER_WORD;
1399 }
1400 else if (info_ptr->save_p[regno] == REG_SAVE_1WORD)
1401 {
1402 info_ptr->reg_offset[regno] = offset;
1403 offset += UNITS_PER_WORD;
1404 }
1405 }
1406 }
1407 }
1408
1409 if (reload_completed)
1410 frv_stack_cache = info_ptr;
1411
1412 return info_ptr;
1413 }
1414
1415 \f
1416 /* Print the information about the frv stack offsets, etc. when debugging. */
1417
1418 void
1419 frv_debug_stack (frv_stack_t *info)
1420 {
1421 int range;
1422
1423 if (!info)
1424 info = frv_stack_info ();
1425
1426 fprintf (stderr, "\nStack information for function %s:\n",
1427 ((current_function_decl && DECL_NAME (current_function_decl))
1428 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
1429 : "<unknown>"));
1430
1431 fprintf (stderr, "\ttotal_size\t= %6d\n", info->total_size);
1432 fprintf (stderr, "\tvars_size\t= %6d\n", info->vars_size);
1433 fprintf (stderr, "\tparam_size\t= %6d\n", info->parameter_size);
1434 fprintf (stderr, "\tregs_size\t= %6d, 1w = %3d, 2w = %3d\n",
1435 info->regs_size, info->regs_size_1word, info->regs_size_2words);
1436
1437 fprintf (stderr, "\theader_size\t= %6d\n", info->header_size);
1438 fprintf (stderr, "\tpretend_size\t= %6d\n", info->pretend_size);
1439 fprintf (stderr, "\tvars_offset\t= %6d\n", info->vars_offset);
1440 fprintf (stderr, "\tregs_offset\t= %6d\n", info->regs_offset);
1441
1442 for (range = 0; range < STACK_REGS_MAX; range++)
1443 {
1444 frv_stack_regs_t *regs = &(info->regs[range]);
1445 if ((regs->size_1word + regs->size_2words) > 0)
1446 {
1447 int first = regs->first;
1448 int last = regs->last;
1449 int regno;
1450
1451 fprintf (stderr, "\t%s\tsize\t= %6d, 1w = %3d, 2w = %3d, save =",
1452 regs->name, regs->size_1word + regs->size_2words,
1453 regs->size_1word, regs->size_2words);
1454
1455 for (regno = first; regno <= last; regno++)
1456 {
1457 if (info->save_p[regno] == REG_SAVE_1WORD)
1458 fprintf (stderr, " %s (%d)", reg_names[regno],
1459 info->reg_offset[regno]);
1460
1461 else if (info->save_p[regno] == REG_SAVE_2WORDS)
1462 fprintf (stderr, " %s-%s (%d)", reg_names[regno],
1463 reg_names[regno+1], info->reg_offset[regno]);
1464 }
1465
1466 fputc ('\n', stderr);
1467 }
1468 }
1469
1470 fflush (stderr);
1471 }
1472
1473
1474 \f
1475
1476 /* Used during final to control the packing of insns. The value is
1477 1 if the current instruction should be packed with the next one,
1478 0 if it shouldn't or -1 if packing is disabled altogether. */
1479
1480 static int frv_insn_packing_flag;
1481
1482 /* True if the current function contains a far jump. */
1483
1484 static int
1485 frv_function_contains_far_jump (void)
1486 {
1487 rtx insn = get_insns ();
1488 while (insn != NULL
1489 && !(GET_CODE (insn) == JUMP_INSN
1490 /* Ignore tablejump patterns. */
1491 && GET_CODE (PATTERN (insn)) != ADDR_VEC
1492 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
1493 && get_attr_far_jump (insn) == FAR_JUMP_YES))
1494 insn = NEXT_INSN (insn);
1495 return (insn != NULL);
1496 }
1497
1498 /* For the FRV, this function makes sure that a function with far jumps
1499 will return correctly. It also does the VLIW packing. */
1500
1501 static void
1502 frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1503 {
1504 /* If no frame was created, check whether the function uses a call
1505 instruction to implement a far jump. If so, save the link in gr3 and
1506 replace all returns to LR with returns to GR3. GR3 is used because it
1507      is call-clobbered, because it is not available to the register allocator,
1508 and because all functions that take a hidden argument pointer will have
1509 a stack frame. */
1510 if (frv_stack_info ()->total_size == 0 && frv_function_contains_far_jump ())
1511 {
1512 rtx insn;
1513
1514 /* Just to check that the above comment is true. */
1515 gcc_assert (!df_regs_ever_live_p (GPR_FIRST + 3));
1516
1517 /* Generate the instruction that saves the link register. */
1518 fprintf (file, "\tmovsg lr,gr3\n");
1519
1520 /* Replace the LR with GR3 in *return_internal patterns. The insn
1521 will now return using jmpl @(gr3,0) rather than bralr. We cannot
1522 simply emit a different assembly directive because bralr and jmpl
1523 execute in different units. */
1524 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
1525 if (GET_CODE (insn) == JUMP_INSN)
1526 {
1527 rtx pattern = PATTERN (insn);
1528 if (GET_CODE (pattern) == PARALLEL
1529 && XVECLEN (pattern, 0) >= 2
1530 && GET_CODE (XVECEXP (pattern, 0, 0)) == RETURN
1531 && GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
1532 {
1533 rtx address = XEXP (XVECEXP (pattern, 0, 1), 0);
1534 if (GET_CODE (address) == REG && REGNO (address) == LR_REGNO)
1535 SET_REGNO (address, GPR_FIRST + 3);
1536 }
1537 }
1538 }
1539
1540 frv_pack_insns ();
1541
1542 /* Allow the garbage collector to free the nops created by frv_reorg. */
1543 memset (frv_nops, 0, sizeof (frv_nops));
1544 }
1545
1546 \f
1547 /* Return the next available temporary register in a given class. */
1548
1549 static rtx
1550 frv_alloc_temp_reg (
1551 frv_tmp_reg_t *info, /* which registers are available */
1552 enum reg_class rclass, /* register class desired */
1553 enum machine_mode mode, /* mode to allocate register with */
1554 int mark_as_used, /* register not available after allocation */
1555 int no_abort) /* return NULL instead of aborting */
1556 {
1557 int regno = info->next_reg[ (int)rclass ];
1558 int orig_regno = regno;
1559 HARD_REG_SET *reg_in_class = &reg_class_contents[ (int)rclass ];
1560 int i, nr;
1561
1562 for (;;)
1563 {
1564 if (TEST_HARD_REG_BIT (*reg_in_class, regno)
1565 && TEST_HARD_REG_BIT (info->regs, regno))
1566 break;
1567
1568 if (++regno >= FIRST_PSEUDO_REGISTER)
1569 regno = 0;
1570 if (regno == orig_regno)
1571 {
1572 gcc_assert (no_abort);
1573 return NULL_RTX;
1574 }
1575 }
1576
1577 nr = HARD_REGNO_NREGS (regno, mode);
1578 info->next_reg[ (int)rclass ] = regno + nr;
1579
1580 if (mark_as_used)
1581 for (i = 0; i < nr; i++)
1582 CLEAR_HARD_REG_BIT (info->regs, regno+i);
1583
1584 return gen_rtx_REG (mode, regno);
1585 }
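/* Illustrative sketch, not part of the port: the intended calling pattern
   for frv_alloc_temp_reg.  Seeding the register set from call_used_reg_set
   and allocating an SImode GPR are assumptions chosen for the example.  */
#if 0
static void
frv_temp_reg_example (void)
{
  frv_tmp_reg_t tmp;
  rtx reg;

  /* Start with every call-used register as a candidate and reset the
     per-class allocation pointers.  */
  COPY_HARD_REG_SET (tmp.regs, call_used_reg_set);
  memset (tmp.next_reg, 0, sizeof (tmp.next_reg));

  /* Allocate an SImode GPR and mark it used.  The final argument asks for
     NULL_RTX on failure instead of an abort.  */
  reg = frv_alloc_temp_reg (&tmp, GPR_REGS, SImode, TRUE, TRUE);
  if (reg != NULL_RTX)
    emit_move_insn (reg, const0_rtx);
}
#endif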
1586
1587 \f
1588 /* Return an rtx with the value OFFSET, which will either be a register or a
1589 signed 12-bit integer. It can be used as the second operand in an "add"
1590 instruction, or as the index in a load or store.
1591
1592 The function returns a constant rtx if OFFSET is small enough, otherwise
1593 it loads the constant into register OFFSET_REGNO and returns that. */
1594 static rtx
1595 frv_frame_offset_rtx (int offset)
1596 {
1597 rtx offset_rtx = GEN_INT (offset);
1598 if (IN_RANGE_P (offset, -2048, 2047))
1599 return offset_rtx;
1600 else
1601 {
1602 rtx reg_rtx = gen_rtx_REG (SImode, OFFSET_REGNO);
1603 if (IN_RANGE_P (offset, -32768, 32767))
1604 emit_insn (gen_movsi (reg_rtx, offset_rtx));
1605 else
1606 {
1607 emit_insn (gen_movsi_high (reg_rtx, offset_rtx));
1608 emit_insn (gen_movsi_lo_sum (reg_rtx, offset_rtx));
1609 }
1610 return reg_rtx;
1611 }
1612 }
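/* Illustrative sketch, not part of the port: the three cases handled by
   frv_frame_offset_rtx, with offsets invented to land in each range.  */
#if 0
static void
frv_frame_offset_example (void)
{
  rtx a = frv_frame_offset_rtx (1000);	 /* 12-bit range: plain CONST_INT */
  rtx b = frv_frame_offset_rtx (20000);	 /* 16-bit range: one movsi into
					    OFFSET_REGNO, returns that REG */
  rtx c = frv_frame_offset_rtx (100000); /* otherwise movsi_high followed by
					    movsi_lo_sum, again a REG */

  gcc_assert (GET_CODE (a) == CONST_INT && REG_P (b) && REG_P (c));
}
#endif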
1613
1614 /* Generate (mem:MODE (plus:Pmode BASE (frv_frame_offset OFFSET))).  The
1615    prologue and epilogue use such expressions to access the stack.  */
1616 static rtx
1617 frv_frame_mem (enum machine_mode mode, rtx base, int offset)
1618 {
1619 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode,
1620 base,
1621 frv_frame_offset_rtx (offset)));
1622 }
1623
1624 /* Generate a frame-related expression:
1625
1626       (set (mem (plus (sp) (const_int OFFSET))) REG).
1627
1628 Such expressions are used in FRAME_RELATED_EXPR notes for more complex
1629 instructions. Marking the expressions as frame-related is superfluous if
1630 the note contains just a single set. But if the note contains a PARALLEL
1631 or SEQUENCE that has several sets, each set must be individually marked
1632 as frame-related. */
1633 static rtx
1634 frv_dwarf_store (rtx reg, int offset)
1635 {
1636 rtx set = gen_rtx_SET (VOIDmode,
1637 gen_rtx_MEM (GET_MODE (reg),
1638 plus_constant (stack_pointer_rtx,
1639 offset)),
1640 reg);
1641 RTX_FRAME_RELATED_P (set) = 1;
1642 return set;
1643 }
1644
1645 /* Emit a frame-related instruction whose pattern is PATTERN. The
1646 instruction is the last in a sequence that cumulatively performs the
1647 operation described by DWARF_PATTERN. The instruction is marked as
1648 frame-related and has a REG_FRAME_RELATED_EXPR note containing
1649 DWARF_PATTERN. */
1650 static void
1651 frv_frame_insn (rtx pattern, rtx dwarf_pattern)
1652 {
1653 rtx insn = emit_insn (pattern);
1654 RTX_FRAME_RELATED_P (insn) = 1;
1655 REG_NOTES (insn) = alloc_EXPR_LIST (REG_FRAME_RELATED_EXPR,
1656 dwarf_pattern,
1657 REG_NOTES (insn));
1658 }
1659
1660 /* Emit instructions that transfer REG to or from the memory location (sp +
1661 STACK_OFFSET). The register is stored in memory if ACCESSOR->OP is
1662 FRV_STORE and loaded if it is FRV_LOAD. Only the prologue uses this
1663 function to store registers and only the epilogue uses it to load them.
1664
1665 The caller sets up ACCESSOR so that BASE is equal to (sp + BASE_OFFSET).
1666 The generated instruction will use BASE as its base register. BASE may
1667 simply be the stack pointer, but if several accesses are being made to a
1668 region far away from the stack pointer, it may be more efficient to set
1669 up a temporary instead.
1670
1671 Store instructions will be frame-related and will be annotated with the
1672 overall effect of the store. Load instructions will be followed by a
1673 (use) to prevent later optimizations from zapping them.
1674
1675 The function takes care of the moves to and from SPRs, using TEMP_REGNO
1676 as a temporary in such cases. */
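/* As a sketch of the DImode store case handled below: the save itself is
   emitted as a single doubleword set, but its REG_FRAME_RELATED_EXPR note
   is a PARALLEL of two word-sized frv_dwarf_store expressions, so that the
   unwind information describes each half of the register pair.  */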
1677 static void
1678 frv_frame_access (frv_frame_accessor_t *accessor, rtx reg, int stack_offset)
1679 {
1680 enum machine_mode mode = GET_MODE (reg);
1681 rtx mem = frv_frame_mem (mode,
1682 accessor->base,
1683 stack_offset - accessor->base_offset);
1684
1685 if (accessor->op == FRV_LOAD)
1686 {
1687 if (SPR_P (REGNO (reg)))
1688 {
1689 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1690 emit_insn (gen_rtx_SET (VOIDmode, temp, mem));
1691 emit_insn (gen_rtx_SET (VOIDmode, reg, temp));
1692 }
1693 else
1694 {
1695 /* We cannot use reg+reg addressing for DImode access. */
1696 if (mode == DImode
1697 && GET_CODE (XEXP (mem, 0)) == PLUS
1698 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1699 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1700 {
1701 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1702 rtx insn = emit_move_insn (temp,
1703 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1704 XEXP (XEXP (mem, 0), 1)));
1705 mem = gen_rtx_MEM (DImode, temp);
1706 }
1707 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1708 }
1709 emit_use (reg);
1710 }
1711 else
1712 {
1713 if (SPR_P (REGNO (reg)))
1714 {
1715 rtx temp = gen_rtx_REG (mode, TEMP_REGNO);
1716 emit_insn (gen_rtx_SET (VOIDmode, temp, reg));
1717 frv_frame_insn (gen_rtx_SET (Pmode, mem, temp),
1718 frv_dwarf_store (reg, stack_offset));
1719 }
1720 else if (mode == DImode)
1721 {
1722 /* For DImode saves, the dwarf2 version needs to be a PARALLEL
1723 with a separate save for each register. */
1724 rtx reg1 = gen_rtx_REG (SImode, REGNO (reg));
1725 rtx reg2 = gen_rtx_REG (SImode, REGNO (reg) + 1);
1726 rtx set1 = frv_dwarf_store (reg1, stack_offset);
1727 rtx set2 = frv_dwarf_store (reg2, stack_offset + 4);
1728
1729 /* Also we cannot use reg+reg addressing. */
1730 if (GET_CODE (XEXP (mem, 0)) == PLUS
1731 && GET_CODE (XEXP (XEXP (mem, 0), 0)) == REG
1732 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == REG)
1733 {
1734 rtx temp = gen_rtx_REG (SImode, TEMP_REGNO);
1735 rtx insn = emit_move_insn (temp,
1736 gen_rtx_PLUS (SImode, XEXP (XEXP (mem, 0), 0),
1737 XEXP (XEXP (mem, 0), 1)));
1738 mem = gen_rtx_MEM (DImode, temp);
1739 }
1740
1741 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1742 gen_rtx_PARALLEL (VOIDmode,
1743 gen_rtvec (2, set1, set2)));
1744 }
1745 else
1746 frv_frame_insn (gen_rtx_SET (Pmode, mem, reg),
1747 frv_dwarf_store (reg, stack_offset));
1748 }
1749 }
1750
1751 /* A function that uses frv_frame_access to transfer a group of registers to
1752 or from the stack. ACCESSOR is passed directly to frv_frame_access, INFO
1753 is the stack information generated by frv_stack_info, and REG_SET is the
1754 number of the register set to transfer. */
1755 static void
1756 frv_frame_access_multi (frv_frame_accessor_t *accessor,
1757 frv_stack_t *info,
1758 int reg_set)
1759 {
1760 frv_stack_regs_t *regs_info;
1761 int regno;
1762
1763 regs_info = &info->regs[reg_set];
1764 for (regno = regs_info->first; regno <= regs_info->last; regno++)
1765 if (info->save_p[regno])
1766 frv_frame_access (accessor,
1767 info->save_p[regno] == REG_SAVE_2WORDS
1768 ? gen_rtx_REG (DImode, regno)
1769 : gen_rtx_REG (SImode, regno),
1770 info->reg_offset[regno]);
1771 }
1772
1773 /* Save or restore callee-saved registers that are kept outside the frame
1774 header. The function saves the registers if OP is FRV_STORE and restores
1775 them if OP is FRV_LOAD. INFO is the stack information generated by
1776 frv_stack_info. */
1777 static void
1778 frv_frame_access_standard_regs (enum frv_stack_op op, frv_stack_t *info)
1779 {
1780 frv_frame_accessor_t accessor;
1781
1782 accessor.op = op;
1783 accessor.base = stack_pointer_rtx;
1784 accessor.base_offset = 0;
1785 frv_frame_access_multi (&accessor, info, STACK_REGS_GPR);
1786 frv_frame_access_multi (&accessor, info, STACK_REGS_FPR);
1787 frv_frame_access_multi (&accessor, info, STACK_REGS_LCR);
1788 }
1789
1790
1791 /* Called after register allocation to add any instructions needed for the
1792    prologue.  Using a prologue insn is preferred to putting all of the
1793    instructions in the TARGET_ASM_FUNCTION_PROLOGUE target hook, since
1794    it allows the scheduler to intermix instructions with the saves of
1795    the call-saved registers.  In some cases, it might be necessary
1796 to emit a barrier instruction as the last insn to prevent such
1797 scheduling.
1798
1799 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1800 so that the debug info generation code can handle them properly. */
1801 void
1802 frv_expand_prologue (void)
1803 {
1804 frv_stack_t *info = frv_stack_info ();
1805 rtx sp = stack_pointer_rtx;
1806 rtx fp = frame_pointer_rtx;
1807 frv_frame_accessor_t accessor;
1808
1809 if (TARGET_DEBUG_STACK)
1810 frv_debug_stack (info);
1811
1812 if (info->total_size == 0)
1813 return;
1814
1815 /* We're interested in three areas of the frame here:
1816
1817 A: the register save area
1818 B: the old FP
1819 C: the header after B
1820
1821 If the frame pointer isn't used, we'll have to set up A, B and C
1822 using the stack pointer. If the frame pointer is used, we'll access
1823 them as follows:
1824
1825 A: set up using sp
1826 B: set up using sp or a temporary (see below)
1827 C: set up using fp
1828
1829 We set up B using the stack pointer if the frame is small enough.
1830 Otherwise, it's more efficient to copy the old stack pointer into a
1831 temporary and use that.
1832
1833 Note that it's important to make sure the prologue and epilogue use the
1834 same registers to access A and C, since doing otherwise will confuse
1835 the aliasing code. */
1836
1837 /* Set up ACCESSOR for accessing region B above. If the frame pointer
1838 isn't used, the same method will serve for C. */
1839 accessor.op = FRV_STORE;
1840 if (frame_pointer_needed && info->total_size > 2048)
1841 {
1842 rtx insn;
1843
1844 accessor.base = gen_rtx_REG (Pmode, OLD_SP_REGNO);
1845 accessor.base_offset = info->total_size;
1846 insn = emit_insn (gen_movsi (accessor.base, sp));
1847 }
1848 else
1849 {
1850 accessor.base = stack_pointer_rtx;
1851 accessor.base_offset = 0;
1852 }
1853
1854 /* Allocate the stack space. */
1855 {
1856 rtx asm_offset = frv_frame_offset_rtx (-info->total_size);
1857 rtx dwarf_offset = GEN_INT (-info->total_size);
1858
1859 frv_frame_insn (gen_stack_adjust (sp, sp, asm_offset),
1860 gen_rtx_SET (Pmode,
1861 sp,
1862 gen_rtx_PLUS (Pmode, sp, dwarf_offset)));
1863 }
1864
1865 /* If the frame pointer is needed, store the old one at (sp + FP_OFFSET)
1866 and point the new one to that location. */
1867 if (frame_pointer_needed)
1868 {
1869 int fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1870
1871 /* ASM_SRC and DWARF_SRC both point to the frame header. ASM_SRC is
1872 based on ACCESSOR.BASE but DWARF_SRC is always based on the stack
1873 pointer. */
1874 rtx asm_src = plus_constant (accessor.base,
1875 fp_offset - accessor.base_offset);
1876 rtx dwarf_src = plus_constant (sp, fp_offset);
1877
1878 /* Store the old frame pointer at (sp + FP_OFFSET). */
1879 frv_frame_access (&accessor, fp, fp_offset);
1880
1881 /* Set up the new frame pointer. */
1882 frv_frame_insn (gen_rtx_SET (VOIDmode, fp, asm_src),
1883 gen_rtx_SET (VOIDmode, fp, dwarf_src));
1884
1885 /* Access region C from the frame pointer. */
1886 accessor.base = fp;
1887 accessor.base_offset = fp_offset;
1888 }
1889
1890 /* Set up region C. */
1891 frv_frame_access_multi (&accessor, info, STACK_REGS_STRUCT);
1892 frv_frame_access_multi (&accessor, info, STACK_REGS_LR);
1893 frv_frame_access_multi (&accessor, info, STACK_REGS_STDARG);
1894
1895 /* Set up region A. */
1896 frv_frame_access_standard_regs (FRV_STORE, info);
1897
1898 /* If this is a varargs/stdarg function, issue a blockage to prevent the
1899 scheduler from moving loads before the stores saving the registers. */
1900 if (info->stdarg_size > 0)
1901 emit_insn (gen_blockage ());
1902
1903 /* Set up pic register/small data register for this function. */
1904 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
1905 emit_insn (gen_pic_prologue (gen_rtx_REG (Pmode, PIC_REGNO),
1906 gen_rtx_REG (Pmode, LR_REGNO),
1907 gen_rtx_REG (SImode, OFFSET_REGNO)));
1908 }
1909
1910 \f
1911 /* Under frv, all of the work is done via frv_expand_epilogue, but
1912 this function provides a convenient place to do cleanup. */
1913
1914 static void
1915 frv_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
1916 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1917 {
1918 frv_stack_cache = (frv_stack_t *)0;
1919
1920 /* Zap last used registers for conditional execution. */
1921 memset (&frv_ifcvt.tmp_reg, 0, sizeof (frv_ifcvt.tmp_reg));
1922
1923 /* Release the bitmap of created insns. */
1924 BITMAP_FREE (frv_ifcvt.scratch_insns_bitmap);
1925 }
1926
1927 \f
1928 /* Called after register allocation to add any instructions needed for the
1929    epilogue.  Using an epilogue insn is preferred to putting all of the
1930    instructions in the TARGET_ASM_FUNCTION_EPILOGUE target hook, since
1931    it allows the scheduler to intermix instructions with the restores of
1932    the call-saved registers.  In some cases, it might be necessary
1933 to emit a barrier instruction as the last insn to prevent such
1934 scheduling. */
1935
1936 void
1937 frv_expand_epilogue (bool emit_return)
1938 {
1939 frv_stack_t *info = frv_stack_info ();
1940 rtx fp = frame_pointer_rtx;
1941 rtx sp = stack_pointer_rtx;
1942 rtx return_addr;
1943 int fp_offset;
1944
1945 fp_offset = info->reg_offset[FRAME_POINTER_REGNUM];
1946
1947 /* Restore the stack pointer to its original value if alloca or the like
1948 is used. */
1949 if (! current_function_sp_is_unchanging)
1950 emit_insn (gen_addsi3 (sp, fp, frv_frame_offset_rtx (-fp_offset)));
1951
1952 /* Restore the callee-saved registers that were used in this function. */
1953 frv_frame_access_standard_regs (FRV_LOAD, info);
1954
1955 /* Set RETURN_ADDR to the address we should return to. Set it to NULL if
1956 no return instruction should be emitted. */
1957 if (info->save_p[LR_REGNO])
1958 {
1959 int lr_offset;
1960 rtx mem;
1961
1962 /* Use the same method to access the link register's slot as we did in
1963 the prologue. In other words, use the frame pointer if available,
1964 otherwise use the stack pointer.
1965
1966 LR_OFFSET is the offset of the link register's slot from the start
1967 of the frame and MEM is a memory rtx for it. */
1968 lr_offset = info->reg_offset[LR_REGNO];
1969 if (frame_pointer_needed)
1970 mem = frv_frame_mem (Pmode, fp, lr_offset - fp_offset);
1971 else
1972 mem = frv_frame_mem (Pmode, sp, lr_offset);
1973
1974 /* Load the old link register into a GPR. */
1975 return_addr = gen_rtx_REG (Pmode, TEMP_REGNO);
1976 emit_insn (gen_rtx_SET (VOIDmode, return_addr, mem));
1977 }
1978 else
1979 return_addr = gen_rtx_REG (Pmode, LR_REGNO);
1980
1981 /* Restore the old frame pointer. Emit a USE afterwards to make sure
1982 the load is preserved. */
1983 if (frame_pointer_needed)
1984 {
1985 emit_insn (gen_rtx_SET (VOIDmode, fp, gen_rtx_MEM (Pmode, fp)));
1986 emit_use (fp);
1987 }
1988
1989 /* Deallocate the stack frame. */
1990 if (info->total_size != 0)
1991 {
1992 rtx offset = frv_frame_offset_rtx (info->total_size);
1993 emit_insn (gen_stack_adjust (sp, sp, offset));
1994 }
1995
1996 /* If this function uses eh_return, add the final stack adjustment now. */
1997 if (crtl->calls_eh_return)
1998 emit_insn (gen_stack_adjust (sp, sp, EH_RETURN_STACKADJ_RTX));
1999
2000 if (emit_return)
2001 emit_jump_insn (gen_epilogue_return (return_addr));
2002 else
2003 {
2004 rtx lr = return_addr;
2005
2006 if (REGNO (return_addr) != LR_REGNO)
2007 {
2008 lr = gen_rtx_REG (Pmode, LR_REGNO);
2009 emit_move_insn (lr, return_addr);
2010 }
2011
2012 emit_use (lr);
2013 }
2014 }
2015
2016 \f
2017 /* Worker function for TARGET_ASM_OUTPUT_MI_THUNK. */
2018
2019 static void
2020 frv_asm_output_mi_thunk (FILE *file,
2021 tree thunk_fndecl ATTRIBUTE_UNUSED,
2022 HOST_WIDE_INT delta,
2023 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
2024 tree function)
2025 {
2026 const char *name_func = XSTR (XEXP (DECL_RTL (function), 0), 0);
2027 const char *name_arg0 = reg_names[FIRST_ARG_REGNUM];
2028 const char *name_jmp = reg_names[JUMP_REGNO];
2029 const char *parallel = (frv_issue_rate () > 1 ? ".p" : "");
2030
2031 /* Do the add using an addi if possible. */
2032 if (IN_RANGE_P (delta, -2048, 2047))
2033 fprintf (file, "\taddi %s,#%d,%s\n", name_arg0, (int) delta, name_arg0);
2034 else
2035 {
2036 const char *const name_add = reg_names[TEMP_REGNO];
2037 fprintf (file, "\tsethi%s #hi(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
2038 parallel, delta, name_add);
2039 fprintf (file, "\tsetlo #lo(" HOST_WIDE_INT_PRINT_DEC "),%s\n",
2040 delta, name_add);
2041 fprintf (file, "\tadd %s,%s,%s\n", name_add, name_arg0, name_arg0);
2042 }
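  /* For illustration (assuming the first argument register is gr8): a
     delta of 8 is emitted as "addi gr8,#8,gr8", while a delta outside the
     12-bit range is built in TEMP_REGNO with the sethi/setlo pair above
     and then added to the argument register.  */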
2043
2044 if (TARGET_FDPIC)
2045 {
2046 const char *name_pic = reg_names[FDPIC_REGNO];
2047 name_jmp = reg_names[FDPIC_FPTR_REGNO];
2048
2049 if (flag_pic != 1)
2050 {
2051 fprintf (file, "\tsethi%s #gotofffuncdeschi(", parallel);
2052 assemble_name (file, name_func);
2053 fprintf (file, "),%s\n", name_jmp);
2054
2055 fprintf (file, "\tsetlo #gotofffuncdesclo(");
2056 assemble_name (file, name_func);
2057 fprintf (file, "),%s\n", name_jmp);
2058
2059 fprintf (file, "\tldd @(%s,%s), %s\n", name_jmp, name_pic, name_jmp);
2060 }
2061 else
2062 {
2063 fprintf (file, "\tlddo @(%s,#gotofffuncdesc12(", name_pic);
2064 assemble_name (file, name_func);
2065 fprintf (file, "\t)), %s\n", name_jmp);
2066 }
2067 }
2068 else if (!flag_pic)
2069 {
2070 fprintf (file, "\tsethi%s #hi(", parallel);
2071 assemble_name (file, name_func);
2072 fprintf (file, "),%s\n", name_jmp);
2073
2074 fprintf (file, "\tsetlo #lo(");
2075 assemble_name (file, name_func);
2076 fprintf (file, "),%s\n", name_jmp);
2077 }
2078 else
2079 {
2080 /* Use JUMP_REGNO as a temporary PIC register. */
2081 const char *name_lr = reg_names[LR_REGNO];
2082 const char *name_gppic = name_jmp;
2083 const char *name_tmp = reg_names[TEMP_REGNO];
2084
2085 fprintf (file, "\tmovsg %s,%s\n", name_lr, name_tmp);
2086 fprintf (file, "\tcall 1f\n");
2087 fprintf (file, "1:\tmovsg %s,%s\n", name_lr, name_gppic);
2088 fprintf (file, "\tmovgs %s,%s\n", name_tmp, name_lr);
2089 fprintf (file, "\tsethi%s #gprelhi(1b),%s\n", parallel, name_tmp);
2090 fprintf (file, "\tsetlo #gprello(1b),%s\n", name_tmp);
2091 fprintf (file, "\tsub %s,%s,%s\n", name_gppic, name_tmp, name_gppic);
2092
2093 fprintf (file, "\tsethi%s #gprelhi(", parallel);
2094 assemble_name (file, name_func);
2095 fprintf (file, "),%s\n", name_tmp);
2096
2097 fprintf (file, "\tsetlo #gprello(");
2098 assemble_name (file, name_func);
2099 fprintf (file, "),%s\n", name_tmp);
2100
2101 fprintf (file, "\tadd %s,%s,%s\n", name_gppic, name_tmp, name_jmp);
2102 }
2103
2104 /* Jump to the function address. */
2105 fprintf (file, "\tjmpl @(%s,%s)\n", name_jmp, reg_names[GPR_FIRST+0]);
2106 }
2107
2108 \f
2109 /* A C expression which is nonzero if a function must have and use a frame
2110 pointer. This expression is evaluated in the reload pass. If its value is
2111 nonzero the function will have a frame pointer.
2112
2113 The expression can in principle examine the current function and decide
2114 according to the facts, but on most machines the constant 0 or the constant
2115 1 suffices. Use 0 when the machine allows code to be generated with no
2116 frame pointer, and doing so saves some time or space. Use 1 when there is
2117 no possible advantage to avoiding a frame pointer.
2118
2119 In certain cases, the compiler does not know how to produce valid code
2120 without a frame pointer. The compiler recognizes those cases and
2121 automatically gives the function a frame pointer regardless of what
2122 `FRAME_POINTER_REQUIRED' says. You don't need to worry about them.
2123
2124 In a function that does not require a frame pointer, the frame pointer
2125 register can be allocated for ordinary usage, unless you mark it as a fixed
2126 register. See `FIXED_REGISTERS' for more information. */
2127
2128 /* On frv, create a frame whenever we need to allocate stack space.  */
2129
2130 int
2131 frv_frame_pointer_required (void)
2132 {
2133 /* If we are forgoing the usual linkage requirements, we only need
2134 a frame pointer if the stack pointer might change. */
2135 if (!TARGET_LINKED_FP)
2136 return !current_function_sp_is_unchanging;
2137
2138 if (! current_function_is_leaf)
2139 return TRUE;
2140
2141 if (get_frame_size () != 0)
2142 return TRUE;
2143
2144 if (cfun->stdarg)
2145 return TRUE;
2146
2147 if (!current_function_sp_is_unchanging)
2148 return TRUE;
2149
2150 if (!TARGET_FDPIC && flag_pic && crtl->uses_pic_offset_table)
2151 return TRUE;
2152
2153 if (profile_flag)
2154 return TRUE;
2155
2156 if (cfun->machine->frame_needed)
2157 return TRUE;
2158
2159 return FALSE;
2160 }
2161
2162 \f
2163 /* This macro is similar to `INITIAL_FRAME_POINTER_OFFSET'. It specifies the
2164 initial difference between the specified pair of registers. This macro must
2165 be defined if `ELIMINABLE_REGS' is defined. */
2166
2167 /* See frv_stack_info for more details on the frv stack frame. */
2168
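/* As a consistency check on the offsets computed below: the ARG_POINTER to
   FRAME_POINTER offset plus the FRAME_POINTER to STACK_POINTER offset
   equals the ARG_POINTER to STACK_POINTER offset, i.e. total_size minus
   pretend_size.  */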
2169 int
2170 frv_initial_elimination_offset (int from, int to)
2171 {
2172 frv_stack_t *info = frv_stack_info ();
2173 int ret = 0;
2174
2175 if (to == STACK_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2176 ret = info->total_size - info->pretend_size;
2177
2178 else if (to == STACK_POINTER_REGNUM && from == FRAME_POINTER_REGNUM)
2179 ret = info->reg_offset[FRAME_POINTER_REGNUM];
2180
2181 else if (to == FRAME_POINTER_REGNUM && from == ARG_POINTER_REGNUM)
2182 ret = (info->total_size
2183 - info->reg_offset[FRAME_POINTER_REGNUM]
2184 - info->pretend_size);
2185
2186 else
2187 gcc_unreachable ();
2188
2189 if (TARGET_DEBUG_STACK)
2190 fprintf (stderr, "Eliminate %s to %s by adding %d\n",
2191 reg_names [from], reg_names[to], ret);
2192
2193 return ret;
2194 }
2195
2196 \f
2197 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2198
2199 static void
2200 frv_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
2201 enum machine_mode mode,
2202 tree type ATTRIBUTE_UNUSED,
2203 int *pretend_size,
2204 int second_time)
2205 {
2206 if (TARGET_DEBUG_ARG)
2207 fprintf (stderr,
2208 "setup_vararg: words = %2d, mode = %4s, pretend_size = %d, second_time = %d\n",
2209 *cum, GET_MODE_NAME (mode), *pretend_size, second_time);
2210 }
2211
2212 \f
2213 /* Worker function for TARGET_EXPAND_BUILTIN_SAVEREGS. */
2214
2215 static rtx
2216 frv_expand_builtin_saveregs (void)
2217 {
2218 int offset = UNITS_PER_WORD * FRV_NUM_ARG_REGS;
2219
2220 if (TARGET_DEBUG_ARG)
2221 fprintf (stderr, "expand_builtin_saveregs: offset from ap = %d\n",
2222 offset);
2223
2224 return gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx, GEN_INT (- offset));
2225 }
2226
2227 \f
2228 /* Expand __builtin_va_start to implement the va_start macro.  */
2229
2230 static void
2231 frv_expand_builtin_va_start (tree valist, rtx nextarg)
2232 {
2233 tree t;
2234 int num = crtl->args.info - FIRST_ARG_REGNUM - FRV_NUM_ARG_REGS;
2235
2236 nextarg = gen_rtx_PLUS (Pmode, virtual_incoming_args_rtx,
2237 GEN_INT (UNITS_PER_WORD * num));
2238
2239 if (TARGET_DEBUG_ARG)
2240 {
2241 fprintf (stderr, "va_start: args_info = %d, num = %d\n",
2242 crtl->args.info, num);
2243
2244 debug_rtx (nextarg);
2245 }
2246
2247 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
2248 fold_convert (TREE_TYPE (valist),
2249 make_tree (sizetype, nextarg)));
2250 TREE_SIDE_EFFECTS (t) = 1;
2251
2252 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2253 }
2254
2255 \f
2256 /* Expand a block move operation, and return 1 if successful. Return 0
2257 if we should let the compiler generate normal code.
2258
2259 operands[0] is the destination
2260 operands[1] is the source
2261 operands[2] is the length
2262 operands[3] is the alignment */
2263
2264 /* Maximum number of loads to do before doing the stores.  */
2265 #ifndef MAX_MOVE_REG
2266 #define MAX_MOVE_REG 4
2267 #endif
2268
2269 /* Maximum number of total loads to do. */
2270 #ifndef TOTAL_MOVE_REG
2271 #define TOTAL_MOVE_REG 8
2272 #endif
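/* For example, a constant 12-byte copy with 4-byte alignment expands to
   three SImode loads into fresh pseudos followed by three SImode stores;
   the stores are queued in stores[] so that up to MAX_MOVE_REG loads are
   emitted before their corresponding stores.  */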
2273
2274 int
2275 frv_expand_block_move (rtx operands[])
2276 {
2277 rtx orig_dest = operands[0];
2278 rtx orig_src = operands[1];
2279 rtx bytes_rtx = operands[2];
2280 rtx align_rtx = operands[3];
2281 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2282 int align;
2283 int bytes;
2284 int offset;
2285 int num_reg;
2286 int i;
2287 rtx src_reg;
2288 rtx dest_reg;
2289 rtx src_addr;
2290 rtx dest_addr;
2291 rtx src_mem;
2292 rtx dest_mem;
2293 rtx tmp_reg;
2294 rtx stores[MAX_MOVE_REG];
2295 int move_bytes;
2296 enum machine_mode mode;
2297
2298 /* If this is not a fixed size move, just call memcpy. */
2299 if (! constp)
2300 return FALSE;
2301
2302 /* This should be a fixed size alignment. */
2303 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2304
2305 align = INTVAL (align_rtx);
2306
2307 /* Anything to move? */
2308 bytes = INTVAL (bytes_rtx);
2309 if (bytes <= 0)
2310 return TRUE;
2311
2312 /* Don't support really large moves.  */
2313 if (bytes > TOTAL_MOVE_REG*align)
2314 return FALSE;
2315
2316 /* Move the address into scratch registers. */
2317 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2318 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2319
2320 num_reg = offset = 0;
2321 for ( ; bytes > 0; (bytes -= move_bytes), (offset += move_bytes))
2322 {
2323 /* Calculate the correct offset for src/dest. */
2324 if (offset == 0)
2325 {
2326 src_addr = src_reg;
2327 dest_addr = dest_reg;
2328 }
2329 else
2330 {
2331 src_addr = plus_constant (src_reg, offset);
2332 dest_addr = plus_constant (dest_reg, offset);
2333 }
2334
2335 /* Generate the appropriate load and store, saving the stores
2336 for later. */
2337 if (bytes >= 4 && align >= 4)
2338 mode = SImode;
2339 else if (bytes >= 2 && align >= 2)
2340 mode = HImode;
2341 else
2342 mode = QImode;
2343
2344 move_bytes = GET_MODE_SIZE (mode);
2345 tmp_reg = gen_reg_rtx (mode);
2346 src_mem = change_address (orig_src, mode, src_addr);
2347 dest_mem = change_address (orig_dest, mode, dest_addr);
2348 emit_insn (gen_rtx_SET (VOIDmode, tmp_reg, src_mem));
2349 stores[num_reg++] = gen_rtx_SET (VOIDmode, dest_mem, tmp_reg);
2350
2351 if (num_reg >= MAX_MOVE_REG)
2352 {
2353 for (i = 0; i < num_reg; i++)
2354 emit_insn (stores[i]);
2355 num_reg = 0;
2356 }
2357 }
2358
2359 for (i = 0; i < num_reg; i++)
2360 emit_insn (stores[i]);
2361
2362 return TRUE;
2363 }
2364
2365 \f
2366 /* Expand a block clear operation, and return 1 if successful. Return 0
2367 if we should let the compiler generate normal code.
2368
2369 operands[0] is the destination
2370 operands[1] is the length
2371 operands[3] is the alignment */
2372
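/* As with frv_expand_block_move above, a constant 8-byte clear with 4-byte
   alignment expands to two SImode stores of constant zero, emitted via gr0
   as noted in the comment inside the loop below.  */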
2373 int
2374 frv_expand_block_clear (rtx operands[])
2375 {
2376 rtx orig_dest = operands[0];
2377 rtx bytes_rtx = operands[1];
2378 rtx align_rtx = operands[3];
2379 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
2380 int align;
2381 int bytes;
2382 int offset;
2383 int num_reg;
2384 rtx dest_reg;
2385 rtx dest_addr;
2386 rtx dest_mem;
2387 int clear_bytes;
2388 enum machine_mode mode;
2389
2390 /* If this is not a fixed size clear, just call memset.  */
2391 if (! constp)
2392 return FALSE;
2393
2394 /* This should be a fixed size alignment. */
2395 gcc_assert (GET_CODE (align_rtx) == CONST_INT);
2396
2397 align = INTVAL (align_rtx);
2398
2399 /* Anything to move? */
2400 bytes = INTVAL (bytes_rtx);
2401 if (bytes <= 0)
2402 return TRUE;
2403
2404 /* Don't support really large clears.  */
2405 if (bytes > TOTAL_MOVE_REG*align)
2406 return FALSE;
2407
2408 /* Move the address into a scratch register. */
2409 dest_reg = copy_addr_to_reg (XEXP (orig_dest, 0));
2410
2411 num_reg = offset = 0;
2412 for ( ; bytes > 0; (bytes -= clear_bytes), (offset += clear_bytes))
2413 {
2414 /* Calculate the correct offset for src/dest. */
2415 dest_addr = ((offset == 0)
2416 ? dest_reg
2417 : plus_constant (dest_reg, offset));
2418
2419 /* Generate the appropriate store of gr0. */
2420 if (bytes >= 4 && align >= 4)
2421 mode = SImode;
2422 else if (bytes >= 2 && align >= 2)
2423 mode = HImode;
2424 else
2425 mode = QImode;
2426
2427 clear_bytes = GET_MODE_SIZE (mode);
2428 dest_mem = change_address (orig_dest, mode, dest_addr);
2429 emit_insn (gen_rtx_SET (VOIDmode, dest_mem, const0_rtx));
2430 }
2431
2432 return TRUE;
2433 }
2434
2435 \f
2436 /* The following variable records the operands of the current output insn,
2437    for use when outputting operand modifiers in its assembler code.  */
2438
2439 static rtx *frv_insn_operands;
2440
2441 /* The following function adds the ".p" packing suffix to the assembler
2442    opcode of the current insn when necessary.  */
2443
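/* For example, when the current insn is to be packed with its successor,
   an opcode such as "add" is rewritten as "add.p"; any '%' operand
   references seen before the first space or tab are expanded through
   frv_print_operand using the operand vector recorded by
   frv_final_prescan_insn.  */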
2444 const char *
2445 frv_asm_output_opcode (FILE *f, const char *ptr)
2446 {
2447 int c;
2448
2449 if (frv_insn_packing_flag <= 0)
2450 return ptr;
2451
2452 for (; *ptr && *ptr != ' ' && *ptr != '\t';)
2453 {
2454 c = *ptr++;
2455 if (c == '%' && ((*ptr >= 'a' && *ptr <= 'z')
2456 || (*ptr >= 'A' && *ptr <= 'Z')))
2457 {
2458 int letter = *ptr++;
2459
2460 c = atoi (ptr);
2461 frv_print_operand (f, frv_insn_operands [c], letter);
2462 while ((c = *ptr) >= '0' && c <= '9')
2463 ptr++;
2464 }
2465 else
2466 fputc (c, f);
2467 }
2468
2469 fprintf (f, ".p");
2470
2471 return ptr;
2472 }
2473
2474 /* Set up the packing bit for the current output insn. Note that this
2475 function is not called for asm insns. */
2476
2477 void
2478 frv_final_prescan_insn (rtx insn, rtx *opvec,
2479 int noperands ATTRIBUTE_UNUSED)
2480 {
2481 if (INSN_P (insn))
2482 {
2483 if (frv_insn_packing_flag >= 0)
2484 {
2485 frv_insn_operands = opvec;
2486 frv_insn_packing_flag = PACKING_FLAG_P (insn);
2487 }
2488 else if (recog_memoized (insn) >= 0
2489 && get_attr_acc_group (insn) == ACC_GROUP_ODD)
2490 /* Packing optimizations have been disabled, but INSN can only
2491 be issued in M1. Insert an mnop in M0. */
2492 fprintf (asm_out_file, "\tmnop.p\n");
2493 }
2494 }
2495
2496
2497 \f
2498 /* A C expression whose value is RTL representing the address in a stack frame
2499 where the pointer to the caller's frame is stored. Assume that FRAMEADDR is
2500 an RTL expression for the address of the stack frame itself.
2501
2502 If you don't define this macro, the default is to return the value of
2503 FRAMEADDR--that is, the stack frame address is also the address of the stack
2504 word that points to the previous frame. */
2505
2506 /* The default is correct, but we need to make sure the frame gets created. */
2507 rtx
2508 frv_dynamic_chain_address (rtx frame)
2509 {
2510 cfun->machine->frame_needed = 1;
2511 return frame;
2512 }
2513
2514
2515 /* A C expression whose value is RTL representing the value of the return
2516 address for the frame COUNT steps up from the current frame, after the
2517 prologue. FRAMEADDR is the frame pointer of the COUNT frame, or the frame
2518 pointer of the COUNT - 1 frame if `RETURN_ADDR_IN_PREVIOUS_FRAME' is
2519 defined.
2520
2521 The value of the expression must always be the correct address when COUNT is
2522    zero, but may be `NULL_RTX' if there is no way to determine the return
2523 address of other frames. */
2524
2525 rtx
2526 frv_return_addr_rtx (int count, rtx frame)
2527 {
2528 if (count != 0)
2529 return const0_rtx;
2530 cfun->machine->frame_needed = 1;
2531 return gen_rtx_MEM (Pmode, plus_constant (frame, 8));
2532 }
2533
2534 /* Given a memory reference MEMREF, interpret the referenced memory as
2535 an array of MODE values, and return a reference to the element
2536 specified by INDEX. Assume that any pre-modification implicit in
2537 MEMREF has already happened.
2538
2539 MEMREF must be a legitimate operand for modes larger than SImode.
2540 GO_IF_LEGITIMATE_ADDRESS forbids register+register addresses, which
2541 this function cannot handle. */
2542 rtx
2543 frv_index_memory (rtx memref, enum machine_mode mode, int index)
2544 {
2545 rtx base = XEXP (memref, 0);
2546 if (GET_CODE (base) == PRE_MODIFY)
2547 base = XEXP (base, 0);
2548 return change_address (memref, mode,
2549 plus_constant (base, index * GET_MODE_SIZE (mode)));
2550 }
2551
2552 \f
2553 /* Print a memory address as an operand to reference that memory location. */
2554 void
2555 frv_print_operand_address (FILE * stream, rtx x)
2556 {
2557 if (GET_CODE (x) == MEM)
2558 x = XEXP (x, 0);
2559
2560 switch (GET_CODE (x))
2561 {
2562 case REG:
2563 fputs (reg_names [ REGNO (x)], stream);
2564 return;
2565
2566 case CONST_INT:
2567 fprintf (stream, "%ld", (long) INTVAL (x));
2568 return;
2569
2570 case SYMBOL_REF:
2571 assemble_name (stream, XSTR (x, 0));
2572 return;
2573
2574 case LABEL_REF:
2575 case CONST:
2576 output_addr_const (stream, x);
2577 return;
2578
2579 case PLUS:
2580 /* Poorly constructed asm statements can trigger this alternative.
2581 See gcc/testsuite/gcc.dg/asm-4.c for an example. */
2582 frv_print_operand_memory_reference (stream, x, 0);
2583 return;
2584
2585 default:
2586 break;
2587 }
2588
2589 fatal_insn ("bad insn to frv_print_operand_address:", x);
2590 }
2591
2592 \f
2593 static void
2594 frv_print_operand_memory_reference_reg (FILE * stream, rtx x)
2595 {
2596 int regno = true_regnum (x);
2597 if (GPR_P (regno))
2598 fputs (reg_names[regno], stream);
2599 else
2600 fatal_insn ("bad register to frv_print_operand_memory_reference_reg:", x);
2601 }
2602
2603 /* Print a memory reference suitable for the ld/st instructions. */
2604
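/* For example, (plus (reg gr5) (const_int 8)) is printed as "@(gr5,8)", a
   bare base register as "@(grN,gr0)", and a PRE_MODIFY address prints its
   base register and index register.  */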
2605 static void
2606 frv_print_operand_memory_reference (FILE * stream, rtx x, int addr_offset)
2607 {
2608 struct frv_unspec unspec;
2609 rtx x0 = NULL_RTX;
2610 rtx x1 = NULL_RTX;
2611
2612 switch (GET_CODE (x))
2613 {
2614 case SUBREG:
2615 case REG:
2616 x0 = x;
2617 break;
2618
2619 case PRE_MODIFY: /* (pre_modify (reg) (plus (reg) (reg))) */
2620 x0 = XEXP (x, 0);
2621 x1 = XEXP (XEXP (x, 1), 1);
2622 break;
2623
2624 case CONST_INT:
2625 x1 = x;
2626 break;
2627
2628 case PLUS:
2629 x0 = XEXP (x, 0);
2630 x1 = XEXP (x, 1);
2631 if (GET_CODE (x0) == CONST_INT)
2632 {
2633 x0 = XEXP (x, 1);
2634 x1 = XEXP (x, 0);
2635 }
2636 break;
2637
2638 default:
2639 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2640 break;
2641
2642 }
2643
2644 if (addr_offset)
2645 {
2646 if (!x1)
2647 x1 = const0_rtx;
2648 else if (GET_CODE (x1) != CONST_INT)
2649 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2650 }
2651
2652 fputs ("@(", stream);
2653 if (!x0)
2654 fputs (reg_names[GPR_R0], stream);
2655 else if (GET_CODE (x0) == REG || GET_CODE (x0) == SUBREG)
2656 frv_print_operand_memory_reference_reg (stream, x0);
2657 else
2658 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2659
2660 fputs (",", stream);
2661 if (!x1)
2662 fputs (reg_names [GPR_R0], stream);
2663
2664 else
2665 {
2666 switch (GET_CODE (x1))
2667 {
2668 case SUBREG:
2669 case REG:
2670 frv_print_operand_memory_reference_reg (stream, x1);
2671 break;
2672
2673 case CONST_INT:
2674 fprintf (stream, "%ld", (long) (INTVAL (x1) + addr_offset));
2675 break;
2676
2677 case CONST:
2678 if (!frv_const_unspec_p (x1, &unspec))
2679 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x1);
2680 frv_output_const_unspec (stream, &unspec);
2681 break;
2682
2683 default:
2684 fatal_insn ("bad insn to frv_print_operand_memory_reference:", x);
2685 }
2686 }
2687
2688 fputs (")", stream);
2689 }
2690
2691 \f
2692 /* Return 2 for likely branches and 0 for non-likely branches.  */
2693
2694 #define FRV_JUMP_LIKELY 2
2695 #define FRV_JUMP_NOT_LIKELY 0
2696
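/* The value returned here is printed by the '#' case of frv_print_operand;
   it is presumably used as the static branch-prediction hint operand in
   the conditional branch templates.  */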
2697 static int
2698 frv_print_operand_jump_hint (rtx insn)
2699 {
2700 rtx note;
2701 rtx labelref;
2702 int ret;
2703 HOST_WIDE_INT prob = -1;
2704 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
2705
2706 gcc_assert (GET_CODE (insn) == JUMP_INSN);
2707
2708 /* Assume any non-conditional jump is likely. */
2709 if (! any_condjump_p (insn))
2710 ret = FRV_JUMP_LIKELY;
2711
2712 else
2713 {
2714 labelref = condjump_label (insn);
2715 if (labelref)
2716 {
2717 rtx label = XEXP (labelref, 0);
2718 jump_type = (insn_current_address > INSN_ADDRESSES (INSN_UID (label))
2719 ? BACKWARD
2720 : FORWARD);
2721 }
2722
2723 note = find_reg_note (insn, REG_BR_PROB, 0);
2724 if (!note)
2725 ret = ((jump_type == BACKWARD) ? FRV_JUMP_LIKELY : FRV_JUMP_NOT_LIKELY);
2726
2727 else
2728 {
2729 prob = INTVAL (XEXP (note, 0));
2730 ret = ((prob >= (REG_BR_PROB_BASE / 2))
2731 ? FRV_JUMP_LIKELY
2732 : FRV_JUMP_NOT_LIKELY);
2733 }
2734 }
2735
2736 #if 0
2737 if (TARGET_DEBUG)
2738 {
2739 char *direction;
2740
2741 switch (jump_type)
2742 {
2743 default:
2744 case UNKNOWN: direction = "unknown jump direction"; break;
2745 case BACKWARD: direction = "jump backward"; break;
2746 case FORWARD: direction = "jump forward"; break;
2747 }
2748
2749 fprintf (stderr,
2750 "%s: uid %ld, %s, probability = %ld, max prob. = %ld, hint = %d\n",
2751 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
2752 (long)INSN_UID (insn), direction, (long)prob,
2753 (long)REG_BR_PROB_BASE, ret);
2754 }
2755 #endif
2756
2757 return ret;
2758 }
2759
2760 \f
2761 /* Return the comparison operator to use for CODE given that the ICC
2762 register is OP0. */
2763
2764 static const char *
2765 comparison_string (enum rtx_code code, rtx op0)
2766 {
2767 bool is_nz_p = GET_MODE (op0) == CC_NZmode;
2768 switch (code)
2769 {
2770 default: output_operand_lossage ("bad condition code");
2771 case EQ: return "eq";
2772 case NE: return "ne";
2773 case LT: return is_nz_p ? "n" : "lt";
2774 case LE: return "le";
2775 case GT: return "gt";
2776 case GE: return is_nz_p ? "p" : "ge";
2777 case LTU: return is_nz_p ? "no" : "c";
2778 case LEU: return is_nz_p ? "eq" : "ls";
2779 case GTU: return is_nz_p ? "ne" : "hi";
2780 case GEU: return is_nz_p ? "ra" : "nc";
2781 }
2782 }
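/* For example, the 'c' modifier below prints a (gt (reg) ...) comparison
   on an ordinary ICC register as "gt", while the 'C' modifier reverses the
   condition first and prints "le"; for CC_NZmode comparisons the alternate
   mnemonics above are used instead.  */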
2783
2784 /* Print an operand to an assembler instruction.
2785
2786 `%' followed by a letter and a digit says to output an operand in an
2787 alternate fashion. Four letters have standard, built-in meanings described
2788 below. The machine description macro `PRINT_OPERAND' can define additional
2789 letters with nonstandard meanings.
2790
2791 `%cDIGIT' can be used to substitute an operand that is a constant value
2792 without the syntax that normally indicates an immediate operand.
2793
2794 `%nDIGIT' is like `%cDIGIT' except that the value of the constant is negated
2795 before printing.
2796
2797 `%aDIGIT' can be used to substitute an operand as if it were a memory
2798 reference, with the actual operand treated as the address. This may be
2799 useful when outputting a "load address" instruction, because often the
2800 assembler syntax for such an instruction requires you to write the operand
2801 as if it were a memory reference.
2802
2803 `%lDIGIT' is used to substitute a `label_ref' into a jump instruction.
2804
2805 `%=' outputs a number which is unique to each instruction in the entire
2806 compilation. This is useful for making local labels to be referred to more
2807 than once in a single template that generates multiple assembler
2808 instructions.
2809
2810 `%' followed by a punctuation character specifies a substitution that does
2811 not use an operand. Only one case is standard: `%%' outputs a `%' into the
2812 assembler code. Other nonstandard cases can be defined in the
2813 `PRINT_OPERAND' macro. You must also define which punctuation characters
2814 are valid with the `PRINT_OPERAND_PUNCT_VALID_P' macro. */
2815
2816 void
2817 frv_print_operand (FILE * file, rtx x, int code)
2818 {
2819 struct frv_unspec unspec;
2820 HOST_WIDE_INT value;
2821 int offset;
2822
2823 if (code != 0 && !ISALPHA (code))
2824 value = 0;
2825
2826 else if (GET_CODE (x) == CONST_INT)
2827 value = INTVAL (x);
2828
2829 else if (GET_CODE (x) == CONST_DOUBLE)
2830 {
2831 if (GET_MODE (x) == SFmode)
2832 {
2833 REAL_VALUE_TYPE rv;
2834 long l;
2835
2836 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
2837 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
2838 value = l;
2839 }
2840
2841 else if (GET_MODE (x) == VOIDmode)
2842 value = CONST_DOUBLE_LOW (x);
2843
2844 else
2845 fatal_insn ("bad insn in frv_print_operand, bad const_double", x);
2846 }
2847
2848 else
2849 value = 0;
2850
2851 switch (code)
2852 {
2853
2854 case '.':
2855 /* Output r0. */
2856 fputs (reg_names[GPR_R0], file);
2857 break;
2858
2859 case '#':
2860 fprintf (file, "%d", frv_print_operand_jump_hint (current_output_insn));
2861 break;
2862
2863 case '@':
2864 /* Output small data area base register (gr16). */
2865 fputs (reg_names[SDA_BASE_REG], file);
2866 break;
2867
2868 case '~':
2869 /* Output pic register (gr17). */
2870 fputs (reg_names[PIC_REGNO], file);
2871 break;
2872
2873 case '*':
2874 /* Output the temporary integer CCR register. */
2875 fputs (reg_names[ICR_TEMP], file);
2876 break;
2877
2878 case '&':
2879 /* Output the temporary integer CC register. */
2880 fputs (reg_names[ICC_TEMP], file);
2881 break;
2882
2883 /* case 'a': print an address. */
2884
2885 case 'C':
2886 /* Print appropriate test for integer branch false operation. */
2887 fputs (comparison_string (reverse_condition (GET_CODE (x)),
2888 XEXP (x, 0)), file);
2889 break;
2890
2891 case 'c':
2892 /* Print appropriate test for integer branch true operation. */
2893 fputs (comparison_string (GET_CODE (x), XEXP (x, 0)), file);
2894 break;
2895
2896 case 'e':
2897 /* Print 1 for a NE and 0 for an EQ to give the final argument
2898 for a conditional instruction. */
2899 if (GET_CODE (x) == NE)
2900 fputs ("1", file);
2901
2902 else if (GET_CODE (x) == EQ)
2903 fputs ("0", file);
2904
2905 else
2906 fatal_insn ("bad insn to frv_print_operand, 'e' modifier:", x);
2907 break;
2908
2909 case 'F':
2910 /* Print appropriate test for floating point branch false operation. */
2911 switch (GET_CODE (x))
2912 {
2913 default:
2914 fatal_insn ("bad insn to frv_print_operand, 'F' modifier:", x);
2915
2916 case EQ: fputs ("ne", file); break;
2917 case NE: fputs ("eq", file); break;
2918 case LT: fputs ("uge", file); break;
2919 case LE: fputs ("ug", file); break;
2920 case GT: fputs ("ule", file); break;
2921 case GE: fputs ("ul", file); break;
2922 }
2923 break;
2924
2925 case 'f':
2926 /* Print appropriate test for floating point branch true operation. */
2927 switch (GET_CODE (x))
2928 {
2929 default:
2930 fatal_insn ("bad insn to frv_print_operand, 'f' modifier:", x);
2931
2932 case EQ: fputs ("eq", file); break;
2933 case NE: fputs ("ne", file); break;
2934 case LT: fputs ("lt", file); break;
2935 case LE: fputs ("le", file); break;
2936 case GT: fputs ("gt", file); break;
2937 case GE: fputs ("ge", file); break;
2938 }
2939 break;
2940
2941 case 'g':
2942 /* Print appropriate GOT function. */
2943 if (GET_CODE (x) != CONST_INT)
2944 fatal_insn ("bad insn to frv_print_operand, 'g' modifier:", x);
2945 fputs (unspec_got_name (INTVAL (x)), file);
2946 break;
2947
2948 case 'I':
2949 /* Print 'i' if the operand is a constant, or is a memory reference that
2950 adds a constant. */
2951 if (GET_CODE (x) == MEM)
2952 x = ((GET_CODE (XEXP (x, 0)) == PLUS)
2953 ? XEXP (XEXP (x, 0), 1)
2954 : XEXP (x, 0));
2955 else if (GET_CODE (x) == PLUS)
2956 x = XEXP (x, 1);
2957
2958 switch (GET_CODE (x))
2959 {
2960 default:
2961 break;
2962
2963 case CONST_INT:
2964 case SYMBOL_REF:
2965 case CONST:
2966 fputs ("i", file);
2967 break;
2968 }
2969 break;
2970
2971 case 'i':
2972 /* For jump instructions, print 'i' if the operand is a constant or
2973 is an expression that adds a constant. */
2974 if (GET_CODE (x) == CONST_INT)
2975 fputs ("i", file);
2976
2977 else
2978 {
2979 if (GET_CODE (x) == CONST_INT
2980 || (GET_CODE (x) == PLUS
2981 && (GET_CODE (XEXP (x, 1)) == CONST_INT
2982 || GET_CODE (XEXP (x, 0)) == CONST_INT)))
2983 fputs ("i", file);
2984 }
2985 break;
2986
2987 case 'L':
2988 /* Print the lower register of a double word register pair */
2989 if (GET_CODE (x) == REG)
2990 fputs (reg_names[ REGNO (x)+1 ], file);
2991 else
2992 fatal_insn ("bad insn to frv_print_operand, 'L' modifier:", x);
2993 break;
2994
2995 /* case 'l': print a LABEL_REF. */
2996
2997 case 'M':
2998 case 'N':
2999 /* Print a memory reference for ld/st/jmp, %N prints a memory reference
3000 for the second word of double memory operations. */
3001 offset = (code == 'M') ? 0 : UNITS_PER_WORD;
3002 switch (GET_CODE (x))
3003 {
3004 default:
3005 fatal_insn ("bad insn to frv_print_operand, 'M/N' modifier:", x);
3006
3007 case MEM:
3008 frv_print_operand_memory_reference (file, XEXP (x, 0), offset);
3009 break;
3010
3011 case REG:
3012 case SUBREG:
3013 case CONST_INT:
3014 case PLUS:
3015 case SYMBOL_REF:
3016 frv_print_operand_memory_reference (file, x, offset);
3017 break;
3018 }
3019 break;
3020
3021 case 'O':
3022 /* Print the opcode of a command. */
3023 switch (GET_CODE (x))
3024 {
3025 default:
3026 fatal_insn ("bad insn to frv_print_operand, 'O' modifier:", x);
3027
3028 case PLUS: fputs ("add", file); break;
3029 case MINUS: fputs ("sub", file); break;
3030 case AND: fputs ("and", file); break;
3031 case IOR: fputs ("or", file); break;
3032 case XOR: fputs ("xor", file); break;
3033 case ASHIFT: fputs ("sll", file); break;
3034 case ASHIFTRT: fputs ("sra", file); break;
3035 case LSHIFTRT: fputs ("srl", file); break;
3036 }
3037 break;
3038
3039 /* case 'n': negate and print a constant int. */
3040
3041 case 'P':
3042 /* Print PIC label using operand as the number. */
3043 if (GET_CODE (x) != CONST_INT)
3044 fatal_insn ("bad insn to frv_print_operand, P modifier:", x);
3045
3046 fprintf (file, ".LCF%ld", (long)INTVAL (x));
3047 break;
3048
3049 case 'U':
3050 /* Print 'u' if the operand is an update load/store.  */
3051 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == PRE_MODIFY)
3052 fputs ("u", file);
3053 break;
3054
3055 case 'z':
3056 /* If value is 0, print gr0, otherwise it must be a register. */
3057 if (GET_CODE (x) == CONST_INT && INTVAL (x) == 0)
3058 fputs (reg_names[GPR_R0], file);
3059
3060 else if (GET_CODE (x) == REG)
3061 fputs (reg_names [REGNO (x)], file);
3062
3063 else
3064 fatal_insn ("bad insn in frv_print_operand, z case", x);
3065 break;
3066
3067 case 'x':
3068 /* Print constant in hex. */
3069 if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
3070 {
3071 fprintf (file, "%s0x%.4lx", IMMEDIATE_PREFIX, (long) value);
3072 break;
3073 }
3074
3075 /* Fall through. */
3076
3077 case '\0':
3078 if (GET_CODE (x) == REG)
3079 fputs (reg_names [REGNO (x)], file);
3080
3081 else if (GET_CODE (x) == CONST_INT
3082 || GET_CODE (x) == CONST_DOUBLE)
3083 fprintf (file, "%s%ld", IMMEDIATE_PREFIX, (long) value);
3084
3085 else if (frv_const_unspec_p (x, &unspec))
3086 frv_output_const_unspec (file, &unspec);
3087
3088 else if (GET_CODE (x) == MEM)
3089 frv_print_operand_address (file, XEXP (x, 0));
3090
3091 else if (CONSTANT_ADDRESS_P (x))
3092 frv_print_operand_address (file, x);
3093
3094 else
3095 fatal_insn ("bad insn in frv_print_operand, 0 case", x);
3096
3097 break;
3098
3099 default:
3100 fatal_insn ("frv_print_operand: unknown code", x);
3101 break;
3102 }
3103
3104 return;
3105 }
3106
3107 \f
3108 /* A C statement (sans semicolon) for initializing the variable CUM for the
3109 state at the beginning of the argument list. The variable has type
3110 `CUMULATIVE_ARGS'. The value of FNTYPE is the tree node for the data type
3111 of the function which will receive the args, or 0 if the args are to a
3112 compiler support library function. The value of INDIRECT is nonzero when
3113 processing an indirect call, for example a call through a function pointer.
3114 The value of INDIRECT is zero for a call to an explicitly named function, a
3115 library function call, or when `INIT_CUMULATIVE_ARGS' is used to find
3116 arguments for the function being compiled.
3117
3118 When processing a call to a compiler support library function, LIBNAME
3119 identifies which one. It is a `symbol_ref' rtx which contains the name of
3120 the function, as a string. LIBNAME is 0 when an ordinary C function call is
3121 being processed. Thus, each time this macro is called, either LIBNAME or
3122 FNTYPE is nonzero, but never both of them at once. */
3123
3124 void
3125 frv_init_cumulative_args (CUMULATIVE_ARGS *cum,
3126 tree fntype,
3127 rtx libname,
3128 tree fndecl,
3129 int incoming)
3130 {
3131 *cum = FIRST_ARG_REGNUM;
3132
3133 if (TARGET_DEBUG_ARG)
3134 {
3135 fprintf (stderr, "\ninit_cumulative_args:");
3136 if (!fndecl && fntype)
3137 fputs (" indirect", stderr);
3138
3139 if (incoming)
3140 fputs (" incoming", stderr);
3141
3142 if (fntype)
3143 {
3144 tree ret_type = TREE_TYPE (fntype);
3145 fprintf (stderr, " return=%s,",
3146 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3147 }
3148
3149 if (libname && GET_CODE (libname) == SYMBOL_REF)
3150 fprintf (stderr, " libname=%s", XSTR (libname, 0));
3151
3152 if (cfun->returns_struct)
3153 fprintf (stderr, " return-struct");
3154
3155 putc ('\n', stderr);
3156 }
3157 }
3158
3159 \f
3160 /* Return true if we should pass an argument on the stack rather than
3161 in registers. */
3162
3163 static bool
3164 frv_must_pass_in_stack (enum machine_mode mode, const_tree type)
3165 {
3166 if (mode == BLKmode)
3167 return true;
3168 if (type == NULL)
3169 return false;
3170 return AGGREGATE_TYPE_P (type);
3171 }
3172
3173 /* If defined, a C expression that gives the alignment boundary, in bits, of an
3174 argument with the specified mode and type. If it is not defined,
3175 `PARM_BOUNDARY' is used for all arguments. */
3176
3177 int
3178 frv_function_arg_boundary (enum machine_mode mode ATTRIBUTE_UNUSED,
3179 tree type ATTRIBUTE_UNUSED)
3180 {
3181 return BITS_PER_WORD;
3182 }
3183
3184 rtx
3185 frv_function_arg (CUMULATIVE_ARGS *cum,
3186 enum machine_mode mode,
3187 tree type ATTRIBUTE_UNUSED,
3188 int named,
3189 int incoming ATTRIBUTE_UNUSED)
3190 {
3191 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3192 int arg_num = *cum;
3193 rtx ret;
3194 const char *debstr;
3195
3196 /* Return a marker for use in the call instruction. */
3197 if (xmode == VOIDmode)
3198 {
3199 ret = const0_rtx;
3200 debstr = "<0>";
3201 }
3202
3203 else if (arg_num <= LAST_ARG_REGNUM)
3204 {
3205 ret = gen_rtx_REG (xmode, arg_num);
3206 debstr = reg_names[arg_num];
3207 }
3208
3209 else
3210 {
3211 ret = NULL_RTX;
3212 debstr = "memory";
3213 }
3214
3215 if (TARGET_DEBUG_ARG)
3216 fprintf (stderr,
3217 "function_arg: words = %2d, mode = %4s, named = %d, size = %3d, arg = %s\n",
3218 arg_num, GET_MODE_NAME (mode), named, GET_MODE_SIZE (mode), debstr);
3219
3220 return ret;
3221 }
3222
3223 \f
3224 /* A C statement (sans semicolon) to update the summarizer variable CUM to
3225 advance past an argument in the argument list. The values MODE, TYPE and
3226 NAMED describe that argument. Once this is done, the variable CUM is
3227 suitable for analyzing the *following* argument with `FUNCTION_ARG', etc.
3228
3229 This macro need not do anything if the argument in question was passed on
3230 the stack. The compiler knows how to track the amount of stack space used
3231 for arguments without any special help. */
3232
3233 void
3234 frv_function_arg_advance (CUMULATIVE_ARGS *cum,
3235 enum machine_mode mode,
3236 tree type ATTRIBUTE_UNUSED,
3237 int named)
3238 {
3239 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3240 int bytes = GET_MODE_SIZE (xmode);
3241 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3242 int arg_num = *cum;
3243
3244 *cum = arg_num + words;
3245
3246 if (TARGET_DEBUG_ARG)
3247 fprintf (stderr,
3248 "function_adv: words = %2d, mode = %4s, named = %d, size = %3d\n",
3249 arg_num, GET_MODE_NAME (mode), named, words * UNITS_PER_WORD);
3250 }
3251
3252 \f
3253 /* A C expression for the number of words, at the beginning of an argument,
3254    that must be put in registers.  The value must be zero for arguments that are
3255 passed entirely in registers or that are entirely pushed on the stack.
3256
3257 On some machines, certain arguments must be passed partially in registers
3258 and partially in memory. On these machines, typically the first N words of
3259 arguments are passed in registers, and the rest on the stack. If a
3260 multi-word argument (a `double' or a structure) crosses that boundary, its
3261 first few words must be passed in registers and the rest must be pushed.
3262 This macro tells the compiler when this occurs, and how many of the words
3263 should go in registers.
3264
3265 `FUNCTION_ARG' for these arguments should return the first register to be
3266 used by the caller for this argument; likewise `FUNCTION_INCOMING_ARG', for
3267 the called function. */
3268
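/* For example, assuming a DImode argument whose first word falls in the
   last argument register: the function returns UNITS_PER_WORD, so one word
   is passed in that register and the second word goes on the stack.  */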
3269 static int
3270 frv_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3271 tree type ATTRIBUTE_UNUSED, bool named ATTRIBUTE_UNUSED)
3272 {
3273 enum machine_mode xmode = (mode == BLKmode) ? SImode : mode;
3274 int bytes = GET_MODE_SIZE (xmode);
3275 int words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3276 int arg_num = *cum;
3277 int ret;
3278
3279 ret = ((arg_num <= LAST_ARG_REGNUM && arg_num + words > LAST_ARG_REGNUM+1)
3280 ? LAST_ARG_REGNUM - arg_num + 1
3281 : 0);
3282 ret *= UNITS_PER_WORD;
3283
3284 if (TARGET_DEBUG_ARG && ret)
3285 fprintf (stderr, "frv_arg_partial_bytes: %d\n", ret);
3286
3287 return ret;
3288 }
3289
3290 \f
3291 /* Return true if a register is ok to use as a base or index register. */
3292
3293 static FRV_INLINE int
3294 frv_regno_ok_for_base_p (int regno, int strict_p)
3295 {
3296 if (GPR_P (regno))
3297 return TRUE;
3298
3299 if (strict_p)
3300 return (reg_renumber[regno] >= 0 && GPR_P (reg_renumber[regno]));
3301
3302 if (regno == ARG_POINTER_REGNUM)
3303 return TRUE;
3304
3305 return (regno >= FIRST_PSEUDO_REGISTER);
3306 }
3307
3308 \f
3309 /* A C compound statement with a conditional `goto LABEL;' executed if X (an
3310 RTX) is a legitimate memory address on the target machine for a memory
3311 operand of mode MODE.
3312
3313 It usually pays to define several simpler macros to serve as subroutines for
3314 this one. Otherwise it may be too complicated to understand.
3315
3316 This macro must exist in two variants: a strict variant and a non-strict
3317 one. The strict variant is used in the reload pass. It must be defined so
3318 that any pseudo-register that has not been allocated a hard register is
3319 considered a memory reference. In contexts where some kind of register is
3320 required, a pseudo-register with no hard register must be rejected.
3321
3322 The non-strict variant is used in other passes. It must be defined to
3323 accept all pseudo-registers in every context where some kind of register is
3324 required.
3325
3326 Compiler source files that want to use the strict variant of this macro
3327 define the macro `REG_OK_STRICT'. You should use an `#ifdef REG_OK_STRICT'
3328 conditional to define the strict variant in that case and the non-strict
3329 variant otherwise.
3330
3331 Subroutines to check for acceptable registers for various purposes (one for
3332 base registers, one for index registers, and so on) are typically among the
3333 subroutines used to define `GO_IF_LEGITIMATE_ADDRESS'. Then only these
3334 subroutine macros need have two variants; the higher levels of macros may be
3335 the same whether strict or not.
3336
3337 Normally, constant addresses which are the sum of a `symbol_ref' and an
3338 integer are stored inside a `const' RTX to mark them as constant.
3339 Therefore, there is no need to recognize such sums specifically as
3340 legitimate addresses. Normally you would simply recognize any `const' as
3341 legitimate.
3342
3343 Usually `PRINT_OPERAND_ADDRESS' is not prepared to handle constant sums that
3344 are not marked with `const'. It assumes that a naked `plus' indicates
3345 indexing. If so, then you *must* reject such naked constant sums as
3346 illegitimate addresses, so that none of them will be given to
3347 `PRINT_OPERAND_ADDRESS'.
3348
3349 On some machines, whether a symbolic address is legitimate depends on the
3350 section that the address refers to. On these machines, define the macro
3351 `ENCODE_SECTION_INFO' to store the information into the `symbol_ref', and
3352 then check for it here. When you see a `const', you will have to look
3353 inside it to find the `symbol_ref' in order to determine the section.
3354
3355 The best way to modify the name string is by adding text to the beginning,
3356 with suitable punctuation to prevent any ambiguity. Allocate the new name
3357 in `saveable_obstack'. You will have to modify `ASM_OUTPUT_LABELREF' to
3358 remove and decode the added text and output the name accordingly, and define
3359 `(* targetm.strip_name_encoding)' to access the original name string.
3360
3361 You can check the information stored here into the `symbol_ref' in the
3362 definitions of the macros `GO_IF_LEGITIMATE_ADDRESS' and
3363 `PRINT_OPERAND_ADDRESS'. */
3364
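/* In summary, the function below accepts a GPR base on its own, GPR plus
   GPR (only for modes of at most one word unless ALLOW_DOUBLE_REG_P), GPR
   plus a signed 12-bit constant, a small constant on its own, a GOT12
   constant addend, and (pre_modify reg (plus reg reg)); the constant forms
   are rejected when CONDEXEC_P.  */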
3365 int
3366 frv_legitimate_address_p (enum machine_mode mode,
3367 rtx x,
3368 int strict_p,
3369 int condexec_p,
3370 int allow_double_reg_p)
3371 {
3372 rtx x0, x1;
3373 int ret = 0;
3374 HOST_WIDE_INT value;
3375 unsigned regno0;
3376
3377 if (FRV_SYMBOL_REF_TLS_P (x))
3378 return 0;
3379
3380 switch (GET_CODE (x))
3381 {
3382 default:
3383 break;
3384
3385 case SUBREG:
3386 x = SUBREG_REG (x);
3387 if (GET_CODE (x) != REG)
3388 break;
3389
3390 /* Fall through. */
3391
3392 case REG:
3393 ret = frv_regno_ok_for_base_p (REGNO (x), strict_p);
3394 break;
3395
3396 case PRE_MODIFY:
3397 x0 = XEXP (x, 0);
3398 x1 = XEXP (x, 1);
3399 if (GET_CODE (x0) != REG
3400 || ! frv_regno_ok_for_base_p (REGNO (x0), strict_p)
3401 || GET_CODE (x1) != PLUS
3402 || ! rtx_equal_p (x0, XEXP (x1, 0))
3403 || GET_CODE (XEXP (x1, 1)) != REG
3404 || ! frv_regno_ok_for_base_p (REGNO (XEXP (x1, 1)), strict_p))
3405 break;
3406
3407 ret = 1;
3408 break;
3409
3410 case CONST_INT:
3411 /* 12-bit immediate */
3412 if (condexec_p)
3413 ret = FALSE;
3414 else
3415 {
3416 ret = IN_RANGE_P (INTVAL (x), -2048, 2047);
3417
3418 /* If we can't use load/store double operations, make sure we can
3419 address the second word. */
3420 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3421 ret = IN_RANGE_P (INTVAL (x) + GET_MODE_SIZE (mode) - 1,
3422 -2048, 2047);
3423 }
3424 break;
3425
3426 case PLUS:
3427 x0 = XEXP (x, 0);
3428 x1 = XEXP (x, 1);
3429
3430 if (GET_CODE (x0) == SUBREG)
3431 x0 = SUBREG_REG (x0);
3432
3433 if (GET_CODE (x0) != REG)
3434 break;
3435
3436 regno0 = REGNO (x0);
3437 if (!frv_regno_ok_for_base_p (regno0, strict_p))
3438 break;
3439
3440 switch (GET_CODE (x1))
3441 {
3442 default:
3443 break;
3444
3445 case SUBREG:
3446 x1 = SUBREG_REG (x1);
3447 if (GET_CODE (x1) != REG)
3448 break;
3449
3450 /* Fall through. */
3451
3452 case REG:
3453 /* Do not allow reg+reg addressing for modes > 1 word if we
3454 can't depend on having move double instructions. */
3455 if (!allow_double_reg_p && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3456 ret = FALSE;
3457 else
3458 ret = frv_regno_ok_for_base_p (REGNO (x1), strict_p);
3459 break;
3460
3461 case CONST_INT:
3462 /* 12-bit immediate */
3463 if (condexec_p)
3464 ret = FALSE;
3465 else
3466 {
3467 value = INTVAL (x1);
3468 ret = IN_RANGE_P (value, -2048, 2047);
3469
3470 /* If we can't use load/store double operations, make sure we can
3471 address the second word. */
3472 if (ret && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3473 ret = IN_RANGE_P (value + GET_MODE_SIZE (mode) - 1, -2048, 2047);
3474 }
3475 break;
3476
3477 case CONST:
3478 if (!condexec_p && got12_operand (x1, VOIDmode))
3479 ret = TRUE;
3480 break;
3481
3482 }
3483 break;
3484 }
3485
3486 if (TARGET_DEBUG_ADDR)
3487 {
3488 fprintf (stderr, "\n========== GO_IF_LEGITIMATE_ADDRESS, mode = %s, result = %d, addresses are %sstrict%s\n",
3489 GET_MODE_NAME (mode), ret, (strict_p) ? "" : "not ",
3490 (condexec_p) ? ", inside conditional code" : "");
3491 debug_rtx (x);
3492 }
3493
3494 return ret;
3495 }
3496
3497 /* Given an ADDR, generate code for an inlined TLS PLT call and return the result. */
3498 static rtx
3499 gen_inlined_tls_plt (rtx addr)
3500 {
3501 rtx retval, dest;
3502 rtx picreg = get_hard_reg_initial_val (Pmode, FDPIC_REG);
3503
3504
3505 dest = gen_reg_rtx (DImode);
3506
3507 if (flag_pic == 1)
3508 {
3509 /*
3510 -fpic version:
3511
3512 lddi.p @(gr15, #gottlsdesc12(ADDR)), gr8
3513 calll #gettlsoff(ADDR)@(gr8, gr0)
3514 */
3515 emit_insn (gen_tls_lddi (dest, addr, picreg));
3516 }
3517 else
3518 {
3519 /*
3520 -fPIC version:
3521
3522 sethi.p #gottlsdeschi(ADDR), gr8
3523 setlo #gottlsdesclo(ADDR), gr8
3524 ldd #tlsdesc(ADDR)@(gr15, gr8), gr8
3525 calll #gettlsoff(ADDR)@(gr8, gr0)
3526 */
3527 rtx reguse = gen_reg_rtx (Pmode);
3528 emit_insn (gen_tlsoff_hilo (reguse, addr, GEN_INT (R_FRV_GOTTLSDESCHI)));
3529 emit_insn (gen_tls_tlsdesc_ldd (dest, picreg, reguse, addr));
3530 }
3531
3532 retval = gen_reg_rtx (Pmode);
3533 emit_insn (gen_tls_indirect_call (retval, addr, dest, picreg));
3534 return retval;
3535 }
3536
3537 /* Emit a TLSMOFF or TLSMOFF12 offset, depending on -mTLS. Returns
3538 the destination address. */
3539 static rtx
3540 gen_tlsmoff (rtx addr, rtx reg)
3541 {
3542 rtx dest = gen_reg_rtx (Pmode);
3543
3544 if (TARGET_BIG_TLS)
3545 {
3546 /* sethi.p #tlsmoffhi(x), grA
3547 setlo #tlsmofflo(x), grA
3548 */
3549 dest = gen_reg_rtx (Pmode);
3550 emit_insn (gen_tlsoff_hilo (dest, addr,
3551 GEN_INT (R_FRV_TLSMOFFHI)));
3552 dest = gen_rtx_PLUS (Pmode, dest, reg);
3553 }
3554 else
3555 {
3556 /* addi grB, #tlsmoff12(x), grC
3557 -or-
3558 ld/st @(grB, #tlsmoff12(x)), grC
3559 */
3560 dest = gen_reg_rtx (Pmode);
3561 emit_insn (gen_symGOTOFF2reg_i (dest, addr, reg,
3562 GEN_INT (R_FRV_TLSMOFF12)));
3563 }
3564 return dest;
3565 }
3566
3567 /* Generate code for a TLS address. */
3568 static rtx
3569 frv_legitimize_tls_address (rtx addr, enum tls_model model)
3570 {
3571 rtx dest, tp = gen_rtx_REG (Pmode, 29);
3572 rtx picreg = get_hard_reg_initial_val (Pmode, 15);
3573
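/* In the sequences below, GR29 acts as the thread pointer (TP) and GR15
as the PIC/FDPIC base register. */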
3574 switch (model)
3575 {
3576 case TLS_MODEL_INITIAL_EXEC:
3577 if (flag_pic == 1)
3578 {
3579 /* -fpic version.
3580 ldi @(gr15, #gottlsoff12(x)), gr5
3581 */
3582 dest = gen_reg_rtx (Pmode);
3583 emit_insn (gen_tls_load_gottlsoff12 (dest, addr, picreg));
3584 dest = gen_rtx_PLUS (Pmode, tp, dest);
3585 }
3586 else
3587 {
3588 /* -fPIC or anything else.
3589
3590 sethi.p #gottlsoffhi(x), gr14
3591 setlo #gottlsofflo(x), gr14
3592 ld #tlsoff(x)@(gr15, gr14), gr9
3593 */
3594 rtx tmp = gen_reg_rtx (Pmode);
3595 dest = gen_reg_rtx (Pmode);
3596 emit_insn (gen_tlsoff_hilo (tmp, addr,
3597 GEN_INT (R_FRV_GOTTLSOFF_HI)));
3598
3599 emit_insn (gen_tls_tlsoff_ld (dest, picreg, tmp, addr));
3600 dest = gen_rtx_PLUS (Pmode, tp, dest);
3601 }
3602 break;
3603 case TLS_MODEL_LOCAL_DYNAMIC:
3604 {
3605 rtx reg, retval;
3606
3607 if (TARGET_INLINE_PLT)
3608 retval = gen_inlined_tls_plt (GEN_INT (0));
3609 else
3610 {
3611 /* call #gettlsoff(0) */
3612 retval = gen_reg_rtx (Pmode);
3613 emit_insn (gen_call_gettlsoff (retval, GEN_INT (0), picreg));
3614 }
3615
3616 reg = gen_reg_rtx (Pmode);
3617 emit_insn (gen_rtx_SET (VOIDmode, reg,
3618 gen_rtx_PLUS (Pmode,
3619 retval, tp)));
3620
3621 dest = gen_tlsmoff (addr, reg);
3622
3623 /*
3624 dest = gen_reg_rtx (Pmode);
3625 emit_insn (gen_tlsoff_hilo (dest, addr,
3626 GEN_INT (R_FRV_TLSMOFFHI)));
3627 dest = gen_rtx_PLUS (Pmode, dest, reg);
3628 */
3629 break;
3630 }
3631 case TLS_MODEL_LOCAL_EXEC:
3632 dest = gen_tlsmoff (addr, gen_rtx_REG (Pmode, 29));
3633 break;
3634 case TLS_MODEL_GLOBAL_DYNAMIC:
3635 {
3636 rtx retval;
3637
3638 if (TARGET_INLINE_PLT)
3639 retval = gen_inlined_tls_plt (addr);
3640 else
3641 {
3642 /* call #gettlsoff(x) */
3643 retval = gen_reg_rtx (Pmode);
3644 emit_insn (gen_call_gettlsoff (retval, addr, picreg));
3645 }
3646 dest = gen_rtx_PLUS (Pmode, retval, tp);
3647 break;
3648 }
3649 default:
3650 gcc_unreachable ();
3651 }
3652
3653 return dest;
3654 }
3655
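/* Legitimize address X for MODE. The only rewriting done here is for TLS
symbols, which are expanded by frv_legitimize_tls_address; any other
address is returned unchanged. */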
3656 rtx
3657 frv_legitimize_address (rtx x,
3658 rtx oldx ATTRIBUTE_UNUSED,
3659 enum machine_mode mode ATTRIBUTE_UNUSED)
3660 {
3661 if (GET_CODE (x) == SYMBOL_REF)
3662 {
3663 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
3664 if (model != 0)
3665 return frv_legitimize_tls_address (x, model);
3666 }
3667
3668 return x;
3669 }
3670 \f
3671 /* Test whether a local function descriptor is canonical, i.e.,
3672 whether we can use FUNCDESC_GOTOFF to compute the address of the
3673 function. */
3674
3675 static bool
3676 frv_local_funcdesc_p (rtx fnx)
3677 {
3678 tree fn;
3679 enum symbol_visibility vis;
3680 bool ret;
3681
3682 if (! SYMBOL_REF_LOCAL_P (fnx))
3683 return FALSE;
3684
3685 fn = SYMBOL_REF_DECL (fnx);
3686
3687 if (! fn)
3688 return FALSE;
3689
3690 vis = DECL_VISIBILITY (fn);
3691
3692 if (vis == VISIBILITY_PROTECTED)
3693 /* Private function descriptors for protected functions are not
3694 canonical. Temporarily change the visibility to global. */
3695 vis = VISIBILITY_DEFAULT;
3696 else if (flag_shlib)
3697 /* If we're already compiling for a shared library (that, unlike
3698 executables, can't assume that the existence of a definition
3699 implies local binding), we can skip the re-testing. */
3700 return TRUE;
3701
3702 ret = default_binds_local_p_1 (fn, flag_pic);
3703
3704 DECL_VISIBILITY (fn) = vis;
3705
3706 return ret;
3707 }
3708
3709 /* Load the _gp symbol into DEST. SRC is supposed to be the FDPIC
3710 register. */
3711
3712 rtx
3713 frv_gen_GPsym2reg (rtx dest, rtx src)
3714 {
3715 tree gp = get_identifier ("_gp");
3716 rtx gp_sym = gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (gp));
3717
3718 return gen_symGOT2reg (dest, gp_sym, src, GEN_INT (R_FRV_GOT12));
3719 }
3720
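/* Return the name of the relocation operator for unspec number I, as it
appears inside the #...() syntax emitted by frv_output_const_unspec. */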
3721 static const char *
3722 unspec_got_name (int i)
3723 {
3724 switch (i)
3725 {
3726 case R_FRV_GOT12: return "got12";
3727 case R_FRV_GOTHI: return "gothi";
3728 case R_FRV_GOTLO: return "gotlo";
3729 case R_FRV_FUNCDESC: return "funcdesc";
3730 case R_FRV_FUNCDESC_GOT12: return "gotfuncdesc12";
3731 case R_FRV_FUNCDESC_GOTHI: return "gotfuncdeschi";
3732 case R_FRV_FUNCDESC_GOTLO: return "gotfuncdesclo";
3733 case R_FRV_FUNCDESC_VALUE: return "funcdescvalue";
3734 case R_FRV_FUNCDESC_GOTOFF12: return "gotofffuncdesc12";
3735 case R_FRV_FUNCDESC_GOTOFFHI: return "gotofffuncdeschi";
3736 case R_FRV_FUNCDESC_GOTOFFLO: return "gotofffuncdesclo";
3737 case R_FRV_GOTOFF12: return "gotoff12";
3738 case R_FRV_GOTOFFHI: return "gotoffhi";
3739 case R_FRV_GOTOFFLO: return "gotofflo";
3740 case R_FRV_GPREL12: return "gprel12";
3741 case R_FRV_GPRELHI: return "gprelhi";
3742 case R_FRV_GPRELLO: return "gprello";
3743 case R_FRV_GOTTLSOFF_HI: return "gottlsoffhi";
3744 case R_FRV_GOTTLSOFF_LO: return "gottlsofflo";
3745 case R_FRV_TLSMOFFHI: return "tlsmoffhi";
3746 case R_FRV_TLSMOFFLO: return "tlsmofflo";
3747 case R_FRV_TLSMOFF12: return "tlsmoff12";
3748 case R_FRV_TLSDESCHI: return "tlsdeschi";
3749 case R_FRV_TLSDESCLO: return "tlsdesclo";
3750 case R_FRV_GOTTLSDESCHI: return "gottlsdeschi";
3751 case R_FRV_GOTTLSDESCLO: return "gottlsdesclo";
3752 default: gcc_unreachable ();
3753 }
3754 }
3755
3756 /* Write the assembler syntax for UNSPEC to STREAM. Note that any offset
3757 is added inside the relocation operator. */
3758
3759 static void
3760 frv_output_const_unspec (FILE *stream, const struct frv_unspec *unspec)
3761 {
3762 fprintf (stream, "#%s(", unspec_got_name (unspec->reloc));
3763 output_addr_const (stream, plus_constant (unspec->symbol, unspec->offset));
3764 fputs (")", stream);
3765 }
3766
3767 /* Implement FIND_BASE_TERM. See whether ORIG_X represents #gprel12(foo)
3768 or #gotoff12(foo) for some small data symbol foo. If so, return foo,
3769 otherwise return ORIG_X. */
3770
3771 rtx
3772 frv_find_base_term (rtx x)
3773 {
3774 struct frv_unspec unspec;
3775
3776 if (frv_const_unspec_p (x, &unspec)
3777 && frv_small_data_reloc_p (unspec.symbol, unspec.reloc))
3778 return plus_constant (unspec.symbol, unspec.offset);
3779
3780 return x;
3781 }
3782
3783 /* Return 1 if OP is a MEM whose address is a valid FRV address for MODE.
3784 CONDEXEC_P is true if the operand is used by a predicated instruction. */
3785
3786 int
3787 frv_legitimate_memory_operand (rtx op, enum machine_mode mode, int condexec_p)
3788 {
3789 return ((GET_MODE (op) == mode || mode == VOIDmode)
3790 && GET_CODE (op) == MEM
3791 && frv_legitimate_address_p (mode, XEXP (op, 0),
3792 reload_completed, condexec_p, FALSE));
3793 }
3794
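/* Expand an FDPIC call. OPERANDS are the operands of the call expander;
if RET_VALUE is true, OPERANDS[0] receives the return value and the call
operands proper start at OPERANDS[1]. SIBCALL selects the sibcall
patterns. */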
3795 void
3796 frv_expand_fdpic_call (rtx *operands, bool ret_value, bool sibcall)
3797 {
3798 rtx lr = gen_rtx_REG (Pmode, LR_REGNO);
3799 rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REG);
3800 rtx c, rvrtx=0;
3801 rtx addr;
3802
3803 if (ret_value)
3804 {
3805 rvrtx = operands[0];
3806 operands ++;
3807 }
3808
3809 addr = XEXP (operands[0], 0);
3810
3811 /* Inline PLTs if we're optimizing for speed. We'd like to inline
3812 any calls that would involve a PLT, but can't tell, since we
3813 don't know whether an extern function is going to be provided by
3814 a separate translation unit or imported from a separate module.
3815 When compiling for shared libraries, if the function has default
3816 visibility, we assume it's overridable, so we inline the PLT, but
3817 for executables, we don't really have a way to make a good
3818 decision: a function is as likely to be imported from a shared
3819 library as it is to be defined in the executable itself. We
3820 assume executables will get global functions defined locally,
3821 whereas shared libraries will have them potentially overridden,
3822 so we only inline PLTs when compiling for shared libraries.
3823
3824 In order to mark a function as local to a shared library, any
3825 non-default visibility attribute suffices. Unfortunately,
3826 there's no simple way to tag a function declaration as ``in a
3827 different module'', which we could then use to trigger PLT
3828 inlining on executables. There's -minline-plt, but it affects
3829 all external functions, so one would have to also mark function
3830 declarations available in the same module with non-default
3831 visibility, which is advantageous in itself. */
3832 if (GET_CODE (addr) == SYMBOL_REF
3833 && ((!SYMBOL_REF_LOCAL_P (addr) && TARGET_INLINE_PLT)
3834 || sibcall))
3835 {
3836 rtx x, dest;
3837 dest = gen_reg_rtx (SImode);
3838 if (flag_pic != 1)
3839 x = gen_symGOTOFF2reg_hilo (dest, addr, OUR_FDPIC_REG,
3840 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3841 else
3842 x = gen_symGOTOFF2reg (dest, addr, OUR_FDPIC_REG,
3843 GEN_INT (R_FRV_FUNCDESC_GOTOFF12));
3844 emit_insn (x);
3845 crtl->uses_pic_offset_table = TRUE;
3846 addr = dest;
3847 }
3848 else if (GET_CODE (addr) == SYMBOL_REF)
3849 {
3850 /* These are always either local, or handled through a local
3851 PLT. */
3852 if (ret_value)
3853 c = gen_call_value_fdpicsi (rvrtx, addr, operands[1],
3854 operands[2], picreg, lr);
3855 else
3856 c = gen_call_fdpicsi (addr, operands[1], operands[2], picreg, lr);
3857 emit_call_insn (c);
3858 return;
3859 }
3860 else if (! ldd_address_operand (addr, Pmode))
3861 addr = force_reg (Pmode, addr);
3862
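/* Otherwise load the 64-bit function descriptor (code address plus FDPIC
value) with an ldd and call through it. */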
3863 picreg = gen_reg_rtx (DImode);
3864 emit_insn (gen_movdi_ldd (picreg, addr));
3865
3866 if (sibcall && ret_value)
3867 c = gen_sibcall_value_fdpicdi (rvrtx, picreg, const0_rtx);
3868 else if (sibcall)
3869 c = gen_sibcall_fdpicdi (picreg, const0_rtx);
3870 else if (ret_value)
3871 c = gen_call_value_fdpicdi (rvrtx, picreg, const0_rtx, lr);
3872 else
3873 c = gen_call_fdpicdi (picreg, const0_rtx, lr);
3874 emit_call_insn (c);
3875 }
3876 \f
3877 /* Look for a SYMBOL_REF of a function in an rtx. We always want to
3878 process these separately from any offsets, such that we add any
3879 offsets to the function descriptor (the actual pointer), not to the
3880 function address. */
3881
3882 static bool
3883 frv_function_symbol_referenced_p (rtx x)
3884 {
3885 const char *format;
3886 int length;
3887 int j;
3888
3889 if (GET_CODE (x) == SYMBOL_REF)
3890 return SYMBOL_REF_FUNCTION_P (x);
3891
3892 length = GET_RTX_LENGTH (GET_CODE (x));
3893 format = GET_RTX_FORMAT (GET_CODE (x));
3894
3895 for (j = 0; j < length; ++j)
3896 {
3897 switch (format[j])
3898 {
3899 case 'e':
3900 if (frv_function_symbol_referenced_p (XEXP (x, j)))
3901 return TRUE;
3902 break;
3903
3904 case 'V':
3905 case 'E':
3906 if (XVEC (x, j) != 0)
3907 {
3908 int k;
3909 for (k = 0; k < XVECLEN (x, j); ++k)
3910 if (frv_function_symbol_referenced_p (XVECEXP (x, j, k)))
3911 return TRUE;
3912 }
3913 break;
3914
3915 default:
3916 /* Nothing to do. */
3917 break;
3918 }
3919 }
3920
3921 return FALSE;
3922 }
3923
3924 /* Return true if the memory operand is one that can be conditionally
3925 executed. */
3926
3927 int
3928 condexec_memory_operand (rtx op, enum machine_mode mode)
3929 {
3930 enum machine_mode op_mode = GET_MODE (op);
3931 rtx addr;
3932
3933 if (mode != VOIDmode && op_mode != mode)
3934 return FALSE;
3935
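/* Only byte, halfword and word-sized accesses have conditional load/store
forms. */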
3936 switch (op_mode)
3937 {
3938 default:
3939 return FALSE;
3940
3941 case QImode:
3942 case HImode:
3943 case SImode:
3944 case SFmode:
3945 break;
3946 }
3947
3948 if (GET_CODE (op) != MEM)
3949 return FALSE;
3950
3951 addr = XEXP (op, 0);
3952 return frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE);
3953 }
3954 \f
3955 /* Return true if the bare return instruction can be used outside of the
3956 epilogue code. For frv, we only do it if there was no stack allocation. */
3957
3958 int
3959 direct_return_p (void)
3960 {
3961 frv_stack_t *info;
3962
3963 if (!reload_completed)
3964 return FALSE;
3965
3966 info = frv_stack_info ();
3967 return (info->total_size == 0);
3968 }
3969
3970 \f
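/* Expand a move of MODE from SRC to DEST, legitimizing TLS symbol sources
and forcing awkward source operands into registers before the final SET is
emitted. */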
3971 void
3972 frv_emit_move (enum machine_mode mode, rtx dest, rtx src)
3973 {
3974 if (GET_CODE (src) == SYMBOL_REF)
3975 {
3976 enum tls_model model = SYMBOL_REF_TLS_MODEL (src);
3977 if (model != 0)
3978 src = frv_legitimize_tls_address (src, model);
3979 }
3980
3981 switch (mode)
3982 {
3983 case SImode:
3984 if (frv_emit_movsi (dest, src))
3985 return;
3986 break;
3987
3988 case QImode:
3989 case HImode:
3990 case DImode:
3991 case SFmode:
3992 case DFmode:
3993 if (!reload_in_progress
3994 && !reload_completed
3995 && !register_operand (dest, mode)
3996 && !reg_or_0_operand (src, mode))
3997 src = copy_to_mode_reg (mode, src);
3998 break;
3999
4000 default:
4001 gcc_unreachable ();
4002 }
4003
4004 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
4005 }
4006
4007 /* Emit code to handle a MOVSI, adding in the small data register or pic
4008 register if needed to load up addresses. Return TRUE if the appropriate
4009 instructions are emitted. */
4010
4011 int
4012 frv_emit_movsi (rtx dest, rtx src)
4013 {
4014 int base_regno = -1;
4015 int unspec = 0;
4016 rtx sym = src;
4017 struct frv_unspec old_unspec;
4018
4019 if (!reload_in_progress
4020 && !reload_completed
4021 && !register_operand (dest, SImode)
4022 && (!reg_or_0_operand (src, SImode)
4023 /* Virtual registers will almost always be replaced by an
4024 add instruction, so expose this to CSE by copying to
4025 an intermediate register. */
4026 || (GET_CODE (src) == REG
4027 && IN_RANGE_P (REGNO (src),
4028 FIRST_VIRTUAL_REGISTER,
4029 LAST_VIRTUAL_REGISTER))))
4030 {
4031 emit_insn (gen_rtx_SET (VOIDmode, dest, copy_to_mode_reg (SImode, src)));
4032 return TRUE;
4033 }
4034
4035 /* Explicitly add in the PIC or small data register if needed. */
4036 switch (GET_CODE (src))
4037 {
4038 default:
4039 break;
4040
4041 case LABEL_REF:
4042 handle_label:
4043 if (TARGET_FDPIC)
4044 {
4045 /* Using GPREL12, we use a single GOT entry for all symbols
4046 in read-only sections, but trade sequences such as:
4047
4048 sethi #gothi(label), gr#
4049 setlo #gotlo(label), gr#
4050 ld @(gr15,gr#), gr#
4051
4052 for
4053
4054 ld @(gr15,#got12(_gp)), gr#
4055 sethi #gprelhi(label), gr##
4056 setlo #gprello(label), gr##
4057 add gr#, gr##, gr##
4058
4059 We may often be able to share gr# for multiple
4060 computations of GPREL addresses, and we may often fold
4061 the final add into the pair of registers of a load or
4062 store instruction, so it's often profitable. Even when
4063 optimizing for size, we're trading a GOT entry for an
4064 additional instruction, which trades GOT space
4065 (read-write) for code size (read-only, shareable), as
4066 long as the symbol is not used in more than two different
4067 locations.
4068
4069 With -fpie/-fpic, we'd be trading a single load for a
4070 sequence of 4 instructions, because the offset of the
4071 label can't be assumed to be addressable with 12 bits, so
4072 we don't do this. */
4073 if (TARGET_GPREL_RO)
4074 unspec = R_FRV_GPREL12;
4075 else
4076 unspec = R_FRV_GOT12;
4077 }
4078 else if (flag_pic)
4079 base_regno = PIC_REGNO;
4080
4081 break;
4082
4083 case CONST:
4084 if (frv_const_unspec_p (src, &old_unspec))
4085 break;
4086
4087 if (TARGET_FDPIC && frv_function_symbol_referenced_p (XEXP (src, 0)))
4088 {
4089 handle_whatever:
4090 src = force_reg (GET_MODE (XEXP (src, 0)), XEXP (src, 0));
4091 emit_move_insn (dest, src);
4092 return TRUE;
4093 }
4094 else
4095 {
4096 sym = XEXP (sym, 0);
4097 if (GET_CODE (sym) == PLUS
4098 && GET_CODE (XEXP (sym, 0)) == SYMBOL_REF
4099 && GET_CODE (XEXP (sym, 1)) == CONST_INT)
4100 sym = XEXP (sym, 0);
4101 if (GET_CODE (sym) == SYMBOL_REF)
4102 goto handle_sym;
4103 else if (GET_CODE (sym) == LABEL_REF)
4104 goto handle_label;
4105 else
4106 goto handle_whatever;
4107 }
4108 break;
4109
4110 case SYMBOL_REF:
4111 handle_sym:
4112 if (TARGET_FDPIC)
4113 {
4114 enum tls_model model = SYMBOL_REF_TLS_MODEL (sym);
4115
4116 if (model != 0)
4117 {
4118 src = frv_legitimize_tls_address (src, model);
4119 emit_move_insn (dest, src);
4120 return TRUE;
4121 }
4122
4123 if (SYMBOL_REF_FUNCTION_P (sym))
4124 {
4125 if (frv_local_funcdesc_p (sym))
4126 unspec = R_FRV_FUNCDESC_GOTOFF12;
4127 else
4128 unspec = R_FRV_FUNCDESC_GOT12;
4129 }
4130 else
4131 {
4132 if (CONSTANT_POOL_ADDRESS_P (sym))
4133 switch (GET_CODE (get_pool_constant (sym)))
4134 {
4135 case CONST:
4136 case SYMBOL_REF:
4137 case LABEL_REF:
4138 if (flag_pic)
4139 {
4140 unspec = R_FRV_GOTOFF12;
4141 break;
4142 }
4143 /* Fall through. */
4144 default:
4145 if (TARGET_GPREL_RO)
4146 unspec = R_FRV_GPREL12;
4147 else
4148 unspec = R_FRV_GOT12;
4149 break;
4150 }
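/* For other local, non-common symbols, inspect the initializer to see
whether the object ends up in a named or read-only section, and choose
the GOTOFF12, GPREL12 or GOT12 relocation accordingly. */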
4151 else if (SYMBOL_REF_LOCAL_P (sym)
4152 && !SYMBOL_REF_EXTERNAL_P (sym)
4153 && SYMBOL_REF_DECL (sym)
4154 && (!DECL_P (SYMBOL_REF_DECL (sym))
4155 || !DECL_COMMON (SYMBOL_REF_DECL (sym))))
4156 {
4157 tree decl = SYMBOL_REF_DECL (sym);
4158 tree init = TREE_CODE (decl) == VAR_DECL
4159 ? DECL_INITIAL (decl)
4160 : TREE_CODE (decl) == CONSTRUCTOR
4161 ? decl : 0;
4162 int reloc = 0;
4163 bool named_section, readonly;
4164
4165 if (init && init != error_mark_node)
4166 reloc = compute_reloc_for_constant (init);
4167
4168 named_section = TREE_CODE (decl) == VAR_DECL
4169 && lookup_attribute ("section", DECL_ATTRIBUTES (decl));
4170 readonly = decl_readonly_section (decl, reloc);
4171
4172 if (named_section)
4173 unspec = R_FRV_GOT12;
4174 else if (!readonly)
4175 unspec = R_FRV_GOTOFF12;
4176 else if (readonly && TARGET_GPREL_RO)
4177 unspec = R_FRV_GPREL12;
4178 else
4179 unspec = R_FRV_GOT12;
4180 }
4181 else
4182 unspec = R_FRV_GOT12;
4183 }
4184 }
4185
4186 else if (SYMBOL_REF_SMALL_P (sym))
4187 base_regno = SDA_BASE_REG;
4188
4189 else if (flag_pic)
4190 base_regno = PIC_REGNO;
4191
4192 break;
4193 }
4194
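/* Small-data and non-FDPIC PIC references are emitted as a GPREL offset
from the chosen base register (the SDA base or the PIC register). */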
4195 if (base_regno >= 0)
4196 {
4197 if (GET_CODE (sym) == SYMBOL_REF && SYMBOL_REF_SMALL_P (sym))
4198 emit_insn (gen_symGOTOFF2reg (dest, src,
4199 gen_rtx_REG (Pmode, base_regno),
4200 GEN_INT (R_FRV_GPREL12)));
4201 else
4202 emit_insn (gen_symGOTOFF2reg_hilo (dest, src,
4203 gen_rtx_REG (Pmode, base_regno),
4204 GEN_INT (R_FRV_GPREL12)));
4205 if (base_regno == PIC_REGNO)
4206 crtl->uses_pic_offset_table = TRUE;
4207 return TRUE;
4208 }
4209
4210 if (unspec)
4211 {
4212 rtx x;
4213
4214 /* Since OUR_FDPIC_REG is a pseudo register, we can't safely introduce
4215 new uses of it once reload has begun. */
4216 gcc_assert (!reload_in_progress && !reload_completed);
4217
4218 switch (unspec)
4219 {
4220 case R_FRV_GOTOFF12:
4221 if (!frv_small_data_reloc_p (sym, unspec))
4222 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4223 GEN_INT (unspec));
4224 else
4225 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4226 break;
4227 case R_FRV_GPREL12:
4228 if (!frv_small_data_reloc_p (sym, unspec))
4229 x = gen_symGPREL2reg_hilo (dest, src, OUR_FDPIC_REG,
4230 GEN_INT (unspec));
4231 else
4232 x = gen_symGPREL2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4233 break;
4234 case R_FRV_FUNCDESC_GOTOFF12:
4235 if (flag_pic != 1)
4236 x = gen_symGOTOFF2reg_hilo (dest, src, OUR_FDPIC_REG,
4237 GEN_INT (unspec));
4238 else
4239 x = gen_symGOTOFF2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4240 break;
4241 default:
4242 if (flag_pic != 1)
4243 x = gen_symGOT2reg_hilo (dest, src, OUR_FDPIC_REG,
4244 GEN_INT (unspec));
4245 else
4246 x = gen_symGOT2reg (dest, src, OUR_FDPIC_REG, GEN_INT (unspec));
4247 break;
4248 }
4249 emit_insn (x);
4250 crtl->uses_pic_offset_table = TRUE;
4251 return TRUE;
4252 }
4253
4254
4255 return FALSE;
4256 }
4257
4258 \f
4259 /* Return a string to output a single word move. */
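/* A returned template of "#" means the move needs more than one
instruction; the insn is then broken up by the matching define_split. */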
4260
4261 const char *
4262 output_move_single (rtx operands[], rtx insn)
4263 {
4264 rtx dest = operands[0];
4265 rtx src = operands[1];
4266
4267 if (GET_CODE (dest) == REG)
4268 {
4269 int dest_regno = REGNO (dest);
4270 enum machine_mode mode = GET_MODE (dest);
4271
4272 if (GPR_P (dest_regno))
4273 {
4274 if (GET_CODE (src) == REG)
4275 {
4276 /* gpr <- some sort of register */
4277 int src_regno = REGNO (src);
4278
4279 if (GPR_P (src_regno))
4280 return "mov %1, %0";
4281
4282 else if (FPR_P (src_regno))
4283 return "movfg %1, %0";
4284
4285 else if (SPR_P (src_regno))
4286 return "movsg %1, %0";
4287 }
4288
4289 else if (GET_CODE (src) == MEM)
4290 {
4291 /* gpr <- memory */
4292 switch (mode)
4293 {
4294 default:
4295 break;
4296
4297 case QImode:
4298 return "ldsb%I1%U1 %M1,%0";
4299
4300 case HImode:
4301 return "ldsh%I1%U1 %M1,%0";
4302
4303 case SImode:
4304 case SFmode:
4305 return "ld%I1%U1 %M1, %0";
4306 }
4307 }
4308
4309 else if (GET_CODE (src) == CONST_INT
4310 || GET_CODE (src) == CONST_DOUBLE)
4311 {
4312 /* gpr <- integer/floating constant */
4313 HOST_WIDE_INT value;
4314
4315 if (GET_CODE (src) == CONST_INT)
4316 value = INTVAL (src);
4317
4318 else if (mode == SFmode)
4319 {
4320 REAL_VALUE_TYPE rv;
4321 long l;
4322
4323 REAL_VALUE_FROM_CONST_DOUBLE (rv, src);
4324 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
4325 value = l;
4326 }
4327
4328 else
4329 value = CONST_DOUBLE_LOW (src);
4330
4331 if (IN_RANGE_P (value, -32768, 32767))
4332 return "setlos %1, %0";
4333
4334 return "#";
4335 }
4336
4337 else if (GET_CODE (src) == SYMBOL_REF
4338 || GET_CODE (src) == LABEL_REF
4339 || GET_CODE (src) == CONST)
4340 {
4341 return "#";
4342 }
4343 }
4344
4345 else if (FPR_P (dest_regno))
4346 {
4347 if (GET_CODE (src) == REG)
4348 {
4349 /* fpr <- some sort of register */
4350 int src_regno = REGNO (src);
4351
4352 if (GPR_P (src_regno))
4353 return "movgf %1, %0";
4354
4355 else if (FPR_P (src_regno))
4356 {
4357 if (TARGET_HARD_FLOAT)
4358 return "fmovs %1, %0";
4359 else
4360 return "mor %1, %1, %0";
4361 }
4362 }
4363
4364 else if (GET_CODE (src) == MEM)
4365 {
4366 /* fpr <- memory */
4367 switch (mode)
4368 {
4369 default:
4370 break;
4371
4372 case QImode:
4373 return "ldbf%I1%U1 %M1,%0";
4374
4375 case HImode:
4376 return "ldhf%I1%U1 %M1,%0";
4377
4378 case SImode:
4379 case SFmode:
4380 return "ldf%I1%U1 %M1, %0";
4381 }
4382 }
4383
4384 else if (ZERO_P (src))
4385 return "movgf %., %0";
4386 }
4387
4388 else if (SPR_P (dest_regno))
4389 {
4390 if (GET_CODE (src) == REG)
4391 {
4392 /* spr <- some sort of register */
4393 int src_regno = REGNO (src);
4394
4395 if (GPR_P (src_regno))
4396 return "movgs %1, %0";
4397 }
4398 else if (ZERO_P (src))
4399 return "movgs %., %0";
4400 }
4401 }
4402
4403 else if (GET_CODE (dest) == MEM)
4404 {
4405 if (GET_CODE (src) == REG)
4406 {
4407 int src_regno = REGNO (src);
4408 enum machine_mode mode = GET_MODE (dest);
4409
4410 if (GPR_P (src_regno))
4411 {
4412 switch (mode)
4413 {
4414 default:
4415 break;
4416
4417 case QImode:
4418 return "stb%I0%U0 %1, %M0";
4419
4420 case HImode:
4421 return "sth%I0%U0 %1, %M0";
4422
4423 case SImode:
4424 case SFmode:
4425 return "st%I0%U0 %1, %M0";
4426 }
4427 }
4428
4429 else if (FPR_P (src_regno))
4430 {
4431 switch (mode)
4432 {
4433 default:
4434 break;
4435
4436 case QImode:
4437 return "stbf%I0%U0 %1, %M0";
4438
4439 case HImode:
4440 return "sthf%I0%U0 %1, %M0";
4441
4442 case SImode:
4443 case SFmode:
4444 return "stf%I0%U0 %1, %M0";
4445 }
4446 }
4447 }
4448
4449 else if (ZERO_P (src))
4450 {
4451 switch (GET_MODE (dest))
4452 {
4453 default:
4454 break;
4455
4456 case QImode:
4457 return "stb%I0%U0 %., %M0";
4458
4459 case HImode:
4460 return "sth%I0%U0 %., %M0";
4461
4462 case SImode:
4463 case SFmode:
4464 return "st%I0%U0 %., %M0";
4465 }
4466 }
4467 }
4468
4469 fatal_insn ("bad output_move_single operand", insn);
4470 return "";
4471 }
4472
4473 \f
4474 /* Return a string to output a double word move. */
4475
4476 const char *
4477 output_move_double (rtx operands[], rtx insn)
4478 {
4479 rtx dest = operands[0];
4480 rtx src = operands[1];
4481 enum machine_mode mode = GET_MODE (dest);
4482
4483 if (GET_CODE (dest) == REG)
4484 {
4485 int dest_regno = REGNO (dest);
4486
4487 if (GPR_P (dest_regno))
4488 {
4489 if (GET_CODE (src) == REG)
4490 {
4491 /* gpr <- some sort of register */
4492 int src_regno = REGNO (src);
4493
4494 if (GPR_P (src_regno))
4495 return "#";
4496
4497 else if (FPR_P (src_regno))
4498 {
4499 if (((dest_regno - GPR_FIRST) & 1) == 0
4500 && ((src_regno - FPR_FIRST) & 1) == 0)
4501 return "movfgd %1, %0";
4502
4503 return "#";
4504 }
4505 }
4506
4507 else if (GET_CODE (src) == MEM)
4508 {
4509 /* gpr <- memory */
4510 if (dbl_memory_one_insn_operand (src, mode))
4511 return "ldd%I1%U1 %M1, %0";
4512
4513 return "#";
4514 }
4515
4516 else if (GET_CODE (src) == CONST_INT
4517 || GET_CODE (src) == CONST_DOUBLE)
4518 return "#";
4519 }
4520
4521 else if (FPR_P (dest_regno))
4522 {
4523 if (GET_CODE (src) == REG)
4524 {
4525 /* fpr <- some sort of register */
4526 int src_regno = REGNO (src);
4527
4528 if (GPR_P (src_regno))
4529 {
4530 if (((dest_regno - FPR_FIRST) & 1) == 0
4531 && ((src_regno - GPR_FIRST) & 1) == 0)
4532 return "movgfd %1, %0";
4533
4534 return "#";
4535 }
4536
4537 else if (FPR_P (src_regno))
4538 {
4539 if (TARGET_DOUBLE
4540 && ((dest_regno - FPR_FIRST) & 1) == 0
4541 && ((src_regno - FPR_FIRST) & 1) == 0)
4542 return "fmovd %1, %0";
4543
4544 return "#";
4545 }
4546 }
4547
4548 else if (GET_CODE (src) == MEM)
4549 {
4550 /* fpr <- memory */
4551 if (dbl_memory_one_insn_operand (src, mode))
4552 return "lddf%I1%U1 %M1, %0";
4553
4554 return "#";
4555 }
4556
4557 else if (ZERO_P (src))
4558 return "#";
4559 }
4560 }
4561
4562 else if (GET_CODE (dest) == MEM)
4563 {
4564 if (GET_CODE (src) == REG)
4565 {
4566 int src_regno = REGNO (src);
4567
4568 if (GPR_P (src_regno))
4569 {
4570 if (((src_regno - GPR_FIRST) & 1) == 0
4571 && dbl_memory_one_insn_operand (dest, mode))
4572 return "std%I0%U0 %1, %M0";
4573
4574 return "#";
4575 }
4576
4577 if (FPR_P (src_regno))
4578 {
4579 if (((src_regno - FPR_FIRST) & 1) == 0
4580 && dbl_memory_one_insn_operand (dest, mode))
4581 return "stdf%I0%U0 %1, %M0";
4582
4583 return "#";
4584 }
4585 }
4586
4587 else if (ZERO_P (src))
4588 {
4589 if (dbl_memory_one_insn_operand (dest, mode))
4590 return "std%I0%U0 %., %M0";
4591
4592 return "#";
4593 }
4594 }
4595
4596 fatal_insn ("bad output_move_double operand", insn);
4597 return "";
4598 }
4599
4600 \f
4601 /* Return a string to output a single word conditional move.
4602 Operand0 -- EQ/NE of ccr register and 0
4603 Operand1 -- CCR register
4604 Operand2 -- destination
4605 Operand3 -- source */
4606
4607 const char *
4608 output_condmove_single (rtx operands[], rtx insn)
4609 {
4610 rtx dest = operands[2];
4611 rtx src = operands[3];
4612
4613 if (GET_CODE (dest) == REG)
4614 {
4615 int dest_regno = REGNO (dest);
4616 enum machine_mode mode = GET_MODE (dest);
4617
4618 if (GPR_P (dest_regno))
4619 {
4620 if (GET_CODE (src) == REG)
4621 {
4622 /* gpr <- some sort of register */
4623 int src_regno = REGNO (src);
4624
4625 if (GPR_P (src_regno))
4626 return "cmov %z3, %2, %1, %e0";
4627
4628 else if (FPR_P (src_regno))
4629 return "cmovfg %3, %2, %1, %e0";
4630 }
4631
4632 else if (GET_CODE (src) == MEM)
4633 {
4634 /* gpr <- memory */
4635 switch (mode)
4636 {
4637 default:
4638 break;
4639
4640 case QImode:
4641 return "cldsb%I3%U3 %M3, %2, %1, %e0";
4642
4643 case HImode:
4644 return "cldsh%I3%U3 %M3, %2, %1, %e0";
4645
4646 case SImode:
4647 case SFmode:
4648 return "cld%I3%U3 %M3, %2, %1, %e0";
4649 }
4650 }
4651
4652 else if (ZERO_P (src))
4653 return "cmov %., %2, %1, %e0";
4654 }
4655
4656 else if (FPR_P (dest_regno))
4657 {
4658 if (GET_CODE (src) == REG)
4659 {
4660 /* fpr <- some sort of register */
4661 int src_regno = REGNO (src);
4662
4663 if (GPR_P (src_regno))
4664 return "cmovgf %3, %2, %1, %e0";
4665
4666 else if (FPR_P (src_regno))
4667 {
4668 if (TARGET_HARD_FLOAT)
4669 return "cfmovs %3,%2,%1,%e0";
4670 else
4671 return "cmor %3, %3, %2, %1, %e0";
4672 }
4673 }
4674
4675 else if (GET_CODE (src) == MEM)
4676 {
4677 /* fpr <- memory */
4678 if (mode == SImode || mode == SFmode)
4679 return "cldf%I3%U3 %M3, %2, %1, %e0";
4680 }
4681
4682 else if (ZERO_P (src))
4683 return "cmovgf %., %2, %1, %e0";
4684 }
4685 }
4686
4687 else if (GET_CODE (dest) == MEM)
4688 {
4689 if (GET_CODE (src) == REG)
4690 {
4691 int src_regno = REGNO (src);
4692 enum machine_mode mode = GET_MODE (dest);
4693
4694 if (GPR_P (src_regno))
4695 {
4696 switch (mode)
4697 {
4698 default:
4699 break;
4700
4701 case QImode:
4702 return "cstb%I2%U2 %3, %M2, %1, %e0";
4703
4704 case HImode:
4705 return "csth%I2%U2 %3, %M2, %1, %e0";
4706
4707 case SImode:
4708 case SFmode:
4709 return "cst%I2%U2 %3, %M2, %1, %e0";
4710 }
4711 }
4712
4713 else if (FPR_P (src_regno) && (mode == SImode || mode == SFmode))
4714 return "cstf%I2%U2 %3, %M2, %1, %e0";
4715 }
4716
4717 else if (ZERO_P (src))
4718 {
4719 enum machine_mode mode = GET_MODE (dest);
4720 switch (mode)
4721 {
4722 default:
4723 break;
4724
4725 case QImode:
4726 return "cstb%I2%U2 %., %M2, %1, %e0";
4727
4728 case HImode:
4729 return "csth%I2%U2 %., %M2, %1, %e0";
4730
4731 case SImode:
4732 case SFmode:
4733 return "cst%I2%U2 %., %M2, %1, %e0";
4734 }
4735 }
4736 }
4737
4738 fatal_insn ("bad output_condmove_single operand", insn);
4739 return "";
4740 }
4741
4742 \f
4743 /* Emit the appropriate code to do a comparison, returning the register the
4744 comparison was done in. */
4745
4746 static rtx
4747 frv_emit_comparison (enum rtx_code test, rtx op0, rtx op1)
4748 {
4749 enum machine_mode cc_mode;
4750 rtx cc_reg;
4751
4752 /* Floating point doesn't have comparison against a constant. */
4753 if (GET_MODE (op0) == CC_FPmode && GET_CODE (op1) != REG)
4754 op1 = force_reg (GET_MODE (op0), op1);
4755
4756 /* Possibly disable using anything but a fixed register in order to work
4757 around cse moving comparisons past function calls. */
4758 cc_mode = SELECT_CC_MODE (test, op0, op1);
4759 cc_reg = ((TARGET_ALLOC_CC)
4760 ? gen_reg_rtx (cc_mode)
4761 : gen_rtx_REG (cc_mode,
4762 (cc_mode == CC_FPmode) ? FCC_FIRST : ICC_FIRST));
4763
4764 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
4765 gen_rtx_COMPARE (cc_mode, op0, op1)));
4766
4767 return cc_reg;
4768 }
4769
4770 \f
4771 /* Emit code for a conditional branch. The comparison operands were previously
4772 stored in frv_compare_op0 and frv_compare_op1.
4773
4774 XXX: I originally wanted to add a clobber of a CCR register to use in
4775 conditional execution, but that confuses the rest of the compiler. */
4776
4777 int
4778 frv_emit_cond_branch (enum rtx_code test, rtx label)
4779 {
4780 rtx test_rtx;
4781 rtx label_ref;
4782 rtx if_else;
4783 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4784 enum machine_mode cc_mode = GET_MODE (cc_reg);
4785
4786 /* Branches generate:
4787 (set (pc)
4788 (if_then_else (<test>, <cc_reg>, (const_int 0))
4789 (label_ref <branch_label>)
4790 (pc))) */
4791 label_ref = gen_rtx_LABEL_REF (VOIDmode, label);
4792 test_rtx = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4793 if_else = gen_rtx_IF_THEN_ELSE (cc_mode, test_rtx, label_ref, pc_rtx);
4794 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, if_else));
4795 return TRUE;
4796 }
4797
4798 \f
4799 /* Emit code to set a gpr to 1/0 based on a comparison. The comparison
4800 operands were previously stored in frv_compare_op0 and frv_compare_op1. */
4801
4802 int
4803 frv_emit_scc (enum rtx_code test, rtx target)
4804 {
4805 rtx set;
4806 rtx test_rtx;
4807 rtx clobber;
4808 rtx cr_reg;
4809 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4810
4811 /* SCC instructions generate:
4812 (parallel [(set <target> (<test>, <cc_reg>, (const_int 0))
4813 (clobber (<ccr_reg>))]) */
4814 test_rtx = gen_rtx_fmt_ee (test, SImode, cc_reg, const0_rtx);
4815 set = gen_rtx_SET (VOIDmode, target, test_rtx);
4816
4817 cr_reg = ((TARGET_ALLOC_CC)
4818 ? gen_reg_rtx (CC_CCRmode)
4819 : gen_rtx_REG (CC_CCRmode,
4820 ((GET_MODE (cc_reg) == CC_FPmode)
4821 ? FCR_FIRST
4822 : ICR_FIRST)));
4823
4824 clobber = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4825 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
4826 return TRUE;
4827 }
4828
4829 \f
4830 /* Split a SCC instruction into component parts, returning a SEQUENCE to hold
4831 the separate insns. */
4832
4833 rtx
4834 frv_split_scc (rtx dest, rtx test, rtx cc_reg, rtx cr_reg, HOST_WIDE_INT value)
4835 {
4836 rtx ret;
4837
4838 start_sequence ();
4839
4840 /* Set the appropriate CCR bit. */
4841 emit_insn (gen_rtx_SET (VOIDmode,
4842 cr_reg,
4843 gen_rtx_fmt_ee (GET_CODE (test),
4844 GET_MODE (cr_reg),
4845 cc_reg,
4846 const0_rtx)));
4847
4848 /* Move the value into the destination. */
4849 emit_move_insn (dest, GEN_INT (value));
4850
4851 /* Move 0 into the destination if the test failed. */
4852 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4853 gen_rtx_EQ (GET_MODE (cr_reg),
4854 cr_reg,
4855 const0_rtx),
4856 gen_rtx_SET (VOIDmode, dest, const0_rtx)));
4857
4858 /* Finish up, return sequence. */
4859 ret = get_insns ();
4860 end_sequence ();
4861 return ret;
4862 }
4863
4864 \f
4865 /* Emit the code for a conditional move, return TRUE if we could do the
4866 move. */
4867
4868 int
4869 frv_emit_cond_move (rtx dest, rtx test_rtx, rtx src1, rtx src2)
4870 {
4871 rtx set;
4872 rtx clobber_cc;
4873 rtx test2;
4874 rtx cr_reg;
4875 rtx if_rtx;
4876 enum rtx_code test = GET_CODE (test_rtx);
4877 rtx cc_reg = frv_emit_comparison (test, frv_compare_op0, frv_compare_op1);
4878 enum machine_mode cc_mode = GET_MODE (cc_reg);
4879
4880 /* Conditional move instructions generate:
4881 (parallel [(set <target>
4882 (if_then_else (<test> <cc_reg> (const_int 0))
4883 <src1>
4884 <src2>))
4885 (clobber (<ccr_reg>))]) */
4886
4887 /* Handle various cases of conditional move involving two constants. */
4888 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4889 {
4890 HOST_WIDE_INT value1 = INTVAL (src1);
4891 HOST_WIDE_INT value2 = INTVAL (src2);
4892
4893 /* Having 0 as one of the constants can be done by loading the other
4894 constant, and optionally moving in gr0. */
4895 if (value1 == 0 || value2 == 0)
4896 ;
4897
4898 /* If the first value is within an addi range and also the difference
4899 between the two fits in an addi's range, load up the difference, then
4900 conditionally move in 0, and then unconditionally add the first
4901 value. */
4902 else if (IN_RANGE_P (value1, -2048, 2047)
4903 && IN_RANGE_P (value2 - value1, -2048, 2047))
4904 ;
4905
4906 /* If neither condition holds, just force the constant into a
4907 register. */
4908 else
4909 {
4910 src1 = force_reg (GET_MODE (dest), src1);
4911 src2 = force_reg (GET_MODE (dest), src2);
4912 }
4913 }
4914
4915 /* If one value is a register, ensure the other value is either 0 or a
4916 register. */
4917 else
4918 {
4919 if (GET_CODE (src1) == CONST_INT && INTVAL (src1) != 0)
4920 src1 = force_reg (GET_MODE (dest), src1);
4921
4922 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
4923 src2 = force_reg (GET_MODE (dest), src2);
4924 }
4925
4926 test2 = gen_rtx_fmt_ee (test, cc_mode, cc_reg, const0_rtx);
4927 if_rtx = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), test2, src1, src2);
4928
4929 set = gen_rtx_SET (VOIDmode, dest, if_rtx);
4930
4931 cr_reg = ((TARGET_ALLOC_CC)
4932 ? gen_reg_rtx (CC_CCRmode)
4933 : gen_rtx_REG (CC_CCRmode,
4934 (cc_mode == CC_FPmode) ? FCR_FIRST : ICR_FIRST));
4935
4936 clobber_cc = gen_rtx_CLOBBER (VOIDmode, cr_reg);
4937 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber_cc)));
4938 return TRUE;
4939 }
4940
4941 \f
4942 /* Split a conditional move into constituent parts, returning a SEQUENCE
4943 containing all of the insns. */
4944
4945 rtx
4946 frv_split_cond_move (rtx operands[])
4947 {
4948 rtx dest = operands[0];
4949 rtx test = operands[1];
4950 rtx cc_reg = operands[2];
4951 rtx src1 = operands[3];
4952 rtx src2 = operands[4];
4953 rtx cr_reg = operands[5];
4954 rtx ret;
4955 enum machine_mode cr_mode = GET_MODE (cr_reg);
4956
4957 start_sequence ();
4958
4959 /* Set the appropriate CCR bit. */
4960 emit_insn (gen_rtx_SET (VOIDmode,
4961 cr_reg,
4962 gen_rtx_fmt_ee (GET_CODE (test),
4963 GET_MODE (cr_reg),
4964 cc_reg,
4965 const0_rtx)));
4966
4967 /* Handle various cases of conditional move involving two constants. */
4968 if (GET_CODE (src1) == CONST_INT && GET_CODE (src2) == CONST_INT)
4969 {
4970 HOST_WIDE_INT value1 = INTVAL (src1);
4971 HOST_WIDE_INT value2 = INTVAL (src2);
4972
4973 /* Having 0 as one of the constants can be done by loading the other
4974 constant, and optionally moving in gr0. */
4975 if (value1 == 0)
4976 {
4977 emit_move_insn (dest, src2);
4978 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4979 gen_rtx_NE (cr_mode, cr_reg,
4980 const0_rtx),
4981 gen_rtx_SET (VOIDmode, dest, src1)));
4982 }
4983
4984 else if (value2 == 0)
4985 {
4986 emit_move_insn (dest, src1);
4987 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4988 gen_rtx_EQ (cr_mode, cr_reg,
4989 const0_rtx),
4990 gen_rtx_SET (VOIDmode, dest, src2)));
4991 }
4992
4993 /* If the first value is within an addi range and also the difference
4994 between the two fits in an addi's range, load up the difference, then
4995 conditionally move in 0, and then unconditionally add the first
4996 value. */
4997 else if (IN_RANGE_P (value1, -2048, 2047)
4998 && IN_RANGE_P (value2 - value1, -2048, 2047))
4999 {
5000 rtx dest_si = ((GET_MODE (dest) == SImode)
5001 ? dest
5002 : gen_rtx_SUBREG (SImode, dest, 0));
5003
5004 emit_move_insn (dest_si, GEN_INT (value2 - value1));
5005 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5006 gen_rtx_NE (cr_mode, cr_reg,
5007 const0_rtx),
5008 gen_rtx_SET (VOIDmode, dest_si,
5009 const0_rtx)));
5010 emit_insn (gen_addsi3 (dest_si, dest_si, src1));
5011 }
5012
5013 else
5014 gcc_unreachable ();
5015 }
5016 else
5017 {
5018 /* Emit the conditional move for the test being true if needed. */
5019 if (! rtx_equal_p (dest, src1))
5020 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5021 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5022 gen_rtx_SET (VOIDmode, dest, src1)));
5023
5024 /* Emit the conditional move for the test being false if needed. */
5025 if (! rtx_equal_p (dest, src2))
5026 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5027 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5028 gen_rtx_SET (VOIDmode, dest, src2)));
5029 }
5030
5031 /* Finish up, return sequence. */
5032 ret = get_insns ();
5033 end_sequence ();
5034 return ret;
5035 }
5036
5037 \f
5038 /* Split (set DEST SOURCE), where DEST is a double register and SOURCE is a
5039 memory location that is not known to be dword-aligned. */
5040 void
5041 frv_split_double_load (rtx dest, rtx source)
5042 {
5043 int regno = REGNO (dest);
5044 rtx dest1 = gen_highpart (SImode, dest);
5045 rtx dest2 = gen_lowpart (SImode, dest);
5046 rtx address = XEXP (source, 0);
5047
5048 /* If the address is pre-modified, load the lower-numbered register
5049 first, then load the other register using an integer offset from
5050 the modified base register. This order should always be safe,
5051 since the pre-modification cannot affect the same registers as the
5052 load does.
5053
5054 The situation for other loads is more complicated. Loading one
5055 of the registers could affect the value of ADDRESS, so we must
5056 be careful which order we do them in. */
5057 if (GET_CODE (address) == PRE_MODIFY
5058 || ! refers_to_regno_p (regno, regno + 1, address, NULL))
5059 {
5060 /* It is safe to load the lower-numbered register first. */
5061 emit_move_insn (dest1, change_address (source, SImode, NULL));
5062 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5063 }
5064 else
5065 {
5066 /* ADDRESS is not pre-modified and the address depends on the
5067 lower-numbered register. Load the higher-numbered register
5068 first. */
5069 emit_move_insn (dest2, frv_index_memory (source, SImode, 1));
5070 emit_move_insn (dest1, change_address (source, SImode, NULL));
5071 }
5072 }
5073
5074 /* Split (set DEST SOURCE), where DEST refers to a dword memory location
5075 and SOURCE is either a double register or the constant zero. */
5076 void
5077 frv_split_double_store (rtx dest, rtx source)
5078 {
5079 rtx dest1 = change_address (dest, SImode, NULL);
5080 rtx dest2 = frv_index_memory (dest, SImode, 1);
5081 if (ZERO_P (source))
5082 {
5083 emit_move_insn (dest1, CONST0_RTX (SImode));
5084 emit_move_insn (dest2, CONST0_RTX (SImode));
5085 }
5086 else
5087 {
5088 emit_move_insn (dest1, gen_highpart (SImode, source));
5089 emit_move_insn (dest2, gen_lowpart (SImode, source));
5090 }
5091 }
5092
5093 \f
5094 /* Split a min/max operation returning a SEQUENCE containing all of the
5095 insns. */
5096
5097 rtx
5098 frv_split_minmax (rtx operands[])
5099 {
5100 rtx dest = operands[0];
5101 rtx minmax = operands[1];
5102 rtx src1 = operands[2];
5103 rtx src2 = operands[3];
5104 rtx cc_reg = operands[4];
5105 rtx cr_reg = operands[5];
5106 rtx ret;
5107 enum rtx_code test_code;
5108 enum machine_mode cr_mode = GET_MODE (cr_reg);
5109
5110 start_sequence ();
5111
5112 /* Figure out which test to use. */
5113 switch (GET_CODE (minmax))
5114 {
5115 default:
5116 gcc_unreachable ();
5117
5118 case SMIN: test_code = LT; break;
5119 case SMAX: test_code = GT; break;
5120 case UMIN: test_code = LTU; break;
5121 case UMAX: test_code = GTU; break;
5122 }
5123
5124 /* Issue the compare instruction. */
5125 emit_insn (gen_rtx_SET (VOIDmode,
5126 cc_reg,
5127 gen_rtx_COMPARE (GET_MODE (cc_reg),
5128 src1, src2)));
5129
5130 /* Set the appropriate CCR bit. */
5131 emit_insn (gen_rtx_SET (VOIDmode,
5132 cr_reg,
5133 gen_rtx_fmt_ee (test_code,
5134 GET_MODE (cr_reg),
5135 cc_reg,
5136 const0_rtx)));
5137
5138 /* If we are taking the min/max of a nonzero constant, load that first, and
5139 then do a conditional move of the other value. */
5140 if (GET_CODE (src2) == CONST_INT && INTVAL (src2) != 0)
5141 {
5142 gcc_assert (!rtx_equal_p (dest, src1));
5143
5144 emit_move_insn (dest, src2);
5145 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5146 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5147 gen_rtx_SET (VOIDmode, dest, src1)));
5148 }
5149
5150 /* Otherwise, do each half of the move. */
5151 else
5152 {
5153 /* Emit the conditional move for the test being true if needed. */
5154 if (! rtx_equal_p (dest, src1))
5155 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5156 gen_rtx_NE (cr_mode, cr_reg, const0_rtx),
5157 gen_rtx_SET (VOIDmode, dest, src1)));
5158
5159 /* Emit the conditional move for the test being false if needed. */
5160 if (! rtx_equal_p (dest, src2))
5161 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5162 gen_rtx_EQ (cr_mode, cr_reg, const0_rtx),
5163 gen_rtx_SET (VOIDmode, dest, src2)));
5164 }
5165
5166 /* Finish up, return sequence. */
5167 ret = get_insns ();
5168 end_sequence ();
5169 return ret;
5170 }
5171
5172 \f
5173 /* Split an integer abs operation returning a SEQUENCE containing all of the
5174 insns. */
5175
5176 rtx
5177 frv_split_abs (rtx operands[])
5178 {
5179 rtx dest = operands[0];
5180 rtx src = operands[1];
5181 rtx cc_reg = operands[2];
5182 rtx cr_reg = operands[3];
5183 rtx ret;
5184
5185 start_sequence ();
5186
5187 /* Issue the compare < 0 instruction. */
5188 emit_insn (gen_rtx_SET (VOIDmode,
5189 cc_reg,
5190 gen_rtx_COMPARE (CCmode, src, const0_rtx)));
5191
5192 /* Set the appropriate CCR bit. */
5193 emit_insn (gen_rtx_SET (VOIDmode,
5194 cr_reg,
5195 gen_rtx_fmt_ee (LT, CC_CCRmode, cc_reg, const0_rtx)));
5196
5197 /* Emit the conditional negate if the value is negative. */
5198 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5199 gen_rtx_NE (CC_CCRmode, cr_reg, const0_rtx),
5200 gen_negsi2 (dest, src)));
5201
5202 /* Emit the conditional move for the test being false if needed. */
5203 if (! rtx_equal_p (dest, src))
5204 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
5205 gen_rtx_EQ (CC_CCRmode, cr_reg, const0_rtx),
5206 gen_rtx_SET (VOIDmode, dest, src)));
5207
5208 /* Finish up, return sequence. */
5209 ret = get_insns ();
5210 end_sequence ();
5211 return ret;
5212 }
5213
5214 \f
5215 /* An internal function called by for_each_rtx to clear each register used
5216 in an insn from the HARD_REG_SET passed in DATA. */
5217
5218 static int
5219 frv_clear_registers_used (rtx *ptr, void *data)
5220 {
5221 if (GET_CODE (*ptr) == REG)
5222 {
5223 int regno = REGNO (*ptr);
5224 HARD_REG_SET *p_regs = (HARD_REG_SET *)data;
5225
5226 if (regno < FIRST_PSEUDO_REGISTER)
5227 {
5228 int reg_max = regno + HARD_REGNO_NREGS (regno, GET_MODE (*ptr));
5229
5230 while (regno < reg_max)
5231 {
5232 CLEAR_HARD_REG_BIT (*p_regs, regno);
5233 regno++;
5234 }
5235 }
5236 }
5237
5238 return 0;
5239 }
5240
5241 \f
5242 /* Initialize the extra fields provided by IFCVT_EXTRA_FIELDS. */
5243
5244 /* On the FR-V, we don't have any extra fields per se, but it is a useful hook to
5245 initialize the static storage. */
5246 void
5247 frv_ifcvt_init_extra_fields (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
5248 {
5249 frv_ifcvt.added_insns_list = NULL_RTX;
5250 frv_ifcvt.cur_scratch_regs = 0;
5251 frv_ifcvt.num_nested_cond_exec = 0;
5252 frv_ifcvt.cr_reg = NULL_RTX;
5253 frv_ifcvt.nested_cc_reg = NULL_RTX;
5254 frv_ifcvt.extra_int_cr = NULL_RTX;
5255 frv_ifcvt.extra_fp_cr = NULL_RTX;
5256 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5257 }
5258
5259 \f
5260 /* Internal function to add a potential insn to the list of insns to be inserted
5261 if the conditional execution conversion is successful. */
5262
5263 static void
5264 frv_ifcvt_add_insn (rtx pattern, rtx insn, int before_p)
5265 {
5266 rtx link = alloc_EXPR_LIST (VOIDmode, pattern, insn);
5267
5268 link->jump = before_p; /* Mark to add this before or after insn. */
5269 frv_ifcvt.added_insns_list = alloc_EXPR_LIST (VOIDmode, link,
5270 frv_ifcvt.added_insns_list);
5271
5272 if (TARGET_DEBUG_COND_EXEC)
5273 {
5274 fprintf (stderr,
5275 "\n:::::::::: frv_ifcvt_add_insn: add the following %s insn %d:\n",
5276 (before_p) ? "before" : "after",
5277 (int)INSN_UID (insn));
5278
5279 debug_rtx (pattern);
5280 }
5281 }
5282
5283 \f
5284 /* A C expression to modify the code described by the conditional if
5285 information CE_INFO, possibly updating the tests in TRUE_EXPR, and
5286 FALSE_EXPR for converting if-then and if-then-else code to conditional
5287 instructions. Set either TRUE_EXPR or FALSE_EXPR to a null pointer if the
5288 tests cannot be converted. */
5289
5290 void
5291 frv_ifcvt_modify_tests (ce_if_block_t *ce_info, rtx *p_true, rtx *p_false)
5292 {
5293 basic_block test_bb = ce_info->test_bb; /* test basic block */
5294 basic_block then_bb = ce_info->then_bb; /* THEN */
5295 basic_block else_bb = ce_info->else_bb; /* ELSE or NULL */
5296 basic_block join_bb = ce_info->join_bb; /* join block or NULL */
5297 rtx true_expr = *p_true;
5298 rtx cr;
5299 rtx cc;
5300 rtx nested_cc;
5301 enum machine_mode mode = GET_MODE (true_expr);
5302 int j;
5303 basic_block *bb;
5304 int num_bb;
5305 frv_tmp_reg_t *tmp_reg = &frv_ifcvt.tmp_reg;
5306 rtx check_insn;
5307 rtx sub_cond_exec_reg;
5308 enum rtx_code code;
5309 enum rtx_code code_true;
5310 enum rtx_code code_false;
5311 enum reg_class cc_class;
5312 enum reg_class cr_class;
5313 int cc_first;
5314 int cc_last;
5315 reg_set_iterator rsi;
5316
5317 /* Make sure we are only dealing with hard registers. Also honor the
5318 -mno-cond-exec and -mno-nested-cond-exec switches where
5319 applicable. */
5320 if (!reload_completed || !TARGET_COND_EXEC
5321 || (!TARGET_NESTED_CE && ce_info->pass > 1))
5322 goto fail;
5323
5324 /* Figure out which registers we can allocate for our own purposes. Only
5325 consider registers that are not preserved across function calls and are
5326 not fixed. However, allow the ICC/ICR temporary registers to be allocated
5327 if we did not need to use them in reloading other registers. */
5328 memset (&tmp_reg->regs, 0, sizeof (tmp_reg->regs));
5329 COPY_HARD_REG_SET (tmp_reg->regs, call_used_reg_set);
5330 AND_COMPL_HARD_REG_SET (tmp_reg->regs, fixed_reg_set);
5331 SET_HARD_REG_BIT (tmp_reg->regs, ICC_TEMP);
5332 SET_HARD_REG_BIT (tmp_reg->regs, ICR_TEMP);
5333
5334 /* If this is a nested IF, we need to discover whether the CC registers that
5335 are set/used inside of the block are used anywhere else. If not, we can
5336 change them to be the CC register that is paired with the CR register that
5337 controls the outermost IF block. */
5338 if (ce_info->pass > 1)
5339 {
5340 CLEAR_HARD_REG_SET (frv_ifcvt.nested_cc_ok_rewrite);
5341 for (j = CC_FIRST; j <= CC_LAST; j++)
5342 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5343 {
5344 if (REGNO_REG_SET_P (df_get_live_in (then_bb), j))
5345 continue;
5346
5347 if (else_bb
5348 && REGNO_REG_SET_P (df_get_live_in (else_bb), j))
5349 continue;
5350
5351 if (join_bb
5352 && REGNO_REG_SET_P (df_get_live_in (join_bb), j))
5353 continue;
5354
5355 SET_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j);
5356 }
5357 }
5358
5359 for (j = 0; j < frv_ifcvt.cur_scratch_regs; j++)
5360 frv_ifcvt.scratch_regs[j] = NULL_RTX;
5361
5362 frv_ifcvt.added_insns_list = NULL_RTX;
5363 frv_ifcvt.cur_scratch_regs = 0;
5364
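/* Allocate room for the list of blocks to scan: the THEN and ELSE blocks
plus any additional &&/|| test blocks. */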
5365 bb = (basic_block *) alloca ((2 + ce_info->num_multiple_test_blocks)
5366 * sizeof (basic_block));
5367
5368 if (join_bb)
5369 {
5370 unsigned int regno;
5371
5372 /* Remove anything live at the beginning of the join block from being
5373 available for allocation. */
5374 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (join_bb), 0, regno, rsi)
5375 {
5376 if (regno < FIRST_PSEUDO_REGISTER)
5377 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5378 }
5379 }
5380
5381 /* Add in all of the blocks in multiple &&/|| blocks to be scanned. */
5382 num_bb = 0;
5383 if (ce_info->num_multiple_test_blocks)
5384 {
5385 basic_block multiple_test_bb = ce_info->last_test_bb;
5386
5387 while (multiple_test_bb != test_bb)
5388 {
5389 bb[num_bb++] = multiple_test_bb;
5390 multiple_test_bb = EDGE_PRED (multiple_test_bb, 0)->src;
5391 }
5392 }
5393
5394 /* Add in the THEN and ELSE blocks to be scanned. */
5395 bb[num_bb++] = then_bb;
5396 if (else_bb)
5397 bb[num_bb++] = else_bb;
5398
5399 sub_cond_exec_reg = NULL_RTX;
5400 frv_ifcvt.num_nested_cond_exec = 0;
5401
5402 /* Scan all of the blocks for registers that must not be allocated. */
5403 for (j = 0; j < num_bb; j++)
5404 {
5405 rtx last_insn = BB_END (bb[j]);
5406 rtx insn = BB_HEAD (bb[j]);
5407 unsigned int regno;
5408
5409 if (dump_file)
5410 fprintf (dump_file, "Scanning %s block %d, start %d, end %d\n",
5411 (bb[j] == else_bb) ? "else" : ((bb[j] == then_bb) ? "then" : "test"),
5412 (int) bb[j]->index,
5413 (int) INSN_UID (BB_HEAD (bb[j])),
5414 (int) INSN_UID (BB_END (bb[j])));
5415
5416 /* Anything live at the beginning of the block is obviously unavailable
5417 for allocation. */
5418 EXECUTE_IF_SET_IN_REG_SET (df_get_live_in (bb[j]), 0, regno, rsi)
5419 {
5420 if (regno < FIRST_PSEUDO_REGISTER)
5421 CLEAR_HARD_REG_BIT (tmp_reg->regs, regno);
5422 }
5423
5424 /* Loop through the insns in the block. */
5425 for (;;)
5426 {
5427 /* Mark any new registers that are created as being unavailable for
5428 allocation. Also see if the CC register used in nested IFs can be
5429 reallocated. */
5430 if (INSN_P (insn))
5431 {
5432 rtx pattern;
5433 rtx set;
5434 int skip_nested_if = FALSE;
5435
5436 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5437 (void *)&tmp_reg->regs);
5438
5439 pattern = PATTERN (insn);
5440 if (GET_CODE (pattern) == COND_EXEC)
5441 {
5442 rtx reg = XEXP (COND_EXEC_TEST (pattern), 0);
5443
5444 if (reg != sub_cond_exec_reg)
5445 {
5446 sub_cond_exec_reg = reg;
5447 frv_ifcvt.num_nested_cond_exec++;
5448 }
5449 }
5450
5451 set = single_set_pattern (pattern);
5452 if (set)
5453 {
5454 rtx dest = SET_DEST (set);
5455 rtx src = SET_SRC (set);
5456
5457 if (GET_CODE (dest) == REG)
5458 {
5459 int regno = REGNO (dest);
5460 enum rtx_code src_code = GET_CODE (src);
5461
5462 if (CC_P (regno) && src_code == COMPARE)
5463 skip_nested_if = TRUE;
5464
5465 else if (CR_P (regno)
5466 && (src_code == IF_THEN_ELSE
5467 || COMPARISON_P (src)))
5468 skip_nested_if = TRUE;
5469 }
5470 }
5471
5472 if (! skip_nested_if)
5473 for_each_rtx (&PATTERN (insn), frv_clear_registers_used,
5474 (void *)&frv_ifcvt.nested_cc_ok_rewrite);
5475 }
5476
5477 if (insn == last_insn)
5478 break;
5479
5480 insn = NEXT_INSN (insn);
5481 }
5482 }
5483
5484 /* If this is a nested if, rewrite the CC registers that are available to
5485 include the ones that can be rewritten, to increase the chance of being
5486 able to allocate a paired CC/CR register combination. */
5487 if (ce_info->pass > 1)
5488 {
5489 for (j = CC_FIRST; j <= CC_LAST; j++)
5490 if (TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, j))
5491 SET_HARD_REG_BIT (tmp_reg->regs, j);
5492 else
5493 CLEAR_HARD_REG_BIT (tmp_reg->regs, j);
5494 }
5495
5496 if (dump_file)
5497 {
5498 int num_gprs = 0;
5499 fprintf (dump_file, "Available GPRs: ");
5500
5501 for (j = GPR_FIRST; j <= GPR_LAST; j++)
5502 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5503 {
5504 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5505 if (++num_gprs > GPR_TEMP_NUM+2)
5506 break;
5507 }
5508
5509 fprintf (dump_file, "%s\nAvailable CRs: ",
5510 (num_gprs > GPR_TEMP_NUM+2) ? " ..." : "");
5511
5512 for (j = CR_FIRST; j <= CR_LAST; j++)
5513 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5514 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5515
5516 fputs ("\n", dump_file);
5517
5518 if (ce_info->pass > 1)
5519 {
5520 fprintf (dump_file, "Modifiable CCs: ");
5521 for (j = CC_FIRST; j <= CC_LAST; j++)
5522 if (TEST_HARD_REG_BIT (tmp_reg->regs, j))
5523 fprintf (dump_file, " %d [%s]", j, reg_names[j]);
5524
5525 fprintf (dump_file, "\n%d nested COND_EXEC statements\n",
5526 frv_ifcvt.num_nested_cond_exec);
5527 }
5528 }
5529
5530 /* Allocate the appropriate temporary condition code register. Try to
5531 allocate the ICR/FCR register that corresponds to the ICC/FCC register so
5532 that conditional cmp's can be done. */
5533 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5534 {
5535 cr_class = ICR_REGS;
5536 cc_class = ICC_REGS;
5537 cc_first = ICC_FIRST;
5538 cc_last = ICC_LAST;
5539 }
5540 else if (mode == CC_FPmode)
5541 {
5542 cr_class = FCR_REGS;
5543 cc_class = FCC_REGS;
5544 cc_first = FCC_FIRST;
5545 cc_last = FCC_LAST;
5546 }
5547 else
5548 {
5549 cc_first = cc_last = 0;
5550 cr_class = cc_class = NO_REGS;
5551 }
5552
5553 cc = XEXP (true_expr, 0);
5554 nested_cc = cr = NULL_RTX;
5555 if (cc_class != NO_REGS)
5556 {
5557 /* For nested IFs and &&/||, see if we can find a CC and CR register pair
5558 so we can execute a csubcc/caddcc/cfcmps instruction. */
5559 int cc_regno;
5560
5561 for (cc_regno = cc_first; cc_regno <= cc_last; cc_regno++)
5562 {
5563 int cr_regno = cc_regno - CC_FIRST + CR_FIRST;
5564
5565 if (TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cc_regno)
5566 && TEST_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, cr_regno))
5567 {
5568 frv_ifcvt.tmp_reg.next_reg[ (int)cr_class ] = cr_regno;
5569 cr = frv_alloc_temp_reg (tmp_reg, cr_class, CC_CCRmode, TRUE,
5570 TRUE);
5571
5572 frv_ifcvt.tmp_reg.next_reg[ (int)cc_class ] = cc_regno;
5573 nested_cc = frv_alloc_temp_reg (tmp_reg, cc_class, CCmode,
5574 TRUE, TRUE);
5575 break;
5576 }
5577 }
5578 }
5579
5580 if (! cr)
5581 {
5582 if (dump_file)
5583 fprintf (dump_file, "Could not allocate a CR temporary register\n");
5584
5585 goto fail;
5586 }
5587
5588 if (dump_file)
5589 fprintf (dump_file,
5590 "Will use %s for conditional execution, %s for nested comparisons\n",
5591 reg_names[ REGNO (cr)],
5592 (nested_cc) ? reg_names[ REGNO (nested_cc) ] : "<none>");
5593
5594 /* Set the CCR bit. Note for integer tests, we reverse the condition so that
5595 in an IF-THEN-ELSE sequence, we are testing the TRUE case against the CCR
5596 bit being true. We don't do this for floating point, because of NaNs. */
5597 code = GET_CODE (true_expr);
5598 if (GET_MODE (cc) != CC_FPmode)
5599 {
5600 code = reverse_condition (code);
5601 code_true = EQ;
5602 code_false = NE;
5603 }
5604 else
5605 {
5606 code_true = NE;
5607 code_false = EQ;
5608 }
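
  /* Worked example (illustration only, not from the original sources): for
     an integer test such as (lt (reg:CC icc0) (const_int 0)), the code
     above reverses the condition to GE, so the check insn built below is
     roughly

	 (set (reg:CC_CCR cr) (ge:CC_CCR (reg:CC icc0) (const_int 0)))

     and the THEN block is predicated on (eq cr 0), which holds exactly
     when the original LT was true.  For CC_FPmode the condition is left
     unreversed so that NaN comparisons keep their meaning.  */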
5609
5610 check_insn = gen_rtx_SET (VOIDmode, cr,
5611 gen_rtx_fmt_ee (code, CC_CCRmode, cc, const0_rtx));
5612
5613 /* Record the check insn to be inserted later. */
5614 frv_ifcvt_add_insn (check_insn, BB_END (test_bb), TRUE);
5615
5616 /* Update the tests. */
5617 frv_ifcvt.cr_reg = cr;
5618 frv_ifcvt.nested_cc_reg = nested_cc;
5619 *p_true = gen_rtx_fmt_ee (code_true, CC_CCRmode, cr, const0_rtx);
5620 *p_false = gen_rtx_fmt_ee (code_false, CC_CCRmode, cr, const0_rtx);
5621 return;
5622
5623 /* Fail, don't do this conditional execution. */
5624 fail:
5625 *p_true = NULL_RTX;
5626 *p_false = NULL_RTX;
5627 if (dump_file)
5628 fprintf (dump_file, "Disabling this conditional execution.\n");
5629
5630 return;
5631 }
5632
5633 \f
5634 /* A C expression to modify the code described by the conditional if
5635 information CE_INFO, for the basic block BB, possibly updating the tests in
5636 TRUE_EXPR, and FALSE_EXPR for converting the && and || parts of if-then or
5637 if-then-else code to conditional instructions. Set either TRUE_EXPR or
5638 FALSE_EXPR to a null pointer if the tests cannot be converted. */
5639
5640 /* p_true and p_false are given expressions of the form:
5641
5642 (and (eq:CC_CCR (reg:CC_CCR)
5643 (const_int 0))
5644 (eq:CC (reg:CC)
5645 (const_int 0))) */
5646
5647 void
5648 frv_ifcvt_modify_multiple_tests (ce_if_block_t *ce_info,
5649 basic_block bb,
5650 rtx *p_true,
5651 rtx *p_false)
5652 {
5653 rtx old_true = XEXP (*p_true, 0);
5654 rtx old_false = XEXP (*p_false, 0);
5655 rtx true_expr = XEXP (*p_true, 1);
5656 rtx false_expr = XEXP (*p_false, 1);
5657 rtx test_expr;
5658 rtx old_test;
5659 rtx cr = XEXP (old_true, 0);
5660 rtx check_insn;
5661 rtx new_cr = NULL_RTX;
5662 rtx *p_new_cr = (rtx *)0;
5663 rtx if_else;
5664 rtx compare;
5665 rtx cc;
5666 enum reg_class cr_class;
5667 enum machine_mode mode = GET_MODE (true_expr);
5668 rtx (*logical_func)(rtx, rtx, rtx);
5669
5670 if (TARGET_DEBUG_COND_EXEC)
5671 {
5672 fprintf (stderr,
5673 "\n:::::::::: frv_ifcvt_modify_multiple_tests, before modification for %s\ntrue insn:\n",
5674 ce_info->and_and_p ? "&&" : "||");
5675
5676 debug_rtx (*p_true);
5677
5678 fputs ("\nfalse insn:\n", stderr);
5679 debug_rtx (*p_false);
5680 }
5681
5682 if (!TARGET_MULTI_CE)
5683 goto fail;
5684
5685 if (GET_CODE (cr) != REG)
5686 goto fail;
5687
5688 if (mode == CCmode || mode == CC_UNSmode || mode == CC_NZmode)
5689 {
5690 cr_class = ICR_REGS;
5691 p_new_cr = &frv_ifcvt.extra_int_cr;
5692 }
5693 else if (mode == CC_FPmode)
5694 {
5695 cr_class = FCR_REGS;
5696 p_new_cr = &frv_ifcvt.extra_fp_cr;
5697 }
5698 else
5699 goto fail;
5700
5701 /* Allocate a temp CR, reusing a previously allocated temp CR if we have 3 or
5702 more &&/|| tests. */
5703 new_cr = *p_new_cr;
5704 if (! new_cr)
5705 {
5706 new_cr = *p_new_cr = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, cr_class,
5707 CC_CCRmode, TRUE, TRUE);
5708 if (! new_cr)
5709 goto fail;
5710 }
5711
5712 if (ce_info->and_and_p)
5713 {
5714 old_test = old_false;
5715 test_expr = true_expr;
5716 logical_func = (GET_CODE (old_true) == EQ) ? gen_andcr : gen_andncr;
5717 *p_true = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5718 *p_false = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5719 }
5720 else
5721 {
5722 old_test = old_false;
5723 test_expr = false_expr;
5724 logical_func = (GET_CODE (old_false) == EQ) ? gen_orcr : gen_orncr;
5725 *p_true = gen_rtx_EQ (CC_CCRmode, cr, const0_rtx);
5726 *p_false = gen_rtx_NE (CC_CCRmode, cr, const0_rtx);
5727 }
5728
5729 /* First add the andcr/andncr/orcr/orncr, which will be added after the
5730 conditional check instruction, due to frv_ifcvt_add_insn being a LIFO
5731 stack. */
5732 frv_ifcvt_add_insn ((*logical_func) (cr, cr, new_cr), BB_END (bb), TRUE);
5733
5734 /* Now add the conditional check insn. */
5735 cc = XEXP (test_expr, 0);
5736 compare = gen_rtx_fmt_ee (GET_CODE (test_expr), CC_CCRmode, cc, const0_rtx);
5737 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, old_test, compare, const0_rtx);
5738
5739 check_insn = gen_rtx_SET (VOIDmode, new_cr, if_else);
5740
5741 /* Add the new check insn to the list of check insns that need to be
5742 inserted. */
5743 frv_ifcvt_add_insn (check_insn, BB_END (bb), TRUE);
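
  /* Illustration (not in the original sources): for "if (a && b)" the two
     insns queued above appear at BB_END (bb) in this order:

	 (set new_cr (if_then_else <old CR test> <new compare> (const_int 0)))
	 andcr cr, cr, new_cr

     i.e. the second condition is evaluated under the existing CR test and
     its result is folded back into CR; andncr, orcr or orncr is used
     instead for the other test/sense combinations chosen above.  */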
5744
5745 if (TARGET_DEBUG_COND_EXEC)
5746 {
5747 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, after modification\ntrue insn:\n",
5748 stderr);
5749
5750 debug_rtx (*p_true);
5751
5752 fputs ("\nfalse insn:\n", stderr);
5753 debug_rtx (*p_false);
5754 }
5755
5756 return;
5757
5758 fail:
5759 *p_true = *p_false = NULL_RTX;
5760
5761 /* If we allocated a CR register, release it. */
5762 if (new_cr)
5763 {
5764 CLEAR_HARD_REG_BIT (frv_ifcvt.tmp_reg.regs, REGNO (new_cr));
5765 *p_new_cr = NULL_RTX;
5766 }
5767
5768 if (TARGET_DEBUG_COND_EXEC)
5769 fputs ("\n:::::::::: frv_ifcvt_modify_multiple_tests, failed.\n", stderr);
5770
5771 return;
5772 }
5773
5774 \f
5775 /* Return a register which will be loaded with a value if an IF block is
5776 converted to conditional execution. This is used to rewrite instructions
5777 that use constants to ones that just use registers. */
5778
5779 static rtx
5780 frv_ifcvt_load_value (rtx value, rtx insn ATTRIBUTE_UNUSED)
5781 {
5782 int num_alloc = frv_ifcvt.cur_scratch_regs;
5783 int i;
5784 rtx reg;
5785
5786 /* We know gr0 == 0, so replace any errant uses. */
5787 if (value == const0_rtx)
5788 return gen_rtx_REG (SImode, GPR_FIRST);
5789
5790 /* First search all registers currently loaded to see if we have an
5791 applicable constant. */
5792 if (CONSTANT_P (value)
5793 || (GET_CODE (value) == REG && REGNO (value) == LR_REGNO))
5794 {
5795 for (i = 0; i < num_alloc; i++)
5796 {
5797 if (rtx_equal_p (SET_SRC (frv_ifcvt.scratch_regs[i]), value))
5798 return SET_DEST (frv_ifcvt.scratch_regs[i]);
5799 }
5800 }
5801
5802 /* Have we exhausted the number of registers available? */
5803 if (num_alloc >= GPR_TEMP_NUM)
5804 {
5805 if (dump_file)
5806 fprintf (dump_file, "Too many temporary registers allocated\n");
5807
5808 return NULL_RTX;
5809 }
5810
5811 /* Allocate the new register. */
5812 reg = frv_alloc_temp_reg (&frv_ifcvt.tmp_reg, GPR_REGS, SImode, TRUE, TRUE);
5813 if (! reg)
5814 {
5815 if (dump_file)
5816 fputs ("Could not find a scratch register\n", dump_file);
5817
5818 return NULL_RTX;
5819 }
5820
5821 frv_ifcvt.cur_scratch_regs++;
5822 frv_ifcvt.scratch_regs[num_alloc] = gen_rtx_SET (VOIDmode, reg, value);
5823
5824 if (dump_file)
5825 {
5826 if (GET_CODE (value) == CONST_INT)
5827 fprintf (dump_file, "Register %s will hold %ld\n",
5828 reg_names[ REGNO (reg)], (long)INTVAL (value));
5829
5830 else if (GET_CODE (value) == REG && REGNO (value) == LR_REGNO)
5831 fprintf (dump_file, "Register %s will hold LR\n",
5832 reg_names[ REGNO (reg)]);
5833
5834 else
5835 fprintf (dump_file, "Register %s will hold a saved value\n",
5836 reg_names[ REGNO (reg)]);
5837 }
5838
5839 return reg;
5840 }
5841
5842 \f
5843 /* Update a MEM used in conditional code that might contain an offset, putting
5844    the offset into a scratch register so that the conditional load/store
5845    operations can be used.  This function returns the original MEM if it is
5846    already valid for use in conditional code, NULL if we cannot load the offset
5847    into a temporary register, or the new MEM if the rewrite succeeded.  */
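
/* For illustration (not part of the original sources): a conditional store
   to (mem:SI (plus:SI (reg:SI gr4) (const_int 100))) is rewritten so that
   the offset is first loaded into a scratch GPR by frv_ifcvt_load_value,
   giving (mem:SI (plus:SI (reg:SI gr4) (reg:SI <scratch>))), a form the
   conditional load/store patterns accept.  */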
5848
5849 static rtx
5850 frv_ifcvt_rewrite_mem (rtx mem, enum machine_mode mode, rtx insn)
5851 {
5852 rtx addr = XEXP (mem, 0);
5853
5854 if (!frv_legitimate_address_p (mode, addr, reload_completed, TRUE, FALSE))
5855 {
5856 if (GET_CODE (addr) == PLUS)
5857 {
5858 rtx addr_op0 = XEXP (addr, 0);
5859 rtx addr_op1 = XEXP (addr, 1);
5860
5861 if (GET_CODE (addr_op0) == REG && CONSTANT_P (addr_op1))
5862 {
5863 rtx reg = frv_ifcvt_load_value (addr_op1, insn);
5864 if (!reg)
5865 return NULL_RTX;
5866
5867 addr = gen_rtx_PLUS (Pmode, addr_op0, reg);
5868 }
5869
5870 else
5871 return NULL_RTX;
5872 }
5873
5874 else if (CONSTANT_P (addr))
5875 addr = frv_ifcvt_load_value (addr, insn);
5876
5877 else
5878 return NULL_RTX;
5879
5880 if (addr == NULL_RTX)
5881 return NULL_RTX;
5882
5883 else if (XEXP (mem, 0) != addr)
5884 return change_address (mem, mode, addr);
5885 }
5886
5887 return mem;
5888 }
5889
5890 \f
5891 /* Given a PATTERN, return a SET expression if this PATTERN has only a single
5892    SET, possibly conditionally executed.  The pattern may also contain CLOBBERs and USEs.  */
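
/* Example (illustration only): given

       (cond_exec (ne (reg cr3) (const_int 0))
	  (parallel [(set (reg gr5) (reg gr6))
		     (clobber (reg icc0))]))

   this returns the inner SET, whereas a PARALLEL containing two SETs
   returns 0.  */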
5893
5894 static rtx
5895 single_set_pattern (rtx pattern)
5896 {
5897 rtx set;
5898 int i;
5899
5900 if (GET_CODE (pattern) == COND_EXEC)
5901 pattern = COND_EXEC_CODE (pattern);
5902
5903 if (GET_CODE (pattern) == SET)
5904 return pattern;
5905
5906 else if (GET_CODE (pattern) == PARALLEL)
5907 {
5908 for (i = 0, set = 0; i < XVECLEN (pattern, 0); i++)
5909 {
5910 rtx sub = XVECEXP (pattern, 0, i);
5911
5912 switch (GET_CODE (sub))
5913 {
5914 case USE:
5915 case CLOBBER:
5916 break;
5917
5918 case SET:
5919 if (set)
5920 return 0;
5921 else
5922 set = sub;
5923 break;
5924
5925 default:
5926 return 0;
5927 }
5928 }
5929 return set;
5930 }
5931
5932 return 0;
5933 }
5934
5935 \f
5936 /* A C expression to modify the code described by the conditional if
5937 information CE_INFO with the new PATTERN in INSN. If PATTERN is a null
5938 pointer after the IFCVT_MODIFY_INSN macro executes, it is assumed that that
5939 insn cannot be converted to be executed conditionally. */
5940
5941 rtx
5942 frv_ifcvt_modify_insn (ce_if_block_t *ce_info,
5943 rtx pattern,
5944 rtx insn)
5945 {
5946 rtx orig_ce_pattern = pattern;
5947 rtx set;
5948 rtx op0;
5949 rtx op1;
5950 rtx test;
5951
5952 gcc_assert (GET_CODE (pattern) == COND_EXEC);
5953
5954 test = COND_EXEC_TEST (pattern);
5955 if (GET_CODE (test) == AND)
5956 {
5957 rtx cr = frv_ifcvt.cr_reg;
5958 rtx test_reg;
5959
5960 op0 = XEXP (test, 0);
5961 if (! rtx_equal_p (cr, XEXP (op0, 0)))
5962 goto fail;
5963
5964 op1 = XEXP (test, 1);
5965 test_reg = XEXP (op1, 0);
5966 if (GET_CODE (test_reg) != REG)
5967 goto fail;
5968
5969 /* Is this the first nested if block in this sequence? If so, generate
5970 an andcr or andncr. */
5971 if (! frv_ifcvt.last_nested_if_cr)
5972 {
5973 rtx and_op;
5974
5975 frv_ifcvt.last_nested_if_cr = test_reg;
5976 if (GET_CODE (op0) == NE)
5977 and_op = gen_andcr (test_reg, cr, test_reg);
5978 else
5979 and_op = gen_andncr (test_reg, cr, test_reg);
5980
5981 frv_ifcvt_add_insn (and_op, insn, TRUE);
5982 }
5983
5984 /* If this isn't the first statement in the nested if sequence, see if we
5985 are dealing with the same register. */
5986 else if (! rtx_equal_p (test_reg, frv_ifcvt.last_nested_if_cr))
5987 goto fail;
5988
5989 COND_EXEC_TEST (pattern) = test = op1;
5990 }
5991
5992 /* If this isn't a nested if, reset state variables. */
5993 else
5994 {
5995 frv_ifcvt.last_nested_if_cr = NULL_RTX;
5996 }
5997
5998 set = single_set_pattern (pattern);
5999 if (set)
6000 {
6001 rtx dest = SET_DEST (set);
6002 rtx src = SET_SRC (set);
6003 enum machine_mode mode = GET_MODE (dest);
6004
6005 /* Check for normal binary operators. */
6006 if (mode == SImode && ARITHMETIC_P (src))
6007 {
6008 op0 = XEXP (src, 0);
6009 op1 = XEXP (src, 1);
6010
6011 if (integer_register_operand (op0, SImode) && CONSTANT_P (op1))
6012 {
6013 op1 = frv_ifcvt_load_value (op1, insn);
6014 if (op1)
6015 COND_EXEC_CODE (pattern)
6016 = gen_rtx_SET (VOIDmode, dest, gen_rtx_fmt_ee (GET_CODE (src),
6017 GET_MODE (src),
6018 op0, op1));
6019 else
6020 goto fail;
6021 }
6022 }
6023
6024         /* For a multiply by a constant, we need to handle the sign extension
6025            correctly.  Add a USE of the value after the multiply to prevent flow
6026            from cratering because only one register out of the two is used.  */
6027 else if (mode == DImode && GET_CODE (src) == MULT)
6028 {
6029 op0 = XEXP (src, 0);
6030 op1 = XEXP (src, 1);
6031 if (GET_CODE (op0) == SIGN_EXTEND && GET_CODE (op1) == CONST_INT)
6032 {
6033 op1 = frv_ifcvt_load_value (op1, insn);
6034 if (op1)
6035 {
6036 op1 = gen_rtx_SIGN_EXTEND (DImode, op1);
6037 COND_EXEC_CODE (pattern)
6038 = gen_rtx_SET (VOIDmode, dest,
6039 gen_rtx_MULT (DImode, op0, op1));
6040 }
6041 else
6042 goto fail;
6043 }
6044
6045 frv_ifcvt_add_insn (gen_use (dest), insn, FALSE);
6046 }
6047
6048 /* If we are just loading a constant created for a nested conditional
6049 execution statement, just load the constant without any conditional
6050 execution, since we know that the constant will not interfere with any
6051 other registers. */
6052 else if (frv_ifcvt.scratch_insns_bitmap
6053 && bitmap_bit_p (frv_ifcvt.scratch_insns_bitmap,
6054 INSN_UID (insn))
6055 && REG_P (SET_DEST (set))
6056 /* We must not unconditionally set a scratch reg chosen
6057 for a nested if-converted block if its incoming
6058 value from the TEST block (or the result of the THEN
6059 branch) could/should propagate to the JOIN block.
6060 It suffices to test whether the register is live at
6061 the JOIN point: if it's live there, we can infer
6062 that we set it in the former JOIN block of the
6063 nested if-converted block (otherwise it wouldn't
6064 have been available as a scratch register), and it
6065 is either propagated through or set in the other
6066 conditional block. It's probably not worth trying
6067 to catch the latter case, and it could actually
6068 limit scheduling of the combined block quite
6069 severely. */
6070 && ce_info->join_bb
6071 && ! (REGNO_REG_SET_P (df_get_live_in (ce_info->join_bb),
6072 REGNO (SET_DEST (set))))
6073 /* Similarly, we must not unconditionally set a reg
6074 used as scratch in the THEN branch if the same reg
6075 is live in the ELSE branch. */
6076 && (! ce_info->else_bb
6077 || BLOCK_FOR_INSN (insn) == ce_info->else_bb
6078 || ! (REGNO_REG_SET_P (df_get_live_in (ce_info->else_bb),
6079 REGNO (SET_DEST (set))))))
6080 pattern = set;
6081
6082 else if (mode == QImode || mode == HImode || mode == SImode
6083 || mode == SFmode)
6084 {
6085 int changed_p = FALSE;
6086
6087 /* Check for just loading up a constant */
6088 if (CONSTANT_P (src) && integer_register_operand (dest, mode))
6089 {
6090 src = frv_ifcvt_load_value (src, insn);
6091 if (!src)
6092 goto fail;
6093
6094 changed_p = TRUE;
6095 }
6096
6097 /* See if we need to fix up stores */
6098 if (GET_CODE (dest) == MEM)
6099 {
6100 rtx new_mem = frv_ifcvt_rewrite_mem (dest, mode, insn);
6101
6102 if (!new_mem)
6103 goto fail;
6104
6105 else if (new_mem != dest)
6106 {
6107 changed_p = TRUE;
6108 dest = new_mem;
6109 }
6110 }
6111
6112 /* See if we need to fix up loads */
6113 if (GET_CODE (src) == MEM)
6114 {
6115 rtx new_mem = frv_ifcvt_rewrite_mem (src, mode, insn);
6116
6117 if (!new_mem)
6118 goto fail;
6119
6120 else if (new_mem != src)
6121 {
6122 changed_p = TRUE;
6123 src = new_mem;
6124 }
6125 }
6126
6127 /* If either src or destination changed, redo SET. */
6128 if (changed_p)
6129 COND_EXEC_CODE (pattern) = gen_rtx_SET (VOIDmode, dest, src);
6130 }
6131
6132 /* Rewrite a nested set cccr in terms of IF_THEN_ELSE. Also deal with
6133 rewriting the CC register to be the same as the paired CC/CR register
6134 for nested ifs. */
6135 else if (mode == CC_CCRmode && COMPARISON_P (src))
6136 {
6137 int regno = REGNO (XEXP (src, 0));
6138 rtx if_else;
6139
6140 if (ce_info->pass > 1
6141 && regno != (int)REGNO (frv_ifcvt.nested_cc_reg)
6142 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite, regno))
6143 {
6144 src = gen_rtx_fmt_ee (GET_CODE (src),
6145 CC_CCRmode,
6146 frv_ifcvt.nested_cc_reg,
6147 XEXP (src, 1));
6148 }
6149
6150 if_else = gen_rtx_IF_THEN_ELSE (CC_CCRmode, test, src, const0_rtx);
6151 pattern = gen_rtx_SET (VOIDmode, dest, if_else);
6152 }
6153
6154 /* Remap a nested compare instruction to use the paired CC/CR reg. */
6155 else if (ce_info->pass > 1
6156 && GET_CODE (dest) == REG
6157 && CC_P (REGNO (dest))
6158 && REGNO (dest) != REGNO (frv_ifcvt.nested_cc_reg)
6159 && TEST_HARD_REG_BIT (frv_ifcvt.nested_cc_ok_rewrite,
6160 REGNO (dest))
6161 && GET_CODE (src) == COMPARE)
6162 {
6163 PUT_MODE (frv_ifcvt.nested_cc_reg, GET_MODE (dest));
6164 COND_EXEC_CODE (pattern)
6165 = gen_rtx_SET (VOIDmode, frv_ifcvt.nested_cc_reg, copy_rtx (src));
6166 }
6167 }
6168
6169 if (TARGET_DEBUG_COND_EXEC)
6170 {
6171 rtx orig_pattern = PATTERN (insn);
6172
6173 PATTERN (insn) = pattern;
6174 fprintf (stderr,
6175 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn after modification:\n",
6176 ce_info->pass);
6177
6178 debug_rtx (insn);
6179 PATTERN (insn) = orig_pattern;
6180 }
6181
6182 return pattern;
6183
6184 fail:
6185 if (TARGET_DEBUG_COND_EXEC)
6186 {
6187 rtx orig_pattern = PATTERN (insn);
6188
6189 PATTERN (insn) = orig_ce_pattern;
6190 fprintf (stderr,
6191 "\n:::::::::: frv_ifcvt_modify_insn: pass = %d, insn could not be modified:\n",
6192 ce_info->pass);
6193
6194 debug_rtx (insn);
6195 PATTERN (insn) = orig_pattern;
6196 }
6197
6198 return NULL_RTX;
6199 }
6200
6201 \f
6202 /* A C expression to perform any final machine dependent modifications in
6203 converting code to conditional execution in the code described by the
6204 conditional if information CE_INFO. */
6205
6206 void
6207 frv_ifcvt_modify_final (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6208 {
6209 rtx existing_insn;
6210 rtx check_insn;
6211 rtx p = frv_ifcvt.added_insns_list;
6212 int i;
6213
6214 /* Loop inserting the check insns. The last check insn is the first test,
6215 and is the appropriate place to insert constants. */
6216 gcc_assert (p);
6217
6218 do
6219 {
6220 rtx check_and_insert_insns = XEXP (p, 0);
6221 rtx old_p = p;
6222
6223 check_insn = XEXP (check_and_insert_insns, 0);
6224 existing_insn = XEXP (check_and_insert_insns, 1);
6225 p = XEXP (p, 1);
6226
6227 /* The jump bit is used to say that the new insn is to be inserted BEFORE
6228         the existing insn; otherwise it is to be inserted AFTER.  */
6229 if (check_and_insert_insns->jump)
6230 {
6231 emit_insn_before (check_insn, existing_insn);
6232 check_and_insert_insns->jump = 0;
6233 }
6234 else
6235 emit_insn_after (check_insn, existing_insn);
6236
6237 free_EXPR_LIST_node (check_and_insert_insns);
6238 free_EXPR_LIST_node (old_p);
6239 }
6240 while (p != NULL_RTX);
6241
6242   /* Load up any constants needed into temporary GPRs.  */
6243 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6244 {
6245 rtx insn = emit_insn_before (frv_ifcvt.scratch_regs[i], existing_insn);
6246 if (! frv_ifcvt.scratch_insns_bitmap)
6247 frv_ifcvt.scratch_insns_bitmap = BITMAP_ALLOC (NULL);
6248 bitmap_set_bit (frv_ifcvt.scratch_insns_bitmap, INSN_UID (insn));
6249 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6250 }
6251
6252 frv_ifcvt.added_insns_list = NULL_RTX;
6253 frv_ifcvt.cur_scratch_regs = 0;
6254 }
6255
6256 \f
6257 /* A C expression to cancel any machine dependent modifications in converting
6258 code to conditional execution in the code described by the conditional if
6259 information CE_INFO. */
6260
6261 void
6262 frv_ifcvt_modify_cancel (ce_if_block_t *ce_info ATTRIBUTE_UNUSED)
6263 {
6264 int i;
6265 rtx p = frv_ifcvt.added_insns_list;
6266
6267 /* Loop freeing up the EXPR_LIST's allocated. */
6268 while (p != NULL_RTX)
6269 {
6270 rtx check_and_jump = XEXP (p, 0);
6271 rtx old_p = p;
6272
6273 p = XEXP (p, 1);
6274 free_EXPR_LIST_node (check_and_jump);
6275 free_EXPR_LIST_node (old_p);
6276 }
6277
6278 /* Release any temporary gprs allocated. */
6279 for (i = 0; i < frv_ifcvt.cur_scratch_regs; i++)
6280 frv_ifcvt.scratch_regs[i] = NULL_RTX;
6281
6282 frv_ifcvt.added_insns_list = NULL_RTX;
6283 frv_ifcvt.cur_scratch_regs = 0;
6284 return;
6285 }
6286 \f
6287 /* A C expression for the size in bytes of the trampoline, as an integer.
6288 The template is:
6289
6290 setlo #0, <jmp_reg>
6291 setlo #0, <static_chain>
6292 sethi #0, <jmp_reg>
6293 sethi #0, <static_chain>
6294 jmpl @(gr0,<jmp_reg>) */
6295
6296 int
6297 frv_trampoline_size (void)
6298 {
6299 if (TARGET_FDPIC)
6300 /* Allocate room for the function descriptor and the lddi
6301 instruction. */
6302 return 8 + 6 * 4;
6303 return 5 /* instructions */ * 4 /* instruction size. */;
6304 }
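
/* In other words, the trampoline occupies 5 * 4 = 20 bytes normally, and
   8 + 6 * 4 = 32 bytes when TARGET_FDPIC (8 bytes for the function
   descriptor plus 24 bytes of code).  */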
6305
6306 \f
6307 /* A C statement to initialize the variable parts of a trampoline. ADDR is an
6308 RTX for the address of the trampoline; FNADDR is an RTX for the address of
6309 the nested function; STATIC_CHAIN is an RTX for the static chain value that
6310 should be passed to the function when it is called.
6311
6312 The template is:
6313
6314 setlo #0, <jmp_reg>
6315 setlo #0, <static_chain>
6316 sethi #0, <jmp_reg>
6317 sethi #0, <static_chain>
6318 jmpl @(gr0,<jmp_reg>) */
6319
6320 void
6321 frv_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
6322 {
6323 rtx sc_reg = force_reg (Pmode, static_chain);
6324
6325 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
6326 FALSE, VOIDmode, 4,
6327 addr, Pmode,
6328 GEN_INT (frv_trampoline_size ()), SImode,
6329 fnaddr, Pmode,
6330 sc_reg, Pmode);
6331 }
6332
6333 \f
6334 /* Many machines have some registers that cannot be copied directly to or from
6335 memory or even from other types of registers. An example is the `MQ'
6336 register, which on most machines, can only be copied to or from general
6337 registers, but not memory. Some machines allow copying all registers to and
6338 from memory, but require a scratch register for stores to some memory
6339 locations (e.g., those with symbolic address on the RT, and those with
6340 certain symbolic address on the SPARC when compiling PIC). In some cases,
6341 both an intermediate and a scratch register are required.
6342
6343 You should define these macros to indicate to the reload phase that it may
6344 need to allocate at least one register for a reload in addition to the
6345 register to contain the data. Specifically, if copying X to a register
6346 RCLASS in MODE requires an intermediate register, you should define
6347 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
6348 whose registers can be used as intermediate registers or scratch registers.
6349
6350 If copying a register RCLASS in MODE to X requires an intermediate or scratch
6351 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
6352 largest register class required. If the requirements for input and output
6353 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
6354 instead of defining both macros identically.
6355
6356 The values returned by these macros are often `GENERAL_REGS'. Return
6357 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
6358 to or from a register of RCLASS in MODE without requiring a scratch register.
6359 Do not define this macro if it would always return `NO_REGS'.
6360
6361 If a scratch register is required (either with or without an intermediate
6362 register), you should define patterns for `reload_inM' or `reload_outM', as
6363    required.  These patterns, which will normally be implemented with a
6364 `define_expand', should be similar to the `movM' patterns, except that
6365 operand 2 is the scratch register.
6366
6367 Define constraints for the reload register and scratch register that contain
6368 a single register class. If the original reload register (whose class is
6369 RCLASS) can meet the constraint given in the pattern, the value returned by
6370 these macros is used for the class of the scratch register. Otherwise, two
6371 additional reload registers are required. Their classes are obtained from
6372 the constraints in the insn pattern.
6373
6374 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
6375 either be in a hard register or in memory. Use `true_regnum' to find out;
6376 it will return -1 if the pseudo is in memory and the hard register number if
6377 it is in a register.
6378
6379 These macros should not be used in the case where a particular class of
6380 registers can only be copied to memory and not to another class of
6381 registers. In that case, secondary reload registers are not needed and
6382 would not be helpful. Instead, a stack location must be used to perform the
6383 copy and the `movM' pattern should use memory as an intermediate storage.
6384 This case often occurs between floating-point and general registers. */
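
/* Summary of the FRV cases handled below (for illustration): moving an
   accumulator or accumulator guard value into a general-register class
   needs an FPR intermediate, loading a nonzero constant into an FPR class
   goes through a GPR, the condition-code, condition, LCR and LR classes
   are reached through GPRs, and the accumulator classes are reached
   through FPRs; everything else needs no secondary reload register.  */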
6385
6386 enum reg_class
6387 frv_secondary_reload_class (enum reg_class rclass,
6388 enum machine_mode mode ATTRIBUTE_UNUSED,
6389 rtx x)
6390 {
6391 enum reg_class ret;
6392
6393 switch (rclass)
6394 {
6395 default:
6396 ret = NO_REGS;
6397 break;
6398
6399 /* Accumulators/Accumulator guard registers need to go through floating
6400 point registers. */
6401 case QUAD_REGS:
6402 case EVEN_REGS:
6403 case GPR_REGS:
6404 ret = NO_REGS;
6405 if (x && GET_CODE (x) == REG)
6406 {
6407 int regno = REGNO (x);
6408
6409 if (ACC_P (regno) || ACCG_P (regno))
6410 ret = FPR_REGS;
6411 }
6412 break;
6413
6414 /* Nonzero constants should be loaded into an FPR through a GPR. */
6415 case QUAD_FPR_REGS:
6416 case FEVEN_REGS:
6417 case FPR_REGS:
6418 if (x && CONSTANT_P (x) && !ZERO_P (x))
6419 ret = GPR_REGS;
6420 else
6421 ret = NO_REGS;
6422 break;
6423
6424 /* All of these types need gpr registers. */
6425 case ICC_REGS:
6426 case FCC_REGS:
6427 case CC_REGS:
6428 case ICR_REGS:
6429 case FCR_REGS:
6430 case CR_REGS:
6431 case LCR_REG:
6432 case LR_REG:
6433 ret = GPR_REGS;
6434 break;
6435
6436 /* The accumulators need fpr registers. */
6437 case ACC_REGS:
6438 case EVEN_ACC_REGS:
6439 case QUAD_ACC_REGS:
6440 case ACCG_REGS:
6441 ret = FPR_REGS;
6442 break;
6443 }
6444
6445 return ret;
6446 }
6447
6448 /* This hook exists to catch the case where secondary_reload_class() is
6449 called from init_reg_autoinc() in regclass.c - before the reload optabs
6450 have been initialised. */
6451
6452 static enum reg_class
6453 frv_secondary_reload (bool in_p, rtx x, enum reg_class reload_class,
6454 enum machine_mode reload_mode,
6455 secondary_reload_info * sri)
6456 {
6457 enum reg_class rclass = NO_REGS;
6458
6459 if (sri->prev_sri && sri->prev_sri->t_icode != CODE_FOR_nothing)
6460 {
6461 sri->icode = sri->prev_sri->t_icode;
6462 return NO_REGS;
6463 }
6464
6465 rclass = frv_secondary_reload_class (reload_class, reload_mode, x);
6466
6467 if (rclass != NO_REGS)
6468 {
6469 enum insn_code icode = (in_p ? reload_in_optab[(int) reload_mode]
6470 : reload_out_optab[(int) reload_mode]);
6471 if (icode == 0)
6472 {
6473               /* This happens when the reload_[in|out]_optabs have
6474 not been initialised. */
6475 sri->t_icode = CODE_FOR_nothing;
6476 return rclass;
6477 }
6478 }
6479
6480 /* Fall back to the default secondary reload handler. */
6481 return default_secondary_reload (in_p, x, reload_class, reload_mode, sri);
6482
6483 }
6484 \f
6485 /* A C expression whose value is nonzero if pseudos that have been assigned to
6486 registers of class RCLASS would likely be spilled because registers of RCLASS
6487 are needed for spill registers.
6488
6489 The default value of this macro returns 1 if RCLASS has exactly one register
6490 and zero otherwise. On most machines, this default should be used. Only
6491 define this macro to some other expression if pseudo allocated by
6492    define this macro to some other expression if pseudos allocated by
6493 for spill registers. If this macro returns nonzero for those classes, those
6494 pseudos will only be allocated by `global.c', which knows how to reallocate
6495 the pseudo to another register. If there would not be another register
6496 available for reallocation, you should not change the definition of this
6497 macro since the only effect of such a definition would be to slow down
6498 register allocation. */
6499
6500 int
6501 frv_class_likely_spilled_p (enum reg_class rclass)
6502 {
6503 switch (rclass)
6504 {
6505 default:
6506 break;
6507
6508 case GR8_REGS:
6509 case GR9_REGS:
6510 case GR89_REGS:
6511 case FDPIC_FPTR_REGS:
6512 case FDPIC_REGS:
6513 case ICC_REGS:
6514 case FCC_REGS:
6515 case CC_REGS:
6516 case ICR_REGS:
6517 case FCR_REGS:
6518 case CR_REGS:
6519 case LCR_REG:
6520 case LR_REG:
6521 case SPR_REGS:
6522 case QUAD_ACC_REGS:
6523 case EVEN_ACC_REGS:
6524 case ACC_REGS:
6525 case ACCG_REGS:
6526 return TRUE;
6527 }
6528
6529 return FALSE;
6530 }
6531
6532 \f
6533 /* An expression for the alignment of a structure field FIELD if the
6534 alignment computed in the usual way is COMPUTED. GCC uses this
6535 value instead of the value in `BIGGEST_ALIGNMENT' or
6536 `BIGGEST_FIELD_ALIGNMENT', if defined, for structure fields only. */
6537
6538 /* A bit-field is declared with type char, short, long or long long, and its
6539    maximum width is the number of bits in its declared type.
6540 
6541    Each bit-field is assigned to the lowest-addressed storage unit that is
6542    large enough to hold a field of that type.
6543 
6544    Consecutive bit-fields are packed into consecutive bits of the same
6545    storage unit, taking the field type into account, beginning with the MSB
6546    and continuing toward the LSB.
6547 
6548    If a field would straddle a boundary of its bit-field type, it is
6549    instead placed at the next boundary that is suitably aligned for that
6550    type.
6551 
6552    A bit-field declared with a width of 0 forces the next field into a new
6553    storage unit.
6554 
6555    e.g.)
6556 struct {
6557 int a:2;
6558 int b:6;
6559 char c:4;
6560 int d:10;
6561 int :0;
6562 int f:2;
6563 } x;
6564
6565 +0 +1 +2 +3
6566 &x 00000000 00000000 00000000 00000000
6567 MLM----L
6568 a b
6569 &x+4 00000000 00000000 00000000 00000000
6570 M--L
6571 c
6572 &x+8 00000000 00000000 00000000 00000000
6573 M----------L
6574 d
6575 &x+12 00000000 00000000 00000000 00000000
6576 ML
6577 f
6578 */
6579
6580 int
6581 frv_adjust_field_align (tree field, int computed)
6582 {
6583 /* Make sure that the bitfield is not wider than the type. */
6584 if (DECL_BIT_FIELD (field)
6585 && !DECL_ARTIFICIAL (field))
6586 {
6587 tree parent = DECL_CONTEXT (field);
6588 tree prev = NULL_TREE;
6589 tree cur;
6590
6591 for (cur = TYPE_FIELDS (parent); cur && cur != field; cur = TREE_CHAIN (cur))
6592 {
6593 if (TREE_CODE (cur) != FIELD_DECL)
6594 continue;
6595
6596 prev = cur;
6597 }
6598
6599 gcc_assert (cur);
6600
6601       /* If this isn't a :0 field and the previous element is also a bitfield,
6602          see if the types differ; if so, we need to align the bit-field to the
6603          next boundary.  */
6604 if (prev
6605 && ! DECL_PACKED (field)
6606 && ! integer_zerop (DECL_SIZE (field))
6607 && DECL_BIT_FIELD_TYPE (field) != DECL_BIT_FIELD_TYPE (prev))
6608 {
6609 int prev_align = TYPE_ALIGN (TREE_TYPE (prev));
6610 int cur_align = TYPE_ALIGN (TREE_TYPE (field));
6611 computed = (prev_align > cur_align) ? prev_align : cur_align;
6612 }
6613 }
6614
6615 return computed;
6616 }
6617
6618 \f
6619 /* A C expression that is nonzero if it is permissible to store a value of mode
6620 MODE in hard register number REGNO (or in several registers starting with
6621 that one). For a machine where all registers are equivalent, a suitable
6622 definition is
6623
6624 #define HARD_REGNO_MODE_OK(REGNO, MODE) 1
6625
6626 It is not necessary for this macro to check for the numbers of fixed
6627 registers, because the allocation mechanism considers them to be always
6628 occupied.
6629
6630 On some machines, double-precision values must be kept in even/odd register
6631 pairs. The way to implement that is to define this macro to reject odd
6632 register numbers for such modes.
6633
6634 The minimum requirement for a mode to be OK in a register is that the
6635 `movMODE' instruction pattern support moves between the register and any
6636 other hard register for which the mode is OK; and that moving a value into
6637 the register and back out not alter it.
6638
6639 Since the same instruction used to move `SImode' will work for all narrower
6640 integer modes, it is not necessary on any machine for `HARD_REGNO_MODE_OK'
6641 to distinguish between these modes, provided you define patterns `movhi',
6642 etc., to take advantage of this. This is useful because of the interaction
6643 between `HARD_REGNO_MODE_OK' and `MODES_TIEABLE_P'; it is very desirable for
6644 all integer modes to be tieable.
6645
6646 Many machines have special registers for floating point arithmetic. Often
6647 people assume that floating point machine modes are allowed only in floating
6648 point registers. This is not true. Any registers that can hold integers
6649 can safely *hold* a floating point machine mode, whether or not floating
6650 arithmetic can be done on it in those registers. Integer move instructions
6651 can be used to move the values.
6652
6653 On some machines, though, the converse is true: fixed-point machine modes
6654 may not go in floating registers. This is true if the floating registers
6655 normalize any value stored in them, because storing a non-floating value
6656 there would garble it. In this case, `HARD_REGNO_MODE_OK' should reject
6657 fixed-point machine modes in floating registers. But if the floating
6658 registers do not automatically normalize, if you can store any bit pattern
6659 in one and retrieve it unchanged without a trap, then any machine mode may
6660 go in a floating register, so you can define this macro to say so.
6661
6662 The primary significance of special floating registers is rather that they
6663 are the registers acceptable in floating point arithmetic instructions.
6664 However, this is of no concern to `HARD_REGNO_MODE_OK'. You handle it by
6665 writing the proper constraints for those instructions.
6666
6667 On some machines, the floating registers are especially slow to access, so
6668 that it is better to store a value in a stack frame than in such a register
6669 if floating point arithmetic is not being done. As long as the floating
6670 registers are not in class `GENERAL_REGS', they will not be used unless some
6671 pattern's constraint asks for one. */
6672
6673 int
6674 frv_hard_regno_mode_ok (int regno, enum machine_mode mode)
6675 {
6676 int base;
6677 int mask;
6678
6679 switch (mode)
6680 {
6681 case CCmode:
6682 case CC_UNSmode:
6683 case CC_NZmode:
6684 return ICC_P (regno) || GPR_P (regno);
6685
6686 case CC_CCRmode:
6687 return CR_P (regno) || GPR_P (regno);
6688
6689 case CC_FPmode:
6690 return FCC_P (regno) || GPR_P (regno);
6691
6692 default:
6693 break;
6694 }
6695
6696 /* Set BASE to the first register in REGNO's class. Set MASK to the
6697 bits that must be clear in (REGNO - BASE) for the register to be
6698 well-aligned. */
6699 if (INTEGRAL_MODE_P (mode) || FLOAT_MODE_P (mode) || VECTOR_MODE_P (mode))
6700 {
6701 if (ACCG_P (regno))
6702 {
6703 /* ACCGs store one byte. Two-byte quantities must start in
6704 even-numbered registers, four-byte ones in registers whose
6705 numbers are divisible by four, and so on. */
6706 base = ACCG_FIRST;
6707 mask = GET_MODE_SIZE (mode) - 1;
6708 }
6709 else
6710 {
6711 /* The other registers store one word. */
6712 if (GPR_P (regno) || regno == AP_FIRST)
6713 base = GPR_FIRST;
6714
6715 else if (FPR_P (regno))
6716 base = FPR_FIRST;
6717
6718 else if (ACC_P (regno))
6719 base = ACC_FIRST;
6720
6721 else if (SPR_P (regno))
6722 return mode == SImode;
6723
6724 /* Fill in the table. */
6725 else
6726 return 0;
6727
6728 /* Anything smaller than an SI is OK in any word-sized register. */
6729 if (GET_MODE_SIZE (mode) < 4)
6730 return 1;
6731
6732 mask = (GET_MODE_SIZE (mode) / 4) - 1;
6733 }
6734 return (((regno - base) & mask) == 0);
6735 }
6736
6737 return 0;
6738 }
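
/* Worked examples of the alignment check above: DImode (8 bytes) requires
   (regno - base) to be even in the word-sized register files, TImode
   requires it to be a multiple of 4, and an 8-byte value in the byte-sized
   accumulator guards requires (regno - ACCG_FIRST) to be a multiple of 8.
   Anything smaller than SImode fits in any single word register.  */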
6739
6740 \f
6741 /* A C expression for the number of consecutive hard registers, starting at
6742 register number REGNO, required to hold a value of mode MODE.
6743
6744 On a machine where all registers are exactly one word, a suitable definition
6745 of this macro is
6746
6747 #define HARD_REGNO_NREGS(REGNO, MODE) \
6748 ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
6749         / UNITS_PER_WORD)  */
6750
6751 /* On the FRV, make the CC_FP mode take 3 words in the integer registers, so
6752 that we can build the appropriate instructions to properly reload the
6753 values. Also, make the byte-sized accumulator guards use one guard
6754 for each byte. */
6755
6756 int
6757 frv_hard_regno_nregs (int regno, enum machine_mode mode)
6758 {
6759 if (ACCG_P (regno))
6760 return GET_MODE_SIZE (mode);
6761 else
6762 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6763 }
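
/* For example, SImode needs one word register and DImode needs two, while
   in the accumulator guards a 4-byte value needs four consecutive ACCG
   registers, matching the one-guard-per-byte rule described above.  */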
6764
6765 \f
6766 /* A C expression for the maximum number of consecutive registers of
6767 class RCLASS needed to hold a value of mode MODE.
6768
6769 This is closely related to the macro `HARD_REGNO_NREGS'. In fact, the value
6770 of the macro `CLASS_MAX_NREGS (RCLASS, MODE)' should be the maximum value of
6771 `HARD_REGNO_NREGS (REGNO, MODE)' for all REGNO values in the class RCLASS.
6772
6773 This macro helps control the handling of multiple-word values in
6774 the reload pass.
6775
6776 This declaration is required. */
6777
6778 int
6779 frv_class_max_nregs (enum reg_class rclass, enum machine_mode mode)
6780 {
6781 if (rclass == ACCG_REGS)
6782 /* An N-byte value requires N accumulator guards. */
6783 return GET_MODE_SIZE (mode);
6784 else
6785 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
6786 }
6787
6788 \f
6789 /* A C expression that is nonzero if X is a legitimate constant for an
6790 immediate operand on the target machine. You can assume that X satisfies
6791 `CONSTANT_P', so you need not check this. In fact, `1' is a suitable
6792 definition for this macro on machines where anything `CONSTANT_P' is valid. */
6793
6794 int
6795 frv_legitimate_constant_p (rtx x)
6796 {
6797 enum machine_mode mode = GET_MODE (x);
6798
6799 /* frv_cannot_force_const_mem always returns true for FDPIC. This
6800 means that the move expanders will be expected to deal with most
6801 kinds of constant, regardless of what we return here.
6802
6803 However, among its other duties, LEGITIMATE_CONSTANT_P decides whether
6804 a constant can be entered into reg_equiv_constant[]. If we return true,
6805 reload can create new instances of the constant whenever it likes.
6806
6807 The idea is therefore to accept as many constants as possible (to give
6808 reload more freedom) while rejecting constants that can only be created
6809 at certain times. In particular, anything with a symbolic component will
6810 require use of the pseudo FDPIC register, which is only available before
6811 reload. */
6812 if (TARGET_FDPIC)
6813 return LEGITIMATE_PIC_OPERAND_P (x);
6814
6815 /* All of the integer constants are ok. */
6816 if (GET_CODE (x) != CONST_DOUBLE)
6817 return TRUE;
6818
6819 /* double integer constants are ok. */
6820 if (mode == VOIDmode || mode == DImode)
6821 return TRUE;
6822
6823 /* 0 is always ok. */
6824 if (x == CONST0_RTX (mode))
6825 return TRUE;
6826
6827 /* If floating point is just emulated, allow any constant, since it will be
6828 constructed in the GPRs. */
6829 if (!TARGET_HAS_FPRS)
6830 return TRUE;
6831
6832 if (mode == DFmode && !TARGET_DOUBLE)
6833 return TRUE;
6834
6835 /* Otherwise store the constant away and do a load. */
6836 return FALSE;
6837 }
6838
6839 /* Implement SELECT_CC_MODE. Choose CC_FP for floating-point comparisons,
6840 CC_NZ for comparisons against zero in which a single Z or N flag test
6841 is enough, CC_UNS for other unsigned comparisons, and CC for other
6842 signed comparisons. */
6843
6844 enum machine_mode
6845 frv_select_cc_mode (enum rtx_code code, rtx x, rtx y)
6846 {
6847 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
6848 return CC_FPmode;
6849
6850 switch (code)
6851 {
6852 case EQ:
6853 case NE:
6854 case LT:
6855 case GE:
6856 return y == const0_rtx ? CC_NZmode : CCmode;
6857
6858 case GTU:
6859 case GEU:
6860 case LTU:
6861 case LEU:
6862 return y == const0_rtx ? CC_NZmode : CC_UNSmode;
6863
6864 default:
6865 return CCmode;
6866 }
6867 }
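
/* Examples of the mapping above: (eq r1 0) and (lt r1 0) use CC_NZmode,
   (ltu r1 r2) uses CC_UNSmode, (ltu r1 0) uses CC_NZmode, comparisons of
   floating-point values use CC_FPmode, and (gt r1 r2) falls back to
   CCmode.  */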
6868 \f
6869 /* A C expression for the cost of moving data from a register in class FROM to
6870 one in class TO. The classes are expressed using the enumeration values
6871 such as `GENERAL_REGS'. A value of 4 is the default; other values are
6872 interpreted relative to that.
6873
6874 It is not required that the cost always equal 2 when FROM is the same as TO;
6875 on some machines it is expensive to move between registers if they are not
6876 general registers.
6877
6878 If reload sees an insn consisting of a single `set' between two hard
6879 registers, and if `REGISTER_MOVE_COST' applied to their classes returns a
6880 value of 2, reload does not check to ensure that the constraints of the insn
6881 are met. Setting a cost of other than 2 will allow reload to verify that
6882 the constraints are met. You should do this if the `movM' pattern's
6883 constraints do not allow such copying. */
6884
6885 #define HIGH_COST 40
6886 #define MEDIUM_COST 3
6887 #define LOW_COST 1
6888
6889 int
6890 frv_register_move_cost (enum reg_class from, enum reg_class to)
6891 {
6892 switch (from)
6893 {
6894 default:
6895 break;
6896
6897 case QUAD_REGS:
6898 case EVEN_REGS:
6899 case GPR_REGS:
6900 switch (to)
6901 {
6902 default:
6903 break;
6904
6905 case QUAD_REGS:
6906 case EVEN_REGS:
6907 case GPR_REGS:
6908 return LOW_COST;
6909
6910 case FEVEN_REGS:
6911 case FPR_REGS:
6912 return LOW_COST;
6913
6914 case LCR_REG:
6915 case LR_REG:
6916 case SPR_REGS:
6917 return LOW_COST;
6918 }
6919
6920 case FEVEN_REGS:
6921 case FPR_REGS:
6922 switch (to)
6923 {
6924 default:
6925 break;
6926
6927 case QUAD_REGS:
6928 case EVEN_REGS:
6929 case GPR_REGS:
6930 case ACC_REGS:
6931 case EVEN_ACC_REGS:
6932 case QUAD_ACC_REGS:
6933 case ACCG_REGS:
6934 return MEDIUM_COST;
6935
6936 case FEVEN_REGS:
6937 case FPR_REGS:
6938 return LOW_COST;
6939 }
6940
6941 case LCR_REG:
6942 case LR_REG:
6943 case SPR_REGS:
6944 switch (to)
6945 {
6946 default:
6947 break;
6948
6949 case QUAD_REGS:
6950 case EVEN_REGS:
6951 case GPR_REGS:
6952 return MEDIUM_COST;
6953 }
6954
6955 case ACC_REGS:
6956 case EVEN_ACC_REGS:
6957 case QUAD_ACC_REGS:
6958 case ACCG_REGS:
6959 switch (to)
6960 {
6961 default:
6962 break;
6963
6964 case FEVEN_REGS:
6965 case FPR_REGS:
6966 return MEDIUM_COST;
6967
6968 }
6969 }
6970
6971 return HIGH_COST;
6972 }
6973 \f
6974 /* Implementation of TARGET_ASM_INTEGER. In the FRV case we need to
6975 use ".picptr" to generate safe relocations for PIC code. We also
6976 need a fixup entry for aligned (non-debugging) code. */
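
/* For illustration: under TARGET_FDPIC a word holding the address of
   function "foo" is emitted as

	.picptr	funcdesc(foo)

   For an aligned non-FDPIC PIC word, the code below labels the word
   (roughly ".LCPn:"), records a ".picptr .LCPn" entry for it in the fixup
   section, and then emits the value itself with ".picptr".  */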
6977
6978 static bool
6979 frv_assemble_integer (rtx value, unsigned int size, int aligned_p)
6980 {
6981 if ((flag_pic || TARGET_FDPIC) && size == UNITS_PER_WORD)
6982 {
6983 if (GET_CODE (value) == CONST
6984 || GET_CODE (value) == SYMBOL_REF
6985 || GET_CODE (value) == LABEL_REF)
6986 {
6987 if (TARGET_FDPIC && GET_CODE (value) == SYMBOL_REF
6988 && SYMBOL_REF_FUNCTION_P (value))
6989 {
6990 fputs ("\t.picptr\tfuncdesc(", asm_out_file);
6991 output_addr_const (asm_out_file, value);
6992 fputs (")\n", asm_out_file);
6993 return true;
6994 }
6995 else if (TARGET_FDPIC && GET_CODE (value) == CONST
6996 && frv_function_symbol_referenced_p (value))
6997 return false;
6998 if (aligned_p && !TARGET_FDPIC)
6999 {
7000 static int label_num = 0;
7001 char buf[256];
7002 const char *p;
7003
7004 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", label_num++);
7005 p = (* targetm.strip_name_encoding) (buf);
7006
7007 fprintf (asm_out_file, "%s:\n", p);
7008 fprintf (asm_out_file, "%s\n", FIXUP_SECTION_ASM_OP);
7009 fprintf (asm_out_file, "\t.picptr\t%s\n", p);
7010 fprintf (asm_out_file, "\t.previous\n");
7011 }
7012 assemble_integer_with_op ("\t.picptr\t", value);
7013 return true;
7014 }
7015 if (!aligned_p)
7016 {
7017 /* We've set the unaligned SI op to NULL, so we always have to
7018 handle the unaligned case here. */
7019 assemble_integer_with_op ("\t.4byte\t", value);
7020 return true;
7021 }
7022 }
7023 return default_assemble_integer (value, size, aligned_p);
7024 }
7025
7026 /* Function to set up the backend function structure. */
7027
7028 static struct machine_function *
7029 frv_init_machine_status (void)
7030 {
7031 return GGC_CNEW (struct machine_function);
7032 }
7033 \f
7034 /* Implement TARGET_SCHED_ISSUE_RATE. */
7035
7036 int
7037 frv_issue_rate (void)
7038 {
7039 if (!TARGET_PACK)
7040 return 1;
7041
7042 switch (frv_cpu_type)
7043 {
7044 default:
7045 case FRV_CPU_FR300:
7046 case FRV_CPU_SIMPLE:
7047 return 1;
7048
7049 case FRV_CPU_FR400:
7050 case FRV_CPU_FR405:
7051 case FRV_CPU_FR450:
7052 return 2;
7053
7054 case FRV_CPU_GENERIC:
7055 case FRV_CPU_FR500:
7056 case FRV_CPU_TOMCAT:
7057 return 4;
7058
7059 case FRV_CPU_FR550:
7060 return 8;
7061 }
7062 }
7063 \f
7064 /* A for_each_rtx callback. If X refers to an accumulator, return
7065    ACC_GROUP_ODD if bit 2 of the register number is set and
7066 ACC_GROUP_EVEN if it is clear. Return 0 (ACC_GROUP_NONE)
7067 otherwise. */
7068
7069 static int
7070 frv_acc_group_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
7071 {
7072 if (REG_P (*x))
7073 {
7074 if (ACC_P (REGNO (*x)))
7075 return (REGNO (*x) - ACC_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7076 if (ACCG_P (REGNO (*x)))
7077 return (REGNO (*x) - ACCG_FIRST) & 4 ? ACC_GROUP_ODD : ACC_GROUP_EVEN;
7078 }
7079 return 0;
7080 }
7081
7082 /* Return the value of INSN's acc_group attribute. */
7083
7084 int
7085 frv_acc_group (rtx insn)
7086 {
7087 /* This distinction only applies to the FR550 packing constraints. */
7088 if (frv_cpu_type != FRV_CPU_FR550)
7089 return ACC_GROUP_NONE;
7090 return for_each_rtx (&PATTERN (insn), frv_acc_group_1, 0);
7091 }
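
/* For example, an insn using acc2 or accg3 gets ACC_GROUP_EVEN, one using
   acc5 gets ACC_GROUP_ODD (bit 2 of the offset from ACC_FIRST or
   ACCG_FIRST is set), and an insn that touches no accumulators gets
   ACC_GROUP_NONE.  */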
7092
7093 /* Return the index of the DFA unit in FRV_UNIT_NAMES[] that instruction
7094 INSN will try to claim first. Since this value depends only on the
7095 type attribute, we can cache the results in FRV_TYPE_TO_UNIT[]. */
7096
7097 static unsigned int
7098 frv_insn_unit (rtx insn)
7099 {
7100 enum attr_type type;
7101
7102 type = get_attr_type (insn);
7103 if (frv_type_to_unit[type] == ARRAY_SIZE (frv_unit_codes))
7104 {
7105 /* We haven't seen this type of instruction before. */
7106 state_t state;
7107 unsigned int unit;
7108
7109 /* Issue the instruction on its own to see which unit it prefers. */
7110 state = alloca (state_size ());
7111 state_reset (state);
7112 state_transition (state, insn);
7113
7114 /* Find out which unit was taken. */
7115 for (unit = 0; unit < ARRAY_SIZE (frv_unit_codes); unit++)
7116 if (cpu_unit_reservation_p (state, frv_unit_codes[unit]))
7117 break;
7118
7119 gcc_assert (unit != ARRAY_SIZE (frv_unit_codes));
7120
7121 frv_type_to_unit[type] = unit;
7122 }
7123 return frv_type_to_unit[type];
7124 }
7125
7126 /* Return true if INSN issues to a branch unit. */
7127
7128 static bool
7129 frv_issues_to_branch_unit_p (rtx insn)
7130 {
7131 return frv_unit_groups[frv_insn_unit (insn)] == GROUP_B;
7132 }
7133 \f
7134 /* The current state of the packing pass, implemented by frv_pack_insns. */
7135 static struct {
7136 /* The state of the pipeline DFA. */
7137 state_t dfa_state;
7138
7139 /* Which hardware registers are set within the current packet,
7140 and the conditions under which they are set. */
7141 regstate_t regstate[FIRST_PSEUDO_REGISTER];
7142
7143 /* The memory locations that have been modified so far in this
7144 packet. MEM is the memref and COND is the regstate_t condition
7145 under which it is set. */
7146 struct {
7147 rtx mem;
7148 regstate_t cond;
7149 } mems[2];
7150
7151 /* The number of valid entries in MEMS. The value is larger than
7152 ARRAY_SIZE (mems) if there were too many mems to record. */
7153 unsigned int num_mems;
7154
7155 /* The maximum number of instructions that can be packed together. */
7156 unsigned int issue_rate;
7157
7158 /* The instructions in the packet, partitioned into groups. */
7159 struct frv_packet_group {
7160 /* How many instructions in the packet belong to this group. */
7161 unsigned int num_insns;
7162
7163 /* A list of the instructions that belong to this group, in the order
7164 they appear in the rtl stream. */
7165 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7166
7167 /* The contents of INSNS after they have been sorted into the correct
7168 assembly-language order. Element X issues to unit X. The list may
7169 contain extra nops. */
7170 rtx sorted[ARRAY_SIZE (frv_unit_codes)];
7171
7172 /* The member of frv_nops[] to use in sorted[]. */
7173 rtx nop;
7174 } groups[NUM_GROUPS];
7175
7176 /* The instructions that make up the current packet. */
7177 rtx insns[ARRAY_SIZE (frv_unit_codes)];
7178 unsigned int num_insns;
7179 } frv_packet;
7180
7181 /* Return the regstate_t flags for the given COND_EXEC condition.
7182 Abort if the condition isn't in the right form. */
7183
7184 static int
7185 frv_cond_flags (rtx cond)
7186 {
7187 gcc_assert ((GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
7188 && GET_CODE (XEXP (cond, 0)) == REG
7189 && CR_P (REGNO (XEXP (cond, 0)))
7190 && XEXP (cond, 1) == const0_rtx);
7191 return ((REGNO (XEXP (cond, 0)) - CR_FIRST)
7192 | (GET_CODE (cond) == NE
7193 ? REGSTATE_IF_TRUE
7194 : REGSTATE_IF_FALSE));
7195 }
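
/* For example, the condition (ne (reg:CC_CCR cr3) (const_int 0)) yields
   (cr3 - CR_FIRST) | REGSTATE_IF_TRUE, while the EQ form of the same test
   yields REGSTATE_IF_FALSE instead; the low bits identify which CR
   controls the insn and the flag records the sense of the test.  */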
7196
7197
7198 /* Return true if something accessed under condition COND2 can
7199 conflict with something written under condition COND1. */
7200
7201 static bool
7202 frv_regstate_conflict_p (regstate_t cond1, regstate_t cond2)
7203 {
7204 /* If either reference was unconditional, we have a conflict. */
7205 if ((cond1 & REGSTATE_IF_EITHER) == 0
7206 || (cond2 & REGSTATE_IF_EITHER) == 0)
7207 return true;
7208
7209 /* The references might conflict if they were controlled by
7210 different CRs. */
7211 if ((cond1 & REGSTATE_CC_MASK) != (cond2 & REGSTATE_CC_MASK))
7212 return true;
7213
7214 /* They definitely conflict if they are controlled by the
7215 same condition. */
7216 if ((cond1 & cond2 & REGSTATE_IF_EITHER) != 0)
7217 return true;
7218
7219 return false;
7220 }
7221
7222
7223 /* A for_each_rtx callback. Return 1 if *X depends on an instruction in
7224 the current packet. DATA points to a regstate_t that describes the
7225 condition under which *X might be set or used. */
7226
7227 static int
7228 frv_registers_conflict_p_1 (rtx *x, void *data)
7229 {
7230 unsigned int regno, i;
7231 regstate_t cond;
7232
7233 cond = *(regstate_t *) data;
7234
7235 if (GET_CODE (*x) == REG)
7236 FOR_EACH_REGNO (regno, *x)
7237 if ((frv_packet.regstate[regno] & REGSTATE_MODIFIED) != 0)
7238 if (frv_regstate_conflict_p (frv_packet.regstate[regno], cond))
7239 return 1;
7240
7241 if (GET_CODE (*x) == MEM)
7242 {
7243 /* If we ran out of memory slots, assume a conflict. */
7244 if (frv_packet.num_mems > ARRAY_SIZE (frv_packet.mems))
7245 return 1;
7246
7247 /* Check for output or true dependencies with earlier MEMs. */
7248 for (i = 0; i < frv_packet.num_mems; i++)
7249 if (frv_regstate_conflict_p (frv_packet.mems[i].cond, cond))
7250 {
7251 if (true_dependence (frv_packet.mems[i].mem, VOIDmode,
7252 *x, rtx_varies_p))
7253 return 1;
7254
7255 if (output_dependence (frv_packet.mems[i].mem, *x))
7256 return 1;
7257 }
7258 }
7259
7260 /* The return values of calls aren't significant: they describe
7261 the effect of the call as a whole, not of the insn itself. */
7262 if (GET_CODE (*x) == SET && GET_CODE (SET_SRC (*x)) == CALL)
7263 {
7264 if (for_each_rtx (&SET_SRC (*x), frv_registers_conflict_p_1, data))
7265 return 1;
7266 return -1;
7267 }
7268
7269 /* Check subexpressions. */
7270 return 0;
7271 }
7272
7273
7274 /* Return true if something in X might depend on an instruction
7275 in the current packet. */
7276
7277 static bool
7278 frv_registers_conflict_p (rtx x)
7279 {
7280 regstate_t flags;
7281
7282 flags = 0;
7283 if (GET_CODE (x) == COND_EXEC)
7284 {
7285 if (for_each_rtx (&XEXP (x, 0), frv_registers_conflict_p_1, &flags))
7286 return true;
7287
7288 flags |= frv_cond_flags (XEXP (x, 0));
7289 x = XEXP (x, 1);
7290 }
7291 return for_each_rtx (&x, frv_registers_conflict_p_1, &flags);
7292 }
7293
7294
7295 /* A note_stores callback. DATA points to the regstate_t condition
7296 under which X is modified. Update FRV_PACKET accordingly. */
7297
7298 static void
7299 frv_registers_update_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7300 {
7301 unsigned int regno;
7302
7303 if (GET_CODE (x) == REG)
7304 FOR_EACH_REGNO (regno, x)
7305 frv_packet.regstate[regno] |= *(regstate_t *) data;
7306
7307 if (GET_CODE (x) == MEM)
7308 {
7309 if (frv_packet.num_mems < ARRAY_SIZE (frv_packet.mems))
7310 {
7311 frv_packet.mems[frv_packet.num_mems].mem = x;
7312 frv_packet.mems[frv_packet.num_mems].cond = *(regstate_t *) data;
7313 }
7314 frv_packet.num_mems++;
7315 }
7316 }
7317
7318
7319 /* Update the register state information for an instruction whose
7320 body is X. */
7321
7322 static void
7323 frv_registers_update (rtx x)
7324 {
7325 regstate_t flags;
7326
7327 flags = REGSTATE_MODIFIED;
7328 if (GET_CODE (x) == COND_EXEC)
7329 {
7330 flags |= frv_cond_flags (XEXP (x, 0));
7331 x = XEXP (x, 1);
7332 }
7333 note_stores (x, frv_registers_update_1, &flags);
7334 }
7335
7336
7337 /* Initialize frv_packet for the start of a new packet. */
7338
7339 static void
7340 frv_start_packet (void)
7341 {
7342 enum frv_insn_group group;
7343
7344 memset (frv_packet.regstate, 0, sizeof (frv_packet.regstate));
7345 frv_packet.num_mems = 0;
7346 frv_packet.num_insns = 0;
7347 for (group = 0; group < NUM_GROUPS; group++)
7348 frv_packet.groups[group].num_insns = 0;
7349 }
7350
7351
7352 /* Likewise for the start of a new basic block. */
7353
7354 static void
7355 frv_start_packet_block (void)
7356 {
7357 state_reset (frv_packet.dfa_state);
7358 frv_start_packet ();
7359 }
7360
7361
7362 /* Finish the current packet, if any, and start a new one. Call
7363 HANDLE_PACKET with FRV_PACKET describing the completed packet. */
7364
7365 static void
7366 frv_finish_packet (void (*handle_packet) (void))
7367 {
7368 if (frv_packet.num_insns > 0)
7369 {
7370 handle_packet ();
7371 state_transition (frv_packet.dfa_state, 0);
7372 frv_start_packet ();
7373 }
7374 }
7375
7376
7377 /* Return true if INSN can be added to the current packet. Update
7378 the DFA state on success. */
7379
7380 static bool
7381 frv_pack_insn_p (rtx insn)
7382 {
7383 /* See if the packet is already as long as it can be. */
7384 if (frv_packet.num_insns == frv_packet.issue_rate)
7385 return false;
7386
7387 /* If the scheduler thought that an instruction should start a packet,
7388 it's usually a good idea to believe it. It knows much more about
7389 the latencies than we do.
7390
7391 There are some exceptions though:
7392
7393 - Conditional instructions are scheduled on the assumption that
7394 they will be executed. This is usually a good thing, since it
7395 tends to avoid unnecessary stalls in the conditional code.
7396 But we want to pack conditional instructions as tightly as
7397 possible, in order to optimize the case where they aren't
7398 executed.
7399
7400 - The scheduler will always put branches on their own, even
7401 if there's no real dependency.
7402
7403 - There's no point putting a call in its own packet unless
7404 we have to. */
7405 if (frv_packet.num_insns > 0
7406 && GET_CODE (insn) == INSN
7407 && GET_MODE (insn) == TImode
7408 && GET_CODE (PATTERN (insn)) != COND_EXEC)
7409 return false;
7410
7411 /* Check for register conflicts. Don't do this for setlo since any
7412 conflict will be with the partnering sethi, with which it can
7413 be packed. */
7414 if (get_attr_type (insn) != TYPE_SETLO)
7415 if (frv_registers_conflict_p (PATTERN (insn)))
7416 return false;
7417
7418 return state_transition (frv_packet.dfa_state, insn) < 0;
7419 }
7420
7421
7422 /* Add instruction INSN to the current packet. */
7423
7424 static void
7425 frv_add_insn_to_packet (rtx insn)
7426 {
7427 struct frv_packet_group *packet_group;
7428
7429 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7430 packet_group->insns[packet_group->num_insns++] = insn;
7431 frv_packet.insns[frv_packet.num_insns++] = insn;
7432
7433 frv_registers_update (PATTERN (insn));
7434 }
7435
7436
7437 /* Insert INSN (a member of frv_nops[]) into the current packet. If the
7438 packet ends in a branch or call, insert the nop before it, otherwise
7439    add it to the end.  */
7440
7441 static void
7442 frv_insert_nop_in_packet (rtx insn)
7443 {
7444 struct frv_packet_group *packet_group;
7445 rtx last;
7446
7447 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
7448 last = frv_packet.insns[frv_packet.num_insns - 1];
7449 if (GET_CODE (last) != INSN)
7450 {
7451 insn = emit_insn_before (PATTERN (insn), last);
7452 frv_packet.insns[frv_packet.num_insns - 1] = insn;
7453 frv_packet.insns[frv_packet.num_insns++] = last;
7454 }
7455 else
7456 {
7457 insn = emit_insn_after (PATTERN (insn), last);
7458 frv_packet.insns[frv_packet.num_insns++] = insn;
7459 }
7460 packet_group->insns[packet_group->num_insns++] = insn;
7461 }
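
/* A hypothetical illustration of the rule above (the mnemonics are
   placeholders, not real FRV output): if the packet currently ends in a
   branch,

       <integer insn>.p
       <branch insn>

   the nop is emitted before the branch rather than appended, so that the
   branch remains the last instruction of the packet:

       <integer insn>.p
       nop.p
       <branch insn>  */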
7462
7463
7464 /* If packing is enabled, divide the instructions into packets and
7465 return true. Call HANDLE_PACKET for each complete packet. */
7466
7467 static bool
7468 frv_for_each_packet (void (*handle_packet) (void))
7469 {
7470 rtx insn, next_insn;
7471
7472 frv_packet.issue_rate = frv_issue_rate ();
7473
7474 /* Early exit if we don't want to pack insns. */
7475 if (!optimize
7476 || !flag_schedule_insns_after_reload
7477 || !TARGET_VLIW_BRANCH
7478 || frv_packet.issue_rate == 1)
7479 return false;
7480
7481 /* Set up the initial packing state. */
7482 dfa_start ();
7483 frv_packet.dfa_state = alloca (state_size ());
7484
7485 frv_start_packet_block ();
7486 for (insn = get_insns (); insn != 0; insn = next_insn)
7487 {
7488 enum rtx_code code;
7489 bool eh_insn_p;
7490
7491 code = GET_CODE (insn);
7492 next_insn = NEXT_INSN (insn);
7493
7494 if (code == CODE_LABEL)
7495 {
7496 frv_finish_packet (handle_packet);
7497 frv_start_packet_block ();
7498 }
7499
7500 if (INSN_P (insn))
7501 switch (GET_CODE (PATTERN (insn)))
7502 {
7503 case USE:
7504 case CLOBBER:
7505 case ADDR_VEC:
7506 case ADDR_DIFF_VEC:
7507 break;
7508
7509 default:
7510 /* Calls mustn't be packed on a TOMCAT. */
7511 if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
7512 frv_finish_packet (handle_packet);
7513
7514 /* Since the last instruction in a packet determines the EH
7515 region, any exception-throwing instruction must come at
7516      the end of the reordered packet.  Insns that issue to a
7517 branch unit are bound to come last; for others it's
7518 too hard to predict. */
7519 eh_insn_p = (find_reg_note (insn, REG_EH_REGION, NULL) != NULL);
7520 if (eh_insn_p && !frv_issues_to_branch_unit_p (insn))
7521 frv_finish_packet (handle_packet);
7522
7523 /* Finish the current packet if we can't add INSN to it.
7524 Simulate cycles until INSN is ready to issue. */
7525 if (!frv_pack_insn_p (insn))
7526 {
7527 frv_finish_packet (handle_packet);
7528 while (!frv_pack_insn_p (insn))
7529 state_transition (frv_packet.dfa_state, 0);
7530 }
7531
7532 /* Add the instruction to the packet. */
7533 frv_add_insn_to_packet (insn);
7534
7535 /* Calls and jumps end a packet, as do insns that throw
7536 an exception. */
7537 if (code == CALL_INSN || code == JUMP_INSN || eh_insn_p)
7538 frv_finish_packet (handle_packet);
7539 break;
7540 }
7541 }
7542 frv_finish_packet (handle_packet);
7543 dfa_finish ();
7544 return true;
7545 }
7546 \f
7547 /* Subroutine of frv_sort_insn_group. We are trying to sort
7548 frv_packet.groups[GROUP].sorted[0...NUM_INSNS-1] into assembly
7549 language order. We have already picked a new position for
7550 frv_packet.groups[GROUP].sorted[X] if bit X of ISSUED is set.
7551 These instructions will occupy elements [0, LOWER_SLOT) and
7552 [UPPER_SLOT, NUM_INSNS) of the final (sorted) array. STATE is
7553 the DFA state after issuing these instructions.
7554
7555 Try filling elements [LOWER_SLOT, UPPER_SLOT) with every permutation
7556 of the unused instructions. Return true if one such permutation gives
7557 a valid ordering, leaving the successful permutation in sorted[].
7558 Do not modify sorted[] until a valid permutation is found. */
7559
7560 static bool
7561 frv_sort_insn_group_1 (enum frv_insn_group group,
7562 unsigned int lower_slot, unsigned int upper_slot,
7563 unsigned int issued, unsigned int num_insns,
7564 state_t state)
7565 {
7566 struct frv_packet_group *packet_group;
7567 unsigned int i;
7568 state_t test_state;
7569 size_t dfa_size;
7570 rtx insn;
7571
7572 /* Early success if we've filled all the slots. */
7573 if (lower_slot == upper_slot)
7574 return true;
7575
7576 packet_group = &frv_packet.groups[group];
7577 dfa_size = state_size ();
7578 test_state = alloca (dfa_size);
7579
7580 /* Try issuing each unused instruction. */
7581 for (i = num_insns - 1; i + 1 != 0; i--)
7582 if (~issued & (1 << i))
7583 {
7584 insn = packet_group->sorted[i];
7585 memcpy (test_state, state, dfa_size);
7586 if (state_transition (test_state, insn) < 0
7587 && cpu_unit_reservation_p (test_state,
7588 NTH_UNIT (group, upper_slot - 1))
7589 && frv_sort_insn_group_1 (group, lower_slot, upper_slot - 1,
7590 issued | (1 << i), num_insns,
7591 test_state))
7592 {
7593 packet_group->sorted[upper_slot - 1] = insn;
7594 return true;
7595 }
7596 }
7597
7598 return false;
7599 }
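
/* A worked sketch of the search above, assuming three hypothetical
   GROUP_I instructions A, B and C with LOWER_SLOT == 0 and
   UPPER_SLOT == NUM_INSNS == 3: the loop tries each unissued insn in
   slot 2, checking that the test DFA state can still reserve the unit
   given by NTH_UNIT (GROUP_I, 2); on success it recurses with
   UPPER_SLOT == 2 to fill slot 1, and so on down to LOWER_SLOT.
   sorted[] is only written on the way back out of a fully successful
   recursion, so a failed branch of the search leaves it untouched.  */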
7600
7601 /* Compare two instructions by their frv_insn_unit. */
7602
7603 static int
7604 frv_compare_insns (const void *first, const void *second)
7605 {
7606 const rtx *const insn1 = (rtx const *) first,
7607 *const insn2 = (rtx const *) second;
7608 return frv_insn_unit (*insn1) - frv_insn_unit (*insn2);
7609 }
7610
7611 /* Copy frv_packet.groups[GROUP].insns[] to frv_packet.groups[GROUP].sorted[]
7612 and sort it into assembly language order. See frv.md for a description of
7613 the algorithm. */
7614
7615 static void
7616 frv_sort_insn_group (enum frv_insn_group group)
7617 {
7618 struct frv_packet_group *packet_group;
7619 unsigned int first, i, nop, max_unit, num_slots;
7620 state_t state, test_state;
7621 size_t dfa_size;
7622
7623 packet_group = &frv_packet.groups[group];
7624
7625 /* Assume no nop is needed. */
7626 packet_group->nop = 0;
7627
7628 if (packet_group->num_insns == 0)
7629 return;
7630
7631 /* Copy insns[] to sorted[]. */
7632 memcpy (packet_group->sorted, packet_group->insns,
7633 sizeof (rtx) * packet_group->num_insns);
7634
7635 /* Sort sorted[] by the unit that each insn tries to take first. */
7636 if (packet_group->num_insns > 1)
7637 qsort (packet_group->sorted, packet_group->num_insns,
7638 sizeof (rtx), frv_compare_insns);
7639
7640 /* That's always enough for branch and control insns. */
7641 if (group == GROUP_B || group == GROUP_C)
7642 return;
7643
7644 dfa_size = state_size ();
7645 state = alloca (dfa_size);
7646 test_state = alloca (dfa_size);
7647
7648 /* Find the highest FIRST such that sorted[0...FIRST-1] can issue
7649 consecutively and such that the DFA takes unit X when sorted[X]
7650 is added. Set STATE to the new DFA state. */
7651 state_reset (test_state);
7652 for (first = 0; first < packet_group->num_insns; first++)
7653 {
7654 memcpy (state, test_state, dfa_size);
7655 if (state_transition (test_state, packet_group->sorted[first]) >= 0
7656 || !cpu_unit_reservation_p (test_state, NTH_UNIT (group, first)))
7657 break;
7658 }
7659
7660 /* If all the instructions issued in ascending order, we're done. */
7661 if (first == packet_group->num_insns)
7662 return;
7663
7664 /* Add nops to the end of sorted[] and try each permutation until
7665 we find one that works. */
7666 for (nop = 0; nop < frv_num_nops; nop++)
7667 {
7668 max_unit = frv_insn_unit (frv_nops[nop]);
7669 if (frv_unit_groups[max_unit] == group)
7670 {
7671 packet_group->nop = frv_nops[nop];
7672 num_slots = UNIT_NUMBER (max_unit) + 1;
7673 for (i = packet_group->num_insns; i < num_slots; i++)
7674 packet_group->sorted[i] = frv_nops[nop];
7675 if (frv_sort_insn_group_1 (group, first, num_slots,
7676 (1 << first) - 1, num_slots, state))
7677 return;
7678 }
7679 }
7680 gcc_unreachable ();
7681 }
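
/* A hypothetical example of the nop fallback above: if GROUP_I contains a
   single instruction that the DFA will only place on a later integer
   unit, the ascending scan stops with FIRST == 0.  The loop then selects
   the integer nop from frv_nops[], pads sorted[] with copies of it up to
   NUM_SLOTS entries, and relies on frv_sort_insn_group_1 to find an
   ordering such as

       nop, nop, <insn>

   in which every element reserves the unit of the slot it occupies.  */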
7682 \f
7683 /* Sort the current packet into assembly-language order. Set packing
7684 flags as appropriate. */
7685
7686 static void
7687 frv_reorder_packet (void)
7688 {
7689 unsigned int cursor[NUM_GROUPS];
7690 rtx insns[ARRAY_SIZE (frv_unit_groups)];
7691 unsigned int unit, to, from;
7692 enum frv_insn_group group;
7693 struct frv_packet_group *packet_group;
7694
7695 /* First sort each group individually. */
7696 for (group = 0; group < NUM_GROUPS; group++)
7697 {
7698 cursor[group] = 0;
7699 frv_sort_insn_group (group);
7700 }
7701
7702   /* Go through the unit template and try to add an instruction from
7703 that unit's group. */
7704 to = 0;
7705 for (unit = 0; unit < ARRAY_SIZE (frv_unit_groups); unit++)
7706 {
7707 group = frv_unit_groups[unit];
7708 packet_group = &frv_packet.groups[group];
7709 if (cursor[group] < packet_group->num_insns)
7710 {
7711 /* frv_reorg should have added nops for us. */
7712 gcc_assert (packet_group->sorted[cursor[group]]
7713 != packet_group->nop);
7714 insns[to++] = packet_group->sorted[cursor[group]++];
7715 }
7716 }
7717
7718 gcc_assert (to == frv_packet.num_insns);
7719
7720 /* Clear the last instruction's packing flag, thus marking the end of
7721 a packet. Reorder the other instructions relative to it. */
7722 CLEAR_PACKING_FLAG (insns[to - 1]);
7723 for (from = 0; from < to - 1; from++)
7724 {
7725 remove_insn (insns[from]);
7726 add_insn_before (insns[from], insns[to - 1], NULL);
7727 SET_PACKING_FLAG (insns[from]);
7728 }
7729 }
7730
7731
7732 /* Divide instructions into packets. Reorder the contents of each
7733 packet so that they are in the correct assembly-language order.
7734
7735 Since this pass can change the raw meaning of the rtl stream, it must
7736 only be called at the last minute, just before the instructions are
7737 written out. */
7738
7739 static void
7740 frv_pack_insns (void)
7741 {
7742 if (frv_for_each_packet (frv_reorder_packet))
7743 frv_insn_packing_flag = 0;
7744 else
7745 frv_insn_packing_flag = -1;
7746 }
7747 \f
7748 /* See whether we need to add nops to group GROUP in order to
7749 make a valid packet. */
7750
7751 static void
7752 frv_fill_unused_units (enum frv_insn_group group)
7753 {
7754 unsigned int non_nops, nops, i;
7755 struct frv_packet_group *packet_group;
7756
7757 packet_group = &frv_packet.groups[group];
7758
7759 /* Sort the instructions into assembly-language order.
7760 Use nops to fill slots that are otherwise unused. */
7761 frv_sort_insn_group (group);
7762
7763 /* See how many nops are needed before the final useful instruction. */
7764 i = nops = 0;
7765 for (non_nops = 0; non_nops < packet_group->num_insns; non_nops++)
7766 while (packet_group->sorted[i++] == packet_group->nop)
7767 nops++;
7768
7769 /* Insert that many nops into the instruction stream. */
7770 while (nops-- > 0)
7771 frv_insert_nop_in_packet (packet_group->nop);
7772 }
7773
7774 /* Return true if accesses IO1 and IO2 refer to the same doubleword. */
7775
7776 static bool
7777 frv_same_doubleword_p (const struct frv_io *io1, const struct frv_io *io2)
7778 {
7779 if (io1->const_address != 0 && io2->const_address != 0)
7780 return io1->const_address == io2->const_address;
7781
7782 if (io1->var_address != 0 && io2->var_address != 0)
7783 return rtx_equal_p (io1->var_address, io2->var_address);
7784
7785 return false;
7786 }
7787
7788 /* Return true if operations IO1 and IO2 are guaranteed to complete
7789 in order. */
7790
7791 static bool
7792 frv_io_fixed_order_p (const struct frv_io *io1, const struct frv_io *io2)
7793 {
7794 /* The order of writes is always preserved. */
7795 if (io1->type == FRV_IO_WRITE && io2->type == FRV_IO_WRITE)
7796 return true;
7797
7798 /* The order of reads isn't preserved. */
7799 if (io1->type != FRV_IO_WRITE && io2->type != FRV_IO_WRITE)
7800 return false;
7801
7802 /* One operation is a write and the other is (or could be) a read.
7803 The order is only guaranteed if the accesses are to the same
7804 doubleword. */
7805 return frv_same_doubleword_p (io1, io2);
7806 }
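
/* To illustrate the rules above for two hypothetical accesses X and Y,
   performed in that order:

     X write, Y write  -> always ordered.
     X read,  Y read   -> never guaranteed to be ordered.
     otherwise         -> ordered only if X and Y touch the same
                          doubleword (frv_same_doubleword_p).  */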
7807
7808 /* Generalize I/O operation X so that it covers both X and Y. */
7809
7810 static void
7811 frv_io_union (struct frv_io *x, const struct frv_io *y)
7812 {
7813 if (x->type != y->type)
7814 x->type = FRV_IO_UNKNOWN;
7815 if (!frv_same_doubleword_p (x, y))
7816 {
7817 x->const_address = 0;
7818 x->var_address = 0;
7819 }
7820 }
7821
7822 /* Fill IO with information about the load or store associated with
7823 membar instruction INSN. */
7824
7825 static void
7826 frv_extract_membar (struct frv_io *io, rtx insn)
7827 {
7828 extract_insn (insn);
7829 io->type = INTVAL (recog_data.operand[2]);
7830 io->const_address = INTVAL (recog_data.operand[1]);
7831 io->var_address = XEXP (recog_data.operand[0], 0);
7832 }
7833
7834 /* A note_stores callback for which DATA points to an rtx. Nullify *DATA
7835 if X is a register and *DATA depends on X. */
7836
7837 static void
7838 frv_io_check_address (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7839 {
7840 rtx *other = (rtx *) data;
7841
7842 if (REG_P (x) && *other != 0 && reg_overlap_mentioned_p (x, *other))
7843 *other = 0;
7844 }
7845
7846 /* A note_stores callback for which DATA points to a HARD_REG_SET.
7847 Remove every modified register from the set. */
7848
7849 static void
7850 frv_io_handle_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
7851 {
7852 HARD_REG_SET *set = (HARD_REG_SET *) data;
7853 unsigned int regno;
7854
7855 if (REG_P (x))
7856 FOR_EACH_REGNO (regno, x)
7857 CLEAR_HARD_REG_BIT (*set, regno);
7858 }
7859
7860 /* A for_each_rtx callback for which DATA points to a HARD_REG_SET.
7861 Add every register in *X to the set. */
7862
7863 static int
7864 frv_io_handle_use_1 (rtx *x, void *data)
7865 {
7866 HARD_REG_SET *set = (HARD_REG_SET *) data;
7867 unsigned int regno;
7868
7869 if (REG_P (*x))
7870 FOR_EACH_REGNO (regno, *x)
7871 SET_HARD_REG_BIT (*set, regno);
7872
7873 return 0;
7874 }
7875
7876 /* A note_uses callback that applies frv_io_handle_use_1 to an
7877 entire rhs value. */
7878
7879 static void
7880 frv_io_handle_use (rtx *x, void *data)
7881 {
7882 for_each_rtx (x, frv_io_handle_use_1, data);
7883 }
7884
7885 /* Go through block BB looking for membars to remove. There are two
7886 cases where intra-block analysis is enough:
7887
7888 - a membar is redundant if it occurs between two consecutive I/O
7889 operations and if those operations are guaranteed to complete
7890 in order.
7891
7892 - a membar for a __builtin_read is redundant if the result is
7893 used before the next I/O operation is issued.
7894
7895 If the last membar in the block could not be removed, and there
7896 are guaranteed to be no I/O operations between that membar and
7897 the end of the block, store the membar in *LAST_MEMBAR, otherwise
7898 store null.
7899
7900 Describe the block's first I/O operation in *NEXT_IO. Describe
7901 an unknown operation if the block doesn't do any I/O. */
7902
7903 static void
7904 frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
7905 rtx *last_membar)
7906 {
7907 HARD_REG_SET used_regs;
7908 rtx next_membar, set, insn;
7909 bool next_is_end_p;
7910
7911 /* NEXT_IO is the next I/O operation to be performed after the current
7912 instruction. It starts off as being an unknown operation. */
7913 memset (next_io, 0, sizeof (*next_io));
7914
7915 /* NEXT_IS_END_P is true if NEXT_IO describes the end of the block. */
7916 next_is_end_p = true;
7917
7918 /* If the current instruction is a __builtin_read or __builtin_write,
7919 NEXT_MEMBAR is the membar instruction associated with it. NEXT_MEMBAR
7920 is null if the membar has already been deleted.
7921
7922 Note that the initialization here should only be needed to
7923 suppress warnings. */
7924 next_membar = 0;
7925
7926 /* USED_REGS is the set of registers that are used before the
7927 next I/O instruction. */
7928 CLEAR_HARD_REG_SET (used_regs);
7929
7930 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
7931 if (GET_CODE (insn) == CALL_INSN)
7932 {
7933 /* We can't predict what a call will do to volatile memory. */
7934 memset (next_io, 0, sizeof (struct frv_io));
7935 next_is_end_p = false;
7936 CLEAR_HARD_REG_SET (used_regs);
7937 }
7938 else if (INSN_P (insn))
7939 switch (recog_memoized (insn))
7940 {
7941 case CODE_FOR_optional_membar_qi:
7942 case CODE_FOR_optional_membar_hi:
7943 case CODE_FOR_optional_membar_si:
7944 case CODE_FOR_optional_membar_di:
7945 next_membar = insn;
7946 if (next_is_end_p)
7947 {
7948 /* Local information isn't enough to decide whether this
7949 membar is needed. Stash it away for later. */
7950 *last_membar = insn;
7951 frv_extract_membar (next_io, insn);
7952 next_is_end_p = false;
7953 }
7954 else
7955 {
7956 /* Check whether the I/O operation before INSN could be
7957              reordered with the one described by NEXT_IO.  If it can't,
7958 INSN will not be needed. */
7959 struct frv_io prev_io;
7960
7961 frv_extract_membar (&prev_io, insn);
7962 if (frv_io_fixed_order_p (&prev_io, next_io))
7963 {
7964 if (dump_file)
7965 fprintf (dump_file,
7966 ";; [Local] Removing membar %d since order"
7967 " of accesses is guaranteed\n",
7968 INSN_UID (next_membar));
7969
7970 insn = NEXT_INSN (insn);
7971 delete_insn (next_membar);
7972 next_membar = 0;
7973 }
7974 *next_io = prev_io;
7975 }
7976 break;
7977
7978 default:
7979 /* Invalidate NEXT_IO's address if it depends on something that
7980 is clobbered by INSN. */
7981 if (next_io->var_address)
7982 note_stores (PATTERN (insn), frv_io_check_address,
7983 &next_io->var_address);
7984
7985 /* If the next membar is associated with a __builtin_read,
7986 see if INSN reads from that address. If it does, and if
7987 the destination register is used before the next I/O access,
7988 there is no need for the membar. */
7989 set = PATTERN (insn);
7990 if (next_io->type == FRV_IO_READ
7991 && next_io->var_address != 0
7992 && next_membar != 0
7993 && GET_CODE (set) == SET
7994 && GET_CODE (SET_DEST (set)) == REG
7995 && TEST_HARD_REG_BIT (used_regs, REGNO (SET_DEST (set))))
7996 {
7997 rtx src;
7998
7999 src = SET_SRC (set);
8000 if (GET_CODE (src) == ZERO_EXTEND)
8001 src = XEXP (src, 0);
8002
8003 if (GET_CODE (src) == MEM
8004 && rtx_equal_p (XEXP (src, 0), next_io->var_address))
8005 {
8006 if (dump_file)
8007 fprintf (dump_file,
8008 ";; [Local] Removing membar %d since the target"
8009 " of %d is used before the I/O operation\n",
8010 INSN_UID (next_membar), INSN_UID (insn));
8011
8012 if (next_membar == *last_membar)
8013 *last_membar = 0;
8014
8015 delete_insn (next_membar);
8016 next_membar = 0;
8017 }
8018 }
8019
8020 /* If INSN has volatile references, forget about any registers
8021 that are used after it. Otherwise forget about uses that
8022 are (or might be) defined by INSN. */
8023 if (volatile_refs_p (PATTERN (insn)))
8024 CLEAR_HARD_REG_SET (used_regs);
8025 else
8026 note_stores (PATTERN (insn), frv_io_handle_set, &used_regs);
8027
8028 note_uses (&PATTERN (insn), frv_io_handle_use, &used_regs);
8029 break;
8030 }
8031 }
8032
8033 /* See if MEMBAR, the last membar instruction in BB, can be removed.
8034 FIRST_IO[X] describes the first operation performed by basic block X. */
8035
8036 static void
8037 frv_optimize_membar_global (basic_block bb, struct frv_io *first_io,
8038 rtx membar)
8039 {
8040 struct frv_io this_io, next_io;
8041 edge succ;
8042 edge_iterator ei;
8043
8044 /* We need to keep the membar if there is an edge to the exit block. */
8045 FOR_EACH_EDGE (succ, ei, bb->succs)
8047 if (succ->dest == EXIT_BLOCK_PTR)
8048 return;
8049
8050 /* Work out the union of all successor blocks. */
8051 ei = ei_start (bb->succs);
8052 ei_cond (ei, &succ);
8054 next_io = first_io[succ->dest->index];
8055 ei = ei_start (bb->succs);
8056 if (ei_cond (ei, &succ))
8057 {
8058 for (ei_next (&ei); ei_cond (ei, &succ); ei_next (&ei))
8060 frv_io_union (&next_io, &first_io[succ->dest->index]);
8061 }
8062 else
8063 gcc_unreachable ();
8064
8065 frv_extract_membar (&this_io, membar);
8066 if (frv_io_fixed_order_p (&this_io, &next_io))
8067 {
8068 if (dump_file)
8069 fprintf (dump_file,
8070 ";; [Global] Removing membar %d since order of accesses"
8071 " is guaranteed\n", INSN_UID (membar));
8072
8073 delete_insn (membar);
8074 }
8075 }
8076
8077 /* Remove redundant membars from the current function. */
8078
8079 static void
8080 frv_optimize_membar (void)
8081 {
8082 basic_block bb;
8083 struct frv_io *first_io;
8084 rtx *last_membar;
8085
8086 compute_bb_for_insn ();
8087 first_io = XCNEWVEC (struct frv_io, last_basic_block);
8088 last_membar = XCNEWVEC (rtx, last_basic_block);
8089
8090 FOR_EACH_BB (bb)
8091 frv_optimize_membar_local (bb, &first_io[bb->index],
8092 &last_membar[bb->index]);
8093
8094 FOR_EACH_BB (bb)
8095 if (last_membar[bb->index] != 0)
8096 frv_optimize_membar_global (bb, first_io, last_membar[bb->index]);
8097
8098 free (first_io);
8099 free (last_membar);
8100 }
8101 \f
8102 /* Used by frv_reorg to keep track of the current packet's address. */
8103 static unsigned int frv_packet_address;
8104
8105 /* If the current packet falls through to a label, try to pad the packet
8106 with nops in order to fit the label's alignment requirements. */
8107
8108 static void
8109 frv_align_label (void)
8110 {
8111 unsigned int alignment, target, nop;
8112 rtx x, last, barrier, label;
8113
8114 /* Walk forward to the start of the next packet. Set ALIGNMENT to the
8115 maximum alignment of that packet, LABEL to the last label between
8116 the packets, and BARRIER to the last barrier. */
8117 last = frv_packet.insns[frv_packet.num_insns - 1];
8118 label = barrier = 0;
8119 alignment = 4;
8120 for (x = NEXT_INSN (last); x != 0 && !INSN_P (x); x = NEXT_INSN (x))
8121 {
8122 if (LABEL_P (x))
8123 {
8124 unsigned int subalign = 1 << label_to_alignment (x);
8125 alignment = MAX (alignment, subalign);
8126 label = x;
8127 }
8128 if (BARRIER_P (x))
8129 barrier = x;
8130 }
8131
8132 /* If -malign-labels, and the packet falls through to an unaligned
8133 label, try introducing a nop to align that label to 8 bytes. */
8134 if (TARGET_ALIGN_LABELS
8135 && label != 0
8136 && barrier == 0
8137 && frv_packet.num_insns < frv_packet.issue_rate)
8138 alignment = MAX (alignment, 8);
8139
8140 /* Advance the address to the end of the current packet. */
8141 frv_packet_address += frv_packet.num_insns * 4;
8142
8143 /* Work out the target address, after alignment. */
8144 target = (frv_packet_address + alignment - 1) & -alignment;
8145
8146 /* If the packet falls through to the label, try to find an efficient
8147 padding sequence. */
8148 if (barrier == 0)
8149 {
8150 /* First try adding nops to the current packet. */
8151 for (nop = 0; nop < frv_num_nops; nop++)
8152 while (frv_packet_address < target && frv_pack_insn_p (frv_nops[nop]))
8153 {
8154 frv_insert_nop_in_packet (frv_nops[nop]);
8155 frv_packet_address += 4;
8156 }
8157
8158 /* If we still haven't reached the target, add some new packets that
8159 contain only nops. If there are two types of nop, insert an
8160 alternating sequence of frv_nops[0] and frv_nops[1], which will
8161 lead to packets like:
8162
8163 nop.p
8164 mnop.p/fnop.p
8165 nop.p
8166 mnop/fnop
8167
8168 etc. Just emit frv_nops[0] if that's the only nop we have. */
8169 last = frv_packet.insns[frv_packet.num_insns - 1];
8170 nop = 0;
8171 while (frv_packet_address < target)
8172 {
8173 last = emit_insn_after (PATTERN (frv_nops[nop]), last);
8174 frv_packet_address += 4;
8175 if (frv_num_nops > 1)
8176 nop ^= 1;
8177 }
8178 }
8179
8180 frv_packet_address = target;
8181 }
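
/* A small worked example (the addresses are hypothetical): if the packet
   ends at FRV_PACKET_ADDRESS == 20 and the label that follows wants
   8-byte alignment, then

       target = (20 + 8 - 1) & -8 == 24,

   so one 4-byte nop is required.  The code above first tries to add it
   to the current packet and otherwise emits a nop-only packet.  */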
8182
8183 /* Subroutine of frv_reorg, called after each packet has been constructed
8184 in frv_packet. */
8185
8186 static void
8187 frv_reorg_packet (void)
8188 {
8189 frv_fill_unused_units (GROUP_I);
8190 frv_fill_unused_units (GROUP_FM);
8191 frv_align_label ();
8192 }
8193
8194 /* Add an instruction with pattern NOP to frv_nops[]. */
8195
8196 static void
8197 frv_register_nop (rtx nop)
8198 {
8199 nop = make_insn_raw (nop);
8200 NEXT_INSN (nop) = 0;
8201 PREV_INSN (nop) = 0;
8202 frv_nops[frv_num_nops++] = nop;
8203 }
8204
8205 /* Implement TARGET_MACHINE_DEPENDENT_REORG. Divide the instructions
8206 into packets and check whether we need to insert nops in order to
8207 fulfill the processor's issue requirements. Also, if the user has
8208 requested a certain alignment for a label, try to meet that alignment
8209 by inserting nops in the previous packet. */
8210
8211 static void
8212 frv_reorg (void)
8213 {
8214 if (optimize > 0 && TARGET_OPTIMIZE_MEMBAR && cfun->machine->has_membar_p)
8215 frv_optimize_membar ();
8216
8217 frv_num_nops = 0;
8218 frv_register_nop (gen_nop ());
8219 if (TARGET_MEDIA)
8220 frv_register_nop (gen_mnop ());
8221 if (TARGET_HARD_FLOAT)
8222 frv_register_nop (gen_fnop ());
8223
8224 /* Estimate the length of each branch. Although this may change after
8225 we've inserted nops, it will only do so in big functions. */
8226 shorten_branches (get_insns ());
8227
8228 frv_packet_address = 0;
8229 frv_for_each_packet (frv_reorg_packet);
8230 }
8231 \f
8232 #define def_builtin(name, type, code) \
8233 add_builtin_function ((name), (type), (code), BUILT_IN_MD, NULL, NULL)
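
/* For example, the call made in frv_init_builtins below,

       def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);

   is simply shorthand for

       add_builtin_function ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT,
                             BUILT_IN_MD, NULL, NULL);  */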
8234
8235 struct builtin_description
8236 {
8237 enum insn_code icode;
8238 const char *name;
8239 enum frv_builtins code;
8240 enum rtx_code comparison;
8241 unsigned int flag;
8242 };
8243
8244 /* Media intrinsics that take a single, constant argument. */
8245
8246 static struct builtin_description bdesc_set[] =
8247 {
8248 { CODE_FOR_mhdsets, "__MHDSETS", FRV_BUILTIN_MHDSETS, 0, 0 }
8249 };
8250
8251 /* Media intrinsics that take just one argument. */
8252
8253 static struct builtin_description bdesc_1arg[] =
8254 {
8255 { CODE_FOR_mnot, "__MNOT", FRV_BUILTIN_MNOT, 0, 0 },
8256 { CODE_FOR_munpackh, "__MUNPACKH", FRV_BUILTIN_MUNPACKH, 0, 0 },
8257 { CODE_FOR_mbtoh, "__MBTOH", FRV_BUILTIN_MBTOH, 0, 0 },
8258 { CODE_FOR_mhtob, "__MHTOB", FRV_BUILTIN_MHTOB, 0, 0 },
8259 { CODE_FOR_mabshs, "__MABSHS", FRV_BUILTIN_MABSHS, 0, 0 },
8260 { CODE_FOR_scutss, "__SCUTSS", FRV_BUILTIN_SCUTSS, 0, 0 }
8261 };
8262
8263 /* Media intrinsics that take two arguments. */
8264
8265 static struct builtin_description bdesc_2arg[] =
8266 {
8267 { CODE_FOR_mand, "__MAND", FRV_BUILTIN_MAND, 0, 0 },
8268 { CODE_FOR_mor, "__MOR", FRV_BUILTIN_MOR, 0, 0 },
8269 { CODE_FOR_mxor, "__MXOR", FRV_BUILTIN_MXOR, 0, 0 },
8270 { CODE_FOR_maveh, "__MAVEH", FRV_BUILTIN_MAVEH, 0, 0 },
8271 { CODE_FOR_msaths, "__MSATHS", FRV_BUILTIN_MSATHS, 0, 0 },
8272 { CODE_FOR_msathu, "__MSATHU", FRV_BUILTIN_MSATHU, 0, 0 },
8273 { CODE_FOR_maddhss, "__MADDHSS", FRV_BUILTIN_MADDHSS, 0, 0 },
8274 { CODE_FOR_maddhus, "__MADDHUS", FRV_BUILTIN_MADDHUS, 0, 0 },
8275 { CODE_FOR_msubhss, "__MSUBHSS", FRV_BUILTIN_MSUBHSS, 0, 0 },
8276 { CODE_FOR_msubhus, "__MSUBHUS", FRV_BUILTIN_MSUBHUS, 0, 0 },
8277 { CODE_FOR_mqaddhss, "__MQADDHSS", FRV_BUILTIN_MQADDHSS, 0, 0 },
8278 { CODE_FOR_mqaddhus, "__MQADDHUS", FRV_BUILTIN_MQADDHUS, 0, 0 },
8279 { CODE_FOR_mqsubhss, "__MQSUBHSS", FRV_BUILTIN_MQSUBHSS, 0, 0 },
8280 { CODE_FOR_mqsubhus, "__MQSUBHUS", FRV_BUILTIN_MQSUBHUS, 0, 0 },
8281 { CODE_FOR_mpackh, "__MPACKH", FRV_BUILTIN_MPACKH, 0, 0 },
8282 { CODE_FOR_mcop1, "__Mcop1", FRV_BUILTIN_MCOP1, 0, 0 },
8283 { CODE_FOR_mcop2, "__Mcop2", FRV_BUILTIN_MCOP2, 0, 0 },
8284 { CODE_FOR_mwcut, "__MWCUT", FRV_BUILTIN_MWCUT, 0, 0 },
8285 { CODE_FOR_mqsaths, "__MQSATHS", FRV_BUILTIN_MQSATHS, 0, 0 },
8286 { CODE_FOR_mqlclrhs, "__MQLCLRHS", FRV_BUILTIN_MQLCLRHS, 0, 0 },
8287 { CODE_FOR_mqlmths, "__MQLMTHS", FRV_BUILTIN_MQLMTHS, 0, 0 },
8288 { CODE_FOR_smul, "__SMUL", FRV_BUILTIN_SMUL, 0, 0 },
8289 { CODE_FOR_umul, "__UMUL", FRV_BUILTIN_UMUL, 0, 0 },
8290 { CODE_FOR_addss, "__ADDSS", FRV_BUILTIN_ADDSS, 0, 0 },
8291 { CODE_FOR_subss, "__SUBSS", FRV_BUILTIN_SUBSS, 0, 0 },
8292 { CODE_FOR_slass, "__SLASS", FRV_BUILTIN_SLASS, 0, 0 },
8293 { CODE_FOR_scan, "__SCAN", FRV_BUILTIN_SCAN, 0, 0 }
8294 };
8295
8296 /* Integer intrinsics that take two arguments and have no return value. */
8297
8298 static struct builtin_description bdesc_int_void2arg[] =
8299 {
8300 { CODE_FOR_smass, "__SMASS", FRV_BUILTIN_SMASS, 0, 0 },
8301 { CODE_FOR_smsss, "__SMSSS", FRV_BUILTIN_SMSSS, 0, 0 },
8302 { CODE_FOR_smu, "__SMU", FRV_BUILTIN_SMU, 0, 0 }
8303 };
8304
8305 static struct builtin_description bdesc_prefetches[] =
8306 {
8307 { CODE_FOR_frv_prefetch0, "__data_prefetch0", FRV_BUILTIN_PREFETCH0, 0, 0 },
8308 { CODE_FOR_frv_prefetch, "__data_prefetch", FRV_BUILTIN_PREFETCH, 0, 0 }
8309 };
8310
8311 /* Media intrinsics that take two arguments, the first being an ACC number. */
8312
8313 static struct builtin_description bdesc_cut[] =
8314 {
8315 { CODE_FOR_mcut, "__MCUT", FRV_BUILTIN_MCUT, 0, 0 },
8316 { CODE_FOR_mcutss, "__MCUTSS", FRV_BUILTIN_MCUTSS, 0, 0 },
8317 { CODE_FOR_mdcutssi, "__MDCUTSSI", FRV_BUILTIN_MDCUTSSI, 0, 0 }
8318 };
8319
8320 /* Two-argument media intrinsics with an immediate second argument. */
8321
8322 static struct builtin_description bdesc_2argimm[] =
8323 {
8324 { CODE_FOR_mrotli, "__MROTLI", FRV_BUILTIN_MROTLI, 0, 0 },
8325 { CODE_FOR_mrotri, "__MROTRI", FRV_BUILTIN_MROTRI, 0, 0 },
8326 { CODE_FOR_msllhi, "__MSLLHI", FRV_BUILTIN_MSLLHI, 0, 0 },
8327 { CODE_FOR_msrlhi, "__MSRLHI", FRV_BUILTIN_MSRLHI, 0, 0 },
8328 { CODE_FOR_msrahi, "__MSRAHI", FRV_BUILTIN_MSRAHI, 0, 0 },
8329 { CODE_FOR_mexpdhw, "__MEXPDHW", FRV_BUILTIN_MEXPDHW, 0, 0 },
8330 { CODE_FOR_mexpdhd, "__MEXPDHD", FRV_BUILTIN_MEXPDHD, 0, 0 },
8331 { CODE_FOR_mdrotli, "__MDROTLI", FRV_BUILTIN_MDROTLI, 0, 0 },
8332 { CODE_FOR_mcplhi, "__MCPLHI", FRV_BUILTIN_MCPLHI, 0, 0 },
8333 { CODE_FOR_mcpli, "__MCPLI", FRV_BUILTIN_MCPLI, 0, 0 },
8334 { CODE_FOR_mhsetlos, "__MHSETLOS", FRV_BUILTIN_MHSETLOS, 0, 0 },
8335 { CODE_FOR_mhsetloh, "__MHSETLOH", FRV_BUILTIN_MHSETLOH, 0, 0 },
8336 { CODE_FOR_mhsethis, "__MHSETHIS", FRV_BUILTIN_MHSETHIS, 0, 0 },
8337 { CODE_FOR_mhsethih, "__MHSETHIH", FRV_BUILTIN_MHSETHIH, 0, 0 },
8338 { CODE_FOR_mhdseth, "__MHDSETH", FRV_BUILTIN_MHDSETH, 0, 0 },
8339 { CODE_FOR_mqsllhi, "__MQSLLHI", FRV_BUILTIN_MQSLLHI, 0, 0 },
8340 { CODE_FOR_mqsrahi, "__MQSRAHI", FRV_BUILTIN_MQSRAHI, 0, 0 }
8341 };
8342
8343 /* Media intrinsics that take two arguments and return void, the first argument
8344 being a pointer to 4 words in memory. */
8345
8346 static struct builtin_description bdesc_void2arg[] =
8347 {
8348 { CODE_FOR_mdunpackh, "__MDUNPACKH", FRV_BUILTIN_MDUNPACKH, 0, 0 },
8349 { CODE_FOR_mbtohe, "__MBTOHE", FRV_BUILTIN_MBTOHE, 0, 0 },
8350 };
8351
8352 /* Media intrinsics that take three arguments, the first being a const_int that
8353 denotes an accumulator, and that return void. */
8354
8355 static struct builtin_description bdesc_void3arg[] =
8356 {
8357 { CODE_FOR_mcpxrs, "__MCPXRS", FRV_BUILTIN_MCPXRS, 0, 0 },
8358 { CODE_FOR_mcpxru, "__MCPXRU", FRV_BUILTIN_MCPXRU, 0, 0 },
8359 { CODE_FOR_mcpxis, "__MCPXIS", FRV_BUILTIN_MCPXIS, 0, 0 },
8360 { CODE_FOR_mcpxiu, "__MCPXIU", FRV_BUILTIN_MCPXIU, 0, 0 },
8361 { CODE_FOR_mmulhs, "__MMULHS", FRV_BUILTIN_MMULHS, 0, 0 },
8362 { CODE_FOR_mmulhu, "__MMULHU", FRV_BUILTIN_MMULHU, 0, 0 },
8363 { CODE_FOR_mmulxhs, "__MMULXHS", FRV_BUILTIN_MMULXHS, 0, 0 },
8364 { CODE_FOR_mmulxhu, "__MMULXHU", FRV_BUILTIN_MMULXHU, 0, 0 },
8365 { CODE_FOR_mmachs, "__MMACHS", FRV_BUILTIN_MMACHS, 0, 0 },
8366 { CODE_FOR_mmachu, "__MMACHU", FRV_BUILTIN_MMACHU, 0, 0 },
8367 { CODE_FOR_mmrdhs, "__MMRDHS", FRV_BUILTIN_MMRDHS, 0, 0 },
8368 { CODE_FOR_mmrdhu, "__MMRDHU", FRV_BUILTIN_MMRDHU, 0, 0 },
8369 { CODE_FOR_mqcpxrs, "__MQCPXRS", FRV_BUILTIN_MQCPXRS, 0, 0 },
8370 { CODE_FOR_mqcpxru, "__MQCPXRU", FRV_BUILTIN_MQCPXRU, 0, 0 },
8371 { CODE_FOR_mqcpxis, "__MQCPXIS", FRV_BUILTIN_MQCPXIS, 0, 0 },
8372 { CODE_FOR_mqcpxiu, "__MQCPXIU", FRV_BUILTIN_MQCPXIU, 0, 0 },
8373 { CODE_FOR_mqmulhs, "__MQMULHS", FRV_BUILTIN_MQMULHS, 0, 0 },
8374 { CODE_FOR_mqmulhu, "__MQMULHU", FRV_BUILTIN_MQMULHU, 0, 0 },
8375 { CODE_FOR_mqmulxhs, "__MQMULXHS", FRV_BUILTIN_MQMULXHS, 0, 0 },
8376 { CODE_FOR_mqmulxhu, "__MQMULXHU", FRV_BUILTIN_MQMULXHU, 0, 0 },
8377 { CODE_FOR_mqmachs, "__MQMACHS", FRV_BUILTIN_MQMACHS, 0, 0 },
8378 { CODE_FOR_mqmachu, "__MQMACHU", FRV_BUILTIN_MQMACHU, 0, 0 },
8379 { CODE_FOR_mqxmachs, "__MQXMACHS", FRV_BUILTIN_MQXMACHS, 0, 0 },
8380 { CODE_FOR_mqxmacxhs, "__MQXMACXHS", FRV_BUILTIN_MQXMACXHS, 0, 0 },
8381 { CODE_FOR_mqmacxhs, "__MQMACXHS", FRV_BUILTIN_MQMACXHS, 0, 0 }
8382 };
8383
8384 /* Media intrinsics that take two accumulator numbers as argument and
8385 return void. */
8386
8387 static struct builtin_description bdesc_voidacc[] =
8388 {
8389 { CODE_FOR_maddaccs, "__MADDACCS", FRV_BUILTIN_MADDACCS, 0, 0 },
8390 { CODE_FOR_msubaccs, "__MSUBACCS", FRV_BUILTIN_MSUBACCS, 0, 0 },
8391 { CODE_FOR_masaccs, "__MASACCS", FRV_BUILTIN_MASACCS, 0, 0 },
8392 { CODE_FOR_mdaddaccs, "__MDADDACCS", FRV_BUILTIN_MDADDACCS, 0, 0 },
8393 { CODE_FOR_mdsubaccs, "__MDSUBACCS", FRV_BUILTIN_MDSUBACCS, 0, 0 },
8394 { CODE_FOR_mdasaccs, "__MDASACCS", FRV_BUILTIN_MDASACCS, 0, 0 }
8395 };
8396
8397 /* Intrinsics that load a value and then issue a MEMBAR. The load is
8398 a normal move and the ICODE is for the membar. */
8399
8400 static struct builtin_description bdesc_loads[] =
8401 {
8402 { CODE_FOR_optional_membar_qi, "__builtin_read8",
8403 FRV_BUILTIN_READ8, 0, 0 },
8404 { CODE_FOR_optional_membar_hi, "__builtin_read16",
8405 FRV_BUILTIN_READ16, 0, 0 },
8406 { CODE_FOR_optional_membar_si, "__builtin_read32",
8407 FRV_BUILTIN_READ32, 0, 0 },
8408 { CODE_FOR_optional_membar_di, "__builtin_read64",
8409 FRV_BUILTIN_READ64, 0, 0 }
8410 };
8411
8412 /* Likewise stores. */
8413
8414 static struct builtin_description bdesc_stores[] =
8415 {
8416 { CODE_FOR_optional_membar_qi, "__builtin_write8",
8417 FRV_BUILTIN_WRITE8, 0, 0 },
8418 { CODE_FOR_optional_membar_hi, "__builtin_write16",
8419 FRV_BUILTIN_WRITE16, 0, 0 },
8420 { CODE_FOR_optional_membar_si, "__builtin_write32",
8421 FRV_BUILTIN_WRITE32, 0, 0 },
8422 { CODE_FOR_optional_membar_di, "__builtin_write64",
8423 FRV_BUILTIN_WRITE64, 0, 0 },
8424 };
8425
8426 /* Initialize media builtins. */
8427
8428 static void
8429 frv_init_builtins (void)
8430 {
8431 tree endlink = void_list_node;
8432 tree accumulator = integer_type_node;
8433 tree integer = integer_type_node;
8434 tree voidt = void_type_node;
8435 tree uhalf = short_unsigned_type_node;
8436 tree sword1 = long_integer_type_node;
8437 tree uword1 = long_unsigned_type_node;
8438 tree sword2 = long_long_integer_type_node;
8439 tree uword2 = long_long_unsigned_type_node;
8440 tree uword4 = build_pointer_type (uword1);
8441 tree vptr = build_pointer_type (build_type_variant (void_type_node, 0, 1));
8442 tree ubyte = unsigned_char_type_node;
8443 tree iacc = integer_type_node;
8444
8445 #define UNARY(RET, T1) \
8446 build_function_type (RET, tree_cons (NULL_TREE, T1, endlink))
8447
8448 #define BINARY(RET, T1, T2) \
8449 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8450 tree_cons (NULL_TREE, T2, endlink)))
8451
8452 #define TRINARY(RET, T1, T2, T3) \
8453 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8454 tree_cons (NULL_TREE, T2, \
8455 tree_cons (NULL_TREE, T3, endlink))))
8456
8457 #define QUAD(RET, T1, T2, T3, T4) \
8458 build_function_type (RET, tree_cons (NULL_TREE, T1, \
8459 tree_cons (NULL_TREE, T2, \
8460 tree_cons (NULL_TREE, T3, \
8461 tree_cons (NULL_TREE, T4, endlink)))))
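
/* As an illustration of the helpers above, the declaration

       tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);

   used further down builds the function type uword1 (uword1, uword1) by
   chaining two tree_cons argument nodes onto the terminating endlink.  */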
8462
8463 tree void_ftype_void = build_function_type (voidt, endlink);
8464
8465 tree void_ftype_acc = UNARY (voidt, accumulator);
8466 tree void_ftype_uw4_uw1 = BINARY (voidt, uword4, uword1);
8467 tree void_ftype_uw4_uw2 = BINARY (voidt, uword4, uword2);
8468 tree void_ftype_acc_uw1 = BINARY (voidt, accumulator, uword1);
8469 tree void_ftype_acc_acc = BINARY (voidt, accumulator, accumulator);
8470 tree void_ftype_acc_uw1_uw1 = TRINARY (voidt, accumulator, uword1, uword1);
8471 tree void_ftype_acc_sw1_sw1 = TRINARY (voidt, accumulator, sword1, sword1);
8472 tree void_ftype_acc_uw2_uw2 = TRINARY (voidt, accumulator, uword2, uword2);
8473 tree void_ftype_acc_sw2_sw2 = TRINARY (voidt, accumulator, sword2, sword2);
8474
8475 tree uw1_ftype_uw1 = UNARY (uword1, uword1);
8476 tree uw1_ftype_sw1 = UNARY (uword1, sword1);
8477 tree uw1_ftype_uw2 = UNARY (uword1, uword2);
8478 tree uw1_ftype_acc = UNARY (uword1, accumulator);
8479 tree uw1_ftype_uh_uh = BINARY (uword1, uhalf, uhalf);
8480 tree uw1_ftype_uw1_uw1 = BINARY (uword1, uword1, uword1);
8481 tree uw1_ftype_uw1_int = BINARY (uword1, uword1, integer);
8482 tree uw1_ftype_acc_uw1 = BINARY (uword1, accumulator, uword1);
8483 tree uw1_ftype_acc_sw1 = BINARY (uword1, accumulator, sword1);
8484 tree uw1_ftype_uw2_uw1 = BINARY (uword1, uword2, uword1);
8485 tree uw1_ftype_uw2_int = BINARY (uword1, uword2, integer);
8486
8487 tree sw1_ftype_int = UNARY (sword1, integer);
8488 tree sw1_ftype_sw1_sw1 = BINARY (sword1, sword1, sword1);
8489 tree sw1_ftype_sw1_int = BINARY (sword1, sword1, integer);
8490
8491 tree uw2_ftype_uw1 = UNARY (uword2, uword1);
8492 tree uw2_ftype_uw1_int = BINARY (uword2, uword1, integer);
8493 tree uw2_ftype_uw2_uw2 = BINARY (uword2, uword2, uword2);
8494 tree uw2_ftype_uw2_int = BINARY (uword2, uword2, integer);
8495 tree uw2_ftype_acc_int = BINARY (uword2, accumulator, integer);
8496 tree uw2_ftype_uh_uh_uh_uh = QUAD (uword2, uhalf, uhalf, uhalf, uhalf);
8497
8498 tree sw2_ftype_sw2_sw2 = BINARY (sword2, sword2, sword2);
8499 tree sw2_ftype_sw2_int = BINARY (sword2, sword2, integer);
8500 tree uw2_ftype_uw1_uw1 = BINARY (uword2, uword1, uword1);
8501 tree sw2_ftype_sw1_sw1 = BINARY (sword2, sword1, sword1);
8502 tree void_ftype_sw1_sw1 = BINARY (voidt, sword1, sword1);
8503 tree void_ftype_iacc_sw2 = BINARY (voidt, iacc, sword2);
8504 tree void_ftype_iacc_sw1 = BINARY (voidt, iacc, sword1);
8505 tree sw1_ftype_sw1 = UNARY (sword1, sword1);
8506 tree sw2_ftype_iacc = UNARY (sword2, iacc);
8507 tree sw1_ftype_iacc = UNARY (sword1, iacc);
8508 tree void_ftype_ptr = UNARY (voidt, const_ptr_type_node);
8509 tree uw1_ftype_vptr = UNARY (uword1, vptr);
8510 tree uw2_ftype_vptr = UNARY (uword2, vptr);
8511 tree void_ftype_vptr_ub = BINARY (voidt, vptr, ubyte);
8512 tree void_ftype_vptr_uh = BINARY (voidt, vptr, uhalf);
8513 tree void_ftype_vptr_uw1 = BINARY (voidt, vptr, uword1);
8514 tree void_ftype_vptr_uw2 = BINARY (voidt, vptr, uword2);
8515
8516 def_builtin ("__MAND", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAND);
8517 def_builtin ("__MOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MOR);
8518 def_builtin ("__MXOR", uw1_ftype_uw1_uw1, FRV_BUILTIN_MXOR);
8519 def_builtin ("__MNOT", uw1_ftype_uw1, FRV_BUILTIN_MNOT);
8520 def_builtin ("__MROTLI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTLI);
8521 def_builtin ("__MROTRI", uw1_ftype_uw1_int, FRV_BUILTIN_MROTRI);
8522 def_builtin ("__MWCUT", uw1_ftype_uw2_uw1, FRV_BUILTIN_MWCUT);
8523 def_builtin ("__MAVEH", uw1_ftype_uw1_uw1, FRV_BUILTIN_MAVEH);
8524 def_builtin ("__MSLLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSLLHI);
8525 def_builtin ("__MSRLHI", uw1_ftype_uw1_int, FRV_BUILTIN_MSRLHI);
8526 def_builtin ("__MSRAHI", sw1_ftype_sw1_int, FRV_BUILTIN_MSRAHI);
8527 def_builtin ("__MSATHS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSATHS);
8528 def_builtin ("__MSATHU", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSATHU);
8529 def_builtin ("__MADDHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MADDHSS);
8530 def_builtin ("__MADDHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MADDHUS);
8531 def_builtin ("__MSUBHSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_MSUBHSS);
8532 def_builtin ("__MSUBHUS", uw1_ftype_uw1_uw1, FRV_BUILTIN_MSUBHUS);
8533 def_builtin ("__MMULHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULHS);
8534 def_builtin ("__MMULHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULHU);
8535 def_builtin ("__MMULXHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMULXHS);
8536 def_builtin ("__MMULXHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMULXHU);
8537 def_builtin ("__MMACHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMACHS);
8538 def_builtin ("__MMACHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMACHU);
8539 def_builtin ("__MMRDHS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MMRDHS);
8540 def_builtin ("__MMRDHU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MMRDHU);
8541 def_builtin ("__MQADDHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQADDHSS);
8542 def_builtin ("__MQADDHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQADDHUS);
8543 def_builtin ("__MQSUBHSS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSUBHSS);
8544 def_builtin ("__MQSUBHUS", uw2_ftype_uw2_uw2, FRV_BUILTIN_MQSUBHUS);
8545 def_builtin ("__MQMULHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULHS);
8546 def_builtin ("__MQMULHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULHU);
8547 def_builtin ("__MQMULXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMULXHS);
8548 def_builtin ("__MQMULXHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMULXHU);
8549 def_builtin ("__MQMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACHS);
8550 def_builtin ("__MQMACHU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQMACHU);
8551 def_builtin ("__MCPXRS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXRS);
8552 def_builtin ("__MCPXRU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXRU);
8553 def_builtin ("__MCPXIS", void_ftype_acc_sw1_sw1, FRV_BUILTIN_MCPXIS);
8554 def_builtin ("__MCPXIU", void_ftype_acc_uw1_uw1, FRV_BUILTIN_MCPXIU);
8555 def_builtin ("__MQCPXRS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXRS);
8556 def_builtin ("__MQCPXRU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXRU);
8557 def_builtin ("__MQCPXIS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQCPXIS);
8558 def_builtin ("__MQCPXIU", void_ftype_acc_uw2_uw2, FRV_BUILTIN_MQCPXIU);
8559 def_builtin ("__MCUT", uw1_ftype_acc_uw1, FRV_BUILTIN_MCUT);
8560 def_builtin ("__MCUTSS", uw1_ftype_acc_sw1, FRV_BUILTIN_MCUTSS);
8561 def_builtin ("__MEXPDHW", uw1_ftype_uw1_int, FRV_BUILTIN_MEXPDHW);
8562 def_builtin ("__MEXPDHD", uw2_ftype_uw1_int, FRV_BUILTIN_MEXPDHD);
8563 def_builtin ("__MPACKH", uw1_ftype_uh_uh, FRV_BUILTIN_MPACKH);
8564 def_builtin ("__MUNPACKH", uw2_ftype_uw1, FRV_BUILTIN_MUNPACKH);
8565 def_builtin ("__MDPACKH", uw2_ftype_uh_uh_uh_uh, FRV_BUILTIN_MDPACKH);
8566 def_builtin ("__MDUNPACKH", void_ftype_uw4_uw2, FRV_BUILTIN_MDUNPACKH);
8567 def_builtin ("__MBTOH", uw2_ftype_uw1, FRV_BUILTIN_MBTOH);
8568 def_builtin ("__MHTOB", uw1_ftype_uw2, FRV_BUILTIN_MHTOB);
8569 def_builtin ("__MBTOHE", void_ftype_uw4_uw1, FRV_BUILTIN_MBTOHE);
8570 def_builtin ("__MCLRACC", void_ftype_acc, FRV_BUILTIN_MCLRACC);
8571 def_builtin ("__MCLRACCA", void_ftype_void, FRV_BUILTIN_MCLRACCA);
8572 def_builtin ("__MRDACC", uw1_ftype_acc, FRV_BUILTIN_MRDACC);
8573 def_builtin ("__MRDACCG", uw1_ftype_acc, FRV_BUILTIN_MRDACCG);
8574 def_builtin ("__MWTACC", void_ftype_acc_uw1, FRV_BUILTIN_MWTACC);
8575 def_builtin ("__MWTACCG", void_ftype_acc_uw1, FRV_BUILTIN_MWTACCG);
8576 def_builtin ("__Mcop1", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP1);
8577 def_builtin ("__Mcop2", uw1_ftype_uw1_uw1, FRV_BUILTIN_MCOP2);
8578 def_builtin ("__MTRAP", void_ftype_void, FRV_BUILTIN_MTRAP);
8579 def_builtin ("__MQXMACHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACHS);
8580 def_builtin ("__MQXMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQXMACXHS);
8581 def_builtin ("__MQMACXHS", void_ftype_acc_sw2_sw2, FRV_BUILTIN_MQMACXHS);
8582 def_builtin ("__MADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MADDACCS);
8583 def_builtin ("__MSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MSUBACCS);
8584 def_builtin ("__MASACCS", void_ftype_acc_acc, FRV_BUILTIN_MASACCS);
8585 def_builtin ("__MDADDACCS", void_ftype_acc_acc, FRV_BUILTIN_MDADDACCS);
8586 def_builtin ("__MDSUBACCS", void_ftype_acc_acc, FRV_BUILTIN_MDSUBACCS);
8587 def_builtin ("__MDASACCS", void_ftype_acc_acc, FRV_BUILTIN_MDASACCS);
8588 def_builtin ("__MABSHS", uw1_ftype_sw1, FRV_BUILTIN_MABSHS);
8589 def_builtin ("__MDROTLI", uw2_ftype_uw2_int, FRV_BUILTIN_MDROTLI);
8590 def_builtin ("__MCPLHI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLHI);
8591 def_builtin ("__MCPLI", uw1_ftype_uw2_int, FRV_BUILTIN_MCPLI);
8592 def_builtin ("__MDCUTSSI", uw2_ftype_acc_int, FRV_BUILTIN_MDCUTSSI);
8593 def_builtin ("__MQSATHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQSATHS);
8594 def_builtin ("__MHSETLOS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETLOS);
8595 def_builtin ("__MHSETHIS", sw1_ftype_sw1_int, FRV_BUILTIN_MHSETHIS);
8596 def_builtin ("__MHDSETS", sw1_ftype_int, FRV_BUILTIN_MHDSETS);
8597 def_builtin ("__MHSETLOH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETLOH);
8598 def_builtin ("__MHSETHIH", uw1_ftype_uw1_int, FRV_BUILTIN_MHSETHIH);
8599 def_builtin ("__MHDSETH", uw1_ftype_uw1_int, FRV_BUILTIN_MHDSETH);
8600 def_builtin ("__MQLCLRHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLCLRHS);
8601 def_builtin ("__MQLMTHS", sw2_ftype_sw2_sw2, FRV_BUILTIN_MQLMTHS);
8602 def_builtin ("__MQSLLHI", uw2_ftype_uw2_int, FRV_BUILTIN_MQSLLHI);
8603 def_builtin ("__MQSRAHI", sw2_ftype_sw2_int, FRV_BUILTIN_MQSRAHI);
8604 def_builtin ("__SMUL", sw2_ftype_sw1_sw1, FRV_BUILTIN_SMUL);
8605 def_builtin ("__UMUL", uw2_ftype_uw1_uw1, FRV_BUILTIN_UMUL);
8606 def_builtin ("__SMASS", void_ftype_sw1_sw1, FRV_BUILTIN_SMASS);
8607 def_builtin ("__SMSSS", void_ftype_sw1_sw1, FRV_BUILTIN_SMSSS);
8608 def_builtin ("__SMU", void_ftype_sw1_sw1, FRV_BUILTIN_SMU);
8609 def_builtin ("__ADDSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_ADDSS);
8610 def_builtin ("__SUBSS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SUBSS);
8611 def_builtin ("__SLASS", sw1_ftype_sw1_sw1, FRV_BUILTIN_SLASS);
8612 def_builtin ("__SCAN", sw1_ftype_sw1_sw1, FRV_BUILTIN_SCAN);
8613 def_builtin ("__SCUTSS", sw1_ftype_sw1, FRV_BUILTIN_SCUTSS);
8614 def_builtin ("__IACCreadll", sw2_ftype_iacc, FRV_BUILTIN_IACCreadll);
8615 def_builtin ("__IACCreadl", sw1_ftype_iacc, FRV_BUILTIN_IACCreadl);
8616 def_builtin ("__IACCsetll", void_ftype_iacc_sw2, FRV_BUILTIN_IACCsetll);
8617 def_builtin ("__IACCsetl", void_ftype_iacc_sw1, FRV_BUILTIN_IACCsetl);
8618 def_builtin ("__data_prefetch0", void_ftype_ptr, FRV_BUILTIN_PREFETCH0);
8619 def_builtin ("__data_prefetch", void_ftype_ptr, FRV_BUILTIN_PREFETCH);
8620 def_builtin ("__builtin_read8", uw1_ftype_vptr, FRV_BUILTIN_READ8);
8621 def_builtin ("__builtin_read16", uw1_ftype_vptr, FRV_BUILTIN_READ16);
8622 def_builtin ("__builtin_read32", uw1_ftype_vptr, FRV_BUILTIN_READ32);
8623 def_builtin ("__builtin_read64", uw2_ftype_vptr, FRV_BUILTIN_READ64);
8624
8625 def_builtin ("__builtin_write8", void_ftype_vptr_ub, FRV_BUILTIN_WRITE8);
8626 def_builtin ("__builtin_write16", void_ftype_vptr_uh, FRV_BUILTIN_WRITE16);
8627 def_builtin ("__builtin_write32", void_ftype_vptr_uw1, FRV_BUILTIN_WRITE32);
8628 def_builtin ("__builtin_write64", void_ftype_vptr_uw2, FRV_BUILTIN_WRITE64);
8629
8630 #undef UNARY
8631 #undef BINARY
8632 #undef TRINARY
8633 #undef QUAD
8634 }
8635
8636 /* Set the names for various arithmetic operations according to the
8637 FRV ABI. */
8638 static void
8639 frv_init_libfuncs (void)
8640 {
8641 set_optab_libfunc (smod_optab, SImode, "__modi");
8642 set_optab_libfunc (umod_optab, SImode, "__umodi");
8643
8644 set_optab_libfunc (add_optab, DImode, "__addll");
8645 set_optab_libfunc (sub_optab, DImode, "__subll");
8646 set_optab_libfunc (smul_optab, DImode, "__mulll");
8647 set_optab_libfunc (sdiv_optab, DImode, "__divll");
8648 set_optab_libfunc (smod_optab, DImode, "__modll");
8649 set_optab_libfunc (umod_optab, DImode, "__umodll");
8650 set_optab_libfunc (and_optab, DImode, "__andll");
8651 set_optab_libfunc (ior_optab, DImode, "__orll");
8652 set_optab_libfunc (xor_optab, DImode, "__xorll");
8653 set_optab_libfunc (one_cmpl_optab, DImode, "__notll");
8654
8655 set_optab_libfunc (add_optab, SFmode, "__addf");
8656 set_optab_libfunc (sub_optab, SFmode, "__subf");
8657 set_optab_libfunc (smul_optab, SFmode, "__mulf");
8658 set_optab_libfunc (sdiv_optab, SFmode, "__divf");
8659
8660 set_optab_libfunc (add_optab, DFmode, "__addd");
8661 set_optab_libfunc (sub_optab, DFmode, "__subd");
8662 set_optab_libfunc (smul_optab, DFmode, "__muld");
8663 set_optab_libfunc (sdiv_optab, DFmode, "__divd");
8664
8665 set_conv_libfunc (sext_optab, DFmode, SFmode, "__ftod");
8666 set_conv_libfunc (trunc_optab, SFmode, DFmode, "__dtof");
8667
8668 set_conv_libfunc (sfix_optab, SImode, SFmode, "__ftoi");
8669 set_conv_libfunc (sfix_optab, DImode, SFmode, "__ftoll");
8670 set_conv_libfunc (sfix_optab, SImode, DFmode, "__dtoi");
8671 set_conv_libfunc (sfix_optab, DImode, DFmode, "__dtoll");
8672
8673 set_conv_libfunc (ufix_optab, SImode, SFmode, "__ftoui");
8674 set_conv_libfunc (ufix_optab, DImode, SFmode, "__ftoull");
8675 set_conv_libfunc (ufix_optab, SImode, DFmode, "__dtoui");
8676 set_conv_libfunc (ufix_optab, DImode, DFmode, "__dtoull");
8677
8678 set_conv_libfunc (sfloat_optab, SFmode, SImode, "__itof");
8679 set_conv_libfunc (sfloat_optab, SFmode, DImode, "__lltof");
8680 set_conv_libfunc (sfloat_optab, DFmode, SImode, "__itod");
8681 set_conv_libfunc (sfloat_optab, DFmode, DImode, "__lltod");
8682 }
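
/* A rough illustration of the effect of the registrations above: a
   64-bit (DImode) multiplication in user code, say

       long long f (long long a, long long b) { return a * b; }

   ends up calling the ABI routine "__mulll" whenever the operation is
   not open-coded, instead of the default libgcc name.  */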
8683
8684 /* Convert an integer constant to an accumulator register. ICODE is the
8685 code of the target instruction, OPNUM is the number of the
8686 accumulator operand and OPVAL is the constant integer. Try both
8687    ACC and ACCG registers; only report an error if neither fits the
8688 instruction. */
8689
8690 static rtx
8691 frv_int_to_acc (enum insn_code icode, int opnum, rtx opval)
8692 {
8693 rtx reg;
8694 int i;
8695
8696 /* ACCs and ACCGs are implicit global registers if media intrinsics
8697      are being used.  We set this up lazily to avoid creating lots of
8698 unnecessary call_insn rtl in non-media code. */
8699 for (i = 0; i <= ACC_MASK; i++)
8700 if ((i & ACC_MASK) == i)
8701 global_regs[i + ACC_FIRST] = global_regs[i + ACCG_FIRST] = 1;
8702
8703 if (GET_CODE (opval) != CONST_INT)
8704 {
8705 error ("accumulator is not a constant integer");
8706 return NULL_RTX;
8707 }
8708 if ((INTVAL (opval) & ~ACC_MASK) != 0)
8709 {
8710 error ("accumulator number is out of bounds");
8711 return NULL_RTX;
8712 }
8713
8714 reg = gen_rtx_REG (insn_data[icode].operand[opnum].mode,
8715 ACC_FIRST + INTVAL (opval));
8716 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8717 SET_REGNO (reg, ACCG_FIRST + INTVAL (opval));
8718
8719 if (! (*insn_data[icode].operand[opnum].predicate) (reg, VOIDmode))
8720 {
8721 error ("inappropriate accumulator for %qs", insn_data[icode].name);
8722 return NULL_RTX;
8723 }
8724 return reg;
8725 }
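
/* For example (a hypothetical expansion): if a media builtin is passed
   the accumulator number 2, the code above first builds the hard
   register ACC_FIRST + 2 in the operand's mode and, should that register
   fail the operand predicate, retries with ACCG_FIRST + 2 before finally
   reporting an error.  */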
8726
8727 /* If an ACC rtx has mode MODE, return the mode that the matching ACCG
8728 should have. */
8729
8730 static enum machine_mode
8731 frv_matching_accg_mode (enum machine_mode mode)
8732 {
8733 switch (mode)
8734 {
8735 case V4SImode:
8736 return V4QImode;
8737
8738 case DImode:
8739 return HImode;
8740
8741 case SImode:
8742 return QImode;
8743
8744 default:
8745 gcc_unreachable ();
8746 }
8747 }
8748
8749 /* Given that a __builtin_read or __builtin_write function is accessing
8750 address ADDRESS, return the value that should be used as operand 1
8751 of the membar. */
8752
8753 static rtx
8754 frv_io_address_cookie (rtx address)
8755 {
8756 return (GET_CODE (address) == CONST_INT
8757 ? GEN_INT (INTVAL (address) / 8 * 8)
8758 : const0_rtx);
8759 }
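
/* Two illustrative values: a constant address of 0x1006 produces the
   cookie GEN_INT (0x1000), i.e. the address rounded down to its
   doubleword, while any non-constant address produces const0_rtx.  */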
8760
8761 /* Return the accumulator guard that should be paired with accumulator
8762 register ACC. The mode of the returned register is in the same
8763 class as ACC, but is four times smaller. */
8764
8765 rtx
8766 frv_matching_accg_for_acc (rtx acc)
8767 {
8768 return gen_rtx_REG (frv_matching_accg_mode (GET_MODE (acc)),
8769 REGNO (acc) - ACC_FIRST + ACCG_FIRST);
8770 }
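
/* For instance, given the DImode accumulator register ACC_FIRST + 3,
   this returns the HImode register ACCG_FIRST + 3, HImode being what
   frv_matching_accg_mode pairs with DImode.  */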
8771
8772 /* Read argument INDEX of the call expression EXP.
8773 Return the value as an rtx. */
8774
8775 static rtx
8776 frv_read_argument (tree exp, unsigned int index)
8777 {
8778 return expand_expr (CALL_EXPR_ARG (exp, index),
8779 NULL_RTX, VOIDmode, 0);
8780 }
8781
8782 /* Like frv_read_argument, but interpret the argument as the number
8783 of an IACC register and return a (reg:MODE ...) rtx for it. */
8784
8785 static rtx
8786 frv_read_iacc_argument (enum machine_mode mode, tree call,
8787 unsigned int index)
8788 {
8789 int i, regno;
8790 rtx op;
8791
8792 op = frv_read_argument (call, index);
8793 if (GET_CODE (op) != CONST_INT
8794 || INTVAL (op) < 0
8795 || INTVAL (op) > IACC_LAST - IACC_FIRST
8796 || ((INTVAL (op) * 4) & (GET_MODE_SIZE (mode) - 1)) != 0)
8797 {
8798 error ("invalid IACC argument");
8799 op = const0_rtx;
8800 }
8801
8802   /* IACCs are implicit global registers.  We set this up lazily to
8803 avoid creating lots of unnecessary call_insn rtl when IACCs aren't
8804 being used. */
8805 regno = INTVAL (op) + IACC_FIRST;
8806 for (i = 0; i < HARD_REGNO_NREGS (regno, mode); i++)
8807 global_regs[regno + i] = 1;
8808
8809 return gen_rtx_REG (mode, regno);
8810 }
8811
8812 /* Return true if OPVAL can be used for operand OPNUM of instruction ICODE.
8813 The instruction should require a constant operand of some sort. The
8814 function prints an error if OPVAL is not valid. */
8815
8816 static int
8817 frv_check_constant_argument (enum insn_code icode, int opnum, rtx opval)
8818 {
8819 if (GET_CODE (opval) != CONST_INT)
8820 {
8821 error ("%qs expects a constant argument", insn_data[icode].name);
8822 return FALSE;
8823 }
8824 if (! (*insn_data[icode].operand[opnum].predicate) (opval, VOIDmode))
8825 {
8826 error ("constant argument out of range for %qs", insn_data[icode].name);
8827 return FALSE;
8828 }
8829 return TRUE;
8830 }
8831
8832 /* Return a legitimate rtx for instruction ICODE's return value. Use TARGET
8833 if it's not null, has the right mode, and satisfies operand 0's
8834 predicate. */
8835
8836 static rtx
8837 frv_legitimize_target (enum insn_code icode, rtx target)
8838 {
8839 enum machine_mode mode = insn_data[icode].operand[0].mode;
8840
8841 if (! target
8842 || GET_MODE (target) != mode
8843 || ! (*insn_data[icode].operand[0].predicate) (target, mode))
8844 return gen_reg_rtx (mode);
8845 else
8846 return target;
8847 }
8848
8849 /* Given that ARG is being passed as operand OPNUM to instruction ICODE,
8850 check whether ARG satisfies the operand's constraints. If it doesn't,
8851 copy ARG to a temporary register and return that. Otherwise return ARG
8852 itself. */
8853
8854 static rtx
8855 frv_legitimize_argument (enum insn_code icode, int opnum, rtx arg)
8856 {
8857 enum machine_mode mode = insn_data[icode].operand[opnum].mode;
8858
8859 if ((*insn_data[icode].operand[opnum].predicate) (arg, mode))
8860 return arg;
8861 else
8862 return copy_to_mode_reg (mode, arg);
8863 }
8864
8865 /* Return a volatile memory reference of mode MODE whose address is ARG. */
8866
8867 static rtx
8868 frv_volatile_memref (enum machine_mode mode, rtx arg)
8869 {
8870 rtx mem;
8871
8872 mem = gen_rtx_MEM (mode, memory_address (mode, arg));
8873 MEM_VOLATILE_P (mem) = 1;
8874 return mem;
8875 }
8876
8877 /* Expand builtins that take a single, constant argument. At the moment,
8878 only MHDSETS falls into this category. */
8879
8880 static rtx
8881 frv_expand_set_builtin (enum insn_code icode, tree call, rtx target)
8882 {
8883 rtx pat;
8884 rtx op0 = frv_read_argument (call, 0);
8885
8886 if (! frv_check_constant_argument (icode, 1, op0))
8887 return NULL_RTX;
8888
8889 target = frv_legitimize_target (icode, target);
8890 pat = GEN_FCN (icode) (target, op0);
8891 if (! pat)
8892 return NULL_RTX;
8893
8894 emit_insn (pat);
8895 return target;
8896 }
8897
8898 /* Expand builtins that take one operand. */
8899
8900 static rtx
8901 frv_expand_unop_builtin (enum insn_code icode, tree call, rtx target)
8902 {
8903 rtx pat;
8904 rtx op0 = frv_read_argument (call, 0);
8905
8906 target = frv_legitimize_target (icode, target);
8907 op0 = frv_legitimize_argument (icode, 1, op0);
8908 pat = GEN_FCN (icode) (target, op0);
8909 if (! pat)
8910 return NULL_RTX;
8911
8912 emit_insn (pat);
8913 return target;
8914 }
8915
8916 /* Expand builtins that take two operands. */
8917
8918 static rtx
8919 frv_expand_binop_builtin (enum insn_code icode, tree call, rtx target)
8920 {
8921 rtx pat;
8922 rtx op0 = frv_read_argument (call, 0);
8923 rtx op1 = frv_read_argument (call, 1);
8924
8925 target = frv_legitimize_target (icode, target);
8926 op0 = frv_legitimize_argument (icode, 1, op0);
8927 op1 = frv_legitimize_argument (icode, 2, op1);
8928 pat = GEN_FCN (icode) (target, op0, op1);
8929 if (! pat)
8930 return NULL_RTX;
8931
8932 emit_insn (pat);
8933 return target;
8934 }
8935
8936 /* Expand cut-style builtins, which take two explicit operands and an implicit
8937 ACCG operand. */
8938
8939 static rtx
8940 frv_expand_cut_builtin (enum insn_code icode, tree call, rtx target)
8941 {
8942 rtx pat;
8943 rtx op0 = frv_read_argument (call, 0);
8944 rtx op1 = frv_read_argument (call, 1);
8945 rtx op2;
8946
8947 target = frv_legitimize_target (icode, target);
8948 op0 = frv_int_to_acc (icode, 1, op0);
8949 if (! op0)
8950 return NULL_RTX;
8951
8952 if (icode == CODE_FOR_mdcutssi || GET_CODE (op1) == CONST_INT)
8953 {
8954 if (! frv_check_constant_argument (icode, 2, op1))
8955 return NULL_RTX;
8956 }
8957 else
8958 op1 = frv_legitimize_argument (icode, 2, op1);
8959
8960 op2 = frv_matching_accg_for_acc (op0);
8961 pat = GEN_FCN (icode) (target, op0, op1, op2);
8962 if (! pat)
8963 return NULL_RTX;
8964
8965 emit_insn (pat);
8966 return target;
8967 }
8968
8969 /* Expand builtins that take two operands and the second is immediate. */
8970
8971 static rtx
8972 frv_expand_binopimm_builtin (enum insn_code icode, tree call, rtx target)
8973 {
8974 rtx pat;
8975 rtx op0 = frv_read_argument (call, 0);
8976 rtx op1 = frv_read_argument (call, 1);
8977
8978 if (! frv_check_constant_argument (icode, 2, op1))
8979 return NULL_RTX;
8980
8981 target = frv_legitimize_target (icode, target);
8982 op0 = frv_legitimize_argument (icode, 1, op0);
8983 pat = GEN_FCN (icode) (target, op0, op1);
8984 if (! pat)
8985 return NULL_RTX;
8986
8987 emit_insn (pat);
8988 return target;
8989 }
8990
8991 /* Expand builtins that take two operands and return void, the first operand
8992 being a pointer to ints. */
8993
8994 static rtx
8995 frv_expand_voidbinop_builtin (enum insn_code icode, tree call)
8996 {
8997 rtx pat;
8998 rtx op0 = frv_read_argument (call, 0);
8999 rtx op1 = frv_read_argument (call, 1);
9000 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
9001 rtx addr;
9002
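/* If the first argument is not already a memory reference, wrap it in one,
   copying it to a register first if it cannot be used directly as an
   offsettable address.  */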
9003 if (GET_CODE (op0) != MEM)
9004 {
9005 rtx reg = op0;
9006
9007 if (! offsettable_address_p (0, mode0, op0))
9008 {
9009 reg = gen_reg_rtx (Pmode);
9010 emit_insn (gen_rtx_SET (VOIDmode, reg, op0));
9011 }
9012
9013 op0 = gen_rtx_MEM (SImode, reg);
9014 }
9015
9016 addr = XEXP (op0, 0);
9017 if (! offsettable_address_p (0, mode0, addr))
9018 addr = copy_to_mode_reg (Pmode, op0);
9019
9020 op0 = change_address (op0, V4SImode, addr);
9021 op1 = frv_legitimize_argument (icode, 1, op1);
9022 pat = GEN_FCN (icode) (op0, op1);
9023 if (! pat)
9024 return 0;
9025
9026 emit_insn (pat);
9027 return 0;
9028 }
9029
9030 /* Expand builtins that take two long operands and return void. */
9031
9032 static rtx
9033 frv_expand_int_void2arg (enum insn_code icode, tree call)
9034 {
9035 rtx pat;
9036 rtx op0 = frv_read_argument (call, 0);
9037 rtx op1 = frv_read_argument (call, 1);
9038
9039 op0 = frv_legitimize_argument (icode, 1, op0);
9040 op1 = frv_legitimize_argument (icode, 1, op1);
9041 pat = GEN_FCN (icode) (op0, op1);
9042 if (! pat)
9043 return NULL_RTX;
9044
9045 emit_insn (pat);
9046 return NULL_RTX;
9047 }
9048
9049 /* Expand prefetch builtins. These take a single address as argument. */
9050
9051 static rtx
9052 frv_expand_prefetches (enum insn_code icode, tree call)
9053 {
9054 rtx pat;
9055 rtx op0 = frv_read_argument (call, 0);
9056
9057 pat = GEN_FCN (icode) (force_reg (Pmode, op0));
9058 if (! pat)
9059 return 0;
9060
9061 emit_insn (pat);
9062 return 0;
9063 }
9064
9065 /* Expand builtins that take three operands and return void. The first
9066 argument must be a constant that describes a pair or quad of accumulators. A
9067 fourth operand, the accumulator guard register that corresponds to the
9068 accumulator, is added automatically. */
9069
9070 static rtx
9071 frv_expand_voidtriop_builtin (enum insn_code icode, tree call)
9072 {
9073 rtx pat;
9074 rtx op0 = frv_read_argument (call, 0);
9075 rtx op1 = frv_read_argument (call, 1);
9076 rtx op2 = frv_read_argument (call, 2);
9077 rtx op3;
9078
9079 op0 = frv_int_to_acc (icode, 0, op0);
9080 if (! op0)
9081 return NULL_RTX;
9082
9083 op1 = frv_legitimize_argument (icode, 1, op1);
9084 op2 = frv_legitimize_argument (icode, 2, op2);
9085 op3 = frv_matching_accg_for_acc (op0);
9086 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9087 if (! pat)
9088 return NULL_RTX;
9089
9090 emit_insn (pat);
9091 return NULL_RTX;
9092 }
9093
9094 /* Expand builtins that perform accumulator-to-accumulator operations.
9095 These builtins take two accumulator numbers as arguments and return
9096 void. */
9097
9098 static rtx
9099 frv_expand_voidaccop_builtin (enum insn_code icode, tree call)
9100 {
9101 rtx pat;
9102 rtx op0 = frv_read_argument (call, 0);
9103 rtx op1 = frv_read_argument (call, 1);
9104 rtx op2;
9105 rtx op3;
9106
9107 op0 = frv_int_to_acc (icode, 0, op0);
9108 if (! op0)
9109 return NULL_RTX;
9110
9111 op1 = frv_int_to_acc (icode, 1, op1);
9112 if (! op1)
9113 return NULL_RTX;
9114
9115 op2 = frv_matching_accg_for_acc (op0);
9116 op3 = frv_matching_accg_for_acc (op1);
9117 pat = GEN_FCN (icode) (op0, op1, op2, op3);
9118 if (! pat)
9119 return NULL_RTX;
9120
9121 emit_insn (pat);
9122 return NULL_RTX;
9123 }
9124
9125 /* Expand a __builtin_read* function. ICODE is the instruction code for the
9126 membar and TARGET_MODE is the mode that the loaded value should have. */
9127
9128 static rtx
9129 frv_expand_load_builtin (enum insn_code icode, enum machine_mode target_mode,
9130 tree call, rtx target)
9131 {
9132 rtx op0 = frv_read_argument (call, 0);
9133 rtx cookie = frv_io_address_cookie (op0);
9134
9135 if (target == 0 || !REG_P (target))
9136 target = gen_reg_rtx (target_mode);
9137 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9138 convert_move (target, op0, 1);
9139 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_READ)));
9140 cfun->machine->has_membar_p = 1;
9141 return target;
9142 }
9143
9144 /* Likewise __builtin_write* functions. */
9145
9146 static rtx
9147 frv_expand_store_builtin (enum insn_code icode, tree call)
9148 {
9149 rtx op0 = frv_read_argument (call, 0);
9150 rtx op1 = frv_read_argument (call, 1);
9151 rtx cookie = frv_io_address_cookie (op0);
9152
9153 op0 = frv_volatile_memref (insn_data[icode].operand[0].mode, op0);
9154 convert_move (op0, force_reg (insn_data[icode].operand[0].mode, op1), 1);
9155 emit_insn (GEN_FCN (icode) (copy_rtx (op0), cookie, GEN_INT (FRV_IO_WRITE)));
9156 cfun->machine->has_membar_p = 1;
9157 return NULL_RTX;
9158 }
9159
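/* For illustration only (a sketch under assumptions, not part of this file):
   user-level FRV code using the I/O builtins expanded above.  The
   __builtin_read32/__builtin_write32 spellings and prototypes follow the
   port's documentation and are assumptions here.  Each call becomes a
   volatile memory access followed by a membar carrying an I/O cookie, so
   the accesses keep their order.  */
#if 0
static unsigned int
start_and_poll (void *device_reg)
{
  __builtin_write32 (device_reg, 1);    /* kick the device */
  return __builtin_read32 (device_reg); /* read back its status */
}
#endif
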
9160 /* Expand the MDPACKH builtin. It takes four unsigned short arguments; each
9161 supplies the low half of one word of the two double-word input registers.
9162 CALL is the tree for the call and TARGET, if nonnull, suggests a good place
9163 to put the return value. */
9164
9165 static rtx
9166 frv_expand_mdpackh_builtin (tree call, rtx target)
9167 {
9168 enum insn_code icode = CODE_FOR_mdpackh;
9169 rtx pat, op0, op1;
9170 rtx arg1 = frv_read_argument (call, 0);
9171 rtx arg2 = frv_read_argument (call, 1);
9172 rtx arg3 = frv_read_argument (call, 2);
9173 rtx arg4 = frv_read_argument (call, 3);
9174
9175 target = frv_legitimize_target (icode, target);
9176 op0 = gen_reg_rtx (DImode);
9177 op1 = gen_reg_rtx (DImode);
9178
9179 /* The high half of each word is not explicitly initialized, so indicate
9180 that the input operands are not live before this point. */
9181 emit_clobber (op0);
9182 emit_clobber (op1);
9183
9184 /* Move each argument into the low half of its associated input word. */
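/* (On this big-endian target, byte offsets 2 and 6 are the low halfwords
   of the first and second words of a DImode value.)  */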
9185 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 2), arg1);
9186 emit_move_insn (simplify_gen_subreg (HImode, op0, DImode, 6), arg2);
9187 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 2), arg3);
9188 emit_move_insn (simplify_gen_subreg (HImode, op1, DImode, 6), arg4);
9189
9190 pat = GEN_FCN (icode) (target, op0, op1);
9191 if (! pat)
9192 return NULL_RTX;
9193
9194 emit_insn (pat);
9195 return target;
9196 }
9197
9198 /* Expand the MCLRACC builtin. This builtin takes a single accumulator
9199 number as argument. */
9200
9201 static rtx
9202 frv_expand_mclracc_builtin (tree call)
9203 {
9204 enum insn_code icode = CODE_FOR_mclracc;
9205 rtx pat;
9206 rtx op0 = frv_read_argument (call, 0);
9207
9208 op0 = frv_int_to_acc (icode, 0, op0);
9209 if (! op0)
9210 return NULL_RTX;
9211
9212 pat = GEN_FCN (icode) (op0);
9213 if (pat)
9214 emit_insn (pat);
9215
9216 return NULL_RTX;
9217 }
9218
9219 /* Expand builtins that take no arguments. */
9220
9221 static rtx
9222 frv_expand_noargs_builtin (enum insn_code icode)
9223 {
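/* Although these builtins take no arguments, the underlying patterns have
   a single operand, which is passed as a constant zero.  */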
9224 rtx pat = GEN_FCN (icode) (const0_rtx);
9225 if (pat)
9226 emit_insn (pat);
9227
9228 return NULL_RTX;
9229 }
9230
9231 /* Expand MRDACC and MRDACCG. These builtins take a single accumulator
9232 number or accumulator guard number as argument and return an SI integer. */
9233
9234 static rtx
9235 frv_expand_mrdacc_builtin (enum insn_code icode, tree call)
9236 {
9237 rtx pat;
9238 rtx target = gen_reg_rtx (SImode);
9239 rtx op0 = frv_read_argument (call, 0);
9240
9241 op0 = frv_int_to_acc (icode, 1, op0);
9242 if (! op0)
9243 return NULL_RTX;
9244
9245 pat = GEN_FCN (icode) (target, op0);
9246 if (! pat)
9247 return NULL_RTX;
9248
9249 emit_insn (pat);
9250 return target;
9251 }
9252
9253 /* Expand MWTACC and MWTACCG. These builtins take an accumulator or
9254 accumulator guard as their first argument and an SImode value as their
9255 second. */
9256
9257 static rtx
9258 frv_expand_mwtacc_builtin (enum insn_code icode, tree call)
9259 {
9260 rtx pat;
9261 rtx op0 = frv_read_argument (call, 0);
9262 rtx op1 = frv_read_argument (call, 1);
9263
9264 op0 = frv_int_to_acc (icode, 0, op0);
9265 if (! op0)
9266 return NULL_RTX;
9267
9268 op1 = frv_legitimize_argument (icode, 1, op1);
9269 pat = GEN_FCN (icode) (op0, op1);
9270 if (pat)
9271 emit_insn (pat);
9272
9273 return NULL_RTX;
9274 }
9275
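/* For illustration only (a sketch under assumptions, not part of this file):
   user-level FRV code exercising the accumulator builtins expanded above.
   The __MCLRACC, __MWTACC and __MRDACC spellings are assumed to be the
   user-level names of the MCLRACC, MWTACC and MRDACC builtins; the
   accumulator number is expected to be a literal constant (see
   frv_int_to_acc).  */
#if 0
static unsigned int
acc0_round_trip (unsigned int value)
{
  __MCLRACC (0);          /* clear accumulator 0 */
  __MWTACC (0, value);    /* write VALUE into accumulator 0 */
  return __MRDACC (0);    /* read it back as an SImode integer */
}
#endif
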
9276 /* Emit a move from SRC to DEST in SImode chunks. This can be used
9277 to move DImode values into and out of IACC0. */
9278
9279 static void
9280 frv_split_iacc_move (rtx dest, rtx src)
9281 {
9282 enum machine_mode inner;
9283 int i;
9284
9285 inner = GET_MODE (dest);
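/* For a DImode value this performs two SImode moves, at byte offsets 0 and 4.  */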
9286 for (i = 0; i < GET_MODE_SIZE (inner); i += GET_MODE_SIZE (SImode))
9287 emit_move_insn (simplify_gen_subreg (SImode, dest, inner, i),
9288 simplify_gen_subreg (SImode, src, inner, i));
9289 }
9290
9291 /* Expand builtins. */
9292
9293 static rtx
9294 frv_expand_builtin (tree exp,
9295 rtx target,
9296 rtx subtarget ATTRIBUTE_UNUSED,
9297 enum machine_mode mode ATTRIBUTE_UNUSED,
9298 int ignore ATTRIBUTE_UNUSED)
9299 {
9300 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
9301 unsigned fcode = (unsigned)DECL_FUNCTION_CODE (fndecl);
9302 unsigned i;
9303 struct builtin_description *d;
9304
9305 if (fcode < FRV_BUILTIN_FIRST_NONMEDIA && !TARGET_MEDIA)
9306 {
9307 error ("media functions are not available unless -mmedia is used");
9308 return NULL_RTX;
9309 }
9310
9311 switch (fcode)
9312 {
9313 case FRV_BUILTIN_MCOP1:
9314 case FRV_BUILTIN_MCOP2:
9315 case FRV_BUILTIN_MDUNPACKH:
9316 case FRV_BUILTIN_MBTOHE:
9317 if (! TARGET_MEDIA_REV1)
9318 {
9319 error ("this media function is only available on the fr500");
9320 return NULL_RTX;
9321 }
9322 break;
9323
9324 case FRV_BUILTIN_MQXMACHS:
9325 case FRV_BUILTIN_MQXMACXHS:
9326 case FRV_BUILTIN_MQMACXHS:
9327 case FRV_BUILTIN_MADDACCS:
9328 case FRV_BUILTIN_MSUBACCS:
9329 case FRV_BUILTIN_MASACCS:
9330 case FRV_BUILTIN_MDADDACCS:
9331 case FRV_BUILTIN_MDSUBACCS:
9332 case FRV_BUILTIN_MDASACCS:
9333 case FRV_BUILTIN_MABSHS:
9334 case FRV_BUILTIN_MDROTLI:
9335 case FRV_BUILTIN_MCPLHI:
9336 case FRV_BUILTIN_MCPLI:
9337 case FRV_BUILTIN_MDCUTSSI:
9338 case FRV_BUILTIN_MQSATHS:
9339 case FRV_BUILTIN_MHSETLOS:
9340 case FRV_BUILTIN_MHSETLOH:
9341 case FRV_BUILTIN_MHSETHIS:
9342 case FRV_BUILTIN_MHSETHIH:
9343 case FRV_BUILTIN_MHDSETS:
9344 case FRV_BUILTIN_MHDSETH:
9345 if (! TARGET_MEDIA_REV2)
9346 {
9347 error ("this media function is only available on the fr400"
9348 " and fr550");
9349 return NULL_RTX;
9350 }
9351 break;
9352
9353 case FRV_BUILTIN_SMASS:
9354 case FRV_BUILTIN_SMSSS:
9355 case FRV_BUILTIN_SMU:
9356 case FRV_BUILTIN_ADDSS:
9357 case FRV_BUILTIN_SUBSS:
9358 case FRV_BUILTIN_SLASS:
9359 case FRV_BUILTIN_SCUTSS:
9360 case FRV_BUILTIN_IACCreadll:
9361 case FRV_BUILTIN_IACCreadl:
9362 case FRV_BUILTIN_IACCsetll:
9363 case FRV_BUILTIN_IACCsetl:
9364 if (!TARGET_FR405_BUILTINS)
9365 {
9366 error ("this builtin function is only available"
9367 " on the fr405 and fr450");
9368 return NULL_RTX;
9369 }
9370 break;
9371
9372 case FRV_BUILTIN_PREFETCH:
9373 if (!TARGET_FR500_FR550_BUILTINS)
9374 {
9375 error ("this builtin function is only available on the fr500"
9376 " and fr550");
9377 return NULL_RTX;
9378 }
9379 break;
9380
9381 case FRV_BUILTIN_MQLCLRHS:
9382 case FRV_BUILTIN_MQLMTHS:
9383 case FRV_BUILTIN_MQSLLHI:
9384 case FRV_BUILTIN_MQSRAHI:
9385 if (!TARGET_MEDIA_FR450)
9386 {
9387 error ("this builtin function is only available on the fr450");
9388 return NULL_RTX;
9389 }
9390 break;
9391
9392 default:
9393 break;
9394 }
9395
9396 /* Expand unique builtins. */
9397
9398 switch (fcode)
9399 {
9400 case FRV_BUILTIN_MTRAP:
9401 return frv_expand_noargs_builtin (CODE_FOR_mtrap);
9402
9403 case FRV_BUILTIN_MCLRACC:
9404 return frv_expand_mclracc_builtin (exp);
9405
9406 case FRV_BUILTIN_MCLRACCA:
9407 if (TARGET_ACC_8)
9408 return frv_expand_noargs_builtin (CODE_FOR_mclracca8);
9409 else
9410 return frv_expand_noargs_builtin (CODE_FOR_mclracca4);
9411
9412 case FRV_BUILTIN_MRDACC:
9413 return frv_expand_mrdacc_builtin (CODE_FOR_mrdacc, exp);
9414
9415 case FRV_BUILTIN_MRDACCG:
9416 return frv_expand_mrdacc_builtin (CODE_FOR_mrdaccg, exp);
9417
9418 case FRV_BUILTIN_MWTACC:
9419 return frv_expand_mwtacc_builtin (CODE_FOR_mwtacc, exp);
9420
9421 case FRV_BUILTIN_MWTACCG:
9422 return frv_expand_mwtacc_builtin (CODE_FOR_mwtaccg, exp);
9423
9424 case FRV_BUILTIN_MDPACKH:
9425 return frv_expand_mdpackh_builtin (exp, target);
9426
9427 case FRV_BUILTIN_IACCreadll:
9428 {
9429 rtx src = frv_read_iacc_argument (DImode, exp, 0);
9430 if (target == 0 || !REG_P (target))
9431 target = gen_reg_rtx (DImode);
9432 frv_split_iacc_move (target, src);
9433 return target;
9434 }
9435
9436 case FRV_BUILTIN_IACCreadl:
9437 return frv_read_iacc_argument (SImode, exp, 0);
9438
9439 case FRV_BUILTIN_IACCsetll:
9440 {
9441 rtx dest = frv_read_iacc_argument (DImode, exp, 0);
9442 rtx src = frv_read_argument (exp, 1);
9443 frv_split_iacc_move (dest, force_reg (DImode, src));
9444 return 0;
9445 }
9446
9447 case FRV_BUILTIN_IACCsetl:
9448 {
9449 rtx dest = frv_read_iacc_argument (SImode, exp, 0);
9450 rtx src = frv_read_argument (exp, 1);
9451 emit_move_insn (dest, force_reg (SImode, src));
9452 return 0;
9453 }
9454
9455 default:
9456 break;
9457 }
9458
9459 /* Expand groups of builtins. */
9460
9461 for (i = 0, d = bdesc_set; i < ARRAY_SIZE (bdesc_set); i++, d++)
9462 if (d->code == fcode)
9463 return frv_expand_set_builtin (d->icode, exp, target);
9464
9465 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
9466 if (d->code == fcode)
9467 return frv_expand_unop_builtin (d->icode, exp, target);
9468
9469 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
9470 if (d->code == fcode)
9471 return frv_expand_binop_builtin (d->icode, exp, target);
9472
9473 for (i = 0, d = bdesc_cut; i < ARRAY_SIZE (bdesc_cut); i++, d++)
9474 if (d->code == fcode)
9475 return frv_expand_cut_builtin (d->icode, exp, target);
9476
9477 for (i = 0, d = bdesc_2argimm; i < ARRAY_SIZE (bdesc_2argimm); i++, d++)
9478 if (d->code == fcode)
9479 return frv_expand_binopimm_builtin (d->icode, exp, target);
9480
9481 for (i = 0, d = bdesc_void2arg; i < ARRAY_SIZE (bdesc_void2arg); i++, d++)
9482 if (d->code == fcode)
9483 return frv_expand_voidbinop_builtin (d->icode, exp);
9484
9485 for (i = 0, d = bdesc_void3arg; i < ARRAY_SIZE (bdesc_void3arg); i++, d++)
9486 if (d->code == fcode)
9487 return frv_expand_voidtriop_builtin (d->icode, exp);
9488
9489 for (i = 0, d = bdesc_voidacc; i < ARRAY_SIZE (bdesc_voidacc); i++, d++)
9490 if (d->code == fcode)
9491 return frv_expand_voidaccop_builtin (d->icode, exp);
9492
9493 for (i = 0, d = bdesc_int_void2arg;
9494 i < ARRAY_SIZE (bdesc_int_void2arg); i++, d++)
9495 if (d->code == fcode)
9496 return frv_expand_int_void2arg (d->icode, exp);
9497
9498 for (i = 0, d = bdesc_prefetches;
9499 i < ARRAY_SIZE (bdesc_prefetches); i++, d++)
9500 if (d->code == fcode)
9501 return frv_expand_prefetches (d->icode, exp);
9502
9503 for (i = 0, d = bdesc_loads; i < ARRAY_SIZE (bdesc_loads); i++, d++)
9504 if (d->code == fcode)
9505 return frv_expand_load_builtin (d->icode, TYPE_MODE (TREE_TYPE (exp)),
9506 exp, target);
9507
9508 for (i = 0, d = bdesc_stores; i < ARRAY_SIZE (bdesc_stores); i++, d++)
9509 if (d->code == fcode)
9510 return frv_expand_store_builtin (d->icode, exp);
9511
9512 return 0;
9513 }
9514
9515 static bool
9516 frv_in_small_data_p (const_tree decl)
9517 {
9518 HOST_WIDE_INT size;
9519 const_tree section_name;
9520
9521 /* Don't apply the -G flag to internal compiler structures. We
9522 should leave such structures in the main data section, partly
9523 for efficiency and partly because the size of some of them
9524 (such as C++ typeinfos) is not known until later. */
9525 if (TREE_CODE (decl) != VAR_DECL || DECL_ARTIFICIAL (decl))
9526 return false;
9527
9528 /* If we already know which section the decl should be in, see if
9529 it's a small data section. */
9530 section_name = DECL_SECTION_NAME (decl);
9531 if (section_name)
9532 {
9533 gcc_assert (TREE_CODE (section_name) == STRING_CST);
9534 if (frv_string_begins_with (section_name, ".sdata"))
9535 return true;
9536 if (frv_string_begins_with (section_name, ".sbss"))
9537 return true;
9538 return false;
9539 }
9540
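/* Otherwise apply the -G threshold: for example, with -G 8 any object of
   eight bytes or fewer is placed in small data.  */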
9541 size = int_size_in_bytes (TREE_TYPE (decl));
9542 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
9543 return true;
9544
9545 return false;
9546 }
9547 \f
9548 static bool
9549 frv_rtx_costs (rtx x,
9550 int code ATTRIBUTE_UNUSED,
9551 int outer_code ATTRIBUTE_UNUSED,
9552 int *total,
9553 bool speed ATTRIBUTE_UNUSED)
9554 {
9555 if (outer_code == MEM)
9556 {
9557 /* Don't differentiate between memory addresses. All the ones
9558 we accept have equal cost. */
9559 *total = COSTS_N_INSNS (0);
9560 return true;
9561 }
9562
9563 switch (code)
9564 {
9565 case CONST_INT:
9566 /* Make 12-bit integers really cheap. */
9567 if (IN_RANGE_P (INTVAL (x), -2048, 2047))
9568 {
9569 *total = 0;
9570 return true;
9571 }
9572 /* Fall through. */
9573
9574 case CONST:
9575 case LABEL_REF:
9576 case SYMBOL_REF:
9577 case CONST_DOUBLE:
9578 *total = COSTS_N_INSNS (2);
9579 return true;
9580
9581 case PLUS:
9582 case MINUS:
9583 case AND:
9584 case IOR:
9585 case XOR:
9586 case ASHIFT:
9587 case ASHIFTRT:
9588 case LSHIFTRT:
9589 case NOT:
9590 case NEG:
9591 case COMPARE:
9592 if (GET_MODE (x) == SImode)
9593 *total = COSTS_N_INSNS (1);
9594 else if (GET_MODE (x) == DImode)
9595 *total = COSTS_N_INSNS (2);
9596 else
9597 *total = COSTS_N_INSNS (3);
9598 return true;
9599
9600 case MULT:
9601 if (GET_MODE (x) == SImode)
9602 *total = COSTS_N_INSNS (2);
9603 else
9604 *total = COSTS_N_INSNS (6); /* guess */
9605 return true;
9606
9607 case DIV:
9608 case UDIV:
9609 case MOD:
9610 case UMOD:
9611 *total = COSTS_N_INSNS (18);
9612 return true;
9613
9614 case MEM:
9615 *total = COSTS_N_INSNS (3);
9616 return true;
9617
9618 default:
9619 return false;
9620 }
9621 }
9622 \f
9623 static void
9624 frv_asm_out_constructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9625 {
9626 switch_to_section (ctors_section);
9627 assemble_align (POINTER_SIZE);
9628 if (TARGET_FDPIC)
9629 {
9630 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9631
9632 gcc_assert (ok);
9633 return;
9634 }
9635 assemble_integer_with_op ("\t.picptr\t", symbol);
9636 }
9637
9638 static void
9639 frv_asm_out_destructor (rtx symbol, int priority ATTRIBUTE_UNUSED)
9640 {
9641 switch_to_section (dtors_section);
9642 assemble_align (POINTER_SIZE);
9643 if (TARGET_FDPIC)
9644 {
9645 int ok = frv_assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, 1);
9646
9647 gcc_assert (ok);
9648 return;
9649 }
9650 assemble_integer_with_op ("\t.picptr\t", symbol);
9651 }
9652
9653 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9654
9655 static rtx
9656 frv_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9657 int incoming ATTRIBUTE_UNUSED)
9658 {
9659 return gen_rtx_REG (Pmode, FRV_STRUCT_VALUE_REGNUM);
9660 }
9661
9662 #define TLS_BIAS (2048 - 16)
9663
9664 /* This is called from dwarf2out.c via TARGET_ASM_OUTPUT_DWARF_DTPREL.
9665 We need to emit DTP-relative relocations. */
9666
9667 static void
9668 frv_output_dwarf_dtprel (FILE *file, int size, rtx x)
9669 {
9670 gcc_assert (size == 4);
9671 fputs ("\t.picptr\ttlsmoff(", file);
9672 /* We want the unbiased TLS offset, so add the bias to the
9673 expression, such that the implicit biasing cancels out. */
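/* For example, if X is the symbol foo, this emits ".picptr tlsmoff(foo+2032)",
   since TLS_BIAS is 2048 - 16 = 2032.  */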
9674 output_addr_const (file, plus_constant (x, TLS_BIAS));
9675 fputs (")", file);
9676 }
9677
9678 #include "gt-frv.h"